Merge branch 'master' into ui-2

Jaco Greeff 2017-08-22 15:12:30 +02:00
commit 630118b5a5
145 changed files with 1645 additions and 1375 deletions

Cargo.lock

@ -125,7 +125,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "bigint"
version = "3.0.0"
version = "4.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -301,6 +301,7 @@ dependencies = [
"ethcore-util 1.8.0",
"ethjson 0.1.0",
"rlp 0.2.0",
"rlp_derive 0.1.0",
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -526,6 +527,7 @@ dependencies = [
"price-info 1.7.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.2.0",
"rlp_derive 0.1.0",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -542,7 +544,7 @@ dependencies = [
name = "ethcore-bigint"
version = "0.1.3"
dependencies = [
"bigint 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bigint 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
@ -652,6 +654,7 @@ dependencies = [
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.2.0",
"rlp_derive 0.1.0",
"serde 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -773,7 +776,6 @@ dependencies = [
"ethcore-devtools 1.8.0",
"ethcore-logger 1.8.0",
"heapsize 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1217,7 +1219,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "jsonrpc-core"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1229,7 +1231,7 @@ dependencies = [
[[package]]
name = "jsonrpc-http-server"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"hyper 0.10.0-a.0 (git+https://github.com/paritytech/hyper)",
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@ -1242,7 +1244,7 @@ dependencies = [
[[package]]
name = "jsonrpc-ipc-server"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"bytes 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@ -1255,7 +1257,7 @@ dependencies = [
[[package]]
name = "jsonrpc-macros"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"jsonrpc-pubsub 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@ -1265,7 +1267,7 @@ dependencies = [
[[package]]
name = "jsonrpc-minihttp-server"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"bytes 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@ -1280,7 +1282,7 @@ dependencies = [
[[package]]
name = "jsonrpc-pubsub"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1290,7 +1292,7 @@ dependencies = [
[[package]]
name = "jsonrpc-server-utils"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"bytes 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"globset 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1303,7 +1305,7 @@ dependencies = [
[[package]]
name = "jsonrpc-tcp-server"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"bytes 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@ -1317,7 +1319,7 @@ dependencies = [
[[package]]
name = "jsonrpc-ws-server"
version = "7.0.0"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#4d3ec22c7aba426988a678b489b2791e95283699"
source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#b5490782884218c5ccf74cd61e54904cb3a3aeed"
dependencies = [
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@ -1860,6 +1862,7 @@ dependencies = [
"fetch 0.1.0",
"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"futures-cpupool 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"jsonrpc-http-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1971,6 +1974,7 @@ dependencies = [
"fetch 0.1.0",
"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"futures-cpupool 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"jsonrpc-http-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"jsonrpc-ipc-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@ -2053,7 +2057,7 @@ dependencies = [
[[package]]
name = "parity-ui-precompiled"
version = "1.4.0"
source = "git+https://github.com/paritytech/js-precompiled.git#dd9b92d9d8c244678e15163347f9adb2e2560959"
source = "git+https://github.com/paritytech/js-precompiled.git#416ced84c23b1a776d53ee4a3023eb4eb4736cf8"
dependencies = [
"parity-dapps-glue 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2305,7 +2309,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "quote"
version = "0.3.10"
version = "0.3.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -2396,6 +2400,15 @@ dependencies = [
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rlp_derive"
version = "0.1.0"
dependencies = [
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.2.0",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rocksdb"
version = "0.4.5"
@ -2453,7 +2466,7 @@ dependencies = [
name = "rpc-cli"
version = "1.4.0"
dependencies = [
"bigint 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bigint 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-util 1.8.0",
"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-rpc 1.8.0",
@ -2588,7 +2601,7 @@ name = "serde_derive"
version = "1.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive_internals 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2730,7 +2743,7 @@ name = "syn"
version = "0.11.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"quote 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
"synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -3231,7 +3244,7 @@ dependencies = [
"checksum backtrace-sys 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "3a0d842ea781ce92be2bf78a9b38883948542749640b8378b3b2f03d1fd9f1ff"
"checksum base-x 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2f59103b47307f76e03bef1633aec7fa9e29bfb5aa6daf5a334f94233c71f6c1"
"checksum base32 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1b9605ba46d61df0410d8ac686b0007add8172eba90e8e909c347856fe794d8c"
"checksum bigint 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d0673c930652d3d4d6dcd5c45b5db4fa5f8f33994d7323618c43c083b223e8c"
"checksum bigint 4.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2b45beaa0727835a98df09295d4250afc52c3f7d375d560997ae942c95b98ceb"
"checksum bincode 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e103c8b299b28a9c6990458b7013dc4a8356a9b854c51b9883241f5866fac36e"
"checksum bit-set 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e6e1e6fb1c9e3d6fcdec57216a74eaa03e41f52a22f13a16438251d8e88b89da"
"checksum bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9bf6104718e80d7b26a68fdbacff3481cfc05df670821affc7e9cbc1884400c"
@ -3384,7 +3397,7 @@ dependencies = [
"checksum quasi_macros 0.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "29cec87bc2816766d7e4168302d505dd06b0a825aed41b00633d296e922e02dd"
"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
"checksum quine-mc_cluskey 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6683b0e23d80813b1a535841f0048c1537d3f86d63c999e8373b39a9b0eb74a"
"checksum quote 0.3.10 (registry+https://github.com/rust-lang/crates.io-index)" = "6732e32663c9c271bfc7c1823486b471f18c47a2dbf87c066897b7b51afc83be"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
"checksum rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "2791d88c6defac799c3f20d74f094ca33b9332612d9aef9078519c82e4fe04a5"
"checksum rayon 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8c83adcb08e5b922e804fe1918142b422602ef11f2fd670b0b52218cb5984a20"
"checksum rayon-core 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "767d91bacddf07d442fe39257bf04fd95897d1c47c545d009f6beb03efd038f8"


@ -41,7 +41,7 @@ Parity's current release is 1.7. You can download it at https://github.com/parit
## Build dependencies
**Parity requires Rust version 1.18.0 to build**
**Parity requires Rust version 1.19.0 to build**
We recommend installing Rust through [rustup](https://www.rustup.rs/). If you don't already have rustup, you can install it like this:


@ -27,6 +27,7 @@ time = "0.1.35"
unicase = "1.3"
url = "1.0"
zip = { version = "0.1", default-features = false }
itertools = "0.5"
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }


@ -14,7 +14,6 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::collections::BTreeMap;
use std::path::PathBuf;
use std::sync::Arc;
@ -30,8 +29,8 @@ use {WebProxyTokens, ParentFrameSettings};
mod app;
mod cache;
mod fs;
mod ui;
pub mod fs;
pub mod fetcher;
pub mod manifest;
@ -64,9 +63,10 @@ pub fn all_endpoints<F: Fetch>(
web_proxy_tokens: Arc<WebProxyTokens>,
remote: Remote,
fetch: F,
) -> Endpoints {
) -> (Vec<String>, Endpoints) {
// fetch fs dapps at first to avoid overwriting builtins
let mut pages = fs::local_endpoints(dapps_path, embeddable.clone());
let mut pages = fs::local_endpoints(dapps_path.clone(), embeddable.clone());
let local_endpoints: Vec<String> = pages.keys().cloned().collect();
for path in extra_dapps {
if let Some((id, endpoint)) = fs::local_endpoint(path.clone(), embeddable.clone()) {
pages.insert(id, endpoint);
@ -80,10 +80,10 @@ pub fn all_endpoints<F: Fetch>(
pages.insert("proxy".into(), ProxyPac::boxed(embeddable.clone(), dapps_domain.to_owned()));
pages.insert(WEB_PATH.into(), Web::boxed(embeddable.clone(), web_proxy_tokens.clone(), remote.clone(), fetch.clone()));
Arc::new(pages)
(local_endpoints, pages)
}
fn insert<T : WebApp + Default + 'static>(pages: &mut BTreeMap<String, Box<Endpoint>>, id: &str, embed_at: Embeddable) {
fn insert<T : WebApp + Default + 'static>(pages: &mut Endpoints, id: &str, embed_at: Embeddable) {
pages.insert(id.to_owned(), Box::new(match embed_at {
Embeddable::Yes(address) => PageEndpoint::new_safe_to_embed(T::default(), address),
Embeddable::No => PageEndpoint::new(T::default()),


@ -16,7 +16,6 @@
//! URL Endpoint traits
use std::sync::Arc;
use std::collections::BTreeMap;
use hyper::{self, server, net};
@ -39,7 +38,7 @@ pub struct EndpointInfo {
pub icon_url: String,
}
pub type Endpoints = Arc<BTreeMap<String, Box<Endpoint>>>;
pub type Endpoints = BTreeMap<String, Box<Endpoint>>;
pub type Handler = server::Handler<net::HttpStream> + Send;
pub trait Endpoint : Send + Sync {


@ -31,8 +31,7 @@ pub use self::redirect::Redirection;
pub use self::streaming::StreamingHandler;
use std::iter;
use util::Itertools;
use itertools::Itertools;
use url::Url;
use hyper::{server, header, net, uri};
use {apps, address, Embeddable};
@ -67,10 +66,20 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
// Allow fonts from data: and HTTPS.
b"font-src 'self' data: https:;".to_vec(),
// Allow inline scripts and scripts eval (webpack/jsconsole)
b"script-src 'self' 'unsafe-inline' 'unsafe-eval';".to_vec(),
// Same restrictions as script-src (fallback) with additional
{
let script_src = embeddable_on.as_ref()
.map(|e| e.extra_script_src.iter()
.map(|&(ref host, port)| address(host, port))
.join(" ")
).unwrap_or_default();
format!(
"script-src 'self' 'unsafe-inline' 'unsafe-eval' {};",
script_src
).into_bytes()
},
// Same restrictions as script-src with additional
// blob: that is required for camera access (worker)
b"worker-src 'self' 'unsafe-inline' 'unsafe-eval' blob: ;".to_vec(),
b"worker-src 'self' 'unsafe-inline' 'unsafe-eval' https: blob:;".to_vec(),
// Restrict everything else to the same origin.
b"default-src 'self';".to_vec(),
// Run in sandbox mode (although it's not fully safe since we allow same-origin and script)
@ -90,7 +99,7 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
.into_iter()
.chain(embed.extra_embed_on
.iter()
.map(|&(ref host, port)| format!("{}:{}", host, port))
.map(|&(ref host, port)| address(host, port))
);
let ancestors = if embed.host == "127.0.0.1" {
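
The rebuilt `script-src` directive above is assembled from the optional `extra_script_src` host/port pairs. A minimal, self-contained sketch of that assembly follows; the host and port are made up for illustration, and `collect`/`join` stand in for the `Itertools::join` call used in the real code:

```rust
// Sketch only: shows how extra (host, port) pairs end up in the CSP directive.
fn address(host: &str, port: u16) -> String {
    // Same formatting as the `address` helper used above.
    format!("{}:{}", host, port)
}

fn main() {
    let extra_script_src: Vec<(String, u16)> = vec![("127.0.0.1".into(), 8000)];

    // Space-separated list of additional allowed script sources.
    let script_src = extra_script_src
        .iter()
        .map(|&(ref host, port)| address(host, port))
        .collect::<Vec<_>>()
        .join(" ");

    let directive = format!("script-src 'self' 'unsafe-inline' 'unsafe-eval' {};", script_src);
    assert_eq!(directive, "script-src 'self' 'unsafe-inline' 'unsafe-eval' 127.0.0.1:8000;");
}
```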


@ -22,6 +22,7 @@
extern crate base32;
extern crate futures;
extern crate futures_cpupool;
extern crate itertools;
extern crate linked_hash_map;
extern crate mime_guess;
extern crate ntp;
@ -69,9 +70,11 @@ mod web;
#[cfg(test)]
mod tests;
use std::collections::HashMap;
use std::mem;
use std::path::PathBuf;
use std::sync::Arc;
use std::collections::HashMap;
use util::RwLock;
use jsonrpc_http_server::{self as http, hyper, Origin};
@ -101,31 +104,54 @@ impl<F> WebProxyTokens for F where F: Fn(String) -> Option<Origin> + Send + Sync
}
/// Current supported endpoints.
#[derive(Default, Clone)]
pub struct Endpoints {
endpoints: endpoint::Endpoints,
local_endpoints: Arc<RwLock<Vec<String>>>,
endpoints: Arc<RwLock<endpoint::Endpoints>>,
dapps_path: PathBuf,
embeddable: Option<ParentFrameSettings>,
}
impl Endpoints {
/// Returns a current list of app endpoints.
pub fn list(&self) -> Vec<apps::App> {
self.endpoints.iter().filter_map(|(ref k, ref e)| {
self.endpoints.read().iter().filter_map(|(ref k, ref e)| {
e.info().map(|ref info| apps::App::from_info(k, info))
}).collect()
}
/// Check for any changes in the local dapps folder and update.
pub fn refresh_local_dapps(&self) {
let new_local = apps::fs::local_endpoints(&self.dapps_path, self.embeddable.clone());
let old_local = mem::replace(&mut *self.local_endpoints.write(), new_local.keys().cloned().collect());
let (_, to_remove): (_, Vec<_>) = old_local
.into_iter()
.partition(|k| new_local.contains_key(&k.clone()));
let mut endpoints = self.endpoints.write();
// remove the dead dapps
for k in to_remove {
endpoints.remove(&k);
}
// new dapps to be added
for (k, v) in new_local {
if !endpoints.contains_key(&k) {
endpoints.insert(k, v);
}
}
}
}
/// Dapps server as `jsonrpc-http-server` request middleware.
pub struct Middleware {
endpoints: Endpoints,
router: router::Router,
endpoints: endpoint::Endpoints,
}
impl Middleware {
/// Get local endpoints handle.
pub fn endpoints(&self) -> Endpoints {
Endpoints {
endpoints: self.endpoints.clone(),
}
pub fn endpoints(&self) -> &Endpoints {
&self.endpoints
}
/// Creates new middleware for UI server.
@ -164,8 +190,8 @@ impl Middleware {
);
Middleware {
router: router,
endpoints: Default::default(),
router: router,
}
}
@ -176,6 +202,7 @@ impl Middleware {
remote: Remote,
ui_address: Option<(String, u16)>,
extra_embed_on: Vec<(String, u16)>,
extra_script_src: Vec<(String, u16)>,
dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>,
dapps_domain: &str,
@ -184,15 +211,15 @@ impl Middleware {
web_proxy_tokens: Arc<WebProxyTokens>,
fetch: F,
) -> Self {
let embeddable = as_embeddable(ui_address, extra_embed_on, dapps_domain);
let embeddable = as_embeddable(ui_address, extra_embed_on, extra_script_src, dapps_domain);
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(
hash_fetch::urlhint::URLHintContract::new(registrar),
sync_status.clone(),
remote.clone(),
fetch.clone(),
).embeddable_on(embeddable.clone()).allow_dapps(true));
let endpoints = apps::all_endpoints(
dapps_path,
let (local_endpoints, endpoints) = apps::all_endpoints(
dapps_path.clone(),
extra_dapps,
dapps_domain,
embeddable.clone(),
@ -200,6 +227,12 @@ impl Middleware {
remote.clone(),
fetch.clone(),
);
let endpoints = Endpoints {
endpoints: Arc::new(RwLock::new(endpoints)),
dapps_path,
local_endpoints: Arc::new(RwLock::new(local_endpoints)),
embeddable: embeddable.clone(),
};
let special = {
let mut special = special_endpoints(
@ -225,8 +258,8 @@ impl Middleware {
);
Middleware {
router: router,
endpoints: endpoints,
endpoints,
router,
}
}
}
@ -263,12 +296,14 @@ fn address(host: &str, port: u16) -> String {
fn as_embeddable(
ui_address: Option<(String, u16)>,
extra_embed_on: Vec<(String, u16)>,
extra_script_src: Vec<(String, u16)>,
dapps_domain: &str,
) -> Option<ParentFrameSettings> {
ui_address.map(|(host, port)| ParentFrameSettings {
host,
port,
extra_embed_on,
extra_script_src,
dapps_domain: dapps_domain.to_owned(),
})
}
@ -289,8 +324,10 @@ pub struct ParentFrameSettings {
pub host: String,
/// Port
pub port: u16,
/// Additional pages the pages can be embedded on.
/// Additional URLs the dapps can be embedded on.
pub extra_embed_on: Vec<(String, u16)>,
/// Additional URLs the dapp scripts can be loaded from.
pub extra_script_src: Vec<(String, u16)>,
/// Dapps Domain (web3.site)
pub dapps_domain: String,
}
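
The `refresh_local_dapps` method added above uses `partition` to split the previously known dapp ids into those still present on disk and those that should be removed from the endpoint map. A minimal sketch of that bookkeeping, with made-up dapp ids:

```rust
// Sketch only: mirrors the partition step in `Endpoints::refresh_local_dapps`.
use std::collections::BTreeMap;

fn main() {
    // What a fresh scan of the dapps folder found.
    let new_local: BTreeMap<String, ()> =
        ["home", "wallet"].iter().map(|id| (id.to_string(), ())).collect();

    // What was known before the scan.
    let old_local: Vec<String> = vec!["home".into(), "status".into()];

    // Ids still present are kept; the rest are dead and get removed.
    let (still_present, to_remove): (Vec<_>, Vec<_>) = old_local
        .into_iter()
        .partition(|id| new_local.contains_key(id));

    assert_eq!(still_present, vec!["home".to_string()]);
    assert_eq!(to_remove, vec!["status".to_string()]);
}
```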


@ -28,7 +28,8 @@ use jsonrpc_http_server as http;
use apps;
use apps::fetcher::Fetcher;
use endpoint::{Endpoint, Endpoints, EndpointPath, Handler};
use endpoint::{Endpoint, EndpointPath, Handler};
use Endpoints;
use handlers;
use Embeddable;
@ -50,26 +51,27 @@ pub struct Router {
dapps_domain: String,
}
impl http::RequestMiddleware for Router {
fn on_request(&self, req: &server::Request<HttpStream>, control: &Control) -> http::RequestMiddlewareAction {
impl Router {
fn resolve_request(&self, req: &server::Request<HttpStream>, control: Control, refresh_dapps: bool) -> (bool, Option<Box<Handler>>) {
// Choose proper handler depending on path / domain
let url = handlers::extract_url(req);
let endpoint = extract_endpoint(&url, &self.dapps_domain);
let referer = extract_referer_endpoint(req, &self.dapps_domain);
let is_utils = endpoint.1 == SpecialEndpoint::Utils;
let is_origin_set = req.headers().get::<header::Origin>().is_some();
let is_get_request = *req.method() == hyper::Method::Get;
let is_head_request = *req.method() == hyper::Method::Head;
let has_dapp = |dapp: &str| self.endpoints
.as_ref()
.map_or(false, |endpoints| endpoints.endpoints.read().contains_key(dapp));
trace!(target: "dapps", "Routing request to {:?}. Details: {:?}", url, req);
let control = control.clone();
debug!(target: "dapps", "Handling endpoint request: {:?}", endpoint);
let handler: Option<Box<Handler>> = match (endpoint.0, endpoint.1, referer) {
(is_utils, match (endpoint.0, endpoint.1, referer) {
// Handle invalid web requests that we can recover from
(ref path, SpecialEndpoint::None, Some((ref referer, ref referer_url)))
if referer.app_id == apps::WEB_PATH
&& self.endpoints.as_ref().map(|ep| ep.contains_key(apps::WEB_PATH)).unwrap_or(false)
&& has_dapp(apps::WEB_PATH)
&& !is_web_endpoint(path)
=>
{
@ -88,11 +90,13 @@ impl http::RequestMiddleware for Router {
.map(|special| special.to_async_handler(path.clone().unwrap_or_default(), control))
},
// Then delegate to dapp
(Some(ref path), _, _) if self.endpoints.as_ref().map(|ep| ep.contains_key(&path.app_id)).unwrap_or(false) => {
(Some(ref path), _, _) if has_dapp(&path.app_id) => {
trace!(target: "dapps", "Resolving to local/builtin dapp.");
Some(self.endpoints
.as_ref()
.expect("endpoints known to be set; qed")
.endpoints
.read()
.get(&path.app_id)
.expect("endpoints known to contain key; qed")
.to_async_handler(path.clone(), control))
@ -110,6 +114,11 @@ impl http::RequestMiddleware for Router {
=>
{
trace!(target: "dapps", "Resolving to 404.");
if refresh_dapps {
debug!(target: "dapps", "Refreshing dapps and re-trying.");
self.endpoints.as_ref().map(|endpoints| endpoints.refresh_local_dapps());
return self.resolve_request(req, control, false)
} else {
Some(Box::new(handlers::ContentHandler::error(
hyper::StatusCode::NotFound,
"404 Not Found",
@ -117,6 +126,7 @@ impl http::RequestMiddleware for Router {
None,
self.embeddable_on.clone(),
)))
}
},
// Any other GET|HEAD requests to home page.
_ if (is_get_request || is_head_request) && self.special.contains_key(&SpecialEndpoint::Home) => {
@ -130,8 +140,15 @@ impl http::RequestMiddleware for Router {
trace!(target: "dapps", "Resolving to RPC call.");
None
}
};
})
}
}
impl http::RequestMiddleware for Router {
fn on_request(&self, req: &server::Request<HttpStream>, control: &Control) -> http::RequestMiddlewareAction {
let control = control.clone();
let is_origin_set = req.headers().get::<header::Origin>().is_some();
let (is_utils, handler) = self.resolve_request(req, control, self.endpoints.is_some());
match handler {
Some(handler) => http::RequestMiddlewareAction::Respond {
should_validate_hosts: !is_utils,


@ -39,7 +39,7 @@ fn should_resolve_dapp() {
// then
response.assert_status("HTTP/1.1 404 Not Found");
assert_eq!(registrar.calls.lock().len(), 2);
assert_eq!(registrar.calls.lock().len(), 4);
assert_security_headers_for_embed(&response.headers);
}


@ -260,6 +260,7 @@ impl Server {
remote,
signer_address,
vec![],
vec![],
dapps_path,
extra_dapps,
DAPPS_DOMAIN.into(),


@ -204,4 +204,3 @@ fn should_serve_utils() {
assert_eq!(response.body.contains("function(){"), true);
assert_security_headers(&response.headers);
}


@ -241,5 +241,3 @@ impl<F: Fetch> server::Handler<net::HttpStream> for WebHandler<F> {
}
}
}


@ -47,6 +47,7 @@ num_cpus = "1.2"
price-info = { path = "../price-info" }
rand = "0.3"
rlp = { path = "../util/rlp" }
rlp_derive = { path = "../util/rlp_derive" }
rust-crypto = "0.2.34"
rustc-hex = "1.0"
semver = "0.6"


@ -21,6 +21,7 @@ ethcore-devtools = { path = "../../devtools" }
evm = { path = "../evm" }
vm = { path = "../vm" }
rlp = { path = "../../util/rlp" }
rlp_derive = { path = "../../util/rlp_derive" }
time = "0.1"
smallvec = "0.4"
futures = "0.1"


@ -100,8 +100,8 @@ pub trait LightChainClient: Send + Sync {
/// Get an iterator over a block and its ancestry.
fn ancestry_iter<'a>(&'a self, start: BlockId) -> Box<Iterator<Item=encoded::Header> + 'a>;
/// Get the signing network ID.
fn signing_network_id(&self) -> Option<u64>;
/// Get the signing chain ID.
fn signing_chain_id(&self) -> Option<u64>;
/// Get environment info for execution at a given block.
/// Fails if that block's header is not stored.
@ -260,9 +260,9 @@ impl Client {
self.chain.ancestry_iter(start)
}
/// Get the signing network id.
pub fn signing_network_id(&self) -> Option<u64> {
self.engine.signing_network_id(&self.latest_env_info())
/// Get the signing chain id.
pub fn signing_chain_id(&self) -> Option<u64> {
self.engine.signing_chain_id(&self.latest_env_info())
}
/// Flush the header queue.
@ -448,8 +448,8 @@ impl LightChainClient for Client {
Box::new(Client::ancestry_iter(self, start))
}
fn signing_network_id(&self) -> Option<u64> {
Client::signing_network_id(self)
fn signing_chain_id(&self) -> Option<u64> {
Client::signing_chain_id(self)
}
fn env_info(&self, id: BlockId) -> Option<EnvInfo> {


@ -76,6 +76,8 @@ extern crate futures;
extern crate itertools;
extern crate rand;
extern crate rlp;
#[macro_use]
extern crate rlp_derive;
extern crate serde;
extern crate smallvec;
extern crate stats;


@ -650,7 +650,7 @@ pub mod header {
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
/// Potentially incomplete headers request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Start block.
pub start: Field<HashOrNumber>,
@ -662,27 +662,6 @@ pub mod header {
pub reverse: bool,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
start: rlp.val_at(0)?,
skip: rlp.val_at(1)?,
max: rlp.val_at(2)?,
reverse: rlp.val_at(3)?
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4)
.append(&self.start)
.append(&self.skip)
.append(&self.max)
.append(&self.reverse);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@ -784,26 +763,12 @@ pub mod header_proof {
use util::{Bytes, U256, H256};
/// Potentially incomplete header proof request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block number.
pub num: Field<u64>,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
num: rlp.val_at(0)?,
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(1).append(&self.num);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@ -889,30 +854,15 @@ pub mod header_proof {
/// Request and response for transaction index.
pub mod transaction_index {
use super::{Field, NoSuchOutput, OutputKind, Output};
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::H256;
/// Potentially incomplete transaction index request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Transaction hash to get index for.
pub hash: Field<H256>,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
hash: rlp.val_at(0)?,
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(1).append(&self.hash);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@ -959,7 +909,7 @@ pub mod transaction_index {
}
/// The output of a request for transaction index.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Response {
/// Block number.
pub num: u64,
@ -976,55 +926,21 @@ pub mod transaction_index {
f(1, Output::Hash(self.hash));
}
}
impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
num: rlp.val_at(0)?,
hash: rlp.val_at(1)?,
index: rlp.val_at(2)?,
})
}
}
impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3)
.append(&self.num)
.append(&self.hash)
.append(&self.index);
}
}
}
/// Request and response for block receipts
pub mod block_receipts {
use super::{Field, NoSuchOutput, OutputKind, Output};
use ethcore::receipt::Receipt;
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::H256;
/// Potentially incomplete block receipts request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block hash to get receipts for.
pub hash: Field<H256>,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
hash: rlp.val_at(0)?,
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(1).append(&self.hash);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@ -1068,7 +984,7 @@ pub mod block_receipts {
}
/// The output of a request for block receipts.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct Response {
/// The block receipts.
pub receipts: Vec<Receipt>
@ -1078,20 +994,6 @@ pub mod block_receipts {
/// Fill reusable outputs by providing them to the function.
fn fill_outputs<F>(&self, _: F) where F: FnMut(usize, Output) {}
}
impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
receipts: rlp.as_list()?,
})
}
}
impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.append_list(&self.receipts);
}
}
}
/// Request and response for a block body
@ -1102,26 +1004,12 @@ pub mod block_body {
use util::H256;
/// Potentially incomplete block body request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block hash to get receipts for.
pub hash: Field<H256>,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
hash: rlp.val_at(0)?,
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(1).append(&self.hash);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@ -1201,11 +1089,10 @@ pub mod block_body {
/// A request for an account proof.
pub mod account {
use super::{Field, NoSuchOutput, OutputKind, Output};
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::{Bytes, U256, H256};
/// Potentially incomplete request for an account proof.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block hash to request state proof for.
pub block_hash: Field<H256>,
@ -1213,23 +1100,6 @@ pub mod account {
pub address_hash: Field<H256>,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
block_hash: rlp.val_at(0)?,
address_hash: rlp.val_at(1)?,
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2)
.append(&self.block_hash)
.append(&self.address_hash);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@ -1292,7 +1162,7 @@ pub mod account {
}
/// The output of a request for an account state proof.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Response {
/// Inclusion/exclusion proof
pub proof: Vec<Bytes>,
@ -1313,39 +1183,15 @@ pub mod account {
f(1, Output::Hash(self.storage_root));
}
}
impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
proof: rlp.list_at(0)?,
nonce: rlp.val_at(1)?,
balance: rlp.val_at(2)?,
code_hash: rlp.val_at(3)?,
storage_root: rlp.val_at(4)?
})
}
}
impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(5)
.append_list::<Vec<u8>,_>(&self.proof[..])
.append(&self.nonce)
.append(&self.balance)
.append(&self.code_hash)
.append(&self.storage_root);
}
}
}
/// A request for a storage proof.
pub mod storage {
use super::{Field, NoSuchOutput, OutputKind, Output};
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::{Bytes, H256};
/// Potentially incomplete request for an storage proof.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// Block hash to request state proof for.
pub block_hash: Field<H256>,
@ -1355,25 +1201,6 @@ pub mod storage {
pub key_hash: Field<H256>,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
block_hash: rlp.val_at(0)?,
address_hash: rlp.val_at(1)?,
key_hash: rlp.val_at(2)?,
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3)
.append(&self.block_hash)
.append(&self.address_hash)
.append(&self.key_hash);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@ -1450,7 +1277,7 @@ pub mod storage {
}
/// The output of a request for an account state proof.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Response {
/// Inclusion/exclusion proof
pub proof: Vec<Bytes>,
@ -1464,33 +1291,15 @@ pub mod storage {
f(0, Output::Hash(self.value));
}
}
impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
proof: rlp.list_at(0)?,
value: rlp.val_at(1)?,
})
}
}
impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2)
.append_list::<Vec<u8>,_>(&self.proof[..])
.append(&self.value);
}
}
}
/// A request for contract code.
pub mod contract_code {
use super::{Field, NoSuchOutput, OutputKind, Output};
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::{Bytes, H256};
/// Potentially incomplete contract code request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// The block hash to request the state for.
pub block_hash: Field<H256>,
@ -1498,23 +1307,6 @@ pub mod contract_code {
pub code_hash: Field<H256>,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
block_hash: rlp.val_at(0)?,
code_hash: rlp.val_at(1)?,
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2)
.append(&self.block_hash)
.append(&self.code_hash);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;
@ -1573,7 +1365,7 @@ pub mod contract_code {
}
/// The output of a request for
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct Response {
/// The requested code.
pub code: Bytes,
@ -1583,21 +1375,6 @@ pub mod contract_code {
/// Fill reusable outputs by providing them to the function.
fn fill_outputs<F>(&self, _: F) where F: FnMut(usize, Output) {}
}
impl Decodable for Response {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Response {
code: rlp.as_val()?,
})
}
}
impl Encodable for Response {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.code);
}
}
}
/// A request for proof of execution.
@ -1608,7 +1385,7 @@ pub mod execution {
use util::{Bytes, Address, U256, H256, DBValue};
/// Potentially incomplete execution proof request.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct Incomplete {
/// The block hash to request the state for.
pub block_hash: Field<H256>,
@ -1626,38 +1403,6 @@ pub mod execution {
pub data: Bytes,
}
impl Decodable for Incomplete {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Incomplete {
block_hash: rlp.val_at(0)?,
from: rlp.val_at(1)?,
action: rlp.val_at(2)?,
gas: rlp.val_at(3)?,
gas_price: rlp.val_at(4)?,
value: rlp.val_at(5)?,
data: rlp.val_at(6)?,
})
}
}
impl Encodable for Incomplete {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(7)
.append(&self.block_hash)
.append(&self.from);
match self.action {
Action::Create => s.append_empty_data(),
Action::Call(ref addr) => s.append(addr),
};
s.append(&self.gas)
.append(&self.gas_price)
.append(&self.value)
.append(&self.data);
}
}
impl super::IncompleteRequest for Incomplete {
type Complete = Complete;
type Response = Response;


@ -519,6 +519,11 @@ impl AccountProvider {
}
}
/// Returns account public key.
pub fn account_public(&self, address: Address, password: &str) -> Result<Public, Error> {
self.sstore.public(&self.sstore.account_ref(&address)?, password)
}
/// Returns each account along with name and meta.
pub fn set_account_name(&self, address: Address, name: String) -> Result<(), Error> {
self.sstore.set_name(&self.sstore.account_ref(&address)?, name)?;
@ -697,6 +702,13 @@ impl AccountProvider {
Ok(self.sstore.decrypt(&account, &password, shared_mac, message)?)
}
/// Agree on shared key.
pub fn agree(&self, address: Address, password: Option<String>, other_public: &Public) -> Result<Secret, SignError> {
let account = self.sstore.account_ref(&address)?;
let password = password.map(Ok).unwrap_or_else(|| self.password(&account))?;
Ok(self.sstore.agree(&account, &password, other_public)?)
}
/// Returns the underlying `SecretStore` reference if one exists.
pub fn list_geth_accounts(&self, testnet: bool) -> Vec<Address> {
self.sstore.list_geth_accounts(testnet).into_iter().map(|a| Address::from(a).into()).collect()


@ -19,6 +19,7 @@
use std::collections::{HashMap, HashSet};
use std::sync::Arc;
use std::mem;
use itertools::Itertools;
use bloomchain as bc;
use util::*;
use rlp::*;


@ -25,11 +25,9 @@ use engines::epoch::{Transition as EpochTransition};
use header::BlockNumber;
use receipt::Receipt;
use rlp::*;
use util::*;
use util::{HeapSizeOf, H256, H264, U256};
use util::kvdb::PREFIX_LEN as DB_PREFIX_LEN;
/// Represents index of extra data in database
#[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
pub enum ExtrasIndex {
@ -184,7 +182,7 @@ impl Key<EpochTransitions> for u64 {
}
/// Familial details concerning a block
#[derive(Debug, Clone)]
#[derive(Debug, Clone, RlpEncodable, RlpDecodable)]
pub struct BlockDetails {
/// Block number
pub number: BlockNumber,
@ -202,30 +200,8 @@ impl HeapSizeOf for BlockDetails {
}
}
impl Decodable for BlockDetails {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let details = BlockDetails {
number: rlp.val_at(0)?,
total_difficulty: rlp.val_at(1)?,
parent: rlp.val_at(2)?,
children: rlp.list_at(3)?,
};
Ok(details)
}
}
impl Encodable for BlockDetails {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.number);
s.append(&self.total_difficulty);
s.append(&self.parent);
s.append_list(&self.children);
}
}
/// Represents address of certain transaction within block
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq, Clone, RlpEncodable, RlpDecodable)]
pub struct TransactionAddress {
/// Block hash
pub block_hash: H256,
@ -237,27 +213,8 @@ impl HeapSizeOf for TransactionAddress {
fn heap_size_of_children(&self) -> usize { 0 }
}
impl Decodable for TransactionAddress {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let tx_address = TransactionAddress {
block_hash: rlp.val_at(0)?,
index: rlp.val_at(1)?,
};
Ok(tx_address)
}
}
impl Encodable for TransactionAddress {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2);
s.append(&self.block_hash);
s.append(&self.index);
}
}
/// Contains all block receipts.
#[derive(Clone)]
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct BlockReceipts {
pub receipts: Vec<Receipt>,
}
@ -270,20 +227,6 @@ impl BlockReceipts {
}
}
impl Decodable for BlockReceipts {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(BlockReceipts {
receipts: rlp.as_list()?,
})
}
}
impl Encodable for BlockReceipts {
fn rlp_append(&self, s: &mut RlpStream) {
s.append_list(&self.receipts);
}
}
impl HeapSizeOf for BlockReceipts {
fn heap_size_of_children(&self) -> usize {
self.receipts.heap_size_of_children()
@ -291,27 +234,12 @@ impl HeapSizeOf for BlockReceipts {
}
/// Candidate transitions to an epoch with specific number.
#[derive(Clone)]
#[derive(Clone, RlpEncodable, RlpDecodable)]
pub struct EpochTransitions {
pub number: u64,
pub candidates: Vec<EpochTransition>,
}
impl Encodable for EpochTransitions {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2).append(&self.number).append_list(&self.candidates);
}
}
impl Decodable for EpochTransitions {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(EpochTransitions {
number: rlp.val_at(0)?,
candidates: rlp.list_at(1)?,
})
}
}
#[cfg(test)]
mod tests {
use rlp::*;


@ -15,12 +15,11 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use bloomchain as bc;
use rlp::*;
use util::HeapSizeOf;
use basic_types::LogBloom;
/// Helper structure representing bloom of the trace.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct Bloom(LogBloom);
impl From<LogBloom> for Bloom {
@ -43,18 +42,6 @@ impl Into<bc::Bloom> for Bloom {
}
}
impl Decodable for Bloom {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
LogBloom::decode(rlp).map(Bloom)
}
}
impl Encodable for Bloom {
fn rlp_append(&self, s: &mut RlpStream) {
Encodable::rlp_append(&self.0, s)
}
}
impl HeapSizeOf for Bloom {
fn heap_size_of_children(&self) -> usize {
0


@ -20,9 +20,10 @@ use std::sync::{Arc, Weak};
use std::sync::atomic::{AtomicUsize, AtomicBool, Ordering as AtomicOrdering};
use std::time::{Instant};
use time::precise_time_ns;
use itertools::Itertools;
// util
use util::{Bytes, PerfTimer, Itertools, Mutex, RwLock, MutexGuard, Hashable};
use util::{Bytes, PerfTimer, Mutex, RwLock, MutexGuard, Hashable};
use util::{journaldb, DBValue, TrieFactory, Trie};
use util::{U256, H256, Address, H2048};
use util::trie::TrieSpec;
@ -1112,20 +1113,9 @@ impl Client {
}.fake_sign(from)
}
fn do_call(&self, env_info: &EnvInfo, state: &mut State<StateDB>, increase_balance: bool, t: &SignedTransaction, analytics: CallAnalytics) -> Result<Executed, CallError> {
fn do_call(&self, env_info: &EnvInfo, state: &mut State<StateDB>, t: &SignedTransaction, analytics: CallAnalytics) -> Result<Executed, CallError> {
let original_state = if analytics.state_diffing { Some(state.clone()) } else { None };
// give the sender a sufficient balance (if calling in pending block)
if increase_balance {
let sender = t.sender();
let balance = state.balance(&sender).map_err(ExecutionError::from)?;
let needed_balance = t.value + t.gas * t.gas_price;
if balance < needed_balance {
state.add_balance(&sender, &(needed_balance - balance), state::CleanupMode::NoEmpty)
.map_err(ExecutionError::from)?;
}
}
let options = TransactOptions { tracing: analytics.transaction_tracing, vm_tracing: analytics.vm_tracing, check_nonce: false };
let mut ret = Executive::new(state, env_info, &*self.engine).transact_virtual(t, options)?;
@ -1167,7 +1157,7 @@ impl BlockChainClient for Client {
// that's just a copy of the state.
let mut state = self.state_at(block).ok_or(CallError::StatePruned)?;
self.do_call(&env_info, &mut state, block == BlockId::Pending, transaction, analytics)
self.do_call(&env_info, &mut state, transaction, analytics)
}
fn call_many(&self, transactions: &[(SignedTransaction, CallAnalytics)], block: BlockId) -> Result<Vec<Executed>, CallError> {
@ -1179,7 +1169,7 @@ impl BlockChainClient for Client {
let mut results = Vec::with_capacity(transactions.len());
for &(ref t, analytics) in transactions {
let ret = self.do_call(&env_info, &mut state, block == BlockId::Pending, t, analytics)?;
let ret = self.do_call(&env_info, &mut state, t, analytics)?;
env_info.gas_used = ret.cumulative_gas_used;
results.push(ret);
}
@ -1730,8 +1720,8 @@ impl BlockChainClient for Client {
}
}
fn signing_network_id(&self) -> Option<u64> {
self.engine.signing_network_id(&self.latest_env_info())
fn signing_chain_id(&self) -> Option<u64> {
self.engine.signing_chain_id(&self.latest_env_info())
}
fn block_extra_info(&self, id: BlockId) -> Option<BTreeMap<String, String>> {
@ -1770,9 +1760,9 @@ impl BlockChainClient for Client {
value: U256::zero(),
data: data,
};
let network_id = self.engine.signing_network_id(&self.latest_env_info());
let signature = self.engine.sign(transaction.hash(network_id))?;
let signed = SignedTransaction::new(transaction.with_signature(signature, network_id))?;
let chain_id = self.engine.signing_chain_id(&self.latest_env_info());
let signature = self.engine.sign(transaction.hash(chain_id))?;
let signed = SignedTransaction::new(transaction.with_signature(signature, chain_id))?;
self.miner.import_own_transaction(self, signed.into())
}


@ -20,6 +20,7 @@ use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrder};
use std::sync::Arc;
use std::collections::{HashMap, BTreeMap};
use std::mem;
use itertools::Itertools;
use rustc_hex::FromHex;
use util::*;
use rlp::*;
@ -733,7 +734,7 @@ impl BlockChainClient for TestBlockChainClient {
self.miner.ready_transactions(info.best_block_number, info.best_block_timestamp)
}
fn signing_network_id(&self) -> Option<u64> { None }
fn signing_chain_id(&self) -> Option<u64> { None }
fn mode(&self) -> Mode { Mode::Active }
@ -764,9 +765,9 @@ impl BlockChainClient for TestBlockChainClient {
value: U256::default(),
data: data,
};
let network_id = Some(self.spec.params().network_id);
let sig = self.spec.engine.sign(transaction.hash(network_id)).unwrap();
let signed = SignedTransaction::new(transaction.with_signature(sig, network_id)).unwrap();
let chain_id = Some(self.spec.chain_id());
let sig = self.spec.engine.sign(transaction.hash(chain_id)).unwrap();
let signed = SignedTransaction::new(transaction.with_signature(sig, chain_id)).unwrap();
self.miner.import_own_transaction(self, signed.into())
}


@ -15,6 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::collections::BTreeMap;
use itertools::Itertools;
use block::{OpenBlock, SealedBlock, ClosedBlock};
use blockchain::TreeRoute;
@ -33,7 +34,7 @@ use trace::LocalizedTrace;
use transaction::{LocalizedTransaction, PendingTransaction, SignedTransaction};
use verification::queue::QueueInfo as BlockQueueInfo;
use util::{U256, Address, H256, H2048, Bytes, Itertools};
use util::{U256, Address, H256, H2048, Bytes};
use util::hashdb::DBValue;
use types::ids::*;
@ -239,8 +240,8 @@ pub trait BlockChainClient : Sync + Send {
corpus.into()
}
/// Get the preferred network ID to sign on
fn signing_network_id(&self) -> Option<u64>;
/// Get the preferred chain ID to sign on
fn signing_chain_id(&self) -> Option<u64>;
/// Get the mode.
fn mode(&self) -> Mode;


@ -804,9 +804,9 @@ impl Engine for AuthorityRound {
fn verify_transaction_basic(&self, t: &UnverifiedTransaction, header: &Header) -> Result<(), Error> {
t.check_low_s()?;
if let Some(n) = t.network_id() {
if let Some(n) = t.chain_id() {
if header.number() >= self.params().eip155_transition && n != self.params().chain_id {
return Err(TransactionError::InvalidNetworkId.into());
return Err(TransactionError::InvalidChainId.into());
}
}


@ -16,14 +16,12 @@
//! Epoch verifiers and transitions.
use util::H256;
use error::Error;
use header::Header;
use rlp::{Encodable, Decodable, DecoderError, RlpStream, UntrustedRlp};
use util::H256;
/// A full epoch transition.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, RlpEncodable, RlpDecodable)]
pub struct Transition {
/// Block hash at which the transition occurred.
pub block_hash: H256,
@ -33,46 +31,14 @@ pub struct Transition {
pub proof: Vec<u8>,
}
impl Encodable for Transition {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3)
.append(&self.block_hash)
.append(&self.block_number)
.append(&self.proof);
}
}
impl Decodable for Transition {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(Transition {
block_hash: rlp.val_at(0)?,
block_number: rlp.val_at(1)?,
proof: rlp.val_at(2)?,
})
}
}
/// An epoch transition pending a finality proof.
/// Not all transitions need one.
#[derive(RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct PendingTransition {
/// "transition/epoch" proof from the engine.
pub proof: Vec<u8>,
}
impl Encodable for PendingTransition {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.proof);
}
}
impl Decodable for PendingTransition {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(PendingTransition {
proof: rlp.as_val()?,
})
}
}
/// Verifier for all blocks within an epoch with self-contained state.
///
/// See docs on `Engine` relating to proving functions for more details.


@ -263,7 +263,7 @@ pub trait Engine : Sync + Send {
// TODO: Add flags for which bits of the transaction to check.
// TODO: consider including State in the params.
fn verify_transaction_basic(&self, t: &UnverifiedTransaction, _header: &Header) -> Result<(), Error> {
t.verify_basic(true, Some(self.params().network_id), true)?;
t.verify_basic(true, Some(self.params().chain_id), true)?;
Ok(())
}
@ -273,7 +273,7 @@ pub trait Engine : Sync + Send {
}
/// The network ID that transactions should be signed with.
fn signing_network_id(&self, _env_info: &EnvInfo) -> Option<u64> {
fn signing_chain_id(&self, _env_info: &EnvInfo) -> Option<u64> {
Some(self.params().chain_id)
}
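
The `network_id` to `chain_id` rename follows EIP-155 terminology: the chain id returned here is folded into the signature's `v` value when transactions are signed. A small sketch of that relationship (standard EIP-155 arithmetic, not Parity-specific code):

```rust
// Sketch only: EIP-155 encodes the chain id into the signature's `v` value.
fn eip155_v(recovery_id: u8, chain_id: Option<u64>) -> u64 {
    match chain_id {
        Some(id) => recovery_id as u64 + id * 2 + 35,
        None => recovery_id as u64 + 27, // pre-EIP-155 (legacy) transactions
    }
}

fn main() {
    // Foundation mainnet (chain id 1): v is 37 or 38.
    assert_eq!(eip155_v(0, Some(1)), 37);
    assert_eq!(eip155_v(1, Some(1)), 38);
    // Legacy transactions keep v = 27 / 28.
    assert_eq!(eip155_v(0, None), 27);
    assert_eq!(eip155_v(1, None), 28);
}
```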


@ -62,6 +62,6 @@ impl Engine for NullEngine {
}
fn snapshot_components(&self) -> Option<Box<::snapshot::SnapshotComponents>> {
Some(Box::new(::snapshot::PowSnapshot(10000)))
Some(Box::new(::snapshot::PowSnapshot::new(10000, 10000)))
}
}


@ -452,7 +452,7 @@ mod tests {
let s0: Secret = "1".sha3().into();
let v0 = tap.insert_account(s0.clone(), "").unwrap();
let v1 = tap.insert_account("0".sha3().into(), "").unwrap();
let network_id = Spec::new_validator_safe_contract().network_id();
let chain_id = Spec::new_validator_safe_contract().chain_id();
let client = generate_dummy_client_with_spec_and_accounts(Spec::new_validator_safe_contract, Some(tap));
client.engine().register_client(Arc::downgrade(&client));
let validator_contract = "0000000000000000000000000000000000000005".parse::<Address>().unwrap();
@ -466,7 +466,7 @@ mod tests {
action: Action::Call(validator_contract),
value: 0.into(),
data: "bfc708a000000000000000000000000082a978b3f5962a5b0957d9ee9eef472ee55b42f1".from_hex().unwrap(),
}.sign(&s0, Some(network_id));
}.sign(&s0, Some(chain_id));
client.miner().import_own_transaction(client.as_ref(), tx.into()).unwrap();
client.update_sealing();
assert_eq!(client.chain_info().best_block_number, 1);
@ -478,7 +478,7 @@ mod tests {
action: Action::Call(validator_contract),
value: 0.into(),
data: "4d238c8e00000000000000000000000082a978b3f5962a5b0957d9ee9eef472ee55b42f1".from_hex().unwrap(),
}.sign(&s0, Some(network_id));
}.sign(&s0, Some(chain_id));
client.miner().import_own_transaction(client.as_ref(), tx.into()).unwrap();
client.update_sealing();
// The transaction is not yet included so still unable to seal.
@ -497,7 +497,7 @@ mod tests {
action: Action::Call(Address::default()),
value: 0.into(),
data: Vec::new(),
}.sign(&s0, Some(network_id));
}.sign(&s0, Some(chain_id));
client.miner().import_own_transaction(client.as_ref(), tx.into()).unwrap();
client.update_sealing();
// Able to seal again.


@ -78,8 +78,8 @@ pub enum TransactionError {
RecipientBanned,
/// Contract creation code is banned.
CodeBanned,
/// Invalid network ID given.
InvalidNetworkId,
/// Invalid chain ID given.
InvalidChainId,
}
impl fmt::Display for TransactionError {
@ -103,7 +103,7 @@ impl fmt::Display for TransactionError {
SenderBanned => "Sender is temporarily banned.".into(),
RecipientBanned => "Recipient is temporarily banned.".into(),
CodeBanned => "Contract code is temporarily banned.".into(),
InvalidNetworkId => "Transaction of this network ID is not allowed on this chain.".into(),
InvalidChainId => "Transaction of this chain ID is not allowed on this chain.".into(),
};
f.write_fmt(format_args!("Transaction error ({})", msg))


@ -39,7 +39,10 @@ pub const PARITY_GAS_LIMIT_DETERMINANT: U256 = U256([37, 0, 0, 0]);
/// Number of blocks in an ethash snapshot.
// make dependent on difficulty increment divisor?
const SNAPSHOT_BLOCKS: u64 = 30000;
const SNAPSHOT_BLOCKS: u64 = 5000;
/// Maximum number of blocks allowed in an ethash snapshot.
const MAX_SNAPSHOT_BLOCKS: u64 = 30000;
/// Ethash params.
#[derive(Debug, PartialEq)]
@ -206,7 +209,7 @@ impl Engine for Arc<Ethash> {
}
}
fn signing_network_id(&self, env_info: &EnvInfo) -> Option<u64> {
fn signing_chain_id(&self, env_info: &EnvInfo) -> Option<u64> {
if env_info.number >= self.params().eip155_transition {
Some(self.params().chain_id)
} else {
@ -397,8 +400,8 @@ impl Engine for Arc<Ethash> {
}
let check_low_s = header.number() >= self.ethash_params.homestead_transition;
let network_id = if header.number() >= self.params().eip155_transition { Some(self.params().chain_id) } else { None };
t.verify_basic(check_low_s, network_id, false)?;
let chain_id = if header.number() >= self.params().eip155_transition { Some(self.params().chain_id) } else { None };
t.verify_basic(check_low_s, chain_id, false)?;
Ok(())
}
@ -407,7 +410,7 @@ impl Engine for Arc<Ethash> {
}
fn snapshot_components(&self) -> Option<Box<::snapshot::SnapshotComponents>> {
Some(Box::new(::snapshot::PowSnapshot(SNAPSHOT_BLOCKS)))
Some(Box::new(::snapshot::PowSnapshot::new(SNAPSHOT_BLOCKS, MAX_SNAPSHOT_BLOCKS)))
}
}


@ -157,7 +157,7 @@ impl<'a, B: 'a + StateBackend, E: Engine + ?Sized> Executive<'a, B, E> {
pub fn transact_virtual(&'a mut self, t: &SignedTransaction, options: TransactOptions) -> Result<Executed, ExecutionError> {
let sender = t.sender();
let balance = self.state.balance(&sender)?;
let needed_balance = t.value + t.gas * t.gas_price;
let needed_balance = t.value.saturating_add(t.gas.saturating_mul(t.gas_price));
if balance < needed_balance {
// give the sender a sufficient balance
self.state.add_balance(&sender, &(needed_balance - balance), CleanupMode::NoEmpty)?;

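The switch to saturating arithmetic keeps a pathological gas or gas price in a virtual call from overflowing the `value + gas * gas_price` computation. A minimal sketch of the idea, using u128 as a stand-in for the 256-bit U256 type:

// Minimal sketch of the overflow-safe balance requirement; u128 stands in for U256.
fn needed_balance(value: u128, gas: u128, gas_price: u128) -> u128 {
    value.saturating_add(gas.saturating_mul(gas_price))
}

fn main() {
    // Plain `value + gas * gas_price` would overflow here; saturating math clamps
    // to the maximum instead, so the "give the sender enough balance" branch still works.
    let huge = u128::max_value();
    assert_eq!(needed_balance(1, huge, 2), u128::max_value());
    assert_eq!(needed_balance(10, 21_000, 1_000), 21_000_010);
}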
View File

@ -36,25 +36,25 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
Some(x) if x < 3_000_000 => &homestead_schedule,
Some(_) => &metropolis_schedule
};
let allow_network_id_of_one = number.map_or(false, |n| n >= 2_675_000);
let allow_chain_id_of_one = number.map_or(false, |n| n >= 2_675_000);
let allow_unsigned = number.map_or(false, |n| n >= 3_000_000);
let rlp: Vec<u8> = test.rlp.into();
let res = UntrustedRlp::new(&rlp)
.as_val()
.map_err(From::from)
.and_then(|t: UnverifiedTransaction| t.validate(schedule, schedule.have_delegate_call, allow_network_id_of_one, allow_unsigned));
.and_then(|t: UnverifiedTransaction| t.validate(schedule, schedule.have_delegate_call, allow_chain_id_of_one, allow_unsigned));
fail_unless(test.transaction.is_none() == res.is_err(), "Validity different");
if let (Some(tx), Some(sender)) = (test.transaction, test.sender) {
let t = res.unwrap();
fail_unless(SignedTransaction::new(t.clone()).unwrap().sender() == sender.into(), "sender mismatch");
let is_acceptable_network_id = match t.network_id() {
let is_acceptable_chain_id = match t.chain_id() {
None => true,
Some(1) if allow_network_id_of_one => true,
Some(1) if allow_chain_id_of_one => true,
_ => false,
};
fail_unless(is_acceptable_network_id, "Network ID unacceptable");
fail_unless(is_acceptable_chain_id, "Chain ID unacceptable");
let data: Vec<u8> = tx.data.into();
fail_unless(t.data == data, "data mismatch");
fail_unless(t.gas_price == tx.gas_price.into(), "gas_price mismatch");

View File

@ -101,6 +101,9 @@ extern crate num;
extern crate price_info;
extern crate rand;
extern crate rlp;
#[macro_use]
extern crate rlp_derive;
extern crate rustc_hex;
extern crate semver;
extern crate stats;

View File

@ -1306,10 +1306,10 @@ mod tests {
}
fn transaction() -> SignedTransaction {
transaction_with_network_id(2)
transaction_with_chain_id(2)
}
fn transaction_with_network_id(id: u64) -> SignedTransaction {
fn transaction_with_chain_id(chain_id: u64) -> SignedTransaction {
let keypair = Random.generate().unwrap();
Transaction {
action: Action::Create,
@ -1318,7 +1318,7 @@ mod tests {
gas: U256::from(100_000),
gas_price: U256::zero(),
nonce: U256::zero(),
}.sign(keypair.secret(), Some(id))
}.sign(keypair.secret(), Some(chain_id))
}
#[test]
@ -1399,14 +1399,14 @@ mod tests {
let client = generate_dummy_client(2);
assert_eq!(miner.import_external_transactions(&*client, vec![transaction_with_network_id(spec.network_id()).into()]).pop().unwrap().unwrap(), TransactionImportResult::Current);
assert_eq!(miner.import_external_transactions(&*client, vec![transaction_with_chain_id(spec.chain_id()).into()]).pop().unwrap().unwrap(), TransactionImportResult::Current);
miner.update_sealing(&*client);
client.flush_queue();
assert!(miner.pending_block(0).is_none());
assert_eq!(client.chain_info().best_block_number, 3 as BlockNumber);
assert_eq!(miner.import_own_transaction(&*client, PendingTransaction::new(transaction_with_network_id(spec.network_id()).into(), None)).unwrap(), TransactionImportResult::Current);
assert_eq!(miner.import_own_transaction(&*client, PendingTransaction::new(transaction_with_chain_id(spec.chain_id()).into(), None)).unwrap(), TransactionImportResult::Current);
miner.update_sealing(&*client);
client.flush_queue();

View File

@ -32,7 +32,6 @@ use util::Mutex;
use miner::{self, Miner, MinerService};
use client::Client;
use block::IsBlock;
use std::str::FromStr;
use rlp::encode;
/// Configures stratum server options.
@ -60,7 +59,7 @@ impl SubmitPayload {
return Err(PayloadError::ArgumentsAmountUnexpected(payload.len()));
}
let nonce = match H64::from_str(clean_0x(&payload[0])) {
let nonce = match clean_0x(&payload[0]).parse::<H64>() {
Ok(nonce) => nonce,
Err(e) => {
warn!(target: "stratum", "submit_work ({}): invalid nonce ({:?})", &payload[0], e);
@ -68,7 +67,7 @@ impl SubmitPayload {
}
};
let pow_hash = match H256::from_str(clean_0x(&payload[1])) {
let pow_hash = match clean_0x(&payload[1]).parse::<H256>() {
Ok(pow_hash) => pow_hash,
Err(e) => {
warn!(target: "stratum", "submit_work ({}): invalid hash ({:?})", &payload[1], e);
@ -76,7 +75,7 @@ impl SubmitPayload {
}
};
let mix_hash = match H256::from_str(clean_0x(&payload[2])) {
let mix_hash = match clean_0x(&payload[2]).parse::<H256>() {
Ok(mix_hash) => mix_hash,
Err(e) => {
warn!(target: "stratum", "submit_work ({}): invalid mix-hash ({:?})", &payload[2], e);
@ -133,7 +132,7 @@ impl JobDispatcher for StratumJobDispatcher {
fn submit(&self, payload: Vec<String>) -> Result<(), StratumServiceError> {
let payload = SubmitPayload::from_args(payload).map_err(|e|
StratumServiceError::Dispatch(format!("{}", e))
StratumServiceError::Dispatch(e.to_string())
)?;
trace!(
@ -144,14 +143,16 @@ impl JobDispatcher for StratumJobDispatcher {
payload.mix_hash,
);
self.with_core_void(|client, miner| {
self.with_core_result(|client, miner| {
let seal = vec![encode(&payload.mix_hash).into_vec(), encode(&payload.nonce).into_vec()];
if let Err(e) = miner.submit_seal(&*client, payload.pow_hash, seal) {
match miner.submit_seal(&*client, payload.pow_hash, seal) {
Ok(_) => Ok(()),
Err(e) => {
warn!(target: "stratum", "submit_seal error: {:?}", e);
};
});
Ok(())
Err(StratumServiceError::Dispatch(e.to_string()))
}
}
})
}
}
@ -181,8 +182,11 @@ impl StratumJobDispatcher {
self.client.upgrade().and_then(|client| self.miner.upgrade().and_then(|miner| (f)(client, miner)))
}
fn with_core_void<F>(&self, f: F) where F: Fn(Arc<Client>, Arc<Miner>) {
self.client.upgrade().map(|client| self.miner.upgrade().map(|miner| (f)(client, miner)));
fn with_core_result<F>(&self, f: F) -> Result<(), StratumServiceError> where F: Fn(Arc<Client>, Arc<Miner>) -> Result<(), StratumServiceError> {
match (self.client.upgrade(), self.miner.upgrade()) {
(Some(client), Some(miner)) => f(client, miner),
_ => Ok(()),
}
}
}
@ -230,7 +234,7 @@ impl Stratum {
let dispatcher = Arc::new(StratumJobDispatcher::new(miner, client));
let stratum_svc = StratumService::start(
&SocketAddr::new(IpAddr::from_str(&options.listen_addr)?, options.port),
&SocketAddr::new(options.listen_addr.parse::<IpAddr>()?, options.port),
dispatcher.clone(),
options.secret.clone(),
)?;

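Replacing `with_core_void` with `with_core_result` lets a `submit_seal` failure propagate back to the stratum client instead of being logged and swallowed. A self-contained sketch of the weak-reference pattern, with placeholder types standing in for `Client`, `Miner`, and `StratumServiceError`:

use std::sync::{Arc, Weak};

// Placeholder error type standing in for StratumServiceError::Dispatch.
#[derive(Debug)]
struct DispatchError(String);

struct Dispatcher {
    client: Weak<String>, // stand-ins for Weak<Client> / Weak<Miner>
    miner: Weak<String>,
}

impl Dispatcher {
    // Run `f` only if both weak references are still alive; otherwise report success,
    // mirroring `with_core_result` in the diff.
    fn with_core_result<F>(&self, f: F) -> Result<(), DispatchError>
    where
        F: Fn(Arc<String>, Arc<String>) -> Result<(), DispatchError>,
    {
        match (self.client.upgrade(), self.miner.upgrade()) {
            (Some(client), Some(miner)) => f(client, miner),
            _ => Ok(()),
        }
    }
}

fn main() {
    let client = Arc::new("client".to_string());
    let miner = Arc::new("miner".to_string());
    let d = Dispatcher { client: Arc::downgrade(&client), miner: Arc::downgrade(&miner) };

    // The error from the closure now reaches the caller instead of being swallowed.
    let res = d.with_core_result(|_, _| Err(DispatchError("submit_seal failed".into())));
    assert!(res.is_err());
}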
View File

@ -16,6 +16,7 @@
use std::fmt;
use std::collections::BTreeMap;
use itertools::Itertools;
use util::*;
use state::Account;
use ethjson;

View File

@ -18,6 +18,7 @@
use std::fmt;
use std::collections::BTreeMap;
use itertools::Itertools;
use util::*;
use pod_account::{self, PodAccount};
use types::state_diff::StateDiff;

View File

@ -37,11 +37,24 @@ use rand::OsRng;
/// Snapshot creation and restoration for PoW chains.
/// This includes blocks from the head of the chain as a
/// loose assurance that the chain is valid.
///
/// The field is the number of blocks from the head of the chain
/// to include in the snapshot.
#[derive(Clone, Copy, PartialEq)]
pub struct PowSnapshot(pub u64);
pub struct PowSnapshot {
/// Number of blocks from the head of the chain
/// to include in the snapshot.
pub blocks: u64,
/// Number of blocks to allow in the snapshot when restoring.
pub max_restore_blocks: u64,
}
impl PowSnapshot {
/// Create a new instance.
pub fn new(blocks: u64, max_restore_blocks: u64) -> PowSnapshot {
PowSnapshot {
blocks: blocks,
max_restore_blocks: max_restore_blocks,
}
}
}
impl SnapshotComponents for PowSnapshot {
fn chunk_all(
@ -57,7 +70,7 @@ impl SnapshotComponents for PowSnapshot {
current_hash: block_at,
writer: chunk_sink,
preferred_size: preferred_size,
}.chunk_all(self.0)
}.chunk_all(self.blocks)
}
fn rebuilder(
@ -66,7 +79,7 @@ impl SnapshotComponents for PowSnapshot {
db: Arc<KeyValueDB>,
manifest: &ManifestData,
) -> Result<Box<Rebuilder>, ::error::Error> {
PowRebuilder::new(chain, db, manifest, self.0).map(|r| Box::new(r) as Box<_>)
PowRebuilder::new(chain, db, manifest, self.max_restore_blocks).map(|r| Box::new(r) as Box<_>)
}
fn min_supported_version(&self) -> u64 { ::snapshot::MIN_SUPPORTED_STATE_CHUNK_VERSION }
@ -218,7 +231,7 @@ impl Rebuilder for PowRebuilder {
trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3);
if self.fed_blocks + num_blocks > self.snapshot_blocks {
return Err(Error::TooManyBlocks(self.snapshot_blocks, self.fed_blocks).into())
return Err(Error::TooManyBlocks(self.snapshot_blocks, self.fed_blocks + num_blocks).into())
}
// todo: assert here that these values are consistent with chunks being in order.

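Splitting the single field into `blocks` and `max_restore_blocks` lets chunking use the new, smaller figure while restoration still tolerates older, larger snapshots; the error now also reports the total the offending chunk would have reached. A sketch of the restore-side check, with plain types standing in for the snapshot error enum:

// Sketch of the restore-side limit check; stand-in types, not the crate's API.
#[derive(Debug, PartialEq)]
struct TooManyBlocks { allowed: u64, got: u64 }

struct Rebuilder {
    fed_blocks: u64,
    snapshot_blocks: u64, // initialised from PowSnapshot::max_restore_blocks
}

impl Rebuilder {
    fn feed_chunk(&mut self, num_blocks: u64) -> Result<(), TooManyBlocks> {
        if self.fed_blocks + num_blocks > self.snapshot_blocks {
            // Report the total the chunk would have brought us to, as in the fixed code.
            return Err(TooManyBlocks { allowed: self.snapshot_blocks, got: self.fed_blocks + num_blocks });
        }
        self.fed_blocks += num_blocks;
        Ok(())
    }
}

fn main() {
    let mut r = Rebuilder { fed_blocks: 0, snapshot_blocks: 30_000 };
    assert!(r.feed_chunk(25_000).is_ok());
    assert_eq!(r.feed_chunk(6_000), Err(TooManyBlocks { allowed: 30_000, got: 31_000 }));
}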
View File

@ -27,7 +27,7 @@ use std::path::{Path, PathBuf};
use util::Bytes;
use util::hash::H256;
use rlp::{self, Encodable, RlpStream, UntrustedRlp};
use rlp::{RlpStream, UntrustedRlp};
use super::ManifestData;
@ -49,24 +49,9 @@ pub trait SnapshotWriter {
}
// (hash, len, offset)
#[derive(RlpEncodable, RlpDecodable)]
struct ChunkInfo(H256, u64, u64);
impl Encodable for ChunkInfo {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.0).append(&self.1).append(&self.2);
}
}
impl rlp::Decodable for ChunkInfo {
fn decode(rlp: &UntrustedRlp) -> Result<Self, rlp::DecoderError> {
let hash = rlp.val_at(0)?;
let len = rlp.val_at(1)?;
let off = rlp.val_at(2)?;
Ok(ChunkInfo(hash, len, off))
}
}
/// A packed snapshot writer. This writes snapshots to a single concatenated file.
///
/// The file format is very simple and consists of three parts:

View File

@ -130,7 +130,7 @@ fn make_chain(accounts: Arc<AccountProvider>, blocks_beyond: usize, transitions:
action: Action::Call(Address::new()),
value: 1.into(),
data: Vec::new(),
}.sign(&*RICH_SECRET, client.signing_network_id());
}.sign(&*RICH_SECRET, client.signing_chain_id());
*nonce = *nonce + 1.into();
vec![transaction]
@ -176,7 +176,7 @@ fn make_chain(accounts: Arc<AccountProvider>, blocks_beyond: usize, transitions:
action: Action::Call(addr),
value: 0.into(),
data: data,
}.sign(&*RICH_SECRET, client.signing_network_id());
}.sign(&*RICH_SECRET, client.signing_chain_id());
pending.push(transaction);

View File

@ -30,7 +30,7 @@ use util::kvdb::{self, KeyValueDB, DBTransaction};
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
const SNAPSHOT_MODE: ::snapshot::PowSnapshot = ::snapshot::PowSnapshot(30000);
const SNAPSHOT_MODE: ::snapshot::PowSnapshot = ::snapshot::PowSnapshot { blocks: 30000, max_restore_blocks: 30000 };
fn chunk_and_restore(amount: u64) {
let mut canon_chain = ChainGenerator::default();

View File

@ -380,6 +380,9 @@ impl Spec {
/// Get the configured Network ID.
pub fn network_id(&self) -> u64 { self.params().network_id }
/// Get the chain ID used for signing.
pub fn chain_id(&self) -> u64 { self.params().chain_id }
/// Get the configured subprotocol name.
pub fn subprotocol_name(&self) -> String { self.params().subprotocol_name.clone() }

View File

@ -211,7 +211,7 @@ pub fn generate_dummy_client_with_spec_accounts_and_data<F>(get_test_spec: F, ac
action: Action::Create,
data: vec![],
value: U256::zero(),
}.sign(kp.secret(), Some(test_spec.network_id())), None).unwrap();
}.sign(kp.secret(), Some(test_spec.chain_id())), None).unwrap();
n += 1;
}

View File

@ -1,10 +1,9 @@
use bloomchain::Bloom;
use bloomchain::group::{BloomGroup, GroupPosition};
use rlp::*;
use basic_types::LogBloom;
/// Helper structure representing bloom of the trace.
#[derive(Clone)]
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct BlockTracesBloom(LogBloom);
impl From<LogBloom> for BlockTracesBloom {
@ -28,7 +27,7 @@ impl Into<Bloom> for BlockTracesBloom {
}
/// Represents group of X consecutive blooms.
#[derive(Clone)]
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct BlockTracesBloomGroup {
blooms: Vec<BlockTracesBloom>,
}
@ -59,34 +58,6 @@ impl Into<BloomGroup> for BlockTracesBloomGroup {
}
}
impl Decodable for BlockTracesBloom {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
LogBloom::decode(rlp).map(BlockTracesBloom)
}
}
impl Encodable for BlockTracesBloom {
fn rlp_append(&self, s: &mut RlpStream) {
Encodable::rlp_append(&self.0, s)
}
}
impl Decodable for BlockTracesBloomGroup {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let blooms = rlp.as_list()?;
let group = BlockTracesBloomGroup {
blooms: blooms
};
Ok(group)
}
}
impl Encodable for BlockTracesBloomGroup {
fn rlp_append(&self, s: &mut RlpStream) {
s.append_list(&self.blooms);
}
}
/// Represents `BloomGroup` position in database.
#[derive(PartialEq, Eq, Hash, Clone, Debug)]
pub struct TraceGroupPosition {

View File

@ -77,7 +77,7 @@ impl Decodable for FlatTrace {
}
/// Represents all traces produced by a single transaction.
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq, Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct FlatTransactionTraces(Vec<FlatTrace>);
impl From<Vec<FlatTrace>> for FlatTransactionTraces {
@ -99,18 +99,6 @@ impl FlatTransactionTraces {
}
}
impl Encodable for FlatTransactionTraces {
fn rlp_append(&self, s: &mut RlpStream) {
s.append_list(&self.0);
}
}
impl Decodable for FlatTransactionTraces {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(FlatTransactionTraces(rlp.as_list()?))
}
}
impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
fn into(self) -> Vec<FlatTrace> {
self.0
@ -118,7 +106,7 @@ impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
}
/// Represents all traces produced by transactions in a single block.
#[derive(Debug, PartialEq, Clone, Default)]
#[derive(Debug, PartialEq, Clone, Default, RlpEncodableWrapper, RlpDecodableWrapper)]
pub struct FlatBlockTraces(Vec<FlatTransactionTraces>);
impl HeapSizeOf for FlatBlockTraces {
@ -140,18 +128,6 @@ impl FlatBlockTraces {
}
}
impl Encodable for FlatBlockTraces {
fn rlp_append(&self, s: &mut RlpStream) {
s.append_list(&self.0);
}
}
impl Decodable for FlatBlockTraces {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(FlatBlockTraces(rlp.as_list()?))
}
}
impl Into<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn into(self) -> Vec<FlatTransactionTraces> {
self.0

View File

@ -27,7 +27,7 @@ use evm::CallType;
use super::error::Error;
/// `Call` result.
#[derive(Debug, Clone, PartialEq, Default)]
#[derive(Debug, Clone, PartialEq, Default, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
pub struct CallResult {
/// Gas used by call.
@ -36,27 +36,8 @@ pub struct CallResult {
pub output: Bytes,
}
impl Encodable for CallResult {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2);
s.append(&self.gas_used);
s.append(&self.output);
}
}
impl Decodable for CallResult {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let res = CallResult {
gas_used: rlp.val_at(0)?,
output: rlp.val_at(1)?,
};
Ok(res)
}
}
/// `Create` result.
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
pub struct CreateResult {
/// Gas used by create.
@ -67,27 +48,6 @@ pub struct CreateResult {
pub address: Address,
}
impl Encodable for CreateResult {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.gas_used);
s.append(&self.code);
s.append(&self.address);
}
}
impl Decodable for CreateResult {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let res = CreateResult {
gas_used: rlp.val_at(0)?,
code: rlp.val_at(1)?,
address: rlp.val_at(2)?,
};
Ok(res)
}
}
impl CreateResult {
/// Returns bloom.
pub fn bloom(&self) -> LogBloom {
@ -96,7 +56,7 @@ impl CreateResult {
}
/// Description of a _call_ action, either a `CALL` operation or a message transaction.
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
pub struct Call {
/// The sending account.
@ -126,33 +86,6 @@ impl From<ActionParams> for Call {
}
}
impl Encodable for Call {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(6);
s.append(&self.from);
s.append(&self.to);
s.append(&self.value);
s.append(&self.gas);
s.append(&self.input);
s.append(&self.call_type);
}
}
impl Decodable for Call {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let res = Call {
from: rlp.val_at(0)?,
to: rlp.val_at(1)?,
value: rlp.val_at(2)?,
gas: rlp.val_at(3)?,
input: rlp.val_at(4)?,
call_type: rlp.val_at(5)?,
};
Ok(res)
}
}
impl Call {
/// Returns call action bloom.
/// The bloom contains from and to addresses.
@ -163,7 +96,7 @@ impl Call {
}
/// Description of a _create_ action, either a `CREATE` operation or a create transaction.
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
pub struct Create {
/// The address of the creator.
@ -187,29 +120,6 @@ impl From<ActionParams> for Create {
}
}
impl Encodable for Create {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.from);
s.append(&self.value);
s.append(&self.gas);
s.append(&self.init);
}
}
impl Decodable for Create {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let res = Create {
from: rlp.val_at(0)?,
value: rlp.val_at(1)?,
gas: rlp.val_at(2)?,
init: rlp.val_at(3)?,
};
Ok(res)
}
}
impl Create {
/// Returns the create action bloom.
/// The bloom contains only the from address.
@ -219,7 +129,7 @@ impl Create {
}
/// Suicide action.
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
pub struct Suicide {
/// Suicided address.
@ -238,28 +148,6 @@ impl Suicide {
}
}
impl Encodable for Suicide {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.address);
s.append(&self.refund_address);
s.append(&self.balance);
}
}
impl Decodable for Suicide {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let res = Suicide {
address: rlp.val_at(0)?,
refund_address: rlp.val_at(1)?,
balance: rlp.val_at(2)?,
};
Ok(res)
}
}
/// Description of an action that we trace; will be either a call or a create.
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "ipc", binary)]
@ -394,7 +282,7 @@ impl Res {
}
}
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
/// A diff of some chunk of memory.
pub struct MemoryDiff {
@ -404,24 +292,7 @@ pub struct MemoryDiff {
pub data: Bytes,
}
impl Encodable for MemoryDiff {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2);
s.append(&self.offset);
s.append(&self.data);
}
}
impl Decodable for MemoryDiff {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(MemoryDiff {
offset: rlp.val_at(0)?,
data: rlp.val_at(1)?,
})
}
}
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
/// A diff of some storage value.
pub struct StorageDiff {
@ -431,24 +302,7 @@ pub struct StorageDiff {
pub value: U256,
}
impl Encodable for StorageDiff {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2);
s.append(&self.location);
s.append(&self.value);
}
}
impl Decodable for StorageDiff {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(StorageDiff {
location: rlp.val_at(0)?,
value: rlp.val_at(1)?,
})
}
}
#[derive(Debug, Clone, PartialEq)]
#[derive(Debug, Clone, PartialEq, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
/// A record of an executed VM operation.
pub struct VMExecutedOperation {
@ -462,28 +316,7 @@ pub struct VMExecutedOperation {
pub store_diff: Option<StorageDiff>,
}
impl Encodable for VMExecutedOperation {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.gas_used);
s.append_list(&self.stack_push);
s.append(&self.mem_diff);
s.append(&self.store_diff);
}
}
impl Decodable for VMExecutedOperation {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(VMExecutedOperation {
gas_used: rlp.val_at(0)?,
stack_push: rlp.list_at(1)?,
mem_diff: rlp.val_at(2)?,
store_diff: rlp.val_at(3)?,
})
}
}
#[derive(Debug, Clone, PartialEq, Default)]
#[derive(Debug, Clone, PartialEq, Default, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
/// A record of the execution of a single VM operation.
pub struct VMOperation {
@ -497,30 +330,7 @@ pub struct VMOperation {
pub executed: Option<VMExecutedOperation>,
}
impl Encodable for VMOperation {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.pc);
s.append(&self.instruction);
s.append(&self.gas_cost);
s.append(&self.executed);
}
}
impl Decodable for VMOperation {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let res = VMOperation {
pc: rlp.val_at(0)?,
instruction: rlp.val_at(1)?,
gas_cost: rlp.val_at(2)?,
executed: rlp.val_at(3)?,
};
Ok(res)
}
}
#[derive(Debug, Clone, PartialEq, Default)]
#[derive(Debug, Clone, PartialEq, Default, RlpEncodable, RlpDecodable)]
#[cfg_attr(feature = "ipc", binary)]
/// A record of a full VM trace for a CALL/CREATE.
pub struct VMTrace {
@ -534,26 +344,3 @@ pub struct VMTrace {
/// There is a 1:1 correspondence between these and a CALL/CREATE/CALLCODE/DELEGATECALL instruction.
pub subs: Vec<VMTrace>,
}
impl Encodable for VMTrace {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.parent_step);
s.append(&self.code);
s.append_list(&self.operations);
s.append_list(&self.subs);
}
}
impl Decodable for VMTrace {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let res = VMTrace {
parent_step: rlp.val_at(0)?,
code: rlp.val_at(1)?,
operations: rlp.list_at(2)?,
subs: rlp.list_at(3)?,
};
Ok(res)
}
}

View File

@ -56,6 +56,15 @@ impl Decodable for Action {
}
}
impl Encodable for Action {
fn rlp_append(&self, s: &mut RlpStream) {
match *self {
Action::Create => s.append_internal(&""),
Action::Call(ref addr) => s.append_internal(addr),
};
}
}
/// Transaction activation condition.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Condition {
@ -85,18 +94,15 @@ pub struct Transaction {
impl Transaction {
/// Append object without a signature into RLP stream
pub fn rlp_append_unsigned_transaction(&self, s: &mut RlpStream, network_id: Option<u64>) {
s.begin_list(if network_id.is_none() { 6 } else { 9 });
pub fn rlp_append_unsigned_transaction(&self, s: &mut RlpStream, chain_id: Option<u64>) {
s.begin_list(if chain_id.is_none() { 6 } else { 9 });
s.append(&self.nonce);
s.append(&self.gas_price);
s.append(&self.gas);
match self.action {
Action::Create => s.append_empty_data(),
Action::Call(ref to) => s.append(to)
};
s.append(&self.action);
s.append(&self.value);
s.append(&self.data);
if let Some(n) = network_id {
if let Some(n) = chain_id {
s.append(&n);
s.append(&0u8);
s.append(&0u8);
@ -157,27 +163,27 @@ impl From<ethjson::transaction::Transaction> for UnverifiedTransaction {
impl Transaction {
/// The message hash of the transaction.
pub fn hash(&self, network_id: Option<u64>) -> H256 {
pub fn hash(&self, chain_id: Option<u64>) -> H256 {
let mut stream = RlpStream::new();
self.rlp_append_unsigned_transaction(&mut stream, network_id);
self.rlp_append_unsigned_transaction(&mut stream, chain_id);
stream.as_raw().sha3()
}
/// Signs the transaction as coming from `sender`.
pub fn sign(self, secret: &Secret, network_id: Option<u64>) -> SignedTransaction {
let sig = ::ethkey::sign(secret, &self.hash(network_id))
pub fn sign(self, secret: &Secret, chain_id: Option<u64>) -> SignedTransaction {
let sig = ::ethkey::sign(secret, &self.hash(chain_id))
.expect("data is valid and context has signing capabilities; qed");
SignedTransaction::new(self.with_signature(sig, network_id))
SignedTransaction::new(self.with_signature(sig, chain_id))
.expect("secret is valid so it's recoverable")
}
/// Signs the transaction with signature.
pub fn with_signature(self, sig: Signature, network_id: Option<u64>) -> UnverifiedTransaction {
pub fn with_signature(self, sig: Signature, chain_id: Option<u64>) -> UnverifiedTransaction {
UnverifiedTransaction {
unsigned: self,
r: sig.r().into(),
s: sig.s().into(),
v: sig.v() as u64 + if let Some(n) = network_id { 35 + n * 2 } else { 27 },
v: sig.v() as u64 + if let Some(n) = chain_id { 35 + n * 2 } else { 27 },
hash: 0.into(),
}.compute_hash()
}
@ -210,13 +216,13 @@ impl Transaction {
}
/// Add EIP-86 compatible empty signature.
pub fn null_sign(self, network_id: u64) -> SignedTransaction {
pub fn null_sign(self, chain_id: u64) -> SignedTransaction {
SignedTransaction {
transaction: UnverifiedTransaction {
unsigned: self,
r: U256::zero(),
s: U256::zero(),
v: network_id,
v: chain_id,
hash: 0.into(),
}.compute_hash(),
sender: UNSIGNED_SENDER,
@ -244,7 +250,7 @@ pub struct UnverifiedTransaction {
/// Plain Transaction.
unsigned: Transaction,
/// The V field of the signature; the LS bit described which half of the curve our point falls
/// in. The MS bits describe which network this transaction is for. If 27/28, its for all networks.
/// in. The MS bits describe which chain this transaction is for. If 27/28, it's for all chains.
v: u64,
/// The R field of the signature; helps describe the point on the curve.
r: U256,
@ -308,10 +314,7 @@ impl UnverifiedTransaction {
s.append(&self.nonce);
s.append(&self.gas_price);
s.append(&self.gas);
match self.action {
Action::Create => s.append_empty_data(),
Action::Call(ref to) => s.append(to)
};
s.append(&self.action);
s.append(&self.value);
s.append(&self.data);
s.append(&self.v);
@ -330,8 +333,8 @@ impl UnverifiedTransaction {
/// The `v` value that appears in the RLP.
pub fn original_v(&self) -> u64 { self.v }
/// The network ID, or `None` if this is a global transaction.
pub fn network_id(&self) -> Option<u64> {
/// The chain ID, or `None` if this is a global transaction.
pub fn chain_id(&self) -> Option<u64> {
match self.v {
v if self.is_unsigned() => Some(v),
v if v > 36 => Some((v - 35) / 2),
@ -360,15 +363,15 @@ impl UnverifiedTransaction {
/// Recovers the public key of the sender.
pub fn recover_public(&self) -> Result<Public, Error> {
Ok(recover(&self.signature(), &self.unsigned.hash(self.network_id()))?)
Ok(recover(&self.signature(), &self.unsigned.hash(self.chain_id()))?)
}
/// Do basic validation, checking for valid signature and minimum gas,
// TODO: consider use in block validation.
#[cfg(test)]
#[cfg(feature = "json-tests")]
pub fn validate(self, schedule: &Schedule, require_low: bool, allow_network_id_of_one: bool, allow_empty_signature: bool) -> Result<UnverifiedTransaction, Error> {
let chain_id = if allow_network_id_of_one { Some(1) } else { None };
pub fn validate(self, schedule: &Schedule, require_low: bool, allow_chain_id_of_one: bool, allow_empty_signature: bool) -> Result<UnverifiedTransaction, Error> {
let chain_id = if allow_chain_id_of_one { Some(1) } else { None };
self.verify_basic(require_low, chain_id, allow_empty_signature)?;
if !allow_empty_signature || !self.is_unsigned() {
self.recover_public()?;
@ -388,10 +391,10 @@ impl UnverifiedTransaction {
if allow_empty_signature && self.is_unsigned() && !(self.gas_price.is_zero() && self.value.is_zero() && self.nonce.is_zero()) {
return Err(EthkeyError::InvalidSignature.into())
}
match (self.network_id(), chain_id) {
match (self.chain_id(), chain_id) {
(None, _) => {},
(Some(n), Some(m)) if n == m => {},
_ => return Err(TransactionError::InvalidNetworkId.into()),
_ => return Err(TransactionError::InvalidChainId.into()),
};
Ok(())
}
@ -555,7 +558,7 @@ mod tests {
} else { panic!(); }
assert_eq!(t.value, U256::from(0x0au64));
assert_eq!(public_to_address(&t.recover_public().unwrap()), "0f65fe9276bc9a24ae7083ae28e2660ef72df99e".into());
assert_eq!(t.network_id(), None);
assert_eq!(t.chain_id(), None);
}
#[test]
@ -572,7 +575,7 @@ mod tests {
data: b"Hello!".to_vec()
}.sign(&key.secret(), None);
assert_eq!(Address::from(key.public().sha3()), t.sender());
assert_eq!(t.network_id(), None);
assert_eq!(t.chain_id(), None);
}
#[test]
@ -586,15 +589,15 @@ mod tests {
data: b"Hello!".to_vec()
}.fake_sign(Address::from(0x69));
assert_eq!(Address::from(0x69), t.sender());
assert_eq!(t.network_id(), None);
assert_eq!(t.chain_id(), None);
let t = t.clone();
assert_eq!(Address::from(0x69), t.sender());
assert_eq!(t.network_id(), None);
assert_eq!(t.chain_id(), None);
}
#[test]
fn should_recover_from_network_specific_signing() {
fn should_recover_from_chain_specific_signing() {
use ethkey::{Random, Generator};
let key = Random.generate().unwrap();
let t = Transaction {
@ -606,7 +609,7 @@ mod tests {
data: b"Hello!".to_vec()
}.sign(&key.secret(), Some(69));
assert_eq!(Address::from(key.public().sha3()), t.sender());
assert_eq!(t.network_id(), Some(69));
assert_eq!(t.chain_id(), Some(69));
}
#[test]
@ -617,7 +620,7 @@ mod tests {
let signed = decode(&FromHex::from_hex(tx_data).unwrap());
let signed = SignedTransaction::new(signed).unwrap();
assert_eq!(signed.sender(), address.into());
flushln!("networkid: {:?}", signed.network_id());
flushln!("chainid: {:?}", signed.chain_id());
};
test_vector("f864808504a817c800825208943535353535353535353535353535353535353535808025a0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116da0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d", "0xf0f6f18bca1b28cd68e4357452947e021241e9ce");

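The renamed helpers keep the EIP-155 arithmetic unchanged: `with_signature` stores `v = recovery_id + 35 + 2 * chain_id` for chain-specific signatures and 27/28 for legacy ones, and `chain_id()` inverts that. A standalone sketch of both directions (plain functions, not the crate's API, and ignoring the EIP-86 unsigned case handled above):

// Standalone sketch of the EIP-155 `v` arithmetic; `recovery_id` is the 0/1 parity
// bit from the ECDSA signature.
fn encode_v(recovery_id: u8, chain_id: Option<u64>) -> u64 {
    recovery_id as u64 + match chain_id {
        Some(n) => 35 + n * 2,
        None => 27,
    }
}

fn decode_chain_id(v: u64) -> Option<u64> {
    if v > 36 { Some((v - 35) / 2) } else { None }
}

fn main() {
    // Legacy (pre-EIP-155) transactions: v is 27 or 28, valid on any chain.
    assert_eq!(encode_v(0, None), 27);
    assert_eq!(decode_chain_id(28), None);

    // Chain-specific signing, e.g. chain ID 1: v becomes 37 or 38.
    assert_eq!(encode_v(1, Some(1)), 38);
    assert_eq!(decode_chain_id(37), Some(1));
    assert_eq!(decode_chain_id(38), Some(1));
}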
View File

@ -6,6 +6,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
[dependencies]
rlp = { path = "../../util/rlp" }
rlp_derive = { path = "../../util/rlp_derive" }
ethcore-util = { path = "../../util" }
ethjson = { path = "../../json" }
bloomable = { path = "../../util/bloomable" }

View File

@ -16,11 +16,10 @@
//! Basic account type -- the decoded RLP from the state trie.
use rlp::*;
use util::{U256, H256};
/// Basic account type.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct BasicAccount {
/// Nonce of the account.
pub nonce: U256,
@ -31,24 +30,3 @@ pub struct BasicAccount {
/// Code hash of the account.
pub code_hash: H256,
}
impl Encodable for BasicAccount {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4)
.append(&self.nonce)
.append(&self.balance)
.append(&self.storage_root)
.append(&self.code_hash);
}
}
impl Decodable for BasicAccount {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(BasicAccount {
nonce: rlp.val_at(0)?,
balance: rlp.val_at(1)?,
storage_root: rlp.val_at(2)?,
code_hash: rlp.val_at(3)?,
})
}
}

View File

@ -19,6 +19,8 @@
extern crate ethcore_util as util;
extern crate ethjson;
extern crate rlp;
#[macro_use]
extern crate rlp_derive;
extern crate bloomable;
#[cfg(test)]

View File

@ -19,7 +19,6 @@
use std::ops::Deref;
use util::{H256, Address, Bytes, HeapSizeOf, Hashable};
use bloomable::Bloomable;
use rlp::*;
use {BlockNumber};
use ethjson;
@ -27,7 +26,7 @@ use ethjson;
pub type LogBloom = ::util::H2048;
/// A record of execution for a `LOG` operation.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
#[derive(Default, Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
pub struct LogEntry {
/// The address of the contract executing at the point of the `LOG` operation.
pub address: Address,
@ -37,26 +36,6 @@ pub struct LogEntry {
pub data: Bytes,
}
impl Encodable for LogEntry {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.address);
s.append_list(&self.topics);
s.append(&self.data);
}
}
impl Decodable for LogEntry {
fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let entry = LogEntry {
address: rlp.val_at(0)?,
topics: rlp.list_at(1)?,
data: rlp.val_at(2)?,
};
Ok(entry)
}
}
impl HeapSizeOf for LogEntry {
fn heap_size_of_children(&self) -> usize {
self.topics.heap_size_of_children() + self.data.heap_size_of_children()

View File

@ -17,8 +17,9 @@
//! Wasm env module bindings
use parity_wasm::elements::ValueType::*;
use parity_wasm::interpreter::UserFunctionDescriptor;
use parity_wasm::interpreter::{self, UserFunctionDescriptor};
use parity_wasm::interpreter::UserFunctionDescriptor::*;
use super::runtime::Runtime;
pub const SIGNATURES: &'static [UserFunctionDescriptor] = &[
Static(
@ -81,63 +82,29 @@ pub const SIGNATURES: &'static [UserFunctionDescriptor] = &[
&[I32],
None,
),
Static(
"_abort",
&[],
None,
),
Static(
"abortOnCannotGrowMemory",
&[I32; 0],
Some(I32)
),
/*
THESE ARE EXPERIMENTAL RUST-ONLY RUNTIME EXTERNS; THEY ARE SUBJECT TO CHANGE.
AVOID HAVING ANY OF THESE IN YOUR WASM, OTHERWISE YOU WILL
EITHER FACE THE NEED FOR A HARDFORK
OR BE STUCK ON A SPECIFIC RUST VERSION FOR WASM COMPILATION
*/
Static(
"_rust_begin_unwind",
&[I32; 4],
None,
),
Static(
"_emscripten_memcpy_big",
&[I32; 3],
Some(I32),
),
Static(
"___syscall6",
&[I32; 2],
Some(I32),
),
Static(
"___syscall140",
&[I32; 2],
Some(I32)
),
Static(
"___syscall146",
&[I32; 2],
Some(I32)
),
Static(
"___syscall54",
&[I32; 2],
Some(I32)
),
// TODO: Get rid of it also somehow?
Static(
"_llvm_trap",
&[I32; 0],
None
),
Static(
"___setErrNo",
&[I32; 1],
None
"_llvm_bswap_i64",
&[I32; 2],
Some(I32)
),
];
pub fn native_bindings<'a>(runtime: &'a mut Runtime) -> interpreter::UserFunctions<'a> {
interpreter::UserFunctions {
executor: runtime,
functions: ::std::borrow::Cow::from(SIGNATURES),
}
}

View File

@ -32,8 +32,6 @@ mod result;
mod tests;
mod env;
use std::sync::Arc;
const DEFAULT_STACK_SPACE: u32 = 5 * 1024 * 1024;
use parity_wasm::{interpreter, elements};
@ -89,6 +87,7 @@ impl vm::Vm for WasmInterpreter {
DEFAULT_STACK_SPACE,
params.gas.low_u64(),
RuntimeContext::new(params.address, params.sender),
&self.program,
);
let mut cursor = ::std::io::Cursor::new(&*code);
@ -112,16 +111,8 @@ impl vm::Vm for WasmInterpreter {
)?;
{
let execution_params = interpreter::ExecutionParams::with_external(
"env".into(),
Arc::new(
interpreter::env_native_module(env_instance, native_bindings(&mut runtime))
.map_err(|err| {
// todo: prefer explicit panic here also?
vm::Error::Wasm(format!("Error instantiating native bindings: {:?}", err))
})?
)
).add_argument(interpreter::RuntimeValue::I32(d_ptr.as_raw() as i32));
let execution_params = runtime.execution_params()
.add_argument(interpreter::RuntimeValue::I32(d_ptr.as_raw() as i32));
let module_instance = self.program.add_module("contract", contract_module, Some(&execution_params.externals))
.map_err(|err| {
@ -158,13 +149,6 @@ impl vm::Vm for WasmInterpreter {
}
}
fn native_bindings<'a>(runtime: &'a mut Runtime) -> interpreter::UserFunctions<'a> {
interpreter::UserFunctions {
executor: runtime,
functions: ::std::borrow::Cow::from(env::SIGNATURES),
}
}
impl From<runtime::Error> for vm::Error {
fn from(err: runtime::Error) -> vm::Error {
vm::Error::Wasm(format!("WASM runtime-error: {:?}", err))

View File

@ -72,24 +72,26 @@ impl RuntimeContext {
}
/// Runtime environment data for wasm contract execution
pub struct Runtime<'a> {
pub struct Runtime<'a, 'b> {
gas_counter: u64,
gas_limit: u64,
dynamic_top: u32,
ext: &'a mut vm::Ext,
memory: Arc<interpreter::MemoryInstance>,
context: RuntimeContext,
instance: &'b interpreter::ProgramInstance,
}
impl<'a> Runtime<'a> {
impl<'a, 'b> Runtime<'a, 'b> {
/// New runtime for wasm contract with specified params
pub fn with_params<'b>(
ext: &'b mut vm::Ext,
pub fn with_params<'c, 'd>(
ext: &'c mut vm::Ext,
memory: Arc<interpreter::MemoryInstance>,
stack_space: u32,
gas_limit: u64,
context: RuntimeContext,
) -> Runtime<'b> {
program_instance: &'d interpreter::ProgramInstance,
) -> Runtime<'c, 'd> {
Runtime {
gas_counter: 0,
gas_limit: gas_limit,
@ -97,6 +99,7 @@ impl<'a> Runtime<'a> {
memory: memory,
ext: ext,
context: context,
instance: program_instance,
}
}
@ -449,9 +452,58 @@ impl<'a> Runtime<'a> {
Ok(Some(0i32.into()))
}
fn bswap_32(x: u32) -> u32 {
x >> 24 | x >> 8 & 0xff00 | x << 8 & 0xff0000 | x << 24
}
impl<'a> interpreter::UserFunctionExecutor for Runtime<'a> {
fn bitswap_i64(&mut self, context: interpreter::CallerContext)
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
{
let x1 = context.value_stack.pop_as::<i32>()?;
let x2 = context.value_stack.pop_as::<i32>()?;
let result = ((Runtime::bswap_32(x2 as u32) as u64) << 32
| Runtime::bswap_32(x1 as u32) as u64) as i64;
self.return_i64(result)
}
fn return_i64(&mut self, val: i64) -> Result<Option<interpreter::RuntimeValue>, interpreter::Error> {
let uval = val as u64;
let hi = (uval >> 32) as i32;
let lo = (uval << 32 >> 32) as i32;
let target = self.instance.module("contract")
.ok_or(interpreter::Error::Trap("Error locating main execution entry".to_owned()))?;
target.execute_export(
"setTempRet0",
self.execution_params().add_argument(
interpreter::RuntimeValue::I32(hi).into()
),
)?;
Ok(Some(
(lo).into()
))
}
pub fn execution_params(&mut self) -> interpreter::ExecutionParams {
use super::env;
let env_instance = self.instance.module("env")
.expect("Env module always exists; qed");
interpreter::ExecutionParams::with_external(
"env".into(),
Arc::new(
interpreter::env_native_module(env_instance, env::native_bindings(self))
.expect("Env module always exists; qed")
)
)
}
}
impl<'a, 'b> interpreter::UserFunctionExecutor for Runtime<'a, 'b> {
fn execute(&mut self, name: &str, context: interpreter::CallerContext)
-> Result<Option<interpreter::RuntimeValue>, interpreter::Error>
{
@ -494,6 +546,9 @@ impl<'a> interpreter::UserFunctionExecutor for Runtime<'a> {
"_emscripten_memcpy_big" => {
self.mem_copy(context)
},
"_llvm_bswap_i64" => {
self.bitswap_i64(context)
},
_ => {
trace!(target: "wasm", "Trapped due to unhandled function: '{}'", name);
self.user_trap(context)

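`_llvm_bswap_i64` byte-swaps a 64-bit value that the emscripten ABI passes as two 32-bit halves, returning the low word and stashing the high word via `setTempRet0`. A standalone sketch of the swap itself, assuming the two stack values are the low and high words of the original integer, checked against `u64::swap_bytes`:

// bswap_32 is copied from the diff; bswap_64_from_halves mirrors bitswap_i64,
// assuming the two values are the low and high 32-bit words of the original u64.
fn bswap_32(x: u32) -> u32 {
    x >> 24 | x >> 8 & 0xff00 | x << 8 & 0xff0000 | x << 24
}

fn bswap_64_from_halves(lo: u32, hi: u32) -> u64 {
    (bswap_32(lo) as u64) << 32 | bswap_32(hi) as u64
}

fn main() {
    let v: u64 = 0x0123_4567_89ab_cdef;
    let (lo, hi) = (v as u32, (v >> 32) as u32);
    // The half-wise swap agrees with a plain 64-bit byte swap.
    assert_eq!(bswap_64_from_halves(lo, hi), v.swap_bytes());
}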
View File

@ -57,7 +57,7 @@ fn empty() {
test_finalize(interpreter.exec(params, &mut ext)).unwrap()
};
assert_eq!(gas_left, U256::from(99_996));
assert_eq!(gas_left, U256::from(99_992));
}
// This test checks if the contract deserializes payload header properly.
@ -85,7 +85,7 @@ fn logger() {
};
println!("ext.store: {:?}", ext.store);
assert_eq!(gas_left, U256::from(99590));
assert_eq!(gas_left, U256::from(99327));
let address_val: H256 = address.into();
assert_eq!(
ext.store.get(&"0100000000000000000000000000000000000000000000000000000000000000".parse().unwrap()).expect("storage key to exist"),
@ -136,7 +136,7 @@ fn identity() {
}
};
assert_eq!(gas_left, U256::from(99_687));
assert_eq!(gas_left, U256::from(99_672));
assert_eq!(
Address::from_slice(&result),
@ -170,7 +170,7 @@ fn dispersion() {
}
};
assert_eq!(gas_left, U256::from(99_423));
assert_eq!(gas_left, U256::from(99_270));
assert_eq!(
result,
@ -199,7 +199,7 @@ fn suicide_not() {
}
};
assert_eq!(gas_left, U256::from(99_656));
assert_eq!(gas_left, U256::from(99_578));
assert_eq!(
result,
@ -233,7 +233,7 @@ fn suicide() {
}
};
assert_eq!(gas_left, U256::from(99_740));
assert_eq!(gas_left, U256::from(99_621));
assert!(ext.suicides.contains(&refund));
}
@ -264,7 +264,7 @@ fn create() {
assert!(ext.calls.contains(
&FakeCall {
call_type: FakeCallType::Create,
gas: U256::from(99_767),
gas: U256::from(99_674),
sender_address: None,
receive_address: None,
value: Some(1_000_000_000.into()),
@ -272,7 +272,7 @@ fn create() {
code_address: None,
}
));
assert_eq!(gas_left, U256::from(99_759));
assert_eq!(gas_left, U256::from(99_596));
}
@ -306,7 +306,7 @@ fn call_code() {
assert!(ext.calls.contains(
&FakeCall {
call_type: FakeCallType::Call,
gas: U256::from(99_061),
gas: U256::from(99_069),
sender_address: Some(sender),
receive_address: Some(receiver),
value: None,
@ -314,7 +314,7 @@ fn call_code() {
code_address: Some("0d13710000000000000000000000000000000000".parse().unwrap()),
}
));
assert_eq!(gas_left, U256::from(94196));
assert_eq!(gas_left, U256::from(94144));
// siphash result
let res = LittleEndian::read_u32(&result[..]);
@ -351,7 +351,7 @@ fn call_static() {
assert!(ext.calls.contains(
&FakeCall {
call_type: FakeCallType::Call,
gas: U256::from(99_061),
gas: U256::from(99_069),
sender_address: Some(sender),
receive_address: Some(receiver),
value: None,
@ -359,7 +359,7 @@ fn call_static() {
code_address: Some("13077bfb00000000000000000000000000000000".parse().unwrap()),
}
));
assert_eq!(gas_left, U256::from(94196));
assert_eq!(gas_left, U256::from(94144));
// siphash result
let res = LittleEndian::read_u32(&result[..]);
@ -385,6 +385,66 @@ fn realloc() {
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
}
};
assert_eq!(gas_left, U256::from(98326));
assert_eq!(gas_left, U256::from(99432));
assert_eq!(result, vec![0u8; 2]);
}
// Tests that contract's ability to read from a storage
// Test prepopulates address into storage, than executes a contract which read that address from storage and write this address into result
#[test]
fn storage_read() {
let code = load_sample!("storage_read.wasm");
let address: Address = "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6".parse().unwrap();
let mut params = ActionParams::default();
params.gas = U256::from(100_000);
params.code = Some(Arc::new(code));
let mut ext = FakeExt::new();
ext.store.insert("0100000000000000000000000000000000000000000000000000000000000000".into(), address.into());
let (gas_left, result) = {
let mut interpreter = wasm_interpreter();
let result = interpreter.exec(params, &mut ext).expect("Interpreter to execute without any errors");
match result {
GasLeft::Known(_) => { panic!("storage_read should return payload"); },
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
}
};
assert_eq!(gas_left, U256::from(99682));
assert_eq!(Address::from(&result[12..32]), address);
}
// Tests the contract's ability to do 256-bit arithmetic.
// The test passes two 256-bit big-endian operands in the call data, then executes a contract which adds them and writes the sum into the result.
#[test]
fn math_add() {
::ethcore_logger::init_log();
let code = load_sample!("math.wasm");
let mut params = ActionParams::default();
params.gas = U256::from(100_000);
params.code = Some(Arc::new(code));
let mut args = [0u8; 64];
let arg_a = U256::from_dec_str("999999999999999999999999999999").unwrap();
let arg_b = U256::from_dec_str("888888888888888888888888888888").unwrap();
arg_a.to_big_endian(&mut args[0..32]);
arg_b.to_big_endian(&mut args[32..64]);
params.data = Some(args.to_vec());
let (gas_left, result) = {
let mut interpreter = wasm_interpreter();
let result = interpreter.exec(params, &mut FakeExt::new()).expect("Interpreter to execute without any errors");
match result {
GasLeft::Known(_) => { panic!("storage_read should return payload"); },
GasLeft::NeedsReturn { gas_left: gas, data: result, apply_state: _apply } => (gas, result.to_vec()),
}
};
let sum: U256 = (&result[..]).into();
assert_eq!(gas_left, U256::from(96284));
assert_eq!(sum, U256::from_dec_str("1888888888888888888888888888887").unwrap());
}

View File

@ -14,7 +14,8 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use ethkey::{KeyPair, sign, Address, Signature, Message, Public};
use ethkey::{KeyPair, sign, Address, Signature, Message, Public, Secret};
use crypto::ecdh::agree;
use {json, Error, crypto};
use account::Version;
use super::crypto::Crypto;
@ -135,6 +136,12 @@ impl SafeAccount {
crypto::ecies::decrypt(&secret, shared_mac, message).map_err(From::from)
}
/// Agree on shared key.
pub fn agree(&self, password: &str, other: &Public) -> Result<Secret, Error> {
let secret = self.crypto.secret(password)?;
agree(&secret, other).map_err(From::from)
}
/// Derive public key.
pub fn public(&self, password: &str) -> Result<Public, Error> {
let secret = self.crypto.secret(password)?;

View File

@ -97,6 +97,10 @@ impl SimpleSecretStore for EthStore {
self.store.sign_derived(account_ref, password, derivation, message)
}
fn agree(&self, account: &StoreAccountRef, password: &str, other: &Public) -> Result<Secret, Error> {
self.store.agree(account, password, other)
}
fn decrypt(&self, account: &StoreAccountRef, password: &str, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error> {
let account = self.get(account)?;
account.decrypt(password, shared_mac, message)
@ -495,18 +499,26 @@ impl SimpleSecretStore for EthMultiStore {
fn sign(&self, account: &StoreAccountRef, password: &str, message: &Message) -> Result<Signature, Error> {
let accounts = self.get_matching(account, password)?;
for account in accounts {
return account.sign(password, message);
match accounts.first() {
Some(ref account) => account.sign(password, message),
None => Err(Error::InvalidPassword),
}
Err(Error::InvalidPassword)
}
fn decrypt(&self, account: &StoreAccountRef, password: &str, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error> {
let accounts = self.get_matching(account, password)?;
for account in accounts {
return account.decrypt(password, shared_mac, message);
match accounts.first() {
Some(ref account) => account.decrypt(password, shared_mac, message),
None => Err(Error::InvalidPassword),
}
}
fn agree(&self, account: &StoreAccountRef, password: &str, other: &Public) -> Result<Secret, Error> {
let accounts = self.get_matching(account, password)?;
match accounts.first() {
Some(ref account) => account.agree(password, other),
None => Err(Error::InvalidPassword),
}
Err(Error::InvalidPassword)
}
fn create_vault(&self, name: &str, password: &str) -> Result<(), Error> {

View File

@ -60,6 +60,8 @@ pub trait SimpleSecretStore: Send + Sync {
fn sign_derived(&self, account_ref: &StoreAccountRef, password: &str, derivation: Derivation, message: &Message) -> Result<Signature, Error>;
/// Decrypt a message with the given account.
fn decrypt(&self, account: &StoreAccountRef, password: &str, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error>;
/// Agree on shared key.
fn agree(&self, account: &StoreAccountRef, password: &str, other: &Public) -> Result<Secret, Error>;
/// Returns all accounts in this secret store.
fn accounts(&self) -> Result<Vec<StoreAccountRef>, Error>;

View File

@ -116,6 +116,9 @@ impl trace::VMTracer for Informant {
self.stack.extend_from_slice(stack_push);
if let Some((pos, data)) = mem_diff {
if self.memory.len() < (pos + data.len()) {
self.memory.resize(pos + data.len(), 0);
}
self.memory[pos..pos + data.len()].copy_from_slice(data);
}

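The added resize grows the tracer's shadow memory before a diff is copied past its current end, where the slice indexing previously panicked. A minimal sketch of the fix:

// Minimal sketch: grow the shadow memory before applying a diff that extends past
// its current end (previously this indexing would panic).
fn apply_mem_diff(memory: &mut Vec<u8>, pos: usize, data: &[u8]) {
    if memory.len() < pos + data.len() {
        memory.resize(pos + data.len(), 0);
    }
    memory[pos..pos + data.len()].copy_from_slice(data);
}

fn main() {
    let mut mem = vec![0u8; 4];
    apply_mem_diff(&mut mem, 6, &[0xaa, 0xbb]);
    assert_eq!(mem, vec![0, 0, 0, 0, 0, 0, 0xaa, 0xbb]);
}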
View File

@ -35,7 +35,7 @@ use docopt::Docopt;
use rustc_hex::FromHex;
use util::{U256, Bytes, Address};
use ethcore::spec;
use vm::ActionParams;
use vm::{ActionParams, CallType};
mod info;
mod display;
@ -47,15 +47,17 @@ EVM implementation for Parity.
Copyright 2016, 2017 Parity Technologies (UK) Ltd
Usage:
evmbin stats [options]
evmbin [options]
evmbin [-h | --help]
parity-evm stats [options]
parity-evm [options]
parity-evm [-h | --help]
Transaction options:
--code CODE Contract code as hex (without 0x).
--to ADDRESS Recipient address (without 0x).
--from ADDRESS Sender address (without 0x).
--input DATA Input data as hex (without 0x).
--gas GAS Supplied gas as hex (without 0x).
--gas-price WEI Supplied gas price as hex (without 0x).
General options:
--json Display verbose results in JSON.
@ -78,16 +80,26 @@ fn main() {
fn run<T: Informant>(args: Args, mut informant: T) {
let from = arg(args.from(), "--from");
let to = arg(args.to(), "--to");
let code = arg(args.code(), "--code");
let spec = arg(args.spec(), "--chain");
let gas = arg(args.gas(), "--gas");
let gas_price = arg(args.gas_price(), "--gas-price");
let data = arg(args.data(), "--input");
if code.is_none() && to == Address::default() {
die("Either --code or --to is required.");
}
let mut params = ActionParams::default();
params.call_type = if code.is_none() { CallType::Call } else { CallType::None };
params.code_address = to;
params.address = to;
params.sender = from;
params.origin = from;
params.gas = gas;
params.code = Some(Arc::new(code));
params.gas_price = gas_price;
params.code = code.map(Arc::new);
params.data = data;
informant.set_gas(gas);
@ -99,10 +111,12 @@ fn run<T: Informant>(args: Args, mut informant: T) {
struct Args {
cmd_stats: bool,
flag_from: Option<String>,
flag_to: Option<String>,
flag_code: Option<String>,
flag_gas: Option<String>,
flag_gas_price: Option<String>,
flag_input: Option<String>,
flag_spec: Option<String>,
flag_chain: Option<String>,
flag_json: bool,
}
@ -114,6 +128,13 @@ impl Args {
}
}
pub fn gas_price(&self) -> Result<U256, String> {
match self.flag_gas_price {
Some(ref gas_price) => gas_price.parse().map_err(to_string),
None => Ok(U256::zero()),
}
}
pub fn from(&self) -> Result<Address, String> {
match self.flag_from {
Some(ref from) => from.parse().map_err(to_string),
@ -121,10 +142,17 @@ impl Args {
}
}
pub fn code(&self) -> Result<Bytes, String> {
pub fn to(&self) -> Result<Address, String> {
match self.flag_to {
Some(ref to) => to.parse().map_err(to_string),
None => Ok(Address::default()),
}
}
pub fn code(&self) -> Result<Option<Bytes>, String> {
match self.flag_code {
Some(ref code) => code.from_hex().map_err(to_string),
None => Err("Code is required!".into()),
Some(ref code) => code.from_hex().map(Some).map_err(to_string),
None => Ok(None),
}
}
@ -136,7 +164,7 @@ impl Args {
}
pub fn spec(&self) -> Result<spec::Spec, String> {
Ok(match self.flag_spec {
Ok(match self.flag_chain {
Some(ref filename) => {
let file = fs::File::open(filename).map_err(|e| format!("{}", e))?;
spec::Spec::load(::std::env::temp_dir(), file)?
@ -160,3 +188,37 @@ fn die<T: fmt::Display>(msg: T) -> ! {
println!("{}", msg);
::std::process::exit(-1)
}
#[cfg(test)]
mod tests {
use docopt::Docopt;
use super::{Args, USAGE};
fn run<T: AsRef<str>>(args: &[T]) -> Args {
Docopt::new(USAGE).and_then(|d| d.argv(args.into_iter()).deserialize()).unwrap()
}
#[test]
fn should_parse_all_the_options() {
let args = run(&[
"parity-evm",
"--json",
"--gas", "1",
"--gas-price", "2",
"--from", "0000000000000000000000000000000000000003",
"--to", "0000000000000000000000000000000000000004",
"--code", "05",
"--input", "06",
"--chain", "./testfile",
]);
assert_eq!(args.flag_json, true);
assert_eq!(args.gas(), Ok(1.into()));
assert_eq!(args.gas_price(), Ok(2.into()));
assert_eq!(args.from(), Ok(3.into()));
assert_eq!(args.to(), Ok(4.into()));
assert_eq!(args.code(), Ok(Some(vec![05])));
assert_eq!(args.data(), Ok(Some(vec![06])));
assert_eq!(args.flag_chain, Some("./testfile".to_owned()));
}
}

6
js/package-lock.json generated
View File

@ -1,6 +1,6 @@
{
"name": "parity.js",
"version": "1.8.11",
"version": "1.8.16",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -8955,7 +8955,7 @@
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"integrity": "sha1-UWbihkV/AzBgZL5Ul+jbsMPTIIM=",
"requires": {
"brace-expansion": "1.1.8"
}
@ -11417,7 +11417,7 @@
"react-qr-reader": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/react-qr-reader/-/react-qr-reader-1.1.3.tgz",
"integrity": "sha512-ruBF8KaSwUW9nbzjO4rA7/HOCGYZuNUz9od7uBRy8SRBi24nwxWWmwa2z8R6vPGDRglA0y2Qk1aVBuC1olTnHw==",
"integrity": "sha1-dDmnZvyZPLj17u/HLCnblh1AswI=",
"requires": {
"jsqr": "git+https://github.com/JodusNodus/jsQR.git#5ba1acefa1cbb9b2bc92b49f503f2674e2ec212b",
"prop-types": "15.5.10",

View File

@ -1,6 +1,6 @@
{
"name": "parity.js",
"version": "1.8.11",
"version": "1.8.16",
"main": "release/index.js",
"jsnext:main": "src/index.js",
"author": "Parity Team <admin@parity.io>",

View File

@ -28,11 +28,18 @@ export default class Store {
this._migrateStore();
this._api = api;
// Show the first run if it hasn't been shown before
// (thus an undefined value)
this.firstrunVisible = store.get(LS_FIRST_RUN_KEY) === undefined;
// Show the first run if the storage doesn't hold a `false` value
const firstrunVisible = store.get(LS_FIRST_RUN_KEY) !== false;
// Only check accounts if we might show the first run
if (firstrunVisible) {
api.transport.once('open', () => {
this._checkAccounts();
});
} else {
this.firstrunVisible = false;
}
}
@action closeFirstrun = () => {
@ -50,7 +57,7 @@ export default class Store {
}
/**
* Migrate the old LocalStorage ket format
* Migrate the old LocalStorage key format
* to the new one
*/
_migrateStore () {
@ -70,12 +77,16 @@ export default class Store {
this._api.parity.allAccountsInfo()
])
.then(([ vaults, info ]) => {
const accounts = Object.keys(info).filter((address) => info[address].uuid);
const accounts = Object.keys(info)
.filter((address) => info[address].uuid)
// In DEV mode, the empty phrase account is already added
.filter((address) => address.toLowerCase() !== '0x00a329c0648769a73afac7f9381e08fb43dbea72');
// Has accounts if any vaults or accounts
const hasAccounts = (accounts && accounts.length > 0) || (vaults && vaults.length > 0);
// Show First Run if no accounts and no vaults
this.toggleFirstrun(this.firstrunVisible || !hasAccounts);
this.toggleFirstrun(!hasAccounts);
})
.catch((error) => {
console.error('checkAccounts', error);

View File

@ -59,14 +59,19 @@ class FakeTransport {
class FrameSecureApi extends SecureApi {
constructor (transport) {
super(transport.uiUrl, null, () => {
return transport;
});
super(
transport.uiUrl,
null,
() => transport,
() => 'http:'
);
}
connect () {
// Do nothing - this API does not need connecting
this.emit('connecting');
// Fetch settings
this._fetchSettings();
// Fire connected event with some delay.
setTimeout(() => {
this.emit('connected');

View File

@ -325,7 +325,8 @@ export default class SecureApi extends Api {
_fetchSettings () {
return Promise
.all([
this._uiApi.parity.dappsUrl(),
// ignore dapps disabled errors
this._uiApi.parity.dappsUrl().catch(() => null),
this._uiApi.parity.wsUrl()
])
.then(([dappsUrl, wsUrl]) => {

View File

@ -78,6 +78,8 @@ pass = "test_pass"
[secretstore]
disable = false
disable_http = false
disable_acl_check = false
nodes = []
http_interface = "local"
http_port = 8082

View File

@ -195,7 +195,7 @@ usage! {
or |c: &Config| otry!(c.websockets).interface.clone(),
flag_ws_apis: String = "web3,eth,pubsub,net,parity,parity_pubsub,traces,rpc,secretstore,shh,shh_pubsub",
or |c: &Config| otry!(c.websockets).apis.as_ref().map(|vec| vec.join(",")),
flag_ws_origins: String = "chrome-extension://*",
flag_ws_origins: String = "chrome-extension://*,moz-extension://*",
or |c: &Config| otry!(c.websockets).origins.as_ref().map(|vec| vec.join(",")),
flag_ws_hosts: String = "none",
or |c: &Config| otry!(c.websockets).hosts.as_ref().map(|vec| vec.join(",")),
@ -217,6 +217,10 @@ usage! {
// Secret Store
flag_no_secretstore: bool = false,
or |c: &Config| otry!(c.secretstore).disable.clone(),
flag_no_secretstore_http: bool = false,
or |c: &Config| otry!(c.secretstore).disable_http.clone(),
flag_no_secretstore_acl_check: bool = false,
or |c: &Config| otry!(c.secretstore).disable_acl_check.clone(),
flag_secretstore_secret: Option<String> = None,
or |c: &Config| otry!(c.secretstore).self_secret.clone().map(Some),
flag_secretstore_nodes: String = "",
@ -520,6 +524,8 @@ struct Dapps {
#[derive(Default, Debug, PartialEq, Deserialize)]
struct SecretStore {
disable: Option<bool>,
disable_http: Option<bool>,
disable_acl_check: Option<bool>,
self_secret: Option<String>,
nodes: Option<Vec<String>>,
interface: Option<String>,
@ -796,6 +802,8 @@ mod tests {
flag_no_dapps: false,
flag_no_secretstore: false,
flag_no_secretstore_http: false,
flag_no_secretstore_acl_check: false,
flag_secretstore_secret: None,
flag_secretstore_nodes: "".into(),
flag_secretstore_interface: "local".into(),
@ -1031,6 +1039,8 @@ mod tests {
}),
secretstore: Some(SecretStore {
disable: None,
disable_http: None,
disable_acl_check: None,
self_secret: None,
nodes: None,
interface: None,

View File

@ -234,6 +234,8 @@ API and Console Options:
Secret Store Options:
--no-secretstore Disable Secret Store functionality. (default: {flag_no_secretstore})
--no-secretstore-http Disable Secret Store HTTP API. (default: {flag_no_secretstore_http})
--no-acl-check Disable ACL check (useful for test environments). (default: {flag_no_secretstore_acl_check})
--secretstore-secret SECRET Hex-encoded secret key of this node.
(required, default: {flag_secretstore_secret:?}).
--secretstore-nodes NODES Comma-separated list of other secret store cluster nodes in form

View File

@ -20,6 +20,7 @@ use std::net::SocketAddr;
use std::path::{Path, PathBuf};
use std::collections::BTreeMap;
use std::cmp::max;
use std::str::FromStr;
use cli::{Args, ArgsError};
use util::{Hashable, H256, U256, Bytes, version_data, Address};
use util::journaldb::Algorithm;
@ -41,7 +42,7 @@ use ethcore_logger::Config as LogConfig;
use dir::{self, Directories, default_hypervisor_path, default_local_path, default_data_path};
use dapps::Configuration as DappsConfiguration;
use ipfs::Configuration as IpfsConfiguration;
use secretstore::Configuration as SecretStoreConfiguration;
use secretstore::{Configuration as SecretStoreConfiguration, NodeSecretKey};
use updater::{UpdatePolicy, UpdateFilter, ReleaseTrack};
use run::RunCmd;
use blockchain::{BlockchainCmd, ImportBlockchain, ExportBlockchain, KillBlockchain, ExportState, DataFormat};
@ -551,6 +552,10 @@ impl Configuration {
Ok(options)
}
fn ui_port(&self) -> u16 {
self.args.flag_ports_shift + self.args.flag_ui_port
}
fn ntp_servers(&self) -> Vec<String> {
self.args.flag_ntp_servers.split(",").map(str::to_owned).collect()
}
@ -560,12 +565,15 @@ impl Configuration {
enabled: self.ui_enabled(),
ntp_servers: self.ntp_servers(),
interface: self.ui_interface(),
port: self.args.flag_ports_shift + self.args.flag_ui_port,
port: self.ui_port(),
hosts: self.ui_hosts(),
}
}
fn dapps_config(&self) -> DappsConfiguration {
let dev_ui = if self.args.flag_ui_no_validation { vec![("localhost".to_owned(), 3000)] } else { vec![] };
let ui_port = self.ui_port();
DappsConfiguration {
enabled: self.dapps_enabled(),
ntp_servers: self.ntp_servers(),
@ -575,17 +583,34 @@ impl Configuration {
} else {
vec![]
},
extra_embed_on: if self.args.flag_ui_no_validation {
vec![("localhost".to_owned(), 3000)]
} else {
vec![]
extra_embed_on: {
let mut extra_embed = dev_ui.clone();
match self.ui_hosts() {
// In case host validation is disabled allow all frame ancestors
None => extra_embed.push(("*".to_owned(), ui_port)),
Some(hosts) => extra_embed.extend(hosts.into_iter().filter_map(|host| {
let mut it = host.split(":");
let host = it.next();
let port = it.next().and_then(|v| u16::from_str(v).ok());
match (host, port) {
(Some(host), Some(port)) => Some((host.into(), port)),
(Some(host), None) => Some((host.into(), ui_port)),
_ => None,
}
})),
}
extra_embed
},
extra_script_src: dev_ui,
}
}
fn secretstore_config(&self) -> Result<SecretStoreConfiguration, String> {
Ok(SecretStoreConfiguration {
enabled: self.secretstore_enabled(),
http_enabled: self.secretstore_http_enabled(),
acl_check_enabled: self.secretstore_acl_check_enabled(),
self_secret: self.secretstore_self_secret()?,
nodes: self.secretstore_nodes()?,
interface: self.secretstore_interface(),
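Not part of the diff: the "host" / "host:port" parsing used for extra_embed_on above, pulled out as a standalone sketch. The function name and explicit default_port parameter are illustrative; the real code iterates over self.ui_hosts() and falls back to ui_port.

use std::str::FromStr;

// Split an entry into (host, port), falling back to `default_port` when no
// parsable port is given. Mirrors the filter_map in the hunk above.
fn host_port(entry: &str, default_port: u16) -> Option<(String, u16)> {
    let mut it = entry.split(':');
    let host = it.next();
    let port = it.next().and_then(|v| u16::from_str(v).ok());
    match (host, port) {
        (Some(host), Some(port)) => Some((host.to_owned(), port)),
        (Some(host), None) => Some((host.to_owned(), default_port)),
        _ => None,
    }
}

fn main() {
    assert_eq!(host_port("localhost:3000", 8180), Some(("localhost".to_owned(), 3000)));
    assert_eq!(host_port("parity.example", 8180), Some(("parity.example".to_owned(), 8180)));
}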
@ -993,10 +1018,13 @@ impl Configuration {
self.interface(&self.args.flag_secretstore_http_interface)
}
fn secretstore_self_secret(&self) -> Result<Option<Secret>, String> {
fn secretstore_self_secret(&self) -> Result<Option<NodeSecretKey>, String> {
match self.args.flag_secretstore_secret {
Some(ref s) => Ok(Some(s.parse()
.map_err(|e| format!("Invalid secret store secret: {}. Error: {:?}", s, e))?)),
Some(ref s) if s.len() == 64 => Ok(Some(NodeSecretKey::Plain(s.parse()
.map_err(|e| format!("Invalid secret store secret: {}. Error: {:?}", s, e))?))),
Some(ref s) if s.len() == 40 => Ok(Some(NodeSecretKey::KeyStore(s.parse()
.map_err(|e| format!("Invalid secret store secret address: {}. Error: {:?}", s, e))?))),
Some(_) => Err(format!("Invalid secret store secret. Must be either existing account address, or hex-encoded private key")),
None => Ok(None),
}
}
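Not part of the diff: the length-based dispatch above in isolation. A 64-character hex string is taken as a raw private key (NodeSecretKey::Plain), a 40-character hex string as a keystore account address (NodeSecretKey::KeyStore); anything else is rejected. Types are simplified to plain Strings for this sketch, so the further hex parsing shown in the hunk is omitted.

#[derive(Debug, PartialEq)]
enum NodeSecretKey {
    Plain(String),    // hex-encoded private key
    KeyStore(String), // hex-encoded account address
}

fn parse_self_secret(s: &str) -> Result<NodeSecretKey, String> {
    match s.len() {
        64 => Ok(NodeSecretKey::Plain(s.to_owned())),
        40 => Ok(NodeSecretKey::KeyStore(s.to_owned())),
        _ => Err("Invalid secret store secret. Must be either existing account address, \
                  or hex-encoded private key".to_owned()),
    }
}

fn main() {
    assert_eq!(parse_self_secret(&"ab".repeat(32)), Ok(NodeSecretKey::Plain("ab".repeat(32))));
    assert_eq!(parse_self_secret(&"ab".repeat(20)), Ok(NodeSecretKey::KeyStore("ab".repeat(20))));
    assert!(parse_self_secret("0xdeadbeef").is_err());
}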
@ -1045,6 +1073,14 @@ impl Configuration {
!self.args.flag_no_secretstore && cfg!(feature = "secretstore")
}
fn secretstore_http_enabled(&self) -> bool {
!self.args.flag_no_secretstore_http && cfg!(feature = "secretstore")
}
fn secretstore_acl_check_enabled(&self) -> bool {
!self.args.flag_no_secretstore_acl_check
}
fn ui_enabled(&self) -> bool {
if self.args.flag_force_ui {
return true;
@ -1275,7 +1311,7 @@ mod tests {
interface: "127.0.0.1".into(),
port: 8546,
apis: ApiSet::UnsafeContext,
origins: Some(vec!["chrome-extension://*".into()]),
origins: Some(vec!["chrome-extension://*".into(), "moz-extension://*".into()]),
hosts: Some(vec![]),
signer_path: expected.into(),
ui_address: Some(("127.0.0.1".to_owned(), 8180)),
@ -1350,6 +1386,7 @@ mod tests {
whisper: Default::default(),
};
expected.secretstore_conf.enabled = cfg!(feature = "secretstore");
expected.secretstore_conf.http_enabled = cfg!(feature = "secretstore");
assert_eq!(conf.into_command().unwrap().cmd, Cmd::Run(expected));
}

View File

@ -40,6 +40,7 @@ pub struct Configuration {
pub dapps_path: PathBuf,
pub extra_dapps: Vec<PathBuf>,
pub extra_embed_on: Vec<(String, u16)>,
pub extra_script_src: Vec<(String, u16)>,
}
impl Default for Configuration {
@ -56,6 +57,7 @@ impl Default for Configuration {
dapps_path: replace_home(&data_dir, "$BASE/dapps").into(),
extra_dapps: vec![],
extra_embed_on: vec![],
extra_script_src: vec![],
}
}
}
@ -168,6 +170,7 @@ pub fn new(configuration: Configuration, deps: Dependencies) -> Result<Option<Mi
configuration.extra_dapps,
rpc::DAPPS_DOMAIN,
configuration.extra_embed_on,
configuration.extra_script_src,
).map(Some)
}
@ -214,6 +217,7 @@ mod server {
_extra_dapps: Vec<PathBuf>,
_dapps_domain: &str,
_extra_embed_on: Vec<(String, u16)>,
_extra_script_src: Vec<(String, u16)>,
) -> Result<Middleware, String> {
Err("Your Parity version has been compiled without WebApps support.".into())
}
@ -251,6 +255,7 @@ mod server {
extra_dapps: Vec<PathBuf>,
dapps_domain: &str,
extra_embed_on: Vec<(String, u16)>,
extra_script_src: Vec<(String, u16)>,
) -> Result<Middleware, String> {
let signer = deps.signer;
let parity_remote = parity_reactor::Remote::new(deps.remote.clone());
@ -262,6 +267,7 @@ mod server {
parity_remote,
deps.ui_address,
extra_embed_on,
extra_script_src,
dapps_path,
extra_dapps,
dapps_domain,
@ -291,7 +297,7 @@ mod server {
pub fn service(middleware: &Option<Middleware>) -> Option<Arc<rpc_apis::DappsService>> {
middleware.as_ref().map(|m| Arc::new(DappsServiceWrapper {
endpoints: m.endpoints()
endpoints: m.endpoints().clone(),
}) as Arc<rpc_apis::DappsService>)
}
@ -313,5 +319,10 @@ mod server {
})
.collect()
}
fn refresh_local_dapps(&self) -> bool {
self.endpoints.refresh_local_dapps();
true
}
}
}

View File

@ -94,7 +94,7 @@ impl From<UiConfiguration> for HttpConfiguration {
enabled: conf.enabled,
interface: conf.interface,
port: conf.port,
apis: rpc_apis::ApiSet::SafeContext,
apis: rpc_apis::ApiSet::UnsafeContext,
cors: None,
hosts: conf.hosts,
server_threads: None,
@ -163,7 +163,7 @@ impl Default for WsConfiguration {
interface: "127.0.0.1".into(),
port: 8546,
apis: ApiSet::UnsafeContext,
origins: Some(vec!["chrome-extension://*".into()]),
origins: Some(vec!["chrome-extension://*".into(), "moz-extension://*".into()]),
hosts: Some(Vec::new()),
signer_path: replace_home(&data_dir, "$BASE/signer").into(),
support_token_api: true,

View File

@ -56,7 +56,7 @@ use signer;
use url;
// how often to take periodic snapshots.
const SNAPSHOT_PERIOD: u64 = 10000;
const SNAPSHOT_PERIOD: u64 = 5000;
// how many blocks to wait before starting a periodic snapshot.
const SNAPSHOT_HISTORY: u64 = 100;
@ -507,7 +507,7 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
}
// Attempt to sign in the engine signer.
if !passwords.into_iter().any(|p| miner.set_engine_signer(engine_signer, p).is_ok()) {
if !passwords.iter().any(|p| miner.set_engine_signer(engine_signer, (*p).clone()).is_ok()) {
return Err(format!("No valid password for the consensus signer {}. {}", engine_signer, VERIFY_PASSWORD_HINT));
}
}
@ -734,6 +734,8 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
// secret store key server
let secretstore_deps = secretstore::Dependencies {
client: client.clone(),
account_provider: account_provider,
accounts_passwords: &passwords,
};
let secretstore_key_server = secretstore::start(cmd.secretstore_conf.clone(), secretstore_deps)?;

View File

@ -17,17 +17,32 @@
use std::collections::BTreeMap;
use std::sync::Arc;
use dir::default_data_path;
use ethcore::account_provider::AccountProvider;
use ethcore::client::Client;
use ethkey::{Secret, Public};
use helpers::replace_home;
use util::Address;
#[derive(Debug, PartialEq, Clone)]
/// This node secret key.
pub enum NodeSecretKey {
/// Stored as plain text in configuration file.
Plain(Secret),
/// Stored as account in key store.
KeyStore(Address),
}
#[derive(Debug, PartialEq, Clone)]
/// Secret store configuration
pub struct Configuration {
/// Is secret store functionality enabled?
pub enabled: bool,
/// Is HTTP API enabled?
pub http_enabled: bool,
/// Is ACL check enabled.
pub acl_check_enabled: bool,
/// This node secret.
pub self_secret: Option<Secret>,
pub self_secret: Option<NodeSecretKey>,
/// Other nodes IDs + addresses.
pub nodes: BTreeMap<Public, (String, u16)>,
/// Interface to listen to
@ -43,9 +58,13 @@ pub struct Configuration {
}
/// Secret store dependencies
pub struct Dependencies {
pub struct Dependencies<'a> {
/// Blockchain client.
pub client: Arc<Client>,
/// Account provider.
pub account_provider: Arc<AccountProvider>,
/// Passed accounts passwords.
pub accounts_passwords: &'a [String],
}
#[cfg(not(feature = "secretstore"))]
@ -65,9 +84,11 @@ mod server {
#[cfg(feature="secretstore")]
mod server {
use std::sync::Arc;
use ethcore_secretstore;
use ethkey::KeyPair;
use super::{Configuration, Dependencies};
use ansi_term::Colour::Red;
use super::{Configuration, Dependencies, NodeSecretKey};
/// Key server
pub struct KeyServer {
@ -76,17 +97,45 @@ mod server {
impl KeyServer {
/// Create new key server
pub fn new(conf: Configuration, deps: Dependencies) -> Result<Self, String> {
let self_secret = conf.self_secret.ok_or("self secret is required when using secretstore")?;
let mut conf = ethcore_secretstore::ServiceConfiguration {
listener_address: ethcore_secretstore::NodeAddress {
pub fn new(mut conf: Configuration, deps: Dependencies) -> Result<Self, String> {
if !conf.acl_check_enabled {
warn!("Running SecretStore with disabled ACL check: {}", Red.bold().paint("everyone has access to stored keys"));
}
let self_secret: Arc<ethcore_secretstore::NodeKeyPair> = match conf.self_secret.take() {
Some(NodeSecretKey::Plain(secret)) => Arc::new(ethcore_secretstore::PlainNodeKeyPair::new(
KeyPair::from_secret(secret).map_err(|e| format!("invalid secret: {}", e))?)),
Some(NodeSecretKey::KeyStore(account)) => {
// Check if account exists
if !deps.account_provider.has_account(account.clone()).unwrap_or(false) {
return Err(format!("Account {} passed as secret store node key is not found", account));
}
// Check if any passwords have been read from the password file(s)
if deps.accounts_passwords.is_empty() {
return Err(format!("No password found for the secret store node account {}", account));
}
// Find a password that can sign with the secret store node account.
let password = deps.accounts_passwords.iter()
.find(|p| deps.account_provider.sign(account.clone(), Some((*p).clone()), Default::default()).is_ok())
.ok_or(format!("No valid password for the secret store node account {}", account))?;
Arc::new(ethcore_secretstore::KeyStoreNodeKeyPair::new(deps.account_provider, account, password.clone())
.map_err(|e| format!("{}", e))?)
},
None => return Err("self secret is required when using secretstore".into()),
};
let key_server_name = format!("{}:{}", conf.interface, conf.port);
let mut cconf = ethcore_secretstore::ServiceConfiguration {
listener_address: if conf.http_enabled { Some(ethcore_secretstore::NodeAddress {
address: conf.http_interface.clone(),
port: conf.http_port,
},
}) } else { None },
data_path: conf.data_path.clone(),
acl_check_enabled: conf.acl_check_enabled,
cluster_config: ethcore_secretstore::ClusterConfiguration {
threads: 4,
self_private: (**self_secret).into(),
listener_address: ethcore_secretstore::NodeAddress {
address: conf.interface.clone(),
port: conf.port,
@ -99,12 +148,10 @@ mod server {
},
};
let self_key_pair = KeyPair::from_secret(self_secret.clone())
.map_err(|e| format!("valid secret is required when using secretstore. Error: {}", e))?;
conf.cluster_config.nodes.insert(self_key_pair.public().clone(), conf.cluster_config.listener_address.clone());
cconf.cluster_config.nodes.insert(self_secret.public().clone(), cconf.cluster_config.listener_address.clone());
let key_server = ethcore_secretstore::start(deps.client, conf)
.map_err(Into::<String>::into)?;
let key_server = ethcore_secretstore::start(deps.client, self_secret, cconf)
.map_err(|e| format!("Error starting KeyServer {}: {}", key_server_name, e))?;
Ok(KeyServer {
_key_server: key_server,
@ -120,6 +167,8 @@ impl Default for Configuration {
let data_dir = default_data_path();
Configuration {
enabled: true,
http_enabled: true,
acl_check_enabled: true,
self_secret: None,
nodes: BTreeMap::new(),
interface: "127.0.0.1".to_owned(),

View File

@ -24,6 +24,7 @@ serde_json = "1.0"
time = "0.1"
tokio-timer = "0.1"
transient-hashmap = "0.4"
itertools = "0.5"
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }

View File

@ -18,9 +18,10 @@ use std::io::{self, Read, Write};
use std::path::Path;
use std::{fs, time, mem};
use itertools::Itertools;
use rand::Rng;
use rand::os::OsRng;
use util::{H256, Hashable, Itertools};
use util::{H256, Hashable};
/// Providing current time in seconds
pub trait TimeProvider {

View File

@ -24,6 +24,7 @@ extern crate cid;
extern crate crypto as rust_crypto;
extern crate futures;
extern crate futures_cpupool;
extern crate itertools;
extern crate multihash;
extern crate order_stat;
extern crate rand;

View File

@ -22,12 +22,6 @@ use v1::types::LocalDapp;
pub trait DappsService: Send + Sync + 'static {
/// List available local dapps.
fn list_dapps(&self) -> Vec<LocalDapp>;
}
impl<F> DappsService for F where
F: Fn() -> Vec<LocalDapp> + Send + Sync + 'static
{
fn list_dapps(&self) -> Vec<LocalDapp> {
(*self)()
}
/// Refresh local dapps list
fn refresh_local_dapps(&self) -> bool;
}

View File

@ -133,7 +133,7 @@ impl<C: MiningBlockChainClient, M: MinerService> Dispatcher for FullDispatcher<C
-> BoxFuture<WithToken<SignedTransaction>, Error>
{
let (client, miner) = (self.client.clone(), self.miner.clone());
let network_id = client.signing_network_id();
let chain_id = client.signing_chain_id();
let address = filled.from;
future::done({
let t = Transaction {
@ -146,12 +146,12 @@ impl<C: MiningBlockChainClient, M: MinerService> Dispatcher for FullDispatcher<C
};
if accounts.is_hardware_address(address) {
hardware_signature(&*accounts, address, t, network_id).map(WithToken::No)
hardware_signature(&*accounts, address, t, chain_id).map(WithToken::No)
} else {
let hash = t.hash(network_id);
let hash = t.hash(chain_id);
let signature = try_bf!(signature(&*accounts, address, hash, password));
Ok(signature.map(|sig| {
SignedTransaction::new(t.with_signature(sig, network_id))
SignedTransaction::new(t.with_signature(sig, chain_id))
.expect("Transaction was signed by AccountsProvider; it never produces invalid signatures; qed")
}))
}
@ -358,7 +358,7 @@ impl Dispatcher for LightDispatcher {
fn sign(&self, accounts: Arc<AccountProvider>, filled: FilledTransactionRequest, password: SignWith)
-> BoxFuture<WithToken<SignedTransaction>, Error>
{
let network_id = self.client.signing_network_id();
let chain_id = self.client.signing_chain_id();
let address = filled.from;
let with_nonce = move |filled: FilledTransactionRequest, nonce| {
@ -372,14 +372,14 @@ impl Dispatcher for LightDispatcher {
};
if accounts.is_hardware_address(address) {
return hardware_signature(&*accounts, address, t, network_id).map(WithToken::No)
return hardware_signature(&*accounts, address, t, chain_id).map(WithToken::No)
}
let hash = t.hash(network_id);
let hash = t.hash(chain_id);
let signature = signature(&*accounts, address, hash, password)?;
Ok(signature.map(|sig| {
SignedTransaction::new(t.with_signature(sig, network_id))
SignedTransaction::new(t.with_signature(sig, chain_id))
.expect("Transaction was signed by AccountsProvider; it never produces invalid signatures; qed")
}))
};
@ -512,6 +512,10 @@ pub fn execute<D: Dispatcher + 'static>(
).boxed()
},
ConfirmationPayload::EthSignMessage(address, data) => {
if accounts.is_hardware_address(address) {
return future::err(errors::unsupported("Signing via hardware wallets is not supported.", None)).boxed();
}
let hash = eth_data_hash(data);
let res = signature(&accounts, address, hash, pass)
.map(|result| result
@ -522,6 +526,10 @@ pub fn execute<D: Dispatcher + 'static>(
future::done(res).boxed()
},
ConfirmationPayload::Decrypt(address, data) => {
if accounts.is_hardware_address(address) {
return future::err(errors::unsupported("Decrypting via hardware wallets is not supported.", None)).boxed();
}
let res = decrypt(&accounts, address, data, pass)
.map(|result| result
.map(RpcBytes)
@ -544,20 +552,20 @@ fn signature(accounts: &AccountProvider, address: Address, hash: H256, password:
}
// obtain a hardware signature from the given account.
fn hardware_signature(accounts: &AccountProvider, address: Address, t: Transaction, network_id: Option<u64>)
fn hardware_signature(accounts: &AccountProvider, address: Address, t: Transaction, chain_id: Option<u64>)
-> Result<SignedTransaction, Error>
{
debug_assert!(accounts.is_hardware_address(address));
let mut stream = rlp::RlpStream::new();
t.rlp_append_unsigned_transaction(&mut stream, network_id);
t.rlp_append_unsigned_transaction(&mut stream, chain_id);
let signature = accounts.sign_with_hardware(address, &stream.as_raw())
.map_err(|e| {
debug!(target: "miner", "Error signing transaction with hardware wallet: {}", e);
errors::account("Error signing transaction with hardware wallet", e)
})?;
SignedTransaction::new(t.with_signature(signature, network_id))
SignedTransaction::new(t.with_signature(signature, chain_id))
.map_err(|e| {
debug!(target: "miner", "Hardware wallet has produced invalid signature: {}", e);
errors::account("Invalid signature generated", e)
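Not part of the diff: background for the network_id to chain_id rename in this file. Under EIP-155 the chain id is folded into the signature's v value; a simplified sketch with plain integers (the real code works through the typed Transaction and signature APIs).

// v encoding: chain_id * 2 + 35 (+ recovery bit) for replay-protected
// transactions, 27/28 for legacy ones.
fn signature_v(recovery_id: u8, chain_id: Option<u64>) -> u64 {
    match chain_id {
        Some(id) => u64::from(recovery_id) + 35 + id * 2,
        None => u64::from(recovery_id) + 27,
    }
}

fn main() {
    assert_eq!(signature_v(0, Some(1)), 37); // mainnet, recovery bit 0
    assert_eq!(signature_v(1, Some(1)), 38); // mainnet, recovery bit 1
    assert_eq!(signature_v(0, None), 27);    // pre-EIP-155
}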

View File

@ -71,6 +71,14 @@ pub fn public_unsupported(details: Option<String>) -> Error {
}
}
pub fn unsupported<T: Into<String>>(msg: T, details: Option<T>) -> Error {
Error {
code: ErrorCode::ServerError(codes::UNSUPPORTED_REQUEST),
message: msg.into(),
data: details.map(Into::into).map(Value::String),
}
}
pub fn request_not_found() -> Error {
Error {
code: ErrorCode::ServerError(codes::REQUEST_NOT_FOUND),
@ -302,7 +310,7 @@ pub fn transaction_message(error: TransactionError) -> String {
GasLimitExceeded { limit, got } => {
format!("Transaction cost exceeds current gas limit. Limit: {}, got: {}. Try decreasing supplied gas.", limit, got)
},
InvalidNetworkId => "Invalid network id.".into(),
InvalidChainId => "Invalid chain id.".into(),
InvalidGasLimit(_) => "Supplied gas is beyond limit.".into(),
SenderBanned => "Sender is banned in local queue.".into(),
RecipientBanned => "Recipient is banned in local queue.".into(),

View File

@ -18,6 +18,7 @@ use std::sync::Arc;
use ethcore::client::MiningBlockChainClient;
use ethcore::miner::MinerService;
use ethcore::transaction::{Transaction, SignedTransaction, Action};
use util::U256;
use jsonrpc_core::Error;
use v1::helpers::CallRequest;
@ -27,13 +28,22 @@ pub fn sign_call<B: MiningBlockChainClient, M: MinerService>(
client: &Arc<B>,
miner: &Arc<M>,
request: CallRequest,
gas_cap: bool,
) -> Result<SignedTransaction, Error> {
let from = request.from.unwrap_or(0.into());
let mut gas = request.gas.unwrap_or(U256::max_value());
if gas_cap {
let max_gas = 50_000_000.into();
if gas > max_gas {
warn!("Gas limit capped to {} (from {})", max_gas, gas);
gas = max_gas
}
}
Ok(Transaction {
nonce: request.nonce.unwrap_or_else(|| client.latest_nonce(&from)),
action: request.to.map_or(Action::Create, Action::Call),
gas: request.gas.unwrap_or(50_000_000.into()),
gas,
gas_price: request.gas_price.unwrap_or_else(|| default_gas_price(&**client, &**miner)),
value: request.value.unwrap_or(0.into()),
data: request.data.unwrap_or_default(),
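Not part of the diff: the gas-capping rule above in isolation. With gas_cap set (requests arriving from dapps, per meta.is_dapp() in the callers), the requested gas, which defaults to the maximum value when unspecified, is clamped to 50,000,000. U256 is replaced by u64 for brevity and the warn! log is skipped.

fn capped_gas(requested: Option<u64>, gas_cap: bool) -> u64 {
    const MAX_GAS: u64 = 50_000_000;
    let mut gas = requested.unwrap_or(u64::max_value());
    if gas_cap && gas > MAX_GAS {
        gas = MAX_GAS;
    }
    gas
}

fn main() {
    assert_eq!(capped_gas(None, true), 50_000_000);
    assert_eq!(capped_gas(Some(100_000_000), true), 50_000_000);
    assert_eq!(capped_gas(Some(100_000_000), false), 100_000_000);
    assert_eq!(capped_gas(Some(21_000), true), 21_000);
}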

View File

@ -612,9 +612,9 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
self.send_raw_transaction(raw)
}
fn call(&self, request: CallRequest, num: Trailing<BlockNumber>) -> BoxFuture<Bytes, Error> {
fn call(&self, meta: Self::Metadata, request: CallRequest, num: Trailing<BlockNumber>) -> BoxFuture<Bytes, Error> {
let request = CallRequest::into(request);
let signed = match fake_sign::sign_call(&self.client, &self.miner, request) {
let signed = match fake_sign::sign_call(&self.client, &self.miner, request, meta.is_dapp()) {
Ok(signed) => signed,
Err(e) => return future::err(e).boxed(),
};
@ -628,9 +628,9 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
).boxed()
}
fn estimate_gas(&self, request: CallRequest, num: Trailing<BlockNumber>) -> BoxFuture<RpcU256, Error> {
fn estimate_gas(&self, meta: Self::Metadata, request: CallRequest, num: Trailing<BlockNumber>) -> BoxFuture<RpcU256, Error> {
let request = CallRequest::into(request);
let signed = match fake_sign::sign_call(&self.client, &self.miner, request) {
let signed = match fake_sign::sign_call(&self.client, &self.miner, request, meta.is_dapp()) {
Ok(signed) => signed,
Err(e) => return future::err(e).boxed(),
};

View File

@ -245,17 +245,27 @@ impl<C: Send + Sync + 'static> EthPubSub for EthPubSubClient<C> {
kind: pubsub::Kind,
params: Trailing<pubsub::Params>,
) {
match (kind, params.into()) {
let error = match (kind, params.into()) {
(pubsub::Kind::NewHeads, None) => {
self.heads_subscribers.write().push(subscriber)
self.heads_subscribers.write().push(subscriber);
return;
},
(pubsub::Kind::Logs, Some(pubsub::Params::Logs(filter))) => {
self.logs_subscribers.write().push(subscriber, filter.into());
return;
},
(pubsub::Kind::NewHeads, _) => {
errors::invalid_params("newHeads", "Expected no parameters.")
},
(pubsub::Kind::Logs, _) => {
errors::invalid_params("logs", "Expected a filter object.")
},
_ => {
let _ = subscriber.reject(errors::unimplemented(None));
errors::unimplemented(None)
},
}
};
let _ = subscriber.reject(error);
}
fn unsubscribe(&self, id: SubscriptionId) -> BoxFuture<bool, Error> {

View File

@ -383,7 +383,7 @@ impl Eth for EthClient {
self.send_raw_transaction(raw)
}
fn call(&self, req: CallRequest, num: Trailing<BlockNumber>) -> BoxFuture<Bytes, Error> {
fn call(&self, _meta: Self::Metadata, req: CallRequest, num: Trailing<BlockNumber>) -> BoxFuture<Bytes, Error> {
self.fetcher().proved_execution(req, num).and_then(|res| {
match res {
Ok(exec) => Ok(exec.output.into()),
@ -392,7 +392,7 @@ impl Eth for EthClient {
}).boxed()
}
fn estimate_gas(&self, req: CallRequest, num: Trailing<BlockNumber>) -> BoxFuture<RpcU256, Error> {
fn estimate_gas(&self, _meta: Self::Metadata, req: CallRequest, num: Trailing<BlockNumber>) -> BoxFuture<RpcU256, Error> {
// TODO: binary chop for more accurate estimates.
self.fetcher().proved_execution(req, num).and_then(|res| {
match res {

View File

@ -390,7 +390,7 @@ impl Parity for ParityClient {
ipfs::cid(content)
}
fn call(&self, _requests: Vec<CallRequest>, _block: Trailing<BlockNumber>) -> BoxFuture<Vec<Bytes>, Error> {
fn call(&self, _meta: Self::Metadata, _requests: Vec<CallRequest>, _block: Trailing<BlockNumber>) -> BoxFuture<Vec<Bytes>, Error> {
future::err(errors::light_unimplemented(None)).boxed()
}
}

View File

@ -135,6 +135,10 @@ impl<F: Fetch> ParitySet for ParitySetClient<F> {
}))
}
fn dapps_refresh(&self) -> Result<bool, Error> {
self.dapps.as_ref().map(|dapps| dapps.refresh_local_dapps()).ok_or_else(errors::dapps_disabled)
}
fn dapps_list(&self) -> Result<Vec<LocalDapp>, Error> {
self.dapps.as_ref().map(|dapps| dapps.list_dapps()).ok_or_else(errors::dapps_disabled)
}

View File

@ -17,7 +17,9 @@
//! Traces api implementation.
use jsonrpc_core::Error;
use jsonrpc_core::futures::{future, Future, BoxFuture};
use jsonrpc_macros::Trailing;
use v1::Metadata;
use v1::traits::Traces;
use v1::helpers::errors;
use v1::types::{TraceFilter, LocalizedTrace, BlockNumber, Index, CallRequest, Bytes, TraceResults, TraceOptions, H256};
@ -27,6 +29,8 @@ use v1::types::{TraceFilter, LocalizedTrace, BlockNumber, Index, CallRequest, By
pub struct TracesClient;
impl Traces for TracesClient {
type Metadata = Metadata;
fn filter(&self, _filter: TraceFilter) -> Result<Option<Vec<LocalizedTrace>>, Error> {
Err(errors::light_unimplemented(None))
}
@ -43,12 +47,12 @@ impl Traces for TracesClient {
Err(errors::light_unimplemented(None))
}
fn call(&self, _request: CallRequest, _flags: TraceOptions, _block: Trailing<BlockNumber>) -> Result<TraceResults, Error> {
Err(errors::light_unimplemented(None))
fn call(&self, _meta: Self::Metadata, _request: CallRequest, _flags: TraceOptions, _block: Trailing<BlockNumber>) -> BoxFuture<TraceResults, Error> {
future::err(errors::light_unimplemented(None)).boxed()
}
fn call_many(&self, _request: Vec<(CallRequest, TraceOptions)>, _block: Trailing<BlockNumber>) -> Result<Vec<TraceResults>, Error> {
Err(errors::light_unimplemented(None))
fn call_many(&self, _meta: Self::Metadata, _request: Vec<(CallRequest, TraceOptions)>, _block: Trailing<BlockNumber>) -> BoxFuture<Vec<TraceResults>, Error> {
future::err(errors::light_unimplemented(None)).boxed()
}
fn raw_transaction(&self, _raw_transaction: Bytes, _flags: TraceOptions, _block: Trailing<BlockNumber>) -> Result<TraceResults, Error> {

View File

@ -411,11 +411,11 @@ impl<C, M, S: ?Sized, U> Parity for ParityClient<C, M, S, U> where
ipfs::cid(content)
}
fn call(&self, requests: Vec<CallRequest>, block: Trailing<BlockNumber>) -> BoxFuture<Vec<Bytes>, Error> {
fn call(&self, meta: Self::Metadata, requests: Vec<CallRequest>, block: Trailing<BlockNumber>) -> BoxFuture<Vec<Bytes>, Error> {
let requests: Result<Vec<(SignedTransaction, _)>, Error> = requests
.into_iter()
.map(|request| Ok((
fake_sign::sign_call(&self.client, &self.miner, request.into())?,
fake_sign::sign_call(&self.client, &self.miner, request.into(), meta.is_dapp())?,
Default::default()
)))
.collect();

View File

@ -176,6 +176,10 @@ impl<C, M, U, F> ParitySet for ParitySetClient<C, M, U, F> where
}))
}
fn dapps_refresh(&self) -> Result<bool, Error> {
self.dapps.as_ref().map(|dapps| dapps.refresh_local_dapps()).ok_or_else(errors::dapps_disabled)
}
fn dapps_list(&self) -> Result<Vec<LocalDapp>, Error> {
self.dapps.as_ref().map(|dapps| dapps.list_dapps()).ok_or_else(errors::dapps_disabled)
}

View File

@ -124,9 +124,9 @@ impl<D: Dispatcher + 'static> Personal for PersonalClient<D> {
.map(move |tx| (tx, dispatcher))
})
.and_then(|(pending_tx, dispatcher)| {
let network_id = pending_tx.network_id();
trace!(target: "miner", "send_transaction: dispatching tx: {} for network ID {:?}",
::rlp::encode(&*pending_tx).into_vec().pretty(), network_id);
let chain_id = pending_tx.chain_id();
trace!(target: "miner", "send_transaction: dispatching tx: {} for chain ID {:?}",
::rlp::encode(&*pending_tx).into_vec().pretty(), chain_id);
dispatcher.dispatch_transaction(pending_tx).map(Into::into)
})

View File

@ -18,13 +18,15 @@
use std::sync::Arc;
use rlp::UntrustedRlp;
use ethcore::client::{MiningBlockChainClient, CallAnalytics, TransactionId, TraceId};
use ethcore::miner::MinerService;
use ethcore::transaction::SignedTransaction;
use rlp::UntrustedRlp;
use jsonrpc_core::Error;
use jsonrpc_core::futures::{self, Future, BoxFuture};
use jsonrpc_macros::Trailing;
use v1::Metadata;
use v1::traits::Traces;
use v1::helpers::{errors, fake_sign};
use v1::types::{TraceFilter, LocalizedTrace, BlockNumber, Index, CallRequest, Bytes, TraceResults, TraceOptions, H256};
@ -54,6 +56,8 @@ impl<C, M> TracesClient<C, M> {
}
impl<C, M> Traces for TracesClient<C, M> where C: MiningBlockChainClient + 'static, M: MinerService + 'static {
type Metadata = Metadata;
fn filter(&self, filter: TraceFilter) -> Result<Option<Vec<LocalizedTrace>>, Error> {
Ok(self.client.filter_traces(filter.into())
.map(|traces| traces.into_iter().map(LocalizedTrace::from).collect()))
@ -79,31 +83,35 @@ impl<C, M> Traces for TracesClient<C, M> where C: MiningBlockChainClient + 'stat
.map(LocalizedTrace::from))
}
fn call(&self, request: CallRequest, flags: TraceOptions, block: Trailing<BlockNumber>) -> Result<TraceResults, Error> {
fn call(&self, meta: Self::Metadata, request: CallRequest, flags: TraceOptions, block: Trailing<BlockNumber>) -> BoxFuture<TraceResults, Error> {
let block = block.unwrap_or_default();
let request = CallRequest::into(request);
let signed = fake_sign::sign_call(&self.client, &self.miner, request)?;
let signed = try_bf!(fake_sign::sign_call(&self.client, &self.miner, request, meta.is_dapp()));
self.client.call(&signed, to_call_analytics(flags), block.into())
let res = self.client.call(&signed, to_call_analytics(flags), block.into())
.map(TraceResults::from)
.map_err(errors::call)
.map_err(errors::call);
futures::done(res).boxed()
}
fn call_many(&self, requests: Vec<(CallRequest, TraceOptions)>, block: Trailing<BlockNumber>) -> Result<Vec<TraceResults>, Error> {
fn call_many(&self, meta: Self::Metadata, requests: Vec<(CallRequest, TraceOptions)>, block: Trailing<BlockNumber>) -> BoxFuture<Vec<TraceResults>, Error> {
let block = block.unwrap_or_default();
let requests = requests.into_iter()
let requests = try_bf!(requests.into_iter()
.map(|(request, flags)| {
let request = CallRequest::into(request);
let signed = fake_sign::sign_call(&self.client, &self.miner, request)?;
let signed = fake_sign::sign_call(&self.client, &self.miner, request, meta.is_dapp())?;
Ok((signed, to_call_analytics(flags)))
})
.collect::<Result<Vec<_>, _>>()?;
.collect::<Result<Vec<_>, Error>>());
self.client.call_many(&requests, block.into())
let res = self.client.call_many(&requests, block.into())
.map(|results| results.into_iter().map(TraceResults::from).collect())
.map_err(errors::call)
.map_err(errors::call);
futures::done(res).boxed()
}
fn raw_transaction(&self, raw_transaction: Bytes, flags: TraceOptions, block: Trailing<BlockNumber>) -> Result<TraceResults, Error> {
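Not part of the diff: the synchronous-to-asynchronous conversion pattern used for Traces::call and call_many above, assuming the futures 0.1 API of this era. Parity's try_bf! early-return macro is omitted from the sketch.

extern crate futures; // futures = "0.1"

use futures::{BoxFuture, Future};

fn call_sync() -> Result<u64, String> {
    Ok(42)
}

fn call_async() -> BoxFuture<u64, String> {
    // Same pattern as futures::done(res).boxed() in the hunk above: wrap an
    // already-computed Result into a boxed future.
    futures::done(call_sync()).boxed()
}

fn main() {
    assert_eq!(call_async().wait(), Ok(42));
}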

View File

@ -42,6 +42,15 @@ impl Metadata {
_ => DappId::default(),
}
}
/// Returns true if the request originates from a Dapp.
pub fn is_dapp(&self) -> bool {
if let Origin::Dapps(_) = self.origin {
true
} else {
false
}
}
}
impl jsonrpc_core::Metadata for Metadata {}

View File

@ -34,4 +34,8 @@ impl DappsService for TestDappsService {
icon_url: "title.png".into(),
}]
}
fn refresh_local_dapps(&self) -> bool {
true
}
}

View File

@ -544,7 +544,7 @@ fn rpc_eth_pending_transaction_by_hash() {
tester.miner.pending_transactions.lock().insert(H256::zero(), tx);
}
let response = r#"{"jsonrpc":"2.0","result":{"blockHash":null,"blockNumber":null,"condition":null,"creates":null,"from":"0x0f65fe9276bc9a24ae7083ae28e2660ef72df99e","gas":"0x5208","gasPrice":"0x1","hash":"0x41df922fd0d4766fcc02e161f8295ec28522f329ae487f14d811e4b64c8d6e31","input":"0x","networkId":null,"nonce":"0x0","publicKey":"0x7ae46da747962c2ee46825839c1ef9298e3bd2e70ca2938495c3693a485ec3eaa8f196327881090ff64cf4fbb0a48485d4f83098e189ed3b7a87d5941b59f789","r":"0x48b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353","raw":"0xf85f800182520894095e7baea6a6c7c4c2dfeb977efac326af552d870a801ba048b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353a0efffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804","s":"0xefffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804","standardV":"0x0","to":"0x095e7baea6a6c7c4c2dfeb977efac326af552d87","transactionIndex":null,"v":"0x1b","value":"0xa"},"id":1}"#;
let response = r#"{"jsonrpc":"2.0","result":{"blockHash":null,"blockNumber":null,"chainId":null,"condition":null,"creates":null,"from":"0x0f65fe9276bc9a24ae7083ae28e2660ef72df99e","gas":"0x5208","gasPrice":"0x1","hash":"0x41df922fd0d4766fcc02e161f8295ec28522f329ae487f14d811e4b64c8d6e31","input":"0x","nonce":"0x0","publicKey":"0x7ae46da747962c2ee46825839c1ef9298e3bd2e70ca2938495c3693a485ec3eaa8f196327881090ff64cf4fbb0a48485d4f83098e189ed3b7a87d5941b59f789","r":"0x48b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353","raw":"0xf85f800182520894095e7baea6a6c7c4c2dfeb977efac326af552d870a801ba048b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353a0efffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804","s":"0xefffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804","standardV":"0x0","to":"0x095e7baea6a6c7c4c2dfeb977efac326af552d87","transactionIndex":null,"v":"0x1b","value":"0xa"},"id":1}"#;
let request = r#"{
"jsonrpc": "2.0",
"method": "eth_getTransactionByHash",
@ -860,12 +860,13 @@ fn rpc_eth_sign_transaction() {
let response = r#"{"jsonrpc":"2.0","result":{"#.to_owned() +
r#""raw":"0x"# + &rlp.to_hex() + r#"","# +
r#""tx":{"# +
r#""blockHash":null,"blockNumber":null,"condition":null,"creates":null,"# +
r#""blockHash":null,"blockNumber":null,"# +
&format!("\"chainId\":{},", t.chain_id().map_or("null".to_owned(), |n| format!("{}", n))) +
r#""condition":null,"creates":null,"# +
&format!("\"from\":\"0x{:?}\",", &address) +
r#""gas":"0x76c0","gasPrice":"0x9184e72a000","# +
&format!("\"hash\":\"0x{:?}\",", t.hash()) +
r#""input":"0x","# +
&format!("\"networkId\":{},", t.network_id().map_or("null".to_owned(), |n| format!("{}", n))) +
r#""nonce":"0x1","# +
&format!("\"publicKey\":\"0x{:?}\",", t.recover_public().unwrap()) +
&format!("\"r\":\"0x{}\",", U256::from(signature.r()).to_hex()) +

View File

@ -233,7 +233,7 @@ fn rpc_parity_remove_transaction() {
let hash = signed.hash();
let request = r#"{"jsonrpc": "2.0", "method": "parity_removeTransaction", "params":[""#.to_owned() + &format!("0x{:?}", hash) + r#""], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":{"blockHash":null,"blockNumber":null,"condition":null,"creates":null,"from":"0x0000000000000000000000000000000000000002","gas":"0x76c0","gasPrice":"0x9184e72a000","hash":"0xa2e0da8a8064e0b9f93e95a53c2db6d01280efb8ac72a708d25487e67dd0f8fc","input":"0x","networkId":null,"nonce":"0x1","publicKey":null,"r":"0x1","raw":"0xe9018609184e72a0008276c0940000000000000000000000000000000000000005849184e72a80800101","s":"0x1","standardV":"0x4","to":"0x0000000000000000000000000000000000000005","transactionIndex":null,"v":"0x0","value":"0x9184e72a"},"id":1}"#;
let response = r#"{"jsonrpc":"2.0","result":{"blockHash":null,"blockNumber":null,"chainId":null,"condition":null,"creates":null,"from":"0x0000000000000000000000000000000000000002","gas":"0x76c0","gasPrice":"0x9184e72a000","hash":"0xa2e0da8a8064e0b9f93e95a53c2db6d01280efb8ac72a708d25487e67dd0f8fc","input":"0x","nonce":"0x1","publicKey":null,"r":"0x1","raw":"0xe9018609184e72a0008276c0940000000000000000000000000000000000000005849184e72a80800101","s":"0x1","standardV":"0x4","to":"0x0000000000000000000000000000000000000005","transactionIndex":null,"v":"0x0","value":"0x9184e72a"},"id":1}"#;
miner.pending_transactions.lock().insert(hash, signed);
assert_eq!(io.handle_request_sync(&request), Some(response.to_owned()));

View File

@ -454,12 +454,13 @@ fn should_confirm_sign_transaction_with_rlp() {
let response = r#"{"jsonrpc":"2.0","result":{"#.to_owned() +
r#""raw":"0x"# + &rlp.to_hex() + r#"","# +
r#""tx":{"# +
r#""blockHash":null,"blockNumber":null,"condition":null,"creates":null,"# +
r#""blockHash":null,"blockNumber":null,"# +
&format!("\"chainId\":{},", t.chain_id().map_or("null".to_owned(), |n| format!("{}", n))) +
r#""condition":null,"creates":null,"# +
&format!("\"from\":\"0x{:?}\",", &address) +
r#""gas":"0x989680","gasPrice":"0x1000","# +
&format!("\"hash\":\"0x{:?}\",", t.hash()) +
r#""input":"0x","# +
&format!("\"networkId\":{},", t.network_id().map_or("null".to_owned(), |n| format!("{}", n))) +
r#""nonce":"0x0","# +
&format!("\"publicKey\":\"0x{:?}\",", t.public_key().unwrap()) +
&format!("\"r\":\"0x{}\",", U256::from(signature.r()).to_hex()) +

View File

@ -297,12 +297,13 @@ fn should_add_sign_transaction_to_the_queue() {
let response = r#"{"jsonrpc":"2.0","result":{"#.to_owned() +
r#""raw":"0x"# + &rlp.to_hex() + r#"","# +
r#""tx":{"# +
r#""blockHash":null,"blockNumber":null,"condition":null,"creates":null,"# +
r#""blockHash":null,"blockNumber":null,"# +
&format!("\"chainId\":{},", t.chain_id().map_or("null".to_owned(), |n| format!("{}", n))) +
r#""condition":null,"creates":null,"# +
&format!("\"from\":\"0x{:?}\",", &address) +
r#""gas":"0x76c0","gasPrice":"0x9184e72a000","# +
&format!("\"hash\":\"0x{:?}\",", t.hash()) +
r#""input":"0x","# +
&format!("\"networkId\":{},", t.network_id().map_or("null".to_owned(), |n| format!("{}", n))) +
r#""nonce":"0x1","# +
&format!("\"publicKey\":\"0x{:?}\",", t.public_key().unwrap()) +
&format!("\"r\":\"0x{}\",", U256::from(signature.r()).to_hex()) +

View File

@ -25,12 +25,12 @@ use vm::CallType;
use jsonrpc_core::IoHandler;
use v1::tests::helpers::{TestMinerService};
use v1::{Traces, TracesClient};
use v1::{Metadata, Traces, TracesClient};
struct Tester {
client: Arc<TestBlockChainClient>,
_miner: Arc<TestMinerService>,
io: IoHandler,
io: IoHandler<Metadata>,
}
fn io() -> Tester {
@ -67,7 +67,7 @@ fn io() -> Tester {
}));
let miner = Arc::new(TestMinerService::default());
let traces = TracesClient::new(&client, &miner);
let mut io = IoHandler::new();
let mut io = IoHandler::default();
io.extend_with(traces.to_delegate());
Tester {

View File

@ -110,12 +110,12 @@ build_rpc_trait! {
fn submit_transaction(&self, Bytes) -> Result<H256, Error>;
/// Call contract, returning the output data.
#[rpc(async, name = "eth_call")]
fn call(&self, CallRequest, Trailing<BlockNumber>) -> BoxFuture<Bytes, Error>;
#[rpc(meta, name = "eth_call")]
fn call(&self, Self::Metadata, CallRequest, Trailing<BlockNumber>) -> BoxFuture<Bytes, Error>;
/// Estimate gas needed for execution of given contract.
#[rpc(async, name = "eth_estimateGas")]
fn estimate_gas(&self, CallRequest, Trailing<BlockNumber>) -> BoxFuture<U256, Error>;
#[rpc(meta, name = "eth_estimateGas")]
fn estimate_gas(&self, Self::Metadata, CallRequest, Trailing<BlockNumber>) -> BoxFuture<U256, Error>;
/// Get transaction by its hash.
#[rpc(name = "eth_getTransactionByHash")]

View File

@ -205,7 +205,7 @@ build_rpc_trait! {
fn ipfs_cid(&self, Bytes) -> Result<String, Error>;
/// Call contract, returning the output data.
#[rpc(async, name = "parity_call")]
fn call(&self, Vec<CallRequest>, Trailing<BlockNumber>) -> BoxFuture<Vec<Bytes>, Error>;
#[rpc(meta, name = "parity_call")]
fn call(&self, Self::Metadata, Vec<CallRequest>, Trailing<BlockNumber>) -> BoxFuture<Vec<Bytes>, Error>;
}
}

Some files were not shown because too many files have changed in this diff.