Merge remote-tracking branch 'origin/master' into eip-213

NikVolf 2017-03-27 18:18:22 +03:00
commit efe0f8449c
265 changed files with 6912 additions and 4505 deletions

Cargo.lock generated

@@ -2,10 +2,10 @@
name = "parity"
version = "1.7.0"
dependencies = [
-"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"app_dirs 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
-"ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
+"ctrlc 1.1.1 (git+https://github.com/paritytech/rust-ctrlc.git)",
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -27,9 +27,9 @@ dependencies = [
"ethsync 1.7.0",
"evmbin 0.1.0",
"fdlimit 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"hyper 0.10.0-a.0 (git+https://github.com/ethcore/hyper)",
+"hyper 0.10.0-a.0 (git+https://github.com/paritytech/hyper)",
"isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"number_prefix 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -39,6 +39,7 @@ dependencies = [
"parity-reactor 0.1.0",
"parity-rpc-client 1.4.0",
"parity-updater 1.7.0",
+"path 0.1.0",
"regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0",
"rpassword 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -51,6 +52,7 @@ dependencies = [
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -72,7 +74,7 @@ dependencies = [
[[package]]
name = "ansi_term"
-version = "0.7.2"
+version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@@ -93,10 +95,10 @@ dependencies = [
[[package]]
name = "arrayvec"
-version = "0.3.16"
+version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
-"nodrop 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"nodrop 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"odds 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -203,6 +205,15 @@ dependencies = [
"stable-heap 0.1.0 (git+https://github.com/carllerche/stable-heap?rev=3c5cd1ca47)",
]
+[[package]]
+name = "bytes"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"byteorder 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"iovec 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "cfg-if"
version = "0.1.0"
@@ -292,7 +303,7 @@ dependencies = [
[[package]]
name = "ctrlc"
version = "1.1.1"
-source = "git+https://github.com/ethcore/rust-ctrlc.git#f4927770f89eca80ec250911eea3adcbf579ac48"
+source = "git+https://github.com/paritytech/rust-ctrlc.git#b523017108bb2d571a7a69bd97bc406e63bc7a9d"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -357,7 +368,7 @@ name = "eth-secp256k1"
version = "0.5.6"
source = "git+https://github.com/ethcore/rust-secp256k1#98ad9b9ecae44a563efdd64273bcebc6b4ed81c6"
dependencies = [
-"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
+"arrayvec 0.3.20 (registry+https://github.com/rust-lang/crates.io-index)",
"gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -405,6 +416,7 @@ dependencies = [
"ethcore-ipc 1.7.0",
"ethcore-ipc-codegen 1.7.0",
"ethcore-ipc-nano 1.7.0",
+"ethcore-logger 1.7.0",
"ethcore-stratum 1.7.0",
"ethcore-util 1.7.0",
"ethjson 0.1.0",
@@ -412,7 +424,8 @@ dependencies = [
"ethstore 0.1.0",
"evmjit 1.7.0",
"hardware-wallet 1.7.0",
-"hyper 0.10.0-a.0 (git+https://github.com/ethcore/hyper)",
+"hyper 0.10.0-a.0 (git+https://github.com/paritytech/hyper)",
+"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -458,10 +471,10 @@ dependencies = [
"ethcore-rpc 1.7.0",
"ethcore-util 1.7.0",
"fetch 0.1.0",
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
-"hyper 0.10.0-a.0 (git+https://github.com/ethcore/hyper)",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
-"jsonrpc-http-server 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-http-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -567,7 +580,7 @@ dependencies = [
"ethcore-ipc-codegen 1.7.0",
"ethcore-network 1.7.0",
"ethcore-util 1.7.0",
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -581,11 +594,13 @@ dependencies = [
name = "ethcore-logger"
version = "1.7.0"
dependencies = [
+"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"arrayvec 0.3.20 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"ethcore-util 1.7.0",
"isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -594,10 +609,11 @@ dependencies = [
name = "ethcore-network"
version = "1.7.0"
dependencies = [
-"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.7.0",
"ethcore-io 1.7.0",
+"ethcore-logger 1.7.0",
"ethcore-util 1.7.0",
"ethcrypto 0.1.0",
"ethkey 0.2.0",
@@ -606,6 +622,7 @@ dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.6.1 (git+https://github.com/ethcore/mio)",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"path 0.1.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -626,6 +643,7 @@ dependencies = [
"ethcore-io 1.7.0",
"ethcore-ipc 1.7.0",
"ethcore-light 1.7.0",
+"ethcore-logger 1.7.0",
"ethcore-util 1.7.0",
"ethcrypto 0.1.0",
"ethjson 0.1.0",
@@ -633,11 +651,11 @@ dependencies = [
"ethstore 0.1.0",
"ethsync 1.7.0",
"fetch 0.1.0",
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
-"jsonrpc-http-server 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
-"jsonrpc-ipc-server 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
-"jsonrpc-macros 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-http-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-ipc-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-macros 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"order-stat 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-reactor 0.1.0",
@@ -664,7 +682,7 @@ dependencies = [
"ethcore-util 1.7.0",
"ethcrypto 0.1.0",
"ethkey 0.2.0",
-"hyper 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"hyper 0.10.5 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -680,13 +698,14 @@ dependencies = [
"ethcore-io 1.7.0",
"ethcore-rpc 1.7.0",
"ethcore-util 1.7.0",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-dapps-glue 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-ui 1.7.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
-"ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
+"ws 0.5.3 (git+https://github.com/paritytech/ws-rs.git?branch=mio-upstream-stable)",
]
[[package]]
@@ -698,24 +717,23 @@
"ethcore-ipc 1.7.0",
"ethcore-ipc-codegen 1.7.0",
"ethcore-ipc-nano 1.7.0",
+"ethcore-logger 1.7.0",
"ethcore-util 1.7.0",
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
-"jsonrpc-macros 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
-"jsonrpc-tcp-server 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-macros 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-tcp-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"semver 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"tokio-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ethcore-util"
version = "1.7.0"
dependencies = [
-"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
+"ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -723,9 +741,9 @@ dependencies = [
"ethcore-bigint 0.1.2",
"ethcore-bloom-journal 0.1.0",
"ethcore-devtools 1.7.0",
+"ethcore-logger 1.7.0",
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
-"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -862,7 +880,7 @@
name = "fetch"
version = "0.1.0"
dependencies = [
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"futures-cpupool 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -881,11 +899,8 @@ dependencies = [
[[package]]
name = "futures"
-version = "0.1.10"
+version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
-"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "futures-cpupool"
@@ -893,7 +908,7 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -989,7 +1004,7 @@ dependencies = [
[[package]]
name = "hyper"
version = "0.10.0-a.0"
-source = "git+https://github.com/ethcore/hyper#453c683b52208fefc32d29e4ac7c863439b2321f"
+source = "git+https://github.com/paritytech/hyper#453c683b52208fefc32d29e4ac7c863439b2321f"
dependencies = [
"cookie 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1007,7 +1022,7 @@ dependencies = [
[[package]]
name = "hyper"
-version = "0.10.4"
+version = "0.10.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1018,7 +1033,7 @@ dependencies = [
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
-"traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"traitobject 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1030,7 +1045,7 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"antidote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"hyper 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"hyper 0.10.5 (registry+https://github.com/rust-lang/crates.io-index)",
"native-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1061,6 +1076,15 @@ name = "integer-encoding"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "iovec"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "ipc-common-types"
version = "1.7.0"
@@ -1096,67 +1120,82 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "jsonrpc-core"
-version = "6.0.0"
-source = "git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6#86d7a89c85f324b5f6671315d9b71010ca995300"
+version = "7.0.0"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#707cf73a7b72f2eecbf3665c53b4159ec867cbed"
dependencies = [
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"tokio-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "jsonrpc-http-server"
-version = "6.0.0"
-source = "git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6#86d7a89c85f324b5f6671315d9b71010ca995300"
+version = "7.0.0"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#707cf73a7b72f2eecbf3665c53b4159ec867cbed"
dependencies = [
-"hyper 0.10.0-a.0 (git+https://github.com/ethcore/hyper)",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"hyper 0.10.0-a.0 (git+https://github.com/paritytech/hyper)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"tokio-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "jsonrpc-ipc-server"
-version = "6.0.0"
-source = "git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6#86d7a89c85f324b5f6671315d9b71010ca995300"
+version = "7.0.0"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#707cf73a7b72f2eecbf3665c53b4159ec867cbed"
dependencies = [
-"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
-"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"mio 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
-"slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-tokio-ipc 0.1.0 (git+https://github.com/nikvolf/parity-tokio-ipc)",
+"tokio-service 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "jsonrpc-macros"
-version = "6.0.0"
-source = "git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6#86d7a89c85f324b5f6671315d9b71010ca995300"
+version = "7.0.0"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#707cf73a7b72f2eecbf3665c53b4159ec867cbed"
dependencies = [
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-pubsub 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"serde 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
-name = "jsonrpc-tcp-server"
-version = "6.0.0"
-source = "git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6#86d7a89c85f324b5f6671315d9b71010ca995300"
+name = "jsonrpc-pubsub"
+version = "7.0.0"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#707cf73a7b72f2eecbf3665c53b4159ec867cbed"
dependencies = [
-"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
-"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
+"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+[[package]]
+name = "jsonrpc-server-utils"
+version = "7.0.0"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#707cf73a7b72f2eecbf3665c53b4159ec867cbed"
+dependencies = [
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-io 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+[[package]]
+name = "jsonrpc-tcp-server"
+version = "7.0.0"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#707cf73a7b72f2eecbf3665c53b4159ec867cbed"
+dependencies = [
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
+"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)",
-"tokio-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"tokio-proto 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-service 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1184,6 +1223,11 @@ name = "lazycell"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "lazycell"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "libc"
version = "0.2.21"
@@ -1272,38 +1316,6 @@ dependencies = [
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "mio"
-version = "0.5.1"
-source = "git+https://github.com/ethcore/mio?branch=v0.5.x#3842d3b250ffd7bd9b16f9586b875ddcbac2b0dd"
-dependencies = [
-"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
-"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
-"nix 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
-"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-[[package]]
-name = "mio"
-version = "0.5.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
-"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
-"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
-"nix 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
-"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "mio"
version = "0.6.0-dev"
@@ -1312,7 +1324,7 @@ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"miow 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
"nix 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)",
@@ -1322,13 +1334,13 @@ dependencies = [
[[package]]
name = "mio"
version = "0.6.1"
-source = "git+https://github.com/ethcore/mio#ef182bae193a9c7457cd2cf661fcaffb226e3eef"
+source = "git+https://github.com/ethcore/mio#15a577039bed3c72f2952459f8ad687a56f63e29"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"miow 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
"nix 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1337,23 +1349,56 @@ dependencies = [
[[package]]
name = "mio"
-version = "0.6.1"
+version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"miow 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
"nix 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "mio-named-pipes"
+version = "0.1.4"
+source = "git+https://github.com/alexcrichton/mio-named-pipes#903dc2f7eac6700c62bfdda258a599db13a9228f"
+dependencies = [
+"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"lazycell 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"miow 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+[[package]]
+name = "mio-uds"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "miow"
-version = "0.1.3"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
+"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+[[package]]
+name = "miow"
+version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1431,15 +1476,6 @@ dependencies = [
"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "nix"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
-"bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "nix"
version = "0.6.0"
@@ -1468,7 +1504,7 @@ dependencies = [
[[package]]
name = "nodrop"
-version = "0.1.6"
+version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"odds 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1628,7 +1664,7 @@ dependencies = [
"ethabi 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-util 1.7.0",
"fetch 0.1.0",
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"mime_guess 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1644,8 +1680,7 @@ dependencies = [
"cid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.7.0",
"ethcore-util 1.7.0",
-"hyper 0.10.0-a.0 (git+https://github.com/ethcore/hyper)",
-"jsonrpc-http-server 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"jsonrpc-http-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"multihash 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0",
@@ -1670,8 +1705,8 @@
name = "parity-reactor"
version = "0.1.0"
dependencies = [
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
-"tokio-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -1681,15 +1716,31 @@ dependencies = [
"ethcore-rpc 1.7.0",
"ethcore-signer 1.7.0",
"ethcore-util 1.7.0",
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
-"jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 0.9.5 (registry+https://github.com/rust-lang/crates.io-index)",
"tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
+"ws 0.5.3 (git+https://github.com/paritytech/ws-rs.git?branch=mio-upstream-stable)",
]
+[[package]]
+name = "parity-tokio-ipc"
+version = "0.1.0"
+source = "git+https://github.com/nikvolf/parity-tokio-ipc#3d4234de6bdc78688ef803935111003080fd5375"
+dependencies = [
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio-named-pipes 0.1.4 (git+https://github.com/alexcrichton/mio-named-pipes)",
+"miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-line 0.1.0 (git+https://github.com/tokio-rs/tokio-line)",
+"tokio-named-pipes 0.1.0 (git+https://github.com/alexcrichton/tokio-named-pipes)",
+"tokio-uds 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
@@ -1711,7 +1762,7 @@ dependencies = [
[[package]]
name = "parity-ui-precompiled"
version = "1.4.0"
-source = "git+https://github.com/ethcore/js-precompiled.git#47da49294ad958933e85a9c4f0f2bb4df5dc47de"
+source = "git+https://github.com/ethcore/js-precompiled.git#6f18d6f6ddfff55e93f67e11f445eb98bc860219"
dependencies = [
"parity-dapps-glue 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1730,6 +1781,7 @@ dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-hash-fetch 1.7.0",
"parity-reactor 0.1.0",
+"path 0.1.0",
"target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1754,6 +1806,10 @@ dependencies = [
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "path"
+version = "0.1.0"
[[package]]
name = "phf"
version = "0.7.14"
@@ -1915,7 +1971,7 @@ name = "reqwest"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
-"hyper 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"hyper 0.10.5 (registry+https://github.com/rust-lang/crates.io-index)",
"hyper-native-tls 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2004,7 +2060,7 @@ dependencies = [
"ethcore-bigint 0.1.2",
"ethcore-rpc 1.7.0",
"ethcore-util 1.7.0",
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-rpc-client 1.4.0",
"rpassword 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2204,11 +2260,6 @@ name = "siphasher"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "slab"
-version = "0.1.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "slab"
version = "0.2.0"
@@ -2394,29 +2445,60 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "tokio-core"
-version = "0.1.2"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"mio 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "tokio-io"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"bytes 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+[[package]]
+name = "tokio-line"
+version = "0.1.0"
+source = "git+https://github.com/tokio-rs/tokio-line#482614ae0c82daf584727ae65a80d854fe861f81"
+dependencies = [
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-proto 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-service 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+[[package]]
+name = "tokio-named-pipes"
+version = "0.1.0"
+source = "git+https://github.com/alexcrichton/tokio-named-pipes#3a22f8fc9a441b548aec25bd5df3b1e0ab99fabe"
+dependencies = [
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio-named-pipes 0.1.4 (git+https://github.com/alexcrichton/mio-named-pipes)",
+"tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
[[package]]
name = "tokio-proto"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"take 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"tokio-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"tokio-service 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -2425,7 +2507,19 @@ name = "tokio-service"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
-"futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+[[package]]
+name = "tokio-uds"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
+"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"mio-uds 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -2449,6 +2543,11 @@ name = "traitobject"
version = "0.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "traitobject"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "transient-hashmap"
version = "0.4.0"
@@ -2556,7 +2655,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "ws"
version = "0.5.3"
-source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#f5c0b35d660244d1b7500693c8cc28277ce1d418"
+source = "git+https://github.com/paritytech/ws-rs.git?branch=mio-upstream-stable#f5c0b35d660244d1b7500693c8cc28277ce1d418"
dependencies = [
"bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)",
"httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2612,10 +2711,10 @@ dependencies = [
[metadata]
"checksum advapi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e06588080cb19d0acb6739808aafa5f26bfb2ca015b2b6370028b44cf7cb8a9a"
"checksum aho-corasick 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "67077478f0a03952bed2e6786338d400d40c25e9836e08ad50af96607317fd03"
-"checksum ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1f46cd5b1d660c938e3f92dfe7a73d832b3281479363dd0cd9c1c2fbf60f7962"
+"checksum ansi_term 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "23ac7c30002a5accbf7e8987d0632fa6de155b7c3d39d0067317a391e00a2ef6"
"checksum antidote 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "34fde25430d87a9388dadbe6e34d7f72a462c8b43ac8d309b42b0a8505d7e2a5"
"checksum app_dirs 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b7d1c0d48a81bbb13043847f957971f4d87c81542d80ece5e84ba3cba4058fd4"
-"checksum arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "16e3bdb2f54b3ace0285975d59a97cf8ed3855294b2b6bc651fcf22a9c352975"
+"checksum arrayvec 0.3.20 (registry+https://github.com/rust-lang/crates.io-index)" = "d89f1b0e242270b5b797778af0c8d182a1a2ccac5d8d6fadf414223cc0fab096"
"checksum aster 0.41.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ccfdf7355d9db158df68f976ed030ab0f6578af811f5a7bb6dcf221ec24e0e0"
"checksum base-x 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2f59103b47307f76e03bef1633aec7fa9e29bfb5aa6daf5a334f94233c71f6c1"
"checksum base32 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1b9605ba46d61df0410d8ac686b0007add8172eba90e8e909c347856fe794d8c"
@ -2632,6 +2731,7 @@ dependencies = [
"checksum byteorder 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c40977b0ee6b9885c9013cd41d9feffdd22deb3bb4dc3a71d901cc7a77de18c8" "checksum byteorder 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c40977b0ee6b9885c9013cd41d9feffdd22deb3bb4dc3a71d901cc7a77de18c8"
"checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27" "checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27"
"checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>" "checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>"
"checksum bytes 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "46112a0060ae15e3a3f9a445428a53e082b91215b744fa27a1948842f4a64b96"
"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c" "checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
"checksum cid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e53e6cdfa5ca294863e8c8a32a7cdb4dc0a442c8971d47a0e75b6c27ea268a6a" "checksum cid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e53e6cdfa5ca294863e8c8a32a7cdb4dc0a442c8971d47a0e75b6c27ea268a6a"
"checksum clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "5b4fabf979ddf6419a313c1c0ada4a5b95cfd2049c56e8418d622d27b4b6ff32" "checksum clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "5b4fabf979ddf6419a313c1c0ada4a5b95cfd2049c56e8418d622d27b4b6ff32"
@ -2642,7 +2742,7 @@ dependencies = [
"checksum core-foundation-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "05eed248dc504a5391c63794fe4fb64f46f071280afaa1b73308f3c0ce4574c5" "checksum core-foundation-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "05eed248dc504a5391c63794fe4fb64f46f071280afaa1b73308f3c0ce4574c5"
"checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97" "checksum crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0c5ea215664ca264da8a9d9c3be80d2eaf30923c259d03e870388eb927508f97"
"checksum crypt32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e34988f7e069e0b2f3bfc064295161e489b2d4e04a2e4248fb94360cdf00b4ec" "checksum crypt32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e34988f7e069e0b2f3bfc064295161e489b2d4e04a2e4248fb94360cdf00b4ec"
"checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "<none>" "checksum ctrlc 1.1.1 (git+https://github.com/paritytech/rust-ctrlc.git)" = "<none>"
"checksum daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "271ec51b7e0bee92f0d04601422c73eb76ececf197026711c97ad25038a010cf" "checksum daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "271ec51b7e0bee92f0d04601422c73eb76ececf197026711c97ad25038a010cf"
"checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf" "checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
"checksum docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc0acb4ce0828c6a5a11d47baa432fe885881c27428c3a4e473e454ffe57a76" "checksum docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc0acb4ce0828c6a5a11d47baa432fe885881c27428c3a4e473e454ffe57a76"
@ -2654,7 +2754,7 @@ dependencies = [
"checksum ethabi 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d8f6cc4c1acd005f48e1d17b06a461adac8fb6eeeb331fbf19a0e656fba91cd" "checksum ethabi 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5d8f6cc4c1acd005f48e1d17b06a461adac8fb6eeeb331fbf19a0e656fba91cd"
"checksum fdlimit 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1ee15a7050e5580b3712877157068ea713b245b080ff302ae2ca973cfcd9baa" "checksum fdlimit 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b1ee15a7050e5580b3712877157068ea713b245b080ff302ae2ca973cfcd9baa"
"checksum flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "3eeb481e957304178d2e782f2da1257f1434dfecbae883bafb61ada2a9fea3bb" "checksum flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "3eeb481e957304178d2e782f2da1257f1434dfecbae883bafb61ada2a9fea3bb"
"checksum futures 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "c1913eb7083840b1bbcbf9631b7fda55eaf35fe7ead13cca034e8946f9e2bc41" "checksum futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8e51e7f9c150ba7fd4cee9df8bf6ea3dea5b63b68955ddad19ccd35b71dcfb4d"
"checksum futures-cpupool 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bb982bb25cd8fa5da6a8eb3a460354c984ff1113da82bcb4f0b0862b5795db82" "checksum futures-cpupool 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bb982bb25cd8fa5da6a8eb3a460354c984ff1113da82bcb4f0b0862b5795db82"
"checksum gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)" = "c07c758b972368e703a562686adb39125707cc1ef3399da8c019fc6c2498a75d" "checksum gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)" = "c07c758b972368e703a562686adb39125707cc1ef3399da8c019fc6c2498a75d"
"checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518" "checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
@ -2664,25 +2764,29 @@ dependencies = [
"checksum hidapi 0.3.1 (git+https://github.com/ethcore/hidapi-rs)" = "<none>" "checksum hidapi 0.3.1 (git+https://github.com/ethcore/hidapi-rs)" = "<none>"
"checksum hpack 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d2da7d3a34cf6406d9d700111b8eafafe9a251de41ae71d8052748259343b58" "checksum hpack 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d2da7d3a34cf6406d9d700111b8eafafe9a251de41ae71d8052748259343b58"
"checksum httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "46534074dbb80b070d60a5cb8ecadd8963a00a438ae1a95268850a7ef73b67ae" "checksum httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "46534074dbb80b070d60a5cb8ecadd8963a00a438ae1a95268850a7ef73b67ae"
"checksum hyper 0.10.0-a.0 (git+https://github.com/ethcore/hyper)" = "<none>" "checksum hyper 0.10.0-a.0 (git+https://github.com/paritytech/hyper)" = "<none>"
"checksum hyper 0.10.4 (registry+https://github.com/rust-lang/crates.io-index)" = "220407e5a263f110ec30a071787c9535918fdfc97def5680c90013c3f30c38c1" "checksum hyper 0.10.5 (registry+https://github.com/rust-lang/crates.io-index)" = "43a15e3273b2133aaac0150478ab443fb89f15c3de41d8d93d8f3bb14bf560f6"
"checksum hyper 0.9.18 (registry+https://github.com/rust-lang/crates.io-index)" = "1b9bf64f730d6ee4b0528a5f0a316363da9d8104318731509d4ccc86248f82b3" "checksum hyper 0.9.18 (registry+https://github.com/rust-lang/crates.io-index)" = "1b9bf64f730d6ee4b0528a5f0a316363da9d8104318731509d4ccc86248f82b3"
"checksum hyper-native-tls 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "afe68f772f0497a7205e751626bb8e1718568b58534b6108c73a74ef80483409" "checksum hyper-native-tls 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "afe68f772f0497a7205e751626bb8e1718568b58534b6108c73a74ef80483409"
"checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11" "checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11"
"checksum igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c8c12b1795b8b168f577c45fa10379b3814dcb11b7ab702406001f0d63f40484" "checksum igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c8c12b1795b8b168f577c45fa10379b3814dcb11b7ab702406001f0d63f40484"
"checksum integer-encoding 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a053c9c7dcb7db1f2aa012c37dc176c62e4cdf14898dee0eecc606de835b8acb" "checksum integer-encoding 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a053c9c7dcb7db1f2aa012c37dc176c62e4cdf14898dee0eecc606de835b8acb"
"checksum iovec 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "29d062ee61fccdf25be172e70f34c9f6efc597e1fb8f6526e8437b2046ab26be"
"checksum isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7408a548dc0e406b7912d9f84c261cc533c1866e047644a811c133c56041ac0c" "checksum isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7408a548dc0e406b7912d9f84c261cc533c1866e047644a811c133c56041ac0c"
"checksum itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "d95557e7ba6b71377b0f2c3b3ae96c53f1b75a926a6901a500f557a370af730a" "checksum itertools 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "d95557e7ba6b71377b0f2c3b3ae96c53f1b75a926a6901a500f557a370af730a"
"checksum itoa 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "91fd9dc2c587067de817fec4ad355e3818c3d893a78cab32a0a474c7a15bb8d5" "checksum itoa 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "91fd9dc2c587067de817fec4ad355e3818c3d893a78cab32a0a474c7a15bb8d5"
"checksum jsonrpc-core 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)" = "<none>" "checksum jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)" = "<none>"
"checksum jsonrpc-http-server 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)" = "<none>" "checksum jsonrpc-http-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)" = "<none>"
"checksum jsonrpc-ipc-server 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)" = "<none>" "checksum jsonrpc-ipc-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)" = "<none>"
"checksum jsonrpc-macros 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)" = "<none>" "checksum jsonrpc-macros 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)" = "<none>"
"checksum jsonrpc-tcp-server 6.0.0 (git+https://github.com/ethcore/jsonrpc.git?branch=parity-1.6)" = "<none>" "checksum jsonrpc-pubsub 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)" = "<none>"
"checksum jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)" = "<none>"
"checksum jsonrpc-tcp-server 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)" = "<none>"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" "checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a"
"checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f" "checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
"checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b" "checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
"checksum lazycell 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ec38a5c22f1ef3e30d2642aa875620d60edeef36cef43c4739d86215ce816331"
"checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135" "checksum libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)" = "88ee81885f9f04bff991e306fea7c1c60a5f0f9e409e99f6b40e3311a3363135"
"checksum libusb 0.3.0 (git+https://github.com/ethcore/libusb-rs)" = "<none>" "checksum libusb 0.3.0 (git+https://github.com/ethcore/libusb-rs)" = "<none>"
"checksum libusb-sys 0.2.3 (git+https://github.com/ethcore/libusb-sys)" = "<none>" "checksum libusb-sys 0.2.3 (git+https://github.com/ethcore/libusb-sys)" = "<none>"
@ -2695,12 +2799,13 @@ dependencies = [
"checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2" "checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2"
"checksum mime_guess 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5e50bf542f81754ef69e5cea856946a3819f7c09ea97b4903c8bc8a89f74e7b6" "checksum mime_guess 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5e50bf542f81754ef69e5cea856946a3819f7c09ea97b4903c8bc8a89f74e7b6"
"checksum miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d1f4d337a01c32e1f2122510fed46393d53ca35a7f429cb0450abaedfa3ed54" "checksum miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d1f4d337a01c32e1f2122510fed46393d53ca35a7f429cb0450abaedfa3ed54"
"checksum mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)" = "<none>"
"checksum mio 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a637d1ca14eacae06296a008fa7ad955347e34efcb5891cfd8ba05491a37907e"
"checksum mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)" = "<none>" "checksum mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)" = "<none>"
"checksum mio 0.6.1 (git+https://github.com/ethcore/mio)" = "<none>" "checksum mio 0.6.1 (git+https://github.com/ethcore/mio)" = "<none>"
"checksum mio 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "410a1a0ff76f5a226f1e4e3ff1756128e65cd30166e39c3892283e2ac09d5b67" "checksum mio 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5b493dc9fd96bd2077f2117f178172b0765db4dfda3ea4d8000401e6d65d3e80"
"checksum miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d5bfc6782530ac8ace97af10a540054a37126b63b0702ddaaa243b73b5745b9a" "checksum mio-named-pipes 0.1.4 (git+https://github.com/alexcrichton/mio-named-pipes)" = "<none>"
"checksum mio-uds 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "78437f00d9615c366932cbfe79790b5c2945706ba67cf78378ffacc0069ed9de"
"checksum miow 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3e690c5df6b2f60acd45d56378981e827ff8295562fc8d34f573deb267a59cd1"
"checksum miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f2f3b1cf331de6896aabf6e9d55dca90356cc9960cca7eaaf408a355ae919"
"checksum msdos_time 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c04b68cc63a8480fb2550343695f7be72effdec953a9d4508161c3e69041c7d8" "checksum msdos_time 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c04b68cc63a8480fb2550343695f7be72effdec953a9d4508161c3e69041c7d8"
"checksum multibase 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b9c35dac080fd6e16a99924c8dfdef0af89d797dd851adab25feaffacf7850d6" "checksum multibase 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b9c35dac080fd6e16a99924c8dfdef0af89d797dd851adab25feaffacf7850d6"
"checksum multihash 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c9f70f2402fa07c16c40be8fd0a748a39257c5dc3ff5c857cbbde2f39135c505" "checksum multihash 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c9f70f2402fa07c16c40be8fd0a748a39257c5dc3ff5c857cbbde2f39135c505"
@ -2708,10 +2813,9 @@ dependencies = [
"checksum nanomsg-sys 0.5.0 (git+https://github.com/ethcore/nanomsg.rs.git?branch=parity-1.7)" = "<none>" "checksum nanomsg-sys 0.5.0 (git+https://github.com/ethcore/nanomsg.rs.git?branch=parity-1.7)" = "<none>"
"checksum native-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aa4e52995154bb6f0b41e4379a279482c9387c1632e3798ba4e511ef8c54ee09" "checksum native-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "aa4e52995154bb6f0b41e4379a279482c9387c1632e3798ba4e511ef8c54ee09"
"checksum net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)" = "6a816012ca11cb47009693c1e0c6130e26d39e4d97ee2a13c50e868ec83e3204" "checksum net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)" = "6a816012ca11cb47009693c1e0c6130e26d39e4d97ee2a13c50e868ec83e3204"
"checksum nix 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f05c2fc965fc1cd6b73fa57fa7b89f288178737f2f3ce9e63e4a6a141189000e"
"checksum nix 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a7bb1da2be7da3cbffda73fc681d509ffd9e665af478d2bee1907cee0bc64b2" "checksum nix 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a7bb1da2be7da3cbffda73fc681d509ffd9e665af478d2bee1907cee0bc64b2"
"checksum nix 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a0d95c5fa8b641c10ad0b8887454ebaafa3c92b5cd5350f8fc693adafd178e7b" "checksum nix 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a0d95c5fa8b641c10ad0b8887454ebaafa3c92b5cd5350f8fc693adafd178e7b"
"checksum nodrop 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4d9a22dbcebdeef7bf275cbf444d6521d4e7a2fee187b72d80dba0817120dd8f" "checksum nodrop 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "52cd74cd09beba596430cc6e3091b74007169a56246e1262f0ba451ea95117b2"
"checksum nom 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6caab12c5f97aa316cb249725aa32115118e1522b445e26c257dd77cad5ffd4e" "checksum nom 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6caab12c5f97aa316cb249725aa32115118e1522b445e26c257dd77cad5ffd4e"
"checksum num 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "c04bd954dbf96f76bab6e5bd6cef6f1ce1262d15268ce4f926d2b5b778fa7af2" "checksum num 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "c04bd954dbf96f76bab6e5bd6cef6f1ce1262d15268ce4f926d2b5b778fa7af2"
"checksum num-bigint 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "41655c8d667be847a0b72fe0888857a7b3f052f691cf40852be5fcf87b274a65" "checksum num-bigint 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "41655c8d667be847a0b72fe0888857a7b3f052f691cf40852be5fcf87b274a65"
@ -2729,6 +2833,7 @@ dependencies = [
"checksum order-stat 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "efa535d5117d3661134dbf1719b6f0ffe06f2375843b13935db186cd094105eb" "checksum order-stat 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "efa535d5117d3661134dbf1719b6f0ffe06f2375843b13935db186cd094105eb"
"checksum owning_ref 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8d91377085359426407a287ab16884a0111ba473aa6844ff01d4ec20ce3d75e7" "checksum owning_ref 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8d91377085359426407a287ab16884a0111ba473aa6844ff01d4ec20ce3d75e7"
"checksum parity-dapps-glue 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e1d06f6ee0fda786df3784a96ee3f0629f529b91cbfb7d142f6410e6bcd1ce2c" "checksum parity-dapps-glue 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e1d06f6ee0fda786df3784a96ee3f0629f529b91cbfb7d142f6410e6bcd1ce2c"
"checksum parity-tokio-ipc 0.1.0 (git+https://github.com/nikvolf/parity-tokio-ipc)" = "<none>"
"checksum parity-ui-precompiled 1.4.0 (git+https://github.com/ethcore/js-precompiled.git)" = "<none>" "checksum parity-ui-precompiled 1.4.0 (git+https://github.com/ethcore/js-precompiled.git)" = "<none>"
"checksum parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e1435e7a2a00dfebededd6c6bdbd54008001e94b4a2aadd6aef0dc4c56317621" "checksum parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e1435e7a2a00dfebededd6c6bdbd54008001e94b4a2aadd6aef0dc4c56317621"
"checksum parking_lot_core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb1b97670a2ffadce7c397fb80a3d687c4f3060140b885621ef1653d0e5d5068" "checksum parking_lot_core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb1b97670a2ffadce7c397fb80a3d687c4f3060140b885621ef1653d0e5d5068"
@ -2782,7 +2887,6 @@ dependencies = [
"checksum sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cc30b1e1e8c40c121ca33b86c23308a090d19974ef001b4bf6e61fd1a0fb095c" "checksum sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cc30b1e1e8c40c121ca33b86c23308a090d19974ef001b4bf6e61fd1a0fb095c"
"checksum shell32-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72f20b8f3c060374edb8046591ba28f62448c369ccbdc7b02075103fb3a9e38d" "checksum shell32-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72f20b8f3c060374edb8046591ba28f62448c369ccbdc7b02075103fb3a9e38d"
"checksum siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c44e42fa187b5a8782489cf7740cc27c3125806be2bf33563cf5e02e9533fcd" "checksum siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c44e42fa187b5a8782489cf7740cc27c3125806be2bf33563cf5e02e9533fcd"
"checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
"checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>" "checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>"
"checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4" "checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
"checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23" "checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
@ -2807,12 +2911,17 @@ dependencies = [
"checksum thread_local 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0694f51610ef7cfac7a1b81de7f1602ee5356e76541bcd62c40e71933338cab1" "checksum thread_local 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0694f51610ef7cfac7a1b81de7f1602ee5356e76541bcd62c40e71933338cab1"
"checksum time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "3c7ec6d62a20df54e07ab3b78b9a3932972f4b7981de295563686849eb3989af" "checksum time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "3c7ec6d62a20df54e07ab3b78b9a3932972f4b7981de295563686849eb3989af"
"checksum tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aef43048292ca0bae4ab32180e85f6202cf2816c2a210c396a84b99dab9270" "checksum tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aef43048292ca0bae4ab32180e85f6202cf2816c2a210c396a84b99dab9270"
"checksum tokio-core 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "52416b3e937abac22a543a7f1c66bd37feb60137ff1ab42390fa02df85347e58" "checksum tokio-core 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "3d1be481b55126f02ef88ff86748086473cb537a949fc4a8f4be403a530ae54b"
"checksum tokio-io 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6a278fde45f1be68e44995227d426aaa4841e0980bb0a21b981092f28c3c8473"
"checksum tokio-line 0.1.0 (git+https://github.com/tokio-rs/tokio-line)" = "<none>"
"checksum tokio-named-pipes 0.1.0 (git+https://github.com/alexcrichton/tokio-named-pipes)" = "<none>"
"checksum tokio-proto 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7c0d6031f94d78d7b4d509d4a7c5e1cdf524a17e7b08d1c188a83cf720e69808" "checksum tokio-proto 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7c0d6031f94d78d7b4d509d4a7c5e1cdf524a17e7b08d1c188a83cf720e69808"
"checksum tokio-service 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "24da22d077e0f15f55162bdbdc661228c1581892f52074fb242678d015b45162" "checksum tokio-service 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "24da22d077e0f15f55162bdbdc661228c1581892f52074fb242678d015b45162"
"checksum tokio-uds 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ffc7b5fc8e19e220b29566d1750949224a518478eab9cebc8df60583242ca30a"
"checksum toml 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "fcd27a04ca509aff336ba5eb2abc58d456f52c4ff64d9724d88acb85ead560b6" "checksum toml 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "fcd27a04ca509aff336ba5eb2abc58d456f52c4ff64d9724d88acb85ead560b6"
"checksum toml 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a442dfc13508e603c3f763274361db7f79d7469a0e95c411cde53662ab30fc72" "checksum toml 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a442dfc13508e603c3f763274361db7f79d7469a0e95c411cde53662ab30fc72"
"checksum traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "07eaeb7689bb7fca7ce15628319635758eda769fed481ecfe6686ddef2600616" "checksum traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "07eaeb7689bb7fca7ce15628319635758eda769fed481ecfe6686ddef2600616"
"checksum traitobject 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "efd1f82c56340fdf16f2a953d7bda4f8fdffba13d93b00844c25572110b26079"
"checksum transient-hashmap 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "715254c8f0811be1a79ad3ea5e6fa3c8eddec2b03d7f5ba78cf093e56d79c24f" "checksum transient-hashmap 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "715254c8f0811be1a79ad3ea5e6fa3c8eddec2b03d7f5ba78cf093e56d79c24f"
"checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887" "checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887"
"checksum unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a5906ca2b98c799f4b1ab4557b76367ebd6ae5ef14930ec841c74aed5f3764" "checksum unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a5906ca2b98c799f4b1ab4557b76367ebd6ae5ef14930ec841c74aed5f3764"
@ -2828,7 +2937,7 @@ dependencies = [
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" "checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
"checksum ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>" "checksum ws 0.5.3 (git+https://github.com/paritytech/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" "checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
"checksum xdg 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "77b831a5ba77110f438f0ac5583aafeb087f70432998ba6b7dcb1d32185db453" "checksum xdg 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "77b831a5ba77110f438f0ac5583aafeb087f70432998ba6b7dcb1d32185db453"
"checksum xml-rs 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "65e74b96bd3179209dc70a980da6df843dff09e46eee103a0376c0949257e3ef" "checksum xml-rs 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "65e74b96bd3179209dc70a980da6df843dff09e46eee103a0376c0949257e3ef"


@ -16,7 +16,7 @@ num_cpus = "1.2"
number_prefix = "0.2" number_prefix = "0.2"
rpassword = "0.2.1" rpassword = "0.2.1"
semver = "0.5" semver = "0.5"
ansi_term = "0.7" ansi_term = "0.9"
regex = "0.1" regex = "0.1"
isatty = "0.1" isatty = "0.1"
toml = "0.2" toml = "0.2"
@ -24,9 +24,10 @@ serde = "0.9"
serde_json = "0.9" serde_json = "0.9"
app_dirs = "1.1.1" app_dirs = "1.1.1"
fdlimit = "0.1" fdlimit = "0.1"
hyper = { default-features = false, git = "https://github.com/ethcore/hyper" } ws2_32-sys = "0.2"
ctrlc = { git = "https://github.com/ethcore/rust-ctrlc.git" } hyper = { default-features = false, git = "https://github.com/paritytech/hyper" }
jsonrpc-core = { git = "https://github.com/ethcore/jsonrpc.git", branch = "parity-1.6" } ctrlc = { git = "https://github.com/paritytech/rust-ctrlc.git" }
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
ethsync = { path = "sync" } ethsync = { path = "sync" }
ethcore = { path = "ethcore" } ethcore = { path = "ethcore" }
ethcore-util = { path = "util" } ethcore-util = { path = "util" }
@ -50,6 +51,7 @@ parity-updater = { path = "updater" }
parity-reactor = { path = "util/reactor" } parity-reactor = { path = "util/reactor" }
parity-local-store = { path = "local-store" } parity-local-store = { path = "local-store" }
ethcore-dapps = { path = "dapps", optional = true } ethcore-dapps = { path = "dapps", optional = true }
path = { path = "util/path" }
clippy = { version = "0.0.103", optional = true} clippy = { version = "0.0.103", optional = true}
ethcore-secretstore = { path = "secret_store", optional = true } ethcore-secretstore = { path = "secret_store", optional = true }


@ -8,33 +8,36 @@ authors = ["Parity Technologies <admin@parity.io>"]
[lib] [lib]
[dependencies] [dependencies]
rand = "0.3" base32 = "0.3"
log = "0.3"
env_logger = "0.3" env_logger = "0.3"
futures = "0.1" futures = "0.1"
jsonrpc-core = { git = "https://github.com/ethcore/jsonrpc.git", branch = "parity-1.6" }
jsonrpc-http-server = { git = "https://github.com/ethcore/jsonrpc.git", branch = "parity-1.6" }
hyper = { default-features = false, git = "https://github.com/ethcore/hyper" }
unicase = "1.3"
url = "1.0"
rustc-serialize = "0.3"
serde = "0.9"
serde_json = "0.9"
serde_derive = "0.9"
linked-hash-map = "0.3" linked-hash-map = "0.3"
log = "0.3"
parity-dapps-glue = "1.7" parity-dapps-glue = "1.7"
base32 = "0.3"
mime = "0.2" mime = "0.2"
mime_guess = "1.6.1" mime_guess = "1.6.1"
rand = "0.3"
rustc-serialize = "0.3"
serde = "0.9"
serde_derive = "0.9"
serde_json = "0.9"
time = "0.1.35" time = "0.1.35"
unicase = "1.3"
url = "1.0"
zip = { version = "0.1", default-features = false } zip = { version = "0.1", default-features = false }
jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
# TODO [ToDr] Temporary solution, server should be merged with RPC.
jsonrpc-server-utils = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.7" }
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }
ethcore-rpc = { path = "../rpc" } ethcore-rpc = { path = "../rpc" }
ethcore-util = { path = "../util" } ethcore-util = { path = "../util" }
fetch = { path = "../util/fetch" } fetch = { path = "../util/fetch" }
parity-ui = { path = "./ui" }
parity-hash-fetch = { path = "../hash-fetch" } parity-hash-fetch = { path = "../hash-fetch" }
parity-reactor = { path = "../util/reactor" } parity-reactor = { path = "../util/reactor" }
parity-ui = { path = "./ui" }
clippy = { version = "0.0.103", optional = true} clippy = { version = "0.0.103", optional = true}


@ -19,7 +19,6 @@ use unicase::UniCase;
use hyper::{server, net, Decoder, Encoder, Next, Control}; use hyper::{server, net, Decoder, Encoder, Next, Control};
use hyper::header; use hyper::header;
use hyper::method::Method; use hyper::method::Method;
use hyper::header::AccessControlAllowOrigin;
use api::types::{App, ApiError}; use api::types::{App, ApiError};
use api::response; use api::response;
@ -27,23 +26,20 @@ use apps::fetcher::Fetcher;
use handlers::extract_url; use handlers::extract_url;
use endpoint::{Endpoint, Endpoints, Handler, EndpointPath}; use endpoint::{Endpoint, Endpoints, Handler, EndpointPath};
use jsonrpc_http_server::cors; use jsonrpc_http_server;
use jsonrpc_server_utils::cors;
#[derive(Clone)] #[derive(Clone)]
pub struct RestApi { pub struct RestApi {
cors_domains: Option<Vec<AccessControlAllowOrigin>>, cors_domains: Option<Vec<cors::AccessControlAllowOrigin>>,
endpoints: Arc<Endpoints>, endpoints: Arc<Endpoints>,
fetcher: Arc<Fetcher>, fetcher: Arc<Fetcher>,
} }
impl RestApi { impl RestApi {
pub fn new(cors_domains: Vec<String>, endpoints: Arc<Endpoints>, fetcher: Arc<Fetcher>) -> Box<Endpoint> { pub fn new(cors_domains: Vec<cors::AccessControlAllowOrigin>, endpoints: Arc<Endpoints>, fetcher: Arc<Fetcher>) -> Box<Endpoint> {
Box::new(RestApi { Box::new(RestApi {
cors_domains: Some(cors_domains.into_iter().map(|domain| match domain.as_ref() { cors_domains: Some(cors_domains),
"all" | "*" | "any" => AccessControlAllowOrigin::Any,
"null" => AccessControlAllowOrigin::Null,
other => AccessControlAllowOrigin::Value(other.into()),
}).collect()),
endpoints: endpoints, endpoints: endpoints,
fetcher: fetcher, fetcher: fetcher,
}) })
@ -64,7 +60,7 @@ impl Endpoint for RestApi {
struct RestApiRouter { struct RestApiRouter {
api: RestApi, api: RestApi,
origin: Option<String>, cors_header: Option<header::AccessControlAllowOrigin>,
path: Option<EndpointPath>, path: Option<EndpointPath>,
control: Option<Control>, control: Option<Control>,
handler: Box<Handler>, handler: Box<Handler>,
@ -74,7 +70,7 @@ impl RestApiRouter {
fn new(api: RestApi, path: EndpointPath, control: Control) -> Self { fn new(api: RestApi, path: EndpointPath, control: Control) -> Self {
RestApiRouter { RestApiRouter {
path: Some(path), path: Some(path),
origin: None, cors_header: None,
control: Some(control), control: Some(control),
api: api, api: api,
handler: response::as_json_error(&ApiError { handler: response::as_json_error(&ApiError {
@ -95,8 +91,10 @@ impl RestApiRouter {
} }
/// Returns basic headers for a response (it may be overwritten by the handler) /// Returns basic headers for a response (it may be overwritten by the handler)
fn response_headers(&self) -> header::Headers { fn response_headers(cors_header: Option<header::AccessControlAllowOrigin>) -> header::Headers {
let mut headers = header::Headers::new(); let mut headers = header::Headers::new();
if let Some(cors_header) = cors_header {
headers.set(header::AccessControlAllowCredentials); headers.set(header::AccessControlAllowCredentials);
headers.set(header::AccessControlAllowMethods(vec![ headers.set(header::AccessControlAllowMethods(vec![
Method::Options, Method::Options,
@ -109,7 +107,6 @@ impl RestApiRouter {
UniCase("accept".to_owned()), UniCase("accept".to_owned()),
])); ]));
if let Some(cors_header) = cors::get_cors_header(&self.api.cors_domains, &self.origin) {
headers.set(cors_header); headers.set(cors_header);
} }
@ -120,7 +117,7 @@ impl RestApiRouter {
impl server::Handler<net::HttpStream> for RestApiRouter { impl server::Handler<net::HttpStream> for RestApiRouter {
fn on_request(&mut self, request: server::Request<net::HttpStream>) -> Next { fn on_request(&mut self, request: server::Request<net::HttpStream>) -> Next {
self.origin = cors::read_origin(&request); self.cors_header = jsonrpc_http_server::cors_header(&request, &self.api.cors_domains).into();
if let Method::Options = *request.method() { if let Method::Options = *request.method() {
self.handler = response::empty(); self.handler = response::empty();
@ -164,7 +161,7 @@ impl server::Handler<net::HttpStream> for RestApiRouter {
} }
fn on_response(&mut self, res: &mut server::Response) -> Next { fn on_response(&mut self, res: &mut server::Response) -> Next {
*res.headers_mut() = self.response_headers(); *res.headers_mut() = Self::response_headers(self.cors_header.take());
self.handler.on_response(res) self.handler.on_response(res)
} }

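The hunks above move CORS-origin parsing out of `RestApi::new`: the constructor now takes already-typed `cors::AccessControlAllowOrigin` values from `jsonrpc_server_utils`, and the per-request header is resolved with `jsonrpc_http_server::cors_header`. For reference, the string mapping deleted here is equivalent to the following standalone sketch against the new type (the helper name is illustrative; in practice the crate's own `From` conversions cover it, as the `cors_domains` test further down shows):

use jsonrpc_server_utils::cors::AccessControlAllowOrigin;

// Maps a user-supplied CORS domain string to the typed header value,
// mirroring the match that used to live in RestApi::new.
fn parse_cors_domain(domain: &str) -> AccessControlAllowOrigin {
    match domain {
        "all" | "*" | "any" => AccessControlAllowOrigin::Any,
        "null" => AccessControlAllowOrigin::Null,
        other => AccessControlAllowOrigin::Value(other.into()),
    }
}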

@ -20,25 +20,27 @@
#![cfg_attr(feature="nightly", plugin(clippy))] #![cfg_attr(feature="nightly", plugin(clippy))]
extern crate base32; extern crate base32;
extern crate hyper; extern crate futures;
extern crate time; extern crate linked_hash_map;
extern crate url as url_lib; extern crate mime_guess;
extern crate unicase; extern crate rand;
extern crate rustc_serialize;
extern crate serde; extern crate serde;
extern crate serde_json; extern crate serde_json;
extern crate time;
extern crate unicase;
extern crate url as url_lib;
extern crate zip; extern crate zip;
extern crate rand;
extern crate jsonrpc_core; extern crate jsonrpc_core;
extern crate jsonrpc_http_server; extern crate jsonrpc_http_server;
extern crate mime_guess; extern crate jsonrpc_server_utils;
extern crate rustc_serialize;
extern crate ethcore_rpc; extern crate ethcore_rpc;
extern crate ethcore_util as util; extern crate ethcore_util as util;
extern crate parity_hash_fetch as hash_fetch;
extern crate linked_hash_map;
extern crate fetch; extern crate fetch;
extern crate parity_dapps_glue as parity_dapps; extern crate parity_dapps_glue as parity_dapps;
extern crate futures; extern crate parity_hash_fetch as hash_fetch;
extern crate parity_reactor; extern crate parity_reactor;
#[macro_use] #[macro_use]
@ -68,17 +70,20 @@ mod web;
mod tests; mod tests;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex}; use std::sync::Arc;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::collections::HashMap; use std::collections::HashMap;
use ethcore_rpc::{Metadata}; use jsonrpc_core::{Middleware, MetaIoHandler};
use jsonrpc_http_server::tokio_core::reactor::Remote as TokioRemote;
pub use jsonrpc_http_server::{DomainsValidation, Host, AccessControlAllowOrigin};
pub use jsonrpc_http_server::hyper;
use ethcore_rpc::Metadata;
use fetch::{Fetch, Client as FetchClient}; use fetch::{Fetch, Client as FetchClient};
use hash_fetch::urlhint::ContractClient; use hash_fetch::urlhint::ContractClient;
use jsonrpc_core::Middleware;
use jsonrpc_core::reactor::RpcHandler;
use router::auth::{Authorization, NoAuth, HttpBasicAuth};
use parity_reactor::Remote; use parity_reactor::Remote;
use router::auth::{Authorization, NoAuth, HttpBasicAuth};
use self::apps::{HOME_PAGE, DAPPS_DOMAIN}; use self::apps::{HOME_PAGE, DAPPS_DOMAIN};
@ -110,8 +115,8 @@ pub struct ServerBuilder<T: Fetch = FetchClient> {
sync_status: Arc<SyncStatus>, sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>, web_proxy_tokens: Arc<WebProxyTokens>,
signer_address: Option<(String, u16)>, signer_address: Option<(String, u16)>,
allowed_hosts: Option<Vec<String>>, allowed_hosts: Option<Vec<Host>>,
extra_cors: Option<Vec<String>>, extra_cors: Option<Vec<AccessControlAllowOrigin>>,
remote: Remote, remote: Remote,
fetch: Option<T>, fetch: Option<T>,
} }
@ -172,15 +177,15 @@ impl<T: Fetch> ServerBuilder<T> {
/// Change allowed hosts. /// Change allowed hosts.
/// `None` - All hosts are allowed /// `None` - All hosts are allowed
/// `Some(whitelist)` - Allow only whitelisted hosts (+ listen address) /// `Some(whitelist)` - Allow only whitelisted hosts (+ listen address)
pub fn allowed_hosts(mut self, allowed_hosts: Option<Vec<String>>) -> Self { pub fn allowed_hosts(mut self, allowed_hosts: DomainsValidation<Host>) -> Self {
self.allowed_hosts = allowed_hosts; self.allowed_hosts = allowed_hosts.into();
self self
} }
/// Extra cors headers. /// Extra cors headers.
/// `None` - no additional CORS URLs /// `None` - no additional CORS URLs
pub fn extra_cors_headers(mut self, cors: Option<Vec<String>>) -> Self { pub fn extra_cors_headers(mut self, cors: DomainsValidation<AccessControlAllowOrigin>) -> Self {
self.extra_cors = cors; self.extra_cors = cors.into();
self self
} }
@ -192,7 +197,7 @@ impl<T: Fetch> ServerBuilder<T> {
/// Asynchronously start server with no authentication, /// Asynchronously start server with no authentication,
/// returns result with `Server` handle on success or an error. /// returns result with `Server` handle on success or an error.
pub fn start_unsecured_http<S: Middleware<Metadata>>(self, addr: &SocketAddr, handler: RpcHandler<Metadata, S>) -> Result<Server, ServerError> { pub fn start_unsecured_http<S: Middleware<Metadata>>(self, addr: &SocketAddr, handler: MetaIoHandler<Metadata, S>, tokio_remote: TokioRemote) -> Result<Server, ServerError> {
let fetch = self.fetch_client()?; let fetch = self.fetch_client()?;
Server::start_http( Server::start_http(
addr, addr,
@ -207,13 +212,14 @@ impl<T: Fetch> ServerBuilder<T> {
self.sync_status, self.sync_status,
self.web_proxy_tokens, self.web_proxy_tokens,
self.remote, self.remote,
tokio_remote,
fetch, fetch,
) )
} }
/// Asynchronously start server with `HTTP Basic Authentication`, /// Asynchronously start server with `HTTP Basic Authentication`,
/// return result with `Server` handle on success or an error. /// return result with `Server` handle on success or an error.
pub fn start_basic_auth_http<S: Middleware<Metadata>>(self, addr: &SocketAddr, username: &str, password: &str, handler: RpcHandler<Metadata, S>) -> Result<Server, ServerError> { pub fn start_basic_auth_http<S: Middleware<Metadata>>(self, addr: &SocketAddr, username: &str, password: &str, handler: MetaIoHandler<Metadata, S>, tokio_remote: TokioRemote) -> Result<Server, ServerError> {
let fetch = self.fetch_client()?; let fetch = self.fetch_client()?;
Server::start_http( Server::start_http(
addr, addr,
@ -228,6 +234,7 @@ impl<T: Fetch> ServerBuilder<T> {
self.sync_status, self.sync_status,
self.web_proxy_tokens, self.web_proxy_tokens,
self.remote, self.remote,
tokio_remote,
fetch, fetch,
) )
} }
@ -243,12 +250,11 @@ impl<T: Fetch> ServerBuilder<T> {
/// Webapps HTTP server. /// Webapps HTTP server.
pub struct Server { pub struct Server {
server: Option<hyper::server::Listening>, server: Option<hyper::server::Listening>,
panic_handler: Arc<Mutex<Option<Box<Fn() -> () + Send>>>>,
} }
impl Server { impl Server {
/// Returns a list of allowed hosts or `None` if all hosts are allowed. /// Returns a list of allowed hosts or `None` if all hosts are allowed.
fn allowed_hosts(hosts: Option<Vec<String>>, bind_address: String) -> Option<Vec<String>> { fn allowed_hosts(hosts: Option<Vec<Host>>, bind_address: String) -> Option<Vec<Host>> {
let mut allowed = Vec::new(); let mut allowed = Vec::new();
match hosts { match hosts {
@ -263,16 +269,19 @@ impl Server {
} }
/// Returns a list of CORS domains for API endpoint. /// Returns a list of CORS domains for API endpoint.
fn cors_domains(signer_address: Option<(String, u16)>, extra_cors: Option<Vec<String>>) -> Vec<String> { fn cors_domains(
signer_address: Option<(String, u16)>,
extra_cors: Option<Vec<AccessControlAllowOrigin>>,
) -> Vec<AccessControlAllowOrigin> {
let basic_cors = match signer_address { let basic_cors = match signer_address {
Some(signer_address) => vec![ Some(signer_address) => [
format!("http://{}{}", HOME_PAGE, DAPPS_DOMAIN), format!("http://{}{}", HOME_PAGE, DAPPS_DOMAIN),
format!("http://{}{}:{}", HOME_PAGE, DAPPS_DOMAIN, signer_address.1), format!("http://{}{}:{}", HOME_PAGE, DAPPS_DOMAIN, signer_address.1),
format!("http://{}", address(&signer_address)), format!("http://{}", address(&signer_address)),
format!("https://{}{}", HOME_PAGE, DAPPS_DOMAIN), format!("https://{}{}", HOME_PAGE, DAPPS_DOMAIN),
format!("https://{}{}:{}", HOME_PAGE, DAPPS_DOMAIN, signer_address.1), format!("https://{}{}:{}", HOME_PAGE, DAPPS_DOMAIN, signer_address.1),
format!("https://{}", address(&signer_address)), format!("https://{}", address(&signer_address)),
], ].into_iter().map(|val| AccessControlAllowOrigin::Value(val.into())).collect(),
None => vec![], None => vec![],
}; };
@ -284,10 +293,10 @@ impl Server {
fn start_http<A: Authorization + 'static, F: Fetch, T: Middleware<Metadata>>( fn start_http<A: Authorization + 'static, F: Fetch, T: Middleware<Metadata>>(
addr: &SocketAddr, addr: &SocketAddr,
hosts: Option<Vec<String>>, hosts: Option<Vec<Host>>,
extra_cors: Option<Vec<String>>, extra_cors: Option<Vec<AccessControlAllowOrigin>>,
authorization: A, authorization: A,
handler: RpcHandler<Metadata, T>, handler: MetaIoHandler<Metadata, T>,
dapps_path: PathBuf, dapps_path: PathBuf,
extra_dapps: Vec<PathBuf>, extra_dapps: Vec<PathBuf>,
signer_address: Option<(String, u16)>, signer_address: Option<(String, u16)>,
@ -295,9 +304,9 @@ impl Server {
sync_status: Arc<SyncStatus>, sync_status: Arc<SyncStatus>,
web_proxy_tokens: Arc<WebProxyTokens>, web_proxy_tokens: Arc<WebProxyTokens>,
remote: Remote, remote: Remote,
tokio_remote: TokioRemote,
fetch: F, fetch: F,
) -> Result<Server, ServerError> { ) -> Result<Server, ServerError> {
let panic_handler = Arc::new(Mutex::new(None));
let authorization = Arc::new(authorization); let authorization = Arc::new(authorization);
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new( let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(
hash_fetch::urlhint::URLHintContract::new(registrar), hash_fetch::urlhint::URLHintContract::new(registrar),
@ -318,7 +327,7 @@ impl Server {
let special = Arc::new({ let special = Arc::new({
let mut special = HashMap::new(); let mut special = HashMap::new();
special.insert(router::SpecialEndpoint::Rpc, rpc::rpc(handler, cors_domains.clone(), panic_handler.clone())); special.insert(router::SpecialEndpoint::Rpc, rpc::rpc(handler, tokio_remote, cors_domains.clone()));
special.insert(router::SpecialEndpoint::Utils, apps::utils()); special.insert(router::SpecialEndpoint::Utils, apps::utils());
special.insert( special.insert(
router::SpecialEndpoint::Api, router::SpecialEndpoint::Api,
@ -346,17 +355,11 @@ impl Server {
Server { Server {
server: Some(l), server: Some(l),
panic_handler: panic_handler,
} }
}) })
.map_err(ServerError::from) .map_err(ServerError::from)
} }
/// Set callback for panics.
pub fn set_panic_handler<F>(&self, handler: F) where F : Fn() -> () + Send + 'static {
*self.panic_handler.lock().unwrap() = Some(Box::new(handler));
}
#[cfg(test)] #[cfg(test)]
/// Returns address that this server is bound to. /// Returns address that this server is bound to.
pub fn addr(&self) -> &SocketAddr { pub fn addr(&self) -> &SocketAddr {
@ -408,6 +411,7 @@ fn address(address: &(String, u16)) -> String {
#[cfg(test)] #[cfg(test)]
mod util_tests { mod util_tests {
use super::Server; use super::Server;
use jsonrpc_http_server::AccessControlAllowOrigin;
#[test] #[test]
fn should_return_allowed_hosts() { fn should_return_allowed_hosts() {
@ -432,18 +436,18 @@ mod util_tests {
// when // when
let none = Server::cors_domains(None, None); let none = Server::cors_domains(None, None);
let some = Server::cors_domains(Some(("127.0.0.1".into(), 18180)), None); let some = Server::cors_domains(Some(("127.0.0.1".into(), 18180)), None);
let extra = Server::cors_domains(None, Some(vec!["all".to_owned()])); let extra = Server::cors_domains(None, Some(vec!["all".into()]));
// then // then
assert_eq!(none, Vec::<String>::new()); assert_eq!(none, Vec::<AccessControlAllowOrigin>::new());
assert_eq!(some, vec![ assert_eq!(some, vec![
"http://parity.web3.site".to_owned(), "http://parity.web3.site".into(),
"http://parity.web3.site:18180".into(), "http://parity.web3.site:18180".into(),
"http://127.0.0.1:18180".into(), "http://127.0.0.1:18180".into(),
"https://parity.web3.site".into(), "https://parity.web3.site".into(),
"https://parity.web3.site:18180".into(), "https://parity.web3.site:18180".into(),
"https://127.0.0.1:18180".into() "https://127.0.0.1:18180".into(),
]); ]);
assert_eq!(extra, vec!["all".to_owned()]); assert_eq!(extra, vec![AccessControlAllowOrigin::Any]);
} }
} }

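With this change the dapps `ServerBuilder` mirrors the jsonrpc-http-server configuration types: `allowed_hosts` takes a `DomainsValidation<Host>` and `extra_cors_headers` a `DomainsValidation<AccessControlAllowOrigin>`, both stored internally as `Option<Vec<_>>` through the `.into()` calls visible above. A minimal caller-side sketch, assuming the `AllowOnly`/`Disabled` variants exposed by jsonrpc-http-server 7.x (the builder construction itself is elided):

use jsonrpc_http_server::{AccessControlAllowOrigin, DomainsValidation, Host};

// Values that could be fed to builder.allowed_hosts(..) and
// builder.extra_cors_headers(..) respectively.
fn dapps_access_config() -> (DomainsValidation<Host>, DomainsValidation<AccessControlAllowOrigin>) {
    let hosts = DomainsValidation::Disabled; // stored as None: every host is allowed
    let cors = DomainsValidation::AllowOnly(vec![
        "http://localhost:3000".into(), // -> AccessControlAllowOrigin::Value(..)
        "all".into(),                   // -> AccessControlAllowOrigin::Any, as in the test above
    ]);
    (hosts, cors)
}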

@ -19,18 +19,13 @@ use apps::DAPPS_DOMAIN;
use hyper::{server, header, StatusCode}; use hyper::{server, header, StatusCode};
use hyper::net::HttpStream; use hyper::net::HttpStream;
use jsonrpc_http_server::{is_host_header_valid};
use handlers::ContentHandler; use handlers::ContentHandler;
use jsonrpc_http_server;
use jsonrpc_server_utils::hosts;
pub fn is_valid(request: &server::Request<HttpStream>, allowed_hosts: &[String], endpoints: Vec<String>) -> bool { pub fn is_valid(req: &server::Request<HttpStream>, allowed_hosts: &Option<Vec<hosts::Host>>) -> bool {
let mut endpoints = endpoints.iter() let header_valid = jsonrpc_http_server::is_host_allowed(req, allowed_hosts);
.map(|endpoint| format!("{}{}", endpoint, DAPPS_DOMAIN)) match (header_valid, req.headers().get::<header::Host>()) {
.collect::<Vec<String>>();
endpoints.extend_from_slice(allowed_hosts);
let header_valid = is_host_header_valid(request, &endpoints);
match (header_valid, request.headers().get::<header::Host>()) {
(true, _) => true, (true, _) => true,
(_, Some(host)) => host.hostname.ends_with(DAPPS_DOMAIN), (_, Some(host)) => host.hostname.ends_with(DAPPS_DOMAIN),
_ => false, _ => false,


@ -24,14 +24,16 @@ use address;
use std::cmp; use std::cmp;
use std::sync::Arc; use std::sync::Arc;
use std::collections::HashMap; use std::collections::HashMap;
use url::{Url, Host}; use url::{Url, Host};
use hyper::{self, server, header, Next, Encoder, Decoder, Control, StatusCode}; use hyper::{self, server, header, Next, Encoder, Decoder, Control, StatusCode};
use hyper::net::HttpStream; use hyper::net::HttpStream;
use jsonrpc_server_utils::hosts;
use apps::{self, DAPPS_DOMAIN}; use apps::{self, DAPPS_DOMAIN};
use apps::fetcher::Fetcher; use apps::fetcher::Fetcher;
use endpoint::{Endpoint, Endpoints, EndpointPath}; use endpoint::{Endpoint, Endpoints, EndpointPath};
use handlers::{self, Redirection, ContentHandler}; use handlers::{self, Redirection, ContentHandler};
use self::auth::{Authorization, Authorized};
/// Special endpoints are accessible on every domain (every dapp) /// Special endpoints are accessible on every domain (every dapp)
#[derive(Debug, PartialEq, Hash, Eq)] #[derive(Debug, PartialEq, Hash, Eq)]
@ -42,18 +44,18 @@ pub enum SpecialEndpoint {
None, None,
} }
pub struct Router<A: Authorization + 'static> { pub struct Router<A: auth::Authorization + 'static> {
control: Option<Control>, control: Option<Control>,
signer_address: Option<(String, u16)>, signer_address: Option<(String, u16)>,
endpoints: Arc<Endpoints>, endpoints: Arc<Endpoints>,
fetch: Arc<Fetcher>, fetch: Arc<Fetcher>,
special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>, special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
authorization: Arc<A>, authorization: Arc<A>,
allowed_hosts: Option<Vec<String>>, allowed_hosts: Option<Vec<hosts::Host>>,
handler: Box<server::Handler<HttpStream> + Send>, handler: Box<server::Handler<HttpStream> + Send>,
} }
impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> { impl<A: auth::Authorization + 'static> server::Handler<HttpStream> for Router<A> {
fn on_request(&mut self, req: server::Request<HttpStream>) -> Next { fn on_request(&mut self, req: server::Request<HttpStream>) -> Next {
// Choose proper handler depending on path / domain // Choose proper handler depending on path / domain
@ -66,20 +68,18 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
trace!(target: "dapps", "Routing request to {:?}. Details: {:?}", url, req); trace!(target: "dapps", "Routing request to {:?}. Details: {:?}", url, req);
// Validate Host header // Validate Host header
if let Some(ref hosts) = self.allowed_hosts { trace!(target: "dapps", "Validating host headers against: {:?}", self.allowed_hosts);
trace!(target: "dapps", "Validating host headers against: {:?}", hosts); let is_valid = is_utils || host_validation::is_valid(&req, &self.allowed_hosts);
let is_valid = is_utils || host_validation::is_valid(&req, hosts, self.endpoints.keys().cloned().collect());
if !is_valid { if !is_valid {
debug!(target: "dapps", "Rejecting invalid host header."); debug!(target: "dapps", "Rejecting invalid host header.");
self.handler = host_validation::host_invalid_response(); self.handler = host_validation::host_invalid_response();
return self.handler.on_request(req); return self.handler.on_request(req);
} }
}
trace!(target: "dapps", "Checking authorization."); trace!(target: "dapps", "Checking authorization.");
// Check authorization // Check authorization
let auth = self.authorization.is_authorized(&req); let auth = self.authorization.is_authorized(&req);
if let Authorized::No(handler) = auth { if let auth::Authorized::No(handler) = auth {
debug!(target: "dapps", "Authorization denied."); debug!(target: "dapps", "Authorization denied.");
self.handler = handler; self.handler = handler;
return self.handler.on_request(req); return self.handler.on_request(req);
@ -181,7 +181,7 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
} }
} }
impl<A: Authorization> Router<A> { impl<A: auth::Authorization> Router<A> {
pub fn new( pub fn new(
control: Control, control: Control,
signer_address: Option<(String, u16)>, signer_address: Option<(String, u16)>,
@ -189,7 +189,7 @@ impl<A: Authorization> Router<A> {
endpoints: Arc<Endpoints>, endpoints: Arc<Endpoints>,
special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>, special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
authorization: Arc<A>, authorization: Arc<A>,
allowed_hosts: Option<Vec<String>>, allowed_hosts: Option<Vec<hosts::Host>>,
) -> Self { ) -> Self {
let handler = special.get(&SpecialEndpoint::Utils) let handler = special.get(&SpecialEndpoint::Utils)


@ -14,55 +14,69 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::{Arc, Mutex}; use std::sync::Arc;
use hyper; use hyper;
use ethcore_rpc::{Metadata, Origin}; use ethcore_rpc::{Metadata, Origin};
use jsonrpc_core::Middleware; use jsonrpc_core::{Middleware, MetaIoHandler};
use jsonrpc_core::reactor::RpcHandler; use jsonrpc_http_server::{self as http, AccessControlAllowOrigin, HttpMetaExtractor};
use jsonrpc_http_server::{Rpc, ServerHandler, PanicHandler, AccessControlAllowOrigin, HttpMetaExtractor}; use jsonrpc_http_server::tokio_core::reactor::Remote;
use endpoint::{Endpoint, EndpointPath, Handler}; use endpoint::{Endpoint, EndpointPath, Handler};
pub fn rpc<T: Middleware<Metadata>>(
-	handler: RpcHandler<Metadata, T>,
-	cors_domains: Vec<String>,
-	panic_handler: Arc<Mutex<Option<Box<Fn() -> () + Send>>>>,
+	handler: MetaIoHandler<Metadata, T>,
+	remote: Remote,
+	cors_domains: Vec<AccessControlAllowOrigin>,
) -> Box<Endpoint> {
	Box::new(RpcEndpoint {
-		handler: handler,
+		handler: Arc::new(handler),
+		remote: remote,
		meta_extractor: Arc::new(MetadataExtractor),
-		panic_handler: panic_handler,
-		cors_domain: Some(cors_domains.into_iter().map(AccessControlAllowOrigin::Value).collect()),
+		cors_domain: Some(cors_domains),
		// NOTE [ToDr] We don't need to do any hosts validation here. It's already done in router.
		allowed_hosts: None,
	})
}
struct RpcEndpoint<T: Middleware<Metadata>> {
-	handler: RpcHandler<Metadata, T>,
+	handler: Arc<MetaIoHandler<Metadata, T>>,
+	remote: Remote,
	meta_extractor: Arc<HttpMetaExtractor<Metadata>>,
-	panic_handler: Arc<Mutex<Option<Box<Fn() -> () + Send>>>>,
	cors_domain: Option<Vec<AccessControlAllowOrigin>>,
-	allowed_hosts: Option<Vec<String>>,
+	allowed_hosts: Option<Vec<http::Host>>,
}
impl<T: Middleware<Metadata>> Endpoint for RpcEndpoint<T> {
	fn to_async_handler(&self, _path: EndpointPath, control: hyper::Control) -> Box<Handler> {
-		let panic_handler = PanicHandler { handler: self.panic_handler.clone() };
-		Box::new(ServerHandler::new(
-			Rpc::new(self.handler.clone(), self.meta_extractor.clone()),
+		Box::new(http::ServerHandler::new(
+			http::Rpc {
+				handler: self.handler.clone(),
+				remote: self.remote.clone(),
+				extractor: self.meta_extractor.clone(),
+			},
			self.cors_domain.clone(),
			self.allowed_hosts.clone(),
-			panic_handler,
+			Arc::new(NoopMiddleware),
			control,
		))
	}
}
#[derive(Default)]
struct NoopMiddleware;
impl http::RequestMiddleware for NoopMiddleware {
fn on_request(&self, request: &http::hyper::server::Request<http::hyper::net::HttpStream>) -> http::RequestMiddlewareAction {
http::RequestMiddlewareAction::Proceed {
should_continue_on_invalid_cors: request.headers().get::<http::hyper::header::Origin>().is_none(),
}
}
}
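The middleware above never rejects a request outright; it only decides whether a failed CORS check may still continue, and it allows that exactly when the request carries no `Origin` header (i.e. a non-browser client). A tiny self-contained sketch of that decision (hypothetical helper, for illustration only):

// Hypothetical sketch: continue past a failed CORS check only for requests
// that carry no Origin header (non-browser clients such as curl).
fn should_continue_on_invalid_cors(origin_header: Option<&str>) -> bool {
	origin_header.is_none()
}

fn main() {
	assert!(should_continue_on_invalid_cors(None));
	assert!(!should_continue_on_invalid_cors(Some("https://example.com")));
}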
struct MetadataExtractor;
impl HttpMetaExtractor<Metadata> for MetadataExtractor {
-	fn read_metadata(&self, request: &hyper::server::Request<hyper::net::HttpStream>) -> Metadata {
-		let dapp_id = request.headers().get::<hyper::header::Origin>()
+	fn read_metadata(&self, request: &http::hyper::server::Request<http::hyper::net::HttpStream>) -> Metadata {
+		let dapp_id = request.headers().get::<http::hyper::header::Origin>()
			.map(|origin| format!("{}://{}", origin.scheme, origin.host))
.or_else(|| { .or_else(|| {
// fallback to custom header, but only if origin is null // fallback to custom header, but only if origin is null


@ -33,8 +33,8 @@ fn should_return_error() {
); );
// then // then
-	assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
-	assert_eq!(response.headers.get(3).unwrap(), "Content-Type: application/json");
+	response.assert_status("HTTP/1.1 404 Not Found");
+	response.assert_header("Content-Type", "application/json");
	assert_eq!(response.body, format!("58\n{}\n0\n\n", r#"{"code":"404","title":"Not Found","detail":"Resource you requested has not been found."}"#));
assert_security_headers(&response.headers); assert_security_headers(&response.headers);
} }
@ -56,8 +56,8 @@ fn should_serve_apps() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert_eq!(response.headers.get(3).unwrap(), "Content-Type: application/json"); response.assert_header("Content-Type", "application/json");
assert!(response.body.contains("Parity UI"), response.body); assert!(response.body.contains("Parity UI"), response.body);
assert_security_headers(&response.headers); assert_security_headers(&response.headers);
} }
@ -79,8 +79,8 @@ fn should_handle_ping() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert_eq!(response.headers.get(3).unwrap(), "Content-Type: application/json"); response.assert_header("Content-Type", "application/json");
assert_eq!(response.body, "0\n\n".to_owned()); assert_eq!(response.body, "0\n\n".to_owned());
assert_security_headers(&response.headers); assert_security_headers(&response.headers);
} }
@ -102,7 +102,7 @@ fn should_try_to_resolve_dapp() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned()); response.assert_status("HTTP/1.1 404 Not Found");
assert_eq!(registrar.calls.lock().len(), 2); assert_eq!(registrar.calls.lock().len(), 2);
assert_security_headers(&response.headers); assert_security_headers(&response.headers);
} }
@ -125,12 +125,8 @@ fn should_return_signer_port_cors_headers() {
); );
// then // then
-	assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
-	assert!(
-		response.headers_raw.contains("Access-Control-Allow-Origin: http://127.0.0.1:18180"),
-		"CORS header for signer missing: {:?}",
-		response.headers
-	);
+	response.assert_status("HTTP/1.1 200 OK");
+	response.assert_header("Access-Control-Allow-Origin", "http://127.0.0.1:18180");
} }
#[test] #[test]
@ -151,12 +147,8 @@ fn should_return_signer_port_cors_headers_for_home_parity() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert!( response.assert_header("Access-Control-Allow-Origin", "http://parity.web3.site");
response.headers_raw.contains("Access-Control-Allow-Origin: http://parity.web3.site"),
"CORS header for parity.web3.site missing: {:?}",
response.headers
);
} }
@ -178,12 +170,8 @@ fn should_return_signer_port_cors_headers_for_home_parity_with_https() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert!( response.assert_header("Access-Control-Allow-Origin", "https://parity.web3.site");
response.headers_raw.contains("Access-Control-Allow-Origin: https://parity.web3.site"),
"CORS header for parity.web3.site missing: {:?}",
response.headers
);
} }
#[test] #[test]
@ -204,12 +192,8 @@ fn should_return_signer_port_cors_headers_for_home_parity_with_port() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert!( response.assert_header("Access-Control-Allow-Origin", "http://parity.web3.site:18180");
response.headers_raw.contains("Access-Control-Allow-Origin: http://parity.web3.site:18180"),
"CORS header for parity.web3.site missing: {:?}",
response.headers
);
} }
#[test] #[test]


@ -114,7 +114,7 @@ impl Fetch for FakeFetch {
let data = response.lock().take().unwrap_or(b"Some content"); let data = response.lock().take().unwrap_or(b"Some content");
let cursor = io::Cursor::new(data); let cursor = io::Cursor::new(data);
-			tx.complete(fetch::Response::from_reader(cursor));
+			tx.send(fetch::Response::from_reader(cursor)).unwrap();
}); });
rx.map_err(|_| fetch::Error::Aborted).boxed() rx.map_err(|_| fetch::Error::Aborted).boxed()


@ -21,13 +21,12 @@ use std::sync::Arc;
use env_logger::LogBuilder; use env_logger::LogBuilder;
use ethcore_rpc::Metadata; use ethcore_rpc::Metadata;
use jsonrpc_core::MetaIoHandler; use jsonrpc_core::MetaIoHandler;
use jsonrpc_core::reactor::RpcEventLoop;
use ServerBuilder; use ServerBuilder;
use Server; use Server;
use fetch::Fetch; use fetch::Fetch;
use devtools::http_client; use devtools::http_client;
use parity_reactor::Remote; use parity_reactor::{EventLoop, Remote};
mod registrar; mod registrar;
mod fetch; mod fetch;
@ -48,7 +47,7 @@ fn init_logger() {
pub struct ServerLoop { pub struct ServerLoop {
pub server: Server, pub server: Server,
pub event_loop: RpcEventLoop, pub event_loop: EventLoop,
} }
impl Deref for ServerLoop { impl Deref for ServerLoop {
@ -70,13 +69,12 @@ pub fn init_server<F, B>(process: F, io: MetaIoHandler<Metadata>, remote: Remote
// TODO [ToDr] When https://github.com/ethcore/jsonrpc/issues/26 is resolved // TODO [ToDr] When https://github.com/ethcore/jsonrpc/issues/26 is resolved
// this additional EventLoop wouldn't be needed, we should be able to re-use remote. // this additional EventLoop wouldn't be needed, we should be able to re-use remote.
let event_loop = RpcEventLoop::spawn(); let event_loop = EventLoop::spawn();
let handler = event_loop.handler(Arc::new(io));
let server = process(ServerBuilder::new( let server = process(ServerBuilder::new(
&dapps_path, registrar.clone(), remote, &dapps_path, registrar.clone(), remote,
)) ))
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT))) .signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), handler).unwrap(); .start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), io, event_loop.raw_remote()).unwrap();
( (
ServerLoop { server: server, event_loop: event_loop }, ServerLoop { server: server, event_loop: event_loop },
registrar, registrar,
@ -89,12 +87,12 @@ pub fn serve_with_auth(user: &str, pass: &str) -> ServerLoop {
let mut dapps_path = env::temp_dir(); let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading"); dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let event_loop = RpcEventLoop::spawn(); let event_loop = EventLoop::spawn();
let handler = event_loop.handler(Arc::new(MetaIoHandler::default())); let io = MetaIoHandler::default();
let server = ServerBuilder::new(&dapps_path, registrar, Remote::new(event_loop.remote())) let server = ServerBuilder::new(&dapps_path, registrar, event_loop.remote())
.signer_address(Some(("127.0.0.1".into(), SIGNER_PORT))) .signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)))
.allowed_hosts(None) .allowed_hosts(None.into())
.start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), user, pass, handler).unwrap(); .start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), user, pass, io, event_loop.raw_remote()).unwrap();
ServerLoop { ServerLoop {
server: server, server: server,
event_loop: event_loop, event_loop: event_loop,
@ -102,26 +100,28 @@ pub fn serve_with_auth(user: &str, pass: &str) -> ServerLoop {
} }
pub fn serve_with_rpc(io: MetaIoHandler<Metadata>) -> ServerLoop { pub fn serve_with_rpc(io: MetaIoHandler<Metadata>) -> ServerLoop {
init_server(|builder| builder.allowed_hosts(None), io, Remote::new_sync()).0 init_server(|builder| builder.allowed_hosts(None.into()), io, Remote::new_sync()).0
} }
pub fn serve_hosts(hosts: Option<Vec<String>>) -> ServerLoop { pub fn serve_hosts(hosts: Option<Vec<String>>) -> ServerLoop {
init_server(|builder| builder.allowed_hosts(hosts), Default::default(), Remote::new_sync()).0 let hosts = hosts.map(|hosts| hosts.into_iter().map(Into::into).collect());
init_server(|builder| builder.allowed_hosts(hosts.into()), Default::default(), Remote::new_sync()).0
} }
pub fn serve_extra_cors(extra_cors: Option<Vec<String>>) -> ServerLoop { pub fn serve_extra_cors(extra_cors: Option<Vec<String>>) -> ServerLoop {
init_server(|builder| builder.allowed_hosts(None).extra_cors_headers(extra_cors), Default::default(), Remote::new_sync()).0 let extra_cors = extra_cors.map(|cors| cors.into_iter().map(Into::into).collect());
init_server(|builder| builder.allowed_hosts(None.into()).extra_cors_headers(extra_cors.into()), Default::default(), Remote::new_sync()).0
} }
pub fn serve_with_registrar() -> (ServerLoop, Arc<FakeRegistrar>) { pub fn serve_with_registrar() -> (ServerLoop, Arc<FakeRegistrar>) {
init_server(|builder| builder.allowed_hosts(None), Default::default(), Remote::new_sync()) init_server(|builder| builder.allowed_hosts(None.into()), Default::default(), Remote::new_sync())
} }
pub fn serve_with_registrar_and_sync() -> (ServerLoop, Arc<FakeRegistrar>) { pub fn serve_with_registrar_and_sync() -> (ServerLoop, Arc<FakeRegistrar>) {
init_server(|builder| { init_server(|builder| {
builder builder
.sync_status(Arc::new(|| true)) .sync_status(Arc::new(|| true))
.allowed_hosts(None) .allowed_hosts(None.into())
}, Default::default(), Remote::new_sync()) }, Default::default(), Remote::new_sync())
} }
@ -133,7 +133,7 @@ pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Serv
let fetch = FakeFetch::default(); let fetch = FakeFetch::default();
let f = fetch.clone(); let f = fetch.clone();
let (server, reg) = init_server(move |builder| { let (server, reg) = init_server(move |builder| {
builder.allowed_hosts(None).fetch(f.clone()) builder.allowed_hosts(None.into()).fetch(f.clone())
}, Default::default(), if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() }); }, Default::default(), if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() });
(server, fetch, reg) (server, fetch, reg)
@ -144,7 +144,7 @@ pub fn serve_with_fetch(web_token: &'static str) -> (ServerLoop, FakeFetch) {
let f = fetch.clone(); let f = fetch.clone();
let (server, _) = init_server(move |builder| { let (server, _) = init_server(move |builder| {
builder builder
.allowed_hosts(None) .allowed_hosts(None.into())
.fetch(f.clone()) .fetch(f.clone())
.web_proxy_tokens(Arc::new(move |token| &token == web_token)) .web_proxy_tokens(Arc::new(move |token| &token == web_token))
}, Default::default(), Remote::new_sync()); }, Default::default(), Remote::new_sync());
@ -153,7 +153,7 @@ pub fn serve_with_fetch(web_token: &'static str) -> (ServerLoop, FakeFetch) {
} }
pub fn serve() -> ServerLoop { pub fn serve() -> ServerLoop {
init_server(|builder| builder.allowed_hosts(None), Default::default(), Remote::new_sync()).0 init_server(|builder| builder.allowed_hosts(None.into()), Default::default(), Remote::new_sync()).0
} }
pub fn request(server: ServerLoop, request: &str) -> http_client::Response { pub fn request(server: ServerLoop, request: &str) -> http_client::Response {


@ -32,7 +32,7 @@ fn should_redirect_to_home() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned()); response.assert_status("HTTP/1.1 302 Found");
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180"); assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
} }
@ -52,7 +52,7 @@ fn should_redirect_to_home_when_trailing_slash_is_missing() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned()); response.assert_status("HTTP/1.1 302 Found");
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180"); assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
} }
@ -72,7 +72,7 @@ fn should_redirect_to_home_for_users_with_cached_redirection() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned()); response.assert_status("HTTP/1.1 302 Found");
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180"); assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
} }
@ -92,7 +92,7 @@ fn should_display_404_on_invalid_dapp() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned()); response.assert_status("HTTP/1.1 404 Not Found");
assert_security_headers_for_embed(&response.headers); assert_security_headers_for_embed(&response.headers);
} }
@ -112,7 +112,7 @@ fn should_display_404_on_invalid_dapp_with_domain() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned()); response.assert_status("HTTP/1.1 404 Not Found");
assert_security_headers_for_embed(&response.headers); assert_security_headers_for_embed(&response.headers);
} }
@ -134,8 +134,8 @@ fn should_serve_rpc() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert_eq!(response.body, format!("58\n{}\n\n0\n\n", r#"{"jsonrpc":"2.0","error":{"code":-32700,"message":"Parse error","data":null},"id":null}"#)); assert_eq!(response.body, format!("4C\n{}\n\n0\n\n", r#"{"jsonrpc":"2.0","error":{"code":-32700,"message":"Parse error"},"id":null}"#));
} }
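The expected body changes because the parse-error JSON in the test no longer includes the `"data":null` field, so the chunked-encoding size prefix shrinks from 0x58 (88 bytes) to 0x4C (76 bytes): the 75-byte JSON payload plus the trailing newline carried in the chunk. A quick self-contained check of that arithmetic:

fn main() {
	let json = r#"{"jsonrpc":"2.0","error":{"code":-32700,"message":"Parse error"},"id":null}"#;
	// chunk payload is the JSON plus one trailing newline; the chunk-size line is its length in hex
	assert_eq!(json.len(), 75);
	assert_eq!(format!("{:X}", json.len() + 1), "4C");
}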
#[test] #[test]
@ -156,8 +156,8 @@ fn should_serve_rpc_at_slash_rpc() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert_eq!(response.body, format!("58\n{}\n\n0\n\n", r#"{"jsonrpc":"2.0","error":{"code":-32700,"message":"Parse error","data":null},"id":null}"#)); assert_eq!(response.body, format!("4C\n{}\n\n0\n\n", r#"{"jsonrpc":"2.0","error":{"code":-32700,"message":"Parse error"},"id":null}"#));
} }
@ -178,7 +178,7 @@ fn should_serve_proxy_pac() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert_eq!(response.body, "DD\n\nfunction FindProxyForURL(url, host) {\n\tif (shExpMatch(host, \"parity.web3.site\"))\n\t{\n\t\treturn \"PROXY 127.0.0.1:18180\";\n\t}\n\n\tif (shExpMatch(host, \"*.web3.site\"))\n\t{\n\t\treturn \"PROXY 127.0.0.1:8080\";\n\t}\n\n\treturn \"DIRECT\";\n}\n\n0\n\n".to_owned()); assert_eq!(response.body, "DD\n\nfunction FindProxyForURL(url, host) {\n\tif (shExpMatch(host, \"parity.web3.site\"))\n\t{\n\t\treturn \"PROXY 127.0.0.1:18180\";\n\t}\n\n\tif (shExpMatch(host, \"*.web3.site\"))\n\t{\n\t\treturn \"PROXY 127.0.0.1:8080\";\n\t}\n\n\treturn \"DIRECT\";\n}\n\n0\n\n".to_owned());
assert_security_headers(&response.headers); assert_security_headers(&response.headers);
} }
@ -200,7 +200,7 @@ fn should_serve_utils() {
); );
// then // then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned()); response.assert_status("HTTP/1.1 200 OK");
assert_eq!(response.body.contains("function(){"), true); assert_eq!(response.body.contains("function(){"), true);
assert_security_headers(&response.headers); assert_security_headers(&response.headers);
} }


@ -55,8 +55,8 @@ fn should_extract_metadata() {
// given // given
let mut io = MetaIoHandler::default(); let mut io = MetaIoHandler::default();
io.add_method_with_meta("rpc_test", |_params, meta: Metadata| { io.add_method_with_meta("rpc_test", |_params, meta: Metadata| {
-		assert_eq!(meta.origin, Origin::Dapps("https://parity.io/".into()));
-		assert_eq!(meta.dapp_id(), "https://parity.io/".into());
+		assert_eq!(meta.origin, Origin::Dapps("".into()));
+		assert_eq!(meta.dapp_id(), "".into());
future::ok(Value::String("Hello World!".into())).boxed() future::ok(Value::String("Hello World!".into())).boxed()
}); });
let server = serve_with_rpc(io); let server = serve_with_rpc(io);
@ -68,7 +68,6 @@ fn should_extract_metadata() {
POST /rpc/ HTTP/1.1\r\n\ POST /rpc/ HTTP/1.1\r\n\
Host: 127.0.0.1:8080\r\n\ Host: 127.0.0.1:8080\r\n\
Connection: close\r\n\ Connection: close\r\n\
Origin: https://parity.io/\r\n\
X-Parity-Origin: https://this.should.be.ignored\r\n\ X-Parity-Origin: https://this.should.be.ignored\r\n\
Content-Type: application/json\r\n\ Content-Type: application/json\r\n\
Content-Length: {}\r\n\ Content-Length: {}\r\n\


@ -27,6 +27,7 @@ byteorder = "1.0"
transient-hashmap = "0.4" transient-hashmap = "0.4"
linked-hash-map = "0.3.0" linked-hash-map = "0.3.0"
lru-cache = "0.1.0" lru-cache = "0.1.0"
itertools = "0.5"
ethabi = "1.0.0" ethabi = "1.0.0"
evmjit = { path = "../evmjit", optional = true } evmjit = { path = "../evmjit", optional = true }
clippy = { version = "0.0.103", optional = true} clippy = { version = "0.0.103", optional = true}
@ -43,14 +44,12 @@ rlp = { path = "../util/rlp" }
ethcore-stratum = { path = "../stratum" } ethcore-stratum = { path = "../stratum" }
ethcore-bloom-journal = { path = "../util/bloom" } ethcore-bloom-journal = { path = "../util/bloom" }
hardware-wallet = { path = "../hw" } hardware-wallet = { path = "../hw" }
ethcore-logger = { path = "../logger" }
stats = { path = "../util/stats" } stats = { path = "../util/stats" }
+hyper = { git = "https://github.com/paritytech/hyper", default-features = false }
num = "0.1"
bn = { git = "https://github.com/paritytech/bn" }
-[dependencies.hyper]
-git = "https://github.com/ethcore/hyper"
-default-features = false
[features] [features]
jit = ["evmjit"] jit = ["evmjit"]
evm-debug = ["slow-blocks"] evm-debug = ["slow-blocks"]


@ -1,5 +1,5 @@
[package] [package]
description = "Parity LES primitives" description = "Parity Light Client Implementation"
homepage = "http://parity.io" homepage = "http://parity.io"
license = "GPL-3.0" license = "GPL-3.0"
name = "ethcore-light" name = "ethcore-light"


@ -23,7 +23,7 @@
use ethcore::ids::BlockId; use ethcore::ids::BlockId;
use util::{Bytes, H256, U256, HashDB, MemoryDB}; use util::{Bytes, H256, U256, HashDB, MemoryDB};
use util::trie::{self, TrieMut, TrieDBMut, Trie, TrieDB, Recorder}; use util::trie::{self, TrieMut, TrieDBMut, Trie, TrieDB, Recorder};
-use rlp::{RlpStream, UntrustedRlp, View};
+use rlp::{RlpStream, UntrustedRlp};
// encode a key. // encode a key.
macro_rules! key { macro_rules! key {


@ -24,7 +24,6 @@
//! - It stores only headers (and a pruned subset of them) //! - It stores only headers (and a pruned subset of them)
//! - To allow for flexibility in the database layout once that's incorporated. //! - To allow for flexibility in the database layout once that's incorporated.
// TODO: use DB instead of memory. DB Layout: just the contents of `candidates`/`headers` // TODO: use DB instead of memory. DB Layout: just the contents of `candidates`/`headers`
//
use std::collections::{BTreeMap, HashMap}; use std::collections::{BTreeMap, HashMap};


@ -31,7 +31,7 @@ use ethcore::service::ClientIoMessage;
use ethcore::encoded; use ethcore::encoded;
use io::IoChannel; use io::IoChannel;
-use util::{Bytes, DBValue, H256, Mutex, RwLock};
+use util::{H256, Mutex, RwLock};
use self::header_chain::{AncestryIter, HeaderChain}; use self::header_chain::{AncestryIter, HeaderChain};
@ -315,50 +315,3 @@ impl LightChainClient for Client {
Client::cht_root(self, i) Client::cht_root(self, i)
} }
} }
// dummy implementation, should be removed when a `TestClient` is added.
impl ::provider::Provider for Client {
fn chain_info(&self) -> BlockChainInfo {
Client::chain_info(self)
}
fn reorg_depth(&self, _a: &H256, _b: &H256) -> Option<u64> {
None
}
fn earliest_state(&self) -> Option<u64> {
None
}
fn block_header(&self, id: BlockId) -> Option<encoded::Header> {
Client::block_header(self, id)
}
fn block_body(&self, _id: BlockId) -> Option<encoded::Body> {
None
}
fn block_receipts(&self, _hash: &H256) -> Option<Bytes> {
None
}
fn state_proof(&self, _req: ::request::StateProof) -> Vec<Bytes> {
Vec::new()
}
fn contract_code(&self, _req: ::request::ContractCode) -> Bytes {
Vec::new()
}
fn header_proof(&self, _req: ::request::HeaderProof) -> Option<(encoded::Header, Vec<Bytes>)> {
None
}
fn transaction_proof(&self, _req: ::request::TransactionProof) -> Option<Vec<DBValue>> {
None
}
fn ready_transactions(&self) -> Vec<::ethcore::transaction::PendingTransaction> {
Vec::new()
}
}


@ -26,7 +26,7 @@
//! use-cases like sending transactions from a personal account. //! use-cases like sending transactions from a personal account.
//! //!
//! The light client performs a header-only sync, doing verification and pruning //! The light client performs a header-only sync, doing verification and pruning
-//! historical blocks. Upon pruning, batches of 2048 blocks have a number => hash
+//! historical blocks. Upon pruning, batches of 2048 blocks have a number => (hash, TD)
//! mapping sealed into "canonical hash tries" which can later be used to verify //! mapping sealed into "canonical hash tries" which can later be used to verify
//! historical block queries from peers. //! historical block queries from peers.
@ -57,7 +57,7 @@ mod types;
pub use self::provider::Provider; pub use self::provider::Provider;
pub use self::transaction_queue::TransactionQueue; pub use self::transaction_queue::TransactionQueue;
-pub use types::les_request as request;
+pub use types::request as request;
#[macro_use] #[macro_use]
extern crate log; extern crate log;


@ -20,7 +20,7 @@ use network::{NetworkContext, PeerId, NodeId};
use super::{Announcement, LightProtocol, ReqId}; use super::{Announcement, LightProtocol, ReqId};
use super::error::Error; use super::error::Error;
-use request::{self, Request};
+use request::Requests;
/// An I/O context which allows sending and receiving packets as well as /// An I/O context which allows sending and receiving packets as well as
/// disconnecting peers. This is used as a generalization of the portions /// disconnecting peers. This is used as a generalization of the portions
@ -50,13 +50,13 @@ pub trait IoContext {
impl<'a> IoContext for NetworkContext<'a> { impl<'a> IoContext for NetworkContext<'a> {
fn send(&self, peer: PeerId, packet_id: u8, packet_body: Vec<u8>) { fn send(&self, peer: PeerId, packet_id: u8, packet_body: Vec<u8>) {
if let Err(e) = self.send(peer, packet_id, packet_body) { if let Err(e) = self.send(peer, packet_id, packet_body) {
debug!(target: "les", "Error sending packet to peer {}: {}", peer, e); debug!(target: "pip", "Error sending packet to peer {}: {}", peer, e);
} }
} }
fn respond(&self, packet_id: u8, packet_body: Vec<u8>) { fn respond(&self, packet_id: u8, packet_body: Vec<u8>) {
if let Err(e) = self.respond(packet_id, packet_body) { if let Err(e) = self.respond(packet_id, packet_body) {
debug!(target: "les", "Error responding to peer message: {}", e); debug!(target: "pip", "Error responding to peer message: {}", e);
} }
} }
@ -83,16 +83,17 @@ pub trait BasicContext {
fn persistent_peer_id(&self, peer: PeerId) -> Option<NodeId>; fn persistent_peer_id(&self, peer: PeerId) -> Option<NodeId>;
	/// Make a request from a peer.
-	fn request_from(&self, peer: PeerId, request: Request) -> Result<ReqId, Error>;
+	///
+	/// Fails on: nonexistent peer, network error, peer not server,
+	/// insufficient credits. Does not check capabilities before sending.
+	/// On success, returns a request id which can later be coordinated
+	/// with an event.
+	fn request_from(&self, peer: PeerId, request: Requests) -> Result<ReqId, Error>;
/// Make an announcement of new capabilities to the rest of the peers. /// Make an announcement of new capabilities to the rest of the peers.
// TODO: maybe just put this on a timer in LightProtocol? // TODO: maybe just put this on a timer in LightProtocol?
fn make_announcement(&self, announcement: Announcement); fn make_announcement(&self, announcement: Announcement);
/// Find the maximum number of requests of a specific type which can be made from
/// supplied peer.
fn max_requests(&self, peer: PeerId, kind: request::Kind) -> usize;
/// Disconnect a peer. /// Disconnect a peer.
fn disconnect_peer(&self, peer: PeerId); fn disconnect_peer(&self, peer: PeerId);
@ -123,18 +124,14 @@ impl<'a> BasicContext for TickCtx<'a> {
self.io.persistent_peer_id(id) self.io.persistent_peer_id(id)
} }
fn request_from(&self, peer: PeerId, request: Request) -> Result<ReqId, Error> { fn request_from(&self, peer: PeerId, requests: Requests) -> Result<ReqId, Error> {
self.proto.request_from(self.io, &peer, request) self.proto.request_from(self.io, &peer, requests)
} }
fn make_announcement(&self, announcement: Announcement) { fn make_announcement(&self, announcement: Announcement) {
self.proto.make_announcement(self.io, announcement); self.proto.make_announcement(self.io, announcement);
} }
fn max_requests(&self, peer: PeerId, kind: request::Kind) -> usize {
self.proto.max_requests(peer, kind)
}
fn disconnect_peer(&self, peer: PeerId) { fn disconnect_peer(&self, peer: PeerId) {
self.io.disconnect_peer(peer); self.io.disconnect_peer(peer);
} }
@ -160,18 +157,14 @@ impl<'a> BasicContext for Ctx<'a> {
self.io.persistent_peer_id(id) self.io.persistent_peer_id(id)
} }
fn request_from(&self, peer: PeerId, request: Request) -> Result<ReqId, Error> { fn request_from(&self, peer: PeerId, requests: Requests) -> Result<ReqId, Error> {
self.proto.request_from(self.io, &peer, request) self.proto.request_from(self.io, &peer, requests)
} }
fn make_announcement(&self, announcement: Announcement) { fn make_announcement(&self, announcement: Announcement) {
self.proto.make_announcement(self.io, announcement); self.proto.make_announcement(self.io, announcement);
} }
fn max_requests(&self, peer: PeerId, kind: request::Kind) -> usize {
self.proto.max_requests(peer, kind)
}
fn disconnect_peer(&self, peer: PeerId) { fn disconnect_peer(&self, peer: PeerId) {
self.io.disconnect_peer(peer); self.io.disconnect_peer(peer);
} }


@ -56,6 +56,8 @@ pub enum Error {
UnknownPeer, UnknownPeer,
/// Unsolicited response. /// Unsolicited response.
UnsolicitedResponse, UnsolicitedResponse,
/// Bad back-reference in request.
BadBackReference,
/// Not a server. /// Not a server.
NotServer, NotServer,
/// Unsupported protocol version. /// Unsupported protocol version.
@ -78,6 +80,7 @@ impl Error {
Error::WrongNetwork => Punishment::Disable, Error::WrongNetwork => Punishment::Disable,
Error::UnknownPeer => Punishment::Disconnect, Error::UnknownPeer => Punishment::Disconnect,
Error::UnsolicitedResponse => Punishment::Disable, Error::UnsolicitedResponse => Punishment::Disable,
Error::BadBackReference => Punishment::Disable,
Error::NotServer => Punishment::Disable, Error::NotServer => Punishment::Disable,
Error::UnsupportedProtocolVersion(_) => Punishment::Disable, Error::UnsupportedProtocolVersion(_) => Punishment::Disable,
Error::BadProtocolVersion => Punishment::Disable, Error::BadProtocolVersion => Punishment::Disable,
@ -109,6 +112,7 @@ impl fmt::Display for Error {
Error::WrongNetwork => write!(f, "Wrong network"), Error::WrongNetwork => write!(f, "Wrong network"),
Error::UnknownPeer => write!(f, "Unknown peer"), Error::UnknownPeer => write!(f, "Unknown peer"),
Error::UnsolicitedResponse => write!(f, "Peer provided unsolicited data"), Error::UnsolicitedResponse => write!(f, "Peer provided unsolicited data"),
Error::BadBackReference => write!(f, "Bad back-reference in request."),
Error::NotServer => write!(f, "Peer not a server."), Error::NotServer => write!(f, "Peer not a server."),
Error::UnsupportedProtocolVersion(pv) => write!(f, "Unsupported protocol version: {}", pv), Error::UnsupportedProtocolVersion(pv) => write!(f, "Unsupported protocol version: {}", pv),
Error::BadProtocolVersion => write!(f, "Bad protocol version in handshake"), Error::BadProtocolVersion => write!(f, "Bad protocol version in handshake"),

File diff suppressed because it is too large


@ -26,18 +26,13 @@
//! Current default costs are picked completely arbitrarily, not based //! Current default costs are picked completely arbitrarily, not based
//! on any empirical timings or mathematical models. //! on any empirical timings or mathematical models.
-use request;
-use super::packet;
+use request::{self, Request};
use super::error::Error;
use rlp::*; use rlp::*;
use util::U256; use util::U256;
use time::{Duration, SteadyTime}; use time::{Duration, SteadyTime};
/// A request cost specification.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Cost(pub U256, pub U256);
/// Credits value. /// Credits value.
/// ///
/// Produced and recharged using `FlowParams`. /// Produced and recharged using `FlowParams`.
@ -81,92 +76,95 @@ impl Credits {
/// A cost table, mapping requests to base and per-request costs. /// A cost table, mapping requests to base and per-request costs.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct CostTable { pub struct CostTable {
-	headers: Cost, // cost per header
-	bodies: Cost,
-	receipts: Cost,
-	state_proofs: Cost,
-	contract_codes: Cost,
-	header_proofs: Cost,
-	transaction_proof: Cost, // cost per gas.
+	base: U256, // cost per packet.
+	headers: U256, // cost per header
+	body: U256,
+	receipts: U256,
+	account: U256,
+	storage: U256,
+	code: U256,
+	header_proof: U256,
+	transaction_proof: U256, // cost per gas.
}
impl Default for CostTable { impl Default for CostTable {
fn default() -> Self { fn default() -> Self {
// arbitrarily chosen constants. // arbitrarily chosen constants.
CostTable { CostTable {
-			headers: Cost(100000.into(), 10000.into()),
-			bodies: Cost(150000.into(), 15000.into()),
-			receipts: Cost(50000.into(), 5000.into()),
-			state_proofs: Cost(250000.into(), 25000.into()),
-			contract_codes: Cost(200000.into(), 20000.into()),
-			header_proofs: Cost(150000.into(), 15000.into()),
-			transaction_proof: Cost(100000.into(), 2.into()),
+			base: 100000.into(),
+			headers: 10000.into(),
+			body: 15000.into(),
+			receipts: 5000.into(),
+			account: 25000.into(),
+			storage: 25000.into(),
+			code: 20000.into(),
+			header_proof: 15000.into(),
+			transaction_proof: 2.into(),
		}
	}
}
impl Encodable for CostTable { impl Encodable for CostTable {
fn rlp_append(&self, s: &mut RlpStream) { fn rlp_append(&self, s: &mut RlpStream) {
-		fn append_cost(s: &mut RlpStream, msg_id: u8, cost: &Cost) {
-			s.begin_list(3)
-				.append(&msg_id)
-				.append(&cost.0)
-				.append(&cost.1);
+		fn append_cost(s: &mut RlpStream, cost: &U256, kind: request::Kind) {
+			s.begin_list(2);
+			// hack around https://github.com/ethcore/parity/issues/4356
+			Encodable::rlp_append(&kind, s);
+			s.append(cost);
		}

-		s.begin_list(7);
-		append_cost(s, packet::GET_BLOCK_HEADERS, &self.headers);
-		append_cost(s, packet::GET_BLOCK_BODIES, &self.bodies);
-		append_cost(s, packet::GET_RECEIPTS, &self.receipts);
-		append_cost(s, packet::GET_PROOFS, &self.state_proofs);
-		append_cost(s, packet::GET_CONTRACT_CODES, &self.contract_codes);
-		append_cost(s, packet::GET_HEADER_PROOFS, &self.header_proofs);
-		append_cost(s, packet::GET_TRANSACTION_PROOF, &self.transaction_proof);
+		s.begin_list(9).append(&self.base);
+		append_cost(s, &self.headers, request::Kind::Headers);
+		append_cost(s, &self.body, request::Kind::Body);
+		append_cost(s, &self.receipts, request::Kind::Receipts);
+		append_cost(s, &self.account, request::Kind::Account);
+		append_cost(s, &self.storage, request::Kind::Storage);
+		append_cost(s, &self.code, request::Kind::Code);
+		append_cost(s, &self.header_proof, request::Kind::HeaderProof);
+		append_cost(s, &self.transaction_proof, request::Kind::Execution);
	}
} }
impl RlpDecodable for CostTable { impl Decodable for CostTable {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let rlp = decoder.as_rlp(); let base = rlp.val_at(0)?;
let mut headers = None; let mut headers = None;
let mut bodies = None; let mut body = None;
let mut receipts = None; let mut receipts = None;
let mut state_proofs = None; let mut account = None;
let mut contract_codes = None; let mut storage = None;
let mut header_proofs = None; let mut code = None;
let mut header_proof = None;
let mut transaction_proof = None; let mut transaction_proof = None;
for row in rlp.iter() { for cost_list in rlp.iter().skip(1) {
let msg_id: u8 = row.val_at(0)?; let cost = cost_list.val_at(1)?;
let cost = { match cost_list.val_at(0)? {
let base = row.val_at(1)?; request::Kind::Headers => headers = Some(cost),
let per = row.val_at(2)?; request::Kind::Body => body = Some(cost),
request::Kind::Receipts => receipts = Some(cost),
Cost(base, per) request::Kind::Account => account = Some(cost),
}; request::Kind::Storage => storage = Some(cost),
request::Kind::Code => code = Some(cost),
match msg_id { request::Kind::HeaderProof => header_proof = Some(cost),
packet::GET_BLOCK_HEADERS => headers = Some(cost), request::Kind::Execution => transaction_proof = Some(cost),
packet::GET_BLOCK_BODIES => bodies = Some(cost),
packet::GET_RECEIPTS => receipts = Some(cost),
packet::GET_PROOFS => state_proofs = Some(cost),
packet::GET_CONTRACT_CODES => contract_codes = Some(cost),
packet::GET_HEADER_PROOFS => header_proofs = Some(cost),
packet::GET_TRANSACTION_PROOF => transaction_proof = Some(cost),
_ => return Err(DecoderError::Custom("Unrecognized message in cost table")),
} }
} }
let unwrap_cost = |cost: Option<U256>| cost.ok_or(DecoderError::Custom("Not all costs specified in cost table."));
Ok(CostTable { Ok(CostTable {
headers: headers.ok_or(DecoderError::Custom("No headers cost specified"))?, base: base,
bodies: bodies.ok_or(DecoderError::Custom("No bodies cost specified"))?, headers: unwrap_cost(headers)?,
receipts: receipts.ok_or(DecoderError::Custom("No receipts cost specified"))?, body: unwrap_cost(body)?,
state_proofs: state_proofs.ok_or(DecoderError::Custom("No proofs cost specified"))?, receipts: unwrap_cost(receipts)?,
contract_codes: contract_codes.ok_or(DecoderError::Custom("No contract codes specified"))?, account: unwrap_cost(account)?,
header_proofs: header_proofs.ok_or(DecoderError::Custom("No header proofs cost specified"))?, storage: unwrap_cost(storage)?,
transaction_proof: transaction_proof.ok_or(DecoderError::Custom("No transaction proof gas cost specified"))?, code: unwrap_cost(code)?,
header_proof: unwrap_cost(header_proof)?,
transaction_proof: unwrap_cost(transaction_proof)?,
}) })
} }
} }
@ -192,17 +190,19 @@ impl FlowParams {
/// Create effectively infinite flow params. /// Create effectively infinite flow params.
pub fn free() -> Self { pub fn free() -> Self {
-		let free_cost = Cost(0.into(), 0.into());
+		let free_cost: U256 = 0.into();
FlowParams { FlowParams {
limit: (!0u64).into(), limit: (!0u64).into(),
recharge: 1.into(), recharge: 1.into(),
costs: CostTable { costs: CostTable {
+				base: free_cost.clone(),
				headers: free_cost.clone(),
-				bodies: free_cost.clone(),
+				body: free_cost.clone(),
				receipts: free_cost.clone(),
-				state_proofs: free_cost.clone(),
-				contract_codes: free_cost.clone(),
-				header_proofs: free_cost.clone(),
+				account: free_cost.clone(),
+				storage: free_cost.clone(),
+				code: free_cost.clone(),
+				header_proof: free_cost.clone(),
				transaction_proof: free_cost,
} }
} }
@ -214,61 +214,34 @@ impl FlowParams {
/// Get a reference to the cost table. /// Get a reference to the cost table.
pub fn cost_table(&self) -> &CostTable { &self.costs } pub fn cost_table(&self) -> &CostTable { &self.costs }
/// Get the base cost of a request.
pub fn base_cost(&self) -> U256 { self.costs.base }
/// Get a reference to the recharge rate. /// Get a reference to the recharge rate.
pub fn recharge_rate(&self) -> &U256 { &self.recharge } pub fn recharge_rate(&self) -> &U256 { &self.recharge }
/// Compute the actual cost of a request, given the kind of request /// Compute the actual cost of a request, given the kind of request
/// and number of requests made. /// and number of requests made.
-	pub fn compute_cost(&self, kind: request::Kind, amount: usize) -> U256 {
-		let cost = match kind {
-			request::Kind::Headers => &self.costs.headers,
-			request::Kind::Bodies => &self.costs.bodies,
-			request::Kind::Receipts => &self.costs.receipts,
-			request::Kind::StateProofs => &self.costs.state_proofs,
-			request::Kind::Codes => &self.costs.contract_codes,
-			request::Kind::HeaderProofs => &self.costs.header_proofs,
-			request::Kind::TransactionProof => &self.costs.transaction_proof,
-		};
-		let amount: U256 = amount.into();
-		cost.0 + (amount * cost.1)
-	}
+	pub fn compute_cost(&self, request: &Request) -> U256 {
+		match *request {
+			Request::Headers(ref req) => self.costs.headers * req.max.into(),
+			Request::HeaderProof(_) => self.costs.header_proof,
+			Request::Body(_) => self.costs.body,
+			Request::Receipts(_) => self.costs.receipts,
+			Request::Account(_) => self.costs.account,
+			Request::Storage(_) => self.costs.storage,
+			Request::Code(_) => self.costs.code,
+			Request::Execution(ref req) => self.costs.transaction_proof * req.gas,
+		}
+	}
/// Compute the maximum number of costs of a specific kind which can be made
/// with the given amount of credits
/// Saturates at `usize::max()`. This is not a problem in practice because
/// this amount of requests is already prohibitively large.
pub fn max_amount(&self, credits: &Credits, kind: request::Kind) -> usize {
use util::Uint;
use std::usize;
let cost = match kind {
request::Kind::Headers => &self.costs.headers,
request::Kind::Bodies => &self.costs.bodies,
request::Kind::Receipts => &self.costs.receipts,
request::Kind::StateProofs => &self.costs.state_proofs,
request::Kind::Codes => &self.costs.contract_codes,
request::Kind::HeaderProofs => &self.costs.header_proofs,
request::Kind::TransactionProof => &self.costs.transaction_proof,
};
let start = credits.current();
if start <= cost.0 {
return 0;
} else if cost.1 == U256::zero() {
return usize::MAX;
}
let max = (start - cost.0) / cost.1;
if max >= usize::MAX.into() {
usize::MAX
} else {
max.as_u64() as usize
} }
} }
-	/// Create initial credits..
+	/// Compute the cost of a set of requests.
/// This is the base cost plus the cost of each individual request.
pub fn compute_cost_multi(&self, requests: &[Request]) -> U256 {
requests.iter().fold(self.costs.base, |cost, req| cost + self.compute_cost(req))
}
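With the single-packet request format, the credit cost of a packet is the flat `base` charge plus the cost of each request it carries; per the match above, only header requests (per header) and execution requests (per gas) scale with the amount of work asked for. A worked example using the arbitrary defaults from this file (plain integers standing in for `U256`):

// Worked example of the base-plus-per-request cost, using the default table above:
// base 100_000, headers 10_000 per header, body 15_000, receipts 5_000.
fn main() {
	let base = 100_000u64;
	let (header_cost, body_cost, receipts_cost) = (10_000u64, 15_000u64, 5_000u64);

	// one Headers request for 10 headers, one Body request, one Receipts request
	let total = base + 10 * header_cost + body_cost + receipts_cost;
	assert_eq!(total, 220_000);
}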
/// Create initial credits.
pub fn create_credits(&self) -> Credits { pub fn create_credits(&self) -> Credits {
Credits { Credits {
estimate: self.limit, estimate: self.limit,


@ -24,7 +24,8 @@
use std::collections::{BTreeMap, HashMap}; use std::collections::{BTreeMap, HashMap};
use std::iter::FromIterator; use std::iter::FromIterator;
use request::{self, Request}; use request::Request;
use request::Requests;
use net::{timeout, ReqId}; use net::{timeout, ReqId};
use time::{Duration, SteadyTime}; use time::{Duration, SteadyTime};
@ -35,7 +36,7 @@ pub struct RequestSet {
counter: u64, counter: u64,
base: Option<SteadyTime>, base: Option<SteadyTime>,
ids: HashMap<ReqId, u64>, ids: HashMap<ReqId, u64>,
reqs: BTreeMap<u64, Request>, reqs: BTreeMap<u64, Requests>,
} }
impl Default for RequestSet { impl Default for RequestSet {
@ -50,8 +51,8 @@ impl Default for RequestSet {
} }
impl RequestSet { impl RequestSet {
/// Push a request onto the stack. /// Push requests onto the stack.
pub fn insert(&mut self, req_id: ReqId, req: Request, now: SteadyTime) { pub fn insert(&mut self, req_id: ReqId, req: Requests, now: SteadyTime) {
let counter = self.counter; let counter = self.counter;
self.ids.insert(req_id, counter); self.ids.insert(req_id, counter);
self.reqs.insert(counter, req); self.reqs.insert(counter, req);
@ -63,8 +64,8 @@ impl RequestSet {
self.counter += 1; self.counter += 1;
} }
/// Remove a request from the stack. /// Remove a set of requests from the stack.
pub fn remove(&mut self, req_id: &ReqId, now: SteadyTime) -> Option<Request> { pub fn remove(&mut self, req_id: &ReqId, now: SteadyTime) -> Option<Requests> {
let id = match self.ids.remove(&req_id) { let id = match self.ids.remove(&req_id) {
Some(id) => id, Some(id) => id,
None => return None, None => return None,
@ -89,22 +90,10 @@ impl RequestSet {
None => return false, None => return false,
}; };
-		let kind = self.reqs.values()
-			.next()
-			.map(|r| r.kind())
-			.expect("base time implies `reqs` non-empty; qed");
-		let kind_timeout = match kind {
-			request::Kind::Headers => timeout::HEADERS,
-			request::Kind::Bodies => timeout::BODIES,
-			request::Kind::Receipts => timeout::RECEIPTS,
-			request::Kind::StateProofs => timeout::PROOFS,
-			request::Kind::Codes => timeout::CONTRACT_CODES,
-			request::Kind::HeaderProofs => timeout::HEADER_PROOFS,
-			request::Kind::TransactionProof => timeout::TRANSACTION_PROOF,
-		};
-		base + Duration::milliseconds(kind_timeout) <= now
+		let first_req = self.reqs.values().next()
+			.expect("base existing implies `reqs` non-empty; qed");
+		base + compute_timeout(&first_req) <= now
	}
/// Collect all pending request ids. /// Collect all pending request ids.
@ -121,25 +110,43 @@ impl RequestSet {
pub fn is_empty(&self) -> bool { self.len() == 0 } pub fn is_empty(&self) -> bool { self.len() == 0 }
} }
// helper to calculate timeout for a specific set of requests.
// it's a base amount + some amount per request.
fn compute_timeout(reqs: &Requests) -> Duration {
Duration::milliseconds(reqs.requests().iter().fold(timeout::BASE, |tm, req| {
tm + match *req {
Request::Headers(_) => timeout::HEADERS,
Request::HeaderProof(_) => timeout::HEADER_PROOF,
Request::Receipts(_) => timeout::RECEIPT,
Request::Body(_) => timeout::BODY,
Request::Account(_) => timeout::PROOF,
Request::Storage(_) => timeout::PROOF,
Request::Code(_) => timeout::CONTRACT_CODE,
Request::Execution(_) => timeout::TRANSACTION_PROOF,
}
}))
}
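The replacement timeout is additive in the same way: a base allowance for the packet plus a per-request allowance chosen by request kind. The concrete `timeout::*` constants are defined elsewhere in the module and are not visible in this hunk, so the sketch below uses made-up millisecond values purely to illustrate the fold:

// Illustrative only: the millisecond figures are placeholders, not the real
// `timeout::*` constants, which do not appear in this diff.
#[derive(Clone, Copy)]
enum ReqKind { Headers, Body, Receipts }

fn compute_timeout_ms(base: u64, reqs: &[ReqKind]) -> u64 {
	reqs.iter().fold(base, |tm, req| tm + match *req {
		ReqKind::Headers => 250,
		ReqKind::Body => 100,
		ReqKind::Receipts => 100,
	})
}

fn main() {
	let reqs = [ReqKind::Headers, ReqKind::Body, ReqKind::Receipts];
	assert_eq!(compute_timeout_ms(1_000, &reqs), 1_450);
}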
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use net::{timeout, ReqId}; use net::ReqId;
use request::{Request, Receipts}; use request::RequestBuilder;
use time::{SteadyTime, Duration}; use time::{SteadyTime, Duration};
use super::RequestSet; use super::{RequestSet, compute_timeout};
#[test] #[test]
fn multi_timeout() { fn multi_timeout() {
let test_begin = SteadyTime::now(); let test_begin = SteadyTime::now();
let mut req_set = RequestSet::default(); let mut req_set = RequestSet::default();
let the_req = Request::Receipts(Receipts { block_hashes: Vec::new() }); let the_req = RequestBuilder::default().build();
let req_time = compute_timeout(&the_req);
req_set.insert(ReqId(0), the_req.clone(), test_begin); req_set.insert(ReqId(0), the_req.clone(), test_begin);
req_set.insert(ReqId(1), the_req, test_begin + Duration::seconds(1)); req_set.insert(ReqId(1), the_req, test_begin + Duration::seconds(1));
assert_eq!(req_set.base, Some(test_begin)); assert_eq!(req_set.base, Some(test_begin));
let test_end = test_begin + Duration::milliseconds(timeout::RECEIPTS); let test_end = test_begin + req_time;
assert!(req_set.check_timeout(test_end)); assert!(req_set.check_timeout(test_end));
req_set.remove(&ReqId(0), test_begin + Duration::seconds(1)).unwrap(); req_set.remove(&ReqId(0), test_begin + Duration::seconds(1)).unwrap();


@ -16,7 +16,7 @@
//! Peer status and capabilities. //! Peer status and capabilities.
use rlp::{DecoderError, RlpDecodable, Encodable, RlpStream, UntrustedRlp, View}; use rlp::{DecoderError, Encodable, Decodable, RlpStream, UntrustedRlp};
use util::{H256, U256}; use util::{H256, U256};
use super::request_credits::FlowParams; use super::request_credits::FlowParams;
@ -91,7 +91,7 @@ struct Parser<'a> {
impl<'a> Parser<'a> { impl<'a> Parser<'a> {
// expect a specific next key, and decode the value. // expect a specific next key, and decode the value.
// error on unexpected key or invalid value. // error on unexpected key or invalid value.
fn expect<T: RlpDecodable>(&mut self, key: Key) -> Result<T, DecoderError> { fn expect<T: Decodable>(&mut self, key: Key) -> Result<T, DecoderError> {
self.expect_raw(key).and_then(|item| item.as_val()) self.expect_raw(key).and_then(|item| item.as_val())
} }
@ -110,7 +110,7 @@ impl<'a> Parser<'a> {
// get the next key and value RLP. // get the next key and value RLP.
fn get_next(&mut self) -> Result<Option<(Key, UntrustedRlp<'a>)>, DecoderError> { fn get_next(&mut self) -> Result<Option<(Key, UntrustedRlp<'a>)>, DecoderError> {
while self.pos < self.rlp.item_count() { while self.pos < self.rlp.item_count()? {
let pair = self.rlp.at(self.pos)?; let pair = self.rlp.at(self.pos)?;
let k: String = pair.val_at(0)?; let k: String = pair.val_at(0)?;
@ -374,7 +374,7 @@ mod tests {
use super::*; use super::*;
use super::super::request_credits::FlowParams; use super::super::request_credits::FlowParams;
use util::{U256, H256}; use util::{U256, H256};
use rlp::{RlpStream, UntrustedRlp, View}; use rlp::{RlpStream, UntrustedRlp};
#[test] #[test]
fn full_handshake() { fn full_handshake() {
@ -474,7 +474,7 @@ mod tests {
let handshake = write_handshake(&status, &capabilities, Some(&flow_params)); let handshake = write_handshake(&status, &capabilities, Some(&flow_params));
let interleaved = { let interleaved = {
let handshake = UntrustedRlp::new(&handshake); let handshake = UntrustedRlp::new(&handshake);
let mut stream = RlpStream::new_list(handshake.item_count() * 3); let mut stream = RlpStream::new_list(handshake.item_count().unwrap_or(0) * 3);
for item in handshake.iter() { for item in handshake.iter() {
stream.append_raw(item.as_raw(), 1); stream.append_raw(item.as_raw(), 1);


@ -27,15 +27,31 @@ use network::{PeerId, NodeId};
use net::request_credits::FlowParams; use net::request_credits::FlowParams;
use net::context::IoContext; use net::context::IoContext;
use net::status::{Capabilities, Status, write_handshake}; use net::status::{Capabilities, Status, write_handshake};
use net::{encode_request, LightProtocol, Params, packet, Peer}; use net::{LightProtocol, Params, packet, Peer};
use provider::Provider; use provider::Provider;
use request::{self, Request, Headers}; use request;
use request::*;
use rlp::*; use rlp::*;
use util::{Address, Bytes, DBValue, H256, U256}; use util::{Address, H256, U256};
use std::sync::Arc; use std::sync::Arc;
// helper for encoding a single request into a packet.
// panics on bad backreference.
fn encode_single(request: Request) -> Requests {
let mut builder = RequestBuilder::default();
builder.push(request).unwrap();
builder.build()
}
// helper for making a packet out of `Requests`.
fn make_packet(req_id: usize, requests: &Requests) -> Vec<u8> {
let mut stream = RlpStream::new_list(2);
stream.append(&req_id).append_list(&requests.requests());
stream.out()
}
// expected result from a call. // expected result from a call.
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
enum Expect { enum Expect {
@ -99,35 +115,45 @@ impl Provider for TestProvider {
self.0.client.block_header(id) self.0.client.block_header(id)
} }
fn block_body(&self, id: BlockId) -> Option<encoded::Body> { fn block_body(&self, req: request::CompleteBodyRequest) -> Option<request::BodyResponse> {
self.0.client.block_body(id) self.0.client.block_body(req)
} }
fn block_receipts(&self, hash: &H256) -> Option<Bytes> { fn block_receipts(&self, req: request::CompleteReceiptsRequest) -> Option<request::ReceiptsResponse> {
self.0.client.block_receipts(&hash) self.0.client.block_receipts(req)
} }
fn state_proof(&self, req: request::StateProof) -> Vec<Bytes> { fn account_proof(&self, req: request::CompleteAccountRequest) -> Option<request::AccountResponse> {
match req.key2 {
Some(_) => vec![::util::sha3::SHA3_NULL_RLP.to_vec()],
None => {
// sort of a leaf node // sort of a leaf node
let mut stream = RlpStream::new_list(2); let mut stream = RlpStream::new_list(2);
stream.append(&req.key1).append_empty_data(); stream.append(&req.address_hash).append_empty_data();
vec![stream.out()] Some(AccountResponse {
} proof: vec![stream.out()],
} balance: 10.into(),
nonce: 100.into(),
code_hash: Default::default(),
storage_root: Default::default(),
})
} }
fn contract_code(&self, req: request::ContractCode) -> Bytes { fn storage_proof(&self, req: request::CompleteStorageRequest) -> Option<request::StorageResponse> {
req.account_key.iter().chain(req.account_key.iter()).cloned().collect() Some(StorageResponse {
proof: vec![::rlp::encode(&req.key_hash).to_vec()],
value: req.key_hash | req.address_hash,
})
} }
fn header_proof(&self, _req: request::HeaderProof) -> Option<(encoded::Header, Vec<Bytes>)> { fn contract_code(&self, req: request::CompleteCodeRequest) -> Option<request::CodeResponse> {
Some(CodeResponse {
code: req.block_hash.iter().chain(req.code_hash.iter()).cloned().collect(),
})
}
fn header_proof(&self, _req: request::CompleteHeaderProofRequest) -> Option<request::HeaderProofResponse> {
None None
} }
fn transaction_proof(&self, _req: request::TransactionProof) -> Option<Vec<DBValue>> { fn transaction_proof(&self, _req: request::CompleteExecutionRequest) -> Option<request::ExecutionResponse> {
None None
} }
@ -226,14 +252,15 @@ fn credit_overflow() {
} }
// 1000 requests is far too many for the default flow params. // 1000 requests is far too many for the default flow params.
let request = encode_request(&Request::Headers(Headers { let requests = encode_single(Request::Headers(IncompleteHeadersRequest {
start: 1.into(), start: HashOrNumber::Number(1).into(),
max: 1000, max: 1000,
skip: 0, skip: 0,
reverse: false, reverse: false,
}), 111); }));
let request = make_packet(111, &requests);
proto.handle_packet(&Expect::Punish(1), &1, packet::GET_BLOCK_HEADERS, &request); proto.handle_packet(&Expect::Punish(1), &1, packet::REQUEST, &request);
} }
// test the basic request types -- these just make sure that requests are parsed // test the basic request types -- these just make sure that requests are parsed
@ -259,33 +286,36 @@ fn get_block_headers() {
proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &my_status); proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &my_status);
} }
let request = Headers { let request = Request::Headers(IncompleteHeadersRequest {
start: 1.into(), start: HashOrNumber::Number(1).into(),
max: 10, max: 10,
skip: 0, skip: 0,
reverse: false, reverse: false,
}; });
let req_id = 111; let req_id = 111;
let request_body = encode_request(&Request::Headers(request.clone()), req_id); let requests = encode_single(request.clone());
let request_body = make_packet(req_id, &requests);
let response = { let response = {
let headers: Vec<_> = (0..10).map(|i| provider.client.block_header(BlockId::Number(i + 1)).unwrap()).collect(); let headers: Vec<_> = (0..10).map(|i| provider.client.block_header(BlockId::Number(i + 1)).unwrap()).collect();
assert_eq!(headers.len(), 10); assert_eq!(headers.len(), 10);
let new_creds = *flow_params.limit() - flow_params.compute_cost(request::Kind::Headers, 10); let new_creds = *flow_params.limit() - flow_params.compute_cost_multi(requests.requests());
let mut response_stream = RlpStream::new_list(3); let response = vec![Response::Headers(HeadersResponse {
headers: headers,
})];
response_stream.append(&req_id).append(&new_creds).begin_list(10); let mut stream = RlpStream::new_list(3);
for header in headers { stream.append(&req_id).append(&new_creds).append_list(&response);
response_stream.append_raw(&header.into_inner(), 1);
}
response_stream.out() stream.out()
}; };
let expected = Expect::Respond(packet::BLOCK_HEADERS, response); let expected = Expect::Respond(packet::RESPONSE, response);
proto.handle_packet(&expected, &1, packet::GET_BLOCK_HEADERS, &request_body); proto.handle_packet(&expected, &1, packet::REQUEST, &request_body);
} }
#[test] #[test]
@ -308,33 +338,32 @@ fn get_block_bodies() {
proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &my_status); proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &my_status);
} }
let request = request::Bodies { let mut builder = RequestBuilder::default();
block_hashes: (0..10).map(|i| let mut bodies = Vec::new();
provider.client.block_header(BlockId::Number(i)).unwrap().hash()
).collect()
};
for i in 0..10 {
let hash = provider.client.block_header(BlockId::Number(i)).unwrap().hash();
builder.push(Request::Body(IncompleteBodyRequest {
hash: hash.into(),
})).unwrap();
bodies.push(Response::Body(provider.client.block_body(CompleteBodyRequest {
hash: hash,
}).unwrap()));
}
let req_id = 111; let req_id = 111;
let requests = builder.build();
let request_body = make_packet(req_id, &requests);
let request_body = encode_request(&Request::Bodies(request.clone()), req_id);
let response = { let response = {
let bodies: Vec<_> = (0..10).map(|i| provider.client.block_body(BlockId::Number(i + 1)).unwrap()).collect(); let new_creds = *flow_params.limit() - flow_params.compute_cost_multi(requests.requests());
assert_eq!(bodies.len(), 10);
let new_creds = *flow_params.limit() - flow_params.compute_cost(request::Kind::Bodies, 10);
let mut response_stream = RlpStream::new_list(3); let mut response_stream = RlpStream::new_list(3);
response_stream.append(&req_id).append(&new_creds).append_list(&bodies);
response_stream.append(&req_id).append(&new_creds).begin_list(10);
for body in bodies {
response_stream.append_raw(&body.into_inner(), 1);
}
response_stream.out() response_stream.out()
}; };
let expected = Expect::Respond(packet::BLOCK_BODIES, response); let expected = Expect::Respond(packet::RESPONSE, response);
proto.handle_packet(&expected, &1, packet::GET_BLOCK_BODIES, &request_body); proto.handle_packet(&expected, &1, packet::REQUEST, &request_body);
} }
#[test] #[test]
@ -359,36 +388,37 @@ fn get_block_receipts() {
// find the first 10 block hashes starting with `f` because receipts are only provided // find the first 10 block hashes starting with `f` because receipts are only provided
// by the test client in that case. // by the test client in that case.
let block_hashes: Vec<_> = (0..1000).map(|i| let block_hashes: Vec<H256> = (0..1000)
provider.client.block_header(BlockId::Number(i)).unwrap().hash() .map(|i| provider.client.block_header(BlockId::Number(i)).unwrap().hash())
).filter(|hash| format!("{}", hash).starts_with("f")).take(10).collect(); .filter(|hash| format!("{}", hash).starts_with("f"))
.take(10)
let request = request::Receipts {
block_hashes: block_hashes.clone(),
};
let req_id = 111;
let request_body = encode_request(&Request::Receipts(request.clone()), req_id);
let response = {
let receipts: Vec<_> = block_hashes.iter()
.map(|hash| provider.client.block_receipts(hash).unwrap())
.collect(); .collect();
let new_creds = *flow_params.limit() - flow_params.compute_cost(request::Kind::Receipts, receipts.len()); let mut builder = RequestBuilder::default();
let mut receipts = Vec::new();
let mut response_stream = RlpStream::new_list(3); for hash in block_hashes.iter().cloned() {
builder.push(Request::Receipts(IncompleteReceiptsRequest { hash: hash.into() })).unwrap();
response_stream.append(&req_id).append(&new_creds).begin_list(receipts.len()); receipts.push(Response::Receipts(provider.client.block_receipts(CompleteReceiptsRequest {
for block_receipts in receipts { hash: hash
response_stream.append_raw(&block_receipts, 1); }).unwrap()));
} }
let req_id = 111;
let requests = builder.build();
let request_body = make_packet(req_id, &requests);
let response = {
assert_eq!(receipts.len(), 10);
let new_creds = *flow_params.limit() - flow_params.compute_cost_multi(requests.requests());
let mut response_stream = RlpStream::new_list(3);
response_stream.append(&req_id).append(&new_creds).append_list(&receipts);
response_stream.out() response_stream.out()
}; };
let expected = Expect::Respond(packet::RECEIPTS, response); let expected = Expect::Respond(packet::RESPONSE, response);
proto.handle_packet(&expected, &1, packet::GET_RECEIPTS, &request_body); proto.handle_packet(&expected, &1, packet::REQUEST, &request_body);
} }
#[test] #[test]
@ -397,8 +427,9 @@ fn get_state_proofs() {
let capabilities = capabilities(); let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone()); let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let provider = TestProvider(provider);
let cur_status = status(provider.client.chain_info()); let cur_status = status(provider.0.client.chain_info());
{ {
let packet_body = write_handshake(&cur_status, &capabilities, Some(&flow_params)); let packet_body = write_handshake(&cur_status, &capabilities, Some(&flow_params));
@ -407,40 +438,45 @@ fn get_state_proofs() {
} }
let req_id = 112; let req_id = 112;
let key1 = U256::from(11223344).into(); let key1: H256 = U256::from(11223344).into();
let key2 = U256::from(99988887).into(); let key2: H256 = U256::from(99988887).into();
let request = Request::StateProofs (request::StateProofs { let mut builder = RequestBuilder::default();
requests: vec![ builder.push(Request::Account(IncompleteAccountRequest {
request::StateProof { block: H256::default(), key1: key1, key2: None, from_level: 0 }, block_hash: H256::default().into(),
request::StateProof { block: H256::default(), key1: key1, key2: Some(key2), from_level: 0}, address_hash: key1.into(),
] })).unwrap();
}); builder.push(Request::Storage(IncompleteStorageRequest {
block_hash: H256::default().into(),
address_hash: key1.into(),
key_hash: key2.into(),
})).unwrap();
let request_body = encode_request(&request, req_id); let requests = builder.build();
let request_body = make_packet(req_id, &requests);
let response = { let response = {
let proofs = vec![ let responses = vec![
{ let mut stream = RlpStream::new_list(2); stream.append(&key1).append_empty_data(); vec![stream.out()] }, Response::Account(provider.account_proof(CompleteAccountRequest {
vec![::util::sha3::SHA3_NULL_RLP.to_vec()], block_hash: H256::default(),
address_hash: key1,
}).unwrap()),
Response::Storage(provider.storage_proof(CompleteStorageRequest {
block_hash: H256::default(),
address_hash: key1,
key_hash: key2,
}).unwrap()),
]; ];
let new_creds = *flow_params.limit() - flow_params.compute_cost(request::Kind::StateProofs, 2); let new_creds = *flow_params.limit() - flow_params.compute_cost_multi(requests.requests());
let mut response_stream = RlpStream::new_list(3); let mut response_stream = RlpStream::new_list(3);
response_stream.append(&req_id).append(&new_creds).append_list(&responses);
response_stream.append(&req_id).append(&new_creds).begin_list(2);
for proof in proofs {
response_stream.begin_list(proof.len());
for node in proof {
response_stream.append_raw(&node, 1);
}
}
response_stream.out() response_stream.out()
}; };
let expected = Expect::Respond(packet::PROOFS, response); let expected = Expect::Respond(packet::RESPONSE, response);
proto.handle_packet(&expected, &1, packet::GET_PROOFS, &request_body); proto.handle_packet(&expected, &1, packet::REQUEST, &request_body);
} }
#[test] #[test]
@ -459,37 +495,31 @@ fn get_contract_code() {
} }
let req_id = 112; let req_id = 112;
let key1 = U256::from(11223344).into(); let key1: H256 = U256::from(11223344).into();
let key2 = U256::from(99988887).into(); let key2: H256 = U256::from(99988887).into();
let request = Request::Codes (request::ContractCodes { let request = Request::Code(IncompleteCodeRequest {
code_requests: vec![ block_hash: key1.into(),
request::ContractCode { block_hash: H256::default(), account_key: key1 }, code_hash: key2.into(),
request::ContractCode { block_hash: H256::default(), account_key: key2 },
],
}); });
let request_body = encode_request(&request, req_id); let requests = encode_single(request.clone());
let request_body = make_packet(req_id, &requests);
let response = { let response = {
let codes: Vec<Vec<_>> = vec![ let response = vec![Response::Code(CodeResponse {
key1.iter().chain(key1.iter()).cloned().collect(), code: key1.iter().chain(key2.iter()).cloned().collect(),
key2.iter().chain(key2.iter()).cloned().collect(), })];
];
let new_creds = *flow_params.limit() - flow_params.compute_cost(request::Kind::Codes, 2); let new_creds = *flow_params.limit() - flow_params.compute_cost_multi(requests.requests());
let mut response_stream = RlpStream::new_list(3); let mut response_stream = RlpStream::new_list(3);
response_stream.append(&req_id).append(&new_creds).begin_list(2); response_stream.append(&req_id).append(&new_creds).append_list(&response);
for code in codes {
response_stream.append(&code);
}
response_stream.out() response_stream.out()
}; };
let expected = Expect::Respond(packet::CONTRACT_CODES, response); let expected = Expect::Respond(packet::RESPONSE, response);
proto.handle_packet(&expected, &1, packet::GET_CONTRACT_CODES, &request_body); proto.handle_packet(&expected, &1, packet::REQUEST, &request_body);
} }
#[test] #[test]
@ -508,8 +538,8 @@ fn proof_of_execution() {
} }
let req_id = 112; let req_id = 112;
let mut request = Request::TransactionProof (request::TransactionProof { let mut request = Request::Execution(request::IncompleteExecutionRequest {
at: H256::default(), block_hash: H256::default().into(),
from: Address::default(), from: Address::default(),
action: Action::Call(Address::default()), action: Action::Call(Address::default()),
gas: 100.into(), gas: 100.into(),
@ -519,9 +549,11 @@ fn proof_of_execution() {
}); });
// first: a valid amount to request execution of. // first: a valid amount to request execution of.
let request_body = encode_request(&request, req_id); let requests = encode_single(request.clone());
let request_body = make_packet(req_id, &requests);
let response = { let response = {
let new_creds = *flow_params.limit() - flow_params.compute_cost(request::Kind::TransactionProof, 100); let new_creds = *flow_params.limit() - flow_params.compute_cost_multi(requests.requests());
let mut response_stream = RlpStream::new_list(3); let mut response_stream = RlpStream::new_list(3);
response_stream.append(&req_id).append(&new_creds).begin_list(0); response_stream.append(&req_id).append(&new_creds).begin_list(0);
@ -529,17 +561,19 @@ fn proof_of_execution() {
response_stream.out() response_stream.out()
}; };
let expected = Expect::Respond(packet::TRANSACTION_PROOF, response); let expected = Expect::Respond(packet::RESPONSE, response);
proto.handle_packet(&expected, &1, packet::GET_TRANSACTION_PROOF, &request_body); proto.handle_packet(&expected, &1, packet::REQUEST, &request_body);
// next: way too much requested gas. // next: way too much requested gas.
if let Request::TransactionProof(ref mut req) = request { if let Request::Execution(ref mut req) = request {
req.gas = 100_000_000.into(); req.gas = 100_000_000.into();
} }
let req_id = 113; let req_id = 113;
let request_body = encode_request(&request, req_id); let requests = encode_single(request.clone());
let request_body = make_packet(req_id, &requests);
let expected = Expect::Punish(1); let expected = Expect::Punish(1);
proto.handle_packet(&expected, &1, packet::GET_TRANSACTION_PROOF, &request_body); proto.handle_packet(&expected, &1, packet::REQUEST, &request_body);
} }
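Both halves of this test follow from one rule: the cost charged for an execution request scales with the gas it asks the server to spend, and a request the peer cannot afford is answered with punishment rather than a response. A minimal stand-alone sketch of that decision; the cost-per-gas constant and the Outcome type are illustrative, not the protocol's actual cost table.

    // Illustrative only: the real cost table and punishment path live in the light protocol.
    #[derive(Debug, PartialEq)]
    enum Outcome { Respond, Punish }

    struct ExecutionRequest { gas: u64 }

    fn handle_execution(req: &ExecutionRequest, available_credits: u64, cost_per_gas: u64) -> Outcome {
        // Cost grows linearly with the gas the requester asks the server to burn.
        match req.gas.checked_mul(cost_per_gas) {
            Some(cost) if cost <= available_credits => Outcome::Respond,
            _ => Outcome::Punish, // overflow or unaffordable request
        }
    }

    fn main() {
        let credits = 1_000_000;
        assert_eq!(handle_execution(&ExecutionRequest { gas: 100 }, credits, 10), Outcome::Respond);
        // "way too much requested gas" -> the peer gets punished instead of served.
        assert_eq!(handle_execution(&ExecutionRequest { gas: 100_000_000 }, credits, 10), Outcome::Punish);
    }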
#[test] #[test]
@ -554,12 +588,13 @@ fn id_guard() {
let req_id_1 = ReqId(5143); let req_id_1 = ReqId(5143);
let req_id_2 = ReqId(1111); let req_id_2 = ReqId(1111);
let req = Request::Headers(request::Headers {
start: 5u64.into(), let req = encode_single(Request::Headers(IncompleteHeadersRequest {
start: HashOrNumber::Number(5u64).into(),
max: 100, max: 100,
skip: 0, skip: 0,
reverse: false, reverse: false,
}); }));
let peer_id = 9876; let peer_id = 9876;
@ -579,15 +614,15 @@ fn id_guard() {
failed_requests: Vec::new(), failed_requests: Vec::new(),
})); }));
// first, supply wrong request type. // first, malformed responses.
{ {
let mut stream = RlpStream::new_list(3); let mut stream = RlpStream::new_list(3);
stream.append(&req_id_1.0); stream.append(&req_id_1.0);
stream.append(&4_000_000usize); stream.append(&4_000_000usize);
stream.begin_list(0); stream.begin_list(2).append(&125usize).append(&3usize);
let packet = stream.out(); let packet = stream.out();
assert!(proto.block_bodies(&peer_id, &Expect::Nothing, UntrustedRlp::new(&packet)).is_err()); assert!(proto.response(&peer_id, &Expect::Nothing, UntrustedRlp::new(&packet)).is_err());
} }
// next, do an unexpected response. // next, do an unexpected response.
@ -598,7 +633,7 @@ fn id_guard() {
stream.begin_list(0); stream.begin_list(0);
let packet = stream.out(); let packet = stream.out();
assert!(proto.receipts(&peer_id, &Expect::Nothing, UntrustedRlp::new(&packet)).is_err()); assert!(proto.response(&peer_id, &Expect::Nothing, UntrustedRlp::new(&packet)).is_err());
} }
// lastly, do a valid (but empty) response. // lastly, do a valid (but empty) response.
@ -609,7 +644,7 @@ fn id_guard() {
stream.begin_list(0); stream.begin_list(0);
let packet = stream.out(); let packet = stream.out();
assert!(proto.block_headers(&peer_id, &Expect::Nothing, UntrustedRlp::new(&packet)).is_ok()); assert!(proto.response(&peer_id, &Expect::Nothing, UntrustedRlp::new(&packet)).is_ok());
} }
let peers = proto.peers.read(); let peers = proto.peers.read();
@ -18,6 +18,9 @@
//! The request service is implemented using Futures. Higher level request handlers //! The request service is implemented using Futures. Higher level request handlers
//! will take the raw data received here and extract meaningful results from it. //! will take the raw data received here and extract meaningful results from it.
// TODO [ToDr] Suppressing deprecation warnings. Rob will fix the API anyway.
#![allow(deprecated)]
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
@ -31,12 +34,12 @@ use futures::{Async, Poll, Future};
use futures::sync::oneshot::{self, Sender, Receiver}; use futures::sync::oneshot::{self, Sender, Receiver};
use network::PeerId; use network::PeerId;
use rlp::RlpStream; use rlp::RlpStream;
use util::{Bytes, DBValue, RwLock, Mutex, U256}; use util::{Bytes, RwLock, Mutex, U256, H256};
use util::sha3::{SHA3_NULL_RLP, SHA3_EMPTY_LIST_RLP}; use util::sha3::{SHA3_NULL_RLP, SHA3_EMPTY_LIST_RLP};
use net::{Handler, Status, Capabilities, Announcement, EventContext, BasicContext, ReqId}; use net::{Handler, Status, Capabilities, Announcement, EventContext, BasicContext, ReqId};
use cache::Cache; use cache::Cache;
use types::les_request::{self as les_request, Request as LesRequest}; use request::{self as basic_request, Request as NetworkRequest, Response as NetworkResponse};
pub mod request; pub mod request;
@ -46,24 +49,85 @@ struct Peer {
capabilities: Capabilities, capabilities: Capabilities,
} }
impl Peer {
// Whether a given peer can handle a specific request.
fn can_handle(&self, pending: &Pending) -> bool {
match *pending {
Pending::HeaderProof(ref req, _) =>
self.capabilities.serve_headers && self.status.head_num > req.num(),
Pending::HeaderByHash(_, _) => self.capabilities.serve_headers,
Pending::Block(ref req, _) =>
self.capabilities.serve_chain_since.as_ref().map_or(false, |x| *x >= req.header.number()),
Pending::BlockReceipts(ref req, _) =>
self.capabilities.serve_chain_since.as_ref().map_or(false, |x| *x >= req.0.number()),
Pending::Account(ref req, _) =>
self.capabilities.serve_state_since.as_ref().map_or(false, |x| *x >= req.header.number()),
Pending::Code(ref req, _) =>
self.capabilities.serve_state_since.as_ref().map_or(false, |x| *x >= req.block_id.1),
Pending::TxProof(ref req, _) =>
self.capabilities.serve_state_since.as_ref().map_or(false, |x| *x >= req.header.number()),
}
}
}
// Which portions of a CHT proof should be sent. // Which portions of a CHT proof should be sent.
enum ChtProofSender { enum ChtProofSender {
Both(Sender<(encoded::Header, U256)>), Both(Sender<(H256, U256)>),
Header(Sender<encoded::Header>), Hash(Sender<H256>),
ChainScore(Sender<U256>), ChainScore(Sender<U256>),
} }
// Attempted request info and sender to put received value. // Attempted request info and sender to put received value.
enum Pending { enum Pending {
HeaderByNumber(request::HeaderByNumber, ChtProofSender), HeaderProof(request::HeaderProof, ChtProofSender),
HeaderByHash(request::HeaderByHash, Sender<encoded::Header>), HeaderByHash(request::HeaderByHash, Sender<encoded::Header>),
Block(request::Body, Sender<encoded::Block>), Block(request::Body, Sender<encoded::Block>),
BlockReceipts(request::BlockReceipts, Sender<Vec<Receipt>>), BlockReceipts(request::BlockReceipts, Sender<Vec<Receipt>>),
Account(request::Account, Sender<BasicAccount>), Account(request::Account, Sender<Option<BasicAccount>>),
Code(request::Code, Sender<Bytes>), Code(request::Code, Sender<Bytes>),
TxProof(request::TransactionProof, Sender<Result<Executed, ExecutionError>>), TxProof(request::TransactionProof, Sender<Result<Executed, ExecutionError>>),
} }
impl Pending {
// Create a network request.
fn make_request(&self) -> NetworkRequest {
match *self {
Pending::HeaderByHash(ref req, _) => NetworkRequest::Headers(basic_request::IncompleteHeadersRequest {
start: basic_request::HashOrNumber::Hash(req.0).into(),
skip: 0,
max: 1,
reverse: false,
}),
Pending::HeaderProof(ref req, _) => NetworkRequest::HeaderProof(basic_request::IncompleteHeaderProofRequest {
num: req.num().into(),
}),
Pending::Block(ref req, _) => NetworkRequest::Body(basic_request::IncompleteBodyRequest {
hash: req.hash.into(),
}),
Pending::BlockReceipts(ref req, _) => NetworkRequest::Receipts(basic_request::IncompleteReceiptsRequest {
hash: req.0.hash().into(),
}),
Pending::Account(ref req, _) => NetworkRequest::Account(basic_request::IncompleteAccountRequest {
block_hash: req.header.hash().into(),
address_hash: ::util::Hashable::sha3(&req.address).into(),
}),
Pending::Code(ref req, _) => NetworkRequest::Code(basic_request::IncompleteCodeRequest {
block_hash: req.block_id.0.into(),
code_hash: req.code_hash.into(),
}),
Pending::TxProof(ref req, _) => NetworkRequest::Execution(basic_request::IncompleteExecutionRequest {
block_hash: req.header.hash().into(),
from: req.tx.sender(),
gas: req.tx.gas,
gas_price: req.tx.gas_price,
action: req.tx.action.clone(),
value: req.tx.value,
data: req.tx.data.clone(),
}),
}
}
}
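Each Pending variant carries enough information to rebuild its single network request on demand, which is what lets a request be re-dispatched later without keeping the original wire encoding around. A compact stand-in for that mapping (the enums below are placeholders for the real Pending and NetworkRequest types):

    // Placeholder types standing in for the real Pending / NetworkRequest pairs.
    #[derive(Debug, PartialEq)]
    enum WireRequest {
        Headers { start: u64, max: u64 },
        Body { hash: [u8; 32] },
    }

    enum Pending {
        HeaderByNumber { num: u64 },
        Block { hash: [u8; 32] },
    }

    impl Pending {
        // Rebuild the single wire request this pending entry corresponds to.
        fn make_request(&self) -> WireRequest {
            match *self {
                Pending::HeaderByNumber { num } => WireRequest::Headers { start: num, max: 1 },
                Pending::Block { hash } => WireRequest::Body { hash },
            }
        }
    }

    fn main() {
        let pending = Pending::HeaderByNumber { num: 7 };
        assert_eq!(pending.make_request(), WireRequest::Headers { start: 7, max: 1 });
    }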
/// On demand request service. See module docs for more details. /// On demand request service. See module docs for more details.
/// Accumulates info about all peers' capabilities and dispatches /// Accumulates info about all peers' capabilities and dispatches
/// requests to them accordingly. /// requests to them accordingly.
@ -74,6 +138,8 @@ pub struct OnDemand {
orphaned_requests: RwLock<Vec<Pending>>, orphaned_requests: RwLock<Vec<Pending>>,
} }
const RECEIVER_IN_SCOPE: &'static str = "Receiver is still in scope, so it's not dropped; qed";
impl OnDemand { impl OnDemand {
/// Create a new `OnDemand` service with the given cache. /// Create a new `OnDemand` service with the given cache.
pub fn new(cache: Arc<Mutex<Cache>>) -> Self { pub fn new(cache: Arc<Mutex<Cache>>) -> Self {
@ -85,25 +151,25 @@ impl OnDemand {
} }
} }
/// Request a header by block number and CHT root hash. /// Request a header's hash by block number and CHT root hash.
/// Returns the header. /// Returns the hash.
pub fn header_by_number(&self, ctx: &BasicContext, req: request::HeaderByNumber) -> Receiver<encoded::Header> { pub fn hash_by_number(&self, ctx: &BasicContext, req: request::HeaderProof) -> Receiver<H256> {
let (sender, receiver) = oneshot::channel(); let (sender, receiver) = oneshot::channel();
let cached = { let cached = {
let mut cache = self.cache.lock(); let mut cache = self.cache.lock();
cache.block_hash(&req.num()).and_then(|hash| cache.block_header(&hash)) cache.block_hash(&req.num())
}; };
match cached { match cached {
Some(hdr) => sender.complete(hdr), Some(hash) => sender.send(hash).expect(RECEIVER_IN_SCOPE),
None => self.dispatch_header_by_number(ctx, req, ChtProofSender::Header(sender)), None => self.dispatch(ctx, Pending::HeaderProof(req, ChtProofSender::Hash(sender))),
} }
receiver receiver
} }
/// Request a canonical block's chain score. /// Request a canonical block's chain score.
/// Returns the chain score. /// Returns the chain score.
pub fn chain_score_by_number(&self, ctx: &BasicContext, req: request::HeaderByNumber) -> Receiver<U256> { pub fn chain_score_by_number(&self, ctx: &BasicContext, req: request::HeaderProof) -> Receiver<U256> {
let (sender, receiver) = oneshot::channel(); let (sender, receiver) = oneshot::channel();
let cached = { let cached = {
let mut cache = self.cache.lock(); let mut cache = self.cache.lock();
@ -111,123 +177,46 @@ impl OnDemand {
}; };
match cached { match cached {
Some(score) => sender.complete(score), Some(score) => sender.send(score).expect(RECEIVER_IN_SCOPE),
None => self.dispatch_header_by_number(ctx, req, ChtProofSender::ChainScore(sender)), None => self.dispatch(ctx, Pending::HeaderProof(req, ChtProofSender::ChainScore(sender))),
} }
receiver receiver
} }
/// Request a canonical block's chain score. /// Request a canonical block's hash and chain score by number.
/// Returns the header and chain score. /// Returns the hash and chain score.
pub fn header_and_score_by_number(&self, ctx: &BasicContext, req: request::HeaderByNumber) -> Receiver<(encoded::Header, U256)> { pub fn hash_and_score_by_number(&self, ctx: &BasicContext, req: request::HeaderProof) -> Receiver<(H256, U256)> {
let (sender, receiver) = oneshot::channel(); let (sender, receiver) = oneshot::channel();
let cached = { let cached = {
let mut cache = self.cache.lock(); let mut cache = self.cache.lock();
let hash = cache.block_hash(&req.num()); let hash = cache.block_hash(&req.num());
( (
hash.clone().and_then(|hash| cache.block_header(&hash)), hash.clone(),
hash.and_then(|hash| cache.chain_score(&hash)), hash.and_then(|hash| cache.chain_score(&hash)),
) )
}; };
match cached { match cached {
(Some(hdr), Some(score)) => sender.complete((hdr, score)), (Some(hash), Some(score)) => sender.send((hash, score)).expect(RECEIVER_IN_SCOPE),
_ => self.dispatch_header_by_number(ctx, req, ChtProofSender::Both(sender)), _ => self.dispatch(ctx, Pending::HeaderProof(req, ChtProofSender::Both(sender))),
} }
receiver receiver
} }
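Each of these entry points follows the same cache-first pattern: open a oneshot channel, complete it immediately on a cache hit, otherwise queue the request for dispatch and hand the receiver back to the caller. Below is a stripped-down sketch of that flow using std::sync::mpsc in place of the futures oneshot channel; the Cache and OnDemand types here are placeholders, not the real ones.

    use std::collections::HashMap;
    use std::sync::mpsc::{channel, Receiver, Sender};

    // Placeholder cache: block number -> block hash.
    struct Cache { hashes: HashMap<u64, [u8; 32]> }

    struct OnDemand { cache: Cache, orphaned: Vec<(u64, Sender<[u8; 32]>)> }

    impl OnDemand {
        /// Request a block hash by number: answered from cache when possible,
        /// otherwise queued until a suitable peer can serve it.
        fn hash_by_number(&mut self, num: u64) -> Receiver<[u8; 32]> {
            let (sender, receiver) = channel();
            match self.cache.hashes.get(&num).cloned() {
                // Cache hit: complete the request immediately.
                Some(hash) => { let _ = sender.send(hash); }
                // Cache miss: remember the request; a dispatcher would pick it up later.
                None => self.orphaned.push((num, sender)),
            }
            receiver
        }
    }

    fn main() {
        let mut cache = Cache { hashes: HashMap::new() };
        cache.hashes.insert(1, [0xab; 32]);
        let mut on_demand = OnDemand { cache, orphaned: Vec::new() };

        let hit = on_demand.hash_by_number(1);
        assert_eq!(hit.recv().unwrap(), [0xab; 32]);

        let miss = on_demand.hash_by_number(2);
        assert!(miss.try_recv().is_err()); // still pending, queued for dispatch
        assert_eq!(on_demand.orphaned.len(), 1);
    }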
// dispatch the request, completing the request if no peers available.
fn dispatch_header_by_number(&self, ctx: &BasicContext, req: request::HeaderByNumber, sender: ChtProofSender) {
let num = req.num();
let cht_num = req.cht_num();
let les_req = LesRequest::HeaderProofs(les_request::HeaderProofs {
requests: vec![les_request::HeaderProof {
cht_number: cht_num,
block_number: num,
from_level: 0,
}],
});
let pending = Pending::HeaderByNumber(req, sender);
// we're looking for a peer with serveHeaders who's far enough along in the
// chain.
for (id, peer) in self.peers.read().iter() {
if peer.capabilities.serve_headers && peer.status.head_num >= num {
match ctx.request_from(*id, les_req.clone()) {
Ok(req_id) => {
trace!(target: "on_demand", "Assigning request to peer {}", id);
self.pending_requests.write().insert(
req_id,
pending,
);
return
},
Err(e) =>
trace!(target: "on_demand", "Failed to make request of peer {}: {:?}", id, e),
}
}
}
trace!(target: "on_demand", "No suitable peer for request");
self.orphaned_requests.write().push(pending)
}
/// Request a header by hash. This is less accurate than by-number because we don't know /// Request a header by hash. This is less accurate than by-number because we don't know
/// where in the chain this header lies, and therefore can't find a peer who is supposed to have /// where in the chain this header lies, and therefore can't find a peer who is supposed to have
/// it as easily. /// it as easily.
pub fn header_by_hash(&self, ctx: &BasicContext, req: request::HeaderByHash) -> Receiver<encoded::Header> { pub fn header_by_hash(&self, ctx: &BasicContext, req: request::HeaderByHash) -> Receiver<encoded::Header> {
let (sender, receiver) = oneshot::channel(); let (sender, receiver) = oneshot::channel();
match self.cache.lock().block_header(&req.0) { match self.cache.lock().block_header(&req.0) {
Some(hdr) => sender.complete(hdr), Some(hdr) => sender.send(hdr).expect(RECEIVER_IN_SCOPE),
None => self.dispatch_header_by_hash(ctx, req, sender), None => self.dispatch(ctx, Pending::HeaderByHash(req, sender)),
} }
receiver receiver
} }
fn dispatch_header_by_hash(&self, ctx: &BasicContext, req: request::HeaderByHash, sender: Sender<encoded::Header>) {
let les_req = LesRequest::Headers(les_request::Headers {
start: req.0.into(),
max: 1,
skip: 0,
reverse: false,
});
// all we've got is a hash, so we'll just guess at peers who might have
// it randomly.
let mut potential_peers = self.peers.read().iter()
.filter(|&(_, peer)| peer.capabilities.serve_headers)
.map(|(id, _)| *id)
.collect::<Vec<_>>();
let mut rng = ::rand::thread_rng();
::rand::Rng::shuffle(&mut rng, &mut potential_peers);
let pending = Pending::HeaderByHash(req, sender);
for id in potential_peers {
match ctx.request_from(id, les_req.clone()) {
Ok(req_id) => {
trace!(target: "on_demand", "Assigning request to peer {}", id);
self.pending_requests.write().insert(
req_id,
pending,
);
return
}
Err(e) =>
trace!(target: "on_demand", "Failed to make request of peer {}: {:?}", id, e),
}
}
trace!(target: "on_demand", "No suitable peer for request");
self.orphaned_requests.write().push(pending)
}
/// Request a block, given its header. Block bodies are requestable by hash only, /// Request a block, given its header. Block bodies are requestable by hash only,
/// and the header is required anyway to verify and complete the block body /// and the header is required anyway to verify and complete the block body
/// -- this just doesn't obscure the network query. /// -- this just doesn't obscure the network query.
@ -241,7 +230,7 @@ impl OnDemand {
stream.begin_list(0); stream.begin_list(0);
stream.begin_list(0); stream.begin_list(0);
sender.complete(encoded::Block::new(stream.out())) sender.send(encoded::Block::new(stream.out())).expect(RECEIVER_IN_SCOPE);
} else { } else {
match self.cache.lock().block_body(&req.hash) { match self.cache.lock().block_body(&req.hash) {
Some(body) => { Some(body) => {
@ -249,43 +238,14 @@ impl OnDemand {
stream.append_raw(&req.header.into_inner(), 1); stream.append_raw(&req.header.into_inner(), 1);
stream.append_raw(&body.into_inner(), 2); stream.append_raw(&body.into_inner(), 2);
sender.complete(encoded::Block::new(stream.out())); sender.send(encoded::Block::new(stream.out())).expect(RECEIVER_IN_SCOPE);
} }
None => self.dispatch_block(ctx, req, sender), None => self.dispatch(ctx, Pending::Block(req, sender)),
} }
} }
receiver receiver
} }
fn dispatch_block(&self, ctx: &BasicContext, req: request::Body, sender: Sender<encoded::Block>) {
let num = req.header.number();
let les_req = LesRequest::Bodies(les_request::Bodies {
block_hashes: vec![req.hash],
});
let pending = Pending::Block(req, sender);
// we're looking for a peer with serveChainSince(num)
for (id, peer) in self.peers.read().iter() {
if peer.capabilities.serve_chain_since.as_ref().map_or(false, |x| *x >= num) {
match ctx.request_from(*id, les_req.clone()) {
Ok(req_id) => {
trace!(target: "on_demand", "Assigning request to peer {}", id);
self.pending_requests.write().insert(
req_id,
pending,
);
return
}
Err(e) =>
trace!(target: "on_demand", "Failed to make request of peer {}: {:?}", id, e),
}
}
}
trace!(target: "on_demand", "No suitable peer for request");
self.orphaned_requests.write().push(pending)
}
/// Request the receipts for a block. The header serves two purposes: /// Request the receipts for a block. The header serves two purposes:
/// provide the block hash to fetch receipts for, and for verification of the receipts root. /// provide the block hash to fetch receipts for, and for verification of the receipts root.
pub fn block_receipts(&self, ctx: &BasicContext, req: request::BlockReceipts) -> Receiver<Vec<Receipt>> { pub fn block_receipts(&self, ctx: &BasicContext, req: request::BlockReceipts) -> Receiver<Vec<Receipt>> {
@ -293,165 +253,64 @@ impl OnDemand {
// fast path for empty receipts. // fast path for empty receipts.
if req.0.receipts_root() == SHA3_NULL_RLP { if req.0.receipts_root() == SHA3_NULL_RLP {
sender.complete(Vec::new()) sender.send(Vec::new()).expect(RECEIVER_IN_SCOPE);
} else { } else {
match self.cache.lock().block_receipts(&req.0.hash()) { match self.cache.lock().block_receipts(&req.0.hash()) {
Some(receipts) => sender.complete(receipts), Some(receipts) => sender.send(receipts).expect(RECEIVER_IN_SCOPE),
None => self.dispatch_block_receipts(ctx, req, sender), None => self.dispatch(ctx, Pending::BlockReceipts(req, sender)),
} }
} }
receiver receiver
} }
fn dispatch_block_receipts(&self, ctx: &BasicContext, req: request::BlockReceipts, sender: Sender<Vec<Receipt>>) {
let num = req.0.number();
let les_req = LesRequest::Receipts(les_request::Receipts {
block_hashes: vec![req.0.hash()],
});
let pending = Pending::BlockReceipts(req, sender);
// we're looking for a peer with serveChainSince(num)
for (id, peer) in self.peers.read().iter() {
if peer.capabilities.serve_chain_since.as_ref().map_or(false, |x| *x >= num) {
match ctx.request_from(*id, les_req.clone()) {
Ok(req_id) => {
trace!(target: "on_demand", "Assigning request to peer {}", id);
self.pending_requests.write().insert(
req_id,
pending,
);
return
}
Err(e) =>
trace!(target: "on_demand", "Failed to make request of peer {}: {:?}", id, e),
}
}
}
trace!(target: "on_demand", "No suitable peer for request");
self.orphaned_requests.write().push(pending)
}
/// Request an account by address and block header -- which gives a hash to query and a state root /// Request an account by address and block header -- which gives a hash to query and a state root
/// to verify against. /// to verify against.
pub fn account(&self, ctx: &BasicContext, req: request::Account) -> Receiver<BasicAccount> { pub fn account(&self, ctx: &BasicContext, req: request::Account) -> Receiver<Option<BasicAccount>> {
let (sender, receiver) = oneshot::channel(); let (sender, receiver) = oneshot::channel();
self.dispatch_account(ctx, req, sender); self.dispatch(ctx, Pending::Account(req, sender));
receiver receiver
} }
fn dispatch_account(&self, ctx: &BasicContext, req: request::Account, sender: Sender<BasicAccount>) {
let num = req.header.number();
let les_req = LesRequest::StateProofs(les_request::StateProofs {
requests: vec![les_request::StateProof {
block: req.header.hash(),
key1: ::util::Hashable::sha3(&req.address),
key2: None,
from_level: 0,
}],
});
let pending = Pending::Account(req, sender);
// we're looking for a peer with serveStateSince(num)
for (id, peer) in self.peers.read().iter() {
if peer.capabilities.serve_state_since.as_ref().map_or(false, |x| *x >= num) {
match ctx.request_from(*id, les_req.clone()) {
Ok(req_id) => {
trace!(target: "on_demand", "Assigning request to peer {}", id);
self.pending_requests.write().insert(
req_id,
pending,
);
return
}
Err(e) =>
trace!(target: "on_demand", "Failed to make request of peer {}: {:?}", id, e),
}
}
}
trace!(target: "on_demand", "No suitable peer for request");
self.orphaned_requests.write().push(pending)
}
/// Request code by address, known code hash, and block header. /// Request code by address, known code hash, and block header.
pub fn code(&self, ctx: &BasicContext, req: request::Code) -> Receiver<Bytes> { pub fn code(&self, ctx: &BasicContext, req: request::Code) -> Receiver<Bytes> {
let (sender, receiver) = oneshot::channel(); let (sender, receiver) = oneshot::channel();
// fast path for no code. // fast path for no code.
if req.code_hash == ::util::sha3::SHA3_EMPTY { if req.code_hash == ::util::sha3::SHA3_EMPTY {
sender.complete(Vec::new()) sender.send(Vec::new()).expect(RECEIVER_IN_SCOPE)
} else { } else {
self.dispatch_code(ctx, req, sender); self.dispatch(ctx, Pending::Code(req, sender));
} }
receiver receiver
} }
fn dispatch_code(&self, ctx: &BasicContext, req: request::Code, sender: Sender<Bytes>) {
let num = req.block_id.1;
let les_req = LesRequest::Codes(les_request::ContractCodes {
code_requests: vec![les_request::ContractCode {
block_hash: req.block_id.0,
account_key: ::util::Hashable::sha3(&req.address),
}]
});
let pending = Pending::Code(req, sender);
// we're looking for a peer with serveStateSince(num)
for (id, peer) in self.peers.read().iter() {
if peer.capabilities.serve_state_since.as_ref().map_or(false, |x| *x >= num) {
match ctx.request_from(*id, les_req.clone()) {
Ok(req_id) => {
trace!(target: "on_demand", "Assigning request to peer {}", id);
self.pending_requests.write().insert(
req_id,
pending
);
return
}
Err(e) =>
trace!(target: "on_demand", "Failed to make request of peer {}: {:?}", id, e),
}
}
}
trace!(target: "on_demand", "No suitable peer for request");
self.orphaned_requests.write().push(pending)
}
/// Request proof-of-execution for a transaction. /// Request proof-of-execution for a transaction.
pub fn transaction_proof(&self, ctx: &BasicContext, req: request::TransactionProof) -> Receiver<Result<Executed, ExecutionError>> { pub fn transaction_proof(&self, ctx: &BasicContext, req: request::TransactionProof) -> Receiver<Result<Executed, ExecutionError>> {
let (sender, receiver) = oneshot::channel(); let (sender, receiver) = oneshot::channel();
self.dispatch_transaction_proof(ctx, req, sender); self.dispatch(ctx, Pending::TxProof(req, sender));
receiver receiver
} }
fn dispatch_transaction_proof(&self, ctx: &BasicContext, req: request::TransactionProof, sender: Sender<Result<Executed, ExecutionError>>) { // dispatch the request, with a "suitability" function to filter acceptable peers.
let num = req.header.number(); fn dispatch(&self, ctx: &BasicContext, pending: Pending) {
let les_req = LesRequest::TransactionProof(les_request::TransactionProof { let mut builder = basic_request::RequestBuilder::default();
at: req.header.hash(), builder.push(pending.make_request())
from: req.tx.sender(), .expect("make_request always returns fully complete request; qed");
gas: req.tx.gas,
gas_price: req.tx.gas_price, let complete = builder.build();
action: req.tx.action.clone(),
value: req.tx.value,
data: req.tx.data.clone(),
});
let pending = Pending::TxProof(req, sender);
// we're looking for a peer with serveStateSince(num)
for (id, peer) in self.peers.read().iter() { for (id, peer) in self.peers.read().iter() {
if peer.capabilities.serve_state_since.as_ref().map_or(false, |x| *x >= num) { if !peer.can_handle(&pending) { continue }
match ctx.request_from(*id, les_req.clone()) { match ctx.request_from(*id, complete.clone()) {
Ok(req_id) => { Ok(req_id) => {
trace!(target: "on_demand", "Assigning request to peer {}", id); trace!(target: "on_demand", "Assigning request to peer {}", id);
self.pending_requests.write().insert( self.pending_requests.write().insert(
req_id, req_id,
pending pending,
); );
return return
} }
@ -459,12 +318,12 @@ impl OnDemand {
trace!(target: "on_demand", "Failed to make request of peer {}: {:?}", id, e), trace!(target: "on_demand", "Failed to make request of peer {}: {:?}", id, e),
} }
} }
}
trace!(target: "on_demand", "No suitable peer for request"); trace!(target: "on_demand", "No suitable peer for request");
self.orphaned_requests.write().push(pending) self.orphaned_requests.write().push(pending);
} }
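The refactor collapses the old per-request dispatch_* functions into a single loop: every pending request can say which peers are able to serve it, so dispatch walks the peer table, skips unsuitable peers and orphans the request when nobody accepts it. A self-contained sketch of that shape follows; Peer, Pending and Dispatcher are simplified stand-ins, and the capability check mirrors only the serve_headers / head_num case.

    use std::collections::HashMap;

    // Simplified peer record: does it serve headers, and how far along is it?
    struct Peer { serve_headers: bool, head_num: u64 }

    // Simplified pending request: needs a header at a given block number.
    struct Pending { needs_header_at: u64 }

    impl Peer {
        fn can_handle(&self, pending: &Pending) -> bool {
            self.serve_headers && self.head_num >= pending.needs_header_at
        }
    }

    struct Dispatcher { peers: HashMap<u32, Peer>, orphaned: Vec<Pending> }

    impl Dispatcher {
        // Returns the peer id the request was assigned to, if any.
        fn dispatch(&mut self, pending: Pending) -> Option<u32> {
            for (&id, peer) in &self.peers {
                if !peer.can_handle(&pending) { continue }
                // In the real code this is where ctx.request_from(id, ...) is attempted,
                // falling through to the next peer on error.
                return Some(id);
            }
            // No suitable peer: keep the request around for the orphaned-request sweep.
            self.orphaned.push(pending);
            None
        }
    }

    fn main() {
        let mut peers = HashMap::new();
        peers.insert(1, Peer { serve_headers: true, head_num: 1_000 });
        peers.insert(2, Peer { serve_headers: false, head_num: 5_000 });
        let mut d = Dispatcher { peers, orphaned: Vec::new() };

        assert_eq!(d.dispatch(Pending { needs_header_at: 500 }), Some(1));
        assert_eq!(d.dispatch(Pending { needs_header_at: 2_000 }), None); // orphaned
        assert_eq!(d.orphaned.len(), 1);
    }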
// dispatch orphaned requests, and discard those for which the corresponding // dispatch orphaned requests, and discard those for which the corresponding
// receiver has been dropped. // receiver has been dropped.
fn dispatch_orphaned(&self, ctx: &BasicContext) { fn dispatch_orphaned(&self, ctx: &BasicContext) {
@ -494,30 +353,22 @@ impl OnDemand {
let to_dispatch = ::std::mem::replace(&mut *self.orphaned_requests.write(), Vec::new()); let to_dispatch = ::std::mem::replace(&mut *self.orphaned_requests.write(), Vec::new());
for orphaned in to_dispatch { for mut orphaned in to_dispatch {
match orphaned { let hung_up = match orphaned {
Pending::HeaderByNumber(req, mut sender) => { Pending::HeaderProof(_, ref mut sender) => match *sender {
let hangup = match sender {
ChtProofSender::Both(ref mut s) => check_hangup(s), ChtProofSender::Both(ref mut s) => check_hangup(s),
ChtProofSender::Header(ref mut s) => check_hangup(s), ChtProofSender::Hash(ref mut s) => check_hangup(s),
ChtProofSender::ChainScore(ref mut s) => check_hangup(s), ChtProofSender::ChainScore(ref mut s) => check_hangup(s),
},
Pending::HeaderByHash(_, ref mut sender) => check_hangup(sender),
Pending::Block(_, ref mut sender) => check_hangup(sender),
Pending::BlockReceipts(_, ref mut sender) => check_hangup(sender),
Pending::Account(_, ref mut sender) => check_hangup(sender),
Pending::Code(_, ref mut sender) => check_hangup(sender),
Pending::TxProof(_, ref mut sender) => check_hangup(sender),
}; };
if !hangup { self.dispatch_header_by_number(ctx, req, sender) } if !hung_up { self.dispatch(ctx, orphaned) }
}
Pending::HeaderByHash(req, mut sender) =>
if !check_hangup(&mut sender) { self.dispatch_header_by_hash(ctx, req, sender) },
Pending::Block(req, mut sender) =>
if !check_hangup(&mut sender) { self.dispatch_block(ctx, req, sender) },
Pending::BlockReceipts(req, mut sender) =>
if !check_hangup(&mut sender) { self.dispatch_block_receipts(ctx, req, sender) },
Pending::Account(req, mut sender) =>
if !check_hangup(&mut sender) { self.dispatch_account(ctx, req, sender) },
Pending::Code(req, mut sender) =>
if !check_hangup(&mut sender) { self.dispatch_code(ctx, req, sender) },
Pending::TxProof(req, mut sender) =>
if !check_hangup(&mut sender) { self.dispatch_transaction_proof(ctx, req, sender) }
}
} }
} }
} }
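dispatch_orphaned first drops any queued request whose receiver has been dropped by the caller, since there is no point re-submitting work nobody is waiting for. The real code asks the futures oneshot sender (via the check_hangup helper) whether the other end hung up; the std-only sketch below just demonstrates the underlying signal, a send that fails once the receiver is gone.

    use std::sync::mpsc::channel;

    fn main() {
        // A request whose caller is still waiting.
        let (alive_tx, alive_rx) = channel::<u64>();
        // A request whose caller gave up: dropping the receiver hangs up the channel.
        let (dead_tx, dead_rx) = channel::<u64>();
        drop(dead_rx);

        // Completing the live request succeeds; the dead one reports the hang-up,
        // so an orphaned-request sweep can simply discard it instead of re-dispatching.
        assert!(alive_tx.send(42).is_ok());
        assert!(dead_tx.send(42).is_err());
        assert_eq!(alive_rx.recv().unwrap(), 42);
    }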
@ -555,218 +406,126 @@ impl Handler for OnDemand {
self.dispatch_orphaned(ctx.as_basic()); self.dispatch_orphaned(ctx.as_basic());
} }
fn on_header_proofs(&self, ctx: &EventContext, req_id: ReqId, proofs: &[(Bytes, Vec<Bytes>)]) { fn on_responses(&self, ctx: &EventContext, req_id: ReqId, responses: &[basic_request::Response]) {
let peer = ctx.peer(); let peer = ctx.peer();
let req = match self.pending_requests.write().remove(&req_id) { let req = match self.pending_requests.write().remove(&req_id) {
Some(req) => req, Some(req) => req,
None => return, None => return,
}; };
let response = match responses.get(0) {
Some(response) => response,
None => {
trace!(target: "on_demand", "Ignoring empty response for request {}", req_id);
self.dispatch(ctx.as_basic(), req);
return;
}
};
// handle the response appropriately for the request.
// all branches which do not return early lead to disabling of the peer
// due to misbehavior.
match req { match req {
Pending::HeaderByNumber(req, sender) => { Pending::HeaderProof(req, sender) => {
if let Some(&(ref header, ref proof)) = proofs.get(0) { if let NetworkResponse::HeaderProof(ref response) = *response {
match req.check_response(header, proof) { match req.check_response(&response.proof) {
Ok((header, score)) => { Ok((hash, score)) => {
let mut cache = self.cache.lock(); let mut cache = self.cache.lock();
let hash = header.hash(); cache.insert_block_hash(req.num(), hash);
cache.insert_block_header(hash, header.clone());
cache.insert_block_hash(header.number(), hash);
cache.insert_chain_score(hash, score); cache.insert_chain_score(hash, score);
match sender { match sender {
ChtProofSender::Both(sender) => sender.complete((header, score)), ChtProofSender::Both(sender) => { let _ = sender.send((hash, score)); }
ChtProofSender::Header(sender) => sender.complete(header), ChtProofSender::Hash(sender) => { let _ = sender.send(hash); }
ChtProofSender::ChainScore(sender) => sender.complete(score), ChtProofSender::ChainScore(sender) => { let _ = sender.send(score); }
} }
return return
} }
Err(e) => { Err(e) => warn!("Error handling response for header request: {:?}", e),
warn!("Error handling response for header request: {:?}", e);
ctx.disable_peer(peer);
} }
} }
} }
self.dispatch_header_by_number(ctx.as_basic(), req, sender);
}
_ => panic!("Only header by number request fetches header proofs; qed"),
}
}
fn on_block_headers(&self, ctx: &EventContext, req_id: ReqId, headers: &[Bytes]) {
let peer = ctx.peer();
let req = match self.pending_requests.write().remove(&req_id) {
Some(req) => req,
None => return,
};
match req {
Pending::HeaderByHash(req, sender) => { Pending::HeaderByHash(req, sender) => {
if let Some(ref header) = headers.get(0) { if let NetworkResponse::Headers(ref response) = *response {
if let Some(header) = response.headers.get(0) {
match req.check_response(header) { match req.check_response(header) {
Ok(header) => { Ok(header) => {
self.cache.lock().insert_block_header(req.0, header.clone()); self.cache.lock().insert_block_header(req.0, header.clone());
sender.complete(header); let _ = sender.send(header);
return return
} }
Err(e) => { Err(e) => warn!("Error handling response for header request: {:?}", e),
warn!("Error handling response for header request: {:?}", e);
ctx.disable_peer(peer);
} }
} }
} }
self.dispatch_header_by_hash(ctx.as_basic(), req, sender);
} }
_ => panic!("Only header by hash request fetches headers; qed"),
}
}
fn on_block_bodies(&self, ctx: &EventContext, req_id: ReqId, bodies: &[Bytes]) {
let peer = ctx.peer();
let req = match self.pending_requests.write().remove(&req_id) {
Some(req) => req,
None => return,
};
match req {
Pending::Block(req, sender) => { Pending::Block(req, sender) => {
if let Some(ref body) = bodies.get(0) { if let NetworkResponse::Body(ref response) = *response {
match req.check_response(body) { match req.check_response(&response.body) {
Ok(block) => { Ok(block) => {
let body = encoded::Body::new(body.to_vec()); self.cache.lock().insert_block_body(req.hash, response.body.clone());
self.cache.lock().insert_block_body(req.hash, body); let _ = sender.send(block);
sender.complete(block);
return return
} }
Err(e) => { Err(e) => warn!("Error handling response for block request: {:?}", e),
warn!("Error handling response for block request: {:?}", e);
ctx.disable_peer(peer);
} }
} }
} }
self.dispatch_block(ctx.as_basic(), req, sender);
}
_ => panic!("Only block request fetches bodies; qed"),
}
}
fn on_receipts(&self, ctx: &EventContext, req_id: ReqId, receipts: &[Vec<Receipt>]) {
let peer = ctx.peer();
let req = match self.pending_requests.write().remove(&req_id) {
Some(req) => req,
None => return,
};
match req {
Pending::BlockReceipts(req, sender) => { Pending::BlockReceipts(req, sender) => {
if let Some(ref receipts) = receipts.get(0) { if let NetworkResponse::Receipts(ref response) = *response {
match req.check_response(receipts) { match req.check_response(&response.receipts) {
Ok(receipts) => { Ok(receipts) => {
let hash = req.0.hash(); let hash = req.0.hash();
self.cache.lock().insert_block_receipts(hash, receipts.clone()); self.cache.lock().insert_block_receipts(hash, receipts.clone());
sender.complete(receipts); let _ = sender.send(receipts);
return return
} }
Err(e) => { Err(e) => warn!("Error handling response for receipts request: {:?}", e),
warn!("Error handling response for receipts request: {:?}", e);
ctx.disable_peer(peer);
} }
} }
} }
self.dispatch_block_receipts(ctx.as_basic(), req, sender);
}
_ => panic!("Only receipts request fetches receipts; qed"),
}
}
fn on_state_proofs(&self, ctx: &EventContext, req_id: ReqId, proofs: &[Vec<Bytes>]) {
let peer = ctx.peer();
let req = match self.pending_requests.write().remove(&req_id) {
Some(req) => req,
None => return,
};
match req {
Pending::Account(req, sender) => { Pending::Account(req, sender) => {
if let Some(ref proof) = proofs.get(0) { if let NetworkResponse::Account(ref response) = *response {
match req.check_response(proof) { match req.check_response(&response.proof) {
Ok(proof) => { Ok(maybe_account) => {
sender.complete(proof); // TODO: validate against request outputs.
// needs engine + env info as part of request.
let _ = sender.send(maybe_account);
return return
} }
Err(e) => { Err(e) => warn!("Error handling response for state request: {:?}", e),
warn!("Error handling response for state request: {:?}", e);
ctx.disable_peer(peer);
} }
} }
} }
self.dispatch_account(ctx.as_basic(), req, sender);
}
_ => panic!("Only account request fetches state proof; qed"),
}
}
fn on_code(&self, ctx: &EventContext, req_id: ReqId, codes: &[Bytes]) {
let peer = ctx.peer();
let req = match self.pending_requests.write().remove(&req_id) {
Some(req) => req,
None => return,
};
match req {
Pending::Code(req, sender) => { Pending::Code(req, sender) => {
if let Some(code) = codes.get(0) { if let NetworkResponse::Code(ref response) = *response {
match req.check_response(code.as_slice()) { match req.check_response(response.code.as_slice()) {
Ok(()) => { Ok(()) => {
sender.complete(code.clone()); let _ = sender.send(response.code.clone());
return return
} }
Err(e) => { Err(e) => warn!("Error handling response for code request: {:?}", e),
warn!("Error handling response for code request: {:?}", e);
ctx.disable_peer(peer);
} }
} }
self.dispatch_code(ctx.as_basic(), req, sender);
} }
}
_ => panic!("Only code request fetches code; qed"),
}
}
fn on_transaction_proof(&self, ctx: &EventContext, req_id: ReqId, items: &[DBValue]) {
let peer = ctx.peer();
let req = match self.pending_requests.write().remove(&req_id) {
Some(req) => req,
None => return,
};
match req {
Pending::TxProof(req, sender) => { Pending::TxProof(req, sender) => {
match req.check_response(items) { if let NetworkResponse::Execution(ref response) = *response {
match req.check_response(&response.items) {
ProvedExecution::Complete(executed) => { ProvedExecution::Complete(executed) => {
sender.complete(Ok(executed)); let _ = sender.send(Ok(executed));
return return
} }
ProvedExecution::Failed(err) => { ProvedExecution::Failed(err) => {
sender.complete(Err(err)); let _ = sender.send(Err(err));
return return
} }
ProvedExecution::BadProof => { ProvedExecution::BadProof => warn!("Error handling response for transaction proof request"),
warn!("Error handling response for transaction proof request"); }
ctx.disable_peer(peer); }
} }
} }
self.dispatch_transaction_proof(ctx.as_basic(), req, sender); ctx.disable_peer(peer);
}
_ => panic!("Only transaction proof request dispatches transaction proof requests; qed"),
}
} }
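With a single RESPONSE packet kind, one handler now receives every reply and has to check that the response variant actually matches the pending request; a mismatch or a failed check falls through to disabling the peer. A reduced sketch of that pairing; the enums and the header/code checks are stand-ins, not the real Pending and NetworkResponse types.

    #[derive(Debug, PartialEq)]
    enum Verdict { Completed, DisablePeer }

    enum Pending { Header { want_number: u64 }, Code }

    enum Response { Headers(Vec<u64>), Code(Vec<u8>) }

    // Pair a pending request with the single response we were given; anything that
    // does not match (wrong variant, failed check) ends in peer punishment.
    fn handle(pending: Pending, response: &Response) -> Verdict {
        match pending {
            Pending::Header { want_number } => {
                if let Response::Headers(ref numbers) = *response {
                    if numbers.first() == Some(&want_number) {
                        return Verdict::Completed;
                    }
                }
            }
            Pending::Code => {
                if let Response::Code(ref code) = *response {
                    if !code.is_empty() {
                        return Verdict::Completed;
                    }
                }
            }
        }
        Verdict::DisablePeer
    }

    fn main() {
        assert_eq!(handle(Pending::Header { want_number: 5 }, &Response::Headers(vec![5])), Verdict::Completed);
        // Wrong response kind for the request -> the peer is misbehaving.
        assert_eq!(handle(Pending::Code, &Response::Headers(vec![5])), Verdict::DisablePeer);
    }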
fn tick(&self, ctx: &BasicContext) { fn tick(&self, ctx: &BasicContext) {
@ -782,7 +541,7 @@ mod tests {
use cache::Cache; use cache::Cache;
use net::{Announcement, BasicContext, ReqId, Error as LesError}; use net::{Announcement, BasicContext, ReqId, Error as LesError};
use request::{Request as LesRequest, Kind as LesRequestKind}; use request::Requests;
use network::{PeerId, NodeId}; use network::{PeerId, NodeId};
use time::Duration; use time::Duration;
@ -792,11 +551,10 @@ mod tests {
impl BasicContext for FakeContext { impl BasicContext for FakeContext {
fn persistent_peer_id(&self, _: PeerId) -> Option<NodeId> { None } fn persistent_peer_id(&self, _: PeerId) -> Option<NodeId> { None }
fn request_from(&self, _: PeerId, _: LesRequest) -> Result<ReqId, LesError> { fn request_from(&self, _: PeerId, _: Requests) -> Result<ReqId, LesError> {
unimplemented!() unimplemented!()
} }
fn make_announcement(&self, _: Announcement) { } fn make_announcement(&self, _: Announcement) { }
fn max_requests(&self, _: PeerId, _: LesRequestKind) -> usize { 0 }
fn disconnect_peer(&self, _: PeerId) { } fn disconnect_peer(&self, _: PeerId) { }
fn disable_peer(&self, _: PeerId) { } fn disable_peer(&self, _: PeerId) { }
} }
@ -26,7 +26,7 @@ use ethcore::receipt::Receipt;
use ethcore::state::{self, ProvedExecution}; use ethcore::state::{self, ProvedExecution};
use ethcore::transaction::SignedTransaction; use ethcore::transaction::SignedTransaction;
use rlp::{RlpStream, UntrustedRlp, View}; use rlp::{RlpStream, UntrustedRlp};
use util::{Address, Bytes, DBValue, HashDB, H256, U256}; use util::{Address, Bytes, DBValue, HashDB, H256, U256};
use util::memorydb::MemoryDB; use util::memorydb::MemoryDB;
use util::sha3::Hashable; use util::sha3::Hashable;
@ -61,9 +61,9 @@ impl From<Box<TrieError>> for Error {
} }
} }
/// Request for a header by number. /// Request for header proof by number
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct HeaderByNumber { pub struct HeaderProof {
/// The header's number. /// The header's number.
num: u64, num: u64,
/// The cht number for the given block number. /// The cht number for the given block number.
@ -72,11 +72,11 @@ pub struct HeaderByNumber {
cht_root: H256, cht_root: H256,
} }
impl HeaderByNumber { impl HeaderProof {
/// Construct a new header-by-number request. Fails if the given number is 0. /// Construct a new header-by-number request. Fails if the given number is 0.
/// Provide the expected CHT root to compare against. /// Provide the expected CHT root to compare against.
pub fn new(num: u64, cht_root: H256) -> Option<Self> { pub fn new(num: u64, cht_root: H256) -> Option<Self> {
::cht::block_to_cht_number(num).map(|cht_num| HeaderByNumber { ::cht::block_to_cht_number(num).map(|cht_num| HeaderProof {
num: num, num: num,
cht_num: cht_num, cht_num: cht_num,
cht_root: cht_root, cht_root: cht_root,
@ -92,18 +92,11 @@ impl HeaderByNumber {
/// Access the expected CHT root. /// Access the expected CHT root.
pub fn cht_root(&self) -> H256 { self.cht_root } pub fn cht_root(&self) -> H256 { self.cht_root }
/// Check a response with a header and cht proof. /// Check a response with a CHT proof, get a hash and total difficulty back.
pub fn check_response(&self, header: &[u8], proof: &[Bytes]) -> Result<(encoded::Header, U256), Error> { pub fn check_response(&self, proof: &[Bytes]) -> Result<(H256, U256), Error> {
let (expected_hash, td) = match ::cht::check_proof(proof, self.num, self.cht_root) { match ::cht::check_proof(proof, self.num, self.cht_root) {
Some((expected_hash, td)) => (expected_hash, td), Some((expected_hash, td)) => Ok((expected_hash, td)),
None => return Err(Error::BadProof), None => Err(Error::BadProof),
};
// and compare the hash to the found header.
let found_hash = header.sha3();
match expected_hash == found_hash {
true => Ok((encoded::Header::new(header.to_vec()), td)),
false => Err(Error::WrongHash(expected_hash, found_hash)),
} }
} }
} }
@ -114,10 +107,10 @@ pub struct HeaderByHash(pub H256);
impl HeaderByHash { impl HeaderByHash {
/// Check a response for the header. /// Check a response for the header.
pub fn check_response(&self, header: &[u8]) -> Result<encoded::Header, Error> { pub fn check_response(&self, header: &encoded::Header) -> Result<encoded::Header, Error> {
let hash = header.sha3(); let hash = header.sha3();
match hash == self.0 { match hash == self.0 {
true => Ok(encoded::Header::new(header.to_vec())), true => Ok(header.clone()),
false => Err(Error::WrongHash(self.0, hash)), false => Err(Error::WrongHash(self.0, hash)),
} }
} }
@ -143,16 +136,14 @@ impl Body {
} }
/// Check a response for this block body. /// Check a response for this block body.
pub fn check_response(&self, body: &[u8]) -> Result<encoded::Block, Error> { pub fn check_response(&self, body: &encoded::Body) -> Result<encoded::Block, Error> {
let body_view = UntrustedRlp::new(&body);
// check the integrity of the body against the header // check the integrity of the body against the header
let tx_root = ::util::triehash::ordered_trie_root(body_view.at(0)?.iter().map(|r| r.as_raw().to_vec())); let tx_root = ::util::triehash::ordered_trie_root(body.rlp().at(0).iter().map(|r| r.as_raw().to_vec()));
if tx_root != self.header.transactions_root() { if tx_root != self.header.transactions_root() {
return Err(Error::WrongTrieRoot(self.header.transactions_root(), tx_root)); return Err(Error::WrongTrieRoot(self.header.transactions_root(), tx_root));
} }
let uncles_hash = body_view.at(1)?.as_raw().sha3(); let uncles_hash = body.rlp().at(1).as_raw().sha3();
if uncles_hash != self.header.uncles_hash() { if uncles_hash != self.header.uncles_hash() {
return Err(Error::WrongHash(self.header.uncles_hash(), uncles_hash)); return Err(Error::WrongHash(self.header.uncles_hash(), uncles_hash));
} }
@ -160,7 +151,7 @@ impl Body {
// concatenate the header and the body. // concatenate the header and the body.
let mut stream = RlpStream::new_list(3); let mut stream = RlpStream::new_list(3);
stream.append_raw(self.header.rlp().as_raw(), 1); stream.append_raw(self.header.rlp().as_raw(), 1);
stream.append_raw(body, 2); stream.append_raw(&body.rlp().as_raw(), 2);
Ok(encoded::Block::new(stream.out())) Ok(encoded::Block::new(stream.out()))
} }
@ -194,7 +185,7 @@ pub struct Account {
impl Account { impl Account {
/// Check a response with an account against the stored header. /// Check a response with an account against the stored header.
pub fn check_response(&self, proof: &[Bytes]) -> Result<BasicAccount, Error> { pub fn check_response(&self, proof: &[Bytes]) -> Result<Option<BasicAccount>, Error> {
let state_root = self.header.state_root(); let state_root = self.header.state_root();
let mut db = MemoryDB::new(); let mut db = MemoryDB::new();
@ -203,14 +194,14 @@ impl Account {
match TrieDB::new(&db, &state_root).and_then(|t| t.get(&self.address.sha3()))? { match TrieDB::new(&db, &state_root).and_then(|t| t.get(&self.address.sha3()))? {
Some(val) => { Some(val) => {
let rlp = UntrustedRlp::new(&val); let rlp = UntrustedRlp::new(&val);
Ok(BasicAccount { Ok(Some(BasicAccount {
nonce: rlp.val_at(0)?, nonce: rlp.val_at(0)?,
balance: rlp.val_at(1)?, balance: rlp.val_at(1)?,
storage_root: rlp.val_at(2)?, storage_root: rlp.val_at(2)?,
code_hash: rlp.val_at(3)?, code_hash: rlp.val_at(3)?,
}) }))
}, },
None => Err(Error::BadProof) None => Ok(None),
} }
} }
} }
@ -219,8 +210,6 @@ impl Account {
pub struct Code { pub struct Code {
/// Block hash, number pair. /// Block hash, number pair.
pub block_id: (H256, u64), pub block_id: (H256, u64),
/// Address requested.
pub address: Address,
/// Account's code hash. /// Account's code hash.
pub code_hash: H256, pub code_hash: H256,
} }
@ -278,11 +267,11 @@ mod tests {
#[test] #[test]
fn no_invalid_header_by_number() { fn no_invalid_header_by_number() {
assert!(HeaderByNumber::new(0, Default::default()).is_none()) assert!(HeaderProof::new(0, Default::default()).is_none())
} }
#[test] #[test]
fn check_header_by_number() { fn check_header_proof() {
use ::cht; use ::cht;
let test_client = TestBlockChainClient::new(); let test_client = TestBlockChainClient::new();
@ -303,11 +292,9 @@ mod tests {
}; };
let proof = cht.prove(10_000, 0).unwrap().unwrap(); let proof = cht.prove(10_000, 0).unwrap().unwrap();
let req = HeaderByNumber::new(10_000, cht.root()).unwrap(); let req = HeaderProof::new(10_000, cht.root()).unwrap();
let raw_header = test_client.block_header(::ethcore::ids::BlockId::Number(10_000)).unwrap(); assert!(req.check_response(&proof[..]).is_ok());
assert!(req.check_response(&raw_header.into_inner(), &proof[..]).is_ok());
} }
#[test] #[test]
@ -316,9 +303,9 @@ mod tests {
header.set_number(10_000); header.set_number(10_000);
header.set_extra_data(b"test_header".to_vec()); header.set_extra_data(b"test_header".to_vec());
let hash = header.hash(); let hash = header.hash();
let raw_header = ::rlp::encode(&header); let raw_header = encoded::Header::new(::rlp::encode(&header).to_vec());
assert!(HeaderByHash(hash).check_response(&*raw_header).is_ok()) assert!(HeaderByHash(hash).check_response(&raw_header).is_ok())
} }
#[test] #[test]
@ -334,7 +321,8 @@ mod tests {
hash: header.hash(), hash: header.hash(),
}; };
assert!(req.check_response(&*body_stream.drain()).is_ok()) let response = encoded::Body::new(body_stream.drain().to_vec());
assert!(req.check_response(&response).is_ok())
} }
#[test] #[test]
@ -412,7 +400,6 @@ mod tests {
let code = vec![1u8; 256]; let code = vec![1u8; 256];
let req = Code { let req = Code {
block_id: (Default::default(), 2), block_id: (Default::default(), 2),
address: Default::default(),
code_hash: ::util::Hashable::sha3(&code), code_hash: ::util::Hashable::sha3(&code),
}; };
@ -24,22 +24,15 @@ use ethcore::client::{BlockChainClient, ProvingBlockChainClient};
use ethcore::transaction::PendingTransaction; use ethcore::transaction::PendingTransaction;
use ethcore::ids::BlockId; use ethcore::ids::BlockId;
use ethcore::encoded; use ethcore::encoded;
use util::{Bytes, DBValue, RwLock, H256}; use util::{RwLock, H256};
use cht::{self, BlockInfo}; use cht::{self, BlockInfo};
use client::{LightChainClient, AsLightClient}; use client::{LightChainClient, AsLightClient};
use transaction_queue::TransactionQueue; use transaction_queue::TransactionQueue;
use request; use request;
/// Defines the operations that a provider for `LES` must fulfill. /// Defines the operations that a provider for the light subprotocol must fulfill.
///
/// These are defined at [1], but may be subject to change.
/// Requests which can't be fulfilled should return either an empty RLP list
/// or empty vector where appropriate.
///
/// [1]: https://github.com/ethcore/parity/wiki/Light-Ethereum-Subprotocol-(LES)
#[cfg_attr(feature = "ipc", ipc(client_ident="LightProviderClient"))] #[cfg_attr(feature = "ipc", ipc(client_ident="LightProviderClient"))]
pub trait Provider: Send + Sync { pub trait Provider: Send + Sync {
/// Provide current blockchain info. /// Provide current blockchain info.
@ -59,18 +52,18 @@ pub trait Provider: Send + Sync {
/// ///
/// The returned vector may have any length in the range [0, `max`], but the /// The returned vector may have any length in the range [0, `max`], but the
/// results within must adhere to the `skip` and `reverse` parameters. /// results within must adhere to the `skip` and `reverse` parameters.
fn block_headers(&self, req: request::Headers) -> Vec<encoded::Header> { fn block_headers(&self, req: request::CompleteHeadersRequest) -> Option<request::HeadersResponse> {
use request::HashOrNumber; use request::HashOrNumber;
if req.max == 0 { return Vec::new() } if req.max == 0 { return None }
let best_num = self.chain_info().best_block_number; let best_num = self.chain_info().best_block_number;
let start_num = match req.start { let start_num = match req.start {
HashOrNumber::Number(start_num) => start_num, HashOrNumber::Number(start_num) => start_num,
HashOrNumber::Hash(hash) => match self.block_header(BlockId::Hash(hash)) { HashOrNumber::Hash(hash) => match self.block_header(BlockId::Hash(hash)) {
None => { None => {
trace!(target: "les_provider", "Unknown block hash {} requested", hash); trace!(target: "pip_provider", "Unknown block hash {} requested", hash);
return Vec::new(); return None;
} }
Some(header) => { Some(header) => {
let num = header.number(); let num = header.number();
@ -79,7 +72,9 @@ pub trait Provider: Send + Sync {
if req.max == 1 || canon_hash != Some(hash) { if req.max == 1 || canon_hash != Some(hash) {
// Non-canonical header or single header requested. // Non-canonical header or single header requested.
return vec![header]; return Some(::request::HeadersResponse {
headers: vec![header],
})
} }
num num
@ -87,116 +82,50 @@ pub trait Provider: Send + Sync {
} }
}; };
(0u64..req.max as u64) let headers: Vec<_> = (0u64..req.max as u64)
.map(|x: u64| x.saturating_mul(req.skip + 1)) .map(|x: u64| x.saturating_mul(req.skip + 1))
.take_while(|x| if req.reverse { x < &start_num } else { best_num.saturating_sub(start_num) >= *x }) .take_while(|x| if req.reverse { x < &start_num } else { best_num.saturating_sub(start_num) >= *x })
.map(|x| if req.reverse { start_num - x } else { start_num + x }) .map(|x| if req.reverse { start_num - x } else { start_num + x })
.map(|x| self.block_header(BlockId::Number(x))) .map(|x| self.block_header(BlockId::Number(x)))
.take_while(|x| x.is_some()) .take_while(|x| x.is_some())
.flat_map(|x| x) .flat_map(|x| x)
.collect() .collect();
if headers.is_empty() {
None
} else {
Some(::request::HeadersResponse { headers: headers })
}
} }
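
The default `block_headers` implementation above walks block numbers start, start ± (skip + 1), start ± 2·(skip + 1), …, clamped to the chain's range. A minimal, self-contained sketch of just that number walk (the function name and standalone `main` are illustrative; the real code operates on `encoded::Header`s):

```rust
// Sketch of the header-number sequence produced by the default `block_headers`
// implementation. `start`, `max`, `skip` and `reverse` mirror the request
// fields; `best` is the current best block number.
fn header_numbers(start: u64, max: u64, skip: u64, reverse: bool, best: u64) -> Vec<u64> {
    (0u64..max)
        .map(|x| x.saturating_mul(skip + 1))
        // going backwards, stop before walking past the start block;
        // going forwards, stop once the offset passes the best block.
        .take_while(|x| if reverse { *x < start } else { best.saturating_sub(start) >= *x })
        .map(|x| if reverse { start - x } else { start + x })
        .collect()
}

fn main() {
    // Forward from 10 with skip = 1: 10, 12, 14, 16.
    assert_eq!(header_numbers(10, 4, 1, false, 100), vec![10, 12, 14, 16]);
    // Reverse from 10 with skip = 2: 10, 7, 4, 1.
    assert_eq!(header_numbers(10, 4, 2, true, 100), vec![10, 7, 4, 1]);
}
```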
/// Get a block header by id. /// Get a block header by id.
fn block_header(&self, id: BlockId) -> Option<encoded::Header>; fn block_header(&self, id: BlockId) -> Option<encoded::Header>;
/// Provide as many as possible of the requested blocks (minus the headers) encoded /// Fulfill a block body request.
/// in RLP format. fn block_body(&self, req: request::CompleteBodyRequest) -> Option<request::BodyResponse>;
fn block_bodies(&self, req: request::Bodies) -> Vec<Option<encoded::Body>> {
req.block_hashes.into_iter()
.map(|hash| self.block_body(BlockId::Hash(hash)))
.collect()
}
/// Get a block body by id. /// Fulfill a request for block receipts.
fn block_body(&self, id: BlockId) -> Option<encoded::Body>; fn block_receipts(&self, req: request::CompleteReceiptsRequest) -> Option<request::ReceiptsResponse>;
/// Provide the receipts as many as possible of the requested blocks. /// Get an account proof.
/// Returns a vector of RLP-encoded lists of receipts. fn account_proof(&self, req: request::CompleteAccountRequest) -> Option<request::AccountResponse>;
fn receipts(&self, req: request::Receipts) -> Vec<Bytes> {
req.block_hashes.into_iter()
.map(|hash| self.block_receipts(&hash))
.map(|receipts| receipts.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec()))
.collect()
}
/// Get a block's receipts as an RLP-encoded list by block hash. /// Get a storage proof.
fn block_receipts(&self, hash: &H256) -> Option<Bytes>; fn storage_proof(&self, req: request::CompleteStorageRequest) -> Option<request::StorageResponse>;
/// Provide a set of merkle proofs, as requested. Each request is a /// Provide contract code for the specified (block_hash, code_hash) pair.
/// block hash and request parameters. fn contract_code(&self, req: request::CompleteCodeRequest) -> Option<request::CodeResponse>;
///
/// Returns a vector of RLP-encoded lists satisfying the requests.
fn proofs(&self, req: request::StateProofs) -> Vec<Bytes> {
use rlp::RlpStream;
let mut results = Vec::with_capacity(req.requests.len());
for request in req.requests {
let proof = self.state_proof(request);
let mut stream = RlpStream::new_list(proof.len());
for node in proof {
stream.append_raw(&node, 1);
}
results.push(stream.out());
}
results
}
/// Get a state proof from a request. Each proof should be a vector
/// of rlp-encoded trie nodes, in ascending order by distance from the root.
fn state_proof(&self, req: request::StateProof) -> Vec<Bytes>;
/// Provide contract code for the specified (block_hash, account_hash) pairs.
/// Each item in the resulting vector is either the raw bytecode or empty.
fn contract_codes(&self, req: request::ContractCodes) -> Vec<Bytes> {
req.code_requests.into_iter()
.map(|req| self.contract_code(req))
.collect()
}
/// Get contract code by request. Either the raw bytecode or empty.
fn contract_code(&self, req: request::ContractCode) -> Bytes;
/// Provide header proofs from the Canonical Hash Tries as well as the headers
/// they correspond to -- each element in the returned vector is a 2-tuple.
/// The first element is a block header and the second a merkle proof of
/// the header in a requested CHT.
fn header_proofs(&self, req: request::HeaderProofs) -> Vec<Bytes> {
use rlp::{self, RlpStream};
req.requests.into_iter()
.map(|req| self.header_proof(req))
.map(|maybe_proof| match maybe_proof {
None => rlp::EMPTY_LIST_RLP.to_vec(),
Some((header, proof)) => {
let mut stream = RlpStream::new_list(2);
stream.append_raw(&header.into_inner(), 1).begin_list(proof.len());
for node in proof {
stream.append_raw(&node, 1);
}
stream.out()
}
})
.collect()
}
/// Provide a header proof from a given Canonical Hash Trie as well as the /// Provide a header proof from a given Canonical Hash Trie as well as the
/// corresponding header. The first element is the block header and the /// corresponding header.
/// second is a merkle proof of the CHT. fn header_proof(&self, req: request::CompleteHeaderProofRequest) -> Option<request::HeaderProofResponse>;
fn header_proof(&self, req: request::HeaderProof) -> Option<(encoded::Header, Vec<Bytes>)>;
/// Provide pending transactions. /// Provide pending transactions.
fn ready_transactions(&self) -> Vec<PendingTransaction>; fn ready_transactions(&self) -> Vec<PendingTransaction>;
/// Provide a proof-of-execution for the given transaction proof request. /// Provide a proof-of-execution for the given transaction proof request.
/// Returns a vector of all state items necessary to execute the transaction. /// Returns a vector of all state items necessary to execute the transaction.
fn transaction_proof(&self, req: request::TransactionProof) -> Option<Vec<DBValue>>; fn transaction_proof(&self, req: request::CompleteExecutionRequest) -> Option<request::ExecutionResponse>;
} }
// Implementation of a light client data provider for a client. // Implementation of a light client data provider for a client.
@ -217,32 +146,52 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
BlockChainClient::block_header(self, id) BlockChainClient::block_header(self, id)
} }
fn block_body(&self, id: BlockId) -> Option<encoded::Body> { fn block_body(&self, req: request::CompleteBodyRequest) -> Option<request::BodyResponse> {
BlockChainClient::block_body(self, id) BlockChainClient::block_body(self, BlockId::Hash(req.hash))
.map(|body| ::request::BodyResponse { body: body })
} }
fn block_receipts(&self, hash: &H256) -> Option<Bytes> { fn block_receipts(&self, req: request::CompleteReceiptsRequest) -> Option<request::ReceiptsResponse> {
BlockChainClient::block_receipts(self, hash) BlockChainClient::block_receipts(self, &req.hash)
.map(|x| ::request::ReceiptsResponse { receipts: ::rlp::decode_list(&x) })
} }
fn state_proof(&self, req: request::StateProof) -> Vec<Bytes> { fn account_proof(&self, req: request::CompleteAccountRequest) -> Option<request::AccountResponse> {
match req.key2 { self.prove_account(req.address_hash, BlockId::Hash(req.block_hash)).map(|(proof, acc)| {
Some(key2) => self.prove_storage(req.key1, key2, req.from_level, BlockId::Hash(req.block)), ::request::AccountResponse {
None => self.prove_account(req.key1, req.from_level, BlockId::Hash(req.block)), proof: proof,
nonce: acc.nonce,
balance: acc.balance,
code_hash: acc.code_hash,
storage_root: acc.storage_root,
} }
})
} }
fn contract_code(&self, req: request::ContractCode) -> Bytes { fn storage_proof(&self, req: request::CompleteStorageRequest) -> Option<request::StorageResponse> {
self.code_by_hash(req.account_key, BlockId::Hash(req.block_hash)) self.prove_storage(req.address_hash, req.key_hash, BlockId::Hash(req.block_hash)).map(|(proof, item) | {
::request::StorageResponse {
proof: proof,
value: item,
}
})
} }
fn header_proof(&self, req: request::HeaderProof) -> Option<(encoded::Header, Vec<Bytes>)> { fn contract_code(&self, req: request::CompleteCodeRequest) -> Option<request::CodeResponse> {
if Some(req.cht_number) != cht::block_to_cht_number(req.block_number) { self.state_data(&req.code_hash)
debug!(target: "les_provider", "Requested CHT number mismatch with block number."); .map(|code| ::request::CodeResponse { code: code })
}
fn header_proof(&self, req: request::CompleteHeaderProofRequest) -> Option<request::HeaderProofResponse> {
let cht_number = match cht::block_to_cht_number(req.num) {
Some(cht_num) => cht_num,
None => {
debug!(target: "pip_provider", "Requested CHT proof with invalid block number");
return None; return None;
} }
};
let mut needed_hdr = None; let mut needed = None;
// build the CHT, caching the requested header as we pass through it. // build the CHT, caching the requested header as we pass through it.
let cht = { let cht = {
@ -258,8 +207,8 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
total_difficulty: td, total_difficulty: td,
}; };
if hdr.number() == req.block_number { if hdr.number() == req.num {
needed_hdr = Some(hdr); needed = Some((hdr, td));
} }
Some(info) Some(info)
@ -268,29 +217,33 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
} }
}; };
match cht::build(req.cht_number, block_info) { match cht::build(cht_number, block_info) {
Some(cht) => cht, Some(cht) => cht,
None => return None, // incomplete CHT. None => return None, // incomplete CHT.
} }
}; };
let needed_hdr = needed_hdr.expect("`needed_hdr` always set in loop, number checked before; qed"); let (needed_hdr, needed_td) = needed.expect("`needed` always set in loop, number checked before; qed");
// prove our result. // prove our result.
match cht.prove(req.block_number, req.from_level) { match cht.prove(req.num, 0) {
Ok(Some(proof)) => Some((needed_hdr, proof)), Ok(Some(proof)) => Some(::request::HeaderProofResponse {
proof: proof,
hash: needed_hdr.hash(),
td: needed_td,
}),
Ok(None) => None, Ok(None) => None,
Err(e) => { Err(e) => {
debug!(target: "les_provider", "Error looking up number in freshly-created CHT: {}", e); debug!(target: "pip_provider", "Error looking up number in freshly-created CHT: {}", e);
None None
} }
} }
} }
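
The proof path above hinges on `cht::block_to_cht_number`, which replaces the old caller-supplied `cht_number`. A hedged sketch of that mapping, assuming a CHT size of 2048 and that the genesis block belongs to no CHT (the authoritative constant and function live in the `cht` module):

```rust
// Assumed block-number -> CHT-number mapping; illustrative only.
const CHT_SIZE: u64 = 2048;

fn block_to_cht_number(block_num: u64) -> Option<u64> {
    match block_num {
        0 => None,                      // genesis is never covered by a CHT
        n => Some((n - 1) / CHT_SIZE),  // blocks 1..=2048 -> CHT 0, 2049..=4096 -> CHT 1, ...
    }
}

fn main() {
    assert_eq!(block_to_cht_number(0), None);
    assert_eq!(block_to_cht_number(1), Some(0));
    assert_eq!(block_to_cht_number(2048), Some(0));
    assert_eq!(block_to_cht_number(2049), Some(1));
}
```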
fn transaction_proof(&self, req: request::TransactionProof) -> Option<Vec<DBValue>> { fn transaction_proof(&self, req: request::CompleteExecutionRequest) -> Option<request::ExecutionResponse> {
use ethcore::transaction::Transaction; use ethcore::transaction::Transaction;
let id = BlockId::Hash(req.at); let id = BlockId::Hash(req.block_hash);
let nonce = match self.nonce(&req.from, id.clone()) { let nonce = match self.nonce(&req.from, id.clone()) {
Some(nonce) => nonce, Some(nonce) => nonce,
None => return None, None => return None,
@ -305,6 +258,7 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
}.fake_sign(req.from); }.fake_sign(req.from);
self.prove_transaction(transaction, id) self.prove_transaction(transaction, id)
.map(|proof| ::request::ExecutionResponse { items: proof })
} }
fn ready_transactions(&self) -> Vec<PendingTransaction> { fn ready_transactions(&self) -> Vec<PendingTransaction> {
@ -347,27 +301,31 @@ impl<L: AsLightClient + Send + Sync> Provider for LightProvider<L> {
self.client.as_light_client().block_header(id) self.client.as_light_client().block_header(id)
} }
fn block_body(&self, _id: BlockId) -> Option<encoded::Body> { fn block_body(&self, _req: request::CompleteBodyRequest) -> Option<request::BodyResponse> {
None None
} }
fn block_receipts(&self, _hash: &H256) -> Option<Bytes> { fn block_receipts(&self, _req: request::CompleteReceiptsRequest) -> Option<request::ReceiptsResponse> {
None None
} }
fn state_proof(&self, _req: request::StateProof) -> Vec<Bytes> { fn account_proof(&self, _req: request::CompleteAccountRequest) -> Option<request::AccountResponse> {
Vec::new()
}
fn contract_code(&self, _req: request::ContractCode) -> Bytes {
Vec::new()
}
fn header_proof(&self, _req: request::HeaderProof) -> Option<(encoded::Header, Vec<Bytes>)> {
None None
} }
fn transaction_proof(&self, _req: request::TransactionProof) -> Option<Vec<DBValue>> { fn storage_proof(&self, _req: request::CompleteStorageRequest) -> Option<request::StorageResponse> {
None
}
fn contract_code(&self, _req: request::CompleteCodeRequest) -> Option<request::CodeResponse> {
None
}
fn header_proof(&self, _req: request::CompleteHeaderProofRequest) -> Option<request::HeaderProofResponse> {
None
}
fn transaction_proof(&self, _req: request::CompleteExecutionRequest) -> Option<request::ExecutionResponse> {
None None
} }
@ -395,10 +353,8 @@ mod tests {
let client = TestBlockChainClient::new(); let client = TestBlockChainClient::new();
client.add_blocks(2000, EachBlockWith::Nothing); client.add_blocks(2000, EachBlockWith::Nothing);
let req = ::request::HeaderProof { let req = ::request::CompleteHeaderProofRequest {
cht_number: 0, num: 1500,
block_number: 1500,
from_level: 0,
}; };
assert!(client.header_proof(req.clone()).is_none()); assert!(client.header_proof(req.clone()).is_none());


@ -1,228 +0,0 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! LES request types.
use ethcore::transaction::Action;
use util::{Address, H256, U256, Uint};
/// Either a hash or a number.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub enum HashOrNumber {
/// Block hash variant.
Hash(H256),
/// Block number variant.
Number(u64),
}
impl From<H256> for HashOrNumber {
fn from(hash: H256) -> Self {
HashOrNumber::Hash(hash)
}
}
impl From<u64> for HashOrNumber {
fn from(num: u64) -> Self {
HashOrNumber::Number(num)
}
}
/// A request for block headers.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct Headers {
/// Starting block number or hash.
pub start: HashOrNumber,
/// The maximum amount of headers which can be returned.
pub max: usize,
/// The amount of headers to skip between each response entry.
pub skip: u64,
/// Whether the headers should proceed in falling number from the initial block.
pub reverse: bool,
}
/// A request for specific block bodies.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct Bodies {
/// Hashes which bodies are being requested for.
pub block_hashes: Vec<H256>
}
/// A request for transaction receipts.
///
/// This request is answered with a list of transaction receipts for each block
/// requested.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct Receipts {
/// Block hashes to return receipts for.
pub block_hashes: Vec<H256>,
}
/// A request for a state proof
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct StateProof {
/// Block hash to query state from.
pub block: H256,
/// Key of the state trie -- corresponds to account hash.
pub key1: H256,
/// Key in that account's storage trie; if empty, then the account RLP should be
/// returned.
pub key2: Option<H256>,
/// if greater than zero, trie nodes beyond this level may be omitted.
pub from_level: u32, // could even safely be u8; trie w/ 32-byte key can be at most 64-levels deep.
}
/// A request for state proofs.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct StateProofs {
/// All the proof requests.
pub requests: Vec<StateProof>,
}
/// A request for contract code.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct ContractCode {
/// Block hash
pub block_hash: H256,
/// Account key (== sha3(address))
pub account_key: H256,
}
/// A request for contract code.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct ContractCodes {
/// Block hash and account key (== sha3(address)) pairs to fetch code for.
pub code_requests: Vec<ContractCode>,
}
/// A request for a header proof from the Canonical Hash Trie.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct HeaderProof {
/// Number of the CHT.
pub cht_number: u64,
/// Block number requested. May not be 0: genesis isn't included in any CHT.
pub block_number: u64,
/// If greater than zero, trie nodes beyond this level may be omitted.
pub from_level: u32,
}
/// A request for header proofs from the CHT.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct HeaderProofs {
/// All the proof requests.
pub requests: Vec<HeaderProof>,
}
/// A request for proof of (simulated) transaction execution.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub struct TransactionProof {
/// Block hash to request for.
pub at: H256,
/// Address to treat as the caller.
pub from: Address,
/// Action to take: either a call or a create.
pub action: Action,
/// Amount of gas to request proof-of-execution for.
pub gas: U256,
/// Price for each gas.
pub gas_price: U256,
/// Value to simulate sending.
pub value: U256,
/// Transaction data.
pub data: Vec<u8>,
}
/// Kinds of requests.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub enum Kind {
/// Requesting headers.
Headers,
/// Requesting block bodies.
Bodies,
/// Requesting transaction receipts.
Receipts,
/// Requesting proofs of state trie nodes.
StateProofs,
/// Requesting contract code by hash.
Codes,
/// Requesting header proofs (from the CHT).
HeaderProofs,
/// Requesting proof of transaction execution.
TransactionProof,
}
/// Encompasses all possible types of requests in a single structure.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)]
pub enum Request {
/// Requesting headers.
Headers(Headers),
/// Requesting block bodies.
Bodies(Bodies),
/// Requesting transaction receipts.
Receipts(Receipts),
/// Requesting state proofs.
StateProofs(StateProofs),
/// Requesting contract codes.
Codes(ContractCodes),
/// Requesting header proofs.
HeaderProofs(HeaderProofs),
/// Requesting proof of transaction execution.
TransactionProof(TransactionProof),
}
impl Request {
/// Get the kind of request this is.
pub fn kind(&self) -> Kind {
match *self {
Request::Headers(_) => Kind::Headers,
Request::Bodies(_) => Kind::Bodies,
Request::Receipts(_) => Kind::Receipts,
Request::StateProofs(_) => Kind::StateProofs,
Request::Codes(_) => Kind::Codes,
Request::HeaderProofs(_) => Kind::HeaderProofs,
Request::TransactionProof(_) => Kind::TransactionProof,
}
}
/// Get the amount of requests being made.
/// In the case of `TransactionProof`, this is the amount of gas being requested.
pub fn amount(&self) -> usize {
match *self {
Request::Headers(ref req) => req.max,
Request::Bodies(ref req) => req.block_hashes.len(),
Request::Receipts(ref req) => req.block_hashes.len(),
Request::StateProofs(ref req) => req.requests.len(),
Request::Codes(ref req) => req.code_requests.len(),
Request::HeaderProofs(ref req) => req.requests.len(),
Request::TransactionProof(ref req) => match req.gas > usize::max_value().into() {
true => usize::max_value(),
false => req.gas.low_u64() as usize,
}
}
}
}


@ -14,4 +14,4 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
pub mod les_request; pub mod request;


@ -0,0 +1,190 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Request chain builder utility.
//! Push requests with `push`. Back-references and data required to verify responses must be
//! supplied as well.
use std::collections::HashMap;
use request::{
IncompleteRequest, CompleteRequest, Request,
OutputKind, Output, NoSuchOutput, Response, ResponseError,
};
/// Build chained requests. Push them onto the series with `push`,
/// and produce a `Requests` object with `build`. Outputs are checked for consistency.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
pub struct RequestBuilder {
output_kinds: HashMap<(usize, usize), OutputKind>,
requests: Vec<Request>,
}
impl RequestBuilder {
/// Attempt to push a request onto the request chain. Fails if the request
/// references a non-existent output of a prior request.
pub fn push(&mut self, request: Request) -> Result<(), NoSuchOutput> {
request.check_outputs(|req, idx, kind| {
match self.output_kinds.get(&(req, idx)) {
Some(k) if k == &kind => Ok(()),
_ => Err(NoSuchOutput),
}
})?;
let req_idx = self.requests.len();
request.note_outputs(|idx, kind| { self.output_kinds.insert((req_idx, idx), kind); });
self.requests.push(request);
Ok(())
}
/// Get a reference to the output kinds map.
pub fn output_kinds(&self) -> &HashMap<(usize, usize), OutputKind> {
&self.output_kinds
}
/// Convert this into a "requests" object.
pub fn build(self) -> Requests {
Requests {
outputs: HashMap::new(),
requests: self.requests,
answered: 0,
}
}
}
/// Requests pending responses.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Requests {
outputs: HashMap<(usize, usize), Output>,
requests: Vec<Request>,
answered: usize,
}
impl Requests {
/// Produce a response for each request, in order.
/// The responses vector stops at the point where the responder
/// first returns `None` or an invalid response, or once all requests have been answered.
pub fn respond_to_all<F>(mut self, responder: F) -> Vec<Response>
where F: Fn(CompleteRequest) -> Option<Response>
{
let mut responses = Vec::new();
while let Some(response) = self.next_complete().and_then(&responder) {
match self.supply_response(&response) {
Ok(()) => responses.push(response),
Err(e) => {
debug!(target: "pip", "produced bad response to request: {:?}", e);
return responses;
}
}
}
responses
}
/// Get access to the underlying slice of requests.
// TODO: unimplemented -> Vec<Request>, // do we _have to_ allocate?
pub fn requests(&self) -> &[Request] { &self.requests }
/// Get the number of answered requests.
pub fn num_answered(&self) -> usize { self.answered }
/// Get the next request as a filled request. Returns `None` when all requests have been answered.
pub fn next_complete(&self) -> Option<CompleteRequest> {
if self.answered == self.requests.len() {
None
} else {
Some(self.requests[self.answered].clone()
.complete()
.expect("All outputs checked as invariant of `Requests` object; qed"))
}
}
/// Supply a response for the next request.
/// Fails on: wrong request kind, all requests answered already.
pub fn supply_response(&mut self, response: &Response) -> Result<(), ResponseError> {
let idx = self.answered;
// check validity.
if idx == self.requests.len() { return Err(ResponseError::Unexpected) }
if self.requests[idx].kind() != response.kind() { return Err(ResponseError::WrongKind) }
let outputs = &mut self.outputs;
response.fill_outputs(|out_idx, output| {
// we don't need to check output kinds here because all back-references
// are validated in the builder.
// TODO: optimization for only storing outputs we "care about"?
outputs.insert((idx, out_idx), output);
});
self.answered += 1;
// fill as much of the next request as we can.
if let Some(ref mut req) = self.requests.get_mut(self.answered) {
req.fill(|req_idx, out_idx| outputs.get(&(req_idx, out_idx)).cloned().ok_or(NoSuchOutput))
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use request::*;
use super::RequestBuilder;
use util::H256;
#[test]
fn all_scalar() {
let mut builder = RequestBuilder::default();
builder.push(Request::HeaderProof(IncompleteHeaderProofRequest {
num: 100.into(),
})).unwrap();
builder.push(Request::Receipts(IncompleteReceiptsRequest {
hash: H256::default().into(),
})).unwrap();
}
#[test]
#[should_panic]
fn missing_backref() {
let mut builder = RequestBuilder::default();
builder.push(Request::HeaderProof(IncompleteHeaderProofRequest {
num: Field::BackReference(100, 3),
})).unwrap();
}
#[test]
#[should_panic]
fn wrong_kind() {
let mut builder = RequestBuilder::default();
assert!(builder.push(Request::HeaderProof(IncompleteHeaderProofRequest {
num: 100.into(),
})).is_ok());
builder.push(Request::HeaderProof(IncompleteHeaderProofRequest {
num: Field::BackReference(0, 0),
})).unwrap();
}
#[test]
fn good_backreference() {
let mut builder = RequestBuilder::default();
builder.push(Request::HeaderProof(IncompleteHeaderProofRequest {
num: 100.into(), // header proof puts hash at output 0.
})).unwrap();
builder.push(Request::Receipts(IncompleteReceiptsRequest {
hash: Field::BackReference(0, 0),
})).unwrap();
}
}
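
To make the chaining concrete: a response can fill outputs which later requests reference via `Field::BackReference(request_index, output_index)`, and `supply_response` uses those outputs to complete the next request before it is served. The following stripped-down model of that mechanism is purely illustrative (plain `u64` outputs instead of the typed outputs defined in `request`):

```rust
use std::collections::HashMap;

// Minimal model of the builder's back-reference mechanism.
#[derive(Clone, Debug)]
enum Field {
    Scalar(u64),                 // literal value supplied up front
    BackReference(usize, usize), // (request index, output index) of a prior response
}

#[derive(Clone, Debug)]
struct ModelRequest {
    input: Field,
}

fn main() {
    // Request 1 depends on output 0 of request 0.
    let requests = vec![
        ModelRequest { input: Field::Scalar(100) },
        ModelRequest { input: Field::BackReference(0, 0) },
    ];

    // Outputs produced so far, keyed by (request index, output index).
    let mut outputs: HashMap<(usize, usize), u64> = HashMap::new();

    for (idx, req) in requests.iter().enumerate() {
        // Resolve the input: either a literal or a previously produced output.
        let input = match req.input {
            Field::Scalar(v) => v,
            Field::BackReference(r, o) => outputs[&(r, o)],
        };
        // "Respond" to the request; here the response's output 0 is just input + 1.
        let produced = input + 1;
        outputs.insert((idx, 0), produced);
        println!("request {} resolved input {} -> output {}", idx, input, produced);
    }
}
```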

File diff suppressed because it is too large.


@ -0,0 +1,42 @@
{
"name": "TestMutiValidator",
"engine": {
"basicAuthority": {
"params": {
"gasLimitBoundDivisor": "0x0400",
"durationLimit": "0x0d",
"validators": {
"multi": {
"0": { "list": ["0x82a978b3f5962a5b0957d9ee9eef472ee55b42f1"] },
"2": { "list": ["0x7d577a597b2742b498cb5cf0c26cdcd726d39e6e"] }
}
}
}
}
},
"params": {
"accountStartNonce": "0x0",
"maximumExtraDataSize": "0x20",
"minGasLimit": "0x1388",
"networkID" : "0x69"
},
"genesis": {
"seal": {
"generic": "0xc180"
},
"difficulty": "0x20000",
"author": "0x0000000000000000000000000000000000000000",
"timestamp": "0x00",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"extraData": "0x",
"gasLimit": "0x2fefd8"
},
"accounts": {
"0000000000000000000000000000000000000001": { "balance": "1", "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
"0000000000000000000000000000000000000002": { "balance": "1", "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } },
"0000000000000000000000000000000000000003": { "balance": "1", "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
"0000000000000000000000000000000000000004": { "balance": "1", "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } },
"0x82a978b3f5962a5b0957d9ee9eef472ee55b42f1": { "balance": "99999999999999999999999" },
"0x7d577a597b2742b498cb5cf0c26cdcd726d39e6e": { "balance": "99999999999999999999999" }
}
}


@ -24,14 +24,16 @@ use std::fmt;
use std::collections::{HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::time::{Instant, Duration}; use std::time::{Instant, Duration};
use util::{RwLock}; use util::{RwLock};
use ethstore::{SimpleSecretStore, SecretStore, Error as SSError, EthStore, EthMultiStore, use ethstore::{
random_string, SecretVaultRef, StoreAccountRef}; SimpleSecretStore, SecretStore, Error as SSError, EthStore, EthMultiStore,
random_string, SecretVaultRef, StoreAccountRef,
};
use ethstore::dir::MemoryDirectory; use ethstore::dir::MemoryDirectory;
use ethstore::ethkey::{Address, Message, Public, Secret, Random, Generator}; use ethstore::ethkey::{Address, Message, Public, Secret, Random, Generator};
use ethjson::misc::AccountMeta; use ethjson::misc::AccountMeta;
use hardware_wallet::{Error as HardwareError, HardwareWalletManager, KeyPath}; use hardware_wallet::{Error as HardwareError, HardwareWalletManager, KeyPath};
pub use ethstore::ethkey::Signature; pub use ethstore::ethkey::Signature;
pub use ethstore::{Derivation, IndexDerivation}; pub use ethstore::{Derivation, IndexDerivation, KeyFile};
/// Type of unlock. /// Type of unlock.
#[derive(Clone)] #[derive(Clone)]
@ -500,6 +502,11 @@ impl AccountProvider {
self.sstore.change_password(&self.sstore.account_ref(address)?, &password, &new_password) self.sstore.change_password(&self.sstore.account_ref(address)?, &password, &new_password)
} }
/// Exports an account for given address.
pub fn export_account(&self, address: &Address, password: String) -> Result<KeyFile, Error> {
self.sstore.export_account(&self.sstore.account_ref(address)?, &password)
}
/// Helper method used for unlocking accounts. /// Helper method used for unlocking accounts.
fn unlock_account(&self, address: Address, password: String, unlock: Unlock) -> Result<(), Error> { fn unlock_account(&self, address: Address, password: String, unlock: Unlock) -> Result<(), Error> {
// verify password by signing dump message // verify password by signing dump message


@ -20,7 +20,7 @@ use std::cmp;
use std::sync::Arc; use std::sync::Arc;
use std::collections::HashSet; use std::collections::HashSet;
use rlp::{UntrustedRlp, RlpStream, Encodable, Decodable, Decoder, DecoderError, View}; use rlp::{UntrustedRlp, RlpStream, Encodable, Decodable, DecoderError};
use util::{Bytes, Address, Uint, Hashable, U256, H256, ordered_trie_root, SHA3_NULL_RLP}; use util::{Bytes, Address, Uint, Hashable, U256, H256, ordered_trie_root, SHA3_NULL_RLP};
use util::error::{Mismatch, OutOfBounds}; use util::error::{Mismatch, OutOfBounds};
@ -67,18 +67,17 @@ impl Block {
impl Decodable for Block { impl Decodable for Block {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
if decoder.as_raw().len() != decoder.as_rlp().payload_info()?.total() { if rlp.as_raw().len() != rlp.payload_info()?.total() {
return Err(DecoderError::RlpIsTooBig); return Err(DecoderError::RlpIsTooBig);
} }
let d = decoder.as_rlp(); if rlp.item_count()? != 3 {
if d.item_count() != 3 {
return Err(DecoderError::RlpIncorrectListLen); return Err(DecoderError::RlpIncorrectListLen);
} }
Ok(Block { Ok(Block {
header: d.val_at(0)?, header: rlp.val_at(0)?,
transactions: d.val_at(1)?, transactions: rlp.list_at(1)?,
uncles: d.val_at(2)?, uncles: rlp.list_at(2)?,
}) })
} }
} }
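
The same mechanical migration recurs throughout this commit: `Decodable::decode` loses its generic `Decoder` parameter and receives an `&UntrustedRlp` directly, with `list_at` replacing `val_at` for list-typed fields. A minimal sketch of the new shape (the struct and its fields are hypothetical; the `rlp` calls are the in-tree API used above):

```rust
use rlp::{UntrustedRlp, Decodable, DecoderError};
use util::H256;

// Hypothetical two-field record, only to illustrate the post-migration signature.
struct Entry {
    hash: H256,
    children: Vec<H256>,
}

impl Decodable for Entry {
    // New-style signature: no generic `Decoder`; the RLP view is passed directly.
    fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
        Ok(Entry {
            hash: rlp.val_at(0)?,       // single value
            children: rlp.list_at(1)?,  // list of values (previously `val_at`)
        })
    }
}
```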


@ -154,13 +154,12 @@ impl HeapSizeOf for BlockDetails {
} }
impl Decodable for BlockDetails { impl Decodable for BlockDetails {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let details = BlockDetails { let details = BlockDetails {
number: d.val_at(0)?, number: rlp.val_at(0)?,
total_difficulty: d.val_at(1)?, total_difficulty: rlp.val_at(1)?,
parent: d.val_at(2)?, parent: rlp.val_at(2)?,
children: d.val_at(3)?, children: rlp.list_at(3)?,
}; };
Ok(details) Ok(details)
} }
@ -190,11 +189,10 @@ impl HeapSizeOf for TransactionAddress {
} }
impl Decodable for TransactionAddress { impl Decodable for TransactionAddress {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let tx_address = TransactionAddress { let tx_address = TransactionAddress {
block_hash: d.val_at(0)?, block_hash: rlp.val_at(0)?,
index: d.val_at(1)?, index: rlp.val_at(1)?,
}; };
Ok(tx_address) Ok(tx_address)
@ -224,9 +222,9 @@ impl BlockReceipts {
} }
impl Decodable for BlockReceipts { impl Decodable for BlockReceipts {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(BlockReceipts { Ok(BlockReceipts {
receipts: Decodable::decode(decoder)? receipts: rlp.as_list()?,
}) })
} }
} }


@ -44,8 +44,8 @@ impl Into<bc::Bloom> for Bloom {
} }
impl Decodable for Bloom { impl Decodable for Bloom {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Decodable::decode(decoder).map(Bloom) LogBloom::decode(rlp).map(Bloom)
} }
} }


@ -52,8 +52,8 @@ impl Into<bc::BloomGroup> for BloomGroup {
} }
impl Decodable for BloomGroup { impl Decodable for BloomGroup {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let blooms = Decodable::decode(decoder)?; let blooms = rlp.as_list()?;
let group = BloomGroup { let group = BloomGroup {
blooms: blooms blooms: blooms
}; };


@ -66,7 +66,7 @@ use evm::{Factory as EvmFactory, Schedule};
use miner::{Miner, MinerService, TransactionImportResult}; use miner::{Miner, MinerService, TransactionImportResult};
use snapshot::{self, io as snapshot_io}; use snapshot::{self, io as snapshot_io};
use factory::Factories; use factory::Factories;
use rlp::{View, UntrustedRlp}; use rlp::UntrustedRlp;
use state_db::StateDB; use state_db::StateDB;
use rand::OsRng; use rand::OsRng;
use client::registry::Registry; use client::registry::Registry;
@ -539,7 +539,7 @@ impl Client {
)?; )?;
// Commit results // Commit results
let receipts = ::rlp::decode(&receipts_bytes); let receipts = ::rlp::decode_list(&receipts_bytes);
let mut batch = DBTransaction::new(); let mut batch = DBTransaction::new();
chain.insert_unordered_block(&mut batch, &block_bytes, receipts, None, false, true); chain.insert_unordered_block(&mut batch, &block_bytes, receipts, None, false, true);
// Final commit to the DB // Final commit to the DB
@ -1607,23 +1607,14 @@ impl MayPanic for Client {
} }
impl ::client::ProvingBlockChainClient for Client { impl ::client::ProvingBlockChainClient for Client {
fn prove_storage(&self, key1: H256, key2: H256, from_level: u32, id: BlockId) -> Vec<Bytes> { fn prove_storage(&self, key1: H256, key2: H256, id: BlockId) -> Option<(Vec<Bytes>, H256)> {
self.state_at(id) self.state_at(id)
.and_then(move |state| state.prove_storage(key1, key2, from_level).ok()) .and_then(move |state| state.prove_storage(key1, key2).ok())
.unwrap_or_else(Vec::new)
} }
fn prove_account(&self, key1: H256, from_level: u32, id: BlockId) -> Vec<Bytes> { fn prove_account(&self, key1: H256, id: BlockId) -> Option<(Vec<Bytes>, ::types::basic_account::BasicAccount)> {
self.state_at(id) self.state_at(id)
.and_then(move |state| state.prove_account(key1, from_level).ok()) .and_then(move |state| state.prove_account(key1).ok())
.unwrap_or_else(Vec::new)
}
fn code_by_hash(&self, account_key: H256, id: BlockId) -> Bytes {
self.state_at(id)
.and_then(move |state| state.code_by_address_hash(account_key).ok())
.and_then(|x| x)
.unwrap_or_else(Vec::new)
} }
fn prove_transaction(&self, transaction: SignedTransaction, id: BlockId) -> Option<Vec<DBValue>> { fn prove_transaction(&self, transaction: SignedTransaction, id: BlockId) -> Option<Vec<DBValue>> {
@ -1643,7 +1634,6 @@ impl ::client::ProvingBlockChainClient for Client {
_ => return Some(state.drop().1.extract_proof()), _ => return Some(state.drop().1.extract_proof()),
} }
} }
} }
impl Drop for Client { impl Drop for Client {


@ -38,6 +38,7 @@ use error::{ImportResult, Error as EthcoreError};
use evm::{Factory as EvmFactory, VMType, Schedule}; use evm::{Factory as EvmFactory, VMType, Schedule};
use miner::{Miner, MinerService, TransactionImportResult}; use miner::{Miner, MinerService, TransactionImportResult};
use spec::Spec; use spec::Spec;
use types::basic_account::BasicAccount;
use types::mode::Mode; use types::mode::Mode;
use types::pruning_info::PruningInfo; use types::pruning_info::PruningInfo;
@ -758,16 +759,12 @@ impl BlockChainClient for TestBlockChainClient {
} }
impl ProvingBlockChainClient for TestBlockChainClient { impl ProvingBlockChainClient for TestBlockChainClient {
fn prove_storage(&self, _: H256, _: H256, _: u32, _: BlockId) -> Vec<Bytes> { fn prove_storage(&self, _: H256, _: H256, _: BlockId) -> Option<(Vec<Bytes>, H256)> {
Vec::new() None
} }
fn prove_account(&self, _: H256, _: u32, _: BlockId) -> Vec<Bytes> { fn prove_account(&self, _: H256, _: BlockId) -> Option<(Vec<Bytes>, BasicAccount)> {
Vec::new() None
}
fn code_by_hash(&self, _: H256, _: BlockId) -> Bytes {
Vec::new()
} }
fn prove_transaction(&self, _: SignedTransaction, _: BlockId) -> Option<Vec<DBValue>> { fn prove_transaction(&self, _: SignedTransaction, _: BlockId) -> Option<Vec<DBValue>> {


@ -34,6 +34,7 @@ use env_info::LastHashes;
use block_import_error::BlockImportError; use block_import_error::BlockImportError;
use ipc::IpcConfig; use ipc::IpcConfig;
use types::ids::*; use types::ids::*;
use types::basic_account::BasicAccount;
use types::trace_filter::Filter as TraceFilter; use types::trace_filter::Filter as TraceFilter;
use types::call_analytics::CallAnalytics; use types::call_analytics::CallAnalytics;
use types::blockchain_info::BlockChainInfo; use types::blockchain_info::BlockChainInfo;
@ -315,19 +316,12 @@ pub trait ProvingBlockChainClient: BlockChainClient {
/// ///
/// Both provided keys assume a secure trie. /// Both provided keys assume a secure trie.
/// Returns a vector of raw trie nodes (in order from the root) proving the storage query. /// Returns a vector of raw trie nodes (in order from the root) proving the storage query.
/// Nodes after `from_level` may be omitted. fn prove_storage(&self, key1: H256, key2: H256, id: BlockId) -> Option<(Vec<Bytes>, H256)>;
/// An empty vector indicates unservable query.
fn prove_storage(&self, key1: H256, key2: H256, from_level: u32, id: BlockId) -> Vec<Bytes>;
/// Prove account existence at a specific block id. /// Prove account existence at a specific block id.
/// The key is the keccak hash of the account's address. /// The key is the keccak hash of the account's address.
/// Returns a vector of raw trie nodes (in order from the root) proving the query. /// Returns a vector of raw trie nodes (in order from the root) proving the query.
/// Nodes after `from_level` may be omitted. fn prove_account(&self, key1: H256, id: BlockId) -> Option<(Vec<Bytes>, BasicAccount)>;
/// An empty vector indicates unservable query.
fn prove_account(&self, key1: H256, from_level: u32, id: BlockId) -> Vec<Bytes>;
/// Get code by address hash.
fn code_by_hash(&self, account_key: H256, id: BlockId) -> Bytes;
/// Prove execution of a transaction at the given block. /// Prove execution of a transaction at the given block.
fn prove_transaction(&self, transaction: SignedTransaction, id: BlockId) -> Option<Vec<DBValue>>; fn prove_transaction(&self, transaction: SignedTransaction, id: BlockId) -> Option<Vec<DBValue>>;
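
With `from_level` gone, the proving methods now return the whole proof paired with the proven value, and `None` marks an unservable query where an empty vector used to. A hedged sketch of a caller consuming the new `prove_account` shape (the helper and its import paths are illustrative; `BasicAccount`'s fields are the ones the light provider reads above):

```rust
use ethcore::client::ProvingBlockChainClient;
use ethcore::ids::BlockId;
use util::{Bytes, H256, U256};

// Illustrative helper: fetch an account's balance together with the trie proof
// backing it. `None` propagates the "unservable query" case.
fn balance_with_proof<C: ProvingBlockChainClient + ?Sized>(
    client: &C,
    address_hash: H256,
    block: H256,
) -> Option<(U256, Vec<Bytes>)> {
    client
        .prove_account(address_hash, BlockId::Hash(block))
        .map(|(proof, account)| (account.balance, proof))
}
```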


@ -21,7 +21,7 @@ use std::sync::Weak;
use std::time::{UNIX_EPOCH, Duration}; use std::time::{UNIX_EPOCH, Duration};
use util::*; use util::*;
use ethkey::{verify_address, Signature}; use ethkey::{verify_address, Signature};
use rlp::{UntrustedRlp, View, encode}; use rlp::{UntrustedRlp, encode};
use account_provider::AccountProvider; use account_provider::AccountProvider;
use block::*; use block::*;
use spec::CommonParams; use spec::CommonParams;
@ -82,7 +82,7 @@ pub struct AuthorityRound {
proposed: AtomicBool, proposed: AtomicBool,
client: RwLock<Option<Weak<EngineClient>>>, client: RwLock<Option<Weak<EngineClient>>>,
signer: EngineSigner, signer: EngineSigner,
validators: Box<ValidatorSet + Send + Sync>, validators: Box<ValidatorSet>,
/// Is this Engine just for testing (prevents step calibration). /// Is this Engine just for testing (prevents step calibration).
calibrate_step: bool, calibrate_step: bool,
} }


@ -58,7 +58,7 @@ pub struct BasicAuthority {
gas_limit_bound_divisor: U256, gas_limit_bound_divisor: U256,
builtins: BTreeMap<Address, Builtin>, builtins: BTreeMap<Address, Builtin>,
signer: EngineSigner, signer: EngineSigner,
validators: Box<ValidatorSet + Send + Sync>, validators: Box<ValidatorSet>,
} }
impl BasicAuthority { impl BasicAuthority {
@ -138,7 +138,7 @@ impl Engine for BasicAuthority {
} }
fn verify_block_family(&self, header: &Header, parent: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> { fn verify_block_family(&self, header: &Header, parent: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
use rlp::{UntrustedRlp, View}; use rlp::UntrustedRlp;
// Check if the signature belongs to a validator, can depend on parent state. // Check if the signature belongs to a validator, can depend on parent state.
let sig = UntrustedRlp::new(&header.seal()[0]).as_val::<H520>()?; let sig = UntrustedRlp::new(&header.seal()[0]).as_val::<H520>()?;
let signer = public_to_address(&recover(&sig.into(), &header.bare_hash())?); let signer = public_to_address(&recover(&sig.into(), &header.bare_hash())?);


@ -20,7 +20,7 @@ use util::*;
use super::{Height, View, BlockHash, Step}; use super::{Height, View, BlockHash, Step};
use error::Error; use error::Error;
use header::Header; use header::Header;
use rlp::{Rlp, UntrustedRlp, RlpStream, Encodable, Decodable, Decoder, DecoderError, View as RlpView}; use rlp::{Rlp, UntrustedRlp, RlpStream, Encodable, Decodable, DecoderError};
use ethkey::{recover, public_to_address}; use ethkey::{recover, public_to_address};
use super::super::vote_collector::Message; use super::super::vote_collector::Message;
@ -150,8 +150,8 @@ impl Step {
} }
impl Decodable for Step { impl Decodable for Step {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
match decoder.as_rlp().as_val()? { match rlp.as_val()? {
0u8 => Ok(Step::Propose), 0u8 => Ok(Step::Propose),
1 => Ok(Step::Prevote), 1 => Ok(Step::Prevote),
2 => Ok(Step::Precommit), 2 => Ok(Step::Precommit),
@ -168,8 +168,7 @@ impl Encodable for Step {
/// (signature, (height, view, step, block_hash)) /// (signature, (height, view, step, block_hash))
impl Decodable for ConsensusMessage { impl Decodable for ConsensusMessage {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let rlp = decoder.as_rlp();
let m = rlp.at(1)?; let m = rlp.at(1)?;
let block_message: H256 = m.val_at(3)?; let block_message: H256 = m.val_at(3)?;
Ok(ConsensusMessage { Ok(ConsensusMessage {


@ -33,7 +33,7 @@ use error::{Error, BlockError};
use header::Header; use header::Header;
use builtin::Builtin; use builtin::Builtin;
use env_info::EnvInfo; use env_info::EnvInfo;
use rlp::{UntrustedRlp, View as RlpView}; use rlp::UntrustedRlp;
use ethkey::{recover, public_to_address, Signature}; use ethkey::{recover, public_to_address, Signature};
use account_provider::AccountProvider; use account_provider::AccountProvider;
use block::*; use block::*;
@ -98,7 +98,7 @@ pub struct Tendermint {
/// Hash of the proposal parent block. /// Hash of the proposal parent block.
proposal_parent: RwLock<H256>, proposal_parent: RwLock<H256>,
/// Set used to determine the current validators. /// Set used to determine the current validators.
validators: Box<ValidatorSet + Send + Sync>, validators: Box<ValidatorSet>,
} }
impl Tendermint { impl Tendermint {


@ -19,6 +19,7 @@
mod simple_list; mod simple_list;
mod safe_contract; mod safe_contract;
mod contract; mod contract;
mod multi;
use std::sync::Weak; use std::sync::Weak;
use util::{Address, H256}; use util::{Address, H256};
@ -27,23 +28,27 @@ use client::Client;
use self::simple_list::SimpleList; use self::simple_list::SimpleList;
use self::contract::ValidatorContract; use self::contract::ValidatorContract;
use self::safe_contract::ValidatorSafeContract; use self::safe_contract::ValidatorSafeContract;
use self::multi::Multi;
/// Creates a validator set from spec. /// Creates a validator set from spec.
pub fn new_validator_set(spec: ValidatorSpec) -> Box<ValidatorSet + Send + Sync> { pub fn new_validator_set(spec: ValidatorSpec) -> Box<ValidatorSet> {
match spec { match spec {
ValidatorSpec::List(list) => Box::new(SimpleList::new(list.into_iter().map(Into::into).collect())), ValidatorSpec::List(list) => Box::new(SimpleList::new(list.into_iter().map(Into::into).collect())),
ValidatorSpec::SafeContract(address) => Box::new(ValidatorSafeContract::new(address.into())), ValidatorSpec::SafeContract(address) => Box::new(ValidatorSafeContract::new(address.into())),
ValidatorSpec::Contract(address) => Box::new(ValidatorContract::new(address.into())), ValidatorSpec::Contract(address) => Box::new(ValidatorContract::new(address.into())),
ValidatorSpec::Multi(sequence) => Box::new(
Multi::new(sequence.into_iter().map(|(block, set)| (block.into(), new_validator_set(set))).collect())
),
} }
} }
pub trait ValidatorSet { pub trait ValidatorSet: Send + Sync {
/// Checks if a given address is a validator. /// Checks if a given address is a validator.
fn contains(&self, bh: &H256, address: &Address) -> bool; fn contains(&self, parent_block_hash: &H256, address: &Address) -> bool;
/// Draws a validator nonce modulo the number of validators. /// Draws a validator nonce modulo the number of validators.
fn get(&self, bh: &H256, nonce: usize) -> Address; fn get(&self, parent_block_hash: &H256, nonce: usize) -> Address;
/// Returns the current number of validators. /// Returns the current number of validators.
fn count(&self, bh: &H256) -> usize; fn count(&self, parent_block_hash: &H256) -> usize;
/// Notifies about malicious behaviour. /// Notifies about malicious behaviour.
fn report_malicious(&self, _validator: &Address) {} fn report_malicious(&self, _validator: &Address) {}
/// Notifies about benign misbehaviour. /// Notifies about benign misbehaviour.


@ -0,0 +1,158 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
/// Validator set changing at fork blocks.
use std::collections::BTreeMap;
use std::sync::Weak;
use util::{H256, Address, RwLock};
use ids::BlockId;
use header::BlockNumber;
use client::{Client, BlockChainClient};
use super::ValidatorSet;
type BlockNumberLookup = Box<Fn(&H256) -> Result<BlockNumber, String> + Send + Sync + 'static>;
pub struct Multi {
sets: BTreeMap<BlockNumber, Box<ValidatorSet>>,
block_number: RwLock<BlockNumberLookup>,
}
impl Multi {
pub fn new(set_map: BTreeMap<BlockNumber, Box<ValidatorSet>>) -> Self {
assert!(set_map.get(&0u64).is_some(), "ValidatorSet has to be specified from block 0.");
Multi {
sets: set_map,
block_number: RwLock::new(Box::new(move |_| Err("No client!".into()))),
}
}
fn correct_set(&self, bh: &H256) -> Option<&Box<ValidatorSet>> {
match self
.block_number
.read()(bh)
.map(|parent_block| self
.sets
.iter()
.rev()
.find(|&(block, _)| *block <= parent_block + 1)
.expect("constructor validation ensures that there is at least one validator set for block 0;
block 0 is less than any uint;
qed")
) {
Ok((block, set)) => {
trace!(target: "engine", "Multi ValidatorSet retrieved for block {}.", block);
Some(set)
},
Err(e) => {
debug!(target: "engine", "ValidatorSet could not be recovered: {}", e);
None
},
}
}
}
impl ValidatorSet for Multi {
fn contains(&self, bh: &H256, address: &Address) -> bool {
self.correct_set(bh).map_or(false, |set| set.contains(bh, address))
}
fn get(&self, bh: &H256, nonce: usize) -> Address {
self.correct_set(bh).map_or_else(Default::default, |set| set.get(bh, nonce))
}
fn count(&self, bh: &H256) -> usize {
self.correct_set(bh).map_or_else(usize::max_value, |set| set.count(bh))
}
fn report_malicious(&self, validator: &Address) {
for set in self.sets.values() {
set.report_malicious(validator);
}
}
fn report_benign(&self, validator: &Address) {
for set in self.sets.values() {
set.report_benign(validator);
}
}
fn register_contract(&self, client: Weak<Client>) {
for set in self.sets.values() {
set.register_contract(client.clone());
}
*self.block_number.write() = Box::new(move |hash| client
.upgrade()
.ok_or("No client!".into())
.and_then(|c| c.block_number(BlockId::Hash(*hash)).ok_or("Unknown block".into())));
}
}
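
`Multi` keys its inner sets by the first block at which each becomes active and, given the parent block, picks the entry with the largest starting block not exceeding `parent + 1` (so the fork in the `TestMutiValidator` spec above switches lists at block 2). A self-contained sketch of that lookup, with plain strings standing in for the boxed validator sets:

```rust
use std::collections::BTreeMap;

// Pick the set active for the child of `parent_block`:
// the entry with the largest starting block <= parent_block + 1.
fn active_set<'a>(sets: &BTreeMap<u64, &'a str>, parent_block: u64) -> &'a str {
    sets.iter()
        .rev()
        .find(|&(start, _)| *start <= parent_block + 1)
        .map(|(_, set)| *set)
        .expect("a set for block 0 must always be present")
}

fn main() {
    let mut sets = BTreeMap::new();
    sets.insert(0u64, "list A"); // from genesis
    sets.insert(2u64, "list B"); // from block 2 onwards

    assert_eq!(active_set(&sets, 0), "list A"); // child is block 1
    assert_eq!(active_set(&sets, 1), "list B"); // child is block 2 -> fork point
    assert_eq!(active_set(&sets, 5), "list B");
}
```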
#[cfg(test)]
mod tests {
use util::*;
use types::ids::BlockId;
use spec::Spec;
use account_provider::AccountProvider;
use client::{BlockChainClient, EngineClient};
use ethkey::Secret;
use miner::MinerService;
use tests::helpers::{generate_dummy_client_with_spec_and_accounts, generate_dummy_client_with_spec_and_data};
#[test]
fn uses_current_set() {
::env_logger::init().unwrap();
let tap = Arc::new(AccountProvider::transient_provider());
let s0 = Secret::from_slice(&"0".sha3()).unwrap();
let v0 = tap.insert_account(s0.clone(), "").unwrap();
let v1 = tap.insert_account(Secret::from_slice(&"1".sha3()).unwrap(), "").unwrap();
let client = generate_dummy_client_with_spec_and_accounts(Spec::new_validator_multi, Some(tap));
client.engine().register_client(Arc::downgrade(&client));
// Make sure txs go through.
client.miner().set_gas_floor_target(1_000_000.into());
// Wrong signer for the first block.
client.miner().set_engine_signer(v1, "".into()).unwrap();
client.transact_contract(Default::default(), Default::default()).unwrap();
client.update_sealing();
assert_eq!(client.chain_info().best_block_number, 0);
// Right signer for the first block.
client.miner().set_engine_signer(v0, "".into()).unwrap();
client.update_sealing();
assert_eq!(client.chain_info().best_block_number, 1);
// This time v0 is wrong.
client.transact_contract(Default::default(), Default::default()).unwrap();
client.update_sealing();
assert_eq!(client.chain_info().best_block_number, 1);
client.miner().set_engine_signer(v1, "".into()).unwrap();
client.update_sealing();
assert_eq!(client.chain_info().best_block_number, 2);
// v1 is still good.
client.transact_contract(Default::default(), Default::default()).unwrap();
client.update_sealing();
assert_eq!(client.chain_info().best_block_number, 3);
// Check syncing.
let sync_client = generate_dummy_client_with_spec_and_data(Spec::new_validator_multi, 0, 0, &[]);
sync_client.engine().register_client(Arc::downgrade(&sync_client));
for i in 1..4 {
sync_client.import_block(client.block(BlockId::Number(i)).unwrap().into_inner()).unwrap();
}
sync_client.flush_queue();
assert_eq!(sync_client.chain_info().best_block_number, 3);
}
}


@ -27,7 +27,7 @@ use transaction::UnverifiedTransaction;
use engines::Engine; use engines::Engine;
use evm::Schedule; use evm::Schedule;
use ethjson; use ethjson;
use rlp::{self, UntrustedRlp, View}; use rlp::{self, UntrustedRlp};
/// Parity tries to round block.gas_limit to multiple of this constant /// Parity tries to round block.gas_limit to multiple of this constant
pub const PARITY_GAS_LIMIT_DETERMINANT: U256 = U256([37, 0, 0, 0]); pub const PARITY_GAS_LIMIT_DETERMINANT: U256 = U256([37, 0, 0, 0]);
@ -43,6 +43,8 @@ pub struct EthashParams {
pub difficulty_bound_divisor: U256, pub difficulty_bound_divisor: U256,
/// Difficulty increment divisor. /// Difficulty increment divisor.
pub difficulty_increment_divisor: u64, pub difficulty_increment_divisor: u64,
/// Metropolis difficulty increment divisor.
pub metropolis_difficulty_increment_divisor: u64,
/// Block duration. /// Block duration.
pub duration_limit: u64, pub duration_limit: u64,
/// Block reward. /// Block reward.
@ -63,6 +65,8 @@ pub struct EthashParams {
pub difficulty_hardfork_bound_divisor: U256, pub difficulty_hardfork_bound_divisor: U256,
/// Block on which there is no additional difficulty from the exponential bomb. /// Block on which there is no additional difficulty from the exponential bomb.
pub bomb_defuse_transition: u64, pub bomb_defuse_transition: u64,
/// Number of first block where EIP-100 rules begin.
pub eip100b_transition: u64,
/// Number of first block where EIP-150 rules begin. /// Number of first block where EIP-150 rules begin.
pub eip150_transition: u64, pub eip150_transition: u64,
/// Number of first block where EIP-155 rules begin. /// Number of first block where EIP-155 rules begin.
@ -96,6 +100,7 @@ impl From<ethjson::spec::EthashParams> for EthashParams {
minimum_difficulty: p.minimum_difficulty.into(), minimum_difficulty: p.minimum_difficulty.into(),
difficulty_bound_divisor: p.difficulty_bound_divisor.into(), difficulty_bound_divisor: p.difficulty_bound_divisor.into(),
difficulty_increment_divisor: p.difficulty_increment_divisor.map_or(10, Into::into), difficulty_increment_divisor: p.difficulty_increment_divisor.map_or(10, Into::into),
metropolis_difficulty_increment_divisor: p.metropolis_difficulty_increment_divisor.map_or(9, Into::into),
duration_limit: p.duration_limit.into(), duration_limit: p.duration_limit.into(),
block_reward: p.block_reward.into(), block_reward: p.block_reward.into(),
registrar: p.registrar.map_or_else(Address::new, Into::into), registrar: p.registrar.map_or_else(Address::new, Into::into),
@ -106,6 +111,7 @@ impl From<ethjson::spec::EthashParams> for EthashParams {
difficulty_hardfork_transition: p.difficulty_hardfork_transition.map_or(u64::max_value(), Into::into), difficulty_hardfork_transition: p.difficulty_hardfork_transition.map_or(u64::max_value(), Into::into),
difficulty_hardfork_bound_divisor: p.difficulty_hardfork_bound_divisor.map_or(p.difficulty_bound_divisor.into(), Into::into), difficulty_hardfork_bound_divisor: p.difficulty_hardfork_bound_divisor.map_or(p.difficulty_bound_divisor.into(), Into::into),
bomb_defuse_transition: p.bomb_defuse_transition.map_or(u64::max_value(), Into::into), bomb_defuse_transition: p.bomb_defuse_transition.map_or(u64::max_value(), Into::into),
eip100b_transition: p.eip100b_transition.map_or(u64::max_value(), Into::into),
eip150_transition: p.eip150_transition.map_or(0, Into::into), eip150_transition: p.eip150_transition.map_or(0, Into::into),
eip155_transition: p.eip155_transition.map_or(0, Into::into), eip155_transition: p.eip155_transition.map_or(0, Into::into),
eip160_transition: p.eip160_transition.map_or(0, Into::into), eip160_transition: p.eip160_transition.map_or(0, Into::into),
@ -406,6 +412,8 @@ impl Ethash {
panic!("Can't calculate genesis block difficulty"); panic!("Can't calculate genesis block difficulty");
} }
let parent_has_uncles = parent.uncles_hash() != &sha3::SHA3_EMPTY_LIST_RLP;
let min_difficulty = self.ethash_params.minimum_difficulty; let min_difficulty = self.ethash_params.minimum_difficulty;
let difficulty_hardfork = header.number() >= self.ethash_params.difficulty_hardfork_transition; let difficulty_hardfork = header.number() >= self.ethash_params.difficulty_hardfork_transition;
let difficulty_bound_divisor = match difficulty_hardfork { let difficulty_bound_divisor = match difficulty_hardfork {
@ -417,19 +425,27 @@ impl Ethash {
let mut target = if header.number() < frontier_limit { let mut target = if header.number() < frontier_limit {
if header.timestamp() >= parent.timestamp() + duration_limit { if header.timestamp() >= parent.timestamp() + duration_limit {
parent.difficulty().clone() - (parent.difficulty().clone() / difficulty_bound_divisor) *parent.difficulty() - (*parent.difficulty() / difficulty_bound_divisor)
} else { } else {
parent.difficulty().clone() + (parent.difficulty().clone() / difficulty_bound_divisor) *parent.difficulty() + (*parent.difficulty() / difficulty_bound_divisor)
} }
} }
else { else {
trace!(target: "ethash", "Calculating difficulty parent.difficulty={}, header.timestamp={}, parent.timestamp={}", parent.difficulty(), header.timestamp(), parent.timestamp()); trace!(target: "ethash", "Calculating difficulty parent.difficulty={}, header.timestamp={}, parent.timestamp={}", parent.difficulty(), header.timestamp(), parent.timestamp());
//block_diff = parent_diff + parent_diff // 2048 * max(1 - (block_timestamp - parent_timestamp) // 10, -99) //block_diff = parent_diff + parent_diff // 2048 * max(1 - (block_timestamp - parent_timestamp) // 10, -99)
let diff_inc = (header.timestamp() - parent.timestamp()) / self.ethash_params.difficulty_increment_divisor; let (increment_divisor, threshold) = if header.number() < self.ethash_params.eip100b_transition {
if diff_inc <= 1 { (self.ethash_params.difficulty_increment_divisor, 1)
parent.difficulty().clone() + parent.difficulty().clone() / From::from(difficulty_bound_divisor) * From::from(1 - diff_inc) } else if parent_has_uncles {
(self.ethash_params.metropolis_difficulty_increment_divisor, 2)
} else { } else {
parent.difficulty().clone() - parent.difficulty().clone() / From::from(difficulty_bound_divisor) * From::from(min(diff_inc - 1, 99)) (self.ethash_params.metropolis_difficulty_increment_divisor, 1)
};
let diff_inc = (header.timestamp() - parent.timestamp()) / increment_divisor;
if diff_inc <= threshold {
*parent.difficulty() + *parent.difficulty() / difficulty_bound_divisor * (threshold - diff_inc).into()
} else {
*parent.difficulty() - *parent.difficulty() / difficulty_bound_divisor * min(diff_inc - threshold, 99).into()
} }
}; };
target = max(min_difficulty, target); target = max(min_difficulty, target);
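
The two branches above are equivalent to adjusting the parent difficulty by `parent_diff / bound_divisor * max(threshold − Δt/divisor, −99)`, where the EIP-100 path sets the divisor to 9 and raises the threshold to 2 when the parent has uncles. A small signed-integer sketch of just that factor (illustrative; the real code works in `U256`):

```rust
// EIP-100-style adjustment factor: max(threshold - (delta_t / divisor), -99).
// Pre-EIP-100 the divisor is 10 and the threshold is always 1; post-transition
// the divisor is 9 and `parent_has_uncles` bumps the threshold to 2.
fn adjustment_factor(delta_t: u64, parent_has_uncles: bool, eip100: bool) -> i64 {
    let (divisor, threshold) = if !eip100 {
        (10, 1)
    } else if parent_has_uncles {
        (9, 2)
    } else {
        (9, 1)
    };
    let diff_inc = (delta_t / divisor) as i64;
    std::cmp::max(threshold - diff_inc, -99)
}

fn main() {
    // Fast block (9s), no uncles, post-EIP-100: factor 0 -> difficulty unchanged.
    assert_eq!(adjustment_factor(9, false, true), 0);
    // Same timing but the parent had uncles: factor 1 -> difficulty rises.
    assert_eq!(adjustment_factor(9, true, true), 1);
    // Very slow block: clamped at -99.
    assert_eq!(adjustment_factor(2000, false, true), -99);
}
```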


@ -261,9 +261,7 @@ impl Header {
} }
impl Decodable for Header { impl Decodable for Header {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(r: &UntrustedRlp) -> Result<Self, DecoderError> {
let r = decoder.as_rlp();
let mut blockheader = Header { let mut blockheader = Header {
parent_hash: r.val_at(0)?, parent_hash: r.val_at(0)?,
uncles_hash: r.val_at(1)?, uncles_hash: r.val_at(1)?,
@ -283,7 +281,7 @@ impl Decodable for Header {
bare_hash: RefCell::new(None), bare_hash: RefCell::new(None),
}; };
for i in 13..r.item_count() { for i in 13..r.item_count()? {
blockheader.seal.push(r.at(i)?.as_raw().to_vec()) blockheader.seal.push(r.at(i)?.as_raw().to_vec())
} }
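Most of the RLP changes in this diff follow the same pattern as the Header implementation above: `Decodable::decode` now takes an `&UntrustedRlp` directly instead of a generic `Decoder`, `item_count()` returns a `Result`, and list-valued fields move from `val_at` to `list_at`. A minimal sketch of the new shape, assuming the in-tree `rlp` crate and a made-up `Toy` type:

use rlp::{Decodable, DecoderError, UntrustedRlp};

struct Toy {
    id: u64,
    tags: Vec<u64>,
}

impl Decodable for Toy {
    fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
        Ok(Toy {
            id: rlp.val_at(0)?,     // single value
            tags: rlp.list_at(1)?,  // homogeneous list (previously val_at)
        })
    }
}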


@ -25,7 +25,7 @@ use miner::Miner;
use io::IoChannel; use io::IoChannel;
pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> { pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
init_log(); ::ethcore_logger::init_log();
let tests = ethjson::blockchain::Test::load(json_data).unwrap(); let tests = ethjson::blockchain::Test::load(json_data).unwrap();
let mut failed = Vec::new(); let mut failed = Vec::new();


@ -22,7 +22,7 @@ use ethereum;
use ethjson; use ethjson;
pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> { pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
init_log(); ::ethcore_logger::init_log();
let tests = ethjson::state::Test::load(json_data).unwrap(); let tests = ethjson::state::Test::load(json_data).unwrap();
let mut failed = Vec::new(); let mut failed = Vec::new();
let engine = match era { let engine = match era {


@ -17,7 +17,7 @@
use super::test_common::*; use super::test_common::*;
use evm; use evm;
use ethjson; use ethjson;
use rlp::{UntrustedRlp, View}; use rlp::UntrustedRlp;
use transaction::{Action, UnverifiedTransaction}; use transaction::{Action, UnverifiedTransaction};
use ethstore::ethkey::public_to_address; use ethstore::ethkey::public_to_address;


@ -106,8 +106,10 @@ extern crate ethcore_stratum;
extern crate ethabi; extern crate ethabi;
extern crate hardware_wallet; extern crate hardware_wallet;
extern crate stats; extern crate stats;
extern crate ethcore_logger;
extern crate num; extern crate num;
extern crate bn; extern crate bn;
extern crate itertools;
#[macro_use] #[macro_use]
extern crate log; extern crate log;


@ -17,7 +17,7 @@
//! This migration compresses the state db. //! This migration compresses the state db.
use util::migration::{SimpleMigration, Progress}; use util::migration::{SimpleMigration, Progress};
use rlp::{Compressible, UntrustedRlp, View, RlpType}; use rlp::{Compressible, UntrustedRlp, RlpType};
/// Compressing migration. /// Compressing migration.
#[derive(Default)] #[derive(Default)]


@ -26,8 +26,7 @@ use util::migration::{Batch, Config, Error, Migration, SimpleMigration, Progress
use util::sha3::Hashable; use util::sha3::Hashable;
use std::sync::Arc; use std::sync::Arc;
use rlp::{decode, Rlp, RlpStream, View}; use rlp::{decode, Rlp, RlpStream};
// attempt to migrate a key, value pair. None if migration not possible. // attempt to migrate a key, value pair. None if migration not possible.
fn attempt_migrate(mut key_h: H256, val: &[u8]) -> Option<H256> { fn attempt_migrate(mut key_h: H256, val: &[u8]) -> Option<H256> {
@ -184,7 +183,7 @@ impl OverlayRecentV7 {
} }
// migrate all deleted keys. // migrate all deleted keys.
let mut deleted_keys: Vec<H256> = rlp.val_at(2); let mut deleted_keys: Vec<H256> = rlp.list_at(2);
for old_key in &mut deleted_keys { for old_key in &mut deleted_keys {
if let Some(new) = self.migrated_keys.get(&*old_key) { if let Some(new) = self.migrated_keys.get(&*old_key) {
*old_key = new.clone(); *old_key = new.clone();


@ -17,7 +17,7 @@
//! This migration consolidates all databases into single one using Column Families. //! This migration consolidates all databases into single one using Column Families.
use rlp::{Rlp, RlpStream, View}; use rlp::{Rlp, RlpStream};
use util::kvdb::Database; use util::kvdb::Database;
use util::migration::{Batch, Config, Error, Migration, Progress}; use util::migration::{Batch, Config, Error, Migration, Progress};
use std::sync::Arc; use std::sync::Arc;


@ -110,7 +110,7 @@ impl PriceInfo {
fn should_get_price_info() { fn should_get_price_info() {
use std::sync::Arc; use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use util::log::init_log; use ethcore_logger::init_log;
use util::{Condvar, Mutex}; use util::{Condvar, Mutex};
init_log(); init_log();


@ -2454,6 +2454,7 @@ pub mod test {
#[test] #[test]
fn should_replace_same_transaction_when_has_higher_fee() { fn should_replace_same_transaction_when_has_higher_fee() {
use ethcore_logger::init_log;
init_log(); init_log();
// given // given
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();


@ -22,7 +22,8 @@ use snapshot::Error;
use util::{U256, H256, Bytes, HashDB, SHA3_EMPTY, SHA3_NULL_RLP}; use util::{U256, H256, Bytes, HashDB, SHA3_EMPTY, SHA3_NULL_RLP};
use util::trie::{TrieDB, Trie}; use util::trie::{TrieDB, Trie};
use rlp::{RlpStream, UntrustedRlp, View}; use rlp::{RlpStream, UntrustedRlp};
use itertools::Itertools;
use std::collections::HashSet; use std::collections::HashSet;
@ -60,25 +61,23 @@ impl CodeState {
} }
} }
// walk the account's storage trie, returning an RLP item containing the // walk the account's storage trie, returning a vector of RLP items containing the
// account properties and the storage. // account properties and the storage. Each item contains at most `max_storage_items`
pub fn to_fat_rlp(acc: &BasicAccount, acct_db: &AccountDB, used_code: &mut HashSet<H256>) -> Result<Bytes, Error> { // storage records split according to snapshot format definition.
pub fn to_fat_rlps(acc: &BasicAccount, acct_db: &AccountDB, used_code: &mut HashSet<H256>, max_storage_items: usize) -> Result<Vec<Bytes>, Error> {
if acc == &ACC_EMPTY { if acc == &ACC_EMPTY {
return Ok(::rlp::NULL_RLP.to_vec()); return Ok(vec![::rlp::NULL_RLP.to_vec()]);
} }
let db = TrieDB::new(acct_db, &acc.storage_root)?; let db = TrieDB::new(acct_db, &acc.storage_root)?;
let mut pairs = Vec::new(); let chunks = db.iter()?.chunks(max_storage_items);
let pair_chunks = chunks.into_iter().map(|chunk| chunk.collect());
for item in db.iter()? { pair_chunks.pad_using(1, |_| Vec::new(), ).map(|pairs| {
let (k, v) = item?;
pairs.push((k, v));
}
let mut stream = RlpStream::new_list(pairs.len()); let mut stream = RlpStream::new_list(pairs.len());
for (k, v) in pairs { for r in pairs {
let (k, v) = r?;
stream.begin_list(2).append(&k).append(&&*v); stream.begin_list(2).append(&k).append(&&*v);
} }
@ -107,8 +106,8 @@ pub fn to_fat_rlp(acc: &BasicAccount, acct_db: &AccountDB, used_code: &mut HashS
} }
account_stream.append_raw(&pairs_rlp, 1); account_stream.append_raw(&pairs_rlp, 1);
Ok(account_stream.out()) Ok(account_stream.out())
}).collect()
} }
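`to_fat_rlps` now emits one RLP record per group of at most `max_storage_items` storage pairs, padding to a single empty group so that an account with no storage still produces exactly one record; `chunk_state` later forces a chunk boundary for every record after the first. A std-only sketch of that grouping rule (the real code iterates the storage trie and builds RlpStreams; the names here are illustrative):

fn split_storage(pairs: &[(u64, u64)], max_items: usize) -> Vec<Vec<(u64, u64)>> {
    // An account with empty storage still yields one (empty) record.
    if pairs.is_empty() {
        return vec![Vec::new()];
    }
    pairs.chunks(max_items).map(|chunk| chunk.to_vec()).collect()
}

fn main() {
    assert_eq!(split_storage(&[], 100).len(), 1);
    assert_eq!(split_storage(&[(1u64, 1u64); 250], 100).len(), 3);
}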
// decode a fat rlp, and rebuild the storage trie as we go. // decode a fat rlp, and rebuild the storage trie as we go.
@ -117,6 +116,7 @@ pub fn to_fat_rlp(acc: &BasicAccount, acct_db: &AccountDB, used_code: &mut HashS
pub fn from_fat_rlp( pub fn from_fat_rlp(
acct_db: &mut AccountDBMut, acct_db: &mut AccountDBMut,
rlp: UntrustedRlp, rlp: UntrustedRlp,
mut storage_root: H256,
) -> Result<(BasicAccount, Option<Bytes>), Error> { ) -> Result<(BasicAccount, Option<Bytes>), Error> {
use util::{TrieDBMut, TrieMut}; use util::{TrieDBMut, TrieMut};
@ -148,10 +148,12 @@ pub fn from_fat_rlp(
} }
}; };
let mut storage_root = H256::zero();
{ {
let mut storage_trie = TrieDBMut::new(acct_db, &mut storage_root); let mut storage_trie = if storage_root.is_zero() {
TrieDBMut::new(acct_db, &mut storage_root)
} else {
TrieDBMut::from_existing(acct_db, &mut storage_root)?
};
let pairs = rlp.at(4)?; let pairs = rlp.at(4)?;
for pair_rlp in pairs.iter() { for pair_rlp in pairs.iter() {
let k: Bytes = pair_rlp.val_at(0)?; let k: Bytes = pair_rlp.val_at(0)?;
@ -180,11 +182,11 @@ mod tests {
use util::sha3::{SHA3_EMPTY, SHA3_NULL_RLP}; use util::sha3::{SHA3_EMPTY, SHA3_NULL_RLP};
use util::{Address, H256, HashDB, DBValue}; use util::{Address, H256, HashDB, DBValue};
use rlp::{UntrustedRlp, View}; use rlp::UntrustedRlp;
use std::collections::HashSet; use std::collections::HashSet;
use super::{ACC_EMPTY, to_fat_rlp, from_fat_rlp}; use super::{ACC_EMPTY, to_fat_rlps, from_fat_rlp};
#[test] #[test]
fn encoding_basic() { fn encoding_basic() {
@ -201,9 +203,9 @@ mod tests {
let thin_rlp = ::rlp::encode(&account); let thin_rlp = ::rlp::encode(&account);
assert_eq!(::rlp::decode::<BasicAccount>(&thin_rlp), account); assert_eq!(::rlp::decode::<BasicAccount>(&thin_rlp), account);
let fat_rlp = to_fat_rlp(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap(); let fat_rlps = to_fat_rlps(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default(), usize::max_value()).unwrap();
let fat_rlp = UntrustedRlp::new(&fat_rlp); let fat_rlp = UntrustedRlp::new(&fat_rlps[0]);
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap().0, account); assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, H256::zero()).unwrap().0, account);
} }
#[test] #[test]
@ -226,9 +228,40 @@ mod tests {
let thin_rlp = ::rlp::encode(&account); let thin_rlp = ::rlp::encode(&account);
assert_eq!(::rlp::decode::<BasicAccount>(&thin_rlp), account); assert_eq!(::rlp::decode::<BasicAccount>(&thin_rlp), account);
let fat_rlp = to_fat_rlp(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap(); let fat_rlp = to_fat_rlps(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default(), usize::max_value()).unwrap();
let fat_rlp = UntrustedRlp::new(&fat_rlp); let fat_rlp = UntrustedRlp::new(&fat_rlp[0]);
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap().0, account); assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, H256::zero()).unwrap().0, account);
}
#[test]
fn encoding_storage_split() {
let mut db = get_temp_state_db();
let addr = Address::random();
let account = {
let acct_db = AccountDBMut::new(db.as_hashdb_mut(), &addr);
let mut root = SHA3_NULL_RLP;
fill_storage(acct_db, &mut root, &mut H256::zero());
BasicAccount {
nonce: 25.into(),
balance: 987654321.into(),
storage_root: root,
code_hash: SHA3_EMPTY,
}
};
let thin_rlp = ::rlp::encode(&account);
assert_eq!(::rlp::decode::<BasicAccount>(&thin_rlp), account);
let fat_rlps = to_fat_rlps(&account, &AccountDB::new(db.as_hashdb(), &addr), &mut Default::default(), 100).unwrap();
let mut root = SHA3_NULL_RLP;
let mut restored_account = None;
for rlp in fat_rlps {
let fat_rlp = UntrustedRlp::new(&rlp);
restored_account = Some(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, root).unwrap().0);
root = restored_account.as_ref().unwrap().storage_root.clone();
}
assert_eq!(restored_account, Some(account));
} }
#[test] #[test]
@ -264,18 +297,18 @@ mod tests {
let mut used_code = HashSet::new(); let mut used_code = HashSet::new();
let fat_rlp1 = to_fat_rlp(&account1, &AccountDB::new(db.as_hashdb(), &addr1), &mut used_code).unwrap(); let fat_rlp1 = to_fat_rlps(&account1, &AccountDB::new(db.as_hashdb(), &addr1), &mut used_code, usize::max_value()).unwrap();
let fat_rlp2 = to_fat_rlp(&account2, &AccountDB::new(db.as_hashdb(), &addr2), &mut used_code).unwrap(); let fat_rlp2 = to_fat_rlps(&account2, &AccountDB::new(db.as_hashdb(), &addr2), &mut used_code, usize::max_value()).unwrap();
assert_eq!(used_code.len(), 1); assert_eq!(used_code.len(), 1);
let fat_rlp1 = UntrustedRlp::new(&fat_rlp1); let fat_rlp1 = UntrustedRlp::new(&fat_rlp1[0]);
let fat_rlp2 = UntrustedRlp::new(&fat_rlp2); let fat_rlp2 = UntrustedRlp::new(&fat_rlp2[0]);
let (acc, maybe_code) = from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2).unwrap(); let (acc, maybe_code) = from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2, H256::zero()).unwrap();
assert!(maybe_code.is_none()); assert!(maybe_code.is_none());
assert_eq!(acc, account2); assert_eq!(acc, account2);
let (acc, maybe_code) = from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1).unwrap(); let (acc, maybe_code) = from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1, H256::zero()).unwrap();
assert_eq!(maybe_code, Some(b"this is definitely code".to_vec())); assert_eq!(maybe_code, Some(b"this is definitely code".to_vec()));
assert_eq!(acc, account1); assert_eq!(acc, account1);
} }
@ -285,7 +318,7 @@ mod tests {
let mut db = get_temp_state_db(); let mut db = get_temp_state_db();
let mut used_code = HashSet::new(); let mut used_code = HashSet::new();
assert_eq!(to_fat_rlp(&ACC_EMPTY, &AccountDB::new(db.as_hashdb(), &Address::default()), &mut used_code).unwrap(), ::rlp::NULL_RLP.to_vec()); assert_eq!(to_fat_rlps(&ACC_EMPTY, &AccountDB::new(db.as_hashdb(), &Address::default()), &mut used_code, usize::max_value()).unwrap(), vec![::rlp::NULL_RLP.to_vec()]);
assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &Address::default()), UntrustedRlp::new(&::rlp::NULL_RLP)).unwrap(), (ACC_EMPTY, None)); assert_eq!(from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &Address::default()), UntrustedRlp::new(&::rlp::NULL_RLP), H256::zero()).unwrap(), (ACC_EMPTY, None));
} }
} }


@ -20,7 +20,7 @@ use block::Block;
use header::Header; use header::Header;
use views::BlockView; use views::BlockView;
use rlp::{DecoderError, RlpStream, UntrustedRlp, View}; use rlp::{DecoderError, RlpStream, UntrustedRlp};
use util::{Bytes, Hashable, H256}; use util::{Bytes, Hashable, H256};
use util::triehash::ordered_trie_root; use util::triehash::ordered_trie_root;
@ -101,8 +101,8 @@ impl AbridgedBlock {
header.set_timestamp(rlp.val_at(6)?); header.set_timestamp(rlp.val_at(6)?);
header.set_extra_data(rlp.val_at(7)?); header.set_extra_data(rlp.val_at(7)?);
let transactions = rlp.val_at(8)?; let transactions = rlp.list_at(8)?;
let uncles: Vec<Header> = rlp.val_at(9)?; let uncles: Vec<Header> = rlp.list_at(9)?;
header.set_transactions_root(ordered_trie_root( header.set_transactions_root(ordered_trie_root(
rlp.at(8)?.iter().map(|r| r.as_raw().to_owned()) rlp.at(8)?.iter().map(|r| r.as_raw().to_owned())
@ -114,7 +114,7 @@ impl AbridgedBlock {
header.set_uncles_hash(uncles_rlp.as_raw().sha3()); header.set_uncles_hash(uncles_rlp.as_raw().sha3());
let mut seal_fields = Vec::new(); let mut seal_fields = Vec::new();
for i in (HEADER_FIELDS + BLOCK_FIELDS)..rlp.item_count() { for i in (HEADER_FIELDS + BLOCK_FIELDS)..rlp.item_count()? {
let seal_rlp = rlp.at(i)?; let seal_rlp = rlp.at(i)?;
seal_fields.push(seal_rlp.as_raw().to_owned()); seal_fields.push(seal_rlp.as_raw().to_owned());
} }


@ -53,6 +53,8 @@ pub enum Error {
Decoder(DecoderError), Decoder(DecoderError),
/// Io error. /// Io error.
Io(::std::io::Error), Io(::std::io::Error),
/// Snapshot version is not supported.
VersionNotSupported(u64),
} }
impl fmt::Display for Error { impl fmt::Display for Error {
@ -73,6 +75,7 @@ impl fmt::Display for Error {
Error::Io(ref err) => err.fmt(f), Error::Io(ref err) => err.fmt(f),
Error::Decoder(ref err) => err.fmt(f), Error::Decoder(ref err) => err.fmt(f),
Error::Trie(ref err) => err.fmt(f), Error::Trie(ref err) => err.fmt(f),
Error::VersionNotSupported(ref ver) => write!(f, "Snapshot version {} is not supported.", ver),
} }
} }
} }


@ -27,10 +27,12 @@ use std::path::{Path, PathBuf};
use util::Bytes; use util::Bytes;
use util::hash::H256; use util::hash::H256;
use rlp::{self, Encodable, RlpStream, UntrustedRlp, View}; use rlp::{self, Encodable, RlpStream, UntrustedRlp};
use super::ManifestData; use super::ManifestData;
const SNAPSHOT_VERSION: u64 = 2;
/// Something which can write snapshots. /// Something which can write snapshots.
/// Writing the same chunk multiple times will lead to implementation-defined /// Writing the same chunk multiple times will lead to implementation-defined
/// behavior, and is not advised. /// behavior, and is not advised.
@ -57,12 +59,10 @@ impl Encodable for ChunkInfo {
} }
impl rlp::Decodable for ChunkInfo { impl rlp::Decodable for ChunkInfo {
fn decode<D: rlp::Decoder>(decoder: &D) -> Result<Self, rlp::DecoderError> { fn decode(rlp: &UntrustedRlp) -> Result<Self, rlp::DecoderError> {
let d = decoder.as_rlp(); let hash = rlp.val_at(0)?;
let len = rlp.val_at(1)?;
let hash = d.val_at(0)?; let off = rlp.val_at(2)?;
let len = d.val_at(1)?;
let off = d.val_at(2)?;
Ok(ChunkInfo(hash, len, off)) Ok(ChunkInfo(hash, len, off))
} }
} }
@ -120,8 +120,9 @@ impl SnapshotWriter for PackedWriter {
fn finish(mut self, manifest: ManifestData) -> io::Result<()> { fn finish(mut self, manifest: ManifestData) -> io::Result<()> {
// we ignore the hashes fields of the manifest under the assumption that // we ignore the hashes fields of the manifest under the assumption that
// they are consistent with ours. // they are consistent with ours.
let mut stream = RlpStream::new_list(5); let mut stream = RlpStream::new_list(6);
stream stream
.append(&SNAPSHOT_VERSION)
.append_list(&self.state_hashes) .append_list(&self.state_hashes)
.append_list(&self.block_hashes) .append_list(&self.block_hashes)
.append(&manifest.state_root) .append(&manifest.state_root)
@ -223,7 +224,7 @@ impl PackedReader {
/// Create a new `PackedReader` for the file at the given path. /// Create a new `PackedReader` for the file at the given path.
/// This will fail if any io errors are encountered or the file /// This will fail if any io errors are encountered or the file
/// is not a valid packed snapshot. /// is not a valid packed snapshot.
pub fn new(path: &Path) -> Result<Option<Self>, ::error::Error> { pub fn new(path: &Path) -> Result<Option<Self>, ::snapshot::error::Error> {
let mut file = File::open(path)?; let mut file = File::open(path)?;
let file_len = file.metadata()?.len(); let file_len = file.metadata()?.len();
if file_len < 8 { if file_len < 8 {
@ -257,15 +258,26 @@ impl PackedReader {
let rlp = UntrustedRlp::new(&manifest_buf); let rlp = UntrustedRlp::new(&manifest_buf);
let state: Vec<ChunkInfo> = rlp.val_at(0)?; let (start, version) = if rlp.item_count()? == 5 {
let blocks: Vec<ChunkInfo> = rlp.val_at(1)?; (0, 1)
} else {
(1, rlp.val_at(0)?)
};
if version > SNAPSHOT_VERSION {
return Err(::snapshot::error::Error::VersionNotSupported(version));
}
let state: Vec<ChunkInfo> = rlp.list_at(0 + start)?;
let blocks: Vec<ChunkInfo> = rlp.list_at(1 + start)?;
let manifest = ManifestData { let manifest = ManifestData {
version: version,
state_hashes: state.iter().map(|c| c.0).collect(), state_hashes: state.iter().map(|c| c.0).collect(),
block_hashes: blocks.iter().map(|c| c.0).collect(), block_hashes: blocks.iter().map(|c| c.0).collect(),
state_root: rlp.val_at(2)?, state_root: rlp.val_at(2 + start)?,
block_number: rlp.val_at(3)?, block_number: rlp.val_at(3 + start)?,
block_hash: rlp.val_at(4)?, block_hash: rlp.val_at(4 + start)?,
}; };
Ok(Some(PackedReader { Ok(Some(PackedReader {
@ -348,7 +360,7 @@ mod tests {
use util::sha3::Hashable; use util::sha3::Hashable;
use snapshot::ManifestData; use snapshot::ManifestData;
use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader}; use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader, SNAPSHOT_VERSION};
const STATE_CHUNKS: &'static [&'static [u8]] = &[b"dog", b"cat", b"hello world", b"hi", b"notarealchunk"]; const STATE_CHUNKS: &'static [&'static [u8]] = &[b"dog", b"cat", b"hello world", b"hi", b"notarealchunk"];
const BLOCK_CHUNKS: &'static [&'static [u8]] = &[b"hello!", b"goodbye!", b"abcdefg", b"hijklmnop", b"qrstuvwxy", b"and", b"z"]; const BLOCK_CHUNKS: &'static [&'static [u8]] = &[b"hello!", b"goodbye!", b"abcdefg", b"hijklmnop", b"qrstuvwxy", b"and", b"z"];
@ -374,6 +386,7 @@ mod tests {
} }
let manifest = ManifestData { let manifest = ManifestData {
version: SNAPSHOT_VERSION,
state_hashes: state_hashes, state_hashes: state_hashes,
block_hashes: block_hashes, block_hashes: block_hashes,
state_root: b"notarealroot".sha3(), state_root: b"notarealroot".sha3(),
@ -412,6 +425,7 @@ mod tests {
} }
let manifest = ManifestData { let manifest = ManifestData {
version: SNAPSHOT_VERSION,
state_hashes: state_hashes, state_hashes: state_hashes,
block_hashes: block_hashes, block_hashes: block_hashes,
state_root: b"notarealroot".sha3(), state_root: b"notarealroot".sha3(),


@ -37,7 +37,7 @@ use util::journaldb::{self, Algorithm, JournalDB};
use util::kvdb::Database; use util::kvdb::Database;
use util::trie::{TrieDB, TrieDBMut, Trie, TrieMut}; use util::trie::{TrieDB, TrieDBMut, Trie, TrieMut};
use util::sha3::SHA3_NULL_RLP; use util::sha3::SHA3_NULL_RLP;
use rlp::{RlpStream, UntrustedRlp, View}; use rlp::{RlpStream, UntrustedRlp};
use bloom_journal::Bloom; use bloom_journal::Bloom;
use self::block::AbridgedBlock; use self::block::AbridgedBlock;
@ -56,6 +56,7 @@ pub use self::traits::SnapshotService;
pub use self::watcher::Watcher; pub use self::watcher::Watcher;
pub use types::snapshot_manifest::ManifestData; pub use types::snapshot_manifest::ManifestData;
pub use types::restoration_status::RestorationStatus; pub use types::restoration_status::RestorationStatus;
pub use types::basic_account::BasicAccount;
pub mod io; pub mod io;
pub mod service; pub mod service;
@ -82,6 +83,9 @@ mod traits {
// Try to have chunks be around 4MB (before compression) // Try to have chunks be around 4MB (before compression)
const PREFERRED_CHUNK_SIZE: usize = 4 * 1024 * 1024; const PREFERRED_CHUNK_SIZE: usize = 4 * 1024 * 1024;
// Cap the number of storage entries in a single account record so records stay roughly within the preferred chunk size (before compression)
const MAX_STORAGE_ENTRIES_PER_ACCOUNT_RECORD: usize = 80_000;
// How many blocks to include in a snapshot, starting from the head of the chain. // How many blocks to include in a snapshot, starting from the head of the chain.
const SNAPSHOT_BLOCKS: u64 = 30000; const SNAPSHOT_BLOCKS: u64 = 30000;
@ -147,6 +151,7 @@ pub fn take_snapshot<W: SnapshotWriter + Send>(
info!("produced {} state chunks and {} block chunks.", state_hashes.len(), block_hashes.len()); info!("produced {} state chunks and {} block chunks.", state_hashes.len(), block_hashes.len());
let manifest_data = ManifestData { let manifest_data = ManifestData {
version: 2,
state_hashes: state_hashes, state_hashes: state_hashes,
block_hashes: block_hashes, block_hashes: block_hashes,
state_root: *state_root, state_root: *state_root,
@ -300,14 +305,14 @@ impl<'a> StateChunker<'a> {
// //
// If the buffer is greater than the desired chunk size, // If the buffer is greater than the desired chunk size,
// this will write out the data to disk. // this will write out the data to disk.
fn push(&mut self, account_hash: Bytes, data: Bytes) -> Result<(), Error> { fn push(&mut self, account_hash: Bytes, data: Bytes, force_chunk: bool) -> Result<(), Error> {
let pair = { let pair = {
let mut stream = RlpStream::new_list(2); let mut stream = RlpStream::new_list(2);
stream.append(&account_hash).append_raw(&data, 1); stream.append(&account_hash).append_raw(&data, 1);
stream.out() stream.out()
}; };
if self.cur_size + pair.len() >= PREFERRED_CHUNK_SIZE { if force_chunk || self.cur_size + pair.len() >= PREFERRED_CHUNK_SIZE {
self.write_chunk()?; self.write_chunk()?;
} }
@ -372,8 +377,10 @@ pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter +
let account_db = AccountDB::from_hash(db, account_key_hash); let account_db = AccountDB::from_hash(db, account_key_hash);
let fat_rlp = account::to_fat_rlp(&account, &account_db, &mut used_code)?; let fat_rlps = account::to_fat_rlps(&account, &account_db, &mut used_code, MAX_STORAGE_ENTRIES_PER_ACCOUNT_RECORD)?;
chunker.push(account_key, fat_rlp)?; for (i, fat_rlp) in fat_rlps.into_iter().enumerate() {
chunker.push(account_key.clone(), fat_rlp, i > 0)?;
}
} }
if chunker.cur_size != 0 { if chunker.cur_size != 0 {
@ -390,6 +397,7 @@ pub struct StateRebuilder {
known_code: HashMap<H256, H256>, // code hashes mapped to first account with this code. known_code: HashMap<H256, H256>, // code hashes mapped to first account with this code.
missing_code: HashMap<H256, Vec<H256>>, // maps code hashes to lists of accounts missing that code. missing_code: HashMap<H256, Vec<H256>>, // maps code hashes to lists of accounts missing that code.
bloom: Bloom, bloom: Bloom,
known_storage_roots: HashMap<H256, H256>, // maps account hashes to last known storage root. Only filled for last account per chunk.
} }
impl StateRebuilder { impl StateRebuilder {
@ -401,6 +409,7 @@ impl StateRebuilder {
known_code: HashMap::new(), known_code: HashMap::new(),
missing_code: HashMap::new(), missing_code: HashMap::new(),
bloom: StateDB::load_bloom(&*db), bloom: StateDB::load_bloom(&*db),
known_storage_roots: HashMap::new(),
} }
} }
@ -408,16 +417,17 @@ impl StateRebuilder {
pub fn feed(&mut self, chunk: &[u8], flag: &AtomicBool) -> Result<(), ::error::Error> { pub fn feed(&mut self, chunk: &[u8], flag: &AtomicBool) -> Result<(), ::error::Error> {
let rlp = UntrustedRlp::new(chunk); let rlp = UntrustedRlp::new(chunk);
let empty_rlp = StateAccount::new_basic(U256::zero(), U256::zero()).rlp(); let empty_rlp = StateAccount::new_basic(U256::zero(), U256::zero()).rlp();
let mut pairs = Vec::with_capacity(rlp.item_count()); let mut pairs = Vec::with_capacity(rlp.item_count()?);
// initialize the pairs vector with empty values so we have slots to write into. // initialize the pairs vector with empty values so we have slots to write into.
pairs.resize(rlp.item_count(), (H256::new(), Vec::new())); pairs.resize(rlp.item_count()?, (H256::new(), Vec::new()));
let status = rebuild_accounts( let status = rebuild_accounts(
self.db.as_hashdb_mut(), self.db.as_hashdb_mut(),
rlp, rlp,
&mut pairs, &mut pairs,
&self.known_code, &self.known_code,
&mut self.known_storage_roots,
flag flag
)?; )?;
@ -464,14 +474,18 @@ impl StateRebuilder {
Ok(()) Ok(())
} }
/// Check for accounts missing code. Once all chunks have been fed, there should /// Finalize the restoration. Check for accounts missing code and make a dummy
/// be none. /// journal entry.
pub fn check_missing(self) -> Result<(), Error> { /// Once all chunks have been fed, there should be nothing missing.
pub fn finalize(mut self, era: u64, id: H256) -> Result<(), ::error::Error> {
let missing = self.missing_code.keys().cloned().collect::<Vec<_>>(); let missing = self.missing_code.keys().cloned().collect::<Vec<_>>();
match missing.is_empty() { if !missing.is_empty() { return Err(Error::MissingCode(missing).into()) }
true => Ok(()),
false => Err(Error::MissingCode(missing)), let mut batch = self.db.backing().transaction();
} self.db.journal_under(&mut batch, era, &id)?;
self.db.backing().write_buffered(batch);
Ok(())
} }
/// Get the state root of the rebuilder. /// Get the state root of the rebuilder.
@ -492,10 +506,11 @@ fn rebuild_accounts(
account_fat_rlps: UntrustedRlp, account_fat_rlps: UntrustedRlp,
out_chunk: &mut [(H256, Bytes)], out_chunk: &mut [(H256, Bytes)],
known_code: &HashMap<H256, H256>, known_code: &HashMap<H256, H256>,
known_storage_roots: &mut HashMap<H256, H256>,
abort_flag: &AtomicBool, abort_flag: &AtomicBool,
) -> Result<RebuiltStatus, ::error::Error> { ) -> Result<RebuiltStatus, ::error::Error> {
let mut status = RebuiltStatus::default(); let mut status = RebuiltStatus::default();
for (account_rlp, out) in account_fat_rlps.into_iter().zip(out_chunk) { for (account_rlp, out) in account_fat_rlps.into_iter().zip(out_chunk.iter_mut()) {
if !abort_flag.load(Ordering::SeqCst) { return Err(Error::RestorationAborted.into()) } if !abort_flag.load(Ordering::SeqCst) { return Err(Error::RestorationAborted.into()) }
let hash: H256 = account_rlp.val_at(0)?; let hash: H256 = account_rlp.val_at(0)?;
@ -506,7 +521,8 @@ fn rebuild_accounts(
// fill out the storage trie and code while decoding. // fill out the storage trie and code while decoding.
let (acc, maybe_code) = { let (acc, maybe_code) = {
let mut acct_db = AccountDBMut::from_hash(db, hash); let mut acct_db = AccountDBMut::from_hash(db, hash);
account::from_fat_rlp(&mut acct_db, fat_rlp)? let storage_root = known_storage_roots.get(&hash).cloned().unwrap_or(H256::zero());
account::from_fat_rlp(&mut acct_db, fat_rlp, storage_root)?
}; };
let code_hash = acc.code_hash.clone(); let code_hash = acc.code_hash.clone();
@ -538,6 +554,12 @@ fn rebuild_accounts(
*out = (hash, thin_rlp); *out = (hash, thin_rlp);
} }
if let Some(&(ref hash, ref rlp)) = out_chunk.iter().last() {
known_storage_roots.insert(*hash, ::rlp::decode::<BasicAccount>(rlp).storage_root);
}
if let Some(&(ref hash, ref rlp)) = out_chunk.iter().next() {
known_storage_roots.insert(*hash, ::rlp::decode::<BasicAccount>(rlp).storage_root);
}
Ok(status) Ok(status)
} }
@ -601,7 +623,7 @@ impl BlockRebuilder {
use util::triehash::ordered_trie_root; use util::triehash::ordered_trie_root;
let rlp = UntrustedRlp::new(chunk); let rlp = UntrustedRlp::new(chunk);
let item_count = rlp.item_count(); let item_count = rlp.item_count()?;
let num_blocks = (item_count - 3) as u64; let num_blocks = (item_count - 3) as u64;
trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3); trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3);
@ -621,7 +643,7 @@ impl BlockRebuilder {
let pair = rlp.at(idx)?; let pair = rlp.at(idx)?;
let abridged_rlp = pair.at(0)?.as_raw().to_owned(); let abridged_rlp = pair.at(0)?.as_raw().to_owned();
let abridged_block = AbridgedBlock::from_raw(abridged_rlp); let abridged_block = AbridgedBlock::from_raw(abridged_rlp);
let receipts: Vec<::receipt::Receipt> = pair.val_at(1)?; let receipts: Vec<::receipt::Receipt> = pair.list_at(1)?;
let receipts_root = ordered_trie_root( let receipts_root = ordered_trie_root(
pair.at(1)?.iter().map(|r| r.as_raw().to_owned()) pair.at(1)?.iter().map(|r| r.as_raw().to_owned())
); );


@ -166,7 +166,7 @@ impl Restoration {
} }
// check for missing code. // check for missing code.
self.state.check_missing()?; self.state.finalize(self.manifest.block_number, self.manifest.block_hash)?;
// connect out-of-order chunks and verify chain integrity. // connect out-of-order chunks and verify chain integrity.
self.blocks.finalize(self.canonical_hashes)?; self.blocks.finalize(self.canonical_hashes)?;
@ -656,6 +656,7 @@ mod tests {
assert_eq!(service.status(), RestorationStatus::Inactive); assert_eq!(service.status(), RestorationStatus::Inactive);
let manifest = ManifestData { let manifest = ManifestData {
version: 2,
state_hashes: vec![], state_hashes: vec![],
block_hashes: vec![], block_hashes: vec![],
state_root: Default::default(), state_root: Default::default(),

View File

@ -63,6 +63,7 @@ fn chunk_and_restore(amount: u64) {
let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap()); let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap());
let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap(); let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap();
let manifest = ::snapshot::ManifestData { let manifest = ::snapshot::ManifestData {
version: 2,
state_hashes: Vec::new(), state_hashes: Vec::new(),
block_hashes: block_hashes, block_hashes: block_hashes,
state_root: ::util::sha3::SHA3_NULL_RLP, state_root: ::util::sha3::SHA3_NULL_RLP,
@ -125,6 +126,7 @@ fn checks_flag() {
let chain = BlockChain::new(Default::default(), &genesis, db.clone()); let chain = BlockChain::new(Default::default(), &genesis, db.clone());
let manifest = ::snapshot::ManifestData { let manifest = ::snapshot::ManifestData {
version: 2,
state_hashes: Vec::new(), state_hashes: Vec::new(),
block_hashes: Vec::new(), block_hashes: Vec::new(),
state_root: ::util::sha3::SHA3_NULL_RLP, state_root: ::util::sha3::SHA3_NULL_RLP,


@ -27,6 +27,7 @@ use super::ManifestData;
#[test] #[test]
fn manifest_rlp() { fn manifest_rlp() {
let manifest = ManifestData { let manifest = ManifestData {
version: 2,
block_hashes: Vec::new(), block_hashes: Vec::new(),
state_hashes: Vec::new(), state_hashes: Vec::new(),
block_number: 1234567, block_number: 1234567,


@ -122,6 +122,7 @@ fn guards_delete_folders() {
path.push("restoration"); path.push("restoration");
let manifest = ManifestData { let manifest = ManifestData {
version: 2,
state_hashes: vec![], state_hashes: vec![],
block_hashes: vec![], block_hashes: vec![],
block_number: 0, block_number: 0,


@ -58,10 +58,11 @@ fn snap_and_restore() {
let state_hashes = chunk_state(&old_db, &state_root, &writer, &Progress::default()).unwrap(); let state_hashes = chunk_state(&old_db, &state_root, &writer, &Progress::default()).unwrap();
writer.into_inner().finish(::snapshot::ManifestData { writer.into_inner().finish(::snapshot::ManifestData {
version: 2,
state_hashes: state_hashes, state_hashes: state_hashes,
block_hashes: Vec::new(), block_hashes: Vec::new(),
state_root: state_root, state_root: state_root,
block_number: 0, block_number: 1000,
block_hash: H256::default(), block_hash: H256::default(),
}).unwrap(); }).unwrap();
@ -69,7 +70,7 @@ fn snap_and_restore() {
db_path.push("db"); db_path.push("db");
let db = { let db = {
let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap()); let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());
let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::Archive); let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::OverlayRecent);
let reader = PackedReader::new(&snap_file).unwrap().unwrap(); let reader = PackedReader::new(&snap_file).unwrap().unwrap();
let flag = AtomicBool::new(true); let flag = AtomicBool::new(true);
@ -82,12 +83,13 @@ fn snap_and_restore() {
} }
assert_eq!(rebuilder.state_root(), state_root); assert_eq!(rebuilder.state_root(), state_root);
rebuilder.check_missing().unwrap(); rebuilder.finalize(1000, H256::default()).unwrap();
new_db new_db
}; };
let new_db = journaldb::new(db, Algorithm::Archive, ::db::COL_STATE); let new_db = journaldb::new(db, Algorithm::OverlayRecent, ::db::COL_STATE);
assert_eq!(new_db.earliest_era(), Some(1000));
compare_dbs(&old_db, new_db.as_hashdb()); compare_dbs(&old_db, new_db.as_hashdb());
} }
@ -120,10 +122,10 @@ fn get_code_from_prev_chunk() {
let mut db = MemoryDB::new(); let mut db = MemoryDB::new();
AccountDBMut::from_hash(&mut db, hash).insert(&code[..]); AccountDBMut::from_hash(&mut db, hash).insert(&code[..]);
let fat_rlp = account::to_fat_rlp(&acc, &AccountDB::from_hash(&db, hash), &mut used_code).unwrap(); let fat_rlp = account::to_fat_rlps(&acc, &AccountDB::from_hash(&db, hash), &mut used_code, usize::max_value()).unwrap();
let mut stream = RlpStream::new_list(1); let mut stream = RlpStream::new_list(1);
stream.begin_list(2).append(&hash).append_raw(&fat_rlp, 1); stream.begin_list(2).append(&hash).append_raw(&fat_rlp[0], 1);
stream.out() stream.out()
}; };
@ -134,13 +136,18 @@ fn get_code_from_prev_chunk() {
let db_cfg = DatabaseConfig::with_columns(::db::NUM_COLUMNS); let db_cfg = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap()); let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());
let mut rebuilder = StateRebuilder::new(new_db, Algorithm::Archive); {
let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::OverlayRecent);
let flag = AtomicBool::new(true); let flag = AtomicBool::new(true);
rebuilder.feed(&chunk1, &flag).unwrap(); rebuilder.feed(&chunk1, &flag).unwrap();
rebuilder.feed(&chunk2, &flag).unwrap(); rebuilder.feed(&chunk2, &flag).unwrap();
rebuilder.check_missing().unwrap(); rebuilder.finalize(1000, H256::random()).unwrap();
}
let state_db = journaldb::new(new_db, Algorithm::OverlayRecent, ::db::COL_STATE);
assert_eq!(state_db.earliest_era(), Some(1000));
} }
#[test] #[test]
@ -164,6 +171,7 @@ fn checks_flag() {
let state_hashes = chunk_state(&old_db, &state_root, &writer, &Progress::default()).unwrap(); let state_hashes = chunk_state(&old_db, &state_root, &writer, &Progress::default()).unwrap();
writer.into_inner().finish(::snapshot::ManifestData { writer.into_inner().finish(::snapshot::ManifestData {
version: 2,
state_hashes: state_hashes, state_hashes: state_hashes,
block_hashes: Vec::new(), block_hashes: Vec::new(),
state_root: state_root, state_root: state_root,
@ -175,7 +183,7 @@ fn checks_flag() {
db_path.push("db"); db_path.push("db");
{ {
let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap()); let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());
let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::Archive); let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::OverlayRecent);
let reader = PackedReader::new(&snap_file).unwrap().unwrap(); let reader = PackedReader::new(&snap_file).unwrap().unwrap();
let flag = AtomicBool::new(false); let flag = AtomicBool::new(false);


@ -34,7 +34,7 @@ use super::genesis::Genesis;
use super::seal::Generic as GenericSeal; use super::seal::Generic as GenericSeal;
use ethereum; use ethereum;
use ethjson; use ethjson;
use rlp::{Rlp, RlpStream, View}; use rlp::{Rlp, RlpStream};
/// Parameters common to all engines. /// Parameters common to all engines.
#[derive(Debug, PartialEq, Clone, Default)] #[derive(Debug, PartialEq, Clone, Default)]
@ -326,7 +326,7 @@ impl Spec {
pub fn load<R>(reader: R) -> Result<Self, String> where R: Read { pub fn load<R>(reader: R) -> Result<Self, String> where R: Read {
match ethjson::spec::Spec::load(reader) { match ethjson::spec::Spec::load(reader) {
Ok(spec) => Ok(spec.into()), Ok(spec) => Ok(spec.into()),
_ => Err("Spec json is invalid".into()), Err(e) => Err(format!("Spec json is invalid: {}", e)),
} }
} }
@ -360,6 +360,10 @@ impl Spec {
/// Account is marked with `reportBenign` it can be checked as disliked with "0xd8f2e0bf". /// Account is marked with `reportBenign` it can be checked as disliked with "0xd8f2e0bf".
/// Validator can be removed with `reportMalicious`. /// Validator can be removed with `reportMalicious`.
pub fn new_validator_contract() -> Self { load_bundled!("validator_contract") } pub fn new_validator_contract() -> Self { load_bundled!("validator_contract") }
/// Create a new Spec with BasicAuthority which uses multiple validator sets changing with height.
/// Account with secrets "0".sha3() is the validator for block 1 and with "1".sha3() onwards.
pub fn new_validator_multi() -> Self { load_bundled!("validator_multi") }
} }
#[cfg(test)] #[cfg(test)]


@ -438,18 +438,19 @@ impl Account {
/// trie. /// trie.
/// `storage_key` is the hash of the desired storage key, meaning /// `storage_key` is the hash of the desired storage key, meaning
/// this will only work correctly under a secure trie. /// this will only work correctly under a secure trie.
/// Returns a merkle proof of the storage trie node with all nodes before `from_level` pub fn prove_storage(&self, db: &HashDB, storage_key: H256) -> Result<(Vec<Bytes>, H256), Box<TrieError>> {
/// omitted.
pub fn prove_storage(&self, db: &HashDB, storage_key: H256, from_level: u32) -> Result<Vec<Bytes>, Box<TrieError>> {
use util::trie::{Trie, TrieDB}; use util::trie::{Trie, TrieDB};
use util::trie::recorder::Recorder; use util::trie::recorder::Recorder;
let mut recorder = Recorder::with_depth(from_level); let mut recorder = Recorder::new();
let trie = TrieDB::new(db, &self.storage_root)?; let trie = TrieDB::new(db, &self.storage_root)?;
let _ = trie.get_with(&storage_key, &mut recorder)?; let item: U256 = {
let query = (&mut recorder, ::rlp::decode);
trie.get_with(&storage_key, query)?.unwrap_or_else(U256::zero)
};
Ok(recorder.drain().into_iter().map(|r| r.data).collect()) Ok((recorder.drain().into_iter().map(|r| r.data).collect(), item.into()))
} }
} }
@ -461,7 +462,7 @@ impl fmt::Debug for Account {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use rlp::{UntrustedRlp, RlpType, View, Compressible}; use rlp::{UntrustedRlp, RlpType, Compressible};
use util::*; use util::*;
use super::*; use super::*;
use account_db::*; use account_db::*;


@ -31,6 +31,7 @@ use factory::Factories;
use trace::FlatTrace; use trace::FlatTrace;
use pod_account::*; use pod_account::*;
use pod_state::{self, PodState}; use pod_state::{self, PodState};
use types::basic_account::BasicAccount;
use types::executed::{Executed, ExecutionError}; use types::executed::{Executed, ExecutionError};
use types::state_diff::StateDiff; use types::state_diff::StateDiff;
use transaction::SignedTransaction; use transaction::SignedTransaction;
@ -857,47 +858,43 @@ impl<B: Backend> State<B> {
// State proof implementations; useful for light client protocols. // State proof implementations; useful for light client protocols.
impl<B: Backend> State<B> { impl<B: Backend> State<B> {
/// Prove an account's existence or nonexistence in the state trie. /// Prove an account's existence or nonexistence in the state trie.
/// Returns a merkle proof of the account's trie node with all nodes before `from_level` /// Returns a merkle proof of the account's trie node omitted or an encountered trie error.
/// omitted or an encountered trie error. /// If the account doesn't exist in the trie, prove that and return defaults.
/// Requires a secure trie to be used for accurate results. /// Requires a secure trie to be used for accurate results.
/// `account_key` == sha3(address) /// `account_key` == sha3(address)
pub fn prove_account(&self, account_key: H256, from_level: u32) -> trie::Result<Vec<Bytes>> { pub fn prove_account(&self, account_key: H256) -> trie::Result<(Vec<Bytes>, BasicAccount)> {
let mut recorder = Recorder::with_depth(from_level); let mut recorder = Recorder::new();
let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?; let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
trie.get_with(&account_key, &mut recorder)?; let maybe_account: Option<BasicAccount> = {
let query = (&mut recorder, ::rlp::decode);
trie.get_with(&account_key, query)?
};
let account = maybe_account.unwrap_or_else(|| BasicAccount {
balance: 0.into(),
nonce: self.account_start_nonce,
code_hash: SHA3_EMPTY,
storage_root: ::util::sha3::SHA3_NULL_RLP,
});
Ok(recorder.drain().into_iter().map(|r| r.data).collect()) Ok((recorder.drain().into_iter().map(|r| r.data).collect(), account))
} }
/// Prove an account's storage key's existence or nonexistence in the state. /// Prove an account's storage key's existence or nonexistence in the state.
/// Returns a merkle proof of the account's storage trie with all nodes before /// Returns a merkle proof of the account's storage trie.
/// `from_level` omitted. Requires a secure trie to be used for correctness. /// Requires a secure trie to be used for correctness.
/// `account_key` == sha3(address) /// `account_key` == sha3(address)
/// `storage_key` == sha3(key) /// `storage_key` == sha3(key)
pub fn prove_storage(&self, account_key: H256, storage_key: H256, from_level: u32) -> trie::Result<Vec<Bytes>> { pub fn prove_storage(&self, account_key: H256, storage_key: H256) -> trie::Result<(Vec<Bytes>, H256)> {
// TODO: probably could look into cache somehow but it's keyed by // TODO: probably could look into cache somehow but it's keyed by
// address, not sha3(address). // address, not sha3(address).
let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?; let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
let acc = match trie.get_with(&account_key, Account::from_rlp)? { let acc = match trie.get_with(&account_key, Account::from_rlp)? {
Some(acc) => acc, Some(acc) => acc,
None => return Ok(Vec::new()), None => return Ok((Vec::new(), H256::new())),
}; };
let account_db = self.factories.accountdb.readonly(self.db.as_hashdb(), account_key); let account_db = self.factories.accountdb.readonly(self.db.as_hashdb(), account_key);
acc.prove_storage(account_db.as_hashdb(), storage_key, from_level) acc.prove_storage(account_db.as_hashdb(), storage_key)
}
/// Get code by address hash.
/// Only works when backed by a secure trie.
pub fn code_by_address_hash(&self, account_key: H256) -> trie::Result<Option<Bytes>> {
let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
let mut acc = match trie.get_with(&account_key, Account::from_rlp)? {
Some(acc) => acc,
None => return Ok(None),
};
let account_db = self.factories.accountdb.readonly(self.db.as_hashdb(), account_key);
Ok(acc.cache_code(account_db.as_hashdb()).map(|c| (&*c).clone()))
} }
} }
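With this change both proof methods hand back the recorded trie nodes together with the decoded result, and a missing account is answered with default values rather than an empty proof. The toy below (std only, made-up ToyAccount type, no real trie or RLP) sketches that return contract:

use std::collections::BTreeMap;

#[derive(Clone, Debug, PartialEq)]
struct ToyAccount { nonce: u64, balance: u64 }

fn prove_toy(state: &BTreeMap<u64, ToyAccount>, key: u64) -> (Vec<String>, ToyAccount) {
    // Stand-in for the merkle nodes a Recorder would collect along the lookup.
    let proof = vec![format!("node-for-{}", key)];
    // Absent keys are answered with a default account, mirroring prove_account.
    let account = state.get(&key).cloned().unwrap_or(ToyAccount { nonce: 0, balance: 0 });
    (proof, account)
}

fn main() {
    let mut state = BTreeMap::new();
    state.insert(7, ToyAccount { nonce: 1, balance: 100 });
    assert_eq!(prove_toy(&state, 7).1.balance, 100);
    assert_eq!(prove_toy(&state, 8).1, ToyAccount { nonce: 0, balance: 0 });
}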
@ -946,7 +943,7 @@ mod tests {
use env_info::EnvInfo; use env_info::EnvInfo;
use spec::*; use spec::*;
use transaction::*; use transaction::*;
use util::log::init_log; use ethcore_logger::init_log;
use trace::{FlatTrace, TraceError, trace}; use trace::{FlatTrace, TraceError, trace};
use types::executed::CallType; use types::executed::CallType;


@ -457,7 +457,7 @@ mod tests {
use util::{U256, H256, Address, DBTransaction}; use util::{U256, H256, Address, DBTransaction};
use tests::helpers::*; use tests::helpers::*;
use state::{Account, Backend}; use state::{Account, Backend};
use util::log::init_log; use ethcore_logger::init_log;
#[test] #[test]
fn state_db_smoke() { fn state_db_smoke() {


@ -25,7 +25,6 @@ use types::filter::Filter;
use util::*; use util::*;
use devtools::*; use devtools::*;
use miner::Miner; use miner::Miner;
use rlp::View;
use spec::Spec; use spec::Spec;
use views::BlockView; use views::BlockView;
use ethkey::{KeyPair, Secret}; use ethkey::{KeyPair, Secret};


@ -438,6 +438,7 @@ pub fn get_default_ethash_params() -> EthashParams{
minimum_difficulty: U256::from(131072), minimum_difficulty: U256::from(131072),
difficulty_bound_divisor: U256::from(2048), difficulty_bound_divisor: U256::from(2048),
difficulty_increment_divisor: 10, difficulty_increment_divisor: 10,
metropolis_difficulty_increment_divisor: 9,
duration_limit: 13, duration_limit: 13,
block_reward: U256::from(0), block_reward: U256::from(0),
registrar: "0000000000000000000000000000000000000001".into(), registrar: "0000000000000000000000000000000000000001".into(),
@ -448,6 +449,7 @@ pub fn get_default_ethash_params() -> EthashParams{
difficulty_hardfork_transition: u64::max_value(), difficulty_hardfork_transition: u64::max_value(),
difficulty_hardfork_bound_divisor: U256::from(0), difficulty_hardfork_bound_divisor: U256::from(0),
bomb_defuse_transition: u64::max_value(), bomb_defuse_transition: u64::max_value(),
eip100b_transition: u64::max_value(),
eip150_transition: u64::max_value(), eip150_transition: u64::max_value(),
eip155_transition: u64::max_value(), eip155_transition: u64::max_value(),
eip160_transition: u64::max_value(), eip160_transition: u64::max_value(),


@ -60,8 +60,8 @@ impl Into<BloomGroup> for BlockTracesBloomGroup {
} }
impl Decodable for BlockTracesBloom { impl Decodable for BlockTracesBloom {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Decodable::decode(decoder).map(BlockTracesBloom) LogBloom::decode(rlp).map(BlockTracesBloom)
} }
} }
@ -72,8 +72,8 @@ impl Encodable for BlockTracesBloom {
} }
impl Decodable for BlockTracesBloomGroup { impl Decodable for BlockTracesBloomGroup {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let blooms = Decodable::decode(decoder)?; let blooms = rlp.as_list()?;
let group = BlockTracesBloomGroup { let group = BlockTracesBloomGroup {
blooms: blooms blooms: blooms
}; };


@ -43,8 +43,7 @@ impl Encodable for BasicAccount {
} }
impl Decodable for BasicAccount { impl Decodable for BasicAccount {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let rlp = decoder.as_rlp();
Ok(BasicAccount { Ok(BasicAccount {
nonce: rlp.val_at(0)?, nonce: rlp.val_at(0)?,
balance: rlp.val_at(1)?, balance: rlp.val_at(1)?,


@ -29,7 +29,7 @@ use transaction::UnverifiedTransaction;
use views; use views;
use util::{Address, Hashable, H256, H2048, U256, HeapSizeOf}; use util::{Address, Hashable, H256, H2048, U256, HeapSizeOf};
use rlp::{Rlp, View}; use rlp::Rlp;
/// Owning header view. /// Owning header view.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]


@ -51,8 +51,8 @@ impl Encodable for CallType {
} }
impl Decodable for CallType { impl Decodable for CallType {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
decoder.as_rlp().as_val().and_then(|v| Ok(match v { rlp.as_val().and_then(|v| Ok(match v {
0u32 => CallType::None, 0u32 => CallType::None,
1 => CallType::Call, 1 => CallType::Call,
2 => CallType::CallCode, 2 => CallType::CallCode,


@ -47,12 +47,11 @@ impl Encodable for LogEntry {
} }
impl Decodable for LogEntry { impl Decodable for LogEntry {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let entry = LogEntry { let entry = LogEntry {
address: d.val_at(0)?, address: rlp.val_at(0)?,
topics: d.val_at(1)?, topics: rlp.list_at(1)?,
data: d.val_at(2)?, data: rlp.val_at(2)?,
}; };
Ok(entry) Ok(entry)
} }


@ -65,21 +65,20 @@ impl Encodable for Receipt {
} }
impl Decodable for Receipt { impl Decodable for Receipt {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp(); if rlp.item_count()? == 3 {
if d.item_count() == 3 {
Ok(Receipt { Ok(Receipt {
state_root: None, state_root: None,
gas_used: d.val_at(0)?, gas_used: rlp.val_at(0)?,
log_bloom: d.val_at(1)?, log_bloom: rlp.val_at(1)?,
logs: d.val_at(2)?, logs: rlp.list_at(2)?,
}) })
} else { } else {
Ok(Receipt { Ok(Receipt {
state_root: Some(d.val_at(0)?), state_root: Some(rlp.val_at(0)?),
gas_used: d.val_at(1)?, gas_used: rlp.val_at(1)?,
log_bloom: d.val_at(2)?, log_bloom: rlp.val_at(2)?,
logs: d.val_at(3)?, logs: rlp.list_at(3)?,
}) })
} }
} }


@ -24,6 +24,8 @@ use util::Bytes;
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", binary)] #[cfg_attr(feature = "ipc", binary)]
pub struct ManifestData { pub struct ManifestData {
/// Snapshot format version.
pub version: u64,
/// List of state chunk hashes. /// List of state chunk hashes.
pub state_hashes: Vec<H256>, pub state_hashes: Vec<H256>,
/// List of block chunk hashes. /// List of block chunk hashes.
@ -39,7 +41,8 @@ pub struct ManifestData {
impl ManifestData { impl ManifestData {
/// Encode the manifest data to rlp. /// Encode the manifest data to rlp.
pub fn into_rlp(self) -> Bytes { pub fn into_rlp(self) -> Bytes {
let mut stream = RlpStream::new_list(5); let mut stream = RlpStream::new_list(6);
stream.append(&self.version);
stream.append_list(&self.state_hashes); stream.append_list(&self.state_hashes);
stream.append_list(&self.block_hashes); stream.append_list(&self.block_hashes);
stream.append(&self.state_root); stream.append(&self.state_root);
@ -52,14 +55,20 @@ impl ManifestData {
/// Try to restore manifest data from raw bytes, interpreted as RLP. /// Try to restore manifest data from raw bytes, interpreted as RLP.
pub fn from_rlp(raw: &[u8]) -> Result<Self, DecoderError> { pub fn from_rlp(raw: &[u8]) -> Result<Self, DecoderError> {
let decoder = UntrustedRlp::new(raw); let decoder = UntrustedRlp::new(raw);
let (start, version) = if decoder.item_count()? == 5 {
(0, 1)
} else {
(1, decoder.val_at(0)?)
};
let state_hashes: Vec<H256> = decoder.val_at(0)?; let state_hashes: Vec<H256> = decoder.list_at(start + 0)?;
let block_hashes: Vec<H256> = decoder.val_at(1)?; let block_hashes: Vec<H256> = decoder.list_at(start + 1)?;
let state_root: H256 = decoder.val_at(2)?; let state_root: H256 = decoder.val_at(start + 2)?;
let block_number: u64 = decoder.val_at(3)?; let block_number: u64 = decoder.val_at(start + 3)?;
let block_hash: H256 = decoder.val_at(4)?; let block_hash: H256 = decoder.val_at(start + 4)?;
Ok(ManifestData { Ok(ManifestData {
version: version,
state_hashes: state_hashes, state_hashes: state_hashes,
block_hashes: block_hashes, block_hashes: block_hashes,
state_root: state_root, state_root: state_root,
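Both the manifest and the packed snapshot reader use the same backward-compatibility trick: a five-item list is interpreted as a legacy version-1 manifest, while newer manifests prepend the version and shift every other field by one. A minimal sketch of that detection step, assuming the in-tree `rlp` crate (the function name is mine):

use rlp::{DecoderError, UntrustedRlp};

fn manifest_version_and_offset(raw: &[u8]) -> Result<(u64, usize), DecoderError> {
    let rlp = UntrustedRlp::new(raw);
    if rlp.item_count()? == 5 {
        Ok((1, 0))              // legacy manifest: no version field
    } else {
        Ok((rlp.val_at(0)?, 1)) // versioned manifest: fields start at index 1
    }
}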


@ -17,7 +17,7 @@
//! Trace errors. //! Trace errors.
use std::fmt; use std::fmt;
use rlp::{Encodable, RlpStream, Decodable, Decoder, DecoderError, View}; use rlp::{Encodable, RlpStream, Decodable, DecoderError, UntrustedRlp};
use evm::Error as EvmError; use evm::Error as EvmError;
/// Trace evm errors. /// Trace evm errors.
@ -96,9 +96,9 @@ impl Encodable for Error {
} }
impl Decodable for Error { impl Decodable for Error {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
use self::Error::*; use self::Error::*;
let value: u8 = decoder.as_rlp().as_val()?; let value: u8 = rlp.as_val()?;
match value { match value {
0 => Ok(OutOfGas), 0 => Ok(OutOfGas),
1 => Ok(BadJumpDestination), 1 => Ok(BadJumpDestination),


@ -64,9 +64,8 @@ impl Encodable for FlatTrace {
} }
impl Decodable for FlatTrace { impl Decodable for FlatTrace {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(d: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp(); let v: Vec<usize> = d.list_at(3)?;
let v: Vec<usize> = d.val_at(3)?;
let res = FlatTrace { let res = FlatTrace {
action: d.val_at(0)?, action: d.val_at(0)?,
result: d.val_at(1)?, result: d.val_at(1)?,
@ -108,8 +107,8 @@ impl Encodable for FlatTransactionTraces {
} }
impl Decodable for FlatTransactionTraces { impl Decodable for FlatTransactionTraces {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(FlatTransactionTraces(Decodable::decode(decoder)?)) Ok(FlatTransactionTraces(rlp.as_list()?))
} }
} }
@ -149,8 +148,8 @@ impl Encodable for FlatBlockTraces {
} }
impl Decodable for FlatBlockTraces { impl Decodable for FlatBlockTraces {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
Ok(FlatBlockTraces(Decodable::decode(decoder)?)) Ok(FlatBlockTraces(rlp.as_list()?))
} }
} }


@ -45,11 +45,10 @@ impl Encodable for CallResult {
} }
impl Decodable for CallResult { impl Decodable for CallResult {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let res = CallResult { let res = CallResult {
gas_used: d.val_at(0)?, gas_used: rlp.val_at(0)?,
output: d.val_at(1)?, output: rlp.val_at(1)?,
}; };
Ok(res) Ok(res)
@ -78,12 +77,11 @@ impl Encodable for CreateResult {
} }
impl Decodable for CreateResult { impl Decodable for CreateResult {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let res = CreateResult { let res = CreateResult {
gas_used: d.val_at(0)?, gas_used: rlp.val_at(0)?,
code: d.val_at(1)?, code: rlp.val_at(1)?,
address: d.val_at(2)?, address: rlp.val_at(2)?,
}; };
Ok(res) Ok(res)
@ -141,15 +139,14 @@ impl Encodable for Call {
} }
impl Decodable for Call { impl Decodable for Call {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let res = Call { let res = Call {
from: d.val_at(0)?, from: rlp.val_at(0)?,
to: d.val_at(1)?, to: rlp.val_at(1)?,
value: d.val_at(2)?, value: rlp.val_at(2)?,
gas: d.val_at(3)?, gas: rlp.val_at(3)?,
input: d.val_at(4)?, input: rlp.val_at(4)?,
call_type: d.val_at(5)?, call_type: rlp.val_at(5)?,
}; };
Ok(res) Ok(res)
@ -201,13 +198,12 @@ impl Encodable for Create {
} }
impl Decodable for Create { impl Decodable for Create {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let res = Create { let res = Create {
from: d.val_at(0)?, from: rlp.val_at(0)?,
value: d.val_at(1)?, value: rlp.val_at(1)?,
gas: d.val_at(2)?, gas: rlp.val_at(2)?,
init: d.val_at(3)?, init: rlp.val_at(3)?,
}; };
Ok(res) Ok(res)
@ -252,12 +248,11 @@ impl Encodable for Suicide {
} }
impl Decodable for Suicide { impl Decodable for Suicide {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let res = Suicide { let res = Suicide {
address: d.val_at(0)?, address: rlp.val_at(0)?,
refund_address: d.val_at(1)?, refund_address: rlp.val_at(1)?,
balance: d.val_at(2)?, balance: rlp.val_at(2)?,
}; };
Ok(res) Ok(res)
@ -298,13 +293,12 @@ impl Encodable for Action {
} }
impl Decodable for Action { impl Decodable for Action {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp(); let action_type: u8 = rlp.val_at(0)?;
let action_type: u8 = d.val_at(0)?;
match action_type { match action_type {
0 => d.val_at(1).map(Action::Call), 0 => rlp.val_at(1).map(Action::Call),
1 => d.val_at(1).map(Action::Create), 1 => rlp.val_at(1).map(Action::Create),
2 => d.val_at(1).map(Action::Suicide), 2 => rlp.val_at(1).map(Action::Suicide),
_ => Err(DecoderError::Custom("Invalid action type.")), _ => Err(DecoderError::Custom("Invalid action type.")),
} }
} }
@ -369,14 +363,13 @@ impl Encodable for Res {
} }
impl Decodable for Res { impl Decodable for Res {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp(); let action_type: u8 = rlp.val_at(0)?;
let action_type: u8 = d.val_at(0)?;
match action_type { match action_type {
0 => d.val_at(1).map(Res::Call), 0 => rlp.val_at(1).map(Res::Call),
1 => d.val_at(1).map(Res::Create), 1 => rlp.val_at(1).map(Res::Create),
2 => d.val_at(1).map(Res::FailedCall), 2 => rlp.val_at(1).map(Res::FailedCall),
3 => d.val_at(1).map(Res::FailedCreate), 3 => rlp.val_at(1).map(Res::FailedCreate),
4 => Ok(Res::None), 4 => Ok(Res::None),
_ => Err(DecoderError::Custom("Invalid result type.")), _ => Err(DecoderError::Custom("Invalid result type.")),
} }
@ -420,11 +413,10 @@ impl Encodable for MemoryDiff {
} }
impl Decodable for MemoryDiff { impl Decodable for MemoryDiff {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
Ok(MemoryDiff { Ok(MemoryDiff {
offset: d.val_at(0)?, offset: rlp.val_at(0)?,
data: d.val_at(1)?, data: rlp.val_at(1)?,
}) })
} }
} }
@ -448,11 +440,10 @@ impl Encodable for StorageDiff {
} }
impl Decodable for StorageDiff { impl Decodable for StorageDiff {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
Ok(StorageDiff { Ok(StorageDiff {
location: d.val_at(0)?, location: rlp.val_at(0)?,
value: d.val_at(1)?, value: rlp.val_at(1)?,
}) })
} }
} }
@ -482,13 +473,12 @@ impl Encodable for VMExecutedOperation {
} }
impl Decodable for VMExecutedOperation { impl Decodable for VMExecutedOperation {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
Ok(VMExecutedOperation { Ok(VMExecutedOperation {
gas_used: d.val_at(0)?, gas_used: rlp.val_at(0)?,
stack_push: d.val_at(1)?, stack_push: rlp.list_at(1)?,
mem_diff: d.val_at(2)?, mem_diff: rlp.val_at(2)?,
store_diff: d.val_at(3)?, store_diff: rlp.val_at(3)?,
}) })
} }
} }
@ -518,13 +508,12 @@ impl Encodable for VMOperation {
} }
impl Decodable for VMOperation { impl Decodable for VMOperation {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let res = VMOperation { let res = VMOperation {
pc: d.val_at(0)?, pc: rlp.val_at(0)?,
instruction: d.val_at(1)?, instruction: rlp.val_at(1)?,
gas_cost: d.val_at(2)?, gas_cost: rlp.val_at(2)?,
executed: d.val_at(3)?, executed: rlp.val_at(3)?,
}; };
Ok(res) Ok(res)
@ -557,13 +546,12 @@ impl Encodable for VMTrace {
} }
impl Decodable for VMTrace { impl Decodable for VMTrace {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp();
let res = VMTrace { let res = VMTrace {
parent_step: d.val_at(0)?, parent_step: rlp.val_at(0)?,
code: d.val_at(1)?, code: rlp.val_at(1)?,
operations: d.val_at(2)?, operations: rlp.list_at(2)?,
subs: d.val_at(3)?, subs: rlp.list_at(3)?,
}; };
Ok(res) Ok(res)
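The hunks above all apply the same rlp API migration: Decodable::decode loses its generic Decoder parameter and takes &UntrustedRlp directly, and fields that are RLP lists switch from val_at to list_at. A minimal sketch of the new-style impl for a hypothetical two-field type (not part of this diff), assuming the UntrustedRlp, Decodable and DecoderError items used above:

use rlp::{Decodable, DecoderError, UntrustedRlp};

struct ExampleRecord {
	id: u64,        // scalar field
	tags: Vec<u64>, // list field
}

impl Decodable for ExampleRecord {
	fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
		Ok(ExampleRecord {
			id: rlp.val_at(0)?,    // single values keep val_at
			tags: rlp.list_at(1)?, // lists now use list_at
		})
	}
}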


@ -42,8 +42,7 @@ impl Default for Action {
} }
impl Decodable for Action { impl Decodable for Action {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let rlp = decoder.as_rlp();
if rlp.is_empty() { if rlp.is_empty() {
Ok(Action::Create) Ok(Action::Create)
} else { } else {
@ -243,12 +242,11 @@ impl Deref for UnverifiedTransaction {
} }
impl Decodable for UnverifiedTransaction { impl Decodable for UnverifiedTransaction {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder { fn decode(d: &UntrustedRlp) -> Result<Self, DecoderError> {
let d = decoder.as_rlp(); if d.item_count()? != 9 {
if d.item_count() != 9 {
return Err(DecoderError::RlpIncorrectListLen); return Err(DecoderError::RlpIncorrectListLen);
} }
let hash = decoder.as_raw().sha3(); let hash = d.as_raw().sha3();
Ok(UnverifiedTransaction { Ok(UnverifiedTransaction {
unsigned: Transaction { unsigned: Transaction {
nonce: d.val_at(0)?, nonce: d.val_at(0)?,
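One more detail in the hunk above: item_count() on UntrustedRlp now returns Result<usize, DecoderError> instead of usize, so the length check propagates the error with ? before comparing (the same change appears in the uncle count check further below). A hedged restatement of that check as a standalone helper, assuming the same API:

fn check_transaction_len(d: &UntrustedRlp) -> Result<(), DecoderError> {
	if d.item_count()? != 9 {
		return Err(DecoderError::RlpIncorrectListLen);
	}
	Ok(())
}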


@ -26,7 +26,7 @@ use engines::Engine;
use error::{BlockError, Error}; use error::{BlockError, Error};
use blockchain::*; use blockchain::*;
use header::{BlockNumber, Header}; use header::{BlockNumber, Header};
use rlp::{UntrustedRlp, View}; use rlp::UntrustedRlp;
use transaction::SignedTransaction; use transaction::SignedTransaction;
use views::BlockView; use views::BlockView;
use time::get_time; use time::get_time;
@ -101,7 +101,7 @@ pub fn verify_block_family(header: &Header, bytes: &[u8], engine: &Engine, bc: &
verify_parent(&header, &parent)?; verify_parent(&header, &parent)?;
engine.verify_block_family(&header, &parent, Some(bytes))?; engine.verify_block_family(&header, &parent, Some(bytes))?;
let num_uncles = UntrustedRlp::new(bytes).at(2)?.item_count(); let num_uncles = UntrustedRlp::new(bytes).at(2)?.item_count()?;
if num_uncles != 0 { if num_uncles != 0 {
if num_uncles > engine.maximum_uncle_count() { if num_uncles > engine.maximum_uncle_count() {
return Err(From::from(BlockError::TooManyUncles(OutOfBounds { min: None, max: Some(engine.maximum_uncle_count()), found: num_uncles }))); return Err(From::from(BlockError::TooManyUncles(OutOfBounds { min: None, max: Some(engine.maximum_uncle_count()), found: num_uncles })));
@ -264,7 +264,6 @@ mod tests {
use transaction::*; use transaction::*;
use tests::helpers::*; use tests::helpers::*;
use types::log_entry::{LogEntry, LocalizedLogEntry}; use types::log_entry::{LogEntry, LocalizedLogEntry};
use rlp::View;
use time::get_time; use time::get_time;
use encoded; use encoded;


@ -20,7 +20,7 @@ use util::*;
use header::*; use header::*;
use transaction::*; use transaction::*;
use super::{TransactionView, HeaderView}; use super::{TransactionView, HeaderView};
use rlp::{Rlp, View}; use rlp::Rlp;
/// View onto block rlp. /// View onto block rlp.
pub struct BlockView<'a> { pub struct BlockView<'a> {
@ -69,7 +69,7 @@ impl<'a> BlockView<'a> {
/// Return List of transactions in given block. /// Return List of transactions in given block.
pub fn transactions(&self) -> Vec<UnverifiedTransaction> { pub fn transactions(&self) -> Vec<UnverifiedTransaction> {
self.rlp.val_at(1) self.rlp.list_at(1)
} }
/// Return List of transactions with additional localization info. /// Return List of transactions with additional localization info.
@ -125,7 +125,7 @@ impl<'a> BlockView<'a> {
/// Return list of uncles of given block. /// Return list of uncles of given block.
pub fn uncles(&self) -> Vec<Header> { pub fn uncles(&self) -> Vec<Header> {
self.rlp.val_at(2) self.rlp.list_at(2)
} }
/// Return number of uncles in given block, without deserializing them. /// Return number of uncles in given block, without deserializing them.


@ -20,7 +20,7 @@ use util::*;
use header::*; use header::*;
use transaction::*; use transaction::*;
use super::{TransactionView, HeaderView}; use super::{TransactionView, HeaderView};
use rlp::{Rlp, View}; use rlp::Rlp;
/// View onto block rlp. /// View onto block rlp.
pub struct BodyView<'a> { pub struct BodyView<'a> {
@ -49,7 +49,7 @@ impl<'a> BodyView<'a> {
/// Return List of transactions in given block. /// Return List of transactions in given block.
pub fn transactions(&self) -> Vec<UnverifiedTransaction> { pub fn transactions(&self) -> Vec<UnverifiedTransaction> {
self.rlp.val_at(0) self.rlp.list_at(0)
} }
/// Return List of transactions with additional localization info. /// Return List of transactions with additional localization info.
@ -99,7 +99,7 @@ impl<'a> BodyView<'a> {
/// Return list of uncles of given block. /// Return list of uncles of given block.
pub fn uncles(&self) -> Vec<Header> { pub fn uncles(&self) -> Vec<Header> {
self.rlp.val_at(1) self.rlp.list_at(1)
} }
/// Return number of uncles in given block, without deserializing them. /// Return number of uncles in given block, without deserializing them.
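The BlockView and BodyView changes follow the same val_at/list_at split: transactions and uncles are RLP lists, so they are read with list_at. A short usage sketch (the constructor name and the bytes variable are assumptions, not shown in this diff):

let body = BodyView::new(&bytes);
let txs: Vec<UnverifiedTransaction> = body.transactions(); // decoded via list_at(0)
let uncles: Vec<Header> = body.uncles();                   // decoded via list_at(1)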


@ -17,7 +17,7 @@
//! View onto block header rlp //! View onto block header rlp
use util::{U256, Bytes, Hashable, H256, Address, H2048}; use util::{U256, Bytes, Hashable, H256, Address, H2048};
use rlp::{Rlp, View}; use rlp::Rlp;
use header::BlockNumber; use header::BlockNumber;
/// View onto block header rlp. /// View onto block header rlp.


@ -16,7 +16,7 @@
//! View onto transaction rlp //! View onto transaction rlp
use util::{U256, Bytes, Hashable, H256}; use util::{U256, Bytes, Hashable, H256};
use rlp::{Rlp, View}; use rlp::Rlp;
/// View onto transaction rlp. /// View onto transaction rlp.
pub struct TransactionView<'a> { pub struct TransactionView<'a> {


@ -19,14 +19,22 @@ use {json, Error, crypto};
use account::Version; use account::Version;
use super::crypto::Crypto; use super::crypto::Crypto;
/// Account representation.
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
pub struct SafeAccount { pub struct SafeAccount {
/// Account ID
pub id: [u8; 16], pub id: [u8; 16],
/// Account version
pub version: Version, pub version: Version,
/// Account address
pub address: Address, pub address: Address,
/// Account private key derivation definition.
pub crypto: Crypto, pub crypto: Crypto,
/// Account filename
pub filename: Option<String>, pub filename: Option<String>,
/// Account name
pub name: String, pub name: String,
/// Account metadata
pub meta: String, pub meta: String,
} }
@ -44,6 +52,7 @@ impl Into<json::KeyFile> for SafeAccount {
} }
impl SafeAccount { impl SafeAccount {
/// Create a new account
pub fn create( pub fn create(
keypair: &KeyPair, keypair: &KeyPair,
id: [u8; 16], id: [u8; 16],
@ -114,21 +123,25 @@ impl SafeAccount {
}) })
} }
/// Sign a message.
pub fn sign(&self, password: &str, message: &Message) -> Result<Signature, Error> { pub fn sign(&self, password: &str, message: &Message) -> Result<Signature, Error> {
let secret = self.crypto.secret(password)?; let secret = self.crypto.secret(password)?;
sign(&secret, message).map_err(From::from) sign(&secret, message).map_err(From::from)
} }
/// Decrypt a message.
pub fn decrypt(&self, password: &str, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error> { pub fn decrypt(&self, password: &str, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error> {
let secret = self.crypto.secret(password)?; let secret = self.crypto.secret(password)?;
crypto::ecies::decrypt(&secret, shared_mac, message).map_err(From::from) crypto::ecies::decrypt(&secret, shared_mac, message).map_err(From::from)
} }
/// Derive public key.
pub fn public(&self, password: &str) -> Result<Public, Error> { pub fn public(&self, password: &str) -> Result<Public, Error> {
let secret = self.crypto.secret(password)?; let secret = self.crypto.secret(password)?;
Ok(KeyPair::from_secret(secret)?.public().clone()) Ok(KeyPair::from_secret(secret)?.public().clone())
} }
/// Change account's password.
pub fn change_password(&self, old_password: &str, new_password: &str, iterations: u32) -> Result<Self, Error> { pub fn change_password(&self, old_password: &str, new_password: &str, iterations: u32) -> Result<Self, Error> {
let secret = self.crypto.secret(old_password)?; let secret = self.crypto.secret(old_password)?;
let result = SafeAccount { let result = SafeAccount {
@ -143,6 +156,7 @@ impl SafeAccount {
Ok(result) Ok(result)
} }
/// Check if password matches the account.
pub fn check_password(&self, password: &str) -> bool { pub fn check_password(&self, password: &str) -> bool {
self.crypto.secret(password).is_ok() self.crypto.secret(password).is_ok()
} }
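The SafeAccount hunks above only add documentation comments; the API itself is unchanged. For orientation, a hedged usage sketch of the documented methods (account, message, shared_mac and encrypted are assumed to be in scope):

let signature = account.sign("password", &message)?;
let plain = account.decrypt("password", shared_mac, &encrypted)?;
let still_valid = account.check_password("password");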


@ -22,7 +22,7 @@ use std::{env, process, fs};
use std::io::Read; use std::io::Read;
use docopt::Docopt; use docopt::Docopt;
use ethstore::ethkey::Address; use ethstore::ethkey::Address;
use ethstore::dir::{KeyDirectory, ParityDirectory, RootDiskDirectory, GethDirectory, DirectoryType}; use ethstore::dir::{paths, KeyDirectory, RootDiskDirectory};
use ethstore::{EthStore, SimpleSecretStore, SecretStore, import_accounts, Error, PresaleWallet, use ethstore::{EthStore, SimpleSecretStore, SecretStore, import_accounts, Error, PresaleWallet,
SecretVaultRef, StoreAccountRef}; SecretVaultRef, StoreAccountRef};
@ -49,14 +49,14 @@ Usage:
Options: Options:
-h, --help Display this message and exit. -h, --help Display this message and exit.
--dir DIR Specify the secret store directory. It may be either --dir DIR Specify the secret store directory. It may be either
parity, parity-test, geth, geth-test parity, parity-(chain), geth, geth-test
or a path [default: parity]. or a path [default: parity].
--vault VAULT Specify vault to use in this operation. --vault VAULT Specify vault to use in this operation.
--vault-pwd VAULTPWD Specify vault password to use in this operation. Please note --vault-pwd VAULTPWD Specify vault password to use in this operation. Please note
that this option is required when vault option is set. that this option is required when vault option is set.
Otherwise it is ignored. Otherwise it is ignored.
--src DIR Specify import source. It may be either --src DIR Specify import source. It may be either
parity, parity-test, get, geth-test parity, parity-(chain), get, geth-test
or a path [default: geth]. or a path [default: geth].
Commands: Commands:
@ -116,10 +116,13 @@ fn main() {
fn key_dir(location: &str) -> Result<Box<KeyDirectory>, Error> { fn key_dir(location: &str) -> Result<Box<KeyDirectory>, Error> {
let dir: Box<KeyDirectory> = match location { let dir: Box<KeyDirectory> = match location {
"parity" => Box::new(ParityDirectory::create(DirectoryType::Main)?), "geth" => Box::new(RootDiskDirectory::create(paths::geth(false))?),
"parity-test" => Box::new(ParityDirectory::create(DirectoryType::Testnet)?), "geth-test" => Box::new(RootDiskDirectory::create(paths::geth(true))?),
"geth" => Box::new(GethDirectory::create(DirectoryType::Main)?), path if path.starts_with("parity") => {
"geth-test" => Box::new(GethDirectory::create(DirectoryType::Testnet)?), let chain = path.split('-').nth(1).unwrap_or("ethereum");
let path = paths::parity(chain);
Box::new(RootDiskDirectory::create(path)?)
},
path => Box::new(RootDiskDirectory::create(path)?), path => Box::new(RootDiskDirectory::create(path)?),
}; };
@ -254,4 +257,3 @@ fn execute<S, I>(command: I) -> Result<String, Error> where I: IntoIterator<Item
Ok(format!("{}", USAGE)) Ok(format!("{}", USAGE))
} }
} }
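The key_dir rewrite above drops the fixed ParityDirectory/GethDirectory types in favour of the new paths helpers: any location starting with "parity" is split on '-' to pick the chain name, defaulting to "ethereum". A rough illustration of the mapping (the comments are assumptions read off the match arms above):

// "parity"       -> paths::parity("ethereum")
// "parity-kovan" -> paths::parity("kovan")
// "geth"         -> paths::geth(false)
// "geth-test"    -> paths::geth(true)
let chain = "parity-kovan".split('-').nth(1).unwrap_or("ethereum");
assert_eq!(chain, "kovan");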


@ -1,102 +0,0 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::env;
use std::path::PathBuf;
use {SafeAccount, Error};
use super::{KeyDirectory, RootDiskDirectory, DirectoryType};
#[cfg(target_os = "macos")]
fn geth_dir_path() -> PathBuf {
let mut home = env::home_dir().expect("Failed to get home dir");
home.push("Library");
home.push("Ethereum");
home
}
#[cfg(windows)]
/// Default path for ethereum installation on Windows
pub fn geth_dir_path() -> PathBuf {
let mut home = env::home_dir().expect("Failed to get home dir");
home.push("AppData");
home.push("Roaming");
home.push("Ethereum");
home
}
#[cfg(not(any(target_os = "macos", windows)))]
/// Default path for ethereum installation on posix system which is not Mac OS
pub fn geth_dir_path() -> PathBuf {
let mut home = env::home_dir().expect("Failed to get home dir");
home.push(".ethereum");
home
}
fn geth_keystore(t: DirectoryType) -> PathBuf {
let mut dir = geth_dir_path();
match t {
DirectoryType::Testnet => {
dir.push("testnet");
dir.push("keystore");
},
DirectoryType::Main => {
dir.push("keystore");
}
}
dir
}
pub struct GethDirectory {
dir: RootDiskDirectory,
}
impl GethDirectory {
pub fn create(t: DirectoryType) -> Result<Self, Error> {
let result = GethDirectory {
dir: RootDiskDirectory::create(geth_keystore(t))?,
};
Ok(result)
}
pub fn open(t: DirectoryType) -> Self {
GethDirectory {
dir: RootDiskDirectory::at(geth_keystore(t)),
}
}
}
impl KeyDirectory for GethDirectory {
fn load(&self) -> Result<Vec<SafeAccount>, Error> {
self.dir.load()
}
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
self.dir.insert(account)
}
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
self.dir.update(account)
}
fn remove(&self, account: &SafeAccount) -> Result<(), Error> {
self.dir.remove(account)
}
fn unique_repr(&self) -> Result<u64, Error> {
self.dir.unique_repr()
}
}


@ -22,6 +22,7 @@ use ethkey::Address;
use {SafeAccount, Error}; use {SafeAccount, Error};
use super::KeyDirectory; use super::KeyDirectory;
/// Accounts in-memory storage.
#[derive(Default)] #[derive(Default)]
pub struct MemoryDirectory { pub struct MemoryDirectory {
accounts: RwLock<HashMap<Address, Vec<SafeAccount>>>, accounts: RwLock<HashMap<Address, Vec<SafeAccount>>>,


@ -14,19 +14,15 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Accounts Directory
use std::path::{PathBuf}; use std::path::{PathBuf};
use {SafeAccount, Error}; use {SafeAccount, Error};
mod disk; mod disk;
mod geth;
mod memory; mod memory;
mod parity;
mod vault; mod vault;
pub mod paths;
pub enum DirectoryType {
Testnet,
Main,
}
/// `VaultKeyDirectory::set_key` error /// `VaultKeyDirectory::set_key` error
#[derive(Debug)] #[derive(Debug)]
@ -54,7 +50,7 @@ pub trait KeyDirectory: Send + Sync {
fn load(&self) -> Result<Vec<SafeAccount>, Error>; fn load(&self) -> Result<Vec<SafeAccount>, Error>;
/// Insert new key to directory /// Insert new key to directory
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error>; fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error>;
//// Update key in directory /// Update key in the directory
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error>; fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error>;
/// Remove key from directory /// Remove key from directory
fn remove(&self, account: &SafeAccount) -> Result<(), Error>; fn remove(&self, account: &SafeAccount) -> Result<(), Error>;
@ -95,9 +91,7 @@ pub trait VaultKeyDirectory: KeyDirectory {
} }
pub use self::disk::RootDiskDirectory; pub use self::disk::RootDiskDirectory;
pub use self::geth::GethDirectory;
pub use self::memory::MemoryDirectory; pub use self::memory::MemoryDirectory;
pub use self::parity::ParityDirectory;
pub use self::vault::VaultDiskDirectory; pub use self::vault::VaultDiskDirectory;
impl VaultKey { impl VaultKey {


@ -1,81 +0,0 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::env;
use std::path::PathBuf;
use {SafeAccount, Error};
use super::{KeyDirectory, RootDiskDirectory, DirectoryType};
fn parity_dir_path() -> PathBuf {
let mut home = env::home_dir().expect("Failed to get home dir");
home.push(".parity");
home
}
fn parity_keystore(t: DirectoryType) -> PathBuf {
let mut dir = parity_dir_path();
match t {
DirectoryType::Testnet => {
dir.push("testnet_keys");
},
DirectoryType::Main => {
dir.push("keys");
}
}
dir
}
pub struct ParityDirectory {
dir: RootDiskDirectory,
}
impl ParityDirectory {
pub fn create(t: DirectoryType) -> Result<Self, Error> {
let result = ParityDirectory {
dir: RootDiskDirectory::create(parity_keystore(t))?,
};
Ok(result)
}
pub fn open(t: DirectoryType) -> Self {
ParityDirectory {
dir: RootDiskDirectory::at(parity_keystore(t)),
}
}
}
impl KeyDirectory for ParityDirectory {
fn load(&self) -> Result<Vec<SafeAccount>, Error> {
self.dir.load()
}
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
self.dir.insert(account)
}
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
self.dir.update(account)
}
fn remove(&self, account: &SafeAccount) -> Result<(), Error> {
self.dir.remove(account)
}
fn unique_repr(&self) -> Result<u64, Error> {
self.dir.unique_repr()
}
}

ethstore/src/dir/paths.rs (new file)

@ -0,0 +1,96 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Common tools paths.
use std::env;
use std::path::PathBuf;
fn home() -> PathBuf {
env::home_dir().expect("Failed to get home dir")
}
/// Geth path
pub fn geth(testnet: bool) -> PathBuf {
let mut base = geth_base();
if testnet {
base.push("testnet");
}
base.push("keystore");
base
}
/// Parity path for specific chain
pub fn parity(chain: &str) -> PathBuf {
let mut base = parity_base();
base.push(chain);
base
}
#[cfg(target_os = "macos")]
fn parity_base() -> PathBuf {
let mut home = home();
home.push("Library");
home.push("Application Support");
home.push("io.parity.ethereum");
home.push("keys");
home
}
#[cfg(windows)]
fn parity_base() -> PathBuf {
let mut home = home();
home.push("AppData");
home.push("Roaming");
home.push("Parity");
home.push("Ethereum");
home.push("keys");
home
}
#[cfg(not(any(target_os = "macos", windows)))]
fn parity_base() -> PathBuf {
let mut home = home();
home.push(".local");
home.push("share");
home.push("io.parity.ethereum");
home.push("keys");
home
}
#[cfg(target_os = "macos")]
fn geth_base() -> PathBuf {
let mut home = home();
home.push("Library");
home.push("Ethereum");
home
}
#[cfg(windows)]
fn geth_base() -> PathBuf {
let mut home = home();
home.push("AppData");
home.push("Roaming");
home.push("Ethereum");
home
}
#[cfg(not(any(target_os = "macos", windows)))]
fn geth_base() -> PathBuf {
let mut home = home();
home.push(".ethereum");
home
}
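A short usage sketch for the new paths module (hypothetical call sites, not part of the diff); the functions come straight from the file above, and the module is re-exported as ethstore::dir::paths per the mod.rs change earlier:

use ethstore::dir::paths;

let parity_keys = paths::parity("kovan");  // e.g. ~/.local/share/io.parity.ethereum/keys/kovan on Linux
let geth_testnet_keys = paths::geth(true); // e.g. ~/.ethereum/testnet/keystore
println!("{:?} {:?}", parity_keys, geth_testnet_keys);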


@ -20,23 +20,40 @@ use ethkey::Error as EthKeyError;
use crypto::Error as EthCryptoError; use crypto::Error as EthCryptoError;
use ethkey::DerivationError; use ethkey::DerivationError;
/// Account-related errors.
#[derive(Debug)] #[derive(Debug)]
pub enum Error { pub enum Error {
/// IO error
Io(IoError), Io(IoError),
/// Invalid Password
InvalidPassword, InvalidPassword,
/// Account's secret is invalid.
InvalidSecret, InvalidSecret,
/// Invalid Vault Crypto meta.
InvalidCryptoMeta, InvalidCryptoMeta,
/// Invalid Account.
InvalidAccount, InvalidAccount,
/// Invalid Message.
InvalidMessage, InvalidMessage,
/// Invalid Key File
InvalidKeyFile(String), InvalidKeyFile(String),
/// Vaults are not supported.
VaultsAreNotSupported, VaultsAreNotSupported,
/// Unsupported vault
UnsupportedVault, UnsupportedVault,
/// Invalid vault name
InvalidVaultName, InvalidVaultName,
/// Vault not found
VaultNotFound, VaultNotFound,
/// Account creation failed.
CreationFailed, CreationFailed,
/// `EthKey` error
EthKey(EthKeyError), EthKey(EthKeyError),
/// `EthCrypto` error
EthCrypto(EthCryptoError), EthCrypto(EthCryptoError),
/// Derivation error
Derivation(DerivationError), Derivation(DerivationError),
/// Custom error
Custom(String), Custom(String),
} }


@ -25,18 +25,21 @@ use ethkey::{self, Signature, Address, Message, Secret, Public, KeyPair, Extende
use dir::{KeyDirectory, VaultKeyDirectory, VaultKey, SetKeyError}; use dir::{KeyDirectory, VaultKeyDirectory, VaultKey, SetKeyError};
use account::SafeAccount; use account::SafeAccount;
use presale::PresaleWallet; use presale::PresaleWallet;
use json::{self, Uuid}; use json::{self, Uuid, OpaqueKeyFile};
use {import, Error, SimpleSecretStore, SecretStore, SecretVaultRef, StoreAccountRef, Derivation}; use {import, Error, SimpleSecretStore, SecretStore, SecretVaultRef, StoreAccountRef, Derivation};
/// Accounts store.
pub struct EthStore { pub struct EthStore {
store: EthMultiStore, store: EthMultiStore,
} }
impl EthStore { impl EthStore {
/// Open a new accounts store with given key directory backend.
pub fn open(directory: Box<KeyDirectory>) -> Result<Self, Error> { pub fn open(directory: Box<KeyDirectory>) -> Result<Self, Error> {
Self::open_with_iterations(directory, KEY_ITERATIONS as u32) Self::open_with_iterations(directory, KEY_ITERATIONS as u32)
} }
/// Open a new account store with given key directory backend and custom number of iterations.
pub fn open_with_iterations(directory: Box<KeyDirectory>, iterations: u32) -> Result<Self, Error> { pub fn open_with_iterations(directory: Box<KeyDirectory>, iterations: u32) -> Result<Self, Error> {
Ok(EthStore { Ok(EthStore {
store: EthMultiStore::open_with_iterations(directory, iterations)?, store: EthMultiStore::open_with_iterations(directory, iterations)?,
@ -44,7 +47,7 @@ impl EthStore {
} }
fn get(&self, account: &StoreAccountRef) -> Result<SafeAccount, Error> { fn get(&self, account: &StoreAccountRef) -> Result<SafeAccount, Error> {
let mut accounts = self.store.get(account)?.into_iter(); let mut accounts = self.store.get_accounts(account)?.into_iter();
accounts.next().ok_or(Error::InvalidAccount) accounts.next().ok_or(Error::InvalidAccount)
} }
} }
@ -76,6 +79,10 @@ impl SimpleSecretStore for EthStore {
self.store.change_password(account, old_password, new_password) self.store.change_password(account, old_password, new_password)
} }
fn export_account(&self, account: &StoreAccountRef, password: &str) -> Result<OpaqueKeyFile, Error> {
self.store.export_account(account, password)
}
fn remove_account(&self, account: &StoreAccountRef, password: &str) -> Result<(), Error> { fn remove_account(&self, account: &StoreAccountRef, password: &str) -> Result<(), Error> {
self.store.remove_account(account, password) self.store.remove_account(account, password)
} }
@ -234,11 +241,12 @@ pub struct EthMultiStore {
} }
impl EthMultiStore { impl EthMultiStore {
/// Open new multi-accounts store with given key directory backend.
pub fn open(directory: Box<KeyDirectory>) -> Result<Self, Error> { pub fn open(directory: Box<KeyDirectory>) -> Result<Self, Error> {
Self::open_with_iterations(directory, KEY_ITERATIONS as u32) Self::open_with_iterations(directory, KEY_ITERATIONS as u32)
} }
/// Open new multi-accounts store with given key directory backend and custom number of iterations for new keys.
pub fn open_with_iterations(directory: Box<KeyDirectory>, iterations: u32) -> Result<Self, Error> { pub fn open_with_iterations(directory: Box<KeyDirectory>, iterations: u32) -> Result<Self, Error> {
let store = EthMultiStore { let store = EthMultiStore {
dir: directory, dir: directory,
@ -287,7 +295,7 @@ impl EthMultiStore {
Ok(()) Ok(())
} }
fn get(&self, account: &StoreAccountRef) -> Result<Vec<SafeAccount>, Error> { fn get_accounts(&self, account: &StoreAccountRef) -> Result<Vec<SafeAccount>, Error> {
{ {
let cache = self.cache.read(); let cache = self.cache.read();
if let Some(accounts) = cache.get(account) { if let Some(accounts) = cache.get(account) {
@ -307,6 +315,15 @@ impl EthMultiStore {
} }
} }
fn get_matching(&self, account: &StoreAccountRef, password: &str) -> Result<Vec<SafeAccount>, Error> {
let accounts = self.get_accounts(account)?;
Ok(accounts.into_iter()
.filter(|acc| acc.check_password(password))
.collect()
)
}
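get_matching is the new helper that folds the repeated "skip if password is invalid" loops into one place; the callers below (insert_derived, generate_derived, sign_derived, remove_account, change_password, sign, decrypt, export_account) now only see accounts whose password already verified. A hedged sketch of another method written in the same style (hypothetical; it would have to live inside impl EthMultiStore since get_matching is private):

fn first_matching(&self, account: &StoreAccountRef, password: &str) -> Result<SafeAccount, Error> {
	self.get_matching(account, password)?
		.into_iter()
		.next()
		.ok_or(Error::InvalidPassword)
}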
fn import(&self, vault: SecretVaultRef, account: SafeAccount) -> Result<StoreAccountRef, Error> { fn import(&self, vault: SecretVaultRef, account: SafeAccount) -> Result<StoreAccountRef, Error> {
// save to file // save to file
let account = match vault { let account = match vault {
@ -398,12 +415,8 @@ impl SimpleSecretStore for EthMultiStore {
fn insert_derived(&self, vault: SecretVaultRef, account_ref: &StoreAccountRef, password: &str, derivation: Derivation) fn insert_derived(&self, vault: SecretVaultRef, account_ref: &StoreAccountRef, password: &str, derivation: Derivation)
-> Result<StoreAccountRef, Error> -> Result<StoreAccountRef, Error>
{ {
let accounts = self.get(account_ref)?; let accounts = self.get_matching(account_ref, password)?;
for account in accounts { for account in accounts {
// Skip if password is invalid
if !account.check_password(password) {
continue;
}
let extended = self.generate(account.crypto.secret(password)?, derivation)?; let extended = self.generate(account.crypto.secret(password)?, derivation)?;
return self.insert_account(vault, extended.secret().as_raw().clone(), password); return self.insert_account(vault, extended.secret().as_raw().clone(), password);
} }
@ -413,14 +426,9 @@ impl SimpleSecretStore for EthMultiStore {
fn generate_derived(&self, account_ref: &StoreAccountRef, password: &str, derivation: Derivation) fn generate_derived(&self, account_ref: &StoreAccountRef, password: &str, derivation: Derivation)
-> Result<Address, Error> -> Result<Address, Error>
{ {
let accounts = self.get(&account_ref)?; let accounts = self.get_matching(&account_ref, password)?;
for account in accounts { for account in accounts {
// Skip if password is invalid
if !account.check_password(password) {
continue;
}
let extended = self.generate(account.crypto.secret(password)?, derivation)?; let extended = self.generate(account.crypto.secret(password)?, derivation)?;
return Ok(ethkey::public_to_address(extended.public().public())); return Ok(ethkey::public_to_address(extended.public().public()));
} }
Err(Error::InvalidPassword) Err(Error::InvalidPassword)
@ -429,18 +437,13 @@ impl SimpleSecretStore for EthMultiStore {
fn sign_derived(&self, account_ref: &StoreAccountRef, password: &str, derivation: Derivation, message: &Message) fn sign_derived(&self, account_ref: &StoreAccountRef, password: &str, derivation: Derivation, message: &Message)
-> Result<Signature, Error> -> Result<Signature, Error>
{ {
let accounts = self.get(&account_ref)?; let accounts = self.get_matching(&account_ref, password)?;
for account in accounts { for account in accounts {
// Skip if password is invalid
if !account.check_password(password) {
continue;
}
let extended = self.generate(account.crypto.secret(password)?, derivation)?; let extended = self.generate(account.crypto.secret(password)?, derivation)?;
let secret = extended.secret().as_raw(); let secret = extended.secret().as_raw();
return Ok(ethkey::sign(&secret, message)?) return Ok(ethkey::sign(&secret, message)?)
} }
Err(Error::InvalidPassword) Err(Error::InvalidPassword)
} }
fn account_ref(&self, address: &Address) -> Result<StoreAccountRef, Error> { fn account_ref(&self, address: &Address) -> Result<StoreAccountRef, Error> {
@ -457,48 +460,48 @@ impl SimpleSecretStore for EthMultiStore {
} }
fn remove_account(&self, account_ref: &StoreAccountRef, password: &str) -> Result<(), Error> { fn remove_account(&self, account_ref: &StoreAccountRef, password: &str) -> Result<(), Error> {
let accounts = self.get(account_ref)?; let accounts = self.get_matching(account_ref, password)?;
for account in accounts { for account in accounts {
// Skip if password is invalid
if !account.check_password(password) {
continue;
}
return self.remove_safe_account(account_ref, &account); return self.remove_safe_account(account_ref, &account);
} }
Err(Error::InvalidPassword) Err(Error::InvalidPassword)
} }
fn change_password(&self, account_ref: &StoreAccountRef, old_password: &str, new_password: &str) -> Result<(), Error> { fn change_password(&self, account_ref: &StoreAccountRef, old_password: &str, new_password: &str) -> Result<(), Error> {
let accounts = self.get(account_ref)?; let accounts = self.get_matching(account_ref, old_password)?;
if accounts.is_empty() {
return Err(Error::InvalidPassword);
}
for account in accounts { for account in accounts {
// Change password // Change password
let new_account = account.change_password(old_password, new_password, self.iterations)?; let new_account = account.change_password(old_password, new_password, self.iterations)?;
self.update(account_ref, account, new_account)?; self.update(account_ref, account, new_account)?;
} }
Ok(()) Ok(())
} }
fn sign(&self, account: &StoreAccountRef, password: &str, message: &Message) -> Result<Signature, Error> { fn export_account(&self, account_ref: &StoreAccountRef, password: &str) -> Result<OpaqueKeyFile, Error> {
let accounts = self.get(account)?; self.get_matching(account_ref, password)?.into_iter().nth(0).map(Into::into).ok_or(Error::InvalidPassword)
for account in accounts {
if account.check_password(password) {
return account.sign(password, message);
}
} }
fn sign(&self, account: &StoreAccountRef, password: &str, message: &Message) -> Result<Signature, Error> {
let accounts = self.get_matching(account, password)?;
for account in accounts {
return account.sign(password, message);
}
Err(Error::InvalidPassword) Err(Error::InvalidPassword)
} }
fn decrypt(&self, account: &StoreAccountRef, password: &str, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error> { fn decrypt(&self, account: &StoreAccountRef, password: &str, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error> {
let accounts = self.get(account)?; let accounts = self.get_matching(account, password)?;
for account in accounts { for account in accounts {
if account.check_password(password) {
return account.decrypt(password, shared_mac, message); return account.decrypt(password, shared_mac, message);
} }
}
Err(Error::InvalidPassword) Err(Error::InvalidPassword)
} }
@ -586,7 +589,7 @@ impl SimpleSecretStore for EthMultiStore {
return Ok(account_ref); return Ok(account_ref);
} }
let account = self.get(&account_ref)?.into_iter().nth(0).ok_or(Error::InvalidAccount)?; let account = self.get_accounts(&account_ref)?.into_iter().nth(0).ok_or(Error::InvalidAccount)?;
let new_account_ref = self.import(vault, account.clone())?; let new_account_ref = self.import(vault, account.clone())?;
self.remove_safe_account(&account_ref, &account)?; self.remove_safe_account(&account_ref, &account)?;
self.reload_accounts()?; self.reload_accounts()?;
@ -1032,4 +1035,18 @@ mod tests {
// then // then
assert_eq!(store.get_vault_meta(name).unwrap(), "OldMeta".to_owned()); assert_eq!(store.get_vault_meta(name).unwrap(), "OldMeta".to_owned());
} }
#[test]
fn should_export_account() {
// given
let store = store();
let keypair = keypair();
let address = store.insert_account(SecretVaultRef::Root, keypair.secret().clone(), "test").unwrap();
// when
let exported = store.export_account(&address, "test");
// then
assert!(exported.is_ok(), "Should export single account: {:?}", exported);
}
} }
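The new export_account entry point returns an OpaqueKeyFile for the first account matching the password, failing with InvalidPassword otherwise. A hedged call-site sketch mirroring the test above (store and address as created there):

let exported: OpaqueKeyFile = store.export_account(&address, "test")?;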

Some files were not shown because too many files have changed in this diff.