Merge remote-tracking branch 'parity/master' into bft

Conflicts:
	ethcore/src/error.rs
Author: keorn, 2016-08-25 19:24:29 +02:00
Commit: 8bd0034ced
132 changed files with 2939 additions and 1692 deletions


@@ -62,6 +62,7 @@ linux-nightly:
 paths:
 - target/release/parity
 name: "${CI_BUILD_NAME}_parity"
+allow_failure: true
 linux-centos:
 stage: build
 image: ethcore/rust-centos:latest
@@ -91,10 +92,6 @@ linux-armv7:
 - tags
 - stable
 script:
-- mkdir -p .cargo
-- echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config
-- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
-- cat .cargo/config
 - cargo build --target armv7-unknown-linux-gnueabihf --release --verbose
 - arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity
 tags:
@@ -113,10 +110,6 @@ linux-arm:
 - tags
 - stable
 script:
-- mkdir -p .cargo
-- echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config
-- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
-- cat .cargo/config
 - cargo build --target arm-unknown-linux-gnueabihf --release --verbose
 - arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity
 tags:
@@ -126,6 +119,7 @@ linux-arm:
 paths:
 - target/arm-unknown-linux-gnueabihf/release/parity
 name: "${CI_BUILD_NAME}_parity"
+allow_failure: true
 linux-armv6:
 stage: build
 image: ethcore/rust-arm:latest
@@ -135,10 +129,6 @@ linux-armv6:
 - tags
 - stable
 script:
-- mkdir -p .cargo
-- echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config
-- echo "linker= \"arm-linux-gnueabi-gcc\"" >> .cargo/config
-- cat .cargo/config
 - cargo build --target arm-unknown-linux-gnueabi --release --verbose
 - arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity
 tags:
@@ -148,6 +138,7 @@ linux-armv6:
 paths:
 - target/arm-unknown-linux-gnueabi/release/parity
 name: "${CI_BUILD_NAME}_parity"
+allow_failure: true
 linux-aarch64:
 stage: build
 image: ethcore/rust-arm:latest
@@ -157,10 +148,6 @@ linux-aarch64:
 - tags
 - stable
 script:
-- mkdir -p .cargo
-- echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config
-- echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config
-- cat .cargo/config
 - cargo build --target aarch64-unknown-linux-gnu --release --verbose
 - aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity
 tags:
@@ -170,6 +157,7 @@ linux-aarch64:
 paths:
 - target/aarch64-unknown-linux-gnu/release/parity
 name: "${CI_BUILD_NAME}_parity"
+allow_failure: true
 darwin:
 stage: build
 only:

Cargo.lock (generated; 113 changed lines)

@@ -3,7 +3,7 @@ name = "parity"
 version = "1.4.0"
 dependencies = [
 "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"clippy 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
 "ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
 "daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -24,7 +24,7 @@ dependencies = [
 "ethcore-util 1.4.0",
 "ethsync 1.4.0",
 "fdlimit 0.1.0",
-"hyper 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"hyper 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)",
 "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -36,7 +36,6 @@ dependencies = [
 "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"syntex 0.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -134,15 +133,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "clippy"
-version = "0.0.82"
+version = "0.0.85"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"clippy_lints 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy_lints 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
 name = "clippy_lints"
-version = "0.0.82"
+version = "0.0.85"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -230,6 +229,19 @@ dependencies = [
 "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "ethabi"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
+"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_codegen 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_json 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "ethash"
 version = "1.4.0"
@@ -246,7 +258,7 @@ version = "1.4.0"
 dependencies = [
 "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"clippy 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
 "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethash 1.4.0",
@@ -257,6 +269,7 @@ dependencies = [
 "ethcore-ipc-nano 1.4.0",
 "ethcore-util 1.4.0",
 "ethjson 0.1.0",
+"ethkey 0.2.0",
 "ethstore 0.1.0",
 "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
@@ -275,7 +288,8 @@ dependencies = [
 name = "ethcore-dapps"
 version = "1.4.0"
 dependencies = [
-"clippy 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
+"ethabi 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethcore-rpc 1.4.0",
 "ethcore-util 1.4.0",
 "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
@@ -292,7 +306,6 @@ dependencies = [
 "serde 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_codegen 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_json 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"syntex 0.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "zip 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -370,7 +383,6 @@ dependencies = [
 "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)",
 "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"syntex 0.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -394,6 +406,8 @@ dependencies = [
 "ethcore-devtools 1.4.0",
 "ethcore-io 1.4.0",
 "ethcore-util 1.4.0",
+"ethcrypto 0.1.0",
+"ethkey 0.2.0",
 "igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -411,7 +425,7 @@ dependencies = [
 name = "ethcore-rpc"
 version = "1.4.0"
 dependencies = [
-"clippy 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethash 1.4.0",
 "ethcore 1.4.0",
 "ethcore-devtools 1.4.0",
@@ -419,6 +433,7 @@ dependencies = [
 "ethcore-ipc 1.4.0",
 "ethcore-util 1.4.0",
 "ethjson 0.1.0",
+"ethkey 0.2.0",
 "ethstore 0.1.0",
 "ethsync 1.4.0",
 "json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)",
@@ -429,7 +444,7 @@ dependencies = [
 "serde 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_codegen 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_json 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"syntex 0.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
 "transient-hashmap 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -437,7 +452,7 @@ dependencies = [
 name = "ethcore-signer"
 version = "1.4.0"
 dependencies = [
-"clippy 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethcore-io 1.4.0",
 "ethcore-rpc 1.4.0",
@@ -456,12 +471,16 @@ version = "1.4.0"
 dependencies = [
 "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethcore-devtools 1.4.0",
+"ethcore-ipc 1.4.0",
+"ethcore-ipc-codegen 1.4.0",
+"ethcore-ipc-nano 1.4.0",
 "ethcore-util 1.4.0",
 "json-tcp-server 0.1.0 (git+https://github.com/ethcore/json-tcp-server)",
 "jsonrpc-core 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
+"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -471,7 +490,7 @@ dependencies = [
 "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
 "bigint 0.1.0",
-"clippy 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
 "elastic-array 0.4.0 (git+https://github.com/ethcore/elastic-array)",
 "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
@@ -491,10 +510,22 @@ dependencies = [
 "table 0.1.0",
 "target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
+"tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "using_queue 0.1.0",
 "vergen 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "ethcrypto"
+version = "0.1.0"
+dependencies = [
+"bigint 0.1.0",
+"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
+"ethkey 0.2.0",
+"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
+"tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "ethjson"
 version = "0.1.0"
@@ -504,7 +535,6 @@ dependencies = [
 "serde 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_codegen 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_json 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"syntex 0.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -523,6 +553,7 @@ dependencies = [
 name = "ethstore"
 version = "0.1.0"
 dependencies = [
+"ethcrypto 0.1.0",
 "ethkey 0.2.0",
 "itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -533,7 +564,6 @@ dependencies = [
 "serde 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_codegen 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_json 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"syntex 0.33.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
 "tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -542,7 +572,7 @@ dependencies = [
 name = "ethsync"
 version = "1.4.0"
 dependencies = [
-"clippy 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)",
+"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
 "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethcore 1.4.0",
 "ethcore-io 1.4.0",
@@ -611,26 +641,6 @@ name = "httparse"
 version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "hyper"
-version = "0.8.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
-"cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
-"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
-"solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
-"traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"url 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)",
-]
 [[package]]
 name = "hyper"
 version = "0.9.4"
@@ -1540,18 +1550,6 @@ name = "unicode-xid"
 version = "0.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "url"
-version = "0.5.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
-"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
-"unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
-"unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"uuid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-]
 [[package]]
 name = "url"
 version = "1.2.0"
@@ -1570,14 +1568,6 @@ name = "utf8-ranges"
 version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "uuid"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
-"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
-]
 [[package]]
 name = "vecio"
 version = "0.1.0"
@@ -1676,8 +1666,8 @@ dependencies = [
 "checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27"
 "checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>"
 "checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
-"checksum clippy 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)" = "8be61845840f25e9abc06b930d1211c3207f3eb5db92bc001b0510b7e4f361aa"
-"checksum clippy_lints 0.0.82 (registry+https://github.com/rust-lang/crates.io-index)" = "5de435cbb0abacae719e2424a5702afcdf6b51d99b4d52ed5de86094a30e0a80"
+"checksum clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)" = "97f6d6efa6d7aec74d4eca1be62164b605d43b7fcb5256e9db0449f685130cba"
+"checksum clippy_lints 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)" = "dc96d3c877b63943b08ce3037c0ae8fd3bd5dead5fab11178b93afc71ca16031"
 "checksum cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90266f45846f14a1e986c77d1e9c2626b8c342ed806fe60241ec38cc8697b245"
 "checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc"
 "checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "<none>"
@@ -1687,6 +1677,7 @@ dependencies = [
 "checksum elastic-array 0.4.0 (git+https://github.com/ethcore/elastic-array)" = "<none>"
 "checksum env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "aba65b63ffcc17ffacd6cf5aa843da7c5a25e3bd4bbe0b7def8b214e411250e5"
 "checksum eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)" = "<none>"
+"checksum ethabi 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bc7789d1518abba0c61606826a5229284d47a9d0934feb62a1ee218882780a9b"
 "checksum flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "3eeb481e957304178d2e782f2da1257f1434dfecbae883bafb61ada2a9fea3bb"
 "checksum gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "3da3a2cbaeb01363c8e3704fd9fd0eb2ceb17c6f27abd4c1ef040fb57d20dc79"
 "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
@@ -1694,7 +1685,6 @@ dependencies = [
 "checksum heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "abb306abb8d398e053cfb1b3e7b72c2f580be048b85745c52652954f8ad1439c"
 "checksum hpack 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d2da7d3a34cf6406d9d700111b8eafafe9a251de41ae71d8052748259343b58"
 "checksum httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "46534074dbb80b070d60a5cb8ecadd8963a00a438ae1a95268850a7ef73b67ae"
-"checksum hyper 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bb0f4d00bb781e559b6e66ae4b5479df0fdf9ab15949f52fa2f1f5de16d4cc07"
 "checksum hyper 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)" = "eb27e8a3e8f17ac43ffa41bbda9cf5ad3f9f13ef66fa4873409d4902310275f7"
 "checksum hyper 0.9.4 (git+https://github.com/ethcore/hyper)" = "<none>"
 "checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11"
@@ -1702,6 +1692,7 @@ dependencies = [
 "checksum isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7408a548dc0e406b7912d9f84c261cc533c1866e047644a811c133c56041ac0c"
 "checksum itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "086e1fa5fe48840b1cfdef3a20c7e3115599f8d5c4c87ef32a794a7cdd184d76"
 "checksum json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)" = "<none>"
+"checksum json-tcp-server 0.1.0 (git+https://github.com/ethcore/json-tcp-server)" = "<none>"
 "checksum jsonrpc-core 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ec4477e4e8218da23caa5dd31f4eb39999aa0ea9035660617eccfb19a23bf5ad"
 "checksum jsonrpc-http-server 6.1.0 (git+https://github.com/ethcore/jsonrpc-http-server.git)" = "<none>"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
@@ -1798,10 +1789,8 @@ dependencies = [
 "checksum unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c1f7ceb96afdfeedee42bade65a0d585a6a0106f681b6749c8ff4daa8df30b3f"
 "checksum unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "26643a2f83bac55f1976fb716c10234485f9202dcd65cfbdf9da49867b271172"
 "checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
-"checksum url 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f6d04073d0fcd045a1cf57aea560d1be5ba812d8f28814e1e1cf0e90ff4d2f03"
 "checksum url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afe9ec54bc4db14bc8744b7fed060d785ac756791450959b2248443319d5b119"
 "checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
-"checksum uuid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9767696a9e1bc7a73f2d5f8e0f5428b076cecd9199c200c0364aa0b2d57b8dfa"
 "checksum vecio 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0795a11576d29ae80525a3fda315bf7b534f8feb9d34101e5fe63fb95bb2fd24"
 "checksum vergen 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "56b639f935488eb40f06d17c3e3bcc3054f6f75d264e187b1107c8d1cba8d31c"
 "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"


@@ -8,7 +8,6 @@ build = "build.rs"
 [build-dependencies]
 rustc_version = "0.1"
-syntex = "*"
 ethcore-ipc-codegen = { path = "ipc/codegen" }
 ethcore-ipc-tests = { path = "ipc/tests" }
@@ -41,7 +40,7 @@ ethcore-ipc-hypervisor = { path = "ipc/hypervisor" }
 ethcore-logger = { path = "logger" }
 json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
 ethcore-dapps = { path = "dapps", optional = true }
-clippy = { version = "0.0.82", optional = true}
+clippy = { version = "0.0.85", optional = true}
 ethcore-stratum = { path = "stratum" }
 [target.'cfg(windows)'.dependencies]
@@ -51,7 +50,7 @@ winapi = "0.2"
 daemonize = "0.2"
 [dependencies.hyper]
-version = "0.8"
+version = "0.9"
 default-features = false
 [features]
@@ -62,6 +61,7 @@ dapps = ["ethcore-dapps"]
 ipc = ["ethcore/ipc"]
 dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev", "ethcore-rpc/dev", "ethcore-dapps/dev", "ethcore-signer/dev"]
 json-tests = ["ethcore/json-tests"]
+stratum = ["ipc"]
 [[bin]]
 path = "parity/main.rs"


@@ -21,6 +21,7 @@ serde = "0.7.0"
 serde_json = "0.7.0"
 serde_macros = { version = "0.7.0", optional = true }
 zip = { version = "0.1", default-features = false }
+ethabi = "0.2.1"
 ethcore-rpc = { path = "../rpc" }
 ethcore-util = { path = "../util" }
 parity-dapps = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4" }
@@ -29,11 +30,10 @@ parity-dapps-status = { git = "https://github.com/ethcore/parity-ui.git", versio
 parity-dapps-home = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4" }
 parity-dapps-wallet = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4", optional = true }
 mime_guess = { version = "1.6.1" }
-clippy = { version = "0.0.82", optional = true}
+clippy = { version = "0.0.85", optional = true}
 [build-dependencies]
 serde_codegen = { version = "0.7.0", optional = true }
-syntex = "*"
 [features]
 default = ["serde_codegen", "extra-dapps"]


@@ -16,7 +16,6 @@
 #[cfg(not(feature = "serde_macros"))]
 mod inner {
-extern crate syntex;
 extern crate serde_codegen;
 use std::env;
@@ -28,10 +27,7 @@ mod inner {
 let src = Path::new("./src/api/types.rs.in");
 let dst = Path::new(&out_dir).join("types.rs");
-let mut registry = syntex::Registry::new();
-serde_codegen::register(&mut registry);
-registry.expand("", &src, &dst).unwrap();
+serde_codegen::expand(&src, &dst).unwrap();
 }
 }
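(Orientation note, not part of the diff: with syntex dropped, the dapps codegen build script reduces to roughly the sketch below. It assumes the same types.rs.in/OUT_DIR layout and the serde_codegen 0.7 expand call shown in the hunk above; the serde_macros fallback module is the usual counterpart and is illustrative here.)

#[cfg(not(feature = "serde_macros"))]
mod inner {
	extern crate serde_codegen;

	use std::env;
	use std::path::Path;

	pub fn main() {
		// OUT_DIR is provided by cargo; expand the serde annotations directly,
		// no syntex registry pass is needed any more.
		let out_dir = env::var_os("OUT_DIR").unwrap();
		let src = Path::new("./src/api/types.rs.in");
		let dst = Path::new(&out_dir).join("types.rs");
		serde_codegen::expand(&src, &dst).unwrap();
	}
}

#[cfg(feature = "serde_macros")]
mod inner {
	pub fn main() {}
}

fn main() {
	inner::main();
}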


@@ -24,12 +24,14 @@ use std::io::{self, Read, Write};
 use std::path::PathBuf;
 use std::sync::Arc;
 use std::collections::HashMap;
+use rustc_serialize::hex::FromHex;
 use hyper::Control;
 use hyper::status::StatusCode;
 use random_filename;
-use util::Mutex;
+use util::{Mutex, H256};
+use util::sha3::sha3;
 use page::LocalPageEndpoint;
 use handlers::{ContentHandler, AppFetcherHandler, DappHandler};
 use endpoint::{Endpoint, EndpointPath, Handler};
@@ -54,12 +56,6 @@ impl<R: URLHint> Drop for AppFetcher<R> {
 }
 }
-impl Default for AppFetcher<URLHintContract> {
-fn default() -> Self {
-AppFetcher::new(URLHintContract)
-}
-}
 impl<R: URLHint> AppFetcher<R> {
 pub fn new(resolver: R) -> Self {
@@ -84,7 +80,10 @@ impl<R: URLHint> AppFetcher<R> {
 // Check if we already have the app
 Some(_) => true,
 // fallback to resolver
-None => self.resolver.resolve(app_id).is_some(),
+None => match app_id.from_hex() {
+Ok(app_id) => self.resolver.resolve(app_id).is_some(),
+_ => false,
+},
 }
 }
@@ -103,16 +102,22 @@ impl<R: URLHint> AppFetcher<R> {
 Some(&AppStatus::Fetching) => {
 (None, Box::new(ContentHandler::html(
 StatusCode::ServiceUnavailable,
-"<h1>This dapp is already being downloaded.</h1>".into()
+format!(
+"<html><head>{}</head><body>{}</body></html>",
+"<meta http-equiv=\"refresh\" content=\"1\">",
+"<h1>This dapp is already being downloaded.</h1><h2>Please wait...</h2>",
+)
 )) as Box<Handler>)
 },
 // We need to start fetching app
 None => {
 // TODO [todr] Keep only last N dapps available!
-let app = self.resolver.resolve(&app_id).expect("to_handler is called only when `contains` returns true.");
+let app_hex = app_id.from_hex().expect("to_handler is called only when `contains` returns true.");
+let app = self.resolver.resolve(app_hex).expect("to_handler is called only when `contains` returns true.");
 (Some(AppStatus::Fetching), Box::new(AppFetcherHandler::new(
 app,
 control,
+path.using_dapps_domains,
 DappInstaller {
 dapp_id: app_id.clone(),
 dapps_path: self.dapps_path.clone(),
@@ -133,10 +138,12 @@ impl<R: URLHint> AppFetcher<R> {
 #[derive(Debug)]
 pub enum ValidationError {
-ManifestNotFound,
-ManifestSerialization(String),
 Io(io::Error),
 Zip(zip::result::ZipError),
+InvalidDappId,
+ManifestNotFound,
+ManifestSerialization(String),
+HashMismatch { expected: H256, got: H256, },
 }
 impl From<io::Error> for ValidationError {
@@ -194,8 +201,15 @@ impl DappHandler for DappInstaller {
 fn validate_and_install(&self, app_path: PathBuf) -> Result<Manifest, ValidationError> {
 trace!(target: "dapps", "Opening dapp bundle at {:?}", app_path);
-// TODO [ToDr] Validate file hash
-let file = try!(fs::File::open(app_path));
+let mut file = try!(fs::File::open(app_path));
+let hash = try!(sha3(&mut file));
+let dapp_id = try!(self.dapp_id.as_str().parse().map_err(|_| ValidationError::InvalidDappId));
+if dapp_id != hash {
+return Err(ValidationError::HashMismatch {
+expected: dapp_id,
+got: hash,
+});
+}
 // Unpack archive
 let mut zip = try!(zip::ZipArchive::new(file));
 // First find manifest file
@@ -265,10 +279,11 @@ mod tests {
 use apps::urlhint::{GithubApp, URLHint};
 use endpoint::EndpointInfo;
 use page::LocalPageEndpoint;
+use util::Bytes;
 struct FakeResolver;
 impl URLHint for FakeResolver {
-fn resolve(&self, _app_id: &str) -> Option<GithubApp> {
+fn resolve(&self, _app_id: Bytes) -> Option<GithubApp> {
 None
 }
 }


@@ -33,7 +33,14 @@ pub const API_PATH : &'static str = "api";
 pub const UTILS_PATH : &'static str = "parity-utils";
 pub fn main_page() -> &'static str {
-"/home/"
+"home"
+}
+pub fn redirection_address(using_dapps_domains: bool, app_id: &str) -> String {
+if using_dapps_domains {
+format!("http://{}{}/", app_id, DAPPS_DOMAIN)
+} else {
+format!("/{}/", app_id)
+}
 }
 pub fn utils() -> Box<Endpoint> {
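(Orientation note, not part of the diff: with DAPPS_DOMAIN set to ".parity", as in the lib.rs hunk further down, the new helper resolves roughly as in this sketch.)

redirection_address(true, "home")   // => "http://home.parity/"  (subdomain routing)
redirection_address(false, "home")  // => "/home/"               (path routing)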


@@ -0,0 +1,21 @@
[
{"constant":false,"inputs":[{"name":"_new","type":"address"}],"name":"setOwner","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"string"}],"name":"confirmReverse","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"}],"name":"reserve","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"},{"name":"_value","type":"bytes32"}],"name":"set","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"}],"name":"drop","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":true,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"}],"name":"getAddress","outputs":[{"name":"","type":"address"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_amount","type":"uint256"}],"name":"setFee","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_to","type":"address"}],"name":"transfer","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"type":"function"},
{"constant":true,"inputs":[{"name":"_name","type":"bytes32"}],"name":"reserved","outputs":[{"name":"reserved","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[],"name":"drain","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"string"},{"name":"_who","type":"address"}],"name":"proposeReverse","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":true,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"}],"name":"getUint","outputs":[{"name":"","type":"uint256"}],"type":"function"},
{"constant":true,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"}],"name":"get","outputs":[{"name":"","type":"bytes32"}],"type":"function"},
{"constant":true,"inputs":[],"name":"fee","outputs":[{"name":"","type":"uint256"}],"type":"function"},
{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"reverse","outputs":[{"name":"","type":"string"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"},{"name":"_value","type":"uint256"}],"name":"setUint","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[],"name":"removeReverse","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"},{"name":"_value","type":"address"}],"name":"setAddress","outputs":[{"name":"success","type":"bool"}],"type":"function"},{"anonymous":false,"inputs":[{"indexed":false,"name":"amount","type":"uint256"}],"name":"Drained","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"amount","type":"uint256"}],"name":"FeeChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"owner","type":"address"}],"name":"Reserved","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"oldOwner","type":"address"},{"indexed":true,"name":"newOwner","type":"address"}],"name":"Transferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"owner","type":"address"}],"name":"Dropped","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"owner","type":"address"},{"indexed":true,"name":"key","type":"string"}],"name":"DataChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"string"},{"indexed":true,"name":"reverse","type":"address"}],"name":"ReverseProposed","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"string"},{"indexed":true,"name":"reverse","type":"address"}],"name":"ReverseConfirmed","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"string"},{"indexed":true,"name":"reverse","type":"address"}],"name":"ReverseRemoved","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"old","type":"address"},{"indexed":true,"name":"current","type":"address"}],"name":"NewOwner","type":"event"}
]


@@ -0,0 +1,6 @@
[
{"constant":false,"inputs":[{"name":"_content","type":"bytes32"},{"name":"_url","type":"string"}],"name":"hintURL","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_content","type":"bytes32"},{"name":"_accountSlashRepo","type":"string"},{"name":"_commit","type":"bytes20"}],"name":"hint","outputs":[],"type":"function"},
{"constant":true,"inputs":[{"name":"","type":"bytes32"}],"name":"entries","outputs":[{"name":"accountSlashRepo","type":"string"},{"name":"commit","type":"bytes20"},{"name":"owner","type":"address"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_content","type":"bytes32"}],"name":"unhint","outputs":[],"type":"function"}
]


@@ -14,13 +14,16 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
+use std::fmt;
+use std::sync::Arc;
 use rustc_serialize::hex::ToHex;
-use util::{Address, FromHex};
+use ethabi::{Interface, Contract, Token};
+use util::{Address, Bytes, Hashable};
 const COMMIT_LEN: usize = 20;
-#[derive(Debug)]
+#[derive(Debug, PartialEq)]
 pub struct GithubApp {
 pub account: String,
 pub repo: String,
@@ -48,42 +51,240 @@ impl GithubApp {
 }
 }
-pub trait URLHint {
-fn resolve(&self, app_id: &str) -> Option<GithubApp>;
+/// RAW Contract interface.
+/// Should execute transaction using current blockchain state.
+pub trait ContractClient: Send + Sync {
+/// Get registrar address
+fn registrar(&self) -> Result<Address, String>;
+/// Call Contract
+fn call(&self, address: Address, data: Bytes) -> Result<Bytes, String>;
 }
-pub struct URLHintContract;
-impl URLHint for URLHintContract {
-fn resolve(&self, app_id: &str) -> Option<GithubApp> {
-// TODO [todr] use GithubHint contract to check the details
-// For now we are just accepting patterns: <commithash>.<repo>.<account>.parity
-let mut app_parts = app_id.split('.');
-let hash = app_parts.next()
-.and_then(|h| h.from_hex().ok())
-.and_then(|h| GithubApp::commit(&h));
-let repo = app_parts.next();
-let account = app_parts.next();
-match (hash, repo, account) {
-(Some(hash), Some(repo), Some(account)) => {
-Some(GithubApp {
-account: account.into(),
-repo: repo.into(),
-commit: hash,
-owner: Address::default(),
+/// URLHint Contract interface
+pub trait URLHint {
+/// Resolves given id to registrar entry.
+fn resolve(&self, app_id: Bytes) -> Option<GithubApp>;
+}
+pub struct URLHintContract {
+urlhint: Contract,
+registrar: Contract,
+client: Arc<ContractClient>,
+}
+impl URLHintContract {
+pub fn new(client: Arc<ContractClient>) -> Self {
+let urlhint = Interface::load(include_bytes!("./urlhint.json")).expect("urlhint.json is valid ABI");
+let registrar = Interface::load(include_bytes!("./registrar.json")).expect("registrar.json is valid ABI");
+URLHintContract {
+urlhint: Contract::new(urlhint),
+registrar: Contract::new(registrar),
+client: client,
+}
+}
+fn urlhint_address(&self) -> Option<Address> {
+let res = || {
+let get_address = try!(self.registrar.function("getAddress".into()).map_err(as_string));
+let params = try!(get_address.encode_call(
+vec![Token::FixedBytes((*"githubhint".sha3()).to_vec()), Token::String("A".into())]
+).map_err(as_string));
+let output = try!(self.client.call(try!(self.client.registrar()), params));
+let result = try!(get_address.decode_output(output).map_err(as_string));
+match result.get(0) {
+Some(&Token::Address(address)) if address != *Address::default() => Ok(address.into()),
+Some(&Token::Address(_)) => Err(format!("Contract not found.")),
+e => Err(format!("Invalid result: {:?}", e)),
+}
+};
+match res() {
+Ok(res) => Some(res),
+Err(e) => {
+warn!(target: "dapps", "Error while calling registrar: {:?}", e);
+None
+}
+}
+}
+fn encode_urlhint_call(&self, app_id: Bytes) -> Option<Bytes> {
+let call = self.urlhint
+.function("entries".into())
+.and_then(|f| f.encode_call(vec![Token::FixedBytes(app_id)]));
+match call {
+Ok(res) => {
+Some(res)
+},
+Err(e) => {
+warn!(target: "dapps", "Error while encoding urlhint call: {:?}", e);
+None
+}
+}
+}
+fn decode_urlhint_output(&self, output: Bytes) -> Option<GithubApp> {
+trace!(target: "dapps", "Output: {:?}", output.to_hex());
+let output = self.urlhint
+.function("entries".into())
+.and_then(|f| f.decode_output(output));
+if let Ok(vec) = output {
+if vec.len() != 3 {
+warn!(target: "dapps", "Invalid contract output: {:?}", vec);
+return None;
+}
+let mut it = vec.into_iter();
+let account_slash_repo = it.next().unwrap();
+let commit = it.next().unwrap();
+let owner = it.next().unwrap();
+match (account_slash_repo, commit, owner) {
+(Token::String(account_slash_repo), Token::FixedBytes(commit), Token::Address(owner)) => {
+let owner = owner.into();
+if owner == Address::default() {
+return None;
+}
+let (account, repo) = {
+let mut it = account_slash_repo.split('/');
+match (it.next(), it.next()) {
+(Some(account), Some(repo)) => (account.into(), repo.into()),
+_ => return None,
+}
+};
+GithubApp::commit(&commit).map(|commit| GithubApp {
+account: account,
+repo: repo,
+commit: commit,
+owner: owner,
 })
 },
-_ => None,
+e => {
+warn!(target: "dapps", "Invalid contract output parameters: {:?}", e);
+None
+},
+}
+} else {
+warn!(target: "dapps", "Invalid contract output: {:?}", output);
+None
 }
 }
 }
+impl URLHint for URLHintContract {
+fn resolve(&self, app_id: Bytes) -> Option<GithubApp> {
+self.urlhint_address().and_then(|address| {
+// Prepare contract call
+self.encode_urlhint_call(app_id)
+.and_then(|data| {
+let call = self.client.call(address, data);
+if let Err(ref e) = call {
+warn!(target: "dapps", "Error while calling urlhint: {:?}", e);
+}
+call.ok()
+})
+.and_then(|output| self.decode_urlhint_output(output))
+})
+}
+}
+fn as_string<T: fmt::Debug>(e: T) -> String {
+format!("{:?}", e)
+}
 #[cfg(test)]
 mod tests {
-use super::GithubApp;
-use util::Address;
+use std::sync::Arc;
+use std::str::FromStr;
+use rustc_serialize::hex::{ToHex, FromHex};
+use super::*;
+use util::{Bytes, Address, Mutex, ToPretty};
+struct FakeRegistrar {
+pub calls: Arc<Mutex<Vec<(String, String)>>>,
+pub responses: Mutex<Vec<Result<Bytes, String>>>,
+}
+const REGISTRAR: &'static str = "8e4e9b13d4b45cb0befc93c3061b1408f67316b2";
+const URLHINT: &'static str = "deadbeefcafe0000000000000000000000000000";
+impl FakeRegistrar {
+fn new() -> Self {
+FakeRegistrar {
+calls: Arc::new(Mutex::new(Vec::new())),
+responses: Mutex::new(
+vec![
+Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
+Ok(Vec::new())
+]
+),
+}
+}
+}
+impl ContractClient for FakeRegistrar {
+fn registrar(&self) -> Result<Address, String> {
+Ok(REGISTRAR.parse().unwrap())
+}
+fn call(&self, address: Address, data: Bytes) -> Result<Bytes, String> {
+self.calls.lock().push((address.to_hex(), data.to_hex()));
+self.responses.lock().remove(0)
+}
+}
+#[test]
+fn should_call_registrar_and_urlhint_contracts() {
+// given
+let registrar = FakeRegistrar::new();
+let calls = registrar.calls.clone();
+let urlhint = URLHintContract::new(Arc::new(registrar));
+// when
+let res = urlhint.resolve("test".bytes().collect());
+let calls = calls.lock();
+let call0 = calls.get(0).expect("Registrar resolve called");
+let call1 = calls.get(1).expect("URLHint Resolve called");
+// then
+assert!(res.is_none());
+assert_eq!(call0.0, REGISTRAR);
+assert_eq!(call0.1,
+"6795dbcd058740ee9a5a3fb9f1cfa10752baec87e09cc45cd7027fd54708271aca300c75000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000014100000000000000000000000000000000000000000000000000000000000000".to_owned()
+);
+assert_eq!(call1.0, URLHINT);
+assert_eq!(call1.1,
+"267b69227465737400000000000000000000000000000000000000000000000000000000".to_owned()
+);
+}
+#[test]
+fn should_decode_urlhint_output() {
+// given
+let mut registrar = FakeRegistrar::new();
+registrar.responses = Mutex::new(vec![
+Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
+Ok("0000000000000000000000000000000000000000000000000000000000000060ec4c1fe06c808fe3739858c347109b1f5f1ed4b5000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff0000000000000000000000000000000000000000000000000000000000000011657468636f72652f64616f2e636c61696d000000000000000000000000000000".from_hex().unwrap()),
+]);
+let urlhint = URLHintContract::new(Arc::new(registrar));
+// when
+let res = urlhint.resolve("test".bytes().collect());
+// then
+assert_eq!(res, Some(GithubApp {
+account: "ethcore".into(),
+repo: "dao.claim".into(),
+commit: GithubApp::commit(&"ec4c1fe06c808fe3739858c347109b1f5f1ed4b5".from_hex().unwrap()).unwrap(),
+owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
+}))
+}
 #[test]
 fn should_return_valid_url() {
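(Orientation note, not part of the diff: a resolver backed by a real node would be wired up roughly like the sketch below. MyClient and lookup are illustrative; only the ContractClient and URLHint traits and URLHintContract come from the hunk above.)

struct MyClient;

impl ContractClient for MyClient {
	fn registrar(&self) -> Result<Address, String> {
		// Address of the name registrar contract, as the backing client knows it.
		"8e4e9b13d4b45cb0befc93c3061b1408f67316b2".parse().map_err(|_| "bad registrar address".to_owned())
	}
	fn call(&self, _address: Address, _data: Bytes) -> Result<Bytes, String> {
		// Hypothetical: forward to an eth_call-style read-only execution and
		// return the raw output bytes.
		Ok(Vec::new())
	}
}

fn lookup(app_id: Bytes) -> Option<GithubApp> {
	let urlhint = URLHintContract::new(Arc::new(MyClient));
	// Some(GithubApp) when the registrar has a "githubhint" entry for this
	// content hash, None otherwise.
	urlhint.resolve(app_id)
}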


@@ -24,6 +24,7 @@ pub struct EndpointPath {
 pub app_id: String,
 pub host: String,
 pub port: u16,
+pub using_dapps_domains: bool,
 }
 #[derive(Debug, PartialEq, Clone)]


@@ -27,7 +27,7 @@ use hyper::status::StatusCode;
 use handlers::ContentHandler;
 use handlers::client::{Fetch, FetchResult};
-use apps::DAPPS_DOMAIN;
+use apps::redirection_address;
 use apps::urlhint::GithubApp;
 use apps::manifest::Manifest;
@@ -54,6 +54,7 @@ pub struct AppFetcherHandler<H: DappHandler> {
 	control: Option<Control>,
 	status: FetchState,
 	client: Option<Client<Fetch>>,
+	using_dapps_domains: bool,
 	dapp: H,
 }
@@ -72,6 +73,7 @@ impl<H: DappHandler> AppFetcherHandler<H> {
 	pub fn new(
 		app: GithubApp,
 		control: Control,
+		using_dapps_domains: bool,
 		handler: H) -> Self {
 		let client = Client::new().expect("Failed to create a Client");
@@ -79,6 +81,7 @@ impl<H: DappHandler> AppFetcherHandler<H> {
 			control: Some(control),
 			client: Some(client),
 			status: FetchState::NotStarted(app),
+			using_dapps_domains: using_dapps_domains,
 			dapp: handler,
 		}
 	}
@@ -207,8 +210,7 @@ impl<H: DappHandler> server::Handler<HttpStream> for AppFetcherHandler<H> {
 			FetchState::Done(ref manifest) => {
 				trace!(target: "dapps", "Fetching dapp finished. Redirecting to {}", manifest.id);
 				res.set_status(StatusCode::Found);
-				// TODO [todr] should detect if its using nice-urls
-				res.headers_mut().set(header::Location(format!("http://{}{}", manifest.id, DAPPS_DOMAIN)));
+				res.headers_mut().set(header::Location(redirection_address(self.using_dapps_domains, &manifest.id)));
 				Next::write()
 			},
 			FetchState::Error(ref mut handler) => handler.on_response(res),
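The handler now builds its redirect target through `apps::redirection_address` instead of hard-coding the `*.parity` form. The function body is not part of this hunk; judging from the call sites here and in the router further down, a plausible sketch (the exact format strings are assumptions) is:

fn redirection_address(using_dapps_domains: bool, app_id: &str) -> String {
	if using_dapps_domains {
		// vhost routing is active, e.g. "http://home.parity"
		format!("http://{}{}", app_id, DAPPS_DOMAIN)
	} else {
		// path-based fallback, e.g. "/home/"
		format!("/{}/", app_id)
	}
}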


@@ -21,13 +21,13 @@ use hyper::net::HttpStream;
 use hyper::status::StatusCode;
 
 pub struct Redirection {
-	to_url: &'static str
+	to_url: String
 }
 
 impl Redirection {
-	pub fn new(url: &'static str) -> Box<Self> {
+	pub fn new(url: &str) -> Box<Self> {
 		Box::new(Redirection {
-			to_url: url
+			to_url: url.to_owned()
 		})
 	}
 }
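Switching `to_url` from `&'static str` to an owned `String` is what lets callers redirect to addresses computed at request time, as the router change further down does via `redirection_address`. A minimal usage sketch:

// Any runtime-built String can now back a redirect handler.
let target = format!("http://{}{}", "home", ".parity");
let _handler = Redirection::new(&target);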


@@ -52,13 +52,14 @@ extern crate serde;
 extern crate serde_json;
 extern crate zip;
 extern crate rand;
+extern crate ethabi;
 extern crate jsonrpc_core;
 extern crate jsonrpc_http_server;
-extern crate mime_guess;
-extern crate rustc_serialize;
 extern crate parity_dapps;
 extern crate ethcore_rpc;
 extern crate ethcore_util as util;
+extern crate mime_guess;
+extern crate rustc_serialize;
 
 mod endpoint;
 mod apps;
@@ -70,6 +71,8 @@ mod api;
 mod proxypac;
 mod url;
 
+pub use self::apps::urlhint::ContractClient;
+
 use std::sync::{Arc, Mutex};
 use std::net::SocketAddr;
 use std::collections::HashMap;
@@ -84,6 +87,7 @@ static DAPPS_DOMAIN : &'static str = ".parity";
 pub struct ServerBuilder {
 	dapps_path: String,
 	handler: Arc<IoHandler>,
+	registrar: Arc<ContractClient>,
 }
 
 impl Extendable for ServerBuilder {
@@ -94,23 +98,24 @@ impl Extendable for ServerBuilder {
 impl ServerBuilder {
 	/// Construct new dapps server
-	pub fn new(dapps_path: String) -> Self {
+	pub fn new(dapps_path: String, registrar: Arc<ContractClient>) -> Self {
 		ServerBuilder {
 			dapps_path: dapps_path,
-			handler: Arc::new(IoHandler::new())
+			handler: Arc::new(IoHandler::new()),
+			registrar: registrar,
 		}
 	}
 
 	/// Asynchronously start server with no authentication,
 	/// returns result with `Server` handle on success or an error.
 	pub fn start_unsecure_http(&self, addr: &SocketAddr) -> Result<Server, ServerError> {
-		Server::start_http(addr, NoAuth, self.handler.clone(), self.dapps_path.clone())
+		Server::start_http(addr, NoAuth, self.handler.clone(), self.dapps_path.clone(), self.registrar.clone())
 	}
 
 	/// Asynchronously start server with `HTTP Basic Authentication`,
 	/// return result with `Server` handle on success or an error.
 	pub fn start_basic_auth_http(&self, addr: &SocketAddr, username: &str, password: &str) -> Result<Server, ServerError> {
-		Server::start_http(addr, HttpBasicAuth::single_user(username, password), self.handler.clone(), self.dapps_path.clone())
+		Server::start_http(addr, HttpBasicAuth::single_user(username, password), self.handler.clone(), self.dapps_path.clone(), self.registrar.clone())
 	}
 }
@@ -121,10 +126,16 @@ pub struct Server {
 }
 
 impl Server {
-	fn start_http<A: Authorization + 'static>(addr: &SocketAddr, authorization: A, handler: Arc<IoHandler>, dapps_path: String) -> Result<Server, ServerError> {
+	fn start_http<A: Authorization + 'static>(
+		addr: &SocketAddr,
+		authorization: A,
+		handler: Arc<IoHandler>,
+		dapps_path: String,
+		registrar: Arc<ContractClient>,
+	) -> Result<Server, ServerError> {
 		let panic_handler = Arc::new(Mutex::new(None));
 		let authorization = Arc::new(authorization);
-		let apps_fetcher = Arc::new(apps::fetcher::AppFetcher::default());
+		let apps_fetcher = Arc::new(apps::fetcher::AppFetcher::new(apps::urlhint::URLHintContract::new(registrar)));
 		let endpoints = Arc::new(apps::all_endpoints(dapps_path));
 		let special = Arc::new({
 			let mut special = HashMap::new();


@@ -187,7 +187,8 @@ fn should_extract_path_with_appid() {
 		path: EndpointPath {
 			app_id: "app".to_owned(),
 			host: "".to_owned(),
-			port: 8080
+			port: 8080,
+			using_dapps_domains: true,
 		},
 		file: None,
 		safe_to_embed: true,


@@ -86,9 +86,10 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
 				let control = self.control.take().expect("on_request is called only once, thus control is always defined.");
 				self.fetch.to_handler(path.clone(), control)
 			},
-			// Redirection to main page
-			_ if *req.method() == hyper::method::Method::Get => {
-				Redirection::new(self.main_page)
+			// Redirection to main page (maybe 404 instead?)
+			(Some(ref path), _) if *req.method() == hyper::method::Method::Get => {
+				let address = apps::redirection_address(path.using_dapps_domains, self.main_page);
+				Redirection::new(address.as_str())
 			},
 			// RPC by default
 			_ => {
@ -165,6 +166,7 @@ fn extract_endpoint(url: &Option<Url>) -> (Option<EndpointPath>, SpecialEndpoint
app_id: id, app_id: id,
host: domain.clone(), host: domain.clone(),
port: url.port, port: url.port,
using_dapps_domains: true,
}), special_endpoint(url)) }), special_endpoint(url))
}, },
_ if url.path.len() > 1 => { _ if url.path.len() > 1 => {
@ -173,6 +175,7 @@ fn extract_endpoint(url: &Option<Url>) -> (Option<EndpointPath>, SpecialEndpoint
app_id: id.clone(), app_id: id.clone(),
host: format!("{}", url.host), host: format!("{}", url.host),
port: url.port, port: url.port,
using_dapps_domains: false,
}), special_endpoint(url)) }), special_endpoint(url))
}, },
_ => (None, special_endpoint(url)), _ => (None, special_endpoint(url)),
@ -192,6 +195,7 @@ fn should_extract_endpoint() {
app_id: "status".to_owned(), app_id: "status".to_owned(),
host: "localhost".to_owned(), host: "localhost".to_owned(),
port: 8080, port: 8080,
using_dapps_domains: false,
}), SpecialEndpoint::None) }), SpecialEndpoint::None)
); );
@ -202,6 +206,7 @@ fn should_extract_endpoint() {
app_id: "rpc".to_owned(), app_id: "rpc".to_owned(),
host: "localhost".to_owned(), host: "localhost".to_owned(),
port: 8080, port: 8080,
using_dapps_domains: false,
}), SpecialEndpoint::Rpc) }), SpecialEndpoint::Rpc)
); );
@ -211,6 +216,7 @@ fn should_extract_endpoint() {
app_id: "my.status".to_owned(), app_id: "my.status".to_owned(),
host: "my.status.parity".to_owned(), host: "my.status.parity".to_owned(),
port: 80, port: 80,
using_dapps_domains: true,
}), SpecialEndpoint::Utils) }), SpecialEndpoint::Utils)
); );
@ -221,6 +227,7 @@ fn should_extract_endpoint() {
app_id: "my.status".to_owned(), app_id: "my.status".to_owned(),
host: "my.status.parity".to_owned(), host: "my.status.parity".to_owned(),
port: 80, port: 80,
using_dapps_domains: true,
}), SpecialEndpoint::None) }), SpecialEndpoint::None)
); );
@ -231,6 +238,7 @@ fn should_extract_endpoint() {
app_id: "my.status".to_owned(), app_id: "my.status".to_owned(),
host: "my.status.parity".to_owned(), host: "my.status.parity".to_owned(),
port: 80, port: 80,
using_dapps_domains: true,
}), SpecialEndpoint::Rpc) }), SpecialEndpoint::Rpc)
); );
@ -241,6 +249,7 @@ fn should_extract_endpoint() {
app_id: "my.status".to_owned(), app_id: "my.status".to_owned(),
host: "my.status.parity".to_owned(), host: "my.status.parity".to_owned(),
port: 80, port: 80,
using_dapps_domains: true,
}), SpecialEndpoint::Api) }), SpecialEndpoint::Api)
); );
} }


@@ -8,11 +8,10 @@ authors = ["Ethcore <admin@ethcore.io>"]
 build = "build.rs"
 
 [build-dependencies]
-syntex = "*"
 ethcore-ipc-codegen = { path = "../ipc/codegen" }
 
 [dependencies]
-clippy = { version = "0.0.82", optional = true}
+clippy = { version = "0.0.85", optional = true}
 ethcore-devtools = { path = "../devtools" }
 ethcore-ipc = { path = "../ipc/rpc" }
 rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }


@@ -14,7 +14,6 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
-extern crate syntex;
 extern crate ethcore_ipc_codegen as codegen;
 
 use std::env;
@@ -27,17 +26,13 @@ pub fn main() {
 	{
 		let src = Path::new("src/lib.rs.in");
 		let dst = Path::new(&out_dir).join("lib.intermediate.rs.in");
-		let mut registry = syntex::Registry::new();
-		codegen::register(&mut registry);
-		registry.expand("", &src, &dst).unwrap();
+		codegen::expand(&src, &dst);
 	}
 
 	// binary serialization pass
 	{
 		let src = Path::new(&out_dir).join("lib.intermediate.rs.in");
 		let dst = Path::new(&out_dir).join("lib.rs");
-		let mut registry = syntex::Registry::new();
-		codegen::register(&mut registry);
-		registry.expand("", &src, &dst).unwrap();
+		codegen::expand(&src, &dst);
 	}
 }
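Pieced together, the trimmed build script reduces to two `codegen::expand` passes. A reconstructed sketch (the `OUT_DIR` plumbing is assumed; only the expansion calls appear in the hunk):

extern crate ethcore_ipc_codegen as codegen;

use std::env;
use std::path::Path;

pub fn main() {
	// assumption: standard cargo build-script environment variable
	let out_dir = env::var("OUT_DIR").unwrap();

	// ipc interface pass
	let src = Path::new("src/lib.rs.in");
	let dst = Path::new(&out_dir).join("lib.intermediate.rs.in");
	codegen::expand(&src, &dst);

	// binary serialization pass
	let src = Path::new(&out_dir).join("lib.intermediate.rs.in");
	let dst = Path::new(&out_dir).join("lib.rs");
	codegen::expand(&src, &dst);
}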


@@ -67,13 +67,21 @@ impl RandomTempPath {
 	}
 }
 
+impl AsRef<Path> for RandomTempPath {
+	fn as_ref(&self) -> &Path {
+		self.as_path()
+	}
+}
+
 impl Drop for RandomTempPath {
 	fn drop(&mut self) {
-		if let Err(e) = fs::remove_dir_all(self.as_path()) {
+		if let Err(_) = fs::remove_dir_all(&self) {
+			if let Err(e) = fs::remove_file(&self) {
 				panic!("Failed to remove temp directory. Here's what prevented this from happening: ({})", e);
+			}
 		}
 	}
 }
 
 pub struct GuardedTempResult<T> {
 	pub result: Option<T>,
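With the new `AsRef<Path>` impl a `RandomTempPath` can be handed straight to `std::fs` helpers, which is exactly what the revised `Drop` relies on. A small test-side usage sketch (assumed, not from the diff):

let temp = RandomTempPath::new();
fs::create_dir_all(&temp).unwrap();   // &RandomTempPath now satisfies AsRef<Path>
assert!(temp.as_path().exists());     // removed again when `temp` is dropped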


@@ -25,7 +25,7 @@ semver = "0.2"
 bit-set = "0.4"
 time = "0.1"
 evmjit = { path = "../evmjit", optional = true }
-clippy = { version = "0.0.82", optional = true}
+clippy = { version = "0.0.85", optional = true}
 ethash = { path = "../ethash" }
 ethcore-util = { path = "../util" }
 ethcore-io = { path = "../util/io" }
@@ -33,6 +33,7 @@ ethcore-devtools = { path = "../devtools" }
 ethjson = { path = "../json" }
 ethcore-ipc = { path = "../ipc/rpc" }
 ethstore = { path = "../ethstore" }
+ethkey = { path = "../ethkey" }
 ethcore-ipc-nano = { path = "../ipc/nano" }
 rand = "0.3"


@ -35,6 +35,38 @@ fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 {
dst dst
} }
/// A factory for different kinds of account dbs.
#[derive(Debug, Clone)]
pub enum Factory {
/// Mangle hashes based on address.
Mangled,
/// Don't mangle hashes.
Plain,
}
impl Default for Factory {
fn default() -> Self { Factory::Mangled }
}
impl Factory {
/// Create a read-only accountdb.
/// This will panic when write operations are called.
pub fn readonly<'db>(&self, db: &'db HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)),
Factory::Plain => Box::new(Wrapping(db)),
}
}
/// Create a new mutable hashdb.
pub fn create<'db>(&self, db: &'db mut HashDB, address_hash: H256) -> Box<HashDB + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)),
Factory::Plain => Box::new(WrappingMut(db)),
}
}
}
// TODO: introduce HashDBMut? // TODO: introduce HashDBMut?
/// DB backend wrapper for Account trie /// DB backend wrapper for Account trie
/// Transforms trie node keys for the database /// Transforms trie node keys for the database
@ -162,4 +194,79 @@ impl<'db> HashDB for AccountDBMut<'db>{
} }
} }
struct Wrapping<'db>(&'db HashDB);
impl<'db> HashDB for Wrapping<'db> {
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<&[u8]> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: Bytes) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
struct WrappingMut<'db>(&'db mut HashDB);
impl<'db> HashDB for WrappingMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<&[u8]> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
}
self.0.get(key)
}
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.0.contains(key)
}
fn insert(&mut self, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return SHA3_NULL_RLP.clone();
}
self.0.insert(value)
}
fn emplace(&mut self, key: H256, value: Bytes) {
if key == SHA3_NULL_RLP {
return;
}
self.0.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
self.0.remove(key)
}
}
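A minimal usage sketch for the new factory, assuming `util`'s in-memory `MemoryDB` as the backing `HashDB` (illustrative only, not part of the diff):

let mut backing = MemoryDB::new();
let factory = Factory::Plain;                      // Factory::Mangled is the default
let code_hash = {
	let mut account_db = factory.create(&mut backing, H256::new());
	account_db.insert(b"account code")             // passed through unmangled for Plain
};
assert!(factory.readonly(&backing, H256::new()).contains(&code_hash));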


@ -21,7 +21,7 @@ use engines::Engine;
use state::*; use state::*;
use verification::PreverifiedBlock; use verification::PreverifiedBlock;
use trace::FlatTrace; use trace::FlatTrace;
use evm::Factory as EvmFactory; use factory::Factories;
/// A block, encoded as it is on the block chain. /// A block, encoded as it is on the block chain.
#[derive(Default, Debug, Clone, PartialEq)] #[derive(Default, Debug, Clone, PartialEq)]
@ -192,7 +192,6 @@ impl IsBlock for ExecutedBlock {
pub struct OpenBlock<'x> { pub struct OpenBlock<'x> {
block: ExecutedBlock, block: ExecutedBlock,
engine: &'x Engine, engine: &'x Engine,
vm_factory: &'x EvmFactory,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
} }
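`Factories` itself is defined in a module not shown in this diff; judging from how it is filled in client.rs further down, it bundles roughly the following (an inferred sketch, not the actual definition):

#[derive(Default, Clone)]
pub struct Factories {
	/// EVM factory, formerly threaded through as `vm_factory`.
	pub vm: EvmFactory,
	/// Trie factory, formerly `trie_factory`.
	pub trie: TrieFactory,
	/// The account-db `Factory` introduced earlier in this commit.
	pub accountdb: ::account_db::Factory,
}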
@ -230,8 +229,7 @@ impl<'x> OpenBlock<'x> {
/// Create a new `OpenBlock` ready for transaction pushing. /// Create a new `OpenBlock` ready for transaction pushing.
pub fn new( pub fn new(
engine: &'x Engine, engine: &'x Engine,
vm_factory: &'x EvmFactory, factories: Factories,
trie_factory: TrieFactory,
tracing: bool, tracing: bool,
db: Box<JournalDB>, db: Box<JournalDB>,
parent: &Header, parent: &Header,
@ -240,11 +238,10 @@ impl<'x> OpenBlock<'x> {
gas_range_target: (U256, U256), gas_range_target: (U256, U256),
extra_data: Bytes, extra_data: Bytes,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
let state = try!(State::from_existing(db, parent.state_root().clone(), engine.account_start_nonce(), trie_factory)); let state = try!(State::from_existing(db, parent.state_root().clone(), engine.account_start_nonce(), factories));
let mut r = OpenBlock { let mut r = OpenBlock {
block: ExecutedBlock::new(state, tracing), block: ExecutedBlock::new(state, tracing),
engine: engine, engine: engine,
vm_factory: vm_factory,
last_hashes: last_hashes, last_hashes: last_hashes,
}; };
@ -332,7 +329,7 @@ impl<'x> OpenBlock<'x> {
let env_info = self.env_info(); let env_info = self.env_info();
// info!("env_info says gas_used={}", env_info.gas_used); // info!("env_info says gas_used={}", env_info.gas_used);
match self.block.state.apply(&env_info, self.engine, self.vm_factory, &t, self.block.traces.is_some()) { match self.block.state.apply(&env_info, self.engine, &t, self.block.traces.is_some()) {
Ok(outcome) => { Ok(outcome) => {
self.block.transactions_set.insert(h.unwrap_or_else(||t.hash())); self.block.transactions_set.insert(h.unwrap_or_else(||t.hash()));
self.block.base.transactions.push(t); self.block.base.transactions.push(t);
@ -421,14 +418,13 @@ impl ClosedBlock {
} }
/// Given an engine reference, reopen the `ClosedBlock` into an `OpenBlock`. /// Given an engine reference, reopen the `ClosedBlock` into an `OpenBlock`.
pub fn reopen<'a>(self, engine: &'a Engine, vm_factory: &'a EvmFactory) -> OpenBlock<'a> { pub fn reopen<'a>(self, engine: &'a Engine) -> OpenBlock<'a> {
// revert rewards (i.e. set state back at last transaction's state). // revert rewards (i.e. set state back at last transaction's state).
let mut block = self.block; let mut block = self.block;
block.state = self.unclosed_state; block.state = self.unclosed_state;
OpenBlock { OpenBlock {
block: block, block: block,
engine: engine, engine: engine,
vm_factory: vm_factory,
last_hashes: self.last_hashes, last_hashes: self.last_hashes,
} }
} }
@ -499,17 +495,16 @@ pub fn enact(
db: Box<JournalDB>, db: Box<JournalDB>,
parent: &Header, parent: &Header,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
vm_factory: &EvmFactory, factories: Factories,
trie_factory: TrieFactory,
) -> Result<LockedBlock, Error> { ) -> Result<LockedBlock, Error> {
{ {
if ::log::max_log_level() >= ::log::LogLevel::Trace { if ::log::max_log_level() >= ::log::LogLevel::Trace {
let s = try!(State::from_existing(db.boxed_clone(), parent.state_root().clone(), engine.account_start_nonce(), trie_factory.clone())); let s = try!(State::from_existing(db.boxed_clone(), parent.state_root().clone(), engine.account_start_nonce(), factories.clone()));
trace!("enact(): root={}, author={}, author_balance={}\n", s.root(), header.author(), s.balance(&header.author())); trace!("enact(): root={}, author={}, author_balance={}\n", s.root(), header.author(), s.balance(&header.author()));
} }
} }
let mut b = try!(OpenBlock::new(engine, vm_factory, trie_factory, tracing, db, parent, last_hashes, Address::new(), (3141562.into(), 31415620.into()), vec![])); let mut b = try!(OpenBlock::new(engine, factories, tracing, db, parent, last_hashes, Address::new(), (3141562.into(), 31415620.into()), vec![]));
b.set_difficulty(*header.difficulty()); b.set_difficulty(*header.difficulty());
b.set_gas_limit(*header.gas_limit()); b.set_gas_limit(*header.gas_limit());
b.set_timestamp(header.timestamp()); b.set_timestamp(header.timestamp());
@ -532,12 +527,11 @@ pub fn enact_bytes(
db: Box<JournalDB>, db: Box<JournalDB>,
parent: &Header, parent: &Header,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
vm_factory: &EvmFactory, factories: Factories,
trie_factory: TrieFactory,
) -> Result<LockedBlock, Error> { ) -> Result<LockedBlock, Error> {
let block = BlockView::new(block_bytes); let block = BlockView::new(block_bytes);
let header = block.header(); let header = block.header();
enact(&header, &block.transactions(), &block.uncles(), engine, tracing, db, parent, last_hashes, vm_factory, trie_factory) enact(&header, &block.transactions(), &block.uncles(), engine, tracing, db, parent, last_hashes, factories)
} }
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
@ -549,11 +543,10 @@ pub fn enact_verified(
db: Box<JournalDB>, db: Box<JournalDB>,
parent: &Header, parent: &Header,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
vm_factory: &EvmFactory, factories: Factories,
trie_factory: TrieFactory,
) -> Result<LockedBlock, Error> { ) -> Result<LockedBlock, Error> {
let view = BlockView::new(&block.bytes); let view = BlockView::new(&block.bytes);
enact(&block.header, &block.transactions, &view.uncles(), engine, tracing, db, parent, last_hashes, vm_factory, trie_factory) enact(&block.header, &block.transactions, &view.uncles(), engine, tracing, db, parent, last_hashes, factories)
} }
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header. Seal the block afterwards
@ -565,11 +558,10 @@ pub fn enact_and_seal(
db: Box<JournalDB>, db: Box<JournalDB>,
parent: &Header, parent: &Header,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
vm_factory: &EvmFactory, factories: Factories,
trie_factory: TrieFactory,
) -> Result<SealedBlock, Error> { ) -> Result<SealedBlock, Error> {
let header = BlockView::new(block_bytes).header_view(); let header = BlockView::new(block_bytes).header_view();
Ok(try!(try!(enact_bytes(block_bytes, engine, tracing, db, parent, last_hashes, vm_factory, trie_factory)).seal(engine, header.seal()))) Ok(try!(try!(enact_bytes(block_bytes, engine, tracing, db, parent, last_hashes, factories)).seal(engine, header.seal())))
} }
#[cfg(test)] #[cfg(test)]
@ -587,8 +579,7 @@ mod tests {
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let vm_factory = Default::default(); let b = OpenBlock::new(&*spec.engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = OpenBlock::new(&*spec.engine, &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = b.close_and_lock(); let b = b.close_and_lock();
let _ = b.seal(&*spec.engine, vec![]); let _ = b.seal(&*spec.engine, vec![]);
} }
@ -603,9 +594,8 @@ mod tests {
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_journal_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let vm_factory = Default::default();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let b = OpenBlock::new(engine, &vm_factory, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap() let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap()
.close_and_lock().seal(engine, vec![]).unwrap(); .close_and_lock().seal(engine, vec![]).unwrap();
let orig_bytes = b.rlp_bytes(); let orig_bytes = b.rlp_bytes();
let orig_db = b.drain(); let orig_db = b.drain();
@ -613,7 +603,7 @@ mod tests {
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_journal_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, &Default::default(), Default::default()).unwrap(); let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap();
assert_eq!(e.rlp_bytes(), orig_bytes); assert_eq!(e.rlp_bytes(), orig_bytes);
@ -632,9 +622,8 @@ mod tests {
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_journal_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let vm_factory = Default::default();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let mut open_block = OpenBlock::new(engine, &vm_factory, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let mut open_block = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let mut uncle1_header = Header::new(); let mut uncle1_header = Header::new();
uncle1_header.extra_data = b"uncle1".to_vec(); uncle1_header.extra_data = b"uncle1".to_vec();
let mut uncle2_header = Header::new(); let mut uncle2_header = Header::new();
@ -649,7 +638,7 @@ mod tests {
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_journal_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, &Default::default(), Default::default()).unwrap(); let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap();
let bytes = e.rlp_bytes(); let bytes = e.rlp_bytes();
assert_eq!(bytes, orig_bytes); assert_eq!(bytes, orig_bytes);


@ -380,7 +380,7 @@ impl BlockChain {
children: vec![] children: vec![]
}; };
let batch = DBTransaction::new(&db); let mut batch = DBTransaction::new(&db);
batch.put(db::COL_HEADERS, &hash, block.header_rlp().as_raw()); batch.put(db::COL_HEADERS, &hash, block.header_rlp().as_raw());
batch.put(db::COL_BODIES, &hash, &Self::block_to_body(genesis)); batch.put(db::COL_BODIES, &hash, &Self::block_to_body(genesis));
@ -419,7 +419,7 @@ impl BlockChain {
} }
} }
let batch = db.transaction(); let mut batch = db.transaction();
batch.put(db::COL_EXTRA, b"first", &hash); batch.put(db::COL_EXTRA, b"first", &hash);
db.write(batch).expect("Low level database error."); db.write(batch).expect("Low level database error.");
@ -451,7 +451,7 @@ impl BlockChain {
#[cfg(test)] #[cfg(test)]
fn rewind(&self) -> Option<H256> { fn rewind(&self) -> Option<H256> {
use db::Key; use db::Key;
let batch = self.db.transaction(); let mut batch =self.db.transaction();
// track back to the best block we have in the blocks database // track back to the best block we have in the blocks database
if let Some(best_block_hash) = self.db.get(db::COL_EXTRA, b"best").unwrap() { if let Some(best_block_hash) = self.db.get(db::COL_EXTRA, b"best").unwrap() {
let best_block_hash = H256::from_slice(&best_block_hash); let best_block_hash = H256::from_slice(&best_block_hash);
@ -604,7 +604,7 @@ impl BlockChain {
assert!(self.pending_best_block.read().is_none()); assert!(self.pending_best_block.read().is_none());
let batch = self.db.transaction(); let mut batch = self.db.transaction();
let block_rlp = UntrustedRlp::new(bytes); let block_rlp = UntrustedRlp::new(bytes);
let compressed_header = block_rlp.at(0).unwrap().compress(RlpType::Blocks); let compressed_header = block_rlp.at(0).unwrap().compress(RlpType::Blocks);
@ -625,7 +625,7 @@ impl BlockChain {
location: BlockLocation::CanonChain, location: BlockLocation::CanonChain,
}; };
self.prepare_update(&batch, ExtrasUpdate { self.prepare_update(&mut batch, ExtrasUpdate {
block_hashes: self.prepare_block_hashes_update(bytes, &info), block_hashes: self.prepare_block_hashes_update(bytes, &info),
block_details: self.prepare_block_details_update(bytes, &info), block_details: self.prepare_block_details_update(bytes, &info),
block_receipts: self.prepare_block_receipts_update(receipts, &info), block_receipts: self.prepare_block_receipts_update(receipts, &info),
@ -659,7 +659,7 @@ impl BlockChain {
let mut update = HashMap::new(); let mut update = HashMap::new();
update.insert(hash, block_details); update.insert(hash, block_details);
self.prepare_update(&batch, ExtrasUpdate { self.prepare_update(&mut batch, ExtrasUpdate {
block_hashes: self.prepare_block_hashes_update(bytes, &info), block_hashes: self.prepare_block_hashes_update(bytes, &info),
block_details: update, block_details: update,
block_receipts: self.prepare_block_receipts_update(receipts, &info), block_receipts: self.prepare_block_receipts_update(receipts, &info),
@ -682,7 +682,7 @@ impl BlockChain {
let mut parent_details = self.block_details(&block_hash) let mut parent_details = self.block_details(&block_hash)
.unwrap_or_else(|| panic!("Invalid block hash: {:?}", block_hash)); .unwrap_or_else(|| panic!("Invalid block hash: {:?}", block_hash));
let batch = self.db.transaction(); let mut batch = self.db.transaction();
parent_details.children.push(child_hash); parent_details.children.push(child_hash);
let mut update = HashMap::new(); let mut update = HashMap::new();
@ -701,7 +701,7 @@ impl BlockChain {
/// Inserts the block into backing cache database. /// Inserts the block into backing cache database.
/// Expects the block to be valid and already verified. /// Expects the block to be valid and already verified.
/// If the block is already known, does nothing. /// If the block is already known, does nothing.
pub fn insert_block(&self, batch: &DBTransaction, bytes: &[u8], receipts: Vec<Receipt>) -> ImportRoute { pub fn insert_block(&self, batch: &mut DBTransaction, bytes: &[u8], receipts: Vec<Receipt>) -> ImportRoute {
// create views onto rlp // create views onto rlp
let block = BlockView::new(bytes); let block = BlockView::new(bytes);
let header = block.header_view(); let header = block.header_view();
@ -782,7 +782,7 @@ impl BlockChain {
} }
/// Prepares extras update. /// Prepares extras update.
fn prepare_update(&self, batch: &DBTransaction, update: ExtrasUpdate, is_best: bool) { fn prepare_update(&self, batch: &mut DBTransaction, update: ExtrasUpdate, is_best: bool) {
{ {
let block_hashes: Vec<_> = update.block_details.keys().cloned().collect(); let block_hashes: Vec<_> = update.block_details.keys().cloned().collect();
@ -995,8 +995,13 @@ impl BlockChain {
let log_blooms = match info.location { let log_blooms = match info.location {
BlockLocation::Branch => HashMap::new(), BlockLocation::Branch => HashMap::new(),
BlockLocation::CanonChain => { BlockLocation::CanonChain => {
let log_bloom = header.log_bloom();
if log_bloom.is_zero() {
HashMap::new()
} else {
let chain = bc::group::BloomGroupChain::new(self.blooms_config, self); let chain = bc::group::BloomGroupChain::new(self.blooms_config, self);
chain.insert(info.number as bc::Number, Bloom::from(header.log_bloom()).into()) chain.insert(info.number as bc::Number, Bloom::from(log_bloom).into())
}
}, },
BlockLocation::BranchBecomingCanonChain(ref data) => { BlockLocation::BranchBecomingCanonChain(ref data) => {
let ancestor_number = self.block_number(&data.ancestor).unwrap(); let ancestor_number = self.block_number(&data.ancestor).unwrap();
@ -1142,8 +1147,8 @@ mod tests {
assert_eq!(bc.best_block_number(), 0); assert_eq!(bc.best_block_number(), 0);
// when // when
let batch = db.transaction(); let mut batch =db.transaction();
bc.insert_block(&batch, &first, vec![]); bc.insert_block(&mut batch, &first, vec![]);
assert_eq!(bc.best_block_number(), 0); assert_eq!(bc.best_block_number(), 0);
bc.commit(); bc.commit();
// NOTE no db.write here (we want to check if best block is cached) // NOTE no db.write here (we want to check if best block is cached)
@ -1172,8 +1177,8 @@ mod tests {
assert_eq!(bc.block_hash(1), None); assert_eq!(bc.block_hash(1), None);
assert_eq!(bc.block_details(&genesis_hash).unwrap().children, vec![]); assert_eq!(bc.block_details(&genesis_hash).unwrap().children, vec![]);
let batch = db.transaction(); let mut batch =db.transaction();
bc.insert_block(&batch, &first, vec![]); bc.insert_block(&mut batch, &first, vec![]);
db.write(batch).unwrap(); db.write(batch).unwrap();
bc.commit(); bc.commit();
@ -1198,11 +1203,11 @@ mod tests {
let bc = BlockChain::new(Config::default(), &genesis, db.clone()); let bc = BlockChain::new(Config::default(), &genesis, db.clone());
let mut block_hashes = vec![genesis_hash.clone()]; let mut block_hashes = vec![genesis_hash.clone()];
let batch = db.transaction(); let mut batch =db.transaction();
for _ in 0..10 { for _ in 0..10 {
let block = canon_chain.generate(&mut finalizer).unwrap(); let block = canon_chain.generate(&mut finalizer).unwrap();
block_hashes.push(BlockView::new(&block).header_view().sha3()); block_hashes.push(BlockView::new(&block).header_view().sha3());
bc.insert_block(&batch, &block, vec![]); bc.insert_block(&mut batch, &block, vec![]);
bc.commit(); bc.commit();
} }
db.write(batch).unwrap(); db.write(batch).unwrap();
@ -1233,20 +1238,20 @@ mod tests {
let db = new_db(temp.as_str()); let db = new_db(temp.as_str());
let bc = BlockChain::new(Config::default(), &genesis, db.clone()); let bc = BlockChain::new(Config::default(), &genesis, db.clone());
let batch = db.transaction(); let mut batch =db.transaction();
for b in &[&b1a, &b1b, &b2a, &b2b, &b3a, &b3b, &b4a, &b4b, &b5a, &b5b] { for b in &[&b1a, &b1b, &b2a, &b2b, &b3a, &b3b, &b4a, &b4b, &b5a, &b5b] {
bc.insert_block(&batch, b, vec![]); bc.insert_block(&mut batch, b, vec![]);
bc.commit(); bc.commit();
} }
bc.insert_block(&batch, &b1b, vec![]); bc.insert_block(&mut batch, &b1b, vec![]);
bc.insert_block(&batch, &b2a, vec![]); bc.insert_block(&mut batch, &b2a, vec![]);
bc.insert_block(&batch, &b2b, vec![]); bc.insert_block(&mut batch, &b2b, vec![]);
bc.insert_block(&batch, &b3a, vec![]); bc.insert_block(&mut batch, &b3a, vec![]);
bc.insert_block(&batch, &b3b, vec![]); bc.insert_block(&mut batch, &b3b, vec![]);
bc.insert_block(&batch, &b4a, vec![]); bc.insert_block(&mut batch, &b4a, vec![]);
bc.insert_block(&batch, &b4b, vec![]); bc.insert_block(&mut batch, &b4b, vec![]);
bc.insert_block(&batch, &b5a, vec![]); bc.insert_block(&mut batch, &b5a, vec![]);
bc.insert_block(&batch, &b5b, vec![]); bc.insert_block(&mut batch, &b5b, vec![]);
db.write(batch).unwrap(); db.write(batch).unwrap();
assert_eq!( assert_eq!(
@ -1281,17 +1286,17 @@ mod tests {
let db = new_db(temp.as_str()); let db = new_db(temp.as_str());
let bc = BlockChain::new(Config::default(), &genesis, db.clone()); let bc = BlockChain::new(Config::default(), &genesis, db.clone());
let batch = db.transaction(); let mut batch =db.transaction();
let ir1 = bc.insert_block(&batch, &b1, vec![]); let ir1 = bc.insert_block(&mut batch, &b1, vec![]);
bc.commit(); bc.commit();
let ir2 = bc.insert_block(&batch, &b2, vec![]); let ir2 = bc.insert_block(&mut batch, &b2, vec![]);
bc.commit(); bc.commit();
let ir3b = bc.insert_block(&batch, &b3b, vec![]); let ir3b = bc.insert_block(&mut batch, &b3b, vec![]);
bc.commit(); bc.commit();
db.write(batch).unwrap(); db.write(batch).unwrap();
assert_eq!(bc.block_hash(3).unwrap(), b3b_hash); assert_eq!(bc.block_hash(3).unwrap(), b3b_hash);
let batch = db.transaction(); let mut batch =db.transaction();
let ir3a = bc.insert_block(&batch, &b3a, vec![]); let ir3a = bc.insert_block(&mut batch, &b3a, vec![]);
bc.commit(); bc.commit();
db.write(batch).unwrap(); db.write(batch).unwrap();
@ -1397,8 +1402,8 @@ mod tests {
let db = new_db(temp.as_str()); let db = new_db(temp.as_str());
let bc = BlockChain::new(Config::default(), &genesis, db.clone()); let bc = BlockChain::new(Config::default(), &genesis, db.clone());
assert_eq!(bc.best_block_hash(), genesis_hash); assert_eq!(bc.best_block_hash(), genesis_hash);
let batch = db.transaction(); let mut batch =db.transaction();
bc.insert_block(&batch, &first, vec![]); bc.insert_block(&mut batch, &first, vec![]);
db.write(batch).unwrap(); db.write(batch).unwrap();
bc.commit(); bc.commit();
assert_eq!(bc.best_block_hash(), first_hash); assert_eq!(bc.best_block_hash(), first_hash);
@ -1462,8 +1467,8 @@ mod tests {
let temp = RandomTempPath::new(); let temp = RandomTempPath::new();
let db = new_db(temp.as_str()); let db = new_db(temp.as_str());
let bc = BlockChain::new(Config::default(), &genesis, db.clone()); let bc = BlockChain::new(Config::default(), &genesis, db.clone());
let batch = db.transaction(); let mut batch =db.transaction();
bc.insert_block(&batch, &b1, vec![]); bc.insert_block(&mut batch, &b1, vec![]);
db.write(batch).unwrap(); db.write(batch).unwrap();
bc.commit(); bc.commit();
@ -1475,8 +1480,8 @@ mod tests {
} }
fn insert_block(db: &Arc<Database>, bc: &BlockChain, bytes: &[u8], receipts: Vec<Receipt>) -> ImportRoute { fn insert_block(db: &Arc<Database>, bc: &BlockChain, bytes: &[u8], receipts: Vec<Receipt>) -> ImportRoute {
let batch = db.transaction(); let mut batch =db.transaction();
let res = bc.insert_block(&batch, bytes, receipts); let res = bc.insert_block(&mut batch, bytes, receipts);
db.write(batch).unwrap(); db.write(batch).unwrap();
bc.commit(); bc.commit();
res res
@ -1564,16 +1569,16 @@ mod tests {
let bc = BlockChain::new(Config::default(), &genesis, db.clone()); let bc = BlockChain::new(Config::default(), &genesis, db.clone());
let uncle = canon_chain.fork(1).generate(&mut finalizer.fork()).unwrap(); let uncle = canon_chain.fork(1).generate(&mut finalizer.fork()).unwrap();
let batch = db.transaction(); let mut batch =db.transaction();
// create a longer fork // create a longer fork
for _ in 0..5 { for _ in 0..5 {
let canon_block = canon_chain.generate(&mut finalizer).unwrap(); let canon_block = canon_chain.generate(&mut finalizer).unwrap();
bc.insert_block(&batch, &canon_block, vec![]); bc.insert_block(&mut batch, &canon_block, vec![]);
bc.commit(); bc.commit();
} }
assert_eq!(bc.best_block_number(), 5); assert_eq!(bc.best_block_number(), 5);
bc.insert_block(&batch, &uncle, vec![]); bc.insert_block(&mut batch, &uncle, vec![]);
db.write(batch).unwrap(); db.write(batch).unwrap();
bc.commit(); bc.commit();
} }
@ -1599,10 +1604,10 @@ mod tests {
let db = new_db(temp.as_str()); let db = new_db(temp.as_str());
let bc = BlockChain::new(Config::default(), &genesis, db.clone()); let bc = BlockChain::new(Config::default(), &genesis, db.clone());
let batch = db.transaction(); let mut batch =db.transaction();
bc.insert_block(&batch, &first, vec![]); bc.insert_block(&mut batch, &first, vec![]);
bc.commit(); bc.commit();
bc.insert_block(&batch, &second, vec![]); bc.insert_block(&mut batch, &second, vec![]);
bc.commit(); bc.commit();
db.write(batch).unwrap(); db.write(batch).unwrap();


@@ -14,10 +14,11 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
+use util::*;
 use crypto::sha2::Sha256;
 use crypto::ripemd160::Ripemd160;
 use crypto::digest::Digest;
-use util::*;
+use ethkey::{Signature, recover};
 use ethjson;
 
 /// Definition of a contract whose implementation is built-in.
@@ -92,19 +93,19 @@ pub fn new_builtin_exec(name: &str) -> Box<Fn(&[u8], &mut [u8])> {
 		}),
 		"ecrecover" => Box::new(move|input: &[u8], output: &mut[u8]| {
 			#[repr(packed)]
-			#[derive(Debug)]
+			#[derive(Debug, Default)]
 			struct InType {
 				hash: H256,
 				v: H256,
 				r: H256,
 				s: H256,
 			}
-			let mut it: InType = InType { hash: H256::new(), v: H256::new(), r: H256::new(), s: H256::new() };
+			let mut it = InType::default();
 			it.copy_raw(input);
 			if it.v == H256::from(&U256::from(27)) || it.v == H256::from(&U256::from(28)) {
-				let s = signature_from_rsv(&it.r, &it.s, it.v[31] - 27);
-				if ec::is_valid(&s) {
-					if let Ok(p) = ec::recover(&s, &it.hash) {
+				let s = Signature::from_rsv(&it.r, &it.s, it.v[31] - 27);
+				if s.is_valid() {
+					if let Ok(p) = recover(&s, &it.hash) {
 						let r = p.as_slice().sha3();
 						// NICE: optimise and separate out into populate-like function
 						for i in 0..min(32, output.len()) {
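The old `ec::*` helpers give way to the standalone `ethkey` crate throughout this commit. A round-trip sketch of the new API (assuming `ethkey::sign` and the `Message` alias are exported alongside the `recover` and `public_to_address` calls used here):

use ethkey::{Generator, Random, Message, sign, recover, public_to_address};

let keypair = Random.generate().unwrap();
let message = Message::default();
let signature = sign(keypair.secret(), &message).unwrap();
let public = recover(&signature, &message).unwrap();
assert_eq!(public_to_address(&public), keypair.address());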


@ -66,6 +66,7 @@ use evm::Factory as EvmFactory;
use miner::{Miner, MinerService}; use miner::{Miner, MinerService};
use util::TrieFactory; use util::TrieFactory;
use snapshot::{self, io as snapshot_io}; use snapshot::{self, io as snapshot_io};
use factory::Factories;
// re-export // re-export
pub use types::blockchain_info::BlockChainInfo; pub use types::blockchain_info::BlockChainInfo;
@ -131,8 +132,6 @@ pub struct Client {
import_lock: Mutex<()>, import_lock: Mutex<()>,
panic_handler: Arc<PanicHandler>, panic_handler: Arc<PanicHandler>,
verifier: Box<Verifier>, verifier: Box<Verifier>,
vm_factory: Arc<EvmFactory>,
trie_factory: TrieFactory,
miner: Arc<Miner>, miner: Arc<Miner>,
sleep_state: Mutex<SleepState>, sleep_state: Mutex<SleepState>,
liveness: AtomicBool, liveness: AtomicBool,
@ -140,6 +139,7 @@ pub struct Client {
notify: RwLock<Vec<Weak<ChainNotify>>>, notify: RwLock<Vec<Weak<ChainNotify>>>,
queue_transactions: AtomicUsize, queue_transactions: AtomicUsize,
last_hashes: RwLock<VecDeque<H256>>, last_hashes: RwLock<VecDeque<H256>>,
factories: Factories,
} }
const HISTORY: u64 = 1200; const HISTORY: u64 = 1200;
@ -173,8 +173,8 @@ impl Client {
let mut state_db = journaldb::new(db.clone(), config.pruning, ::db::COL_STATE); let mut state_db = journaldb::new(db.clone(), config.pruning, ::db::COL_STATE);
if state_db.is_empty() && try!(spec.ensure_db_good(state_db.as_hashdb_mut())) { if state_db.is_empty() && try!(spec.ensure_db_good(state_db.as_hashdb_mut())) {
let batch = DBTransaction::new(&db); let mut batch = DBTransaction::new(&db);
try!(state_db.commit(&batch, 0, &spec.genesis_header().hash(), None)); try!(state_db.commit(&mut batch, 0, &spec.genesis_header().hash(), None));
try!(db.write(batch).map_err(ClientError::Database)); try!(db.write(batch).map_err(ClientError::Database));
} }
@ -189,6 +189,13 @@ impl Client {
panic_handler.forward_from(&block_queue); panic_handler.forward_from(&block_queue);
let awake = match config.mode { Mode::Dark(..) => false, _ => true }; let awake = match config.mode { Mode::Dark(..) => false, _ => true };
let factories = Factories {
vm: EvmFactory::new(config.vm_type),
trie: TrieFactory::new(config.trie_spec),
accountdb: Default::default(),
};
let client = Client { let client = Client {
sleep_state: Mutex::new(SleepState::new(awake)), sleep_state: Mutex::new(SleepState::new(awake)),
liveness: AtomicBool::new(awake), liveness: AtomicBool::new(awake),
@ -203,13 +210,12 @@ impl Client {
import_lock: Mutex::new(()), import_lock: Mutex::new(()),
panic_handler: panic_handler, panic_handler: panic_handler,
verifier: verification::new(config.verifier_type), verifier: verification::new(config.verifier_type),
vm_factory: Arc::new(EvmFactory::new(config.vm_type)),
trie_factory: TrieFactory::new(config.trie_spec),
miner: miner, miner: miner,
io_channel: message_channel, io_channel: message_channel,
notify: RwLock::new(Vec::new()), notify: RwLock::new(Vec::new()),
queue_transactions: AtomicUsize::new(0), queue_transactions: AtomicUsize::new(0),
last_hashes: RwLock::new(VecDeque::new()), last_hashes: RwLock::new(VecDeque::new()),
factories: factories,
}; };
Ok(Arc::new(client)) Ok(Arc::new(client))
} }
@ -290,7 +296,7 @@ impl Client {
let last_hashes = self.build_last_hashes(header.parent_hash.clone()); let last_hashes = self.build_last_hashes(header.parent_hash.clone());
let db = self.state_db.lock().boxed_clone(); let db = self.state_db.lock().boxed_clone();
let enact_result = enact_verified(block, engine, self.tracedb.tracing_enabled(), db, &parent, last_hashes, &self.vm_factory, self.trie_factory.clone()); let enact_result = enact_verified(block, engine, self.tracedb.tracing_enabled(), db, &parent, last_hashes, self.factories.clone());
if let Err(e) = enact_result { if let Err(e) = enact_result {
warn!(target: "client", "Block import failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e); warn!(target: "client", "Block import failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
return Err(()); return Err(());
@ -426,14 +432,14 @@ impl Client {
//let traces = From::from(block.traces().clone().unwrap_or_else(Vec::new)); //let traces = From::from(block.traces().clone().unwrap_or_else(Vec::new));
let batch = DBTransaction::new(&self.db); let mut batch = DBTransaction::new(&self.db);
// CHECK! I *think* this is fine, even if the state_root is equal to another // CHECK! I *think* this is fine, even if the state_root is equal to another
// already-imported block of the same number. // already-imported block of the same number.
// TODO: Prove it with a test. // TODO: Prove it with a test.
block.drain().commit(&batch, number, hash, ancient).expect("DB commit failed."); block.drain().commit(&mut batch, number, hash, ancient).expect("DB commit failed.");
let route = self.chain.insert_block(&batch, block_data, receipts); let route = self.chain.insert_block(&mut batch, block_data, receipts);
self.tracedb.import(&batch, TraceImportRequest { self.tracedb.import(&mut batch, TraceImportRequest {
traces: traces.into(), traces: traces.into(),
block_hash: hash.clone(), block_hash: hash.clone(),
block_number: number, block_number: number,
@ -494,7 +500,7 @@ impl Client {
let root = HeaderView::new(&header).state_root(); let root = HeaderView::new(&header).state_root();
State::from_existing(db, root, self.engine.account_start_nonce(), self.trie_factory.clone()).ok() State::from_existing(db, root, self.engine.account_start_nonce(), self.factories.clone()).ok()
}) })
} }
@ -519,7 +525,7 @@ impl Client {
self.state_db.lock().boxed_clone(), self.state_db.lock().boxed_clone(),
HeaderView::new(&self.best_block_header()).state_root(), HeaderView::new(&self.best_block_header()).state_root(),
self.engine.account_start_nonce(), self.engine.account_start_nonce(),
self.trie_factory.clone()) self.factories.clone())
.expect("State root of best block header always valid.") .expect("State root of best block header always valid.")
} }
@ -689,7 +695,7 @@ impl BlockChainClient for Client {
state.add_balance(&sender, &(needed_balance - balance)); state.add_balance(&sender, &(needed_balance - balance));
} }
let options = TransactOptions { tracing: analytics.transaction_tracing, vm_tracing: analytics.vm_tracing, check_nonce: false }; let options = TransactOptions { tracing: analytics.transaction_tracing, vm_tracing: analytics.vm_tracing, check_nonce: false };
let mut ret = try!(Executive::new(&mut state, &env_info, &*self.engine, &self.vm_factory).transact(t, options)); let mut ret = try!(Executive::new(&mut state, &env_info, &*self.engine, &self.factories.vm).transact(t, options));
// TODO gav move this into Executive. // TODO gav move this into Executive.
ret.state_diff = original_state.map(|original| state.diff_from(original)); ret.state_diff = original_state.map(|original| state.diff_from(original));
@ -721,7 +727,7 @@ impl BlockChainClient for Client {
gas_limit: view.gas_limit(), gas_limit: view.gas_limit(),
}; };
for t in txs.iter().take(address.index) { for t in txs.iter().take(address.index) {
match Executive::new(&mut state, &env_info, &*self.engine, &self.vm_factory).transact(t, Default::default()) { match Executive::new(&mut state, &env_info, &*self.engine, &self.factories.vm).transact(t, Default::default()) {
Ok(x) => { env_info.gas_used = env_info.gas_used + x.gas_used; } Ok(x) => { env_info.gas_used = env_info.gas_used + x.gas_used; }
Err(ee) => { return Err(CallError::Execution(ee)) } Err(ee) => { return Err(CallError::Execution(ee)) }
} }
@ -729,7 +735,7 @@ impl BlockChainClient for Client {
let t = &txs[address.index]; let t = &txs[address.index];
let original_state = if analytics.state_diffing { Some(state.clone()) } else { None }; let original_state = if analytics.state_diffing { Some(state.clone()) } else { None };
let mut ret = try!(Executive::new(&mut state, &env_info, &*self.engine, &self.vm_factory).transact(t, options)); let mut ret = try!(Executive::new(&mut state, &env_info, &*self.engine, &self.factories.vm).transact(t, options));
ret.state_diff = original_state.map(|original| state.diff_from(original)); ret.state_diff = original_state.map(|original| state.diff_from(original));
Ok(ret) Ok(ret)
@ -1045,8 +1051,7 @@ impl MiningBlockChainClient for Client {
let mut open_block = OpenBlock::new( let mut open_block = OpenBlock::new(
engine, engine,
&self.vm_factory, self.factories.clone(),
self.trie_factory.clone(),
false, // TODO: this will need to be parameterised once we want to do immediate mining insertion. false, // TODO: this will need to be parameterised once we want to do immediate mining insertion.
self.state_db.lock().boxed_clone(), self.state_db.lock().boxed_clone(),
&self.chain.block_header(&h).expect("h is best block hash: so its header must exist: qed"), &self.chain.block_header(&h).expect("h is best block hash: so its header must exist: qed"),
@ -1070,7 +1075,7 @@ impl MiningBlockChainClient for Client {
} }
fn vm_factory(&self) -> &EvmFactory { fn vm_factory(&self) -> &EvmFactory {
&self.vm_factory &self.factories.vm
} }
fn import_sealed_block(&self, block: SealedBlock) -> ImportResult { fn import_sealed_block(&self, block: SealedBlock) -> ImportResult {


@ -18,6 +18,7 @@
use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrder}; use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrder};
use util::*; use util::*;
use ethkey::{Generator, Random};
use devtools::*; use devtools::*;
use transaction::{Transaction, LocalizedTransaction, SignedTransaction, Action}; use transaction::{Transaction, LocalizedTransaction, SignedTransaction, Action};
use blockchain::TreeRoute; use blockchain::TreeRoute;
@ -73,6 +74,8 @@ pub struct TestBlockChainClient {
pub spec: Spec, pub spec: Spec,
/// VM Factory /// VM Factory
pub vm_factory: EvmFactory, pub vm_factory: EvmFactory,
/// Timestamp assigned to latest sealed block
pub latest_block_timestamp: RwLock<u64>,
} }
#[derive(Clone)] #[derive(Clone)]
@ -114,6 +117,7 @@ impl TestBlockChainClient {
miner: Arc::new(Miner::with_spec(&spec)), miner: Arc::new(Miner::with_spec(&spec)),
spec: spec, spec: spec,
vm_factory: EvmFactory::new(VMType::Interpreter), vm_factory: EvmFactory::new(VMType::Interpreter),
latest_block_timestamp: RwLock::new(10_000_000),
}; };
client.add_blocks(1, EachBlockWith::Nothing); // add genesis block client.add_blocks(1, EachBlockWith::Nothing); // add genesis block
client.genesis_hash = client.last_hash.read().clone(); client.genesis_hash = client.last_hash.read().clone();
@ -155,6 +159,11 @@ impl TestBlockChainClient {
self.queue_size.store(size, AtomicOrder::Relaxed); self.queue_size.store(size, AtomicOrder::Relaxed);
} }
/// Set timestamp assigned to latest sealed block
pub fn set_latest_block_timestamp(&self, ts: u64) {
*self.latest_block_timestamp.write() = ts;
}
/// Add blocks to test client. /// Add blocks to test client.
pub fn add_blocks(&self, count: usize, with: EachBlockWith) { pub fn add_blocks(&self, count: usize, with: EachBlockWith) {
let len = self.numbers.read().len(); let len = self.numbers.read().len();
@ -180,7 +189,7 @@ impl TestBlockChainClient {
let txs = match with { let txs = match with {
EachBlockWith::Transaction | EachBlockWith::UncleAndTransaction => { EachBlockWith::Transaction | EachBlockWith::UncleAndTransaction => {
let mut txs = RlpStream::new_list(1); let mut txs = RlpStream::new_list(1);
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
// Update nonces value // Update nonces value
self.nonces.write().insert(keypair.address(), U256::one()); self.nonces.write().insert(keypair.address(), U256::one());
let tx = Transaction { let tx = Transaction {
@ -268,7 +277,6 @@ impl MiningBlockChainClient for TestBlockChainClient {
let last_hashes = vec![genesis_header.hash()]; let last_hashes = vec![genesis_header.hash()];
let mut open_block = OpenBlock::new( let mut open_block = OpenBlock::new(
engine, engine,
self.vm_factory(),
Default::default(), Default::default(),
false, false,
db, db,
@ -279,7 +287,7 @@ impl MiningBlockChainClient for TestBlockChainClient {
extra_data extra_data
).expect("Opening block for tests will not fail."); ).expect("Opening block for tests will not fail.");
// TODO [todr] Override timestamp for predictability (set_timestamp_now kind of sucks) // TODO [todr] Override timestamp for predictability (set_timestamp_now kind of sucks)
open_block.set_timestamp(10_000_000); open_block.set_timestamp(*self.latest_block_timestamp.read());
open_block open_block
} }
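The new `latest_block_timestamp` knob makes sealed-block timestamps deterministic in tests. A usage sketch; the `prepare_open_block` name refers to the `MiningBlockChainClient` method whose body is shown above but whose signature line is not visible in this hunk, so treat it as an assumption:

let client = TestBlockChainClient::new();
client.set_latest_block_timestamp(42);
let block = client.prepare_open_block(Address::default(), (3141562.into(), 31415620.into()), vec![]);
assert_eq!(block.header().timestamp(), 42);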


@@ -83,10 +83,10 @@ pub trait Key<T> {
 /// Should be used to write value into database.
 pub trait Writable {
 	/// Writes the value into the database.
-	fn write<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: Encodable, R: Deref<Target = [u8]>;
+	fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: Encodable, R: Deref<Target = [u8]>;
 
 	/// Writes the value into the database and updates the cache.
-	fn write_with_cache<K, T, R>(&self, col: Option<u32>, cache: &mut Cache<K, T>, key: K, value: T, policy: CacheUpdatePolicy) where
+	fn write_with_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, T>, key: K, value: T, policy: CacheUpdatePolicy) where
 	K: Key<T, Target = R> + Hash + Eq,
 	T: Encodable,
 	R: Deref<Target = [u8]> {
@@ -102,7 +102,7 @@ pub trait Writable {
 	}
 
 	/// Writes the values into the database and updates the cache.
-	fn extend_with_cache<K, T, R>(&self, col: Option<u32>, cache: &mut Cache<K, T>, values: HashMap<K, T>, policy: CacheUpdatePolicy) where
+	fn extend_with_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, T>, values: HashMap<K, T>, policy: CacheUpdatePolicy) where
 	K: Key<T, Target = R> + Hash + Eq,
 	T: Encodable,
 	R: Deref<Target = [u8]> {
@@ -169,7 +169,7 @@ pub trait Readable {
 }
 
 impl Writable for DBTransaction {
-	fn write<T, R>(&self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: Encodable, R: Deref<Target = [u8]> {
+	fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: Encodable, R: Deref<Target = [u8]> {
 		self.put(col, &key.key(), &encode(value));
 	}
 }


@ -17,6 +17,7 @@
//! A blockchain engine that supports a basic, non-BFT proof-of-authority. //! A blockchain engine that supports a basic, non-BFT proof-of-authority.
use common::*; use common::*;
use ethkey::{recover, public_to_address};
use account_provider::AccountProvider; use account_provider::AccountProvider;
use block::*; use block::*;
use spec::CommonParams; use spec::CommonParams;
@ -133,7 +134,7 @@ impl Engine for BasicAuthority {
fn verify_block_unordered(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> { fn verify_block_unordered(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
// check the signature is legit. // check the signature is legit.
let sig = try!(UntrustedRlp::new(&header.seal[0]).as_val::<H520>()); let sig = try!(UntrustedRlp::new(&header.seal[0]).as_val::<H520>());
let signer = Address::from(try!(ec::recover(&sig, &header.bare_hash())).sha3()); let signer = public_to_address(&try!(recover(&sig.into(), &header.bare_hash())));
if !self.our_params.authorities.contains(&signer) { if !self.our_params.authorities.contains(&signer) {
return try!(Err(BlockError::InvalidSeal)); return try!(Err(BlockError::InvalidSeal));
} }
@ -228,15 +229,10 @@ mod tests {
fn can_do_signature_verification_fail() { fn can_do_signature_verification_fail() {
let engine = new_test_authority().engine; let engine = new_test_authority().engine;
let mut header: Header = Header::default(); let mut header: Header = Header::default();
header.set_seal(vec![rlp::encode(&Signature::zero()).to_vec()]); header.set_seal(vec![rlp::encode(&H520::default()).to_vec()]);
let verify_result = engine.verify_block_unordered(&header, None); let verify_result = engine.verify_block_unordered(&header, None);
assert!(verify_result.is_err());
match verify_result {
Err(Error::Util(UtilError::Crypto(CryptoError::InvalidSignature))) => {},
Err(_) => { panic!("should be block difficulty error (got {:?})", verify_result); },
_ => { panic!("Should be error, got Ok"); },
}
} }
#[test] #[test]
@ -252,8 +248,7 @@ mod tests {
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let vm_factory = Default::default(); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = OpenBlock::new(engine, &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = b.close_and_lock(); let b = b.close_and_lock();
let seal = engine.generate_seal(b.block(), Some(&tap)).unwrap(); let seal = engine.generate_seal(b.block(), Some(&tap)).unwrap();
assert!(b.try_seal(engine, seal).is_ok()); assert!(b.try_seal(engine, seal).is_ok());
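The seal check in this file now goes through the `ethkey` crate: recover the public key from the seal signature, reduce it to an address with `public_to_address`, and compare against the authority set. A hedged sketch of just that call shape follows; the `Signature` and `Message` imports and the `util::Address` path are assumptions inferred from the diff, not verified against the crates.

use util::Address;     // assumption: the address type used by the engine
use ethkey::{recover, public_to_address, Signature, Message, Error};

/// Recover the address that produced `sig` over `bare_hash`, mirroring the
/// call shape in `verify_block_unordered` above.
fn recover_signer(sig: &Signature, bare_hash: &Message) -> Result<Address, Error> {
    let public = try!(recover(sig, bare_hash));
    Ok(public_to_address(&public))
}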

View File

@ -86,8 +86,7 @@ mod tests {
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let vm_factory = Default::default(); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = OpenBlock::new(engine, &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = b.close_and_lock(); let b = b.close_and_lock();
// Seal with empty AccountProvider. // Seal with empty AccountProvider.
let seal = engine.generate_seal(b.block(), Some(&tap)).unwrap(); let seal = engine.generate_seal(b.block(), Some(&tap)).unwrap();
@ -101,7 +100,7 @@ mod tests {
assert!(engine.verify_block_basic(&header, None).is_ok()); assert!(engine.verify_block_basic(&header, None).is_ok());
header.set_seal(vec![rlp::encode(&Signature::zero()).to_vec()]); header.set_seal(vec![rlp::encode(&H520::default()).to_vec()]);
assert!(engine.verify_block_unordered(&header, None).is_ok()); assert!(engine.verify_block_unordered(&header, None).is_ok());
} }

View File

@ -25,6 +25,7 @@ use ipc::binary::{BinaryConvertError, BinaryConvertable};
use types::block_import_error::BlockImportError; use types::block_import_error::BlockImportError;
use snapshot::Error as SnapshotError; use snapshot::Error as SnapshotError;
use engines::EngineError; use engines::EngineError;
use ethkey::Error as EthkeyError;
pub use types::executed::{ExecutionError, CallError}; pub use types::executed::{ExecutionError, CallError};
@ -241,6 +242,8 @@ pub enum Error {
Snapshot(SnapshotError), Snapshot(SnapshotError),
/// Consensus vote error. /// Consensus vote error.
Engine(EngineError), Engine(EngineError),
/// Ethkey error.
Ethkey(EthkeyError),
} }
impl fmt::Display for Error { impl fmt::Display for Error {
@ -263,6 +266,7 @@ impl fmt::Display for Error {
Error::Snapshot(ref err) => err.fmt(f), Error::Snapshot(ref err) => err.fmt(f),
Error::Engine(ref err) => Error::Engine(ref err) =>
f.write_fmt(format_args!("Bad vote: {:?}", err)), f.write_fmt(format_args!("Bad vote: {:?}", err)),
Error::Ethkey(ref err) => err.fmt(f),
} }
} }
} }
@ -303,12 +307,6 @@ impl From<ExecutionError> for Error {
} }
} }
impl From<CryptoError> for Error {
fn from(err: CryptoError) -> Error {
Error::Util(UtilError::Crypto(err))
}
}
impl From<DecoderError> for Error { impl From<DecoderError> for Error {
fn from(err: DecoderError) -> Error { fn from(err: DecoderError) -> Error {
Error::Util(UtilError::Decoder(err)) Error::Util(UtilError::Decoder(err))
@ -371,6 +369,9 @@ impl From<EngineError> for Error {
match err { match err {
other => Error::Engine(other), other => Error::Engine(other),
} }
impl From<EthkeyError> for Error {
fn from(err: EthkeyError) -> Error {
Error::Ethkey(err)
} }
} }
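The new `Ethkey` variant and `From<EthkeyError>` impl are what let the engine write `try!(recover(..))` directly: `try!` (and later `?`) route the error branch through `From::from`. A self-contained sketch of that conversion pattern, with toy stand-in types rather than the crate's real `Error`/`EthkeyError`:

#[derive(Debug)]
struct KeyError(&'static str);       // stand-in for ethkey::Error

#[derive(Debug)]
enum NodeError {                     // stand-in for the ethcore Error enum
    Ethkey(KeyError),
}

impl From<KeyError> for NodeError {
    fn from(err: KeyError) -> NodeError {
        NodeError::Ethkey(err)
    }
}

fn recover_signer() -> Result<&'static str, KeyError> {
    Err(KeyError("invalid signature"))
}

fn verify_block() -> Result<(), NodeError> {
    // `try!`/`?` expand to this shape: on failure the error is passed through
    // `From::from`, which is exactly what the new impl provides.
    let _signer = match recover_signer() {
        Ok(s) => s,
        Err(e) => return Err(From::from(e)),
    };
    Ok(())
}

fn main() {
    match verify_block() {
        Err(NodeError::Ethkey(_)) => println!("ethkey error converted as expected"),
        other => panic!("unexpected result: {:?}", other),
    }
}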

View File

@ -357,8 +357,7 @@ mod tests {
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let vm_factory = Default::default(); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = OpenBlock::new(engine, &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = b.close(); let b = b.close();
assert_eq!(b.state().balance(&Address::zero()), U256::from_str("4563918244f40000").unwrap()); assert_eq!(b.state().balance(&Address::zero()), U256::from_str("4563918244f40000").unwrap());
} }
@ -372,8 +371,7 @@ mod tests {
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let vm_factory = Default::default(); let mut b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let mut b = OpenBlock::new(engine, &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let mut uncle = Header::new(); let mut uncle = Header::new();
let uncle_author: Address = "ef2d6d194084c2de36e0dabfce45d046b37d1106".into(); let uncle_author: Address = "ef2d6d194084c2de36e0dabfce45d046b37d1106".into();
uncle.author = uncle_author.clone(); uncle.author = uncle_author.clone();

View File

@ -80,6 +80,7 @@ impl VMType {
} }
/// Evm factory. Creates appropriate Evm. /// Evm factory. Creates appropriate Evm.
#[derive(Clone)]
pub struct Factory { pub struct Factory {
evm: VMType evm: VMType
} }
@ -128,7 +129,7 @@ impl Factory {
impl Default for Factory { impl Default for Factory {
/// Returns jitvm factory /// Returns jitvm factory
#[cfg(feature = "jit")] #[cfg(all(feature = "jit", not(test)))]
fn default() -> Factory { fn default() -> Factory {
Factory { Factory {
evm: VMType::Jit evm: VMType::Jit
@ -136,7 +137,7 @@ impl Default for Factory {
} }
/// Returns native rust evm factory /// Returns native rust evm factory
#[cfg(not(feature = "jit"))] #[cfg(any(not(feature = "jit"), test))]
fn default() -> Factory { fn default() -> Factory {
Factory { Factory {
evm: VMType::Interpreter evm: VMType::Interpreter
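The cfg change above means `Factory::default()` always picks the interpreter under `cargo test`, even when the `jit` feature is enabled. A self-contained sketch of the same cfg split, with toy names and no crate dependencies:

#[derive(Debug, PartialEq)]
enum VmKind { Jit, Interpreter }

// Chosen only for non-test builds that enable the `jit` feature.
#[cfg(all(feature = "jit", not(test)))]
fn default_vm() -> VmKind { VmKind::Jit }

// Chosen when `jit` is off, or always under `cargo test`.
#[cfg(any(not(feature = "jit"), test))]
fn default_vm() -> VmKind { VmKind::Interpreter }

fn main() {
    println!("default VM: {:?}", default_vm());
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tests_always_use_the_interpreter() {
        assert_eq!(default_vm(), VmKind::Interpreter);
    }
}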

View File

@ -483,6 +483,7 @@ impl<'a> Executive<'a> {
#[cfg(test)] #[cfg(test)]
#[allow(dead_code)] #[allow(dead_code)]
mod tests { mod tests {
use ethkey::{Generator, Random};
use super::*; use super::*;
use common::*; use common::*;
use evm::{Factory, VMType}; use evm::{Factory, VMType};
@ -1002,7 +1003,7 @@ mod tests {
// TODO: fix (preferred) or remove // TODO: fix (preferred) or remove
evm_test_ignore!{test_transact_simple: test_transact_simple_jit, test_transact_simple_int} evm_test_ignore!{test_transact_simple: test_transact_simple_jit, test_transact_simple_int}
fn test_transact_simple(factory: Factory) { fn test_transact_simple(factory: Factory) {
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
let t = Transaction { let t = Transaction {
action: Action::Create, action: Action::Create,
value: U256::from(17), value: U256::from(17),
@ -1069,7 +1070,7 @@ mod tests {
evm_test!{test_transact_invalid_nonce: test_transact_invalid_nonce_jit, test_transact_invalid_nonce_int} evm_test!{test_transact_invalid_nonce: test_transact_invalid_nonce_jit, test_transact_invalid_nonce_int}
fn test_transact_invalid_nonce(factory: Factory) { fn test_transact_invalid_nonce(factory: Factory) {
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
let t = Transaction { let t = Transaction {
action: Action::Create, action: Action::Create,
value: U256::from(17), value: U256::from(17),
@ -1102,7 +1103,7 @@ mod tests {
evm_test!{test_transact_gas_limit_reached: test_transact_gas_limit_reached_jit, test_transact_gas_limit_reached_int} evm_test!{test_transact_gas_limit_reached: test_transact_gas_limit_reached_jit, test_transact_gas_limit_reached_int}
fn test_transact_gas_limit_reached(factory: Factory) { fn test_transact_gas_limit_reached(factory: Factory) {
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
let t = Transaction { let t = Transaction {
action: Action::Create, action: Action::Create,
value: U256::from(17), value: U256::from(17),
@ -1137,7 +1138,7 @@ mod tests {
evm_test!{test_not_enough_cash: test_not_enough_cash_jit, test_not_enough_cash_int} evm_test!{test_not_enough_cash: test_not_enough_cash_jit, test_not_enough_cash_int}
fn test_not_enough_cash(factory: Factory) { fn test_not_enough_cash(factory: Factory) {
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
let t = Transaction { let t = Transaction {
action: Action::Create, action: Action::Create,
value: U256::from(18), value: U256::from(18),
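The test hunks in this file (and below in the miner and transaction queue) replace `KeyPair::create()` with `ethkey`'s generator API. A hedged sketch of that call shape, assuming only the items the diff imports (`Generator`, `Random`) and the `secret()` accessor it uses:

extern crate ethkey;

use ethkey::{Generator, Random};

fn main() {
    // As in the updated tests: `Random` implements `Generator`, and
    // `generate()` yields a keypair whose secret is then used for signing.
    let keypair = Random.generate().unwrap();
    let _secret = keypair.secret();
}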

ethcore/src/factory.rs (new file, 30 lines)
View File

@ -0,0 +1,30 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use util::trie::TrieFactory;
use evm::Factory as EvmFactory;
use account_db::Factory as AccountFactory;
/// Collection of factories.
#[derive(Default, Clone)]
pub struct Factories {
/// factory for evm.
pub vm: EvmFactory,
/// factory for tries.
pub trie: TrieFactory,
/// factory for account databases.
pub accountdb: AccountFactory,
}
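A short usage sketch for the new `Factories` bundle; it assumes it sits inside `ethcore`, where the `factory` module above is visible. Deriving `Default` and `Clone` lets call sites build one set of factories and hand copies around, which lines up with the `OpenBlock::new` hunks elsewhere in this commit dropping the separate `&vm_factory` argument.

use factory::Factories;

fn make_factories() -> Factories {
    // All three factories come from their `Default` impls; `Clone` makes it
    // cheap to pass a copy wherever a block or state needs one.
    let factories = Factories::default();
    let _vm = &factories.vm;
    let _trie = &factories.trie;
    let _accountdb = &factories.accountdb;
    factories
}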

View File

@ -64,8 +64,7 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
state.populate_from(pre); state.populate_from(pre);
state.commit() state.commit()
.expect(&format!("State test {} failed due to internal error.", name)); .expect(&format!("State test {} failed due to internal error.", name));
let vm_factory = Default::default(); let res = state.apply(&env, &*engine, &transaction, false);
let res = state.apply(&env, &*engine, &vm_factory, &transaction, false);
if fail_unless(state.root() == &post_state_root) { if fail_unless(state.root() == &post_state_root) {
println!("!!! {}: State mismatch (got: {}, expect: {}):", name, state.root(), post_state_root); println!("!!! {}: State mismatch (got: {}, expect: {}):", name, state.root(), post_state_root);

View File

@ -96,6 +96,7 @@ extern crate bloomchain;
extern crate rayon; extern crate rayon;
extern crate hyper; extern crate hyper;
extern crate ethash; extern crate ethash;
extern crate ethkey;
pub extern crate ethstore; pub extern crate ethstore;
extern crate semver; extern crate semver;
extern crate ethcore_ipc_nano as nanoipc; extern crate ethcore_ipc_nano as nanoipc;
@ -139,6 +140,7 @@ mod externalities;
mod verification; mod verification;
mod blockchain; mod blockchain;
mod types; mod types;
mod factory;
#[cfg(test)] #[cfg(test)]
mod tests; mod tests;

View File

@ -270,7 +270,7 @@ impl Miner {
Some(old_block) => { Some(old_block) => {
trace!(target: "miner", "Already have previous work; updating and returning"); trace!(target: "miner", "Already have previous work; updating and returning");
// add transactions to old_block // add transactions to old_block
old_block.reopen(&*self.engine, chain.vm_factory()) old_block.reopen(&*self.engine)
} }
None => { None => {
// block not found - create it. // block not found - create it.
@ -723,8 +723,8 @@ impl MinerService for Miner {
.position(|t| t == *hash) .position(|t| t == *hash)
.map(|index| { .map(|index| {
let prev_gas = if index == 0 { Default::default() } else { pending.receipts()[index - 1].gas_used }; let prev_gas = if index == 0 { Default::default() } else { pending.receipts()[index - 1].gas_used };
let ref tx = txs[index]; let tx = &txs[index];
let ref receipt = pending.receipts()[index]; let receipt = &pending.receipts()[index];
RichReceipt { RichReceipt {
transaction_hash: hash.clone(), transaction_hash: hash.clone(),
transaction_index: index, transaction_index: index,
@ -911,6 +911,7 @@ mod tests {
use super::super::MinerService; use super::super::MinerService;
use super::*; use super::*;
use util::*; use util::*;
use ethkey::{Generator, Random};
use client::{TestBlockChainClient, EachBlockWith}; use client::{TestBlockChainClient, EachBlockWith};
use client::{TransactionImportResult}; use client::{TransactionImportResult};
use types::transaction::{Transaction, Action}; use types::transaction::{Transaction, Action};
@ -975,7 +976,7 @@ mod tests {
let client = TestBlockChainClient::default(); let client = TestBlockChainClient::default();
let miner = miner(); let miner = miner();
let transaction = { let transaction = {
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
Transaction { Transaction {
action: Action::Create, action: Action::Create,
value: U256::zero(), value: U256::zero(),
@ -1005,7 +1006,7 @@ mod tests {
let client = TestBlockChainClient::default(); let client = TestBlockChainClient::default();
let miner = miner(); let miner = miner();
let transaction = { let transaction = {
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
Transaction { Transaction {
action: Action::Create, action: Action::Create,
value: U256::zero(), value: U256::zero(),

View File

@ -26,16 +26,17 @@
//! ```rust //! ```rust
//! extern crate ethcore_util as util; //! extern crate ethcore_util as util;
//! extern crate ethcore; //! extern crate ethcore;
//! extern crate ethkey;
//! extern crate rustc_serialize; //! extern crate rustc_serialize;
//! //!
//! use util::crypto::KeyPair;
//! use util::{Uint, U256, Address}; //! use util::{Uint, U256, Address};
//! use ethkey::{Random, Generator};
//! use ethcore::miner::{TransactionQueue, AccountDetails, TransactionOrigin}; //! use ethcore::miner::{TransactionQueue, AccountDetails, TransactionOrigin};
//! use ethcore::transaction::*; //! use ethcore::transaction::*;
//! use rustc_serialize::hex::FromHex; //! use rustc_serialize::hex::FromHex;
//! //!
//! fn main() { //! fn main() {
//! let key = KeyPair::create().unwrap(); //! let key = Random.generate().unwrap();
//! let t1 = Transaction { action: Action::Create, value: U256::from(100), data: "3331600055".from_hex().unwrap(), //! let t1 = Transaction { action: Action::Create, value: U256::from(100), data: "3331600055".from_hex().unwrap(),
//! gas: U256::from(100_000), gas_price: U256::one(), nonce: U256::from(10) }; //! gas: U256::from(100_000), gas_price: U256::one(), nonce: U256::from(10) };
//! let t2 = Transaction { action: Action::Create, value: U256::from(100), data: "3331600055".from_hex().unwrap(), //! let t2 = Transaction { action: Action::Create, value: U256::from(100), data: "3331600055".from_hex().unwrap(),
@ -43,14 +44,14 @@
//! //!
//! let st1 = t1.sign(&key.secret()); //! let st1 = t1.sign(&key.secret());
//! let st2 = t2.sign(&key.secret()); //! let st2 = t2.sign(&key.secret());
//! let default_nonce = |_a: &Address| AccountDetails { //! let default_account_details = |_a: &Address| AccountDetails {
//! nonce: U256::from(10), //! nonce: U256::from(10),
//! balance: U256::from(1_000_000), //! balance: U256::from(1_000_000),
//! }; //! };
//! //!
//! let mut txq = TransactionQueue::new(); //! let mut txq = TransactionQueue::new();
//! txq.add(st2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); //! txq.add(st2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
//! txq.add(st1.clone(), &default_nonce, TransactionOrigin::External).unwrap(); //! txq.add(st1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
//! //!
//! // Check status //! // Check status
//! assert_eq!(txq.status().pending, 2); //! assert_eq!(txq.status().pending, 2);
@ -62,7 +63,7 @@
//! //!
//! // And when transaction is removed (but nonce haven't changed) //! // And when transaction is removed (but nonce haven't changed)
//! // it will move subsequent transactions to future //! // it will move subsequent transactions to future
//! txq.remove_invalid(&st1.hash(), &default_nonce); //! txq.remove_invalid(&st1.hash(), &default_account_details);
//! assert_eq!(txq.status().pending, 0); //! assert_eq!(txq.status().pending, 0);
//! assert_eq!(txq.status().future, 1); //! assert_eq!(txq.status().future, 1);
//! assert_eq!(txq.top_transactions().len(), 0); //! assert_eq!(txq.top_transactions().len(), 0);
@ -82,7 +83,7 @@
use std::cmp::Ordering; use std::cmp::Ordering;
use std::cmp; use std::cmp;
use std::collections::{HashMap, BTreeSet}; use std::collections::{HashSet, HashMap, BTreeSet, BTreeMap};
use util::{Address, H256, Uint, U256}; use util::{Address, H256, Uint, U256};
use util::table::Table; use util::table::Table;
use transaction::*; use transaction::*;
@ -226,23 +227,34 @@ impl VerifiedTransaction {
struct TransactionSet { struct TransactionSet {
by_priority: BTreeSet<TransactionOrder>, by_priority: BTreeSet<TransactionOrder>,
by_address: Table<Address, U256, TransactionOrder>, by_address: Table<Address, U256, TransactionOrder>,
by_gas_price: BTreeMap<U256, HashSet<H256>>,
limit: usize, limit: usize,
} }
impl TransactionSet { impl TransactionSet {
/// Inserts `TransactionOrder` to this set /// Inserts `TransactionOrder` to this set. Transaction does not need to be unique -
/// the same transaction may be validly inserted twice. Any previous transaction that
/// it replaces (i.e. with the same `sender` and `nonce`) should be returned.
fn insert(&mut self, sender: Address, nonce: U256, order: TransactionOrder) -> Option<TransactionOrder> { fn insert(&mut self, sender: Address, nonce: U256, order: TransactionOrder) -> Option<TransactionOrder> {
self.by_priority.insert(order.clone()); if !self.by_priority.insert(order.clone()) {
let r = self.by_address.insert(sender, nonce, order); return Some(order.clone());
// If transaction was replaced remove it from priority queue
if let Some(ref old_order) = r {
self.by_priority.remove(old_order);
} }
assert_eq!(self.by_priority.len(), self.by_address.len()); let order_hash = order.hash.clone();
r let order_gas_price = order.gas_price.clone();
let by_address_replaced = self.by_address.insert(sender, nonce, order);
// If transaction was replaced remove it from priority queue
if let Some(ref old_order) = by_address_replaced {
assert!(self.by_priority.remove(old_order), "hash is in `by_address`; all transactions in `by_address` must be in `by_priority`; qed");
assert!(Self::remove_item(&mut self.by_gas_price, &old_order.gas_price, &old_order.hash),
"hash is in `by_address`; all transactions' gas_prices in `by_address` must be in `by_gas_price`; qed");
}
Self::insert_item(&mut self.by_gas_price, order_gas_price, order_hash);
debug_assert_eq!(self.by_priority.len(), self.by_address.len());
debug_assert_eq!(self.by_gas_price.iter().map(|(_, v)| v.len()).fold(0, |a, b| a + b), self.by_address.len());
by_address_replaced
} }
/// Remove low priority transactions if there is more then specified by given `limit`. /// Remove low priority transactions if there is more than specified by given `limit`.
/// ///
/// It drops transactions from this set but also removes associated `VerifiedTransaction`. /// It drops transactions from this set but also removes associated `VerifiedTransaction`.
/// Returns addresses and lowest nonces of transactions removed because of limit. /// Returns addresses and lowest nonces of transactions removed because of limit.
@ -267,7 +279,7 @@ impl TransactionSet {
.expect("Transaction has just been found in `by_priority`; so it is in `by_address` also."); .expect("Transaction has just been found in `by_priority`; so it is in `by_address` also.");
by_hash.remove(&order.hash) by_hash.remove(&order.hash)
.expect("Hash found in `by_priority` matches the one dropped; so it is included in `by_hash`"); .expect("hash is in `by_priority`; all hashes in `by_priority` must be in `by_hash`; qed");
let min = removed.get(&sender).map_or(nonce, |val| cmp::min(*val, nonce)); let min = removed.get(&sender).map_or(nonce, |val| cmp::min(*val, nonce));
removed.insert(sender, min); removed.insert(sender, min);
@ -278,6 +290,8 @@ impl TransactionSet {
/// Drop transaction from this set (remove from `by_priority` and `by_address`) /// Drop transaction from this set (remove from `by_priority` and `by_address`)
fn drop(&mut self, sender: &Address, nonce: &U256) -> Option<TransactionOrder> { fn drop(&mut self, sender: &Address, nonce: &U256) -> Option<TransactionOrder> {
if let Some(tx_order) = self.by_address.remove(sender, nonce) { if let Some(tx_order) = self.by_address.remove(sender, nonce) {
assert!(Self::remove_item(&mut self.by_gas_price, &tx_order.gas_price, &tx_order.hash),
"hash is in `by_address`; all transactions' gas_prices in `by_address` must be in `by_gas_price`; qed");
self.by_priority.remove(&tx_order); self.by_priority.remove(&tx_order);
assert_eq!(self.by_priority.len(), self.by_address.len()); assert_eq!(self.by_priority.len(), self.by_address.len());
return Some(tx_order); return Some(tx_order);
@ -290,6 +304,7 @@ impl TransactionSet {
fn clear(&mut self) { fn clear(&mut self) {
self.by_priority.clear(); self.by_priority.clear();
self.by_address.clear(); self.by_address.clear();
self.by_gas_price.clear();
} }
/// Sets new limit for number of transactions in this `TransactionSet`. /// Sets new limit for number of transactions in this `TransactionSet`.
@ -297,6 +312,41 @@ impl TransactionSet {
fn set_limit(&mut self, limit: usize) { fn set_limit(&mut self, limit: usize) {
self.limit = limit; self.limit = limit;
} }
/// Get the minimum gas price that we can accept into this queue that wouldn't cause the transaction to
/// immediately be dropped. 0 if the queue isn't at capacity; 1 plus the lowest if it is.
fn gas_price_entry_limit(&self) -> U256 {
match self.by_gas_price.keys().next() {
Some(k) if self.by_priority.len() >= self.limit => *k + 1.into(),
_ => U256::default(),
}
}
/// Insert an item into a BTreeMap/HashSet "multimap".
fn insert_item(into: &mut BTreeMap<U256, HashSet<H256>>, gas_price: U256, hash: H256) -> bool {
into.entry(gas_price).or_insert_with(Default::default).insert(hash)
}
/// Remove an item from a BTreeMap/HashSet "multimap".
/// Returns true if the item was removed successfully.
fn remove_item(from: &mut BTreeMap<U256, HashSet<H256>>, gas_price: &U256, hash: &H256) -> bool {
if let Some(mut hashes) = from.get_mut(gas_price) {
let only_one_left = hashes.len() == 1;
if !only_one_left {
// Operation may be ok: only if hash is in gas-price's Set.
return hashes.remove(hash);
}
if hashes.iter().next().unwrap() != hash {
// Operation failed: hash not the single item in gas-price's Set.
return false;
}
} else {
// Operation failed: gas-price not found in Map.
return false;
}
// Operation maybe ok: only if hash not found in gas-price Set.
from.remove(gas_price).is_some()
}
} }
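The `by_gas_price` field added above is a BTreeMap-of-HashSet "multimap" keyed by gas price, kept in lock-step with `by_address` so the cheapest queued price is always `keys().next()`. A self-contained sketch of that bookkeeping and of the "lowest price + 1" entry limit, with plain `u64`s standing in for `U256`/`H256`:

use std::collections::{BTreeMap, HashSet};

type MultiMap = BTreeMap<u64, HashSet<u64>>;

fn insert_item(into: &mut MultiMap, gas_price: u64, hash: u64) -> bool {
    into.entry(gas_price).or_insert_with(HashSet::new).insert(hash)
}

fn remove_item(from: &mut MultiMap, gas_price: &u64, hash: &u64) -> bool {
    if let Some(hashes) = from.get_mut(gas_price) {
        if hashes.len() > 1 {
            return hashes.remove(hash);
        }
        if hashes.iter().next().unwrap() != hash {
            return false;
        }
    } else {
        return false;
    }
    // Last hash at this price: drop the whole bucket so `keys().next()`
    // keeps pointing at a price that actually has transactions behind it.
    from.remove(gas_price).is_some()
}

fn entry_limit(map: &MultiMap, len: usize, limit: usize) -> u64 {
    // 0 while there is still room; one above the cheapest entry once full.
    match map.keys().next() {
        Some(k) if len >= limit => *k + 1,
        _ => 0,
    }
}

fn main() {
    let mut map = MultiMap::new();
    assert!(insert_item(&mut map, 1, 100));
    assert!(insert_item(&mut map, 3, 101));
    assert_eq!(entry_limit(&map, 2, 2), 2); // full: cheapest pays 1, so the limit is 2
    assert!(remove_item(&mut map, &1, &100));
    assert_eq!(entry_limit(&map, 1, 2), 0); // room again
}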
#[derive(Debug)] #[derive(Debug)]
@ -316,7 +366,6 @@ pub struct AccountDetails {
pub balance: U256, pub balance: U256,
} }
/// Transactions with `gas > (gas_limit + gas_limit * Factor(in percents))` are not imported to the queue. /// Transactions with `gas > (gas_limit + gas_limit * Factor(in percents))` are not imported to the queue.
const GAS_LIMIT_HYSTERESIS: usize = 10; // % const GAS_LIMIT_HYSTERESIS: usize = 10; // %
@ -355,12 +404,14 @@ impl TransactionQueue {
let current = TransactionSet { let current = TransactionSet {
by_priority: BTreeSet::new(), by_priority: BTreeSet::new(),
by_address: Table::new(), by_address: Table::new(),
by_gas_price: Default::default(),
limit: limit, limit: limit,
}; };
let future = TransactionSet { let future = TransactionSet {
by_priority: BTreeSet::new(), by_priority: BTreeSet::new(),
by_address: Table::new(), by_address: Table::new(),
by_gas_price: Default::default(),
limit: limit, limit: limit,
}; };
@ -400,6 +451,12 @@ impl TransactionQueue {
self.minimal_gas_price = min_gas_price; self.minimal_gas_price = min_gas_price;
} }
/// Get one more than the lowest gas price in the queue iff the pool is
/// full, otherwise 0.
pub fn effective_minimum_gas_price(&self) -> U256 {
self.current.gas_price_entry_limit()
}
/// Sets new gas limit. Transactions with gas slightly (`GAS_LIMIT_HYSTERESIS`) above the limit won't be imported. /// Sets new gas limit. Transactions with gas slightly (`GAS_LIMIT_HYSTERESIS`) above the limit won't be imported.
/// Any transaction already imported to the queue is not affected. /// Any transaction already imported to the queue is not affected.
pub fn set_gas_limit(&mut self, gas_limit: U256) { pub fn set_gas_limit(&mut self, gas_limit: U256) {
@ -445,6 +502,21 @@ impl TransactionQueue {
})); }));
} }
let full_queues_lowest = self.effective_minimum_gas_price();
if tx.gas_price < full_queues_lowest && origin != TransactionOrigin::Local {
trace!(target: "txqueue",
"Dropping transaction below lowest gas price in a full queue: {:?} (gp: {} < {})",
tx.hash(),
tx.gas_price,
full_queues_lowest
);
return Err(Error::Transaction(TransactionError::InsufficientGasPrice {
minimal: full_queues_lowest,
got: tx.gas_price,
}));
}
try!(tx.check_low_s()); try!(tx.check_low_s());
if tx.gas > self.gas_limit || tx.gas > self.tx_gas_limit { if tx.gas > self.gas_limit || tx.gas > self.tx_gas_limit {
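The block added above is the admission gate that goes with `effective_minimum_gas_price`: once the queue is at capacity, an external transaction has to beat the cheapest queued gas price by at least one, while local transactions are exempt. A worked, self-contained sketch with plain integers in place of `U256`:

#[derive(PartialEq)]
enum Origin { Local, External }

fn admit(tx_gas_price: u64, effective_minimum: u64, origin: Origin) -> Result<(), String> {
    if tx_gas_price < effective_minimum && origin != Origin::Local {
        return Err(format!(
            "InsufficientGasPrice {{ minimal: {}, got: {} }}",
            effective_minimum, tx_gas_price
        ));
    }
    Ok(())
}

fn main() {
    // Full queue whose cheapest entry pays 1 => effective minimum is 2.
    assert!(admit(1, 2, Origin::External).is_err()); // dropped, as in the updated limit test
    assert!(admit(1, 2, Origin::Local).is_ok());     // local transactions bypass the gate
    assert!(admit(2, 2, Origin::External).is_ok());
}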
@ -798,6 +870,7 @@ mod test {
extern crate rustc_serialize; extern crate rustc_serialize;
use util::table::*; use util::table::*;
use util::*; use util::*;
use ethkey::{Random, Generator};
use transaction::*; use transaction::*;
use error::{Error, TransactionError}; use error::{Error, TransactionError};
use super::*; use super::*;
@ -811,59 +884,86 @@ mod test {
} }
} }
fn new_unsigned_tx(nonce: U256) -> Transaction { fn default_nonce() -> U256 { 123.into() }
fn default_gas_price() -> U256 { 1.into() }
fn new_unsigned_tx(nonce: U256, gas_price: U256) -> Transaction {
Transaction { Transaction {
action: Action::Create, action: Action::Create,
value: U256::from(100), value: U256::from(100),
data: "3331600055".from_hex().unwrap(), data: "3331600055".from_hex().unwrap(),
gas: U256::from(100_000), gas: U256::from(100_000),
gas_price: U256::one(), gas_price: gas_price,
nonce: nonce nonce: nonce
} }
} }
fn new_tx() -> SignedTransaction { fn new_tx(nonce: U256, gas_price: U256) -> SignedTransaction {
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
new_unsigned_tx(U256::from(123)).sign(keypair.secret()) new_unsigned_tx(nonce, gas_price).sign(keypair.secret())
} }
fn new_tx_default() -> SignedTransaction {
fn default_nonce_val() -> U256 { new_tx(default_nonce(), default_gas_price())
U256::from(123)
} }
fn default_nonce(_address: &Address) -> AccountDetails { fn default_account_details(_address: &Address) -> AccountDetails {
AccountDetails { AccountDetails {
nonce: default_nonce_val(), nonce: default_nonce(),
balance: !U256::zero() balance: !U256::zero()
} }
} }
/// Returns two transactions with identical (sender, nonce) but different hashes fn new_tx_pair(nonce: U256, gas_price: U256, nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) {
fn new_similar_txs() -> (SignedTransaction, SignedTransaction) { let tx1 = new_unsigned_tx(nonce, gas_price);
let keypair = KeyPair::create().unwrap(); let tx2 = new_unsigned_tx(nonce + nonce_increment, gas_price + gas_price_increment);
let secret = &keypair.secret();
let nonce = U256::from(123);
let tx = new_unsigned_tx(nonce);
let mut tx2 = new_unsigned_tx(nonce);
tx2.gas_price = U256::from(2);
(tx.sign(secret), tx2.sign(secret)) let keypair = Random.generate().unwrap();
let secret = &keypair.secret();
(tx1.sign(secret), tx2.sign(secret))
} }
fn new_txs(second_nonce: U256) -> (SignedTransaction, SignedTransaction) { fn new_tx_pair_default(nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) {
new_txs_with_gas_price_diff(second_nonce, U256::zero()) new_tx_pair(default_nonce(), default_gas_price(), nonce_increment, gas_price_increment)
} }
fn new_txs_with_gas_price_diff(second_nonce: U256, gas_price: U256) -> (SignedTransaction, SignedTransaction) { /// Returns two transactions with identical (sender, nonce) but different gas_price/hash.
let keypair = KeyPair::create().unwrap(); fn new_similar_tx_pair() -> (SignedTransaction, SignedTransaction) {
let secret = &keypair.secret(); new_tx_pair_default(0.into(), 1.into())
let nonce = U256::from(123); }
let tx = new_unsigned_tx(nonce);
let mut tx2 = new_unsigned_tx(nonce + second_nonce);
tx2.gas_price = tx2.gas_price + gas_price;
(tx.sign(secret), tx2.sign(secret)) #[test]
fn should_return_correct_nonces_when_dropped_because_of_limit() {
// given
let mut txq = TransactionQueue::with_limits(2, !U256::zero());
let (tx1, tx2) = new_tx_pair(123.into(), 1.into(), 1.into(), 0.into());
let sender = tx1.sender().unwrap();
let nonce = tx1.nonce;
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().pending, 2);
assert_eq!(txq.last_nonce(&sender), Some(nonce + U256::one()));
// when
let tx = new_tx(123.into(), 1.into());
let res = txq.add(tx.clone(), &default_account_details, TransactionOrigin::External);
// then
// No longer the case as we don't even consider a transaction that isn't above a full
// queue's minimum gas price.
// We may want to reconsider this in the near future so leaving this code in as a
// possible alternative.
/*
assert_eq!(res.unwrap(), TransactionImportResult::Current);
assert_eq!(txq.status().pending, 2);
assert_eq!(txq.last_nonce(&sender), Some(nonce));
*/
assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice {
minimal: 2.into(),
got: 1.into(),
});
assert_eq!(txq.status().pending, 2);
assert_eq!(txq.last_nonce(&sender), Some(tx2.nonce));
} }
#[test] #[test]
@ -872,9 +972,10 @@ mod test {
let mut set = TransactionSet { let mut set = TransactionSet {
by_priority: BTreeSet::new(), by_priority: BTreeSet::new(),
by_address: Table::new(), by_address: Table::new(),
by_gas_price: Default::default(),
limit: 1 limit: 1
}; };
let (tx1, tx2) = new_txs(U256::from(1)); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx1 = VerifiedTransaction::new(tx1, TransactionOrigin::External).unwrap(); let tx1 = VerifiedTransaction::new(tx1, TransactionOrigin::External).unwrap();
let tx2 = VerifiedTransaction::new(tx2, TransactionOrigin::External).unwrap(); let tx2 = VerifiedTransaction::new(tx2, TransactionOrigin::External).unwrap();
let mut by_hash = { let mut by_hash = {
@ -911,11 +1012,12 @@ mod test {
let mut set = TransactionSet { let mut set = TransactionSet {
by_priority: BTreeSet::new(), by_priority: BTreeSet::new(),
by_address: Table::new(), by_address: Table::new(),
by_gas_price: Default::default(),
limit: 1 limit: 1
}; };
// Create two transactions with same nonce // Create two transactions with same nonce
// (same hash) // (same hash)
let (tx1, tx2) = new_txs(U256::from(0)); let (tx1, tx2) = new_tx_pair_default(0.into(), 0.into());
let tx1 = VerifiedTransaction::new(tx1, TransactionOrigin::External).unwrap(); let tx1 = VerifiedTransaction::new(tx1, TransactionOrigin::External).unwrap();
let tx2 = VerifiedTransaction::new(tx2, TransactionOrigin::External).unwrap(); let tx2 = VerifiedTransaction::new(tx2, TransactionOrigin::External).unwrap();
let by_hash = { let by_hash = {
@ -931,25 +1033,68 @@ mod test {
set.insert(tx1.sender(), tx1.nonce(), order1.clone()); set.insert(tx1.sender(), tx1.nonce(), order1.clone());
assert_eq!(set.by_priority.len(), 1); assert_eq!(set.by_priority.len(), 1);
assert_eq!(set.by_address.len(), 1); assert_eq!(set.by_address.len(), 1);
assert_eq!(set.by_gas_price.len(), 1);
assert_eq!(*set.by_gas_price.iter().next().unwrap().0, 1.into());
assert_eq!(set.by_gas_price.iter().next().unwrap().1.len(), 1);
// Two different orders (imagine nonce changed in the meantime) // Two different orders (imagine nonce changed in the meantime)
let order2 = TransactionOrder::for_transaction(&tx2, U256::one()); let order2 = TransactionOrder::for_transaction(&tx2, U256::one());
set.insert(tx2.sender(), tx2.nonce(), order2.clone()); set.insert(tx2.sender(), tx2.nonce(), order2.clone());
assert_eq!(set.by_priority.len(), 1); assert_eq!(set.by_priority.len(), 1);
assert_eq!(set.by_address.len(), 1); assert_eq!(set.by_address.len(), 1);
assert_eq!(set.by_gas_price.len(), 1);
assert_eq!(*set.by_gas_price.iter().next().unwrap().0, 1.into());
assert_eq!(set.by_gas_price.iter().next().unwrap().1.len(), 1);
// then // then
assert_eq!(by_hash.len(), 1); assert_eq!(by_hash.len(), 1);
assert_eq!(set.by_priority.len(), 1); assert_eq!(set.by_priority.len(), 1);
assert_eq!(set.by_address.len(), 1); assert_eq!(set.by_address.len(), 1);
assert_eq!(set.by_gas_price.len(), 1);
assert_eq!(*set.by_gas_price.iter().next().unwrap().0, 1.into());
assert_eq!(set.by_gas_price.iter().next().unwrap().1.len(), 1);
assert_eq!(set.by_priority.iter().next().unwrap().clone(), order2); assert_eq!(set.by_priority.iter().next().unwrap().clone(), order2);
} }
#[test]
fn should_not_insert_same_transaction_twice_into_set() {
let mut set = TransactionSet {
by_priority: BTreeSet::new(),
by_address: Table::new(),
by_gas_price: Default::default(),
limit: 2
};
let tx = new_tx_default();
let tx1 = VerifiedTransaction::new(tx.clone(), TransactionOrigin::External).unwrap();
let order1 = TransactionOrder::for_transaction(&tx1, U256::zero());
assert!(set.insert(tx1.sender(), tx1.nonce(), order1).is_none());
let tx2 = VerifiedTransaction::new(tx, TransactionOrigin::External).unwrap();
let order2 = TransactionOrder::for_transaction(&tx2, U256::zero());
assert!(set.insert(tx2.sender(), tx2.nonce(), order2).is_some());
}
#[test]
fn should_give_correct_gas_price_entry_limit() {
let mut set = TransactionSet {
by_priority: BTreeSet::new(),
by_address: Table::new(),
by_gas_price: Default::default(),
limit: 1
};
assert_eq!(set.gas_price_entry_limit(), 0.into());
let tx = new_tx_default();
let tx1 = VerifiedTransaction::new(tx.clone(), TransactionOrigin::External).unwrap();
let order1 = TransactionOrder::for_transaction(&tx1, U256::zero());
assert!(set.insert(tx1.sender(), tx1.nonce(), order1.clone()).is_none());
assert_eq!(set.gas_price_entry_limit(), 2.into());
}
#[test] #[test]
fn should_handle_same_transaction_imported_twice_with_different_state_nonces() { fn should_handle_same_transaction_imported_twice_with_different_state_nonces() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_similar_txs(); let (tx, tx2) = new_similar_tx_pair();
let prev_nonce = |a: &Address| AccountDetails{ nonce: default_nonce(a).nonce - U256::one(), balance: let prev_nonce = |a: &Address| AccountDetails{ nonce: default_account_details(a).nonce - U256::one(), balance:
!U256::zero() }; !U256::zero() };
// First insert one transaction to future // First insert one transaction to future
@ -958,7 +1103,7 @@ mod test {
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
// now import second transaction to current // now import second transaction to current
let res = txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External); let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External);
// and then there should be only one transaction in current (the one with higher gas_price) // and then there should be only one transaction in current (the one with higher gas_price)
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -974,10 +1119,10 @@ mod test {
fn should_import_tx() { fn should_import_tx() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_tx(); let tx = new_tx_default();
// when // when
let res = txq.add(tx, &default_nonce, TransactionOrigin::External); let res = txq.add(tx, &default_account_details, TransactionOrigin::External);
// then // then
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1003,13 +1148,13 @@ mod test {
fn should_not_import_transaction_above_gas_limit() { fn should_not_import_transaction_above_gas_limit() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_tx(); let tx = new_tx_default();
let gas = tx.gas; let gas = tx.gas;
let limit = gas / U256::from(2); let limit = gas / U256::from(2);
txq.set_gas_limit(limit); txq.set_gas_limit(limit);
// when // when
let res = txq.add(tx, &default_nonce, TransactionOrigin::External); let res = txq.add(tx, &default_account_details, TransactionOrigin::External);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::GasLimitExceeded { assert_eq!(unwrap_tx_err(res), TransactionError::GasLimitExceeded {
@ -1026,9 +1171,9 @@ mod test {
fn should_drop_transactions_from_senders_without_balance() { fn should_drop_transactions_from_senders_without_balance() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_tx(); let tx = new_tx_default();
let account = |a: &Address| AccountDetails { let account = |a: &Address| AccountDetails {
nonce: default_nonce(a).nonce, nonce: default_account_details(a).nonce,
balance: U256::one() balance: U256::one()
}; };
@ -1049,11 +1194,11 @@ mod test {
fn should_not_import_transaction_below_min_gas_price_threshold_if_external() { fn should_not_import_transaction_below_min_gas_price_threshold_if_external() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_tx(); let tx = new_tx_default();
txq.set_minimal_gas_price(tx.gas_price + U256::one()); txq.set_minimal_gas_price(tx.gas_price + U256::one());
// when // when
let res = txq.add(tx, &default_nonce, TransactionOrigin::External); let res = txq.add(tx, &default_account_details, TransactionOrigin::External);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice { assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice {
@ -1069,11 +1214,11 @@ mod test {
fn should_import_transaction_below_min_gas_price_threshold_if_local() { fn should_import_transaction_below_min_gas_price_threshold_if_local() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_tx(); let tx = new_tx_default();
txq.set_minimal_gas_price(tx.gas_price + U256::one()); txq.set_minimal_gas_price(tx.gas_price + U256::one());
// when // when
let res = txq.add(tx, &default_nonce, TransactionOrigin::Local); let res = txq.add(tx, &default_account_details, TransactionOrigin::Local);
// then // then
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1086,7 +1231,7 @@ mod test {
fn should_reject_incorectly_signed_transaction() { fn should_reject_incorectly_signed_transaction() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_unsigned_tx(U256::from(123)); let tx = new_unsigned_tx(123.into(), 1.into());
let stx = { let stx = {
let mut s = RlpStream::new_list(9); let mut s = RlpStream::new_list(9);
s.append(&tx.nonce); s.append(&tx.nonce);
@ -1101,7 +1246,7 @@ mod test {
decode(s.as_raw()) decode(s.as_raw())
}; };
// when // when
let res = txq.add(stx, &default_nonce, TransactionOrigin::External); let res = txq.add(stx, &default_account_details, TransactionOrigin::External);
// then // then
assert!(res.is_err()); assert!(res.is_err());
@ -1112,11 +1257,11 @@ mod test {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::from(1)); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1129,15 +1274,15 @@ mod test {
fn should_prioritize_local_transactions_within_same_nonce_height() { fn should_prioritize_local_transactions_within_same_nonce_height() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_tx(); let tx = new_tx_default();
// the second one has same nonce but higher `gas_price` // the second one has same nonce but higher `gas_price`
let (_, tx2) = new_similar_txs(); let (_, tx2) = new_similar_tx_pair();
// when // when
// first insert the one with higher gas price // first insert the one with higher gas price
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
// then the one with lower gas price, but local // then the one with lower gas price, but local
txq.add(tx.clone(), &default_nonce, TransactionOrigin::Local).unwrap(); txq.add(tx.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1150,11 +1295,11 @@ mod test {
fn should_not_prioritize_local_transactions_with_different_nonce_height() { fn should_not_prioritize_local_transactions_with_different_nonce_height() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::from(1)); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::Local).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1168,11 +1313,11 @@ mod test {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::from(1)); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
// then // then
let top = txq.pending_hashes(); let top = txq.pending_hashes();
@ -1186,11 +1331,11 @@ mod test {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::from(2)); let (tx, tx2) = new_tx_pair_default(2.into(), 0.into());
// when // when
let res1 = txq.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); let res1 = txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
let res2 = txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); let res2 = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
// then // then
assert_eq!(res1, TransactionImportResult::Current); assert_eq!(res1, TransactionImportResult::Current);
@ -1206,13 +1351,13 @@ mod test {
#[test] #[test]
fn should_correctly_update_futures_when_removing() { fn should_correctly_update_futures_when_removing() {
// given // given
let prev_nonce = |a: &Address| AccountDetails{ nonce: default_nonce(a).nonce - U256::one(), balance: let prev_nonce = |a: &Address| AccountDetails{ nonce: default_account_details(a).nonce - U256::one(), balance:
!U256::zero() }; !U256::zero() };
let next2_nonce = default_nonce_val() + U256::from(3); let next2_nonce = default_nonce() + U256::from(3);
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::from(1)); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), &prev_nonce, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &prev_nonce, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().future, 2); assert_eq!(txq.status().future, 2);
@ -1230,19 +1375,19 @@ mod test {
fn should_move_transactions_if_gap_filled() { fn should_move_transactions_if_gap_filled() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let kp = KeyPair::create().unwrap(); let kp = Random.generate().unwrap();
let secret = kp.secret(); let secret = kp.secret();
let tx = new_unsigned_tx(U256::from(123)).sign(secret); let tx = new_unsigned_tx(123.into(), 1.into()).sign(secret);
let tx1 = new_unsigned_tx(U256::from(124)).sign(secret); let tx1 = new_unsigned_tx(124.into(), 1.into()).sign(secret);
let tx2 = new_unsigned_tx(U256::from(125)).sign(secret); let tx2 = new_unsigned_tx(125.into(), 1.into()).sign(secret);
txq.add(tx, &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx, &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().pending, 1); assert_eq!(txq.status().pending, 1);
txq.add(tx2, &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
// when // when
txq.add(tx1, &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1254,9 +1399,9 @@ mod test {
fn should_remove_transaction() { fn should_remove_transaction() {
// given // given
let mut txq2 = TransactionQueue::new(); let mut txq2 = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::from(3)); let (tx, tx2) = new_tx_pair_default(3.into(), 0.into());
txq2.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq2.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq2.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq2.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq2.status().pending, 1); assert_eq!(txq2.status().pending, 1);
assert_eq!(txq2.status().future, 1); assert_eq!(txq2.status().future, 1);
@ -1275,16 +1420,16 @@ mod test {
fn should_move_transactions_to_future_if_gap_introduced() { fn should_move_transactions_to_future_if_gap_introduced() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::from(1)); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx3 = new_tx(); let tx3 = new_tx_default();
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx3.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().pending, 3); assert_eq!(txq.status().pending, 3);
// when // when
txq.remove_invalid(&tx.hash(), &default_nonce); txq.remove_invalid(&tx.hash(), &default_account_details);
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1296,11 +1441,11 @@ mod test {
fn should_clear_queue() { fn should_clear_queue() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::one()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// add // add
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
let stats = txq.status(); let stats = txq.status();
assert_eq!(stats.pending, 2); assert_eq!(stats.pending, 2);
@ -1316,60 +1461,38 @@ mod test {
fn should_drop_old_transactions_when_hitting_the_limit() { fn should_drop_old_transactions_when_hitting_the_limit() {
// given // given
let mut txq = TransactionQueue::with_limits(1, !U256::zero()); let mut txq = TransactionQueue::with_limits(1, !U256::zero());
let (tx, tx2) = new_txs(U256::one()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let sender = tx.sender().unwrap(); let sender = tx.sender().unwrap();
let nonce = tx.nonce; let nonce = tx.nonce;
txq.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().pending, 1); assert_eq!(txq.status().pending, 1);
// when // when
let res = txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External); let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External);
// then // then
let t = txq.top_transactions(); let t = txq.top_transactions();
assert_eq!(unwrap_tx_err(res), TransactionError::LimitReached); assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice { minimal: 2.into(), got: 1.into() });
assert_eq!(txq.status().pending, 1); assert_eq!(txq.status().pending, 1);
assert_eq!(t.len(), 1); assert_eq!(t.len(), 1);
assert_eq!(t[0], tx); assert_eq!(t[0], tx);
assert_eq!(txq.last_nonce(&sender), Some(nonce)); assert_eq!(txq.last_nonce(&sender), Some(nonce));
} }
#[test]
fn should_return_correct_nonces_when_dropped_because_of_limit() {
// given
let mut txq = TransactionQueue::with_limits(2, !U256::zero());
let tx = new_tx();
let (tx1, tx2) = new_txs(U256::one());
let sender = tx1.sender().unwrap();
let nonce = tx1.nonce;
txq.add(tx1.clone(), &default_nonce, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().pending, 2);
assert_eq!(txq.last_nonce(&sender), Some(nonce + U256::one()));
// when
let res = txq.add(tx.clone(), &default_nonce, TransactionOrigin::External);
// then
assert_eq!(res.unwrap(), TransactionImportResult::Current);
assert_eq!(txq.status().pending, 2);
assert_eq!(txq.last_nonce(&sender), Some(nonce));
}
#[test] #[test]
fn should_limit_future_transactions() { fn should_limit_future_transactions() {
let mut txq = TransactionQueue::with_limits(1, !U256::zero()); let mut txq = TransactionQueue::with_limits(1, !U256::zero());
txq.current.set_limit(10); txq.current.set_limit(10);
let (tx1, tx2) = new_txs_with_gas_price_diff(U256::from(4), U256::from(1)); let (tx1, tx2) = new_tx_pair_default(4.into(), 1.into());
let (tx3, tx4) = new_txs_with_gas_price_diff(U256::from(4), U256::from(2)); let (tx3, tx4) = new_tx_pair_default(4.into(), 2.into());
txq.add(tx1.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx3.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
// when // when
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx4.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx4.clone(), &default_account_details, TransactionOrigin::External).unwrap();
// then // then
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
@ -1378,7 +1501,7 @@ mod test {
#[test] #[test]
fn should_drop_transactions_with_old_nonces() { fn should_drop_transactions_with_old_nonces() {
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_tx(); let tx = new_tx_default();
let last_nonce = tx.nonce + U256::one(); let last_nonce = tx.nonce + U256::one();
let fetch_last_nonce = |_a: &Address| AccountDetails{ nonce: last_nonce, balance: !U256::zero() }; let fetch_last_nonce = |_a: &Address| AccountDetails{ nonce: last_nonce, balance: !U256::zero() };
@ -1395,11 +1518,11 @@ mod test {
#[test] #[test]
fn should_not_insert_same_transaction_twice() { fn should_not_insert_same_transaction_twice() {
// given // given
let nonce = |a: &Address| AccountDetails { nonce: default_nonce(a).nonce + U256::one(), let nonce = |a: &Address| AccountDetails { nonce: default_account_details(a).nonce + U256::one(),
balance: !U256::zero() }; balance: !U256::zero() };
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (_tx1, tx2) = new_txs(U256::from(1)); let (_tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
assert_eq!(txq.status().pending, 0); assert_eq!(txq.status().pending, 0);
@ -1417,16 +1540,16 @@ mod test {
fn should_accept_same_transaction_twice_if_removed() { fn should_accept_same_transaction_twice_if_removed() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx1, tx2) = new_txs(U256::from(1)); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx1.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
// when // when
txq.remove_invalid(&tx1.hash(), &default_nonce); txq.remove_invalid(&tx1.hash(), &default_account_details);
assert_eq!(txq.status().pending, 0); assert_eq!(txq.status().pending, 0);
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx1.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1438,17 +1561,17 @@ mod test {
fn should_not_move_to_future_if_state_nonce_is_higher() { fn should_not_move_to_future_if_state_nonce_is_higher() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx, tx2) = new_txs(U256::from(1)); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx3 = new_tx(); let tx3 = new_tx_default();
txq.add(tx2.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx3.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().pending, 3); assert_eq!(txq.status().pending, 3);
// when // when
let sender = tx.sender().unwrap(); let sender = tx.sender().unwrap();
txq.remove_all(sender, default_nonce_val() + U256::one()); txq.remove_all(sender, default_nonce() + U256::one());
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1461,8 +1584,8 @@ mod test {
init_log(); init_log();
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
let tx = new_unsigned_tx(U256::from(123)).sign(keypair.secret()); let tx = new_unsigned_tx(123.into(), 1.into()).sign(keypair.secret());
let tx2 = { let tx2 = {
let mut tx2 = (*tx).clone(); let mut tx2 = (*tx).clone();
tx2.gas_price = U256::from(200); tx2.gas_price = U256::from(200);
@ -1470,8 +1593,8 @@ mod test {
}; };
// when // when
txq.add(tx, &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2, &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1484,8 +1607,8 @@ mod test {
fn should_replace_same_transaction_when_importing_to_futures() { fn should_replace_same_transaction_when_importing_to_futures() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
let tx0 = new_unsigned_tx(U256::from(123)).sign(keypair.secret()); let tx0 = new_unsigned_tx(123.into(), 1.into()).sign(keypair.secret());
let tx1 = { let tx1 = {
let mut tx1 = (*tx0).clone(); let mut tx1 = (*tx0).clone();
tx1.nonce = U256::from(124); tx1.nonce = U256::from(124);
@ -1498,10 +1621,10 @@ mod test {
}; };
// when // when
txq.add(tx1, &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2, &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx0, &default_nonce, TransactionOrigin::External).unwrap(); txq.add(tx0, &default_account_details, TransactionOrigin::External).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1513,12 +1636,12 @@ mod test {
#[test] #[test]
fn should_recalculate_height_when_removing_from_future() { fn should_recalculate_height_when_removing_from_future() {
// given // given
let previous_nonce = |a: &Address| AccountDetails{ nonce: default_nonce(a).nonce - U256::one(), balance: let previous_nonce = |a: &Address| AccountDetails{ nonce: default_account_details(a).nonce - U256::one(), balance:
!U256::zero() }; !U256::zero() };
let next_nonce = |a: &Address| AccountDetails{ nonce: default_nonce(a).nonce + U256::one(), balance: let next_nonce = |a: &Address| AccountDetails{ nonce: default_account_details(a).nonce + U256::one(), balance:
!U256::zero() }; !U256::zero() };
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx1, tx2) = new_txs(U256::one()); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx1.clone(), &previous_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), &previous_nonce, TransactionOrigin::External).unwrap();
txq.add(tx2, &previous_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2, &previous_nonce, TransactionOrigin::External).unwrap();
assert_eq!(txq.status().future, 2); assert_eq!(txq.status().future, 2);
@ -1545,7 +1668,7 @@ mod test {
fn should_return_correct_nonce_when_transactions_from_given_address_exist() { fn should_return_correct_nonce_when_transactions_from_given_address_exist() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let tx = new_tx(); let tx = new_tx_default();
let from = tx.sender().unwrap(); let from = tx.sender().unwrap();
let nonce = tx.nonce; let nonce = tx.nonce;
let details = |_a: &Address| AccountDetails { nonce: nonce, balance: !U256::zero() }; let details = |_a: &Address| AccountDetails { nonce: nonce, balance: !U256::zero() };
@ -1561,7 +1684,7 @@ mod test {
fn should_remove_old_transaction_even_if_newer_transaction_was_not_known() { fn should_remove_old_transaction_even_if_newer_transaction_was_not_known() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx1, tx2) = new_txs(U256::one()); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
let (nonce1, nonce2) = (tx1.nonce, tx2.nonce); let (nonce1, nonce2) = (tx1.nonce, tx2.nonce);
let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() }; let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() };
@ -1579,7 +1702,7 @@ mod test {
fn should_return_valid_last_nonce_after_remove_all() { fn should_return_valid_last_nonce_after_remove_all() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx1, tx2) = new_txs(U256::from(4)); let (tx1, tx2) = new_tx_pair_default(4.into(), 0.into());
let sender = tx1.sender().unwrap(); let sender = tx1.sender().unwrap();
let (nonce1, nonce2) = (tx1.nonce, tx2.nonce); let (nonce1, nonce2) = (tx1.nonce, tx2.nonce);
let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() }; let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() };
@ -1603,13 +1726,13 @@ mod test {
fn should_return_true_if_there_is_local_transaction_pending() { fn should_return_true_if_there_is_local_transaction_pending() {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx1, tx2) = new_txs(U256::from(1)); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
assert_eq!(txq.has_local_pending_transactions(), false); assert_eq!(txq.has_local_pending_transactions(), false);
// when // when
assert_eq!(txq.add(tx1, &default_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Current); assert_eq!(txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap(), TransactionImportResult::Current);
assert_eq!(txq.has_local_pending_transactions(), false); assert_eq!(txq.has_local_pending_transactions(), false);
assert_eq!(txq.add(tx2, &default_nonce, TransactionOrigin::Local).unwrap(), TransactionImportResult::Current); assert_eq!(txq.add(tx2, &default_account_details, TransactionOrigin::Local).unwrap(), TransactionImportResult::Current);
// then // then
assert_eq!(txq.has_local_pending_transactions(), true); assert_eq!(txq.has_local_pending_transactions(), true);
@ -1619,9 +1742,9 @@ mod test {
fn should_keep_right_order_in_future() { fn should_keep_right_order_in_future() {
// given // given
let mut txq = TransactionQueue::with_limits(1, !U256::zero()); let mut txq = TransactionQueue::with_limits(1, !U256::zero());
let (tx1, tx2) = new_txs(U256::from(1)); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
let prev_nonce = |a: &Address| AccountDetails { nonce: default_nonce(a).nonce - U256::one(), balance: let prev_nonce = |a: &Address| AccountDetails { nonce: default_account_details(a).nonce - U256::one(), balance:
default_nonce(a).balance }; default_account_details(a).balance };
// when // when
assert_eq!(txq.add(tx2, &prev_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Future); assert_eq!(txq.add(tx2, &prev_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Future);
@ -1637,27 +1760,26 @@ mod test {
// given // given
let mut txq = TransactionQueue::new(); let mut txq = TransactionQueue::new();
let (tx1, tx2, tx2_2, tx3) = { let (tx1, tx2, tx2_2, tx3) = {
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
let secret = &keypair.secret(); let secret = &keypair.secret();
let nonce = U256::from(123); let nonce = 123.into();
let tx = new_unsigned_tx(nonce); let tx = new_unsigned_tx(nonce, 1.into());
let tx2 = new_unsigned_tx(nonce + 1.into()); let tx2 = new_unsigned_tx(nonce + 1.into(), 1.into());
let mut tx2_2 = new_unsigned_tx(nonce + 1.into()); let tx2_2 = new_unsigned_tx(nonce + 1.into(), 5.into());
tx2_2.gas_price = U256::from(5); let tx3 = new_unsigned_tx(nonce + 2.into(), 1.into());
let tx3 = new_unsigned_tx(nonce + 2.into());
(tx.sign(secret), tx2.sign(secret), tx2_2.sign(secret), tx3.sign(secret)) (tx.sign(secret), tx2.sign(secret), tx2_2.sign(secret), tx3.sign(secret))
}; };
let sender = tx1.sender().unwrap(); let sender = tx1.sender().unwrap();
txq.add(tx1, &default_nonce, TransactionOrigin::Local).unwrap(); txq.add(tx1, &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx2, &default_nonce, TransactionOrigin::Local).unwrap(); txq.add(tx2, &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx3, &default_nonce, TransactionOrigin::Local).unwrap(); txq.add(tx3, &default_account_details, TransactionOrigin::Local).unwrap();
assert_eq!(txq.future.by_priority.len(), 0); assert_eq!(txq.future.by_priority.len(), 0);
assert_eq!(txq.current.by_priority.len(), 3); assert_eq!(txq.current.by_priority.len(), 3);
// when // when
let res = txq.add(tx2_2, &default_nonce, TransactionOrigin::Local); let res = txq.add(tx2_2, &default_account_details, TransactionOrigin::Local);
// then // then
assert_eq!(txq.last_nonce(&sender).unwrap(), 125.into()); assert_eq!(txq.last_nonce(&sender).unwrap(), 125.into());
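The tests above drive the queue through small account-details provider closures (default_account_details, prev_nonce, next_nonce) that decide whether an incoming transaction lands in the current or the future set. A minimal standalone sketch of that provider pattern, with simplified stand-ins for ethcore's Address, U256 and AccountDetails types:

// Simplified stand-ins for the test helpers above; not the ethcore types.
#[derive(Clone, Copy, Debug, PartialEq)]
struct AccountDetails {
    nonce: u64,
    balance: u64,
}

type Address = [u8; 20];

fn default_account_details(_a: &Address) -> AccountDetails {
    AccountDetails { nonce: 123, balance: u64::MAX }
}

// Providers derived from the default, as in the prev_nonce / next_nonce closures above.
fn prev_nonce(a: &Address) -> AccountDetails {
    let d = default_account_details(a);
    AccountDetails { nonce: d.nonce - 1, ..d }
}

fn next_nonce(a: &Address) -> AccountDetails {
    let d = default_account_details(a);
    AccountDetails { nonce: d.nonce + 1, ..d }
}

fn main() {
    let addr = [0u8; 20];
    assert_eq!(prev_nonce(&addr).nonce + 2, next_nonce(&addr).nonce);
    // A queue would call one of these providers per sender to decide
    // whether an incoming transaction is current or future.
    println!("{:?}", default_account_details(&addr));
}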

View File

@ -60,6 +60,7 @@ impl ClientService {
config: ClientConfig, config: ClientConfig,
spec: &Spec, spec: &Spec,
db_path: &Path, db_path: &Path,
ipc_path: &Path,
miner: Arc<Miner>, miner: Arc<Miner>,
) -> Result<ClientService, Error> ) -> Result<ClientService, Error>
{ {
@ -86,7 +87,7 @@ impl ClientService {
try!(io_service.register_handler(client_io)); try!(io_service.register_handler(client_io));
let stop_guard = ::devtools::StopGuard::new(); let stop_guard = ::devtools::StopGuard::new();
run_ipc(client.clone(), stop_guard.share()); run_ipc(ipc_path, client.clone(), stop_guard.share());
Ok(ClientService { Ok(ClientService {
io_service: Arc::new(io_service), io_service: Arc::new(io_service),
@ -167,10 +168,13 @@ impl IoHandler<ClientIoMessage> for ClientIoHandler {
} }
#[cfg(feature="ipc")] #[cfg(feature="ipc")]
fn run_ipc(client: Arc<Client>, stop: Arc<AtomicBool>) { fn run_ipc(base_path: &Path, client: Arc<Client>, stop: Arc<AtomicBool>) {
let mut path = base_path.to_owned();
path.push("parity-chain.ipc");
let socket_addr = format!("ipc://{}", path.to_string_lossy());
::std::thread::spawn(move || { ::std::thread::spawn(move || {
let mut worker = nanoipc::Worker::new(&(client as Arc<BlockChainClient>)); let mut worker = nanoipc::Worker::new(&(client as Arc<BlockChainClient>));
worker.add_reqrep("ipc:///tmp/parity-chain.ipc").expect("Ipc expected to initialize with no issues"); worker.add_reqrep(&socket_addr).expect("Ipc expected to initialize with no issues");
while !stop.load(::std::sync::atomic::Ordering::Relaxed) { while !stop.load(::std::sync::atomic::Ordering::Relaxed) {
worker.poll(); worker.poll();
@ -179,7 +183,7 @@ fn run_ipc(client: Arc<Client>, stop: Arc<AtomicBool>) {
} }
#[cfg(not(feature="ipc"))] #[cfg(not(feature="ipc"))]
fn run_ipc(_client: Arc<Client>, _stop: Arc<AtomicBool>) { fn run_ipc(_base_path: &Path, _client: Arc<Client>, _stop: Arc<AtomicBool>) {
} }
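run_ipc now derives its endpoint from the configured base path rather than a fixed /tmp socket. A std-only sketch of that address construction (the nanoipc worker itself is omitted):

use std::path::{Path, PathBuf};

// Build an "ipc://" address under the given base directory, mirroring the
// parity-chain.ipc construction above.
fn chain_ipc_address(base_path: &Path) -> String {
    let mut path: PathBuf = base_path.to_owned();
    path.push("parity-chain.ipc");
    format!("ipc://{}", path.to_string_lossy())
}

fn main() {
    // On Unix this prints ipc:///var/lib/parity/parity-chain.ipc
    let addr = chain_ipc_address(Path::new("/var/lib/parity"));
    println!("{}", addr);
}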
#[cfg(test)] #[cfg(test)]
@ -203,6 +207,7 @@ mod tests {
ClientConfig::default(), ClientConfig::default(),
&spec, &spec,
&path, &path,
&path,
Arc::new(Miner::with_spec(&spec)), Arc::new(Miner::with_spec(&spec)),
); );
assert!(service.is_ok()); assert!(service.is_ok());

View File

@ -17,10 +17,39 @@
//! Account state encoding and decoding //! Account state encoding and decoding
use account_db::{AccountDB, AccountDBMut}; use account_db::{AccountDB, AccountDBMut};
use util::{U256, FixedHash, H256, Bytes, HashDB, SHA3_EMPTY, TrieDB}; use util::{U256, FixedHash, H256, Bytes, HashDB, SHA3_EMPTY};
use util::rlp::{Rlp, RlpStream, Stream, UntrustedRlp, View}; use util::rlp::{Rlp, RlpStream, Stream, UntrustedRlp, View};
use util::trie::{TrieDB, Trie};
use snapshot::Error; use snapshot::Error;
use std::collections::{HashMap, HashSet};
// whether an encoded account has code and how it is referred to.
#[repr(u8)]
enum CodeState {
// the account has no code.
Empty = 0,
// raw code is encoded.
Inline = 1,
// the code is referred to by hash.
Hash = 2,
}
impl CodeState {
fn from(x: u8) -> Result<Self, Error> {
match x {
0 => Ok(CodeState::Empty),
1 => Ok(CodeState::Inline),
2 => Ok(CodeState::Hash),
_ => Err(Error::UnrecognizedCodeState(x))
}
}
fn raw(self) -> u8 {
self as u8
}
}
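CodeState travels in the fat RLP as a bare u8 tag, and an unknown tag must come back as an error rather than a panic. A self-contained sketch of the same tag round trip, with a local error type standing in for snapshot::Error::UnrecognizedCodeState:

// Standalone version of the CodeState tag round trip; the error type is a
// local stand-in, not snapshot::Error.
#[derive(Debug, PartialEq)]
enum CodeState {
    Empty = 0,
    Inline = 1,
    Hash = 2,
}

#[derive(Debug, PartialEq)]
struct UnrecognizedCodeState(u8);

impl CodeState {
    fn from(x: u8) -> Result<Self, UnrecognizedCodeState> {
        match x {
            0 => Ok(CodeState::Empty),
            1 => Ok(CodeState::Inline),
            2 => Ok(CodeState::Hash),
            _ => Err(UnrecognizedCodeState(x)),
        }
    }

    fn raw(self) -> u8 {
        self as u8
    }
}

fn main() {
    assert_eq!(CodeState::from(CodeState::Inline.raw()), Ok(CodeState::Inline));
    assert_eq!(CodeState::from(9), Err(UnrecognizedCodeState(9)));
}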
// An alternate account structure from ::account::Account. // An alternate account structure from ::account::Account.
#[derive(PartialEq, Clone, Debug)] #[derive(PartialEq, Clone, Debug)]
pub struct Account { pub struct Account {
@ -57,7 +86,7 @@ impl Account {
// walk the account's storage trie, returning an RLP item containing the // walk the account's storage trie, returning an RLP item containing the
// account properties and the storage. // account properties and the storage.
pub fn to_fat_rlp(&self, acct_db: &AccountDB) -> Result<Bytes, Error> { pub fn to_fat_rlp(&self, acct_db: &AccountDB, used_code: &mut HashSet<H256>) -> Result<Bytes, Error> {
let db = try!(TrieDB::new(acct_db, &self.storage_root)); let db = try!(TrieDB::new(acct_db, &self.storage_root));
let mut pairs = Vec::new(); let mut pairs = Vec::new();
@ -80,11 +109,14 @@ impl Account {
// [has_code, code_hash]. // [has_code, code_hash].
if self.code_hash == SHA3_EMPTY { if self.code_hash == SHA3_EMPTY {
account_stream.append(&false).append_empty_data(); account_stream.append(&CodeState::Empty.raw()).append_empty_data();
} else if used_code.contains(&self.code_hash) {
account_stream.append(&CodeState::Hash.raw()).append(&self.code_hash);
} else { } else {
match acct_db.get(&self.code_hash) { match acct_db.get(&self.code_hash) {
Some(c) => { Some(c) => {
account_stream.append(&true).append(&c); used_code.insert(self.code_hash.clone());
account_stream.append(&CodeState::Inline.raw()).append(&c);
} }
None => { None => {
warn!("code lookup failed during snapshot"); warn!("code lookup failed during snapshot");
@ -99,16 +131,39 @@ impl Account {
} }
// decode a fat rlp, and rebuild the storage trie as we go. // decode a fat rlp, and rebuild the storage trie as we go.
pub fn from_fat_rlp(acct_db: &mut AccountDBMut, rlp: UntrustedRlp) -> Result<Self, Error> { // returns the account structure along with its newly recovered code,
// if it exists.
pub fn from_fat_rlp(
acct_db: &mut AccountDBMut,
rlp: UntrustedRlp,
code_map: &HashMap<H256, Bytes>,
) -> Result<(Self, Option<Bytes>), Error> {
use util::{TrieDBMut, TrieMut}; use util::{TrieDBMut, TrieMut};
let nonce = try!(rlp.val_at(0)); let nonce = try!(rlp.val_at(0));
let balance = try!(rlp.val_at(1)); let balance = try!(rlp.val_at(1));
let code_hash = if try!(rlp.val_at(2)) { let code_state: CodeState = {
let raw: u8 = try!(rlp.val_at(2));
try!(CodeState::from(raw))
};
// load the code if it exists.
let (code_hash, new_code) = match code_state {
CodeState::Empty => (SHA3_EMPTY, None),
CodeState::Inline => {
let code: Bytes = try!(rlp.val_at(3)); let code: Bytes = try!(rlp.val_at(3));
acct_db.insert(&code) let code_hash = acct_db.insert(&code);
} else {
SHA3_EMPTY (code_hash, Some(code))
}
CodeState::Hash => {
let code_hash = try!(rlp.val_at(3));
if let Some(code) = code_map.get(&code_hash) {
acct_db.emplace(code_hash.clone(), code.clone());
}
(code_hash, None)
}
}; };
let mut storage_root = H256::zero(); let mut storage_root = H256::zero();
@ -123,12 +178,20 @@ impl Account {
try!(storage_trie.insert(&k, &v)); try!(storage_trie.insert(&k, &v));
} }
} }
Ok(Account {
let acc = Account {
nonce: nonce, nonce: nonce,
balance: balance, balance: balance,
storage_root: storage_root, storage_root: storage_root,
code_hash: code_hash, code_hash: code_hash,
}) };
Ok((acc, new_code))
}
/// Get the account's code hash.
pub fn code_hash(&self) -> &H256 {
&self.code_hash
} }
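Taken together, the used_code set on the encode side and the code_map on the decode side deduplicate contract code across accounts: the first account to reference a blob embeds it inline and records its hash, later accounts emit only the hash, and the decoder resolves hashes against code it has already recovered. A simplified, self-contained sketch of that bookkeeping, with u64 hashes and byte vectors standing in for H256 and RLP:

use std::collections::{HashMap, HashSet};

// Stand-ins: a "hash" is just a u64 here, code is a byte vector.
type CodeHash = u64;

#[derive(Debug, PartialEq)]
enum EncodedCode {
    Empty,
    Inline(Vec<u8>),
    Hash(CodeHash),
}

fn hash(code: &[u8]) -> CodeHash {
    // Toy hash for the sketch only.
    code.iter().fold(0u64, |acc, b| acc.wrapping_mul(31).wrapping_add(*b as u64))
}

// Encode one account's code, mirroring to_fat_rlp's used_code handling.
fn encode_code(code: Option<&[u8]>, used_code: &mut HashSet<CodeHash>) -> EncodedCode {
    match code {
        None => EncodedCode::Empty,
        Some(c) => {
            let h = hash(c);
            if used_code.contains(&h) {
                EncodedCode::Hash(h)
            } else {
                used_code.insert(h);
                EncodedCode::Inline(c.to_vec())
            }
        }
    }
}

// Decode, mirroring from_fat_rlp: inline code is returned as "new code",
// hashed code is looked up in the map of code recovered so far.
fn decode_code(enc: EncodedCode, code_map: &HashMap<CodeHash, Vec<u8>>) -> Option<Vec<u8>> {
    match enc {
        EncodedCode::Empty => None,
        EncodedCode::Inline(c) => Some(c),
        EncodedCode::Hash(h) => code_map.get(&h).cloned(),
    }
}

fn main() {
    let code = b"this is definitely code".to_vec();
    let mut used = HashSet::new();
    let first = encode_code(Some(&code[..]), &mut used);
    let second = encode_code(Some(&code[..]), &mut used);
    assert_eq!(first, EncodedCode::Inline(code.clone()));
    assert_eq!(second, EncodedCode::Hash(hash(&code)));

    let mut code_map = HashMap::new();
    let recovered = decode_code(first, &code_map);
    if let Some(c) = recovered {
        code_map.insert(hash(&c), c);
    }
    assert_eq!(decode_code(second, &code_map), Some(code));
}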
#[cfg(test)] #[cfg(test)]
@ -144,9 +207,11 @@ mod tests {
use snapshot::tests::helpers::fill_storage; use snapshot::tests::helpers::fill_storage;
use util::{SHA3_NULL_RLP, SHA3_EMPTY}; use util::{SHA3_NULL_RLP, SHA3_EMPTY};
use util::{Address, FixedHash, H256}; use util::{Address, FixedHash, H256, HashDB};
use util::rlp::{UntrustedRlp, View}; use util::rlp::{UntrustedRlp, View};
use std::collections::{HashSet, HashMap};
use super::Account; use super::Account;
#[test] #[test]
@ -165,9 +230,9 @@ mod tests {
let thin_rlp = account.to_thin_rlp(); let thin_rlp = account.to_thin_rlp();
assert_eq!(Account::from_thin_rlp(&thin_rlp), account); assert_eq!(Account::from_thin_rlp(&thin_rlp), account);
let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr)).unwrap(); let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap();
let fat_rlp = UntrustedRlp::new(&fat_rlp); let fat_rlp = UntrustedRlp::new(&fat_rlp);
assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap(), account); assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, &Default::default()).unwrap().0, account);
} }
#[test] #[test]
@ -191,8 +256,59 @@ mod tests {
let thin_rlp = account.to_thin_rlp(); let thin_rlp = account.to_thin_rlp();
assert_eq!(Account::from_thin_rlp(&thin_rlp), account); assert_eq!(Account::from_thin_rlp(&thin_rlp), account);
let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr)).unwrap(); let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap();
let fat_rlp = UntrustedRlp::new(&fat_rlp); let fat_rlp = UntrustedRlp::new(&fat_rlp);
assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap(), account); assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, &Default::default()).unwrap().0, account);
}
#[test]
fn encoding_code() {
let mut db = get_temp_journal_db();
let mut db = &mut **db;
let addr1 = Address::random();
let addr2 = Address::random();
let code_hash = {
let mut acct_db = AccountDBMut::new(db.as_hashdb_mut(), &addr1);
acct_db.insert(b"this is definitely code")
};
{
let mut acct_db = AccountDBMut::new(db.as_hashdb_mut(), &addr2);
acct_db.emplace(code_hash.clone(), b"this is definitely code".to_vec());
}
let account1 = Account {
nonce: 50.into(),
balance: 123456789.into(),
storage_root: SHA3_NULL_RLP,
code_hash: code_hash,
};
let account2 = Account {
nonce: 400.into(),
balance: 98765432123456789usize.into(),
storage_root: SHA3_NULL_RLP,
code_hash: code_hash,
};
let mut used_code = HashSet::new();
let fat_rlp1 = account1.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr1), &mut used_code).unwrap();
let fat_rlp2 = account2.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr2), &mut used_code).unwrap();
assert_eq!(used_code.len(), 1);
let fat_rlp1 = UntrustedRlp::new(&fat_rlp1);
let fat_rlp2 = UntrustedRlp::new(&fat_rlp2);
let code_map = HashMap::new();
let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2, &code_map).unwrap();
assert!(maybe_code.is_none());
assert_eq!(acc, account2);
let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1, &code_map).unwrap();
assert_eq!(maybe_code, Some(b"this is definitely code".to_vec()));
assert_eq!(acc, account1);
} }
} }

View File

@ -21,6 +21,7 @@ use header::Header;
use views::BlockView; use views::BlockView;
use util::rlp::{DecoderError, RlpStream, Stream, UntrustedRlp, View}; use util::rlp::{DecoderError, RlpStream, Stream, UntrustedRlp, View};
use util::rlp::{Compressible, RlpType};
use util::{Bytes, Hashable, H256}; use util::{Bytes, Hashable, H256};
const HEADER_FIELDS: usize = 10; const HEADER_FIELDS: usize = 10;
@ -31,10 +32,10 @@ pub struct AbridgedBlock {
} }
impl AbridgedBlock { impl AbridgedBlock {
/// Create from a vector of bytes. Does no verification. /// Create from rlp-compressed bytes. Does no verification.
pub fn from_raw(rlp: Bytes) -> Self { pub fn from_raw(compressed: Bytes) -> Self {
AbridgedBlock { AbridgedBlock {
rlp: rlp, rlp: compressed,
} }
} }
@ -78,7 +79,7 @@ impl AbridgedBlock {
} }
AbridgedBlock { AbridgedBlock {
rlp: stream.out(), rlp: UntrustedRlp::new(stream.as_raw()).compress(RlpType::Blocks).to_vec(),
} }
} }
@ -86,7 +87,8 @@ impl AbridgedBlock {
/// ///
/// Will fail if contains invalid rlp. /// Will fail if contains invalid rlp.
pub fn to_block(&self, parent_hash: H256, number: u64) -> Result<Block, DecoderError> { pub fn to_block(&self, parent_hash: H256, number: u64) -> Result<Block, DecoderError> {
let rlp = UntrustedRlp::new(&self.rlp); let rlp = UntrustedRlp::new(&self.rlp).decompress(RlpType::Blocks);
let rlp = UntrustedRlp::new(&rlp);
let mut header = Header { let mut header = Header {
parent_hash: parent_hash, parent_hash: parent_hash,
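The abridged block now keeps its payload compressed and only decompresses inside to_block. A standalone sketch of that store-compressed / decompress-on-read shape, using zlib via the flate2 crate purely as a stand-in for util::rlp's RlpType::Blocks compression (an assumption for illustration, not the ethcore code):

// Sketch of the pattern used by AbridgedBlock above: compress on
// construction, decompress lazily in the accessor.
use std::io::{Read, Write};

use flate2::read::ZlibDecoder;
use flate2::write::ZlibEncoder;
use flate2::Compression;

struct AbridgedBlob {
    compressed: Vec<u8>,
}

impl AbridgedBlob {
    fn from_plain(plain: &[u8]) -> std::io::Result<Self> {
        let mut enc = ZlibEncoder::new(Vec::new(), Compression::default());
        enc.write_all(plain)?;
        Ok(AbridgedBlob { compressed: enc.finish()? })
    }

    // Decompress only when the caller actually needs the full bytes.
    fn to_plain(&self) -> std::io::Result<Vec<u8>> {
        let mut dec = ZlibDecoder::new(&self.compressed[..]);
        let mut out = Vec::new();
        dec.read_to_end(&mut out)?;
        Ok(out)
    }
}

fn main() -> std::io::Result<()> {
    let block_rlp = vec![0u8; 1024]; // highly compressible placeholder payload
    let abridged = AbridgedBlob::from_plain(&block_rlp)?;
    assert!(abridged.compressed.len() < block_rlp.len());
    assert_eq!(abridged.to_plain()?, block_rlp);
    Ok(())
}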

View File

@ -35,6 +35,10 @@ pub enum Error {
IncompleteChain, IncompleteChain,
/// Old starting block in a pruned database. /// Old starting block in a pruned database.
OldBlockPrunedDB, OldBlockPrunedDB,
/// Missing code.
MissingCode(Vec<H256>),
/// Unrecognized code encoding.
UnrecognizedCodeState(u8),
/// Trie error. /// Trie error.
Trie(TrieError), Trie(TrieError),
/// Decoder error. /// Decoder error.
@ -51,6 +55,8 @@ impl fmt::Display for Error {
Error::IncompleteChain => write!(f, "Cannot create snapshot due to incomplete chain."), Error::IncompleteChain => write!(f, "Cannot create snapshot due to incomplete chain."),
Error::OldBlockPrunedDB => write!(f, "Attempted to create a snapshot at an old block while using \ Error::OldBlockPrunedDB => write!(f, "Attempted to create a snapshot at an old block while using \
a pruned database. Please re-run with the --pruning archive flag."), a pruned database. Please re-run with the --pruning archive flag."),
Error::MissingCode(ref missing) => write!(f, "Incomplete snapshot: {} contract codes not found.", missing.len()),
Error::UnrecognizedCodeState(state) => write!(f, "Unrecognized code encoding ({})", state),
Error::Io(ref err) => err.fmt(f), Error::Io(ref err) => err.fmt(f),
Error::Decoder(ref err) => err.fmt(f), Error::Decoder(ref err) => err.fmt(f),
Error::Trie(ref err) => err.fmt(f), Error::Trie(ref err) => err.fmt(f),

View File

@ -16,7 +16,7 @@
//! Snapshot creation, restoration, and network service. //! Snapshot creation, restoration, and network service.
use std::collections::VecDeque; use std::collections::{HashMap, HashSet, VecDeque};
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
@ -26,13 +26,15 @@ use engines::Engine;
use ids::BlockID; use ids::BlockID;
use views::BlockView; use views::BlockView;
use util::{Bytes, Hashable, HashDB, snappy, TrieDB, TrieDBMut, TrieMut}; use util::{Bytes, Hashable, HashDB, snappy};
use util::memorydb::MemoryDB;
use util::Mutex; use util::Mutex;
use util::hash::{FixedHash, H256}; use util::hash::{FixedHash, H256};
use util::journaldb::{self, Algorithm, JournalDB}; use util::journaldb::{self, Algorithm, JournalDB};
use util::kvdb::Database; use util::kvdb::Database;
use util::rlp::{DecoderError, RlpStream, Stream, UntrustedRlp, View, Compressible, RlpType}; use util::rlp::{DecoderError, RlpStream, Stream, UntrustedRlp, View, Compressible, RlpType};
use util::rlp::SHA3_NULL_RLP; use util::rlp::SHA3_NULL_RLP;
use util::trie::{TrieDB, TrieDBMut, Trie, TrieMut};
use self::account::Account; use self::account::Account;
use self::block::AbridgedBlock; use self::block::AbridgedBlock;
@ -331,6 +333,8 @@ pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter +
progress: progress, progress: progress,
}; };
let mut used_code = HashSet::new();
// account_key here is the address' hash. // account_key here is the address' hash.
for (account_key, account_data) in account_trie.iter() { for (account_key, account_data) in account_trie.iter() {
let account = Account::from_thin_rlp(account_data); let account = Account::from_thin_rlp(account_data);
@ -338,7 +342,7 @@ pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter +
let account_db = AccountDB::from_hash(db, account_key_hash); let account_db = AccountDB::from_hash(db, account_key_hash);
let fat_rlp = try!(account.to_fat_rlp(&account_db)); let fat_rlp = try!(account.to_fat_rlp(&account_db, &mut used_code));
let compressed_rlp = UntrustedRlp::new(&fat_rlp).compress(RlpType::Snapshot).to_vec(); let compressed_rlp = UntrustedRlp::new(&fat_rlp).compress(RlpType::Snapshot).to_vec();
try!(chunker.push(account_key, compressed_rlp)); try!(chunker.push(account_key, compressed_rlp));
} }
@ -402,6 +406,8 @@ impl ManifestData {
pub struct StateRebuilder { pub struct StateRebuilder {
db: Box<JournalDB>, db: Box<JournalDB>,
state_root: H256, state_root: H256,
code_map: HashMap<H256, Bytes>, // maps code hashes to code itself.
missing_code: HashMap<H256, Vec<H256>>, // maps code hashes to lists of accounts missing that code.
} }
impl StateRebuilder { impl StateRebuilder {
@ -410,6 +416,8 @@ impl StateRebuilder {
StateRebuilder { StateRebuilder {
db: journaldb::new(db.clone(), pruning, ::db::COL_STATE), db: journaldb::new(db.clone(), pruning, ::db::COL_STATE),
state_root: SHA3_NULL_RLP, state_root: SHA3_NULL_RLP,
code_map: HashMap::new(),
missing_code: HashMap::new(),
} }
} }
@ -418,41 +426,57 @@ impl StateRebuilder {
let rlp = UntrustedRlp::new(chunk); let rlp = UntrustedRlp::new(chunk);
let account_fat_rlps: Vec<_> = rlp.iter().map(|r| r.as_raw()).collect(); let account_fat_rlps: Vec<_> = rlp.iter().map(|r| r.as_raw()).collect();
let mut pairs = Vec::with_capacity(rlp.item_count()); let mut pairs = Vec::with_capacity(rlp.item_count());
let backing = self.db.backing().clone();
// initialize the pairs vector with empty values so we have slots to write into. // initialize the pairs vector with empty values so we have slots to write into.
pairs.resize(rlp.item_count(), (H256::new(), Vec::new())); pairs.resize(rlp.item_count(), (H256::new(), Vec::new()));
let chunk_size = account_fat_rlps.len() / ::num_cpus::get() + 1; let chunk_size = account_fat_rlps.len() / ::num_cpus::get() + 1;
// new code contained within this chunk.
let mut chunk_code = HashMap::new();
// build account tries in parallel. // build account tries in parallel.
// Todo [rob] keep a thread pool around so we don't do this per-chunk. // Todo [rob] keep a thread pool around so we don't do this per-chunk.
try!(scope(|scope| { try!(scope(|scope| {
let mut handles = Vec::new(); let mut handles = Vec::new();
for (account_chunk, out_pairs_chunk) in account_fat_rlps.chunks(chunk_size).zip(pairs.chunks_mut(chunk_size)) { for (account_chunk, out_pairs_chunk) in account_fat_rlps.chunks(chunk_size).zip(pairs.chunks_mut(chunk_size)) {
let mut db = self.db.boxed_clone(); let code_map = &self.code_map;
let handle: ScopedJoinHandle<Result<Box<JournalDB>, ::error::Error>> = scope.spawn(move || { let handle: ScopedJoinHandle<Result<_, ::error::Error>> = scope.spawn(move || {
try!(rebuild_account_trie(db.as_hashdb_mut(), account_chunk, out_pairs_chunk)); let mut db = MemoryDB::new();
let status = try!(rebuild_accounts(&mut db, account_chunk, out_pairs_chunk, code_map));
trace!(target: "snapshot", "thread rebuilt {} account tries", account_chunk.len()); trace!(target: "snapshot", "thread rebuilt {} account tries", account_chunk.len());
Ok(db) Ok((db, status))
}); });
handles.push(handle); handles.push(handle);
} }
// commit all account tries to the db, but only in this thread. // consolidate all edits into the main overlay.
let batch = backing.transaction();
for handle in handles { for handle in handles {
let mut thread_db = try!(handle.join()); let (thread_db, status): (MemoryDB, _) = try!(handle.join());
try!(thread_db.inject(&batch)); self.db.consolidate(thread_db);
}
try!(backing.write(batch).map_err(::util::UtilError::SimpleString));
chunk_code.extend(status.new_code);
for (addr_hash, code_hash) in status.missing_code {
self.missing_code.entry(code_hash).or_insert_with(Vec::new).push(addr_hash);
}
}
Ok::<_, ::error::Error>(()) Ok::<_, ::error::Error>(())
})); }));
// patch up all missing code. must be done after collecting all new missing code entries.
for (code_hash, code) in chunk_code {
for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) {
let mut db = AccountDBMut::from_hash(self.db.as_hashdb_mut(), addr_hash);
db.emplace(code_hash, code.clone());
}
self.code_map.insert(code_hash, code);
}
// batch trie writes // batch trie writes
{ {
@ -467,18 +491,44 @@ impl StateRebuilder {
} }
} }
let batch = backing.transaction(); let backing = self.db.backing().clone();
try!(self.db.inject(&batch)); let mut batch = backing.transaction();
try!(self.db.inject(&mut batch));
try!(backing.write(batch).map_err(::util::UtilError::SimpleString)); try!(backing.write(batch).map_err(::util::UtilError::SimpleString));
trace!(target: "snapshot", "current state root: {:?}", self.state_root); trace!(target: "snapshot", "current state root: {:?}", self.state_root);
Ok(()) Ok(())
} }
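feed splits each chunk across worker threads, each of which rebuilds its accounts into a private overlay (a MemoryDB), and only the calling thread consolidates those overlays into the main database. A simplified sketch of that fan-out and consolidate shape, assuming std::thread::scope (Rust 1.63+) with a HashMap standing in for the overlay:

// Sketch of the fan-out used above: each worker rebuilds its slice of
// accounts into a private overlay, and only the calling thread folds the
// overlays back into the main database.
use std::collections::HashMap;
use std::thread;

fn rebuild_chunk(chunk: &[(u64, Vec<u8>)]) -> HashMap<u64, Vec<u8>> {
    // Per-thread overlay; no shared mutable state while the workers run.
    chunk.iter().cloned().collect()
}

fn main() {
    let accounts: Vec<(u64, Vec<u8>)> =
        (0..16).map(|i| (i, vec![i as u8; 4])).collect();
    let workers = 4;
    let chunk_size = accounts.len() / workers + 1;

    let mut main_db: HashMap<u64, Vec<u8>> = HashMap::new();

    thread::scope(|scope| {
        let handles: Vec<_> = accounts
            .chunks(chunk_size)
            .map(|chunk| scope.spawn(move || rebuild_chunk(chunk)))
            .collect();

        // Consolidate every thread-local overlay on this thread only.
        for handle in handles {
            main_db.extend(handle.join().unwrap());
        }
    });

    assert_eq!(main_db.len(), accounts.len());
}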
/// Check for accounts missing code. Once all chunks have been fed, there should
/// be none.
pub fn check_missing(&self) -> Result<(), Error> {
let missing = self.missing_code.keys().cloned().collect::<Vec<_>>();
match missing.is_empty() {
true => Ok(()),
false => Err(Error::MissingCode(missing)),
}
}
/// Get the state root of the rebuilder. /// Get the state root of the rebuilder.
pub fn state_root(&self) -> H256 { self.state_root } pub fn state_root(&self) -> H256 { self.state_root }
} }
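check_missing and the missing_code map implement a simple deferred-resolution scheme: accounts that reference code not yet seen are parked per code hash, patched as soon as a later chunk supplies that code, and a final check fails restoration if anything is still outstanding. A self-contained sketch of that bookkeeping, with u64 standing in for H256:

use std::collections::HashMap;

// Sketch of the missing-code bookkeeping above.
#[derive(Default)]
struct CodeTracker {
    known: HashMap<u64, Vec<u8>>,    // code hash -> code
    missing: HashMap<u64, Vec<u64>>, // code hash -> accounts waiting for it
}

impl CodeTracker {
    fn account_needs(&mut self, account: u64, code_hash: u64) {
        if !self.known.contains_key(&code_hash) {
            self.missing.entry(code_hash).or_insert_with(Vec::new).push(account);
        }
    }

    // Accounts that can now be patched with this code.
    fn code_arrived(&mut self, code_hash: u64, code: Vec<u8>) -> Vec<u64> {
        let patched = self.missing.remove(&code_hash).unwrap_or_else(Vec::new);
        self.known.insert(code_hash, code);
        patched
    }

    fn check_missing(&self) -> Result<(), Vec<u64>> {
        if self.missing.is_empty() {
            Ok(())
        } else {
            Err(self.missing.keys().cloned().collect())
        }
    }
}

fn main() {
    let mut tracker = CodeTracker::default();
    tracker.account_needs(1, 0xc0de); // chunk 1: account 1 lacks code 0xc0de
    assert!(tracker.check_missing().is_err());
    let patched = tracker.code_arrived(0xc0de, b"code".to_vec()); // a later chunk carries it
    assert_eq!(patched, vec![1]);
    assert!(tracker.check_missing().is_ok());
}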
fn rebuild_account_trie(db: &mut HashDB, account_chunk: &[&[u8]], out_chunk: &mut [(H256, Bytes)]) -> Result<(), ::error::Error> { #[derive(Default)]
struct RebuiltStatus {
new_code: Vec<(H256, Bytes)>, // new code that's become available.
missing_code: Vec<(H256, H256)>, // accounts that are missing code.
}
// rebuild a set of accounts and their storage.
// returns a status recording newly-available code and any accounts whose code is still missing.
fn rebuild_accounts(
db: &mut HashDB,
account_chunk: &[&[u8]],
out_chunk: &mut [(H256, Bytes)],
code_map: &HashMap<H256, Bytes>
) -> Result<RebuiltStatus, ::error::Error>
{
let mut status = RebuiltStatus::default();
for (account_pair, out) in account_chunk.into_iter().zip(out_chunk) { for (account_pair, out) in account_chunk.into_iter().zip(out_chunk) {
let account_rlp = UntrustedRlp::new(account_pair); let account_rlp = UntrustedRlp::new(account_pair);
@ -490,14 +540,24 @@ fn rebuild_account_trie(db: &mut HashDB, account_chunk: &[&[u8]], out_chunk: &mu
let mut acct_db = AccountDBMut::from_hash(db, hash); let mut acct_db = AccountDBMut::from_hash(db, hash);
// fill out the storage trie and code while decoding. // fill out the storage trie and code while decoding.
let acc = try!(Account::from_fat_rlp(&mut acct_db, fat_rlp)); let (acc, maybe_code) = try!(Account::from_fat_rlp(&mut acct_db, fat_rlp, code_map));
let code_hash = acc.code_hash().clone();
match maybe_code {
Some(code) => status.new_code.push((code_hash, code)),
None => {
if code_hash != ::util::SHA3_EMPTY && !code_map.contains_key(&code_hash) {
status.missing_code.push((hash, code_hash));
}
}
}
acc.to_thin_rlp() acc.to_thin_rlp()
}; };
*out = (hash, thin_rlp); *out = (hash, thin_rlp);
} }
Ok(()) Ok(status)
} }
/// Proportion of blocks which we will verify `PoW` for. /// Proportion of blocks which we will verify `PoW` for.

View File

@ -125,6 +125,8 @@ impl Restoration {
try!(self.state.feed(&self.snappy_buffer[..len])); try!(self.state.feed(&self.snappy_buffer[..len]));
if self.state_chunks_left.is_empty() { if self.state_chunks_left.is_empty() {
try!(self.state.check_missing());
let root = self.state.state_root(); let root = self.state.state_root();
if root != self.final_state_root { if root != self.final_state_root {
warn!("Final restored state has wrong state root: expected {:?}, got {:?}", root, self.final_state_root); warn!("Final restored state has wrong state root: expected {:?}, got {:?}", root, self.final_state_root);

View File

@ -43,14 +43,16 @@ fn chunk_and_restore(amount: u64) {
let bc = BlockChain::new(Default::default(), &genesis, old_db.clone()); let bc = BlockChain::new(Default::default(), &genesis, old_db.clone());
// build the blockchain. // build the blockchain.
let mut batch = old_db.transaction();
for _ in 0..amount { for _ in 0..amount {
let block = canon_chain.generate(&mut finalizer).unwrap(); let block = canon_chain.generate(&mut finalizer).unwrap();
let batch = old_db.transaction(); bc.insert_block(&mut batch, &block, vec![]);
bc.insert_block(&batch, &block, vec![]);
bc.commit(); bc.commit();
old_db.write(batch).unwrap();
} }
old_db.write(batch).unwrap();
let best_hash = bc.best_block_hash(); let best_hash = bc.best_block_hash();
// snapshot it. // snapshot it.

View File

@ -24,7 +24,7 @@ use snapshot::account::Account;
use util::hash::{FixedHash, H256}; use util::hash::{FixedHash, H256};
use util::hashdb::HashDB; use util::hashdb::HashDB;
use util::trie::{Alphabet, StandardMap, SecTrieDBMut, TrieMut, ValueMode}; use util::trie::{Alphabet, StandardMap, SecTrieDBMut, TrieMut, ValueMode};
use util::trie::{TrieDB, TrieDBMut}; use util::trie::{TrieDB, TrieDBMut, Trie};
use util::rlp::SHA3_NULL_RLP; use util::rlp::SHA3_NULL_RLP;
// the proportion of accounts we will alter each tick. // the proportion of accounts we will alter each tick.
@ -51,10 +51,12 @@ impl StateProducer {
// modify existing accounts. // modify existing accounts.
let mut accounts_to_modify: Vec<_> = { let mut accounts_to_modify: Vec<_> = {
let trie = TrieDB::new(&*db, &self.state_root).unwrap(); let trie = TrieDB::new(&*db, &self.state_root).unwrap();
trie.iter() let temp = trie.iter() // bind to a local first: returning the expression directly trips the borrow checker over `trie`'s lifetime
.filter(|_| rng.gen::<f32>() < ACCOUNT_CHURN) .filter(|_| rng.gen::<f32>() < ACCOUNT_CHURN)
.map(|(k, v)| (H256::from_slice(&k), v.to_owned())) .map(|(k, v)| (H256::from_slice(&k), v.to_owned()))
.collect() .collect();
temp
}; };
// sweep once to alter storage tries. // sweep once to alter storage tries.

View File

@ -72,6 +72,7 @@ fn snap_and_restore() {
rebuilder.feed(&chunk).unwrap(); rebuilder.feed(&chunk).unwrap();
} }
rebuilder.check_missing().unwrap();
assert_eq!(rebuilder.state_root(), state_root); assert_eq!(rebuilder.state_root(), state_root);
new_db new_db
}; };

View File

@ -19,7 +19,6 @@
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use util::*; use util::*;
use pod_account::*; use pod_account::*;
use account_db::*;
use std::cell::{Ref, RefCell, Cell}; use std::cell::{Ref, RefCell, Cell};
@ -148,7 +147,7 @@ impl Account {
} }
/// Get (and cache) the contents of the trie's storage at `key`. /// Get (and cache) the contents of the trie's storage at `key`.
pub fn storage_at(&self, db: &AccountDB, key: &H256) -> H256 { pub fn storage_at(&self, db: &HashDB, key: &H256) -> H256 {
self.storage_overlay.borrow_mut().entry(key.clone()).or_insert_with(||{ self.storage_overlay.borrow_mut().entry(key.clone()).or_insert_with(||{
let db = SecTrieDB::new(db, &self.storage_root) let db = SecTrieDB::new(db, &self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \ .expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
@ -225,7 +224,7 @@ impl Account {
} }
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code. /// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &AccountDB) -> bool { pub fn cache_code(&mut self, db: &HashDB) -> bool {
// TODO: fill out self.code_cache; // TODO: fill out self.code_cache;
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty()); trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.is_cached() || self.is_cached() ||
@ -277,7 +276,7 @@ impl Account {
} }
/// Commit the `storage_overlay` to the backing DB and update `storage_root`. /// Commit the `storage_overlay` to the backing DB and update `storage_root`.
pub fn commit_storage(&mut self, trie_factory: &TrieFactory, db: &mut AccountDBMut) { pub fn commit_storage(&mut self, trie_factory: &TrieFactory, db: &mut HashDB) {
let mut t = trie_factory.from_existing(db, &mut self.storage_root) let mut t = trie_factory.from_existing(db, &mut self.storage_root)
.expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \ .expect("Account storage_root initially set to zero (valid) and only altered by SecTrieDBMut. \
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \ SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
@ -300,7 +299,7 @@ impl Account {
} }
/// Commit any unsaved code. `code_hash` will always return the hash of the `code_cache` after this. /// Commit any unsaved code. `code_hash` will always return the hash of the `code_cache` after this.
pub fn commit_code(&mut self, db: &mut AccountDBMut) { pub fn commit_code(&mut self, db: &mut HashDB) {
trace!("Commiting code of {:?} - {:?}, {:?}", self, self.code_hash.is_none(), self.code_cache.is_empty()); trace!("Commiting code of {:?} - {:?}, {:?}", self, self.code_hash.is_none(), self.code_cache.is_empty());
match (self.code_hash.is_none(), self.code_cache.is_empty()) { match (self.code_hash.is_none(), self.code_cache.is_empty()) {
(true, true) => self.code_hash = Some(SHA3_EMPTY), (true, true) => self.code_hash = Some(SHA3_EMPTY),
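The Account methods above now accept the HashDB trait rather than the concrete AccountDB wrapper, so callers can hand in whichever backing store they hold. A simplified sketch of the same idea with a small stand-in trait (KeyValueDB here is hypothetical, not an ethcore type):

use std::collections::HashMap;

// Stand-in trait; the real code uses util's HashDB.
trait KeyValueDB {
    fn get(&self, key: &str) -> Option<&Vec<u8>>;
    fn insert(&mut self, key: String, value: Vec<u8>);
}

impl KeyValueDB for HashMap<String, Vec<u8>> {
    fn get(&self, key: &str) -> Option<&Vec<u8>> {
        HashMap::get(self, key)
    }
    fn insert(&mut self, key: String, value: Vec<u8>) {
        HashMap::insert(self, key, value);
    }
}

struct Account {
    code_key: String,
}

impl Account {
    // Accepts any KeyValueDB implementation, mirroring `db: &HashDB`.
    fn cache_code(&self, db: &dyn KeyValueDB) -> bool {
        db.get(&self.code_key).is_some()
    }

    // Mirrors `db: &mut HashDB` in commit_code.
    fn commit_code(&self, db: &mut dyn KeyValueDB, code: Vec<u8>) {
        db.insert(self.code_key.clone(), code);
    }
}

fn main() {
    let mut db: HashMap<String, Vec<u8>> = HashMap::new();
    let account = Account { code_key: "0xabc".to_string() };
    assert!(!account.cache_code(&db));
    account.commit_code(&mut db, b"code".to_vec());
    assert!(account.cache_code(&db));
}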

View File

@ -18,8 +18,7 @@ use std::cell::{RefCell, RefMut};
use common::*; use common::*;
use engines::Engine; use engines::Engine;
use executive::{Executive, TransactOptions}; use executive::{Executive, TransactOptions};
use evm::Factory as EvmFactory; use factory::Factories;
use account_db::*;
use trace::FlatTrace; use trace::FlatTrace;
use pod_account::*; use pod_account::*;
use pod_state::{self, PodState}; use pod_state::{self, PodState};
@ -49,7 +48,7 @@ pub struct State {
cache: RefCell<HashMap<Address, Option<Account>>>, cache: RefCell<HashMap<Address, Option<Account>>>,
snapshots: RefCell<Vec<HashMap<Address, Option<Option<Account>>>>>, snapshots: RefCell<Vec<HashMap<Address, Option<Option<Account>>>>>,
account_start_nonce: U256, account_start_nonce: U256,
trie_factory: TrieFactory, factories: Factories,
} }
const SEC_TRIE_DB_UNWRAP_STR: &'static str = "A state can only be created with valid root. Creating a SecTrieDB with a valid root will not fail. \ const SEC_TRIE_DB_UNWRAP_STR: &'static str = "A state can only be created with valid root. Creating a SecTrieDB with a valid root will not fail. \
@ -58,11 +57,11 @@ const SEC_TRIE_DB_UNWRAP_STR: &'static str = "A state can only be created with v
impl State { impl State {
/// Creates new state with empty state root /// Creates new state with empty state root
#[cfg(test)] #[cfg(test)]
pub fn new(mut db: Box<JournalDB>, account_start_nonce: U256, trie_factory: TrieFactory) -> State { pub fn new(mut db: Box<JournalDB>, account_start_nonce: U256, factories: Factories) -> State {
let mut root = H256::new(); let mut root = H256::new();
{ {
// init trie and reset root to null // init trie and reset root to null
let _ = trie_factory.create(db.as_hashdb_mut(), &mut root); let _ = factories.trie.create(db.as_hashdb_mut(), &mut root);
} }
State { State {
@ -71,12 +70,12 @@ impl State {
cache: RefCell::new(HashMap::new()), cache: RefCell::new(HashMap::new()),
snapshots: RefCell::new(Vec::new()), snapshots: RefCell::new(Vec::new()),
account_start_nonce: account_start_nonce, account_start_nonce: account_start_nonce,
trie_factory: trie_factory, factories: factories,
} }
} }
/// Creates new state with existing state root /// Creates new state with existing state root
pub fn from_existing(db: Box<JournalDB>, root: H256, account_start_nonce: U256, trie_factory: TrieFactory) -> Result<State, TrieError> { pub fn from_existing(db: Box<JournalDB>, root: H256, account_start_nonce: U256, factories: Factories) -> Result<State, TrieError> {
if !db.as_hashdb().contains(&root) { if !db.as_hashdb().contains(&root) {
return Err(TrieError::InvalidStateRoot(root)); return Err(TrieError::InvalidStateRoot(root));
} }
@ -87,7 +86,7 @@ impl State {
cache: RefCell::new(HashMap::new()), cache: RefCell::new(HashMap::new()),
snapshots: RefCell::new(Vec::new()), snapshots: RefCell::new(Vec::new()),
account_start_nonce: account_start_nonce, account_start_nonce: account_start_nonce,
trie_factory: trie_factory, factories: factories
}; };
Ok(state) Ok(state)
@ -185,8 +184,11 @@ impl State {
/// Mutate storage of account `address` so that it is `value` for `key`. /// Mutate storage of account `address` so that it is `value` for `key`.
pub fn storage_at(&self, address: &Address, key: &H256) -> H256 { pub fn storage_at(&self, address: &Address, key: &H256) -> H256 {
self.ensure_cached(address, false, self.ensure_cached(address, false, |a| a.as_ref().map_or(H256::new(), |a| {
|a| a.as_ref().map_or(H256::new(), |a|a.storage_at(&AccountDB::from_hash(self.db.as_hashdb(), a.address_hash(address)), key))) let addr_hash = a.address_hash(address);
let db = self.factories.accountdb.readonly(self.db.as_hashdb(), addr_hash);
a.storage_at(db.as_hashdb(), key)
}))
} }
/// Mutate storage of account `a` so that it is `value` for `key`. /// Mutate storage of account `a` so that it is `value` for `key`.
@ -236,11 +238,12 @@ impl State {
/// Execute a given transaction. /// Execute a given transaction.
/// This will change the state accordingly. /// This will change the state accordingly.
pub fn apply(&mut self, env_info: &EnvInfo, engine: &Engine, vm_factory: &EvmFactory, t: &SignedTransaction, tracing: bool) -> ApplyResult { pub fn apply(&mut self, env_info: &EnvInfo, engine: &Engine, t: &SignedTransaction, tracing: bool) -> ApplyResult {
// let old = self.to_pod(); // let old = self.to_pod();
let options = TransactOptions { tracing: tracing, vm_tracing: false, check_nonce: true }; let options = TransactOptions { tracing: tracing, vm_tracing: false, check_nonce: true };
let e = try!(Executive::new(self, env_info, engine, vm_factory).transact(t, options)); let vm_factory = self.factories.vm.clone();
let e = try!(Executive::new(self, env_info, engine, &vm_factory).transact(t, options));
// TODO uncomment once to_pod() works correctly. // TODO uncomment once to_pod() works correctly.
// trace!("Applied transaction. Diff:\n{}\n", state_diff::diff_pod(&old, &self.to_pod())); // trace!("Applied transaction. Diff:\n{}\n", state_diff::diff_pod(&old, &self.to_pod()));
@ -254,27 +257,27 @@ impl State {
/// `accounts` is mutable because we may need to commit the code or storage and record that. /// `accounts` is mutable because we may need to commit the code or storage and record that.
#[cfg_attr(feature="dev", allow(match_ref_pats))] #[cfg_attr(feature="dev", allow(match_ref_pats))]
pub fn commit_into( pub fn commit_into(
trie_factory: &TrieFactory, factories: &Factories,
db: &mut HashDB, db: &mut HashDB,
root: &mut H256, root: &mut H256,
accounts: &mut HashMap<Address, accounts: &mut HashMap<Address, Option<Account>>
Option<Account>>
) -> Result<(), Error> { ) -> Result<(), Error> {
// first, commit the sub trees. // first, commit the sub trees.
// TODO: is this necessary or can we dispense with the `ref mut a` for just `a`? // TODO: is this necessary or can we dispense with the `ref mut a` for just `a`?
for (address, ref mut a) in accounts.iter_mut() { for (address, ref mut a) in accounts.iter_mut() {
match a { match a {
&mut&mut Some(ref mut account) if account.is_dirty() => { &mut&mut Some(ref mut account) if account.is_dirty() => {
let mut account_db = AccountDBMut::from_hash(db, account.address_hash(address)); let addr_hash = account.address_hash(address);
account.commit_storage(trie_factory, &mut account_db); let mut account_db = factories.accountdb.create(db, addr_hash);
account.commit_code(&mut account_db); account.commit_storage(&factories.trie, account_db.as_hashdb_mut());
account.commit_code(account_db.as_hashdb_mut());
} }
_ => {} _ => {}
} }
} }
{ {
let mut trie = trie_factory.from_existing(db, root).unwrap(); let mut trie = factories.trie.from_existing(db, root).unwrap();
for (address, ref mut a) in accounts.iter_mut() { for (address, ref mut a) in accounts.iter_mut() {
match **a { match **a {
Some(ref mut account) if account.is_dirty() => { Some(ref mut account) if account.is_dirty() => {
@ -293,7 +296,7 @@ impl State {
/// Commits our cached account changes into the trie. /// Commits our cached account changes into the trie.
pub fn commit(&mut self) -> Result<(), Error> { pub fn commit(&mut self) -> Result<(), Error> {
assert!(self.snapshots.borrow().is_empty()); assert!(self.snapshots.borrow().is_empty());
Self::commit_into(&self.trie_factory, self.db.as_hashdb_mut(), &mut self.root, &mut *self.cache.borrow_mut()) Self::commit_into(&self.factories, self.db.as_hashdb_mut(), &mut self.root, &mut *self.cache.borrow_mut())
} }
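State now carries a single Factories bundle and pulls the trie, account-db and VM factories from it, instead of threading each factory (or a vm_factory argument to apply) separately. A minimal sketch of that parameter-object shape, with empty stand-in factory types rather than the ethcore ones:

// Stand-in factory types; the real bundle holds TrieFactory, the account-db
// factory and the EVM factory.
#[derive(Clone, Default)]
struct TrieFactory;
#[derive(Clone, Default)]
struct AccountDbFactory;
#[derive(Clone, Default)]
struct VmFactory;

#[derive(Clone, Default)]
struct Factories {
    trie: TrieFactory,
    accountdb: AccountDbFactory,
    vm: VmFactory,
}

struct State {
    factories: Factories,
}

impl State {
    fn new(factories: Factories) -> Self {
        State { factories }
    }

    fn apply(&mut self) {
        // The VM factory is no longer a parameter; it comes from the bundle.
        let _vm = self.factories.vm.clone();
        // ... execute the transaction with the VM factory ...
    }

    fn commit(&mut self) {
        // Likewise, commit uses the bundled trie and account-db factories.
        let _trie = self.factories.trie.clone();
        let _accountdb = self.factories.accountdb.clone();
        // ... commit storage and code, then the account trie ...
    }
}

fn main() {
    let mut state = State::new(Factories::default());
    state.apply();
    state.commit();
}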
/// Clear state cache /// Clear state cache
@ -351,7 +354,7 @@ impl State {
where F: FnOnce(&Option<Account>) -> U { where F: FnOnce(&Option<Account>) -> U {
let have_key = self.cache.borrow().contains_key(a); let have_key = self.cache.borrow().contains_key(a);
if !have_key { if !have_key {
let db = self.trie_factory.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR); let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
let maybe_acc = match db.get(a) { let maybe_acc = match db.get(a) {
Ok(acc) => acc.map(Account::from_rlp), Ok(acc) => acc.map(Account::from_rlp),
Err(e) => panic!("Potential DB corruption encountered: {}", e), Err(e) => panic!("Potential DB corruption encountered: {}", e),
@ -361,7 +364,8 @@ impl State {
if require_code { if require_code {
if let Some(ref mut account) = self.cache.borrow_mut().get_mut(a).unwrap().as_mut() { if let Some(ref mut account) = self.cache.borrow_mut().get_mut(a).unwrap().as_mut() {
let addr_hash = account.address_hash(a); let addr_hash = account.address_hash(a);
account.cache_code(&AccountDB::from_hash(self.db.as_hashdb(), addr_hash)); let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), addr_hash);
account.cache_code(accountdb.as_hashdb());
} }
} }
@ -380,7 +384,7 @@ impl State {
{ {
let contains_key = self.cache.borrow().contains_key(a); let contains_key = self.cache.borrow().contains_key(a);
if !contains_key { if !contains_key {
let db = self.trie_factory.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR); let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
let maybe_acc = match db.get(a) { let maybe_acc = match db.get(a) {
Ok(acc) => acc.map(Account::from_rlp), Ok(acc) => acc.map(Account::from_rlp),
Err(e) => panic!("Potential DB corruption encountered: {}", e), Err(e) => panic!("Potential DB corruption encountered: {}", e),
@ -400,7 +404,8 @@ impl State {
let account = c.get_mut(a).unwrap().as_mut().unwrap(); let account = c.get_mut(a).unwrap().as_mut().unwrap();
if require_code { if require_code {
let addr_hash = account.address_hash(a); let addr_hash = account.address_hash(a);
account.cache_code(&AccountDB::from_hash(self.db.as_hashdb(), addr_hash)); let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), addr_hash);
account.cache_code(accountdb.as_hashdb());
} }
account account
}) })
@ -421,7 +426,7 @@ impl Clone for State {
cache: RefCell::new(self.cache.borrow().clone()), cache: RefCell::new(self.cache.borrow().clone()),
snapshots: RefCell::new(self.snapshots.borrow().clone()), snapshots: RefCell::new(self.snapshots.borrow().clone()),
account_start_nonce: self.account_start_nonce.clone(), account_start_nonce: self.account_start_nonce.clone(),
trie_factory: self.trie_factory.clone(), factories: self.factories.clone(),
} }
} }
} }
@ -464,8 +469,7 @@ fn should_apply_create_transaction() {
}.sign(&"".sha3()); }.sign(&"".sha3());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
subtraces: 0, subtraces: 0,
@ -525,8 +529,7 @@ fn should_trace_failed_create_transaction() {
}.sign(&"".sha3()); }.sign(&"".sha3());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
action: trace::Action::Create(trace::Create { action: trace::Action::Create(trace::Create {
@ -564,8 +567,7 @@ fn should_trace_call_transaction() {
state.init_code(&0xa.into(), FromHex::from_hex("6000").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("6000").unwrap());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
action: trace::Action::Call(trace::Call { action: trace::Action::Call(trace::Call {
@ -607,8 +609,7 @@ fn should_trace_basic_call_transaction() {
}.sign(&"".sha3()); }.sign(&"".sha3());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
action: trace::Action::Call(trace::Call { action: trace::Action::Call(trace::Call {
@ -649,8 +650,7 @@ fn should_trace_call_transaction_to_builtin() {
data: vec![], data: vec![],
}.sign(&"".sha3()); }.sign(&"".sha3());
let vm_factory = Default::default(); let result = state.apply(&info, engine, &t, true).unwrap();
let result = state.apply(&info, engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
@ -693,8 +693,7 @@ fn should_not_trace_subcall_transaction_to_builtin() {
}.sign(&"".sha3()); }.sign(&"".sha3());
state.init_code(&0xa.into(), FromHex::from_hex("600060006000600060006001610be0f1").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("600060006000600060006001610be0f1").unwrap());
let vm_factory = Default::default(); let result = state.apply(&info, engine, &t, true).unwrap();
let result = state.apply(&info, engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
@ -738,8 +737,7 @@ fn should_not_trace_callcode() {
state.init_code(&0xa.into(), FromHex::from_hex("60006000600060006000600b611000f2").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("60006000600060006000600b611000f2").unwrap());
state.init_code(&0xb.into(), FromHex::from_hex("6000").unwrap()); state.init_code(&0xb.into(), FromHex::from_hex("6000").unwrap());
let vm_factory = Default::default(); let result = state.apply(&info, engine, &t, true).unwrap();
let result = state.apply(&info, engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
@ -801,8 +799,7 @@ fn should_not_trace_delegatecall() {
state.init_code(&0xa.into(), FromHex::from_hex("6000600060006000600b618000f4").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("6000600060006000600b618000f4").unwrap());
state.init_code(&0xb.into(), FromHex::from_hex("6000").unwrap()); state.init_code(&0xb.into(), FromHex::from_hex("6000").unwrap());
let vm_factory = Default::default(); let result = state.apply(&info, engine, &t, true).unwrap();
let result = state.apply(&info, engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
@ -861,8 +858,7 @@ fn should_trace_failed_call_transaction() {
state.init_code(&0xa.into(), FromHex::from_hex("5b600056").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("5b600056").unwrap());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
action: trace::Action::Call(trace::Call { action: trace::Action::Call(trace::Call {
@ -903,8 +899,7 @@ fn should_trace_call_with_subcall_transaction() {
state.init_code(&0xa.into(), FromHex::from_hex("60006000600060006000600b602b5a03f1").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("60006000600060006000600b602b5a03f1").unwrap());
state.init_code(&0xb.into(), FromHex::from_hex("6000").unwrap()); state.init_code(&0xb.into(), FromHex::from_hex("6000").unwrap());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
@ -963,8 +958,7 @@ fn should_trace_call_with_basic_subcall_transaction() {
state.init_code(&0xa.into(), FromHex::from_hex("60006000600060006045600b6000f1").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("60006000600060006045600b6000f1").unwrap());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
subtraces: 1, subtraces: 1,
@ -1019,8 +1013,7 @@ fn should_not_trace_call_with_invalid_basic_subcall_transaction() {
state.init_code(&0xa.into(), FromHex::from_hex("600060006000600060ff600b6000f1").unwrap()); // not enough funds. state.init_code(&0xa.into(), FromHex::from_hex("600060006000600060ff600b6000f1").unwrap()); // not enough funds.
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
subtraces: 0, subtraces: 0,
@ -1064,8 +1057,7 @@ fn should_trace_failed_subcall_transaction() {
state.init_code(&0xa.into(), FromHex::from_hex("60006000600060006000600b602b5a03f1").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("60006000600060006000600b602b5a03f1").unwrap());
state.init_code(&0xb.into(), FromHex::from_hex("5b600056").unwrap()); state.init_code(&0xb.into(), FromHex::from_hex("5b600056").unwrap());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
subtraces: 1, subtraces: 1,
@ -1122,8 +1114,7 @@ fn should_trace_call_with_subcall_with_subcall_transaction() {
state.init_code(&0xb.into(), FromHex::from_hex("60006000600060006000600c602b5a03f1").unwrap()); state.init_code(&0xb.into(), FromHex::from_hex("60006000600060006000600c602b5a03f1").unwrap());
state.init_code(&0xc.into(), FromHex::from_hex("6000").unwrap()); state.init_code(&0xc.into(), FromHex::from_hex("6000").unwrap());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
subtraces: 1, subtraces: 1,
@ -1198,8 +1189,7 @@ fn should_trace_failed_subcall_with_subcall_transaction() {
state.init_code(&0xb.into(), FromHex::from_hex("60006000600060006000600c602b5a03f1505b601256").unwrap()); state.init_code(&0xb.into(), FromHex::from_hex("60006000600060006000600c602b5a03f1505b601256").unwrap());
state.init_code(&0xc.into(), FromHex::from_hex("6000").unwrap()); state.init_code(&0xc.into(), FromHex::from_hex("6000").unwrap());
state.add_balance(t.sender().as_ref().unwrap(), &(100.into())); state.add_balance(t.sender().as_ref().unwrap(), &(100.into()));
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
@ -1271,8 +1261,7 @@ fn should_trace_suicide() {
state.init_code(&0xa.into(), FromHex::from_hex("73000000000000000000000000000000000000000bff").unwrap()); state.init_code(&0xa.into(), FromHex::from_hex("73000000000000000000000000000000000000000bff").unwrap());
state.add_balance(&0xa.into(), &50.into()); state.add_balance(&0xa.into(), &50.into());
state.add_balance(t.sender().as_ref().unwrap(), &100.into()); state.add_balance(t.sender().as_ref().unwrap(), &100.into());
let vm_factory = Default::default(); let result = state.apply(&info, &engine, &t, true).unwrap();
let result = state.apply(&info, &engine, &vm_factory, &t, true).unwrap();
let expected_trace = vec![FlatTrace { let expected_trace = vec![FlatTrace {
trace_address: Default::default(), trace_address: Default::default(),
subtraces: 1, subtraces: 1,

View File

@ -14,6 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use ethkey::KeyPair;
use io::*; use io::*;
use client::{BlockChainClient, Client, ClientConfig}; use client::{BlockChainClient, Client, ClientConfig};
use common::*; use common::*;
@ -139,7 +140,6 @@ pub fn generate_dummy_client_with_spec_and_data<F>(get_test_spec: F, block_numbe
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_journal_db();
let mut db = db_result.take(); let mut db = db_result.take();
test_spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); test_spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
let vm_factory = Default::default();
let genesis_header = test_spec.genesis_header(); let genesis_header = test_spec.genesis_header();
let mut rolling_timestamp = 40; let mut rolling_timestamp = 40;
@ -156,7 +156,6 @@ pub fn generate_dummy_client_with_spec_and_data<F>(get_test_spec: F, block_numbe
// forge block. // forge block.
let mut b = OpenBlock::new( let mut b = OpenBlock::new(
test_engine, test_engine,
&vm_factory,
Default::default(), Default::default(),
false, false,
db, db,
@ -260,9 +259,9 @@ pub fn generate_dummy_blockchain(block_number: u32) -> GuardedTempResult<BlockCh
let db = new_db(temp.as_str()); let db = new_db(temp.as_str());
let bc = BlockChain::new(BlockChainConfig::default(), &create_unverifiable_block(0, H256::zero()), db.clone()); let bc = BlockChain::new(BlockChainConfig::default(), &create_unverifiable_block(0, H256::zero()), db.clone());
let batch = db.transaction(); let mut batch = db.transaction();
for block_order in 1..block_number { for block_order in 1..block_number {
bc.insert_block(&batch, &create_unverifiable_block(block_order, bc.best_block_hash()), vec![]); bc.insert_block(&mut batch, &create_unverifiable_block(block_order, bc.best_block_hash()), vec![]);
bc.commit(); bc.commit();
} }
db.write(batch).unwrap(); db.write(batch).unwrap();
@ -279,9 +278,9 @@ pub fn generate_dummy_blockchain_with_extra(block_number: u32) -> GuardedTempRes
let bc = BlockChain::new(BlockChainConfig::default(), &create_unverifiable_block(0, H256::zero()), db.clone()); let bc = BlockChain::new(BlockChainConfig::default(), &create_unverifiable_block(0, H256::zero()), db.clone());
let batch = db.transaction(); let mut batch = db.transaction();
for block_order in 1..block_number { for block_order in 1..block_number {
bc.insert_block(&batch, &create_unverifiable_block_with_extra(block_order, bc.best_block_hash(), None), vec![]); bc.insert_block(&mut batch, &create_unverifiable_block_with_extra(block_order, bc.best_block_hash(), None), vec![]);
bc.commit(); bc.commit();
} }
db.write(batch).unwrap(); db.write(batch).unwrap();

View File

@ -142,7 +142,7 @@ impl<T> TraceDB<T> where T: DatabaseExtras {
false => [0x0] false => [0x0]
}; };
let batch = DBTransaction::new(&tracesdb); let mut batch = DBTransaction::new(&tracesdb);
batch.put(db::COL_TRACE, b"enabled", &encoded_tracing); batch.put(db::COL_TRACE, b"enabled", &encoded_tracing);
batch.put(db::COL_TRACE, b"version", TRACE_DB_VER); batch.put(db::COL_TRACE, b"version", TRACE_DB_VER);
tracesdb.write(batch).unwrap(); tracesdb.write(batch).unwrap();
@ -261,7 +261,14 @@ impl<T> TraceDatabase for TraceDB<T> where T: DatabaseExtras {
/// Traces of import request's enacted blocks are expected to be already in database /// Traces of import request's enacted blocks are expected to be already in database
/// or to be the currently inserted trace. /// or to be the currently inserted trace.
fn import(&self, batch: &DBTransaction, request: ImportRequest) { fn import(&self, batch: &mut DBTransaction, request: ImportRequest) {
// valid (canon): retracted 0, enacted 1 => false, true,
// valid (branch): retracted 0, enacted 0 => false, false,
// valid (bbcc): retracted 1, enacted 1 => true, true,
// invalid: retracted 1, enacted 0 => true, false,
let ret = request.retracted != 0;
let ena = !request.enacted.is_empty();
assert!(!(ret && !ena));
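A minimal standalone sketch of the invariant this assertion encodes (plain Rust; the function name and numeric arguments are illustrative, not the actual TraceDB types):

    // A request that retracts blocks but enacts none is inconsistent and is
    // rejected; every other combination from the table above is accepted.
    fn import_request_is_consistent(retracted: usize, enacted_len: usize) -> bool {
        let ret = retracted != 0;
        let ena = enacted_len != 0;
        !(ret && !ena)
    }

    assert!(import_request_is_consistent(0, 1));  // canon
    assert!(import_request_is_consistent(0, 0));  // branch
    assert!(import_request_is_consistent(1, 1));  // retraction with re-enaction (bbcc)
    assert!(!import_request_is_consistent(1, 0)); // invalid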
// fast return if tracing is disabled // fast return if tracing is disabled
if !self.tracing_enabled() { if !self.tracing_enabled() {
return; return;
@ -278,7 +285,7 @@ impl<T> TraceDatabase for TraceDB<T> where T: DatabaseExtras {
} }
// now let's rebuild the blooms // now let's rebuild the blooms
{ if !request.enacted.is_empty() {
let range_start = request.block_number as Number + 1 - request.enacted.len(); let range_start = request.block_number as Number + 1 - request.enacted.len();
let range_end = range_start + request.retracted; let range_end = range_start + request.retracted;
let replaced_range = range_start..range_end; let replaced_range = range_start..range_end;
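A worked example of the bloom range computed above (numbers are illustrative): importing block_number 10 with 3 enacted hashes and 2 retracted ones gives range_start = 10 + 1 - 3 = 8 and range_end = 8 + 2 = 10, so the blooms for the replaced range 8..10 are rebuilt; with the new guard, a request with no enacted blocks now skips this rebuild entirely.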
@ -604,8 +611,8 @@ mod tests {
// import block 0 // import block 0
let request = create_simple_import_request(0, block_0.clone()); let request = create_simple_import_request(0, block_0.clone());
let batch = DBTransaction::new(&db); let mut batch = DBTransaction::new(&db);
tracedb.import(&batch, request); tracedb.import(&mut batch, request);
db.write(batch).unwrap(); db.write(batch).unwrap();
let filter = Filter { let filter = Filter {
@ -620,8 +627,8 @@ mod tests {
// import block 1 // import block 1
let request = create_simple_import_request(1, block_1.clone()); let request = create_simple_import_request(1, block_1.clone());
let batch = DBTransaction::new(&db); let mut batch = DBTransaction::new(&db);
tracedb.import(&batch, request); tracedb.import(&mut batch, request);
db.write(batch).unwrap(); db.write(batch).unwrap();
let filter = Filter { let filter = Filter {
@ -679,8 +686,8 @@ mod tests {
// import block 0 // import block 0
let request = create_simple_import_request(0, block_0.clone()); let request = create_simple_import_request(0, block_0.clone());
let batch = DBTransaction::new(&db); let mut batch = DBTransaction::new(&db);
tracedb.import(&batch, request); tracedb.import(&mut batch, request);
db.write(batch).unwrap(); db.write(batch).unwrap();
} }

View File

@ -52,11 +52,12 @@ fn update_trace_address(traces: Vec<FlatTrace>) -> Vec<FlatTrace> {
let mut subtrace_subtraces_left = 0; let mut subtrace_subtraces_left = 0;
traces.into_iter().map(|mut trace| { traces.into_iter().map(|mut trace| {
let is_top_subtrace = trace.trace_address.is_empty(); let is_top_subtrace = trace.trace_address.is_empty();
let is_subtrace = trace.trace_address.len() == 1;
trace.trace_address.push_front(top_subtrace_index); trace.trace_address.push_front(top_subtrace_index);
if is_top_subtrace { if is_top_subtrace {
subtrace_subtraces_left = trace.subtraces; subtrace_subtraces_left = trace.subtraces;
} else { } else if is_subtrace {
subtrace_subtraces_left -= 1; subtrace_subtraces_left -= 1;
} }

View File

@ -121,7 +121,7 @@ pub trait Database {
fn tracing_enabled(&self) -> bool; fn tracing_enabled(&self) -> bool;
/// Imports new block traces. /// Imports new block traces.
fn import(&self, batch: &DBTransaction, request: ImportRequest); fn import(&self, batch: &mut DBTransaction, request: ImportRequest);
/// Returns localized trace at given position. /// Returns localized trace at given position.
fn trace(&self, block_number: BlockNumber, tx_position: usize, trace_position: Vec<usize>) -> Option<LocalizedTrace>; fn trace(&self, block_number: BlockNumber, tx_position: usize, trace_position: Vec<usize>) -> Option<LocalizedTrace>;

View File

@ -16,18 +16,16 @@
//! Transaction data structure. //! Transaction data structure.
use util::{H256, Address, U256, H520};
use std::ops::Deref; use std::ops::Deref;
use util::rlp::*;
use util::sha3::*;
use util::{UtilError, CryptoError, Bytes, Signature, Secret, ec};
use util::crypto::{signature_from_rsv, signature_to_rsv};
use std::cell::*; use std::cell::*;
use util::rlp::*;
use util::sha3::Hashable;
use util::{H256, Address, U256, Bytes};
use ethkey::{Signature, sign, Secret, recover, public_to_address, Error as EthkeyError};
use error::*; use error::*;
use evm::Schedule; use evm::Schedule;
use header::BlockNumber; use header::BlockNumber;
use ethjson; use ethjson;
use ethstore::ethkey::Signature as EthkeySignature;
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq, Binary)]
/// Transaction action type. /// Transaction action type.
@ -139,19 +137,17 @@ impl Transaction {
/// Signs the transaction as coming from `sender`. /// Signs the transaction as coming from `sender`.
pub fn sign(self, secret: &Secret) -> SignedTransaction { pub fn sign(self, secret: &Secret) -> SignedTransaction {
let sig = ec::sign(secret, &self.hash()).unwrap(); let sig = sign(secret, &self.hash()).unwrap();
self.with_signature(sig.into()) self.with_signature(sig)
} }
/// Signs the transaction with signature. /// Signs the transaction with signature.
pub fn with_signature(self, sig: EthkeySignature) -> SignedTransaction { pub fn with_signature(self, sig: Signature) -> SignedTransaction {
let sig: H520 = sig.into();
let (r, s, v) = signature_to_rsv(&sig);
SignedTransaction { SignedTransaction {
unsigned: self, unsigned: self,
r: r, r: sig.r().into(),
s: s, s: sig.s().into(),
v: v + 27, v: sig.v() + 27,
hash: Cell::new(None), hash: Cell::new(None),
sender: Cell::new(None), sender: Cell::new(None),
} }
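A tiny worked illustration of the v offset introduced above, together with the standard_v() mapping shown below (values are illustrative):

    // ethkey produces a recovery id of 0 or 1; the transaction stores 27/28,
    // and standard_v() maps it back for public-key recovery.
    let recovery_id: u8 = 1;
    let v = recovery_id + 27;                        // value stored in the transaction
    let standard_v = match v { 27 => 0, 28 => 1, _ => 4 };
    assert_eq!(standard_v, recovery_id);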
@ -290,12 +286,14 @@ impl SignedTransaction {
pub fn standard_v(&self) -> u8 { match self.v { 27 => 0, 28 => 1, _ => 4 } } pub fn standard_v(&self) -> u8 { match self.v { 27 => 0, 28 => 1, _ => 4 } }
/// Construct a signature object from the sig. /// Construct a signature object from the sig.
pub fn signature(&self) -> Signature { signature_from_rsv(&From::from(&self.r), &From::from(&self.s), self.standard_v()) } pub fn signature(&self) -> Signature {
Signature::from_rsv(&self.r.into(), &self.s.into(), self.standard_v())
}
/// Checks whether the signature has a low 's' value. /// Checks whether the signature has a low 's' value.
pub fn check_low_s(&self) -> Result<(), Error> { pub fn check_low_s(&self) -> Result<(), Error> {
if !ec::is_low_s(&self.s) { if !self.signature().is_low_s() {
Err(Error::Util(UtilError::Crypto(CryptoError::InvalidSignature))) Err(EthkeyError::InvalidSignature.into())
} else { } else {
Ok(()) Ok(())
} }
@ -307,7 +305,7 @@ impl SignedTransaction {
match sender { match sender {
Some(s) => Ok(s), Some(s) => Ok(s),
None => { None => {
let s = Address::from(try!(ec::recover(&self.signature(), &self.unsigned.hash())).sha3()); let s = public_to_address(&try!(recover(&self.signature(), &self.unsigned.hash())));
self.sender.set(Some(s)); self.sender.set(Some(s));
Ok(s) Ok(s)
} }
@ -319,8 +317,8 @@ impl SignedTransaction {
#[cfg(test)] #[cfg(test)]
#[cfg(feature = "json-tests")] #[cfg(feature = "json-tests")]
pub fn validate(self, schedule: &Schedule, require_low: bool) -> Result<SignedTransaction, Error> { pub fn validate(self, schedule: &Schedule, require_low: bool) -> Result<SignedTransaction, Error> {
if require_low && !ec::is_low_s(&self.s) { if require_low && !self.signature().is_low_s() {
return Err(Error::Util(UtilError::Crypto(CryptoError::InvalidSignature))); return Err(EthkeyError::InvalidSignature.into())
} }
try!(self.sender()); try!(self.sender());
if self.gas < U256::from(self.gas_required(&schedule)) { if self.gas < U256::from(self.gas_required(&schedule)) {
@ -368,7 +366,9 @@ fn sender_test() {
#[test] #[test]
fn signing() { fn signing() {
let key = ::util::crypto::KeyPair::create().unwrap(); use ethkey::{Random, Generator};
let key = Random.generate().unwrap();
let t = Transaction { let t = Transaction {
action: Action::Create, action: Action::Create,
nonce: U256::from(42), nonce: U256::from(42),

View File

@ -228,6 +228,7 @@ fn verify_block_integrity(block: &[u8], transactions_root: &H256, uncles_hash: &
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use util::*; use util::*;
use ethkey::{Random, Generator};
use header::*; use header::*;
use verification::*; use verification::*;
use blockchain::extras::*; use blockchain::extras::*;
@ -355,7 +356,7 @@ mod tests {
good.timestamp = 40; good.timestamp = 40;
good.number = 10; good.number = 10;
let keypair = KeyPair::create().unwrap(); let keypair = Random.generate().unwrap();
let tr1 = Transaction { let tr1 = Transaction {
action: Action::Create, action: Action::Create,

ethcrypto/Cargo.toml (new file, 12 lines)
View File

@ -0,0 +1,12 @@
[package]
name = "ethcrypto"
version = "0.1.0"
authors = ["debris <marek.kotewicz@gmail.com>"]
[dependencies]
rust-crypto = "0.2.36"
tiny-keccak = "1.0"
eth-secp256k1 = { git = "https://github.com/ethcore/rust-secp256k1" }
ethkey = { path = "../ethkey" }
bigint = { path = "../util/bigint" }

ethcrypto/src/lib.rs (new file, 333 lines)
View File

@ -0,0 +1,333 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Crypto utils used by ethstore and network.
extern crate bigint;
extern crate tiny_keccak;
extern crate crypto as rcrypto;
extern crate secp256k1;
extern crate ethkey;
use tiny_keccak::Keccak;
use rcrypto::pbkdf2::pbkdf2;
use rcrypto::scrypt::{scrypt, ScryptParams};
use rcrypto::sha2::Sha256;
use rcrypto::hmac::Hmac;
use secp256k1::Error as SecpError;
pub const KEY_LENGTH: usize = 32;
pub const KEY_ITERATIONS: usize = 10240;
pub const KEY_LENGTH_AES: usize = KEY_LENGTH / 2;
#[derive(PartialEq, Debug)]
pub enum Error {
Secp(SecpError),
InvalidMessage,
}
impl From<SecpError> for Error {
fn from(e: SecpError) -> Self {
Error::Secp(e)
}
}
pub trait Keccak256<T> {
fn keccak256(&self) -> T where T: Sized;
}
impl Keccak256<[u8; 32]> for [u8] {
fn keccak256(&self) -> [u8; 32] {
let mut keccak = Keccak::new_keccak256();
let mut result = [0u8; 32];
keccak.update(self);
keccak.finalize(&mut result);
result
}
}
pub fn derive_key_iterations(password: &str, salt: &[u8; 32], c: u32) -> (Vec<u8>, Vec<u8>) {
let mut h_mac = Hmac::new(Sha256::new(), password.as_bytes());
let mut derived_key = vec![0u8; KEY_LENGTH];
pbkdf2(&mut h_mac, salt, c, &mut derived_key);
let derived_right_bits = &derived_key[0..KEY_LENGTH_AES];
let derived_left_bits = &derived_key[KEY_LENGTH_AES..KEY_LENGTH];
(derived_right_bits.to_vec(), derived_left_bits.to_vec())
}
pub fn derive_key_scrypt(password: &str, salt: &[u8; 32], n: u32, p: u32, r: u32) -> (Vec<u8>, Vec<u8>) {
let mut derived_key = vec![0u8; KEY_LENGTH];
let scrypt_params = ScryptParams::new(n.trailing_zeros() as u8, r, p);
scrypt(password.as_bytes(), salt, &scrypt_params, &mut derived_key);
let derived_right_bits = &derived_key[0..KEY_LENGTH_AES];
let derived_left_bits = &derived_key[KEY_LENGTH_AES..KEY_LENGTH];
(derived_right_bits.to_vec(), derived_left_bits.to_vec())
}
pub fn derive_mac(derived_left_bits: &[u8], cipher_text: &[u8]) -> Vec<u8> {
let mut mac = vec![0u8; KEY_LENGTH_AES + cipher_text.len()];
mac[0..KEY_LENGTH_AES].copy_from_slice(derived_left_bits);
mac[KEY_LENGTH_AES..cipher_text.len() + KEY_LENGTH_AES].copy_from_slice(cipher_text);
mac
}
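A hedged usage sketch for the helpers above, assuming the conventional keystore split in which the first half of the derived key encrypts and the second half feeds the MAC; the password, salt and ciphertext here are placeholders:

    use ethcrypto::{derive_key_iterations, derive_mac, Keccak256, KEY_ITERATIONS};

    let salt = [0u8; 32];                      // placeholder salt
    let cipher_text = vec![0u8; 16];           // placeholder ciphertext
    let (derived_right, derived_left) = derive_key_iterations("password", &salt, KEY_ITERATIONS as u32);
    let _aes_key = derived_right;              // would be fed to aes::encrypt/decrypt
    let mac = derive_mac(&derived_left, &cipher_text).keccak256();
    assert_eq!(mac.len(), 32);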
/// AES encryption
pub mod aes {
use rcrypto::blockmodes::{CtrMode, CbcDecryptor, PkcsPadding};
use rcrypto::aessafe::{AesSafe128Encryptor, AesSafe128Decryptor};
use rcrypto::symmetriccipher::{Encryptor, Decryptor, SymmetricCipherError};
use rcrypto::buffer::{RefReadBuffer, RefWriteBuffer, WriteBuffer};
/// Encrypt a message
pub fn encrypt(k: &[u8], iv: &[u8], plain: &[u8], dest: &mut [u8]) {
let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec());
encryptor.encrypt(&mut RefReadBuffer::new(plain), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
}
/// Decrypt a message
pub fn decrypt(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) {
let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec());
encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
}
/// Decrypt a message using cbc mode
pub fn decrypt_cbc(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) -> Result<usize, SymmetricCipherError> {
let mut encryptor = CbcDecryptor::new(AesSafe128Decryptor::new(k), PkcsPadding, iv.to_vec());
let len = dest.len();
let mut buffer = RefWriteBuffer::new(dest);
try!(encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut buffer, true));
Ok(len - buffer.remaining())
}
}
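A small roundtrip sketch for the CTR helpers above (key, IV and plaintext are placeholders); since CTR decryption reuses the keystream, the same call shape applies in both directions:

    use ethcrypto::aes;

    let key = [1u8; 16];                // AES-128 key
    let iv = [2u8; 16];                 // 16-byte IV / initial counter block
    let plain = b"attack at dawn!!";    // any length works with CTR
    let mut encrypted = vec![0u8; plain.len()];
    let mut decrypted = vec![0u8; plain.len()];
    aes::encrypt(&key, &iv, &plain[..], &mut encrypted);
    aes::decrypt(&key, &iv, &encrypted, &mut decrypted);
    assert_eq!(&decrypted[..], &plain[..]);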
/// ECDH functions
#[cfg_attr(feature="dev", allow(similar_names))]
pub mod ecdh {
use secp256k1::{ecdh, key};
use ethkey::{Secret, Public, SECP256K1};
use Error;
/// Agree on a shared secret
pub fn agree(secret: &Secret, public: &Public) -> Result<Secret, Error> {
let context = &SECP256K1;
let pdata = {
let mut temp = [4u8; 65];
(&mut temp[1..65]).copy_from_slice(&public[0..64]);
temp
};
let publ = try!(key::PublicKey::from_slice(context, &pdata));
// no way to create SecretKey from raw byte array.
let sec: &key::SecretKey = unsafe { ::std::mem::transmute(secret) };
let shared = ecdh::SharedSecret::new_raw(context, &publ, sec);
let mut s = Secret::default();
s.copy_from_slice(&shared[0..32]);
Ok(s)
}
}
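A symmetry sketch for ecdh::agree, reusing the ethkey generator that appears elsewhere in this diff: both parties derive the same shared secret.

    use ethkey::{Random, Generator};
    use ethcrypto::ecdh;

    let alice = Random.generate().unwrap();
    let bob = Random.generate().unwrap();
    let shared_ab = ecdh::agree(alice.secret(), bob.public()).unwrap();
    let shared_ba = ecdh::agree(bob.secret(), alice.public()).unwrap();
    assert_eq!(shared_ab, shared_ba);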
/// ECIES function
#[cfg_attr(feature="dev", allow(similar_names))]
pub mod ecies {
use rcrypto::digest::Digest;
use rcrypto::sha2::Sha256;
use rcrypto::hmac::Hmac;
use rcrypto::mac::Mac;
use bigint::hash::{FixedHash, H128};
use ethkey::{Random, Generator, Public, Secret};
use {Error, ecdh, aes, Keccak256};
/// Encrypt a message with a public key
pub fn encrypt(public: &Public, shared_mac: &[u8], plain: &[u8]) -> Result<Vec<u8>, Error> {
let r = Random.generate().unwrap();
let z = try!(ecdh::agree(r.secret(), public));
let mut key = [0u8; 32];
let mut mkey = [0u8; 32];
kdf(&z, &[0u8; 0], &mut key);
let mut hasher = Sha256::new();
let mkey_material = &key[16..32];
hasher.input(mkey_material);
hasher.result(&mut mkey);
let ekey = &key[0..16];
let mut msg = vec![0u8; (1 + 64 + 16 + plain.len() + 32)];
msg[0] = 0x04u8;
{
let msgd = &mut msg[1..];
msgd[0..64].copy_from_slice(r.public());
let iv = H128::random();
msgd[64..80].copy_from_slice(&iv);
{
let cipher = &mut msgd[(64 + 16)..(64 + 16 + plain.len())];
aes::encrypt(ekey, &iv, plain, cipher);
}
let mut hmac = Hmac::new(Sha256::new(), &mkey);
{
let cipher_iv = &msgd[64..(64 + 16 + plain.len())];
hmac.input(cipher_iv);
}
hmac.input(shared_mac);
hmac.raw_result(&mut msgd[(64 + 16 + plain.len())..]);
}
Ok(msg)
}
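For reference, reading the code above, the produced message is laid out as: one type byte (0x04), the 64-byte ephemeral public key, a random 16-byte IV, the ciphertext, and a 32-byte HMAC-SHA256 over IV || ciphertext plus shared_mac; this is exactly what decrypt() below unpacks with meta_len = 1 + 64 + 16 + 32.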
/// Encrypt a message with a public key
pub fn encrypt_single_message(public: &Public, plain: &[u8]) -> Result<Vec<u8>, Error> {
let r = Random.generate().unwrap();
let z = try!(ecdh::agree(r.secret(), public));
let mut key = [0u8; 32];
let mut mkey = [0u8; 32];
kdf(&z, &[0u8; 0], &mut key);
let mut hasher = Sha256::new();
let mkey_material = &key[16..32];
hasher.input(mkey_material);
hasher.result(&mut mkey);
let ekey = &key[0..16];
let mut msgd = vec![0u8; (64 + plain.len())];
{
r.public().copy_to(&mut msgd[0..64]);
let iv = H128::from_slice(&z.keccak256()[0..16]);
{
let cipher = &mut msgd[64..(64 + plain.len())];
aes::encrypt(ekey, &iv, plain, cipher);
}
}
Ok(msgd)
}
/// Decrypt a message with a secret key
pub fn decrypt(secret: &Secret, shared_mac: &[u8], encrypted: &[u8]) -> Result<Vec<u8>, Error> {
let meta_len = 1 + 64 + 16 + 32;
if encrypted.len() < meta_len || encrypted[0] < 2 || encrypted[0] > 4 {
return Err(Error::InvalidMessage); //invalid message: publickey
}
let e = &encrypted[1..];
let p = Public::from_slice(&e[0..64]);
let z = try!(ecdh::agree(secret, &p));
let mut key = [0u8; 32];
kdf(&z, &[0u8; 0], &mut key);
let ekey = &key[0..16];
let mkey_material = &key[16..32];
let mut hasher = Sha256::new();
let mut mkey = [0u8; 32];
hasher.input(mkey_material);
hasher.result(&mut mkey);
let clen = encrypted.len() - meta_len;
let cipher_with_iv = &e[64..(64+16+clen)];
let cipher_iv = &cipher_with_iv[0..16];
let cipher_no_iv = &cipher_with_iv[16..];
let msg_mac = &e[(64+16+clen)..];
// Verify tag
let mut hmac = Hmac::new(Sha256::new(), &mkey);
hmac.input(cipher_with_iv);
hmac.input(shared_mac);
let mut mac = [0u8; 32];
hmac.raw_result(&mut mac);
if &mac[..] != msg_mac {
return Err(Error::InvalidMessage);
}
let mut msg = vec![0u8; clen];
aes::decrypt(ekey, cipher_iv, cipher_no_iv, &mut msg[..]);
Ok(msg)
}
/// Decrypt single message with a secret key
pub fn decrypt_single_message(secret: &Secret, encrypted: &[u8]) -> Result<Vec<u8>, Error> {
let meta_len = 64;
if encrypted.len() < meta_len {
return Err(Error::InvalidMessage); //invalid message: publickey
}
let e = encrypted;
let p = Public::from_slice(&e[0..64]);
let z = try!(ecdh::agree(secret, &p));
let mut key = [0u8; 32];
kdf(&z, &[0u8; 0], &mut key);
let ekey = &key[0..16];
let mkey_material = &key[16..32];
let mut hasher = Sha256::new();
let mut mkey = [0u8; 32];
hasher.input(mkey_material);
hasher.result(&mut mkey);
let clen = encrypted.len() - meta_len;
let cipher = &e[64..(64+clen)];
let mut msg = vec![0u8; clen];
let iv = H128::from_slice(&z.keccak256()[0..16]);
aes::decrypt(ekey, &iv, cipher, &mut msg[..]);
Ok(msg)
}
fn kdf(secret: &Secret, s1: &[u8], dest: &mut [u8]) {
let mut hasher = Sha256::new();
// SEC/ISO/Shoup specify that the counter size SHOULD be equivalent
// to the size of the hash output; however, they also note that
// 4 bytes is okay. NIST specifies 4 bytes.
let mut ctr = 1u32;
let mut written = 0usize;
while written < dest.len() {
let ctrs = [(ctr >> 24) as u8, (ctr >> 16) as u8, (ctr >> 8) as u8, ctr as u8];
hasher.input(&ctrs);
hasher.input(secret);
hasher.input(s1);
hasher.result(&mut dest[written..(written + 32)]);
hasher.reset();
written += 32;
ctr += 1;
}
}
}
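As the comment suggests, kdf above follows the NIST-style concatenation KDF: with Z the shared secret, S1 the optional extra data and ctr a 4-byte big-endian counter starting at 1, the output is K = T1 || T2 || ... where Ti = SHA256(ctr_i || Z || S1); every caller in this module requests exactly 32 bytes, i.e. a single block.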
#[cfg(test)]
mod tests {
use ethkey::{Random, Generator};
use ecies;
#[test]
fn ecies_shared() {
let kp = Random.generate().unwrap();
let message = b"So many books, so little time";
let shared = b"shared";
let wrong_shared = b"incorrect";
let encrypted = ecies::encrypt(kp.public(), shared, message).unwrap();
assert!(encrypted[..] != message[..]);
assert_eq!(encrypted[0], 0x04);
assert!(ecies::decrypt(kp.secret(), wrong_shared, &encrypted).is_err());
let decrypted = ecies::decrypt(kp.secret(), shared, &encrypted).unwrap();
assert_eq!(decrypted[..message.len()], message[..]);
}
#[test]
fn ecies_shared_single() {
let kp = Random.generate().unwrap();
let message = b"So many books, so little time";
let encrypted = ecies::encrypt_single_message(kp.public(), message).unwrap();
assert!(encrypted[..] != message[..]);
let decrypted = ecies::decrypt_single_message(kp.secret(), &encrypted).unwrap();
assert_eq!(decrypted[..message.len()], message[..]);
}
}

View File

@ -31,7 +31,7 @@ mod random;
mod signature; mod signature;
lazy_static! { lazy_static! {
static ref SECP256K1: secp256k1::Secp256k1 = secp256k1::Secp256k1::new(); pub static ref SECP256K1: secp256k1::Secp256k1 = secp256k1::Secp256k1::new();
} }
/// Generates new keypair. /// Generates new keypair.

View File

@ -21,7 +21,7 @@ use std::str::FromStr;
use secp256k1::{Message as SecpMessage, RecoverableSignature, RecoveryId, Error as SecpError}; use secp256k1::{Message as SecpMessage, RecoverableSignature, RecoveryId, Error as SecpError};
use secp256k1::key::{SecretKey, PublicKey}; use secp256k1::key::{SecretKey, PublicKey};
use rustc_serialize::hex::{ToHex, FromHex}; use rustc_serialize::hex::{ToHex, FromHex};
use bigint::hash::H520; use bigint::hash::{H520, H256, FixedHash};
use {Secret, Public, SECP256K1, Error, Message, public_to_address, Address}; use {Secret, Public, SECP256K1, Error, Message, public_to_address, Address};
#[repr(C)] #[repr(C)]
@ -43,6 +43,29 @@ impl Signature {
pub fn v(&self) -> u8 { pub fn v(&self) -> u8 {
self.0[64] self.0[64]
} }
/// Create a signature object from the sig.
pub fn from_rsv(r: &H256, s: &H256, v: u8) -> Signature {
let mut sig = [0u8; 65];
sig[0..32].copy_from_slice(&r);
sig[32..64].copy_from_slice(&s);
sig[64] = v;
Signature(sig)
}
/// Check if this is a "low" signature.
pub fn is_low_s(&self) -> bool {
H256::from_slice(self.s()) <= "7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF5D576E7357A4501DDFE92F46681B20A0".into()
}
/// Check if each component of the signature is in range.
pub fn is_valid(&self) -> bool {
self.v() <= 1 &&
H256::from_slice(self.r()) < "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141".into() &&
H256::from_slice(self.r()) >= 1.into() &&
H256::from_slice(self.s()) < "fffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141".into() &&
H256::from_slice(self.s()) >= 1.into()
}
} }
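A hedged sketch exercising the new helpers; the H256 import path mirrors what this file itself uses, and the r/s values are arbitrary in-range placeholders rather than a real signature:

    use ethkey::Signature;
    use bigint::hash::H256;

    let r: H256 = 1.into();
    let s: H256 = 1.into();
    let sig = Signature::from_rsv(&r, &s, 0);
    assert!(sig.is_valid());    // v <= 1 and both components lie in [1, n)
    assert!(sig.is_low_s());    // s is far below the half-order threshold
    assert_eq!(sig.v(), 0);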
// manual implementation large arrays don't have trait impls by default. // manual implementation large arrays don't have trait impls by default.

View File

@ -18,10 +18,10 @@ docopt = { version = "0.6", optional = true }
time = "0.1.34" time = "0.1.34"
lazy_static = "0.2" lazy_static = "0.2"
itertools = "0.4" itertools = "0.4"
ethcrypto = { path = "../ethcrypto" }
[build-dependencies] [build-dependencies]
serde_codegen = { version = "0.7", optional = true } serde_codegen = { version = "0.7", optional = true }
syntex = "0.33.0"
[features] [features]
default = ["serde_codegen"] default = ["serde_codegen"]

View File

@ -16,7 +16,6 @@
#[cfg(not(feature = "serde_macros"))] #[cfg(not(feature = "serde_macros"))]
mod inner { mod inner {
extern crate syntex;
extern crate serde_codegen; extern crate serde_codegen;
use std::env; use std::env;
@ -28,10 +27,7 @@ mod inner {
let src = Path::new("src/json/mod.rs.in"); let src = Path::new("src/json/mod.rs.in");
let dst = Path::new(&out_dir).join("mod.rs"); let dst = Path::new(&out_dir).join("mod.rs");
let mut registry = syntex::Registry::new(); serde_codegen::expand(&src, &dst).unwrap();
serde_codegen::register(&mut registry);
registry.expand("", &src, &dst).unwrap();
} }
} }

View File

@ -20,7 +20,6 @@ extern crate ethstore;
use std::{env, process, fs}; use std::{env, process, fs};
use std::io::Read; use std::io::Read;
use std::ops::Deref;
use docopt::Docopt; use docopt::Docopt;
use ethstore::ethkey::Address; use ethstore::ethkey::Address;
use ethstore::dir::{KeyDirectory, ParityDirectory, DiskDirectory, GethDirectory, DirectoryType}; use ethstore::dir::{KeyDirectory, ParityDirectory, DiskDirectory, GethDirectory, DirectoryType};
@ -142,7 +141,7 @@ fn execute<S, I>(command: I) -> Result<String, Error> where I: IntoIterator<Item
} else if args.cmd_import { } else if args.cmd_import {
let src = try!(key_dir(&args.flag_src)); let src = try!(key_dir(&args.flag_src));
let dst = try!(key_dir(&args.flag_dir)); let dst = try!(key_dir(&args.flag_dir));
let accounts = try!(import_accounts(*src, *dst)); let accounts = try!(import_accounts(&*src, &*dst));
Ok(format_accounts(&accounts)) Ok(format_accounts(&accounts))
} else if args.cmd_import_wallet { } else if args.cmd_import_wallet {
let wallet = try!(PresaleWallet::open(&args.arg_path)); let wallet = try!(PresaleWallet::open(&args.arg_path));
@ -162,7 +161,7 @@ fn execute<S, I>(command: I) -> Result<String, Error> where I: IntoIterator<Item
let signature = try!(store.sign(&address, &password, &message)); let signature = try!(store.sign(&address, &password, &message));
Ok(format!("{}", signature)) Ok(format!("{}", signature))
} else { } else {
unreachable!(); Ok(format!("{}", USAGE))
} }
} }

View File

@ -1,95 +0,0 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use tiny_keccak::Keccak;
use rcrypto::pbkdf2::pbkdf2;
use rcrypto::scrypt::{scrypt, ScryptParams};
use rcrypto::sha2::Sha256;
use rcrypto::hmac::Hmac;
pub const KEY_LENGTH: usize = 32;
pub const KEY_ITERATIONS: usize = 10240;
pub const KEY_LENGTH_AES: usize = KEY_LENGTH / 2;
pub fn derive_key_iterations(password: &str, salt: &[u8; 32], c: u32) -> (Vec<u8>, Vec<u8>) {
let mut h_mac = Hmac::new(Sha256::new(), password.as_bytes());
let mut derived_key = vec![0u8; KEY_LENGTH];
pbkdf2(&mut h_mac, salt, c, &mut derived_key);
let derived_right_bits = &derived_key[0..KEY_LENGTH_AES];
let derived_left_bits = &derived_key[KEY_LENGTH_AES..KEY_LENGTH];
(derived_right_bits.to_vec(), derived_left_bits.to_vec())
}
pub fn derive_key_scrypt(password: &str, salt: &[u8; 32], n: u32, p: u32, r: u32) -> (Vec<u8>, Vec<u8>) {
let mut derived_key = vec![0u8; KEY_LENGTH];
let scrypt_params = ScryptParams::new(n.trailing_zeros() as u8, r, p);
scrypt(password.as_bytes(), salt, &scrypt_params, &mut derived_key);
let derived_right_bits = &derived_key[0..KEY_LENGTH_AES];
let derived_left_bits = &derived_key[KEY_LENGTH_AES..KEY_LENGTH];
(derived_right_bits.to_vec(), derived_left_bits.to_vec())
}
pub fn derive_mac(derived_left_bits: &[u8], cipher_text: &[u8]) -> Vec<u8> {
let mut mac = vec![0u8; KEY_LENGTH_AES + cipher_text.len()];
mac[0..KEY_LENGTH_AES].copy_from_slice(derived_left_bits);
mac[KEY_LENGTH_AES..cipher_text.len() + KEY_LENGTH_AES].copy_from_slice(cipher_text);
mac
}
pub trait Keccak256<T> {
fn keccak256(&self) -> T where T: Sized;
}
impl Keccak256<[u8; 32]> for [u8] {
fn keccak256(&self) -> [u8; 32] {
let mut keccak = Keccak::new_keccak256();
let mut result = [0u8; 32];
keccak.update(self);
keccak.finalize(&mut result);
result
}
}
/// AES encryption
pub mod aes {
use rcrypto::blockmodes::{CtrMode, CbcDecryptor, PkcsPadding};
use rcrypto::aessafe::{AesSafe128Encryptor, AesSafe128Decryptor};
use rcrypto::symmetriccipher::{Encryptor, Decryptor, SymmetricCipherError};
use rcrypto::buffer::{RefReadBuffer, RefWriteBuffer, WriteBuffer};
/// Encrypt a message
pub fn encrypt(k: &[u8], iv: &[u8], plain: &[u8], dest: &mut [u8]) {
let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec());
encryptor.encrypt(&mut RefReadBuffer::new(plain), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
}
/// Decrypt a message
pub fn decrypt(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) {
let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec());
encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
}
/// Decrypt a message using cbc mode
pub fn decrypt_cbc(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) -> Result<usize, SymmetricCipherError> {
let mut encryptor = CbcDecryptor::new(AesSafe128Decryptor::new(k), PkcsPadding, iv.to_vec());
let len = dest.len();
let mut buffer = RefWriteBuffer::new(dest);
try!(encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut buffer, true));
Ok(len - buffer.remaining())
}
}

View File

@ -30,13 +30,13 @@ extern crate tiny_keccak;
extern crate lazy_static; extern crate lazy_static;
// reexport it nicely // reexport it nicely
extern crate ethkey as _ethkey; extern crate ethkey as _ethkey;
extern crate ethcrypto as crypto;
pub mod dir; pub mod dir;
pub mod ethkey; pub mod ethkey;
mod account; mod account;
mod json; mod json;
mod crypto;
mod error; mod error;
mod ethstore; mod ethstore;

View File

@ -48,6 +48,13 @@ include!(concat!(env!("OUT_DIR"), "/lib.rs"));
#[cfg(not(feature = "with-syntex"))] #[cfg(not(feature = "with-syntex"))]
include!("lib.rs.in"); include!("lib.rs.in");
#[cfg(feature = "with-syntex")]
pub fn expand(src: &std::path::Path, dst: &std::path::Path) {
let mut registry = syntex::Registry::new();
register(&mut registry);
registry.expand("", src, dst).unwrap();
}
#[cfg(feature = "with-syntex")] #[cfg(feature = "with-syntex")]
pub fn register(reg: &mut syntex::Registry) { pub fn register(reg: &mut syntex::Registry) {
use syntax::{ast, fold}; use syntax::{ast, fold};

View File

@ -26,7 +26,7 @@ extern crate semver;
pub mod service; pub mod service;
/// Default value for hypervisor ipc listener /// Default value for hypervisor ipc listener
pub const HYPERVISOR_IPC_URL: &'static str = "ipc:///tmp/parity-internal-hyper-status.ipc"; pub const HYPERVISOR_IPC_URL: &'static str = "parity-internal-hyper-status.ipc";
use std::sync::{Arc,RwLock}; use std::sync::{Arc,RwLock};
use service::{HypervisorService, IpcModuleId}; use service::{HypervisorService, IpcModuleId};
@ -41,8 +41,9 @@ pub struct Hypervisor {
ipc_addr: String, ipc_addr: String,
service: Arc<HypervisorService>, service: Arc<HypervisorService>,
ipc_worker: RwLock<nanoipc::Worker<HypervisorService>>, ipc_worker: RwLock<nanoipc::Worker<HypervisorService>>,
processes: RwLock<HashMap<BinaryId, Child>>, processes: RwLock<HashMap<IpcModuleId, Child>>,
modules: HashMap<IpcModuleId, (BinaryId, BootArgs)>, modules: HashMap<IpcModuleId, BootArgs>,
pub io_path: String,
} }
/// Boot arguments for binary /// Boot arguments for binary
@ -79,8 +80,8 @@ impl Hypervisor {
Hypervisor::with_url(HYPERVISOR_IPC_URL) Hypervisor::with_url(HYPERVISOR_IPC_URL)
} }
pub fn module(mut self, module_id: IpcModuleId, binary_id: BinaryId, args: BootArgs) -> Hypervisor { pub fn module(mut self, module_id: IpcModuleId, args: BootArgs) -> Hypervisor {
self.modules.insert(module_id, (binary_id, args)); self.modules.insert(module_id, args);
self.service.add_module(module_id); self.service.add_module(module_id);
self self
} }
@ -90,6 +91,11 @@ impl Hypervisor {
self self
} }
pub fn io_path(mut self, directory: &str) -> Hypervisor {
self.io_path = directory.to_owned();
self
}
/// Starts with the specified address for the ipc listener and /// Starts with the specified address for the ipc listener and
/// the specified list of modules in form of created service /// the specified list of modules in form of created service
pub fn with_url(addr: &str) -> Hypervisor { pub fn with_url(addr: &str) -> Hypervisor {
@ -101,12 +107,13 @@ impl Hypervisor {
ipc_worker: RwLock::new(worker), ipc_worker: RwLock::new(worker),
processes: RwLock::new(HashMap::new()), processes: RwLock::new(HashMap::new()),
modules: HashMap::new(), modules: HashMap::new(),
io_path: "/tmp".to_owned(),
} }
} }
/// Since one binary can host multiple modules /// Since one binary can host multiple modules
/// we match binaries /// we match binaries
fn match_module(&self, module_id: &IpcModuleId) -> Option<&(BinaryId, BootArgs)> { fn match_module(&self, module_id: &IpcModuleId) -> Option<&BootArgs> {
self.modules.get(module_id) self.modules.get(module_id)
} }
@ -126,24 +133,19 @@ impl Hypervisor {
fn start_module(&self, module_id: IpcModuleId) { fn start_module(&self, module_id: IpcModuleId) {
use std::io::Write; use std::io::Write;
self.match_module(&module_id).map(|&(ref binary_id, ref binary_args)| { self.match_module(&module_id).map(|boot_args| {
let mut processes = self.processes.write().unwrap(); let mut processes = self.processes.write().unwrap();
{ {
if processes.get(binary_id).is_some() { if processes.get(&module_id).is_some() {
// already started for another module // already started for another module
return; return;
} }
} }
let mut executable_path = std::env::current_exe().unwrap(); let mut command = Command::new(&std::env::current_exe().unwrap());
executable_path.pop();
executable_path.push(binary_id);
let executable_path = executable_path.to_str().unwrap();
let mut command = Command::new(&executable_path);
command.stderr(std::process::Stdio::inherit()); command.stderr(std::process::Stdio::inherit());
if let Some(ref cli_args) = binary_args.cli { if let Some(ref cli_args) = boot_args.cli {
for arg in cli_args { command.arg(arg); } for arg in cli_args { command.arg(arg); }
} }
@ -152,18 +154,18 @@ impl Hypervisor {
trace!(target: "hypervisor", "Spawn executable: {:?}", command); trace!(target: "hypervisor", "Spawn executable: {:?}", command);
let mut child = command.spawn().unwrap_or_else( let mut child = command.spawn().unwrap_or_else(
|e| panic!("Hypervisor cannot start binary ({:?}): {}", executable_path, e)); |e| panic!("Hypervisor cannot execute command ({:?}): {}", command, e));
if let Some(ref std_in) = binary_args.stdin { if let Some(ref std_in) = boot_args.stdin {
trace!(target: "hypervisor", "Pushing std-in payload..."); trace!(target: "hypervisor", "Pushing std-in payload...");
child.stdin.as_mut() child.stdin.as_mut()
.expect("std-in should be piped above") .expect("std-in should be piped above")
.write(std_in) .write(std_in)
.unwrap_or_else(|e| panic!(format!("Error trying to pipe stdin for {}: {:?}", &executable_path, e))); .unwrap_or_else(|e| panic!(format!("Error trying to pipe stdin for {:?}: {:?}", &command, e)));
drop(child.stdin.take()); drop(child.stdin.take());
} }
processes.insert(binary_id, child); processes.insert(module_id, child);
}); });
} }
@ -185,8 +187,8 @@ impl Hypervisor {
if wait_time.is_some() { std::thread::sleep(wait_time.unwrap()) } if wait_time.is_some() { std::thread::sleep(wait_time.unwrap()) }
let mut childs = self.processes.write().unwrap(); let mut childs = self.processes.write().unwrap();
for (ref mut binary, ref mut child) in childs.iter_mut() { for (ref mut module, ref mut child) in childs.iter_mut() {
trace!(target: "hypervisor", "Stopping process module: {}", binary); trace!(target: "hypervisor", "Stopping process module: {}", module);
child.kill().unwrap(); child.kill().unwrap();
} }
} }

View File

@ -28,7 +28,7 @@ use nanomsg::{Socket, Protocol, Error, Endpoint, PollRequest, PollFd, PollInOut}
use std::ops::Deref; use std::ops::Deref;
const POLL_TIMEOUT: isize = 200; const POLL_TIMEOUT: isize = 200;
const CLIENT_CONNECTION_TIMEOUT: isize = 15000; const CLIENT_CONNECTION_TIMEOUT: isize = 120000;
/// Generic worker to handle service (binded) sockets /// Generic worker to handle service (binded) sockets
pub struct Worker<S: ?Sized> where S: IpcInterface { pub struct Worker<S: ?Sized> where S: IpcInterface {
@ -97,6 +97,7 @@ pub fn init_client<S>(socket_addr: &str) -> Result<GuardedSocket<S>, SocketError
SocketError::RequestLink SocketError::RequestLink
})); }));
trace!(target: "ipc", "Created client for {}", socket_addr);
Ok(GuardedSocket { Ok(GuardedSocket {
client: Arc::new(S::init(socket)), client: Arc::new(S::init(socket)),
_endpoint: endpoint, _endpoint: endpoint,
@ -189,6 +190,8 @@ impl<S: ?Sized> Worker<S> where S: IpcInterface {
self.rebuild_poll_request(); self.rebuild_poll_request();
trace!(target: "ipc", "Started duplex worker at {}", addr);
Ok(()) Ok(())
} }
@ -200,6 +203,7 @@ impl<S: ?Sized> Worker<S> where S: IpcInterface {
SocketError::DuplexLink SocketError::DuplexLink
})); }));
let endpoint = try!(socket.bind(addr).map_err(|e| { let endpoint = try!(socket.bind(addr).map_err(|e| {
warn!(target: "ipc", "Failed to bind socket to address '{}': {:?}", addr, e); warn!(target: "ipc", "Failed to bind socket to address '{}': {:?}", addr, e);
SocketError::DuplexLink SocketError::DuplexLink
@ -209,6 +213,7 @@ impl<S: ?Sized> Worker<S> where S: IpcInterface {
self.rebuild_poll_request(); self.rebuild_poll_request();
trace!(target: "ipc", "Started request-reply worker at {}", addr);
Ok(()) Ok(())
} }
} }

View File

@ -17,5 +17,4 @@ ethcore-util = { path = "../../util" }
log = "0.3" log = "0.3"
[build-dependencies] [build-dependencies]
syntex = "0.33"
ethcore-ipc-codegen = { path = "../codegen" } ethcore-ipc-codegen = { path = "../codegen" }

View File

@ -10,11 +10,10 @@ rustc-serialize = "0.3"
serde = "0.7.0" serde = "0.7.0"
serde_json = "0.7.0" serde_json = "0.7.0"
serde_macros = { version = "0.7.0", optional = true } serde_macros = { version = "0.7.0", optional = true }
clippy = { version = "0.0.82", optional = true} clippy = { version = "0.0.85", optional = true}
[build-dependencies] [build-dependencies]
serde_codegen = { version = "0.7.0", optional = true } serde_codegen = { version = "0.7.0", optional = true }
syntex = "*"
[features] [features]
default = ["serde_codegen"] default = ["serde_codegen"]

View File

@ -16,7 +16,6 @@
#[cfg(not(feature = "serde_macros"))] #[cfg(not(feature = "serde_macros"))]
mod inner { mod inner {
extern crate syntex;
extern crate serde_codegen; extern crate serde_codegen;
use std::env; use std::env;
@ -28,10 +27,7 @@ mod inner {
let src = Path::new("src/lib.rs.in"); let src = Path::new("src/lib.rs.in");
let dst = Path::new(&out_dir).join("lib.rs"); let dst = Path::new(&out_dir).join("lib.rs");
let mut registry = syntex::Registry::new(); serde_codegen::expand(&src, &dst).unwrap();
serde_codegen::register(&mut registry);
registry.expand("", &src, &dst).unwrap();
} }
} }

View File

@ -138,6 +138,7 @@ fn execute_import(cmd: ImportBlockchain) -> Result<String, String> {
client_config, client_config,
&spec, &spec,
Path::new(&client_path), Path::new(&client_path),
Path::new(&cmd.dirs.ipc_path()),
Arc::new(Miner::with_spec(&spec)), Arc::new(Miner::with_spec(&spec)),
).map_err(|e| format!("Client service error: {:?}", e))); ).map_err(|e| format!("Client service error: {:?}", e)));
@ -248,6 +249,7 @@ fn execute_export(cmd: ExportBlockchain) -> Result<String, String> {
client_config, client_config,
&spec, &spec,
Path::new(&client_path), Path::new(&client_path),
Path::new(&cmd.dirs.ipc_path()),
Arc::new(Miner::with_spec(&spec)), Arc::new(Miner::with_spec(&spec)),
).map_err(|e| format!("Client service error: {:?}", e))); ).map_err(|e| format!("Client service error: {:?}", e)));

parity/boot.rs (new file, 123 lines)
View File

@ -0,0 +1,123 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Parity micro-service helpers
use nanoipc;
use ipc;
use std;
use std::sync::Arc;
use hypervisor::HypervisorServiceClient;
use hypervisor::service::IpcModuleId;
use ctrlc::CtrlC;
use std::sync::atomic::{AtomicBool, Ordering};
use nanoipc::{IpcInterface, GuardedSocket, NanoSocket};
use ipc::WithSocket;
use ethcore_logger::{Config as LogConfig, setup_log};
use docopt::Docopt;
#[derive(Debug)]
pub enum BootError {
ReadArgs(std::io::Error),
DecodeArgs(ipc::binary::BinaryError),
DependencyConnect(nanoipc::SocketError),
}
pub fn host_service<T: ?Sized + Send + Sync + 'static>(addr: &str, stop_guard: Arc<AtomicBool>, service: Arc<T>) where T: IpcInterface {
let socket_url = addr.to_owned();
std::thread::spawn(move || {
let mut worker = nanoipc::Worker::<T>::new(&service);
worker.add_reqrep(&socket_url).unwrap();
while !stop_guard.load(Ordering::Relaxed) {
worker.poll();
}
});
}
pub fn payload<B: ipc::BinaryConvertable>() -> Result<B, BootError> {
use std::io;
use std::io::Read;
let mut buffer = Vec::new();
try!(
io::stdin().read_to_end(&mut buffer)
.map_err(|io_err| BootError::ReadArgs(io_err))
);
ipc::binary::deserialize::<B>(&buffer)
.map_err(|binary_error| BootError::DecodeArgs(binary_error))
}
pub fn register(hv_url: &str, module_id: IpcModuleId) -> GuardedSocket<HypervisorServiceClient<NanoSocket>>{
let hypervisor_client = nanoipc::init_client::<HypervisorServiceClient<_>>(hv_url).unwrap();
hypervisor_client.handshake().unwrap();
hypervisor_client.module_ready(module_id);
hypervisor_client
}
pub fn dependency<C: WithSocket<NanoSocket>>(url: &str)
-> Result<GuardedSocket<C>, BootError>
{
nanoipc::init_client::<C>(url).map_err(|socket_err| BootError::DependencyConnect(socket_err))
}
pub fn main_thread() -> Arc<AtomicBool> {
let stop = Arc::new(AtomicBool::new(false));
let ctrc_stop = stop.clone();
CtrlC::set_handler(move || {
ctrc_stop.store(true, Ordering::Relaxed);
});
stop
}
pub fn setup_cli_logger(svc_name: &str) {
let usage = format!("
Ethcore {} service
Usage:
parity {} [options]
Options:
-l --logging LOGGING Specify the logging level. Must conform to the same
format as RUST_LOG.
--log-file FILENAME Specify a filename into which logging should be
directed.
--no-color Don't use terminal color codes in output.
", svc_name, svc_name);
#[derive(Debug, RustcDecodable)]
struct Args {
flag_logging: Option<String>,
flag_log_file: Option<String>,
flag_no_color: bool,
}
impl Args {
pub fn log_settings(&self) -> LogConfig {
LogConfig {
color: self.flag_no_color || cfg!(windows),
mode: self.flag_logging.clone(),
file: self.flag_log_file.clone(),
}
}
}
let args: Args = Docopt::new(usage)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
setup_log(&args.log_settings()).expect("Log initialization failure");
}

View File

@ -73,6 +73,9 @@ Account Options:
Signer UIs. Signer UIs.
--signer-port PORT Specify the port of Trusted Signer server --signer-port PORT Specify the port of Trusted Signer server
[default: 8180]. [default: 8180].
--signer-interface IP Specify the hostname portion of the Trusted Signer
server, IP should be an interface's IP address,
or local [default: local].
--signer-path PATH Specify directory where Signer UIs tokens should --signer-path PATH Specify directory where Signer UIs tokens should
be stored. [default: $HOME/.parity/signer] be stored. [default: $HOME/.parity/signer]
--signer-no-validation Disable Origin and Host headers validation for --signer-no-validation Disable Origin and Host headers validation for
@ -170,7 +173,7 @@ Sealing/Mining Options:
lenient - Same as strict when mining, and cheap lenient - Same as strict when mining, and cheap
when not [default: cheap]. when not [default: cheap].
--usd-per-tx USD Amount of USD to be paid for a basic transaction --usd-per-tx USD Amount of USD to be paid for a basic transaction
[default: 0.005]. The minimum gas price is set [default: 0]. The minimum gas price is set
accordingly. accordingly.
--usd-per-eth SOURCE USD value of a single ETH. SOURCE may be either an --usd-per-eth SOURCE USD value of a single ETH. SOURCE may be either an
amount in USD, a web service or 'auto' to use each amount in USD, a web service or 'auto' to use each
@ -349,6 +352,7 @@ pub struct Args {
pub flag_force_signer: bool, pub flag_force_signer: bool,
pub flag_no_signer: bool, pub flag_no_signer: bool,
pub flag_signer_port: u16, pub flag_signer_port: u16,
pub flag_signer_interface: String,
pub flag_signer_path: String, pub flag_signer_path: String,
pub flag_signer_no_validation: bool, pub flag_signer_no_validation: bool,
pub flag_force_sealing: bool, pub flag_force_sealing: bool,

View File

@ -32,7 +32,7 @@ use ethcore_rpc::NetworkSettings;
use cache::CacheConfig; use cache::CacheConfig;
use helpers::{to_duration, to_mode, to_block_id, to_u256, to_pending_set, to_price, replace_home, use helpers::{to_duration, to_mode, to_block_id, to_u256, to_pending_set, to_price, replace_home,
geth_ipc_path, parity_ipc_path, to_bootnodes, to_addresses, to_address}; geth_ipc_path, parity_ipc_path, to_bootnodes, to_addresses, to_address};
use params::{ResealPolicy, AccountsConfig, GasPricerConfig, MinerExtras}; use params::{ResealPolicy, AccountsConfig, GasPricerConfig, MinerExtras, SpecType};
use ethcore_logger::Config as LogConfig; use ethcore_logger::Config as LogConfig;
use dir::Directories; use dir::Directories;
use dapps::Configuration as DappsConfiguration; use dapps::Configuration as DappsConfiguration;
@ -345,6 +345,7 @@ impl Configuration {
SignerConfiguration { SignerConfiguration {
enabled: self.signer_enabled(), enabled: self.signer_enabled(),
port: self.args.flag_signer_port, port: self.args.flag_signer_port,
interface: self.signer_interface(),
signer_path: self.directories().signer, signer_path: self.directories().signer,
skip_origin_validation: self.args.flag_signer_no_validation, skip_origin_validation: self.args.flag_signer_no_validation,
} }
@ -440,13 +441,23 @@ impl Configuration {
ret.min_peers = self.min_peers(); ret.min_peers = self.min_peers();
let mut net_path = PathBuf::from(self.directories().db); let mut net_path = PathBuf::from(self.directories().db);
net_path.push("network"); net_path.push("network");
let net_specific_path = net_path.join(&try!(self.network_specific_path()));
ret.config_path = Some(net_path.to_str().unwrap().to_owned()); ret.config_path = Some(net_path.to_str().unwrap().to_owned());
ret.net_config_path = Some(net_specific_path.to_str().unwrap().to_owned());
ret.reserved_nodes = try!(self.init_reserved_nodes()); ret.reserved_nodes = try!(self.init_reserved_nodes());
ret.allow_non_reserved = !self.args.flag_reserved_only; ret.allow_non_reserved = !self.args.flag_reserved_only;
Ok(ret) Ok(ret)
} }
fn network_specific_path(&self) -> Result<PathBuf, String> {
let spec_type : SpecType = try!(self.chain().parse());
let spec = try!(spec_type.spec());
let id = try!(self.network_id());
let mut path = PathBuf::new();
path.push(format!("{}", id.unwrap_or_else(|| spec.network_id())));
Ok(path)
}
fn network_id(&self) -> Result<Option<U256>, String> { fn network_id(&self) -> Result<Option<U256>, String> {
let net_id = self.args.flag_network_id.as_ref().or(self.args.flag_networkid.as_ref()); let net_id = self.args.flag_network_id.as_ref().or(self.args.flag_networkid.as_ref());
match net_id { match net_id {
@ -530,6 +541,15 @@ impl Configuration {
|e| warn!("Failed to create '{}' for geth mode: {}", &geth_path.to_str().unwrap(), e)); |e| warn!("Failed to create '{}' for geth mode: {}", &geth_path.to_str().unwrap(), e));
} }
if cfg!(feature = "ipc") && !cfg!(feature = "windows") {
let mut path_buf = PathBuf::from(db_path.clone());
path_buf.push("ipc");
let ipc_path = path_buf.to_str().unwrap();
::std::fs::create_dir_all(ipc_path).unwrap_or_else(
|e| warn!("Failed to create directory '{}' for ipc sockets: {}", ipc_path, e)
);
}
Directories { Directories {
keys: keys_path, keys: keys_path,
db: db_path, db: db_path,
@ -554,6 +574,13 @@ impl Configuration {
} }
} }
fn signer_interface(&self) -> String {
match self.args.flag_signer_interface.as_str() {
"local" => "127.0.0.1",
x => x,
}.into()
}
fn rpc_interface(&self) -> String { fn rpc_interface(&self) -> String {
match self.network_settings().rpc_interface.as_str() { match self.network_settings().rpc_interface.as_str() {
"all" => "0.0.0.0", "all" => "0.0.0.0",
@ -595,6 +622,7 @@ mod tests {
use ethcore::client::{VMType, BlockID}; use ethcore::client::{VMType, BlockID};
use helpers::{replace_home, default_network_config}; use helpers::{replace_home, default_network_config};
use run::RunCmd; use run::RunCmd;
use signer::Configuration as SignerConfiguration;
use blockchain::{BlockchainCmd, ImportBlockchain, ExportBlockchain, DataFormat}; use blockchain::{BlockchainCmd, ImportBlockchain, ExportBlockchain, DataFormat};
use presale::ImportWallet; use presale::ImportWallet;
use account::{AccountCmd, NewAccount, ImportAccounts}; use account::{AccountCmd, NewAccount, ImportAccounts};
@ -857,16 +885,44 @@ mod tests {
} }
#[test] #[test]
fn should_parse_signer_allow_all_flag() { fn should_parse_signer_configuration() {
// given // given
// when // when
let conf0 = parse(&["parity", "--signer-no-validation"]); let conf0 = parse(&["parity", "--signer-path", "signer"]);
let conf1 = parse(&["parity"]); let conf1 = parse(&["parity", "--signer-path", "signer", "--signer-no-validation"]);
let conf2 = parse(&["parity", "--signer-path", "signer", "--signer-port", "3123"]);
let conf3 = parse(&["parity", "--signer-path", "signer", "--signer-interface", "test"]);
// then // then
assert_eq!(conf0.args.flag_signer_no_validation, true); assert_eq!(conf0.signer_config(), SignerConfiguration {
assert_eq!(conf1.args.flag_signer_no_validation, false); enabled: true,
port: 8180,
interface: "127.0.0.1".into(),
signer_path: "signer".into(),
skip_origin_validation: false,
});
assert_eq!(conf1.signer_config(), SignerConfiguration {
enabled: true,
port: 8180,
interface: "127.0.0.1".into(),
signer_path: "signer".into(),
skip_origin_validation: true,
});
assert_eq!(conf2.signer_config(), SignerConfiguration {
enabled: true,
port: 3123,
interface: "127.0.0.1".into(),
signer_path: "signer".into(),
skip_origin_validation: false,
});
assert_eq!(conf3.signer_config(), SignerConfiguration {
enabled: true,
port: 8180,
interface: "test".into(),
signer_path: "signer".into(),
skip_origin_validation: false,
});
}
#[test]


@ -15,16 +15,11 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use std::net::SocketAddr;
use io::PanicHandler;
use rpc_apis;
use ethcore::client::Client;
use helpers::replace_home;
#[cfg(feature = "dapps")]
pub use ethcore_dapps::Server as WebappServer;
#[cfg(not(feature = "dapps"))]
pub struct WebappServer;
#[derive(Debug, PartialEq, Clone)]
pub struct Configuration {
pub enabled: bool,
@ -51,6 +46,7 @@ impl Default for Configuration {
pub struct Dependencies {
pub panic_handler: Arc<PanicHandler>,
pub apis: Arc<rpc_apis::Dependencies>,
pub client: Arc<Client>,
}
pub fn new(configuration: Configuration, deps: Dependencies) -> Result<Option<WebappServer>, String> {
@ -75,7 +71,15 @@ pub fn new(configuration: Configuration, deps: Dependencies) -> Result<Option<We
Ok(Some(try!(setup_dapps_server(deps, configuration.dapps_path, &addr, auth))))
}
pub use self::server::WebappServer;
pub use self::server::setup_dapps_server;
#[cfg(not(feature = "dapps"))]
mod server {
use super::Dependencies;
use std::net::SocketAddr;
pub struct WebappServer;
pub fn setup_dapps_server(
_deps: Dependencies,
_dapps_path: String,
@ -84,8 +88,23 @@ pub fn setup_dapps_server(
) -> Result<WebappServer, String> {
Err("Your Parity version has been compiled without WebApps support.".into())
}
}
#[cfg(feature = "dapps")]
mod server {
use super::Dependencies;
use std::sync::Arc;
use std::net::SocketAddr;
use util::{Bytes, Address, U256};
use ethcore::transaction::{Transaction, Action};
use ethcore::client::{Client, BlockChainClient, BlockID};
use rpc_apis;
use ethcore_dapps::ContractClient;
pub use ethcore_dapps::Server as WebappServer;
pub fn setup_dapps_server(
deps: Dependencies,
dapps_path: String,
@ -94,7 +113,9 @@ pub fn setup_dapps_server(
) -> Result<WebappServer, String> {
use ethcore_dapps as dapps;
let server = dapps::ServerBuilder::new(dapps_path, Arc::new(Registrar {
client: deps.client.clone(),
}));
let server = rpc_apis::setup_rpc(server, deps.apis.clone(), rpc_apis::ApiSet::UnsafeContext);
let start_result = match auth {
None => {
@ -117,3 +138,35 @@ pub fn setup_dapps_server(
}
}
struct Registrar {
client: Arc<Client>,
}
impl ContractClient for Registrar {
fn registrar(&self) -> Result<Address, String> {
self.client.additional_params().get("registrar")
.ok_or_else(|| "Registrar not defined.".into())
.and_then(|registrar| {
registrar.parse().map_err(|e| format!("Invalid registrar address: {:?}", e))
})
}
fn call(&self, address: Address, data: Bytes) -> Result<Bytes, String> {
let from = Address::default();
let transaction = Transaction {
nonce: self.client.latest_nonce(&from),
action: Action::Call(address),
gas: U256::from(50_000_000),
gas_price: U256::default(),
value: U256::default(),
data: data,
}.fake_sign(from);
self.client.call(&transaction, BlockID::Latest, Default::default())
.map_err(|e| format!("{:?}", e))
.map(|executed| {
executed.output
})
}
}
}
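As an illustrative sketch only (not part of the change; the helper name is hypothetical), the two ContractClient methods above compose into a read-only registry lookup, with the ABI-encoded call data assumed to come from the dapps crate:

// Hypothetical helper; `lookup_data` must already be ABI-encoded call data.
fn registry_lookup(registrar: &Registrar, lookup_data: Vec<u8>) -> Result<Vec<u8>, String> {
    let registrar_address = try!(registrar.registrar()); // address taken from the chain spec params
    registrar.call(registrar_address, lookup_data)       // fake-signed, read-only call at the latest block
}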


@ -66,6 +66,13 @@ impl Directories {
dir.push("db");
dir
}
/// Get the ipc sockets path
pub fn ipc_path(&self) -> PathBuf {
let mut dir = Path::new(&self.db).to_path_buf();
dir.push("ipc");
dir
}
}
#[cfg(test)]


@ -172,6 +172,7 @@ pub fn default_network_config() -> ::ethsync::NetworkConfiguration {
use ethsync::NetworkConfiguration;
NetworkConfiguration {
config_path: Some(replace_home("$HOME/.parity/network")),
net_config_path: Some(replace_home("$HOME/.parity/network/1")),
listen_address: Some("0.0.0.0:30303".into()),
public_address: None,
udp_port: None,


@ -57,9 +57,24 @@ extern crate lazy_static;
extern crate regex;
extern crate isatty;
#[cfg(feature="stratum")]
extern crate ethcore_stratum;
#[cfg(feature = "dapps")]
extern crate ethcore_dapps;
macro_rules! dependency {
($dep_ty:ident, $url:expr) => {
{
let dep = boot::dependency::<$dep_ty<_>>($url)
.unwrap_or_else(|e| panic!("Fatal: error connecting service ({:?})", e));
dep.handshake()
.unwrap_or_else(|e| panic!("Fatal: error in connected service ({:?})", e));
dep
}
}
}
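As a usage sketch (this mirrors how the sync and stratum binaries further down consume the macro; io_path is whatever the hypervisor put into the boot payload):

// Connects to the chain client socket and performs the handshake, panicking on failure.
let remote_client = dependency!(RemoteClient,
    &service_urls::with_base(&service_config.io_path, service_urls::CLIENT));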
mod cache;
mod upgrade;
mod rpc;
@ -83,6 +98,10 @@ mod presale;
mod run;
mod sync;
mod snapshot;
mod boot;
#[cfg(feature="stratum")]
mod stratum;
use std::{process, env};
use cli::print_version;
@ -116,6 +135,25 @@ fn start() -> Result<String, String> {
execute(cmd)
}
#[cfg(feature="stratum")]
mod stratum_optional {
pub fn probably_run() -> bool {
// just redirect to the stratum::main()
if ::std::env::args().nth(1).map_or(false, |arg| arg == "stratum") {
super::stratum::main();
true
}
else { false }
}
}
#[cfg(not(feature="stratum"))]
mod stratum_optional {
pub fn probably_run() -> bool {
false
}
}
fn main() {
// just redirect to the sync::main()
if std::env::args().nth(1).map_or(false, |arg| arg == "sync") {
@ -123,6 +161,8 @@ fn main() {
return;
}
if stratum_optional::probably_run() { return; }
match start() {
Ok(result) => {
println!("{}", result);


@ -23,12 +23,27 @@ use self::no_ipc_deps::*;
#[cfg(feature="ipc")]
use self::ipc_deps::*;
use ethcore_logger::Config as LogConfig;
use std::path::Path;
pub mod service_urls {
use std::path::PathBuf;
pub const CLIENT: &'static str = "parity-chain.ipc";
pub const SYNC: &'static str = "parity-sync.ipc";
pub const SYNC_NOTIFY: &'static str = "parity-sync-notify.ipc";
pub const NETWORK_MANAGER: &'static str = "parity-manage-net.ipc";
#[cfg(feature="stratum")]
pub const STRATUM: &'static str = "parity-stratum.ipc";
#[cfg(feature="stratum")]
pub const MINING_JOB_DISPATCHER: &'static str = "parity-mining-jobs.ipc";
pub fn with_base(data_dir: &str, service_path: &str) -> String {
let mut path = PathBuf::from(data_dir);
path.push(service_path);
format!("ipc://{}", path.to_str().unwrap())
}
}
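For illustration only (hypothetical data dir, not part of the change), the with_base helper joins the per-run ipc directory with a socket name to form the full endpoint:

let url = service_urls::with_base("/home/user/.parity/ipc", service_urls::CLIENT);
assert_eq!(url, "ipc:///home/user/.parity/ipc/parity-chain.ipc");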
#[cfg(not(feature="ipc"))]
@ -51,27 +66,30 @@ pub type SyncModules = (Arc<SyncProvider>, Arc<ManageNetwork>, Arc<ChainNotify>)
mod ipc_deps {
pub use ethsync::{SyncClient, NetworkManagerClient, ServiceConfiguration};
pub use ethcore::client::ChainNotifyClient;
pub use hypervisor::{SYNC_MODULE_ID, BootArgs, HYPERVISOR_IPC_URL};
pub use nanoipc::{GuardedSocket, NanoSocket, init_client};
pub use ipc::IpcSocket;
pub use ipc::binary::serialize;
}
#[cfg(feature="ipc")]
pub fn hypervisor(base_path: &Path) -> Option<Hypervisor> {
Some(Hypervisor
::with_url(&service_urls::with_base(base_path.to_str().unwrap(), HYPERVISOR_IPC_URL))
.io_path(base_path.to_str().unwrap()))
}
#[cfg(not(feature="ipc"))]
pub fn hypervisor(_: &Path) -> Option<Hypervisor> {
None
}
#[cfg(feature="ipc")]
fn sync_arguments(io_path: &str, sync_cfg: SyncConfig, net_cfg: NetworkConfiguration, log_settings: &LogConfig) -> BootArgs {
let service_config = ServiceConfiguration {
sync: sync_cfg,
net: net_cfg,
io_path: io_path.to_owned(),
};
// initialisation payload is passed via stdin
@ -105,14 +123,18 @@ pub fn sync
-> Result<SyncModules, NetworkError>
{
let mut hypervisor = hypervisor_ref.take().expect("There should be hypervisor for ipc configuration");
let args = sync_arguments(&hypervisor.io_path, sync_cfg, net_cfg, log_settings);
hypervisor = hypervisor.module(SYNC_MODULE_ID, args);
hypervisor.start();
hypervisor.wait_for_startup();
let sync_client = init_client::<SyncClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC)).unwrap();
let notify_client = init_client::<ChainNotifyClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::SYNC_NOTIFY)).unwrap();
let manage_client = init_client::<NetworkManagerClient<_>>(
&service_urls::with_base(&hypervisor.io_path, service_urls::NETWORK_MANAGER)).unwrap();
*hypervisor_ref = Some(hypervisor);
Ok((sync_client, manage_client, notify_client))


@ -179,7 +179,7 @@ pub enum GasPricerConfig {
impl Default for GasPricerConfig {
fn default() -> Self {
GasPricerConfig::Calibrated {
usd_per_tx: 0f32,
recalibration_period: Duration::from_secs(3600),
}
}


@ -163,13 +163,14 @@ pub fn execute(cmd: RunCmd) -> Result<(), String> {
}
// create supervisor
let mut hypervisor = modules::hypervisor(Path::new(&cmd.dirs.ipc_path()));
// create client service.
let service = try!(ClientService::start(
client_config,
&spec,
Path::new(&client_path),
Path::new(&cmd.dirs.ipc_path()),
miner.clone(),
).map_err(|e| format!("Client service error: {:?}", e)));
@ -222,6 +223,7 @@ pub fn execute(cmd: RunCmd) -> Result<(), String> {
let dapps_deps = dapps::Dependencies {
panic_handler: panic_handler.clone(),
apis: deps_for_rpc_apis.clone(),
client: client.clone(),
};
// start dapps server


@ -31,6 +31,7 @@ const CODES_FILENAME: &'static str = "authcodes";
pub struct Configuration {
pub enabled: bool,
pub port: u16,
pub interface: String,
pub signer_path: String,
pub skip_origin_validation: bool,
}
@ -40,6 +41,7 @@ impl Default for Configuration {
Configuration {
enabled: true,
port: 8180,
interface: "127.0.0.1".into(),
signer_path: replace_home("$HOME/.parity/signer"),
skip_origin_validation: false,
}
@ -82,7 +84,7 @@ fn generate_new_token(path: String) -> io::Result<String> {
}
fn do_start(conf: Configuration, deps: Dependencies) -> Result<SignerServer, String> {
let addr = try!(format!("{}:{}", conf.interface, conf.port)
.parse()
.map_err(|_| format!("Invalid port specified: {}", conf.port)));
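A small standalone sketch of the resulting bind address, using the defaults from this hunk (note that the error message still only mentions the port even when the interface part is what fails to parse):

use std::net::SocketAddr;
let (interface, port) = ("127.0.0.1".to_owned(), 8180u16);
let addr: SocketAddr = format!("{}:{}", interface, port)
    .parse()
    .expect("the default interface and port form a valid socket address");
assert_eq!(addr.to_string(), "127.0.0.1:8180");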


@ -95,6 +95,7 @@ impl SnapshotCommand {
client_config,
&spec,
Path::new(&client_path),
Path::new(&self.dirs.ipc_path()),
Arc::new(Miner::with_spec(&spec))
).map_err(|e| format!("Client service error: {:?}", e)));
@ -107,6 +108,7 @@ impl SnapshotCommand {
let (service, _panic_handler) = try!(self.start_service());
warn!("Snapshot restoration is experimental and the format may be subject to change.");
warn!("On encountering an unexpected error, please ensure that you have a recent snapshot.");
let snapshot = service.snapshot_service();
let reader = PackedReader::new(Path::new(&file))

parity/stratum.rs (new file, 57 lines)

@ -0,0 +1,57 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Parity stratum service
use std;
use std::sync::Arc;
use ethcore_stratum::{Stratum as StratumServer, PushWorkHandler, RemoteJobDispatcher, ServiceConfiguration};
use std::thread;
use modules::service_urls;
use boot;
use hypervisor::service::IpcModuleId;
use std::net::SocketAddr;
use std::str::FromStr;
const STRATUM_MODULE_ID: IpcModuleId = 8000;
pub fn main() {
boot::setup_cli_logger("stratum");
let service_config: ServiceConfiguration = boot::payload()
.unwrap_or_else(|e| panic!("Fatal: error reading boot arguments ({:?})", e));
let job_dispatcher = dependency!(RemoteJobDispatcher, service_urls::MINING_JOB_DISPATCHER);
let stop = boot::main_thread();
let server =
StratumServer::start(
&SocketAddr::from_str(&service_config.listen_addr)
.unwrap_or_else(|e| panic!("Fatal: invalid listen address ({:?})", e)),
job_dispatcher.service().clone(),
service_config.secret
).unwrap_or_else(
|e| panic!("Fatal: cannot start stratum server ({:?})", e)
);
boot::host_service(service_urls::STRATUM, stop.clone(), server.clone() as Arc<PushWorkHandler>);
let _ = boot::register(STRATUM_MODULE_ID);
while !stop.load(::std::sync::atomic::Ordering::Relaxed) {
thread::park_timeout(std::time::Duration::from_millis(1000));
}
}


@ -16,97 +16,48 @@
//! Parity sync service
use nanoipc;
use ipc;
use std;
use std::sync::Arc;
use hypervisor::{SYNC_MODULE_ID, HYPERVISOR_IPC_URL};
use ctrlc::CtrlC;
use std::sync::atomic::{AtomicBool, Ordering};
use docopt::Docopt;
use ethcore::client::{RemoteClient, ChainNotify};
use ethsync::{SyncProvider, EthSync, ManageNetwork, ServiceConfiguration};
use std::thread;
use nanoipc::IpcInterface;
use modules::service_urls;
use boot;
const USAGE: &'static str = "
Ethcore sync service
Usage:
parity sync [options]
Options:
-l --logging LOGGING Specify the logging level. Must conform to the same
format as RUST_LOG.
--log-file FILENAME Specify a filename into which logging should be
directed.
--no-color Don't use terminal color codes in output.
";
#[derive(Debug, RustcDecodable)]
struct Args {
flag_logging: Option<String>,
flag_log_file: Option<String>,
flag_no_color: bool,
}
impl Args {
pub fn log_settings(&self) -> LogConfig {
LogConfig {
color: self.flag_no_color || cfg!(windows),
mode: self.flag_logging.clone(),
file: self.flag_log_file.clone(),
}
}
}
fn run_service<T: ?Sized + Send + Sync + 'static>(addr: &str, stop_guard: Arc<AtomicBool>, service: Arc<T>) where T: IpcInterface {
let socket_url = addr.to_owned();
std::thread::spawn(move || {
let mut worker = nanoipc::Worker::<T>::new(&service);
worker.add_reqrep(&socket_url).unwrap();
while !stop_guard.load(Ordering::Relaxed) {
worker.poll();
}
});
}
pub fn main() {
boot::setup_cli_logger("sync");
let service_config: ServiceConfiguration = boot::payload()
.unwrap_or_else(|e| panic!("Fatal: error reading boot arguments ({:?})", e));
let remote_client = dependency!(RemoteClient, &service_urls::with_base(&service_config.io_path, service_urls::CLIENT));
let stop = boot::main_thread();
let sync = EthSync::new(service_config.sync, remote_client.service().clone(), service_config.net).unwrap();
let _ = boot::register(
&service_urls::with_base(&service_config.io_path, HYPERVISOR_IPC_URL),
SYNC_MODULE_ID
);
boot::host_service(
&service_urls::with_base(&service_config.io_path, service_urls::SYNC),
stop.clone(),
sync.clone() as Arc<SyncProvider>
);
boot::host_service(
&service_urls::with_base(&service_config.io_path, service_urls::NETWORK_MANAGER),
stop.clone(),
sync.clone() as Arc<ManageNetwork>
);
boot::host_service(
&service_urls::with_base(&service_config.io_path, service_urls::SYNC_NOTIFY),
stop.clone(),
sync.clone() as Arc<ChainNotify>
);
while !stop.load(::std::sync::atomic::Ordering::Relaxed) {
thread::park_timeout(std::time::Duration::from_millis(1000));
}
}


@ -46,7 +46,7 @@ pub fn open(url: &str) {
}
}
#[cfg(any(target_os="macos", target_os="freebsd"))]
pub fn open(url: &str) {
use std;
let _ = std::process::Command::new("open").arg(url).spawn();


@ -17,6 +17,7 @@ jsonrpc-http-server = { git = "https://github.com/ethcore/jsonrpc-http-server.gi
ethcore-io = { path = "../util/io" }
ethcore-util = { path = "../util" }
ethcore = { path = "../ethcore" }
ethkey = { path = "../ethkey" }
ethstore = { path = "../ethstore" }
ethash = { path = "../ethash" }
ethsync = { path = "../sync" }
@ -25,13 +26,13 @@ ethcore-devtools = { path = "../devtools" }
rustc-serialize = "0.3"
transient-hashmap = "0.1"
serde_macros = { version = "0.7.0", optional = true }
clippy = { version = "0.0.85", optional = true}
json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
ethcore-ipc = { path = "../ipc/rpc" }
time = "0.1"
[build-dependencies]
serde_codegen = { version = "0.7.0", optional = true }
syntex = "*"
[features]
default = ["serde_codegen"]


@ -16,7 +16,6 @@
#[cfg(not(feature = "serde_macros"))]
mod inner {
extern crate serde_codegen;
use std::env;
@ -28,10 +27,7 @@ mod inner {
let src = Path::new("src/v1/types/mod.rs.in");
let dst = Path::new(&out_dir).join("mod.rs");
serde_codegen::expand(&src, &dst).unwrap();
}
}


@ -30,11 +30,13 @@ extern crate jsonrpc_http_server;
extern crate ethcore_util as util;
extern crate ethcore_io as io;
extern crate ethcore;
extern crate ethkey;
extern crate ethstore;
extern crate ethsync;
extern crate transient_hashmap;
extern crate json_ipc_server as ipc;
extern crate ethcore_ipc;
extern crate time;
#[cfg(test)]
extern crate ethjson;


@ -30,6 +30,7 @@ mod codes {
pub const UNSUPPORTED_REQUEST: i64 = -32000;
pub const NO_WORK: i64 = -32001;
pub const NO_AUTHOR: i64 = -32002;
pub const NO_NEW_WORK: i64 = -32003;
pub const UNKNOWN_ERROR: i64 = -32009;
pub const TRANSACTION_ERROR: i64 = -32010;
pub const ACCOUNT_LOCKED: i64 = -32020;
@ -114,6 +115,14 @@ pub fn no_work() -> Error {
}
}
pub fn no_new_work() -> Error {
Error {
code: ErrorCode::ServerError(codes::NO_NEW_WORK),
message: "Work has not changed.".into(),
data: None
}
}
pub fn no_author() -> Error {
Error {
code: ErrorCode::ServerError(codes::NO_AUTHOR),


@ -23,6 +23,7 @@ use std::process::{Command, Stdio};
use std::thread;
use std::time::{Instant, Duration};
use std::sync::{Arc, Weak};
use time::get_time;
use ethsync::{SyncProvider, SyncState};
use ethcore::miner::{MinerService, ExternalMinerService};
use jsonrpc_core::*;
@ -516,7 +517,7 @@ impl<C, S: ?Sized, M, EM> Eth for EthClient<C, S, M, EM> where
fn work(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
let (no_new_work_timeout,) = from_params::<(u64,)>(params).unwrap_or((0,));
let client = take_weak!(self.client);
// check if we're still syncing and return empty strings in that case
@ -545,7 +546,9 @@ impl<C, S: ?Sized, M, EM> Eth for EthClient<C, S, M, EM> where
let target = Ethash::difficulty_to_boundary(b.block().header().difficulty());
let seed_hash = self.seed_compute.lock().get_seedhash(b.block().header().number());
if no_new_work_timeout > 0 && b.block().header().timestamp() + no_new_work_timeout < get_time().sec as u64 {
Err(errors::no_new_work())
} else if self.options.send_block_number_in_get_work {
let block_number = RpcU256::from(b.block().header().number());
to_value(&(RpcH256::from(pow_hash), RpcH256::from(seed_hash), RpcH256::from(target), block_number))
} else {


@ -18,9 +18,10 @@
use std::sync::{Arc, Weak};
use std::str::FromStr;
use std::collections::{BTreeMap};
use util::{RotatingLogger, Address};
use util::misc::version_data;
use ethkey::{Brain, Generator};
use ethstore::random_phrase;
use ethsync::{SyncProvider, ManageNetwork};
use ethcore::miner::MinerService;
@ -213,7 +214,7 @@ impl<C, M, S: ?Sized> Ethcore for EthcoreClient<C, M, S> where M: MinerService +
fn phrase_to_address(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
from_params::<(String,)>(params).and_then(|(phrase,)|
to_value(&H160::from(Brain::new(phrase).generate().unwrap().address()))
)
}
}
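A minimal sketch of the ethkey brain-wallet API now used here (the phrase below is only an example value):

use ethkey::{Brain, Generator};
let phrase = "twelve word recovery phrase goes here".to_owned(); // example only
let keypair = Brain::new(phrase).generate().unwrap();
println!("brain wallet address: {:?}", keypair.address());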


@ -17,14 +17,15 @@
//! Account management (personal) rpc implementation
use std::sync::{Arc, Weak};
use std::collections::{BTreeMap};
use util::{Address};
use jsonrpc_core::*;
use ethkey::{Brain, Generator};
use v1::traits::Personal;
use v1::types::{H160 as RpcH160, TransactionRequest};
use v1::helpers::{errors, TransactionRequest as TRequest};
use v1::helpers::params::expect_no_params;
use v1::helpers::dispatch::unlock_sign_and_dispatch;
use ethcore::account_provider::AccountProvider;
use ethcore::client::MiningBlockChainClient;
use ethcore::miner::MinerService;
@ -94,7 +95,7 @@ impl<C: 'static, M: 'static> Personal for PersonalClient<C, M> where C: MiningBl
from_params::<(String, String, )>(params).and_then(
|(phrase, pass, )| {
let store = take_weak!(self.accounts);
match store.insert_account(*Brain::new(phrase).generate().unwrap().secret(), &pass) {
Ok(address) => to_value(&RpcH160::from(address)),
Err(e) => Err(errors::account("Could not create account.", e)),
}


@ -16,7 +16,6 @@
//! rpc integration tests.
use std::sync::Arc;
use std::str::FromStr;
use std::time::Duration;
use ethcore::client::{BlockChainClient, Client, ClientConfig};
@ -286,9 +285,7 @@ const POSITIVE_NONCE_SPEC: &'static [u8] = br#"{
#[test]
fn eth_transaction_count() {
let secret = "8a283037bb19c4fed7b1c569e40c7dcff366165eb869110a1b11532963eb9cb2".into();
let tester = EthTester::from_spec(Spec::load(TRANSACTION_COUNT_SPEC));
let address = tester.accounts.insert_account(secret, "").unwrap();
tester.accounts.unlock_account_permanently(address, "".into()).unwrap();


@ -30,6 +30,7 @@ use ethsync::SyncState;
use v1::{Eth, EthClient, EthClientOptions, EthSigning, EthSigningUnsafeClient};
use v1::tests::helpers::{TestSyncProvider, Config, TestMinerService};
use rustc_serialize::hex::ToHex;
use time::get_time;
fn blockchain_client() -> Arc<TestBlockChainClient> {
let client = TestBlockChainClient::new();
@ -818,7 +819,7 @@ fn rpc_eth_compile_serpent() {
}
#[test]
fn rpc_get_work_returns_no_work_if_cant_mine() {
let eth_tester = EthTester::default();
eth_tester.client.set_queue_size(10);
@ -829,7 +830,7 @@ fn returns_no_work_if_cant_mine() {
}
#[test]
fn rpc_get_work_returns_correct_work_package() {
let eth_tester = EthTester::default();
eth_tester.miner.set_author(Address::from_str("d46e8dd67c5d32be8058bb8eb970870f07244567").unwrap());
@ -840,7 +841,7 @@ fn returns_correct_work_package() {
}
#[test]
fn rpc_get_work_should_not_return_block_number() {
let eth_tester = EthTester::new_with_options(EthClientOptions {
allow_pending_receipt_query: true,
send_block_number_in_get_work: false,
@ -852,3 +853,36 @@ fn should_not_return_block_number() {
assert_eq!(eth_tester.io.handle_request(request), Some(response.to_owned()));
}
#[test]
fn rpc_get_work_should_timeout() {
let eth_tester = EthTester::default();
eth_tester.miner.set_author(Address::from_str("d46e8dd67c5d32be8058bb8eb970870f07244567").unwrap());
eth_tester.client.set_latest_block_timestamp(get_time().sec as u64 - 1000); // Set latest block to 1000 seconds ago
let hash = eth_tester.miner.map_sealing_work(&*eth_tester.client, |b| b.hash()).unwrap();
// Request without a timeout parameter. This should work since the timeout defaults to 0 (disabled).
let request = r#"{"jsonrpc": "2.0", "method": "eth_getWork", "params": [], "id": 1}"#;
let work_response = format!(
r#"{{"jsonrpc":"2.0","result":["0x{:?}","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000800000000000000000000000000000000000000000000000000000000000","0x01"],"id":1}}"#,
hash,
);
assert_eq!(eth_tester.io.handle_request(request), Some(work_response.to_owned()));
// Request with timeout of 0 seconds. This should work since we're disabling timeout.
let request = r#"{"jsonrpc": "2.0", "method": "eth_getWork", "params": ["0"], "id": 1}"#;
let work_response = format!(
r#"{{"jsonrpc":"2.0","result":["0x{:?}","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000800000000000000000000000000000000000000000000000000000000000","0x01"],"id":1}}"#,
hash,
);
assert_eq!(eth_tester.io.handle_request(request), Some(work_response.to_owned()));
// Request with timeout of 10K seconds. This should work.
let request = r#"{"jsonrpc": "2.0", "method": "eth_getWork", "params": ["10000"], "id": 1}"#;
assert_eq!(eth_tester.io.handle_request(request), Some(work_response.to_owned()));
// Request with timeout of 10 seconds. This should fail.
let request = r#"{"jsonrpc": "2.0", "method": "eth_getWork", "params": ["10"], "id": 1}"#;
let err_response = r#"{"jsonrpc":"2.0","error":{"code":-32003,"message":"Work has not changed.","data":null},"id":1}"#;
assert_eq!(eth_tester.io.handle_request(request), Some(err_response.to_owned()));
}


@ -99,6 +99,7 @@ pub struct VMOperation {
/// Information concerning the execution of the operation.
pub ex: Option<VMExecutedOperation>,
/// Subordinate trace of the CALL/CREATE if applicable.
#[serde(bound="VMTrace: Serialize")]
pub sub: Option<VMTrace>,
}


@ -21,7 +21,7 @@ ethcore-io = { path = "../util/io" }
ethcore-rpc = { path = "../rpc" }
parity-dapps-signer = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4", optional = true}
clippy = { version = "0.0.85", optional = true}
[features]
dev = ["clippy"]


@ -4,6 +4,10 @@ name = "ethcore-stratum"
version = "1.4.0"
license = "GPL-3.0"
authors = ["Ethcore <admin@ethcore.io>"]
build = "build.rs"
[build-dependencies]
ethcore-ipc-codegen = { path = "../ipc/codegen" }
[dependencies]
log = "0.3"
@ -14,6 +18,9 @@ ethcore-util = { path = "../util" }
ethcore-devtools = { path = "../devtools" }
lazy_static = "0.2"
env_logger = "0.3"
ethcore-ipc = { path = "../ipc/rpc" }
semver = "0.2"
ethcore-ipc-nano = { path = "../ipc/nano" }
[profile.release]
debug = true


@ -14,4 +14,8 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
extern crate ethcore_ipc_codegen;
fn main() {
ethcore_ipc_codegen::derive_ipc("src/traits.rs").unwrap();
}


@ -20,6 +20,8 @@ extern crate json_tcp_server;
extern crate jsonrpc_core;
#[macro_use] extern crate log;
extern crate ethcore_util as util;
extern crate ethcore_ipc as ipc;
extern crate semver;
#[cfg(test)]
extern crate mio;
@ -31,9 +33,16 @@ extern crate env_logger;
#[macro_use]
extern crate lazy_static;
mod traits {
//! Stratum ipc interfaces specification
#![allow(dead_code, unused_assignments, unused_variables, missing_docs)] // codegen issues
include!(concat!(env!("OUT_DIR"), "/traits.rs"));
}
pub use traits::{
JobDispatcher, PushWorkHandler, Error, ServiceConfiguration,
RemoteWorkHandler, RemoteJobDispatcher,
};
use json_tcp_server::Server as JsonRpcServer;
use jsonrpc_core::{IoHandler, Params, IoDelegate, to_value, from_params};
@ -133,8 +142,8 @@ impl Stratum {
let mut job_que = self.job_que.write();
let workers = self.workers.read();
for socket_addr in job_que.drain() {
if let Some(worker_id) = workers.get(&socket_addr) {
let job_payload = self.dispatcher.job(worker_id.to_owned());
job_payload.map(
|json| self.rpc_server.push_message(&socket_addr, json.as_bytes())
);

Some files were not shown because too many files have changed in this diff.