Merge branch 'master' into auth-round-no-mocknet

This commit is contained in:
keorn 2016-10-28 17:53:23 +01:00
commit 7976f46231
105 changed files with 1844 additions and 671 deletions

View File

@ -136,10 +136,15 @@ linux-armv7:
stage: build stage: build
image: ethcore/rust-armv7:latest image: ethcore/rust-armv7:latest
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
script: script:
- export CC=arm-linux-gnueabihf-gcc
- export CXX=arm-linux-gnueabihf-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo - rm -rf .cargo
- mkdir -p .cargo - mkdir -p .cargo
- echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config - echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config
@ -171,10 +176,15 @@ linux-arm:
stage: build stage: build
image: ethcore/rust-arm:latest image: ethcore/rust-arm:latest
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
script: script:
- export CC=arm-linux-gnueabihf-gcc
- export CXX=arm-linux-gnueabihf-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo - rm -rf .cargo
- mkdir -p .cargo - mkdir -p .cargo
- echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config - echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config
@ -210,6 +220,10 @@ linux-armv6:
- tags - tags
- stable - stable
script: script:
- export CC=arm-linux-gnueabi-gcc
- export CXX=arm-linux-gnueabi-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo - rm -rf .cargo
- mkdir -p .cargo - mkdir -p .cargo
- echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config - echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config
@ -234,10 +248,15 @@ linux-aarch64:
stage: build stage: build
image: ethcore/rust-aarch64:latest image: ethcore/rust-aarch64:latest
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
script: script:
- export CC=aarch64-linux-gnu-gcc
- export CXX=aarch64-linux-gnu-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo - rm -rf .cargo
- mkdir -p .cargo - mkdir -p .cargo
- echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config - echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config
@ -274,6 +293,7 @@ darwin:
- stable - stable
script: script:
- cargo build --release --verbose - cargo build --release --verbose
- rm -rf parity.md5
- md5sum target/release/parity >> parity.md5 - md5sum target/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key - aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret - aws configure set aws_secret_access_key $s3_secret
@ -344,6 +364,28 @@ test-linux:
- rust-test - rust-test
dependencies: dependencies:
- linux-stable - linux-stable
test-darwin:
stage: test
before_script:
- git submodule update --init --recursive
script:
- export RUST_BACKTRACE=1
- ./test.sh --verbose
tags:
- osx
dependencies:
- darwin
test-windows:
stage: test
before_script:
- git submodule update --init --recursive
script:
- export RUST_BACKTRACE=1
- ./test.sh --verbose
tags:
- rust-windows
dependencies:
- windows
js-release: js-release:
stage: build stage: build
image: ethcore/javascript:latest image: ethcore/javascript:latest

36
Cargo.lock generated
View File

@ -3,7 +3,7 @@ name = "parity"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)", "ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
@ -145,15 +145,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "clippy" name = "clippy"
version = "0.0.90" version = "0.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"clippy_lints 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy_lints 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "clippy_lints" name = "clippy_lints"
version = "0.0.90" version = "0.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -279,7 +279,7 @@ dependencies = [
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.4.0", "ethash 1.4.0",
@ -298,7 +298,7 @@ dependencies = [
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)", "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.0.7 (git+https://github.com/contain-rs/lru-cache)", "lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -307,6 +307,7 @@ dependencies = [
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"transient-hashmap 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -330,7 +331,7 @@ dependencies = [
name = "ethcore-dapps" name = "ethcore-dapps"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
@ -473,7 +474,7 @@ dependencies = [
name = "ethcore-rpc" name = "ethcore-rpc"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.4.0", "ethash 1.4.0",
"ethcore 1.4.0", "ethcore 1.4.0",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
@ -503,7 +504,7 @@ dependencies = [
name = "ethcore-signer" name = "ethcore-signer"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-io 1.4.0", "ethcore-io 1.4.0",
@ -542,7 +543,7 @@ version = "1.4.0"
dependencies = [ dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)", "elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)", "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
@ -554,6 +555,7 @@ dependencies = [
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)",
@ -631,7 +633,7 @@ dependencies = [
name = "ethsync" name = "ethsync"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.4.0", "ethcore 1.4.0",
"ethcore-io 1.4.0", "ethcore-io 1.4.0",
@ -906,8 +908,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "lru-cache" name = "lru-cache"
version = "0.0.7" version = "0.1.0"
source = "git+https://github.com/contain-rs/lru-cache#13255e33c45ceb69a4b143f235a4322df5fb580e" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1211,7 +1213,7 @@ dependencies = [
[[package]] [[package]]
name = "parity-ui-precompiled" name = "parity-ui-precompiled"
version = "1.4.0" version = "1.4.0"
source = "git+https://github.com/ethcore/js-precompiled.git#9f8baa9d0e54056c41a842b351597d0565beda98" source = "git+https://github.com/ethcore/js-precompiled.git#ba726039185238d6fd604f092b089a7d52c0f436"
dependencies = [ dependencies = [
"parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1948,8 +1950,8 @@ dependencies = [
"checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27" "checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27"
"checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>" "checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>"
"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c" "checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
"checksum clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "d19bda68c3db98e3a780342f6101b44312fef20a5f13ce756d1202a35922b01b" "checksum clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)" = "6eacf01b0aad84a0817703498f72d252df7c0faf6a5b86d0be4265f1829e459f"
"checksum clippy_lints 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "3d4ed67c69b9bb35169be2538691d290a3aa0cbfd4b9f0bfb7c221fc1d399a96" "checksum clippy_lints 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)" = "a49960c9aab544ce86b004dcb61620e8b898fea5fc0f697a028f460f48221ed6"
"checksum cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90266f45846f14a1e986c77d1e9c2626b8c342ed806fe60241ec38cc8697b245" "checksum cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90266f45846f14a1e986c77d1e9c2626b8c342ed806fe60241ec38cc8697b245"
"checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc" "checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc"
"checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "<none>" "checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "<none>"
@ -1986,7 +1988,7 @@ dependencies = [
"checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48" "checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48"
"checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd" "checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd"
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054" "checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
"checksum lru-cache 0.0.7 (git+https://github.com/contain-rs/lru-cache)" = "<none>" "checksum lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "656fa4dfcb02bcf1063c592ba3ff6a5303ee1f2afe98c8a889e8b1a77c6dfdb7"
"checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e" "checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e"
"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" "checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
"checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2" "checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2"

View File

@ -46,7 +46,7 @@ ethcore-logger = { path = "logger" }
rlp = { path = "util/rlp" } rlp = { path = "util/rlp" }
ethcore-stratum = { path = "stratum" } ethcore-stratum = { path = "stratum" }
ethcore-dapps = { path = "dapps", optional = true } ethcore-dapps = { path = "dapps", optional = true }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
winapi = "0.2" winapi = "0.2"

View File

@ -33,7 +33,7 @@ fetch = { path = "../util/fetch" }
parity-ui = { path = "./ui" } parity-ui = { path = "./ui" }
mime_guess = { version = "1.6.1" } mime_guess = { version = "1.6.1" }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
[build-dependencies] [build-dependencies]
serde_codegen = { version = "0.8", optional = true } serde_codegen = { version = "0.8", optional = true }

View File

@ -11,7 +11,7 @@ build = "build.rs"
ethcore-ipc-codegen = { path = "../ipc/codegen" } ethcore-ipc-codegen = { path = "../ipc/codegen" }
[dependencies] [dependencies]
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }
ethcore-ipc = { path = "../ipc/rpc" } ethcore-ipc = { path = "../ipc/rpc" }
rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" } rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }

View File

@ -69,6 +69,10 @@ impl EthashManager {
Some(ref e) if *e == epoch => lights.recent.clone(), Some(ref e) if *e == epoch => lights.recent.clone(),
_ => match lights.prev_epoch.clone() { _ => match lights.prev_epoch.clone() {
Some(e) if e == epoch => { Some(e) if e == epoch => {
// don't swap if recent is newer.
if lights.recent_epoch > lights.prev_epoch {
None
} else {
// swap // swap
let t = lights.prev_epoch; let t = lights.prev_epoch;
lights.prev_epoch = lights.recent_epoch; lights.prev_epoch = lights.recent_epoch;
@ -78,6 +82,7 @@ impl EthashManager {
lights.recent = t; lights.recent = t;
lights.recent.clone() lights.recent.clone()
} }
}
_ => None, _ => None,
}, },
}; };

View File

@ -24,8 +24,11 @@ rayon = "0.4.2"
semver = "0.2" semver = "0.2"
bit-set = "0.4" bit-set = "0.4"
time = "0.1" time = "0.1"
rand = "0.3"
byteorder = "0.5"
transient-hashmap = "0.1"
evmjit = { path = "../evmjit", optional = true } evmjit = { path = "../evmjit", optional = true }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
ethash = { path = "../ethash" } ethash = { path = "../ethash" }
ethcore-util = { path = "../util" } ethcore-util = { path = "../util" }
ethcore-io = { path = "../util/io" } ethcore-io = { path = "../util/io" }
@ -36,10 +39,8 @@ ethstore = { path = "../ethstore" }
ethkey = { path = "../ethkey" } ethkey = { path = "../ethkey" }
ethcore-ipc-nano = { path = "../ipc/nano" } ethcore-ipc-nano = { path = "../ipc/nano" }
rlp = { path = "../util/rlp" } rlp = { path = "../util/rlp" }
rand = "0.3" lru-cache = "0.1.0"
lru-cache = { git = "https://github.com/contain-rs/lru-cache" }
ethcore-bloom-journal = { path = "../util/bloom" } ethcore-bloom-journal = { path = "../util/bloom" }
byteorder = "0.5"
[dependencies.hyper] [dependencies.hyper]
git = "https://github.com/ethcore/hyper" git = "https://github.com/ethcore/hyper"

View File

@ -39,10 +39,18 @@
"stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544" "stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
}, },
"nodes": [ "nodes": [
"enode://08c7ee6a4f861ff0664a49532bcc86de1363acd608999d1b76609bb9bc278649906f069057630fd9493924a368b5d1dc9b8f8bf13ac26df72512f6d1fabd8c95@45.32.7.81:30303",
"enode://e809c4a2fec7daed400e5e28564e23693b23b2cc5a019b612505631bbe7b9ccf709c1796d2a3d29ef2b045f210caf51e3c4f5b6d3587d43ad5d6397526fa6179@174.112.32.157:30303", "enode://e809c4a2fec7daed400e5e28564e23693b23b2cc5a019b612505631bbe7b9ccf709c1796d2a3d29ef2b045f210caf51e3c4f5b6d3587d43ad5d6397526fa6179@174.112.32.157:30303",
"enode://687be94c3a7beaa3d2fde82fa5046cdeb3e8198354e05b29d6e0d4e276713e3707ac10f784a7904938b06b46c764875c241b0337dd853385a4d8bfcbf8190647@95.183.51.229:30303", "enode://687be94c3a7beaa3d2fde82fa5046cdeb3e8198354e05b29d6e0d4e276713e3707ac10f784a7904938b06b46c764875c241b0337dd853385a4d8bfcbf8190647@95.183.51.229:30303",
"enode://6e538e7c1280f0a31ff08b382db5302480f775480b8e68f8febca0ceff81e4b19153c6f8bf60313b93bef2cc34d34e1df41317de0ce613a201d1660a788a03e2@52.206.67.235:30303", "enode://6e538e7c1280f0a31ff08b382db5302480f775480b8e68f8febca0ceff81e4b19153c6f8bf60313b93bef2cc34d34e1df41317de0ce613a201d1660a788a03e2@52.206.67.235:30303",
"enode://217ebe27e89bf4fec8ce06509323ff095b1014378deb75ab2e5f6759a4e8750a3bd8254b8c6833136e4d5e58230d65ee8ab34a5db5abf0640408c4288af3c8a7@188.138.1.237:30303" "enode://ca5ae4eca09ba6787e29cf6d86f7634d07aae6b9e6317a59aff675851c0bf445068173208cf8ef7f5cd783d8e29b85b2fa3fa358124cf0546823149724f9bde1@138.68.1.16:30303",
"enode://217ebe27e89bf4fec8ce06509323ff095b1014378deb75ab2e5f6759a4e8750a3bd8254b8c6833136e4d5e58230d65ee8ab34a5db5abf0640408c4288af3c8a7@188.138.1.237:30303",
"enode://fa20444ef991596ce99b81652ac4e61de1eddc4ff21d3cd42762abd7ed47e7cf044d3c9ccddaf6035d39725e4eb372806787829ccb9a08ec7cb71883cb8c3abd@50.149.116.182:30303",
"enode://4bd6a4df3612c718333eb5ea7f817923a8cdf1bed89cee70d1710b45a0b6b77b2819846440555e451a9b602ad2efa2d2facd4620650249d8468008946887820a@71.178.232.20:30304",
"enode://921cf8e4c345fe8db913c53964f9cadc667644e7f20195a0b7d877bd689a5934e146ff2c2259f1bae6817b6585153a007ceb67d260b720fa3e6fc4350df25c7f@51.255.49.170:30303",
"enode://ffea3b01c000cdd89e1e9229fea3e80e95b646f9b2aa55071fc865e2f19543c9b06045cc2e69453e6b78100a119e66be1b5ad50b36f2ffd27293caa28efdd1b2@128.199.93.177:3030",
"enode://ee3da491ce6a155eb132708eb0e8d04b0637926ec0ae1b79e63fc97cb9fc3818f49250a0ae0d7f79ed62b66ec677f408c4e01741504dc7a051e274f1e803d454@91.121.65.179:40404",
"enode://48e063a6cf5f335b1ef2ed98126bf522cf254396f850c7d442fe2edbbc23398787e14cd4de7968a00175a82762de9cbe9e1407d8ccbcaeca5004d65f8398d759@159.203.255.59:30303"
], ],
"accounts": { "accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } }, "0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },

View File

@ -158,9 +158,17 @@
"stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544" "stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
}, },
"nodes": [ "nodes": [
"enode://cd6611461840543d5b9c56fbf088736154c699c43973b3a1a32390cf27106f87e58a818a606ccb05f3866de95a4fe860786fea71bf891ea95f234480d3022aa3@136.243.154.245:30303", "enode://efe4f2493f4aff2d641b1db8366b96ddacfe13e7a6e9c8f8f8cf49f9cdba0fdf3258d8c8f8d0c5db529f8123c8f1d95f36d54d590ca1bb366a5818b9a4ba521c@163.172.187.252:30303",
"enode://cd6611461840543d5b9c56fbf088736154c699c43973b3a1a32390cf27106f87e58a818a606ccb05f3866de95a4fe860786fea71bf891ea95f234480d3022aa3@163.172.157.114:30303",
"enode://bcc7240543fe2cf86f5e9093d05753dd83343f8fda7bf0e833f65985c73afccf8f981301e13ef49c4804491eab043647374df1c4adf85766af88a624ecc3330e@136.243.154.244:30303", "enode://bcc7240543fe2cf86f5e9093d05753dd83343f8fda7bf0e833f65985c73afccf8f981301e13ef49c4804491eab043647374df1c4adf85766af88a624ecc3330e@136.243.154.244:30303",
"enode://ed4227681ca8c70beb2277b9e870353a9693f12e7c548c35df6bca6a956934d6f659999c2decb31f75ce217822eefca149ace914f1cbe461ed5a2ebaf9501455@88.212.206.70:30303", "enode://ed4227681ca8c70beb2277b9e870353a9693f12e7c548c35df6bca6a956934d6f659999c2decb31f75ce217822eefca149ace914f1cbe461ed5a2ebaf9501455@88.212.206.70:30303",
"enode://cadc6e573b6bc2a9128f2f635ac0db3353e360b56deef239e9be7e7fce039502e0ec670b595f6288c0d2116812516ad6b6ff8d5728ff45eba176989e40dead1e@37.128.191.230:30303",
"enode://595a9a06f8b9bc9835c8723b6a82105aea5d55c66b029b6d44f229d6d135ac3ecdd3e9309360a961ea39d7bee7bac5d03564077a4e08823acc723370aace65ec@46.20.235.22:30303",
"enode://029178d6d6f9f8026fc0bc17d5d1401aac76ec9d86633bba2320b5eed7b312980c0a210b74b20c4f9a8b0b2bf884b111fa9ea5c5f916bb9bbc0e0c8640a0f56c@216.158.85.185:30303",
"enode://84f5d5957b4880a8b0545e32e05472318898ad9fc8ebe1d56c90c12334a98e12351eccfdf3a2bf72432ac38b57e9d348400d17caa083879ade3822390f89773f@10.1.52.78:30303",
"enode://f90dc9b9bf7b8db97726b7849e175f1eb2707f3d8f281c929336e398dd89b0409fc6aeceb89e846278e9d3ecc3857cebfbe6758ff352ece6fe5d42921ee761db@10.1.173.87:30303",
"enode://6a868ced2dec399c53f730261173638a93a40214cf299ccf4d42a76e3fa54701db410669e8006347a4b3a74fa090bb35af0320e4bc8d04cf5b7f582b1db285f5@10.3.149.199:30303",
"enode://fdd1b9bb613cfbc200bba17ce199a9490edc752a833f88d4134bf52bb0d858aa5524cb3ec9366c7a4ef4637754b8b15b5dc913e4ed9fdb6022f7512d7b63f181@212.47.247.103:30303",
"enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303", "enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303",
"enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303", "enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303",
"enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303", "enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303",

View File

@ -267,7 +267,7 @@ impl AccountProvider {
/// Returns `true` if the password for `account` is `password`. `false` if not. /// Returns `true` if the password for `account` is `password`. `false` if not.
pub fn test_password(&self, account: &Address, password: String) -> Result<bool, Error> { pub fn test_password(&self, account: &Address, password: String) -> Result<bool, Error> {
match self.sstore.sign(&account, &password, &Default::default()) { match self.sstore.sign(account, &password, &Default::default()) {
Ok(_) => Ok(true), Ok(_) => Ok(true),
Err(SSError::InvalidPassword) => Ok(false), Err(SSError::InvalidPassword) => Ok(false),
Err(e) => Err(Error::SStore(e)), Err(e) => Err(Error::SStore(e)),
@ -276,7 +276,7 @@ impl AccountProvider {
/// Changes the password of `account` from `password` to `new_password`. Fails if incorrect `password` given. /// Changes the password of `account` from `password` to `new_password`. Fails if incorrect `password` given.
pub fn change_password(&self, account: &Address, password: String, new_password: String) -> Result<(), Error> { pub fn change_password(&self, account: &Address, password: String, new_password: String) -> Result<(), Error> {
self.sstore.change_password(&account, &password, &new_password).map_err(Error::SStore) self.sstore.change_password(account, &password, &new_password).map_err(Error::SStore)
} }
/// Helper method used for unlocking accounts. /// Helper method used for unlocking accounts.

View File

@ -542,7 +542,7 @@ pub fn enact(
Ok(b.close_and_lock()) Ok(b.close_and_lock())
} }
#[inline(always)] #[inline]
#[cfg(not(feature = "slow-blocks"))] #[cfg(not(feature = "slow-blocks"))]
fn push_transactions(block: &mut OpenBlock, transactions: &[SignedTransaction]) -> Result<(), Error> { fn push_transactions(block: &mut OpenBlock, transactions: &[SignedTransaction]) -> Result<(), Error> {
for t in transactions { for t in transactions {

View File

@ -196,6 +196,7 @@ pub struct BlockChain {
pending_best_block: RwLock<Option<BestBlock>>, pending_best_block: RwLock<Option<BestBlock>>,
pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>, pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>,
pending_block_details: RwLock<HashMap<H256, BlockDetails>>,
pending_transaction_addresses: RwLock<HashMap<H256, Option<TransactionAddress>>>, pending_transaction_addresses: RwLock<HashMap<H256, Option<TransactionAddress>>>,
} }
@ -414,6 +415,7 @@ impl<'a> Iterator for AncestryIter<'a> {
} }
impl BlockChain { impl BlockChain {
#[cfg_attr(feature="dev", allow(useless_let_if_seq))]
/// Create new instance of blockchain from given Genesis /// Create new instance of blockchain from given Genesis
pub fn new(config: Config, genesis: &[u8], db: Arc<Database>) -> BlockChain { pub fn new(config: Config, genesis: &[u8], db: Arc<Database>) -> BlockChain {
// 400 is the avarage size of the key // 400 is the avarage size of the key
@ -438,6 +440,7 @@ impl BlockChain {
cache_man: Mutex::new(cache_man), cache_man: Mutex::new(cache_man),
pending_best_block: RwLock::new(None), pending_best_block: RwLock::new(None),
pending_block_hashes: RwLock::new(HashMap::new()), pending_block_hashes: RwLock::new(HashMap::new()),
pending_block_details: RwLock::new(HashMap::new()),
pending_transaction_addresses: RwLock::new(HashMap::new()), pending_transaction_addresses: RwLock::new(HashMap::new()),
}; };
@ -565,7 +568,7 @@ impl BlockChain {
let range = extras.number as bc::Number .. extras.number as bc::Number; let range = extras.number as bc::Number .. extras.number as bc::Number;
let chain = bc::group::BloomGroupChain::new(self.blooms_config, self); let chain = bc::group::BloomGroupChain::new(self.blooms_config, self);
let changes = chain.replace(&range, vec![]); let changes = chain.replace(&range, vec![]);
for (k, v) in changes.into_iter() { for (k, v) in changes {
batch.write(db::COL_EXTRA, &LogGroupPosition::from(k), &BloomGroup::from(v)); batch.write(db::COL_EXTRA, &LogGroupPosition::from(k), &BloomGroup::from(v));
} }
batch.put(db::COL_EXTRA, b"best", &hash); batch.put(db::COL_EXTRA, b"best", &hash);
@ -789,11 +792,10 @@ impl BlockChain {
/// the chain and the child's parent is this block. /// the chain and the child's parent is this block.
/// ///
/// Used in snapshots to glue the chunks together at the end. /// Used in snapshots to glue the chunks together at the end.
pub fn add_child(&self, block_hash: H256, child_hash: H256) { pub fn add_child(&self, batch: &mut DBTransaction, block_hash: H256, child_hash: H256) {
let mut parent_details = self.block_details(&block_hash) let mut parent_details = self.block_details(&block_hash)
.unwrap_or_else(|| panic!("Invalid block hash: {:?}", block_hash)); .unwrap_or_else(|| panic!("Invalid block hash: {:?}", block_hash));
let mut batch = self.db.transaction();
parent_details.children.push(child_hash); parent_details.children.push(child_hash);
let mut update = HashMap::new(); let mut update = HashMap::new();
@ -804,8 +806,6 @@ impl BlockChain {
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update, CacheUpdatePolicy::Overwrite); batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update, CacheUpdatePolicy::Overwrite);
self.cache_man.lock().note_used(CacheID::BlockDetails(block_hash)); self.cache_man.lock().note_used(CacheID::BlockDetails(block_hash));
self.db.write(batch).unwrap();
} }
#[cfg_attr(feature="dev", allow(similar_names))] #[cfg_attr(feature="dev", allow(similar_names))]
@ -894,17 +894,6 @@ impl BlockChain {
/// Prepares extras update. /// Prepares extras update.
fn prepare_update(&self, batch: &mut DBTransaction, update: ExtrasUpdate, is_best: bool) { fn prepare_update(&self, batch: &mut DBTransaction, update: ExtrasUpdate, is_best: bool) {
{
let block_hashes: Vec<_> = update.block_details.keys().cloned().collect();
let mut write_details = self.block_details.write();
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update.block_details, CacheUpdatePolicy::Overwrite);
let mut cache_man = self.cache_man.lock();
for hash in block_hashes {
cache_man.note_used(CacheID::BlockDetails(hash));
}
}
{ {
let mut write_receipts = self.block_receipts.write(); let mut write_receipts = self.block_receipts.write();
@ -916,7 +905,7 @@ impl BlockChain {
batch.extend_with_cache(db::COL_EXTRA, &mut *write_blocks_blooms, update.blocks_blooms, CacheUpdatePolicy::Remove); batch.extend_with_cache(db::COL_EXTRA, &mut *write_blocks_blooms, update.blocks_blooms, CacheUpdatePolicy::Remove);
} }
// These cached values must be updated last with all three locks taken to avoid // These cached values must be updated last with all four locks taken to avoid
// cache decoherence // cache decoherence
{ {
let mut best_block = self.pending_best_block.write(); let mut best_block = self.pending_best_block.write();
@ -934,8 +923,10 @@ impl BlockChain {
}, },
} }
let mut write_hashes = self.pending_block_hashes.write(); let mut write_hashes = self.pending_block_hashes.write();
let mut write_details = self.pending_block_details.write();
let mut write_txs = self.pending_transaction_addresses.write(); let mut write_txs = self.pending_transaction_addresses.write();
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update.block_details, CacheUpdatePolicy::Overwrite);
batch.extend_with_cache(db::COL_EXTRA, &mut *write_hashes, update.block_hashes, CacheUpdatePolicy::Overwrite); batch.extend_with_cache(db::COL_EXTRA, &mut *write_hashes, update.block_hashes, CacheUpdatePolicy::Overwrite);
batch.extend_with_option_cache(db::COL_EXTRA, &mut *write_txs, update.transactions_addresses, CacheUpdatePolicy::Overwrite); batch.extend_with_option_cache(db::COL_EXTRA, &mut *write_txs, update.transactions_addresses, CacheUpdatePolicy::Overwrite);
} }
@ -945,9 +936,11 @@ impl BlockChain {
pub fn commit(&self) { pub fn commit(&self) {
let mut pending_best_block = self.pending_best_block.write(); let mut pending_best_block = self.pending_best_block.write();
let mut pending_write_hashes = self.pending_block_hashes.write(); let mut pending_write_hashes = self.pending_block_hashes.write();
let mut pending_block_details = self.pending_block_details.write();
let mut pending_write_txs = self.pending_transaction_addresses.write(); let mut pending_write_txs = self.pending_transaction_addresses.write();
let mut best_block = self.best_block.write(); let mut best_block = self.best_block.write();
let mut write_block_details = self.block_details.write();
let mut write_hashes = self.block_hashes.write(); let mut write_hashes = self.block_hashes.write();
let mut write_txs = self.transaction_addresses.write(); let mut write_txs = self.transaction_addresses.write();
// update best block // update best block
@ -960,9 +953,11 @@ impl BlockChain {
let pending_hashes_keys: Vec<_> = pending_write_hashes.keys().cloned().collect(); let pending_hashes_keys: Vec<_> = pending_write_hashes.keys().cloned().collect();
let enacted_txs_keys: Vec<_> = enacted_txs.keys().cloned().collect(); let enacted_txs_keys: Vec<_> = enacted_txs.keys().cloned().collect();
let pending_block_hashes: Vec<_> = pending_block_details.keys().cloned().collect();
write_hashes.extend(mem::replace(&mut *pending_write_hashes, HashMap::new())); write_hashes.extend(mem::replace(&mut *pending_write_hashes, HashMap::new()));
write_txs.extend(enacted_txs.into_iter().map(|(k, v)| (k, v.expect("Transactions were partitioned; qed")))); write_txs.extend(enacted_txs.into_iter().map(|(k, v)| (k, v.expect("Transactions were partitioned; qed"))));
write_block_details.extend(mem::replace(&mut *pending_block_details, HashMap::new()));
for hash in retracted_txs.keys() { for hash in retracted_txs.keys() {
write_txs.remove(hash); write_txs.remove(hash);
@ -976,6 +971,10 @@ impl BlockChain {
for hash in enacted_txs_keys { for hash in enacted_txs_keys {
cache_man.note_used(CacheID::TransactionAddresses(hash)); cache_man.note_used(CacheID::TransactionAddresses(hash));
} }
for hash in pending_block_hashes {
cache_man.note_used(CacheID::BlockDetails(hash));
}
} }
/// Iterator that lists `first` and then all of `first`'s ancestors, by hash. /// Iterator that lists `first` and then all of `first`'s ancestors, by hash.
@ -1296,6 +1295,11 @@ impl BlockChain {
ancient_block_number: best_ancient_block.as_ref().map(|b| b.number), ancient_block_number: best_ancient_block.as_ref().map(|b| b.number),
} }
} }
#[cfg(test)]
pub fn db(&self) -> &Arc<Database> {
&self.db
}
} }
#[cfg(test)] #[cfg(test)]

View File

@ -66,7 +66,7 @@ impl<T> CacheManager<T> where T: Eq + Hash {
} }
fn rotate_cache_if_needed(&mut self) { fn rotate_cache_if_needed(&mut self) {
if self.cache_usage.len() == 0 { return } if self.cache_usage.is_empty() { return }
if self.cache_usage[0].len() * self.bytes_per_cache_entry > self.pref_cache_size / COLLECTION_QUEUE_SIZE { if self.cache_usage[0].len() * self.bytes_per_cache_entry > self.pref_cache_size / COLLECTION_QUEUE_SIZE {
if let Some(cache) = self.cache_usage.pop_back() { if let Some(cache) = self.cache_usage.pop_back() {

View File

@ -60,12 +60,13 @@ use receipt::LocalizedReceipt;
use trace::{TraceDB, ImportRequest as TraceImportRequest, LocalizedTrace, Database as TraceDatabase}; use trace::{TraceDB, ImportRequest as TraceImportRequest, LocalizedTrace, Database as TraceDatabase};
use trace; use trace;
use trace::FlatTransactionTraces; use trace::FlatTransactionTraces;
use evm::Factory as EvmFactory; use evm::{Factory as EvmFactory, Schedule};
use miner::{Miner, MinerService}; use miner::{Miner, MinerService};
use snapshot::{self, io as snapshot_io}; use snapshot::{self, io as snapshot_io};
use factory::Factories; use factory::Factories;
use rlp::{View, UntrustedRlp}; use rlp::{View, UntrustedRlp};
use state_db::StateDB; use state_db::StateDB;
use rand::OsRng;
// re-export // re-export
pub use types::blockchain_info::BlockChainInfo; pub use types::blockchain_info::BlockChainInfo;
@ -144,6 +145,7 @@ pub struct Client {
last_hashes: RwLock<VecDeque<H256>>, last_hashes: RwLock<VecDeque<H256>>,
factories: Factories, factories: Factories,
history: u64, history: u64,
rng: Mutex<OsRng>,
} }
impl Client { impl Client {
@ -239,6 +241,7 @@ impl Client {
last_hashes: RwLock::new(VecDeque::new()), last_hashes: RwLock::new(VecDeque::new()),
factories: factories, factories: factories,
history: history, history: history,
rng: Mutex::new(try!(OsRng::new().map_err(::util::UtilError::StdIo))),
}; };
Ok(Arc::new(client)) Ok(Arc::new(client))
} }
@ -314,7 +317,7 @@ impl Client {
if let Some(parent) = chain_has_parent { if let Some(parent) = chain_has_parent {
// Enact Verified Block // Enact Verified Block
let last_hashes = self.build_last_hashes(header.parent_hash().clone()); let last_hashes = self.build_last_hashes(header.parent_hash().clone());
let db = self.state_db.lock().boxed_clone_canon(&header.parent_hash()); let db = self.state_db.lock().boxed_clone_canon(header.parent_hash());
let enact_result = enact_verified(block, engine, self.tracedb.read().tracing_enabled(), db, &parent, last_hashes, self.factories.clone()); let enact_result = enact_verified(block, engine, self.tracedb.read().tracing_enabled(), db, &parent, last_hashes, self.factories.clone());
let locked_block = try!(enact_result.map_err(|e| { let locked_block = try!(enact_result.map_err(|e| {
@ -434,14 +437,26 @@ impl Client {
/// Import a block with transaction receipts. /// Import a block with transaction receipts.
/// The block is guaranteed to be the next best blocks in the first block sequence. /// The block is guaranteed to be the next best blocks in the first block sequence.
/// Does no sealing or transaction validation. /// Does no sealing or transaction validation.
fn import_old_block(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> H256 { fn import_old_block(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> Result<H256, ::error::Error> {
let block = BlockView::new(&block_bytes); let block = BlockView::new(&block_bytes);
let hash = block.header().hash(); let header = block.header();
let hash = header.hash();
let _import_lock = self.import_lock.lock(); let _import_lock = self.import_lock.lock();
{ {
let _timer = PerfTimer::new("import_old_block"); let _timer = PerfTimer::new("import_old_block");
let mut rng = self.rng.lock();
let chain = self.chain.read(); let chain = self.chain.read();
// verify block.
try!(::snapshot::verify_old_block(
&mut *rng,
&header,
&*self.engine,
&*chain,
Some(&block_bytes),
false,
));
// Commit results // Commit results
let receipts = ::rlp::decode(&receipts_bytes); let receipts = ::rlp::decode(&receipts_bytes);
let mut batch = DBTransaction::new(&self.db.read()); let mut batch = DBTransaction::new(&self.db.read());
@ -451,7 +466,7 @@ impl Client {
chain.commit(); chain.commit();
} }
self.db.read().flush().expect("DB flush failed."); self.db.read().flush().expect("DB flush failed.");
hash Ok(hash)
} }
fn commit_block<B>(&self, block: B, hash: &H256, block_data: &[u8]) -> ImportRoute where B: IsBlock + Drain { fn commit_block<B>(&self, block: B, hash: &H256, block_data: &[u8]) -> ImportRoute where B: IsBlock + Drain {
@ -1042,7 +1057,7 @@ impl BlockChainClient for Client {
return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash()))); return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash())));
} }
} }
Ok(self.import_old_block(block_bytes, receipts_bytes)) self.import_old_block(block_bytes, receipts_bytes).map_err(Into::into)
} }
fn queue_info(&self) -> BlockQueueInfo { fn queue_info(&self) -> BlockQueueInfo {
@ -1145,6 +1160,23 @@ impl BlockChainClient for Client {
} }
impl MiningBlockChainClient for Client { impl MiningBlockChainClient for Client {
fn latest_schedule(&self) -> Schedule {
let header_data = self.best_block_header();
let view = HeaderView::new(&header_data);
let env_info = EnvInfo {
number: view.number(),
author: view.author(),
timestamp: view.timestamp(),
difficulty: view.difficulty(),
last_hashes: self.build_last_hashes(view.hash()),
gas_used: U256::default(),
gas_limit: view.gas_limit(),
};
self.engine.schedule(&env_info)
}
fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock { fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock {
let engine = &*self.engine; let engine = &*self.engine;
let chain = self.chain.read(); let chain = self.chain.read();
@ -1220,3 +1252,33 @@ impl MayPanic for Client {
self.panic_handler.on_panic(closure); self.panic_handler.on_panic(closure);
} }
} }
#[test]
fn should_not_cache_details_before_commit() {
use tests::helpers::*;
use std::thread;
use std::time::Duration;
use std::sync::atomic::{AtomicBool, Ordering};
let client = generate_dummy_client(0);
let genesis = client.chain_info().best_block_hash;
let (new_hash, new_block) = get_good_dummy_block_hash();
let go = {
// Separate thread uncommited transaction
let go = Arc::new(AtomicBool::new(false));
let go_thread = go.clone();
let another_client = client.reference().clone();
thread::spawn(move || {
let mut batch = DBTransaction::new(&*another_client.chain.read().db().clone());
another_client.chain.read().insert_block(&mut batch, &new_block, Vec::new());
go_thread.store(true, Ordering::SeqCst);
});
go
};
while !go.load(Ordering::SeqCst) { thread::park_timeout(Duration::from_millis(5)); }
assert!(client.tree_route(&genesis, &new_hash).is_none());
}

View File

@ -34,7 +34,7 @@ use log_entry::LocalizedLogEntry;
use receipt::{Receipt, LocalizedReceipt}; use receipt::{Receipt, LocalizedReceipt};
use blockchain::extras::BlockReceipts; use blockchain::extras::BlockReceipts;
use error::{ImportResult}; use error::{ImportResult};
use evm::{Factory as EvmFactory, VMType}; use evm::{Factory as EvmFactory, VMType, Schedule};
use miner::{Miner, MinerService, TransactionImportResult}; use miner::{Miner, MinerService, TransactionImportResult};
use spec::Spec; use spec::Spec;
@ -316,6 +316,10 @@ pub fn get_temp_state_db() -> GuardedTempResult<StateDB> {
} }
impl MiningBlockChainClient for TestBlockChainClient { impl MiningBlockChainClient for TestBlockChainClient {
fn latest_schedule(&self) -> Schedule {
Schedule::new_homestead_gas_fix()
}
fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock { fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock {
let engine = &*self.spec.engine; let engine = &*self.spec.engine;
let genesis_header = self.spec.genesis_header(); let genesis_header = self.spec.genesis_header();

View File

@ -27,7 +27,7 @@ use views::{BlockView};
use error::{ImportResult, CallError}; use error::{ImportResult, CallError};
use receipt::LocalizedReceipt; use receipt::LocalizedReceipt;
use trace::LocalizedTrace; use trace::LocalizedTrace;
use evm::Factory as EvmFactory; use evm::{Factory as EvmFactory, Schedule};
use types::ids::*; use types::ids::*;
use types::trace_filter::Filter as TraceFilter; use types::trace_filter::Filter as TraceFilter;
use executive::Executed; use executive::Executed;
@ -236,6 +236,9 @@ pub trait MiningBlockChainClient : BlockChainClient {
/// Import sealed block. Skips all verifications. /// Import sealed block. Skips all verifications.
fn import_sealed_block(&self, block: SealedBlock) -> ImportResult; fn import_sealed_block(&self, block: SealedBlock) -> ImportResult;
/// Returns latest schedule.
fn latest_schedule(&self) -> Schedule;
} }
impl IpcConfig for BlockChainClient { } impl IpcConfig for BlockChainClient { }

View File

@ -114,7 +114,7 @@ pub trait Writable {
R: Deref<Target = [u8]> { R: Deref<Target = [u8]> {
match policy { match policy {
CacheUpdatePolicy::Overwrite => { CacheUpdatePolicy::Overwrite => {
for (key, value) in values.into_iter() { for (key, value) in values {
self.write(col, &key, &value); self.write(col, &key, &value);
cache.insert(key, value); cache.insert(key, value);
} }
@ -135,7 +135,7 @@ pub trait Writable {
R: Deref<Target = [u8]> { R: Deref<Target = [u8]> {
match policy { match policy {
CacheUpdatePolicy::Overwrite => { CacheUpdatePolicy::Overwrite => {
for (key, value) in values.into_iter() { for (key, value) in values {
match value { match value {
Some(ref v) => self.write(col, &key, v), Some(ref v) => self.write(col, &key, v),
None => self.delete(col, &key), None => self.delete(col, &key),
@ -144,7 +144,7 @@ pub trait Writable {
} }
}, },
CacheUpdatePolicy::Remove => { CacheUpdatePolicy::Remove => {
for (key, value) in values.into_iter() { for (key, value) in values {
match value { match value {
Some(v) => self.write(col, &key, &v), Some(v) => self.write(col, &key, &v),
None => self.delete(col, &key), None => self.delete(col, &key),

View File

@ -47,6 +47,13 @@ pub enum TransactionError {
/// Transaction gas price /// Transaction gas price
got: U256, got: U256,
}, },
/// Transaction's gas is below currently set minimal gas requirement.
InsufficientGas {
/// Minimal expected gas
minimal: U256,
/// Transaction gas
got: U256,
},
/// Sender doesn't have enough funds to pay for this transaction /// Sender doesn't have enough funds to pay for this transaction
InsufficientBalance { InsufficientBalance {
/// Senders balance /// Senders balance
@ -63,6 +70,12 @@ pub enum TransactionError {
}, },
/// Transaction's gas limit (aka gas) is invalid. /// Transaction's gas limit (aka gas) is invalid.
InvalidGasLimit(OutOfBounds<U256>), InvalidGasLimit(OutOfBounds<U256>),
/// Transaction sender is banned.
SenderBanned,
/// Transaction receipient is banned.
RecipientBanned,
/// Contract creation code is banned.
CodeBanned,
} }
impl fmt::Display for TransactionError { impl fmt::Display for TransactionError {
@ -75,12 +88,17 @@ impl fmt::Display for TransactionError {
LimitReached => "Transaction limit reached".into(), LimitReached => "Transaction limit reached".into(),
InsufficientGasPrice { minimal, got } => InsufficientGasPrice { minimal, got } =>
format!("Insufficient gas price. Min={}, Given={}", minimal, got), format!("Insufficient gas price. Min={}, Given={}", minimal, got),
InsufficientGas { minimal, got } =>
format!("Insufficient gas. Min={}, Given={}", minimal, got),
InsufficientBalance { balance, cost } => InsufficientBalance { balance, cost } =>
format!("Insufficient balance for transaction. Balance={}, Cost={}", format!("Insufficient balance for transaction. Balance={}, Cost={}",
balance, cost), balance, cost),
GasLimitExceeded { limit, got } => GasLimitExceeded { limit, got } =>
format!("Gas limit exceeded. Limit={}, Given={}", limit, got), format!("Gas limit exceeded. Limit={}, Given={}", limit, got),
InvalidGasLimit(ref err) => format!("Invalid gas limit. {}", err), InvalidGasLimit(ref err) => format!("Invalid gas limit. {}", err),
SenderBanned => "Sender is temporarily banned.".into(),
RecipientBanned => "Recipient is temporarily banned.".into(),
CodeBanned => "Contract code is temporarily banned.".into(),
}; };
f.write_fmt(format_args!("Transaction error ({})", msg)) f.write_fmt(format_args!("Transaction error ({})", msg))

View File

@ -173,7 +173,7 @@ lazy_static! {
arr[SIGNEXTEND as usize] = InstructionInfo::new("SIGNEXTEND", 0, 2, 1, false, GasPriceTier::Low); arr[SIGNEXTEND as usize] = InstructionInfo::new("SIGNEXTEND", 0, 2, 1, false, GasPriceTier::Low);
arr[SHA3 as usize] = InstructionInfo::new("SHA3", 0, 2, 1, false, GasPriceTier::Special); arr[SHA3 as usize] = InstructionInfo::new("SHA3", 0, 2, 1, false, GasPriceTier::Special);
arr[ADDRESS as usize] = InstructionInfo::new("ADDRESS", 0, 0, 1, false, GasPriceTier::Base); arr[ADDRESS as usize] = InstructionInfo::new("ADDRESS", 0, 0, 1, false, GasPriceTier::Base);
arr[BALANCE as usize] = InstructionInfo::new("BALANCE", 0, 1, 1, false, GasPriceTier::Ext); arr[BALANCE as usize] = InstructionInfo::new("BALANCE", 0, 1, 1, false, GasPriceTier::Special);
arr[ORIGIN as usize] = InstructionInfo::new("ORIGIN", 0, 0, 1, false, GasPriceTier::Base); arr[ORIGIN as usize] = InstructionInfo::new("ORIGIN", 0, 0, 1, false, GasPriceTier::Base);
arr[CALLER as usize] = InstructionInfo::new("CALLER", 0, 0, 1, false, GasPriceTier::Base); arr[CALLER as usize] = InstructionInfo::new("CALLER", 0, 0, 1, false, GasPriceTier::Base);
arr[CALLVALUE as usize] = InstructionInfo::new("CALLVALUE", 0, 0, 1, false, GasPriceTier::Base); arr[CALLVALUE as usize] = InstructionInfo::new("CALLVALUE", 0, 0, 1, false, GasPriceTier::Base);
@ -183,8 +183,8 @@ lazy_static! {
arr[CODESIZE as usize] = InstructionInfo::new("CODESIZE", 0, 0, 1, false, GasPriceTier::Base); arr[CODESIZE as usize] = InstructionInfo::new("CODESIZE", 0, 0, 1, false, GasPriceTier::Base);
arr[CODECOPY as usize] = InstructionInfo::new("CODECOPY", 0, 3, 0, true, GasPriceTier::VeryLow); arr[CODECOPY as usize] = InstructionInfo::new("CODECOPY", 0, 3, 0, true, GasPriceTier::VeryLow);
arr[GASPRICE as usize] = InstructionInfo::new("GASPRICE", 0, 0, 1, false, GasPriceTier::Base); arr[GASPRICE as usize] = InstructionInfo::new("GASPRICE", 0, 0, 1, false, GasPriceTier::Base);
arr[EXTCODESIZE as usize] = InstructionInfo::new("EXTCODESIZE", 0, 1, 1, false, GasPriceTier::Ext); arr[EXTCODESIZE as usize] = InstructionInfo::new("EXTCODESIZE", 0, 1, 1, false, GasPriceTier::Special);
arr[EXTCODECOPY as usize] = InstructionInfo::new("EXTCODECOPY", 0, 4, 0, true, GasPriceTier::Ext); arr[EXTCODECOPY as usize] = InstructionInfo::new("EXTCODECOPY", 0, 4, 0, true, GasPriceTier::Special);
arr[BLOCKHASH as usize] = InstructionInfo::new("BLOCKHASH", 0, 1, 1, false, GasPriceTier::Ext); arr[BLOCKHASH as usize] = InstructionInfo::new("BLOCKHASH", 0, 1, 1, false, GasPriceTier::Ext);
arr[COINBASE as usize] = InstructionInfo::new("COINBASE", 0, 0, 1, false, GasPriceTier::Base); arr[COINBASE as usize] = InstructionInfo::new("COINBASE", 0, 0, 1, false, GasPriceTier::Base);
arr[TIMESTAMP as usize] = InstructionInfo::new("TIMESTAMP", 0, 0, 1, false, GasPriceTier::Base); arr[TIMESTAMP as usize] = InstructionInfo::new("TIMESTAMP", 0, 0, 1, false, GasPriceTier::Base);
@ -277,7 +277,7 @@ lazy_static! {
arr[CALLCODE as usize] = InstructionInfo::new("CALLCODE", 0, 7, 1, true, GasPriceTier::Special); arr[CALLCODE as usize] = InstructionInfo::new("CALLCODE", 0, 7, 1, true, GasPriceTier::Special);
arr[RETURN as usize] = InstructionInfo::new("RETURN", 0, 2, 0, true, GasPriceTier::Zero); arr[RETURN as usize] = InstructionInfo::new("RETURN", 0, 2, 0, true, GasPriceTier::Zero);
arr[DELEGATECALL as usize] = InstructionInfo::new("DELEGATECALL", 0, 6, 1, true, GasPriceTier::Special); arr[DELEGATECALL as usize] = InstructionInfo::new("DELEGATECALL", 0, 6, 1, true, GasPriceTier::Special);
arr[SUICIDE as usize] = InstructionInfo::new("SUICIDE", 0, 1, 0, true, GasPriceTier::Zero); arr[SUICIDE as usize] = InstructionInfo::new("SUICIDE", 0, 1, 0, true, GasPriceTier::Special);
arr arr
}; };
} }

View File

@ -30,12 +30,20 @@ macro_rules! overflowing {
} }
#[cfg_attr(feature="dev", allow(enum_variant_names))] #[cfg_attr(feature="dev", allow(enum_variant_names))]
enum InstructionCost<Cost: CostType> { enum Request<Cost: CostType> {
Gas(Cost), Gas(Cost),
GasMem(Cost, Cost, Option<Cost>), GasMem(Cost, Cost),
GasMemProvide(Cost, Cost, Option<U256>),
GasMemCopy(Cost, Cost, Cost) GasMemCopy(Cost, Cost, Cost)
} }
pub struct InstructionRequirements<Cost: CostType> {
pub gas_cost: Cost,
pub provide_gas: Option<Cost>,
pub memory_total_gas: Cost,
pub memory_required_size: usize,
}
pub struct Gasometer<Gas: CostType> { pub struct Gasometer<Gas: CostType> {
pub current_gas: Gas, pub current_gas: Gas,
pub current_mem_gas: Gas, pub current_mem_gas: Gas,
@ -59,11 +67,19 @@ impl<Gas: CostType> Gasometer<Gas> {
/// How much gas is provided to a CALL/CREATE, given that we need to deduct `needed` for this operation /// How much gas is provided to a CALL/CREATE, given that we need to deduct `needed` for this operation
/// and that we `requested` some. /// and that we `requested` some.
pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<evm::Result<Gas>>) -> evm::Result<Gas> { pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<U256>) -> evm::Result<Gas> {
// Try converting requested gas to `Gas` (`U256/u64`)
// but in EIP150 even if we request more we should never fail from OOG
let requested = requested.map(Gas::from_u256);
match schedule.sub_gas_cap_divisor { match schedule.sub_gas_cap_divisor {
Some(cap_divisor) if self.current_gas >= needed => { Some(cap_divisor) if self.current_gas >= needed => {
let gas_remaining = self.current_gas - needed; let gas_remaining = self.current_gas - needed;
let max_gas_provided = gas_remaining - gas_remaining / Gas::from(cap_divisor); let max_gas_provided = match cap_divisor {
64 => gas_remaining - (gas_remaining >> 6),
cap_divisor => gas_remaining - gas_remaining / Gas::from(cap_divisor),
};
if let Some(Ok(r)) = requested { if let Some(Ok(r)) = requested {
Ok(min(r, max_gas_provided)) Ok(min(r, max_gas_provided))
} else { } else {
@ -78,7 +94,7 @@ impl<Gas: CostType> Gasometer<Gas> {
} else { } else {
Ok(0.into()) Ok(0.into())
} }
} },
} }
} }
@ -88,21 +104,21 @@ impl<Gas: CostType> Gasometer<Gas> {
/// We guarantee that the final element of the returned tuple (`provided`) will be `Some` /// We guarantee that the final element of the returned tuple (`provided`) will be `Some`
/// iff the `instruction` is one of `CREATE`, or any of the `CALL` variants. In this case, /// iff the `instruction` is one of `CREATE`, or any of the `CALL` variants. In this case,
/// it will be the amount of gas that the current context provides to the child context. /// it will be the amount of gas that the current context provides to the child context.
pub fn get_gas_cost_mem( pub fn requirements(
&mut self, &mut self,
ext: &evm::Ext, ext: &evm::Ext,
instruction: Instruction, instruction: Instruction,
info: &InstructionInfo, info: &InstructionInfo,
stack: &Stack<U256>, stack: &Stack<U256>,
current_mem_size: usize, current_mem_size: usize,
) -> evm::Result<(Gas, Gas, usize, Option<Gas>)> { ) -> evm::Result<InstructionRequirements<Gas>> {
let schedule = ext.schedule(); let schedule = ext.schedule();
let tier = instructions::get_tier_idx(info.tier); let tier = instructions::get_tier_idx(info.tier);
let default_gas = Gas::from(schedule.tier_step_gas[tier]); let default_gas = Gas::from(schedule.tier_step_gas[tier]);
let cost = match instruction { let cost = match instruction {
instructions::JUMPDEST => { instructions::JUMPDEST => {
InstructionCost::Gas(Gas::from(1)) Request::Gas(Gas::from(1))
}, },
instructions::SSTORE => { instructions::SSTORE => {
let address = H256::from(stack.peek(0)); let address = H256::from(stack.peek(0));
@ -116,16 +132,16 @@ impl<Gas: CostType> Gasometer<Gas> {
// !is_zero(&val) && is_zero(newval) // !is_zero(&val) && is_zero(newval)
schedule.sstore_reset_gas schedule.sstore_reset_gas
}; };
InstructionCost::Gas(Gas::from(gas)) Request::Gas(Gas::from(gas))
}, },
instructions::SLOAD => { instructions::SLOAD => {
InstructionCost::Gas(Gas::from(schedule.sload_gas)) Request::Gas(Gas::from(schedule.sload_gas))
}, },
instructions::BALANCE => { instructions::BALANCE => {
InstructionCost::Gas(Gas::from(schedule.balance_gas)) Request::Gas(Gas::from(schedule.balance_gas))
}, },
instructions::EXTCODESIZE => { instructions::EXTCODESIZE => {
InstructionCost::Gas(Gas::from(schedule.extcodesize_gas)) Request::Gas(Gas::from(schedule.extcodesize_gas))
}, },
instructions::SUICIDE => { instructions::SUICIDE => {
let mut gas = Gas::from(schedule.suicide_gas); let mut gas = Gas::from(schedule.suicide_gas);
@ -135,28 +151,28 @@ impl<Gas: CostType> Gasometer<Gas> {
gas = overflowing!(gas.overflow_add(schedule.suicide_to_new_account_cost.into())); gas = overflowing!(gas.overflow_add(schedule.suicide_to_new_account_cost.into()));
} }
InstructionCost::Gas(gas) Request::Gas(gas)
}, },
instructions::MSTORE | instructions::MLOAD => { instructions::MSTORE | instructions::MLOAD => {
InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 32)), None) Request::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 32)))
}, },
instructions::MSTORE8 => { instructions::MSTORE8 => {
InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 1)), None) Request::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 1)))
}, },
instructions::RETURN => { instructions::RETURN => {
InstructionCost::GasMem(default_gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None) Request::GasMem(default_gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
}, },
instructions::SHA3 => { instructions::SHA3 => {
let w = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(1))), 31)); let w = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(1))), 31));
let words = w >> 5; let words = w >> 5;
let gas = Gas::from(schedule.sha3_gas) + (Gas::from(schedule.sha3_word_gas) * words); let gas = Gas::from(schedule.sha3_gas) + (Gas::from(schedule.sha3_word_gas) * words);
InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None) Request::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
}, },
instructions::CALLDATACOPY | instructions::CODECOPY => { instructions::CALLDATACOPY | instructions::CODECOPY => {
InstructionCost::GasMemCopy(default_gas, try!(mem_needed(stack.peek(0), stack.peek(2))), try!(Gas::from_u256(*stack.peek(2)))) Request::GasMemCopy(default_gas, try!(mem_needed(stack.peek(0), stack.peek(2))), try!(Gas::from_u256(*stack.peek(2))))
}, },
instructions::EXTCODECOPY => { instructions::EXTCODECOPY => {
InstructionCost::GasMemCopy(schedule.extcodecopy_base_gas.into(), try!(mem_needed(stack.peek(1), stack.peek(3))), try!(Gas::from_u256(*stack.peek(3)))) Request::GasMemCopy(schedule.extcodecopy_base_gas.into(), try!(mem_needed(stack.peek(1), stack.peek(3))), try!(Gas::from_u256(*stack.peek(3))))
}, },
instructions::LOG0...instructions::LOG4 => { instructions::LOG0...instructions::LOG4 => {
let no_of_topics = instructions::get_log_topics(instruction); let no_of_topics = instructions::get_log_topics(instruction);
@ -164,7 +180,7 @@ impl<Gas: CostType> Gasometer<Gas> {
let data_gas = overflowing!(try!(Gas::from_u256(*stack.peek(1))).overflow_mul(Gas::from(schedule.log_data_gas))); let data_gas = overflowing!(try!(Gas::from_u256(*stack.peek(1))).overflow_mul(Gas::from(schedule.log_data_gas)));
let gas = overflowing!(data_gas.overflow_add(Gas::from(log_gas))); let gas = overflowing!(data_gas.overflow_add(Gas::from(log_gas)));
InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None) Request::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
}, },
instructions::CALL | instructions::CALLCODE => { instructions::CALL | instructions::CALLCODE => {
let mut gas = Gas::from(schedule.call_gas); let mut gas = Gas::from(schedule.call_gas);
@ -183,70 +199,82 @@ impl<Gas: CostType> Gasometer<Gas> {
gas = overflowing!(gas.overflow_add(schedule.call_value_transfer_gas.into())); gas = overflowing!(gas.overflow_add(schedule.call_value_transfer_gas.into()));
}; };
// TODO: refactor to avoid duplicate calculation here and later on. let requested = *stack.peek(0);
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let requested = Gas::from_u256(*stack.peek(0));
let provided = try!(self.gas_provided(schedule, cost_so_far, Some(requested)));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided)) Request::GasMemProvide(gas, mem, Some(requested))
}, },
instructions::DELEGATECALL => { instructions::DELEGATECALL => {
let mut gas = Gas::from(schedule.call_gas); let gas = Gas::from(schedule.call_gas);
let mem = cmp::max( let mem = cmp::max(
try!(mem_needed(stack.peek(4), stack.peek(5))), try!(mem_needed(stack.peek(4), stack.peek(5))),
try!(mem_needed(stack.peek(2), stack.peek(3))) try!(mem_needed(stack.peek(2), stack.peek(3)))
); );
let requested = *stack.peek(0);
// TODO: refactor to avoid duplicate calculation here and later on. Request::GasMemProvide(gas, mem, Some(requested))
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let requested = Gas::from_u256(*stack.peek(0));
let provided = try!(self.gas_provided(schedule, cost_so_far, Some(requested)));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
}, },
instructions::CREATE => { instructions::CREATE => {
let mut gas = Gas::from(schedule.create_gas); let gas = Gas::from(schedule.create_gas);
let mem = try!(mem_needed(stack.peek(1), stack.peek(2))); let mem = try!(mem_needed(stack.peek(1), stack.peek(2)));
// TODO: refactor to avoid duplicate calculation here and later on. Request::GasMemProvide(gas, mem, None)
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let provided = try!(self.gas_provided(schedule, cost_so_far, None));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
}, },
instructions::EXP => { instructions::EXP => {
let expon = stack.peek(1); let expon = stack.peek(1);
let bytes = ((expon.bits() + 7) / 8) as usize; let bytes = ((expon.bits() + 7) / 8) as usize;
let gas = Gas::from(schedule.exp_gas + schedule.exp_byte_gas * bytes); let gas = Gas::from(schedule.exp_gas + schedule.exp_byte_gas * bytes);
InstructionCost::Gas(gas) Request::Gas(gas)
}, },
_ => InstructionCost::Gas(default_gas), _ => Request::Gas(default_gas),
}; };
match cost { Ok(match cost {
InstructionCost::Gas(gas) => { Request::Gas(gas) => {
Ok((gas, self.current_mem_gas, 0, None)) InstructionRequirements {
gas_cost: gas,
provide_gas: None,
memory_required_size: 0,
memory_total_gas: self.current_mem_gas,
}
}, },
InstructionCost::GasMem(gas, mem_size, provided) => { Request::GasMem(gas, mem_size) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size)); let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let gas = overflowing!(gas.overflow_add(mem_gas_cost)); let gas = overflowing!(gas.overflow_add(mem_gas_cost));
Ok((gas, new_mem_gas, new_mem_size, provided)) InstructionRequirements {
gas_cost: gas,
provide_gas: None,
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
}
}, },
InstructionCost::GasMemCopy(gas, mem_size, copy) => { Request::GasMemProvide(gas, mem_size, requested) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let gas = overflowing!(gas.overflow_add(mem_gas_cost));
let provided = try!(self.gas_provided(schedule, gas, requested));
let total_gas = overflowing!(gas.overflow_add(provided));
InstructionRequirements {
gas_cost: total_gas,
provide_gas: Some(provided),
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
}
},
Request::GasMemCopy(gas, mem_size, copy) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size)); let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let copy = overflowing!(add_gas_usize(copy, 31)) >> 5; let copy = overflowing!(add_gas_usize(copy, 31)) >> 5;
let copy_gas = Gas::from(schedule.copy_gas) * copy; let copy_gas = Gas::from(schedule.copy_gas) * copy;
let gas = overflowing!(gas.overflow_add(copy_gas)); let gas = overflowing!(gas.overflow_add(copy_gas));
let gas = overflowing!(gas.overflow_add(mem_gas_cost)); let gas = overflowing!(gas.overflow_add(mem_gas_cost));
Ok((gas, new_mem_gas, new_mem_size, None))
} InstructionRequirements {
gas_cost: gas,
provide_gas: None,
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
} }
},
})
} }
fn mem_gas_cost(&self, schedule: &evm::Schedule, current_mem_size: usize, mem_size: &Gas) -> evm::Result<(Gas, Gas, usize)> { fn mem_gas_cost(&self, schedule: &evm::Schedule, current_mem_size: usize, mem_size: &Gas) -> evm::Result<(Gas, Gas, usize)> {
@ -256,7 +284,7 @@ impl<Gas: CostType> Gasometer<Gas> {
let a = overflowing!(s.overflow_mul(Gas::from(schedule.memory_gas))); let a = overflowing!(s.overflow_mul(Gas::from(schedule.memory_gas)));
// Calculate s*s/quad_coeff_div // Calculate s*s/quad_coeff_div
debug_assert_eq!(schedule.quad_coeff_div, 512); assert_eq!(schedule.quad_coeff_div, 512);
let b = overflowing!(s.overflow_mul_shr(s, 9)); let b = overflowing!(s.overflow_mul_shr(s, 9));
Ok(overflowing!(a.overflow_add(b))) Ok(overflowing!(a.overflow_add(b)))
}; };
@ -328,3 +356,4 @@ fn test_calculate_mem_cost() {
assert_eq!(new_mem_gas, 3); assert_eq!(new_mem_gas, 3);
assert_eq!(mem_size, 32); assert_eq!(mem_size, 32);
} }

View File

@ -54,14 +54,14 @@ const TWO_POW_248: U256 = U256([0, 0, 0, 0x100000000000000]); //0x1 00000000 000
/// Abstraction over raw vector of Bytes. Easier state management of PC. /// Abstraction over raw vector of Bytes. Easier state management of PC.
struct CodeReader<'a> { struct CodeReader<'a> {
position: ProgramCounter, position: ProgramCounter,
code: &'a Bytes code: &'a [u8]
} }
#[cfg_attr(feature="dev", allow(len_without_is_empty))] #[cfg_attr(feature="dev", allow(len_without_is_empty))]
impl<'a> CodeReader<'a> { impl<'a> CodeReader<'a> {
/// Create new code reader - starting at position 0. /// Create new code reader - starting at position 0.
fn new(code: &'a Bytes) -> Self { fn new(code: &'a [u8]) -> Self {
CodeReader { CodeReader {
position: 0, position: 0,
code: code, code: code,
@ -120,14 +120,14 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
try!(self.verify_instruction(ext, instruction, info, &stack)); try!(self.verify_instruction(ext, instruction, info, &stack));
// Calculate gas cost // Calculate gas cost
let (gas_cost, mem_gas, mem_size, provided) = try!(gasometer.get_gas_cost_mem(ext, instruction, info, &stack, self.mem.size())); let requirements = try!(gasometer.requirements(ext, instruction, info, &stack, self.mem.size()));
// TODO: make compile-time removable if too much of a performance hit. // TODO: make compile-time removable if too much of a performance hit.
let trace_executed = ext.trace_prepare_execute(reader.position - 1, instruction, &gas_cost.as_u256()); let trace_executed = ext.trace_prepare_execute(reader.position - 1, instruction, &requirements.gas_cost.as_u256());
try!(gasometer.verify_gas(&gas_cost)); try!(gasometer.verify_gas(&requirements.gas_cost));
self.mem.expand(mem_size); self.mem.expand(requirements.memory_required_size);
gasometer.current_mem_gas = mem_gas; gasometer.current_mem_gas = requirements.memory_total_gas;
gasometer.current_gas = gasometer.current_gas - gas_cost; gasometer.current_gas = gasometer.current_gas - requirements.gas_cost;
evm_debug!({ informant.before_instruction(reader.position, instruction, info, &gasometer.current_gas, &stack) }); evm_debug!({ informant.before_instruction(reader.position, instruction, info, &gasometer.current_gas, &stack) });
@ -138,7 +138,7 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
// Execute instruction // Execute instruction
let result = try!(self.exec_instruction( let result = try!(self.exec_instruction(
gasometer.current_gas, &params, ext, instruction, &mut reader, &mut stack, provided gasometer.current_gas, &params, ext, instruction, &mut reader, &mut stack, requirements.provide_gas
)); ));
evm_debug!({ informant.after_instruction(instruction) }); evm_debug!({ informant.after_instruction(instruction) });

View File

@ -15,20 +15,27 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc; use std::sync::Arc;
use lru_cache::LruCache; use util::{H256, HeapSizeOf, Mutex};
use util::{H256, Mutex};
use util::sha3::*; use util::sha3::*;
use util::cache::MemoryLruCache;
use bit_set::BitSet; use bit_set::BitSet;
use super::super::instructions; use super::super::instructions;
const INITIAL_CAPACITY: usize = 32;
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024; const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
// stub for a HeapSizeOf implementation.
struct Bits(Arc<BitSet>);
impl HeapSizeOf for Bits {
fn heap_size_of_children(&self) -> usize {
// dealing in bits here
self.0.capacity() * 8
}
}
/// Global cache for EVM interpreter /// Global cache for EVM interpreter
pub struct SharedCache { pub struct SharedCache {
jump_destinations: Mutex<LruCache<H256, Arc<BitSet>>>, jump_destinations: Mutex<MemoryLruCache<H256, Bits>>,
max_size: usize,
cur_size: Mutex<usize>,
} }
impl SharedCache { impl SharedCache {
@ -36,9 +43,7 @@ impl SharedCache {
/// to cache. /// to cache.
pub fn new(max_size: usize) -> Self { pub fn new(max_size: usize) -> Self {
SharedCache { SharedCache {
jump_destinations: Mutex::new(LruCache::new(INITIAL_CAPACITY)), jump_destinations: Mutex::new(MemoryLruCache::new(max_size)),
max_size: max_size * 8, // dealing with bits here.
cur_size: Mutex::new(0),
} }
} }
@ -49,37 +54,11 @@ impl SharedCache {
} }
if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) { if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) {
return d.clone(); return d.0.clone();
} }
let d = Self::find_jump_destinations(code); let d = Self::find_jump_destinations(code);
self.jump_destinations.lock().insert(code_hash.clone(), Bits(d.clone()));
{
let mut cur_size = self.cur_size.lock();
*cur_size += d.capacity();
let mut jump_dests = self.jump_destinations.lock();
let cap = jump_dests.capacity();
// grow the cache as necessary; it operates on amount of items
// but we're working based on memory usage.
if jump_dests.len() == cap && *cur_size < self.max_size {
jump_dests.set_capacity(cap * 2);
}
// account for any element displaced from the cache.
if let Some(lru) = jump_dests.insert(code_hash.clone(), d.clone()) {
*cur_size -= lru.capacity();
}
// remove elements until we are below the memory target.
while *cur_size > self.max_size {
match jump_dests.remove_lru() {
Some((_, v)) => *cur_size -= v.capacity(),
_ => break,
}
}
}
d d
} }

View File

@ -101,6 +101,7 @@ extern crate bit_set;
extern crate rlp; extern crate rlp;
extern crate ethcore_bloom_journal as bloom_journal; extern crate ethcore_bloom_journal as bloom_journal;
extern crate byteorder; extern crate byteorder;
extern crate transient_hashmap;
#[macro_use] #[macro_use]
extern crate log; extern crate log;

View File

@ -61,7 +61,7 @@ pub fn generate_bloom(source: Arc<Database>, dest: &mut Database) -> Result<(),
let account_trie = try!(TrieDB::new(state_db.as_hashdb(), &state_root).map_err(|e| Error::Custom(format!("Cannot open trie: {:?}", e)))); let account_trie = try!(TrieDB::new(state_db.as_hashdb(), &state_root).map_err(|e| Error::Custom(format!("Cannot open trie: {:?}", e))));
for item in try!(account_trie.iter().map_err(|_| Error::MigrationImpossible)) { for item in try!(account_trie.iter().map_err(|_| Error::MigrationImpossible)) {
let (ref account_key, _) = try!(item.map_err(|_| Error::MigrationImpossible)); let (ref account_key, _) = try!(item.map_err(|_| Error::MigrationImpossible));
let account_key_hash = H256::from_slice(&account_key); let account_key_hash = H256::from_slice(account_key);
bloom.set(&*account_key_hash); bloom.set(&*account_key_hash);
} }

View File

@ -0,0 +1,339 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Banning Queue
//! Transaction Queue wrapper maintaining additional list of banned senders and contract hashes.
use std::time::Duration;
use std::ops::{Deref, DerefMut};
use std::cell::Cell;
use transaction::{SignedTransaction, Action};
use transient_hashmap::TransientHashMap;
use miner::{TransactionQueue, TransactionImportResult, TransactionOrigin, AccountDetails};
use error::{Error, TransactionError};
use util::{Uint, U256, H256, Address, Hashable};
type Count = u16;
/// Auto-Banning threshold
/// Auto-Banning threshold
///
/// Counts are `u16` offence tallies kept per sender / recipient / code hash.
pub enum Threshold {
	/// Should ban after given number of misbehaves reported.
	BanAfter(Count),
	/// Should never ban anything
	NeverBan
}
impl Default for Threshold {
	fn default() -> Self {
		// Banning is opt-in: unless configured otherwise, nothing is ever banned.
		Threshold::NeverBan
	}
}
/// Transaction queue with banlist.
pub struct BanningTransactionQueue {
queue: TransactionQueue,
ban_threshold: Threshold,
senders_bans: TransientHashMap<Address, Cell<Count>>,
recipients_bans: TransientHashMap<Address, Cell<Count>>,
codes_bans: TransientHashMap<H256, Cell<Count>>,
}
impl BanningTransactionQueue {
	/// Creates new banlisting transaction queue
	///
	/// # Panics
	///
	/// Panics when `ban_lifetime` is shorter than one second, since the
	/// underlying `TransientHashMap`s track expiry with second resolution.
	pub fn new(queue: TransactionQueue, ban_threshold: Threshold, ban_lifetime: Duration) -> Self {
		let ban_lifetime_sec = ban_lifetime.as_secs();
		assert!(ban_lifetime_sec > 0, "Lifetime has to be specified in seconds.");
		BanningTransactionQueue {
			queue: queue,
			ban_threshold: ban_threshold,
			senders_bans: TransientHashMap::new(ban_lifetime_sec),
			recipients_bans: TransientHashMap::new(ban_lifetime_sec),
			codes_bans: TransientHashMap::new(ban_lifetime_sec),
		}
	}

	/// Borrows internal queue.
	/// NOTE: you can insert transactions to the queue even
	/// if they would be rejected because of ban otherwise.
	/// But probably you shouldn't.
	pub fn queue(&mut self) -> &mut TransactionQueue {
		&mut self.queue
	}

	/// Add to the queue taking bans into consideration.
	/// May reject transaction because of the banlist.
	///
	/// Returns whatever the wrapped queue returns for the import when no
	/// ban applies; otherwise an appropriate `TransactionError` variant
	/// (`SenderBanned` / `RecipientBanned` / `CodeBanned`).
	pub fn add_with_banlist<F, G>(
		&mut self,
		transaction: SignedTransaction,
		account_details: &F,
		gas_estimator: &G,
	) -> Result<TransactionImportResult, Error> where
		F: Fn(&Address) -> AccountDetails,
		G: Fn(&SignedTransaction) -> U256,
	{
		if let Threshold::BanAfter(threshold) = self.ban_threshold {
			// NOTE In all checks use direct query to avoid increasing ban timeout.
			// Check sender
			if let Ok(sender) = transaction.sender() {
				let count = self.senders_bans.direct().get(&sender).map(|v| v.get()).unwrap_or(0);
				if count > threshold {
					debug!(target: "txqueue", "Ignoring transaction {:?} because sender is banned.", transaction.hash());
					return Err(Error::Transaction(TransactionError::SenderBanned));
				}
			}
			// Check recipient
			if let Action::Call(recipient) = transaction.action {
				let count = self.recipients_bans.direct().get(&recipient).map(|v| v.get()).unwrap_or(0);
				if count > threshold {
					debug!(target: "txqueue", "Ignoring transaction {:?} because recipient is banned.", transaction.hash());
					return Err(Error::Transaction(TransactionError::RecipientBanned));
				}
			}
			// Check code
			if let Action::Create = transaction.action {
				let code_hash = transaction.data.sha3();
				let count = self.codes_bans.direct().get(&code_hash).map(|v| v.get()).unwrap_or(0);
				if count > threshold {
					debug!(target: "txqueue", "Ignoring transaction {:?} because code is banned.", transaction.hash());
					return Err(Error::Transaction(TransactionError::CodeBanned));
				}
			}
		}
		self.queue.add(transaction, TransactionOrigin::External, account_details, gas_estimator)
	}

	/// Ban transaction with given hash.
	/// Transaction has to be in the queue.
	///
	/// Bans sender and recipient/code and returns `true` when any ban has reached threshold.
	pub fn ban_transaction(&mut self, hash: &H256) -> bool {
		let transaction = self.queue.find(hash);
		match transaction {
			Some(transaction) => {
				let sender = transaction.sender().expect("Transaction is in queue, so the sender is already validated; qed");
				// Ban sender
				let sender_banned = self.ban_sender(sender);
				// Ban recipient and codehash.
				// NOTE: evaluated unconditionally (previously short-circuited on
				// `sender_banned`), so the recipient/code offence counter is always
				// bumped as the doc comment above promises. The returned boolean is
				// unchanged in every case.
				let recipient_or_code_banned = match transaction.action {
					Action::Call(recipient) => {
						self.ban_recipient(recipient)
					},
					Action::Create => {
						self.ban_codehash(transaction.data.sha3())
					},
				};
				sender_banned || recipient_or_code_banned
			},
			None => false,
		}
	}

	/// Ban given sender.
	/// If bans threshold is reached all subsequent transactions from this sender will be rejected.
	/// Reaching bans threshold also removes all existing transaction from this sender that are already in the
	/// queue.
	fn ban_sender(&mut self, address: Address) -> bool {
		let count = {
			// Saturating add: the counter sticks at u16::MAX instead of wrapping.
			let mut count = self.senders_bans.entry(address).or_insert_with(|| Cell::new(0));
			*count.get_mut() = count.get().saturating_add(1);
			count.get()
		};
		match self.ban_threshold {
			Threshold::BanAfter(threshold) if count > threshold => {
				// Banlist the sender.
				// Remove all transactions from the queue.
				self.remove_all(address, !U256::zero());
				true
			},
			_ => false
		}
	}

	/// Ban given recipient.
	/// If bans threshold is reached all subsequent transactions to this address will be rejected.
	/// Returns true if bans threshold has been reached.
	fn ban_recipient(&mut self, address: Address) -> bool {
		let count = {
			let mut count = self.recipients_bans.entry(address).or_insert_with(|| Cell::new(0));
			*count.get_mut() = count.get().saturating_add(1);
			count.get()
		};
		match self.ban_threshold {
			// TODO [ToDr] Consider removing other transactions to the same recipient from the queue?
			Threshold::BanAfter(threshold) if count > threshold => true,
			_ => false
		}
	}

	/// Ban given codehash.
	/// If bans threshold is reached all subsequent transactions to contracts with this codehash will be rejected.
	/// Returns true if bans threshold has been reached.
	fn ban_codehash(&mut self, code_hash: H256) -> bool {
		let mut count = self.codes_bans.entry(code_hash).or_insert_with(|| Cell::new(0));
		*count.get_mut() = count.get().saturating_add(1);

		match self.ban_threshold {
			// TODO [ToDr] Consider removing other transactions with the same code from the queue?
			Threshold::BanAfter(threshold) if count.get() > threshold => true,
			_ => false,
		}
	}
}
// Read-only access delegates straight to the wrapped queue; no ban checks
// are involved on the immutable path.
impl Deref for BanningTransactionQueue {
	type Target = TransactionQueue;

	fn deref(&self) -> &Self::Target {
		&self.queue
	}
}
// Mutable access goes through `queue()`, which (as documented there) bypasses
// the banlist — callers who want ban enforcement must use `add_with_banlist`.
impl DerefMut for BanningTransactionQueue {
	fn deref_mut(&mut self) -> &mut Self::Target {
		self.queue()
	}
}
#[cfg(test)]
mod tests {
	use std::time::Duration;
	use super::{BanningTransactionQueue, Threshold};
	use ethkey::{Random, Generator};
	use transaction::{Transaction, SignedTransaction, Action};
	use error::{Error, TransactionError};
	use client::TransactionImportResult;
	use miner::{TransactionQueue, TransactionOrigin, AccountDetails};
	use util::{Uint, U256, Address, FromHex, Hashable};

	// Queue that bans after a single offence, with a 3-minute ban lifetime.
	fn queue() -> BanningTransactionQueue {
		BanningTransactionQueue::new(TransactionQueue::default(), Threshold::BanAfter(1), Duration::from_secs(180))
	}

	// Fresh account with zero nonce and maximal balance, so imports never
	// fail for balance/nonce reasons.
	fn default_account_details(_address: &Address) -> AccountDetails {
		AccountDetails {
			nonce: U256::zero(),
			balance: !U256::zero(),
		}
	}

	// Gas estimator stub: every transaction is considered free.
	fn gas_required(_tx: &SignedTransaction) -> U256 {
		0.into()
	}

	// Builds a signed transaction (random keypair) with the given action and
	// a fixed small payload, so the code hash is deterministic per test run.
	fn transaction(action: Action) -> SignedTransaction {
		let keypair = Random.generate().unwrap();
		Transaction {
			action: action,
			value: U256::from(100),
			data: "3331600055".from_hex().unwrap(),
			gas: U256::from(100_000),
			gas_price: U256::from(10),
			nonce: U256::from(0),
		}.sign(keypair.secret())
	}

	// Unwraps the specific TransactionError; panics with context otherwise.
	fn unwrap_err(res: Result<TransactionImportResult, Error>) -> TransactionError {
		match res {
			Err(Error::Transaction(e)) => e,
			Ok(x) => panic!("Expected error, got: Ok({:?})", x),
			Err(e) => panic!("Unexpected error type returned by queue: {:?}", e),
		}
	}

	#[test]
	fn should_allow_to_borrow_the_queue() {
		// given
		let tx = transaction(Action::Create);
		let mut txq = queue();

		// when: inserting directly via `queue()` bypasses the banlist
		txq.queue().add(tx, TransactionOrigin::External, &default_account_details, &gas_required).unwrap();

		// then
		// should also deref to queue
		assert_eq!(txq.status().pending, 1);
	}

	#[test]
	fn should_not_accept_transactions_from_banned_sender() {
		// given
		let tx = transaction(Action::Create);
		let mut txq = queue();
		// Banlist once (threshold not reached)
		let banlist1 = txq.ban_sender(tx.sender().unwrap());
		assert!(!banlist1, "Threshold not reached yet.");
		// Insert once
		let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
		assert_eq!(import1, TransactionImportResult::Current);

		// when: second offence crosses BanAfter(1)
		let banlist2 = txq.ban_sender(tx.sender().unwrap());
		let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);

		// then
		assert!(banlist2, "Threshold should be reached - banned.");
		assert_eq!(unwrap_err(import2), TransactionError::SenderBanned);
		// Should also remove transaction from the queue
		assert_eq!(txq.find(&tx.hash()), None);
	}

	#[test]
	fn should_not_accept_transactions_to_banned_recipient() {
		// given
		let recipient = Address::default();
		let tx = transaction(Action::Call(recipient));
		let mut txq = queue();
		// Banlist once (threshold not reached)
		let banlist1 = txq.ban_recipient(recipient);
		assert!(!banlist1, "Threshold not reached yet.");
		// Insert once
		let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
		assert_eq!(import1, TransactionImportResult::Current);

		// when: second offence crosses BanAfter(1)
		let banlist2 = txq.ban_recipient(recipient);
		let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);

		// then — note: unlike sender bans, recipient bans do not evict
		// already-queued transactions.
		assert!(banlist2, "Threshold should be reached - banned.");
		assert_eq!(unwrap_err(import2), TransactionError::RecipientBanned);
	}

	#[test]
	fn should_not_accept_transactions_with_banned_code() {
		// given
		let tx = transaction(Action::Create);
		let codehash = tx.data.sha3();
		let mut txq = queue();
		// Banlist once (threshold not reached)
		let banlist1 = txq.ban_codehash(codehash);
		assert!(!banlist1, "Threshold not reached yet.");
		// Insert once
		let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
		assert_eq!(import1, TransactionImportResult::Current);

		// when: second offence crosses BanAfter(1)
		let banlist2 = txq.ban_codehash(codehash);
		let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);

		// then
		assert!(banlist2, "Threshold should be reached - banned.");
		assert_eq!(unwrap_err(import2), TransactionError::CodeBanned);
	}
}

View File

@ -31,6 +31,7 @@ use receipt::{Receipt, RichReceipt};
use spec::Spec; use spec::Spec;
use engines::Engine; use engines::Engine;
use miner::{MinerService, MinerStatus, TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin}; use miner::{MinerService, MinerStatus, TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin};
use miner::banning_queue::{BanningTransactionQueue, Threshold};
use miner::work_notify::WorkPoster; use miner::work_notify::WorkPoster;
use client::TransactionImportResult; use client::TransactionImportResult;
use miner::price_info::PriceInfo; use miner::price_info::PriceInfo;
@ -59,6 +60,22 @@ pub enum GasLimit {
Fixed(U256), Fixed(U256),
} }
/// Transaction queue banning settings.
#[derive(Debug, PartialEq, Clone)]
pub enum Banning {
/// Banning in transaction queue is disabled
Disabled,
/// Banning in transaction queue is enabled
Enabled {
/// Upper limit of transaction processing time before banning.
offend_threshold: Duration,
/// Number of similar offending transactions before banning.
min_offends: u16,
/// Number of seconds the offender is banned for.
ban_duration: Duration,
},
}
/// Configures the behaviour of the miner. /// Configures the behaviour of the miner.
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct MinerOptions { pub struct MinerOptions {
@ -86,6 +103,8 @@ pub struct MinerOptions {
pub enable_resubmission: bool, pub enable_resubmission: bool,
/// Global gas limit for all transaction in the queue except for local and retracted. /// Global gas limit for all transaction in the queue except for local and retracted.
pub tx_queue_gas_limit: GasLimit, pub tx_queue_gas_limit: GasLimit,
/// Banning settings
pub tx_queue_banning: Banning,
} }
impl Default for MinerOptions { impl Default for MinerOptions {
@ -98,11 +117,12 @@ impl Default for MinerOptions {
tx_gas_limit: !U256::zero(), tx_gas_limit: !U256::zero(),
tx_queue_size: 1024, tx_queue_size: 1024,
tx_queue_gas_limit: GasLimit::Auto, tx_queue_gas_limit: GasLimit::Auto,
tx_queue_strategy: PrioritizationStrategy::GasFactorAndGasPrice, tx_queue_strategy: PrioritizationStrategy::GasPriceOnly,
pending_set: PendingSet::AlwaysQueue, pending_set: PendingSet::AlwaysQueue,
reseal_min_period: Duration::from_secs(2), reseal_min_period: Duration::from_secs(2),
work_queue_size: 20, work_queue_size: 20,
enable_resubmission: true, enable_resubmission: true,
tx_queue_banning: Banning::Disabled,
} }
} }
} }
@ -186,7 +206,7 @@ struct SealingWork {
/// Handles preparing work for "work sealing" or seals "internally" if Engine does not require work. /// Handles preparing work for "work sealing" or seals "internally" if Engine does not require work.
pub struct Miner { pub struct Miner {
// NOTE [ToDr] When locking always lock in this order! // NOTE [ToDr] When locking always lock in this order!
transaction_queue: Arc<Mutex<TransactionQueue>>, transaction_queue: Arc<Mutex<BanningTransactionQueue>>,
sealing_work: Mutex<SealingWork>, sealing_work: Mutex<SealingWork>,
next_allowed_reseal: Mutex<Instant>, next_allowed_reseal: Mutex<Instant>,
sealing_block_last_request: Mutex<u64>, sealing_block_last_request: Mutex<u64>,
@ -215,11 +235,18 @@ impl Miner {
GasLimit::Fixed(ref limit) => *limit, GasLimit::Fixed(ref limit) => *limit,
_ => !U256::zero(), _ => !U256::zero(),
}; };
let txq = Arc::new(Mutex::new(TransactionQueue::with_limits(
options.tx_queue_strategy, options.tx_queue_size, gas_limit, options.tx_gas_limit let txq = TransactionQueue::with_limits(options.tx_queue_strategy, options.tx_queue_size, gas_limit, options.tx_gas_limit);
))); let txq = match options.tx_queue_banning {
Banning::Disabled => BanningTransactionQueue::new(txq, Threshold::NeverBan, Duration::from_secs(180)),
Banning::Enabled { ban_duration, min_offends, .. } => BanningTransactionQueue::new(
txq,
Threshold::BanAfter(min_offends),
ban_duration,
),
};
Miner { Miner {
transaction_queue: txq, transaction_queue: Arc::new(Mutex::new(txq)),
next_allowed_reseal: Mutex::new(Instant::now()), next_allowed_reseal: Mutex::new(Instant::now()),
sealing_block_last_request: Mutex::new(0), sealing_block_last_request: Mutex::new(0),
sealing_work: Mutex::new(SealingWork{ sealing_work: Mutex::new(SealingWork{
@ -323,10 +350,31 @@ impl Miner {
let mut invalid_transactions = HashSet::new(); let mut invalid_transactions = HashSet::new();
let mut transactions_to_penalize = HashSet::new(); let mut transactions_to_penalize = HashSet::new();
let block_number = open_block.block().fields().header.number(); let block_number = open_block.block().fields().header.number();
// TODO: push new uncles, too.
// TODO Push new uncles too.
for tx in transactions { for tx in transactions {
let hash = tx.hash(); let hash = tx.hash();
match open_block.push_transaction(tx, None) { let start = Instant::now();
let result = open_block.push_transaction(tx, None);
let took = start.elapsed();
// Check for heavy transactions
match self.options.tx_queue_banning {
Banning::Enabled { ref offend_threshold, .. } if &took > offend_threshold => {
match self.transaction_queue.lock().ban_transaction(&hash) {
true => {
warn!(target: "miner", "Detected heavy transaction. Banning the sender and recipient/code.");
},
false => {
transactions_to_penalize.insert(hash);
debug!(target: "miner", "Detected heavy transaction. Penalizing sender.")
}
}
},
_ => {},
}
match result {
Err(Error::Execution(ExecutionError::BlockGasLimitReached { gas_limit, gas_used, gas })) => { Err(Error::Execution(ExecutionError::BlockGasLimitReached { gas_limit, gas_used, gas })) => {
debug!(target: "miner", "Skipping adding transaction to block because of gas limit: {:?} (limit: {:?}, used: {:?}, gas: {:?})", hash, gas_limit, gas_used, gas); debug!(target: "miner", "Skipping adding transaction to block because of gas limit: {:?} (limit: {:?}, used: {:?}, gas: {:?})", hash, gas_limit, gas_used, gas);
@ -367,7 +415,7 @@ impl Miner {
{ {
let mut queue = self.transaction_queue.lock(); let mut queue = self.transaction_queue.lock();
for hash in invalid_transactions.into_iter() { for hash in invalid_transactions {
queue.remove_invalid(&hash, &fetch_account); queue.remove_invalid(&hash, &fetch_account);
} }
for hash in transactions_to_penalize { for hash in transactions_to_penalize {
@ -512,7 +560,7 @@ impl Miner {
prepare_new prepare_new
} }
fn add_transactions_to_queue(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, origin: TransactionOrigin, transaction_queue: &mut TransactionQueue) -> fn add_transactions_to_queue(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, origin: TransactionOrigin, transaction_queue: &mut BanningTransactionQueue) ->
Vec<Result<TransactionImportResult, Error>> { Vec<Result<TransactionImportResult, Error>> {
let fetch_account = |a: &Address| AccountDetails { let fetch_account = |a: &Address| AccountDetails {
@ -520,14 +568,25 @@ impl Miner {
balance: chain.latest_balance(a), balance: chain.latest_balance(a),
}; };
let schedule = chain.latest_schedule();
let gas_required = |tx: &SignedTransaction| tx.gas_required(&schedule).into();
transactions.into_iter() transactions.into_iter()
.map(|tx| transaction_queue.add(tx, &fetch_account, origin)) .map(|tx| match origin {
TransactionOrigin::Local | TransactionOrigin::RetractedBlock => {
transaction_queue.add(tx, origin, &fetch_account, &gas_required)
},
TransactionOrigin::External => {
transaction_queue.add_with_banlist(tx, &fetch_account, &gas_required)
}
})
.collect() .collect()
} }
/// Are we allowed to do a non-mandatory reseal? /// Are we allowed to do a non-mandatory reseal?
fn tx_reseal_allowed(&self) -> bool { Instant::now() > *self.next_allowed_reseal.lock() } fn tx_reseal_allowed(&self) -> bool { Instant::now() > *self.next_allowed_reseal.lock() }
#[cfg_attr(feature="dev", allow(wrong_self_convention))]
#[cfg_attr(feature="dev", allow(redundant_closure))]
fn from_pending_block<H, F, G>(&self, latest_block_number: BlockNumber, from_chain: F, map_block: G) -> H fn from_pending_block<H, F, G>(&self, latest_block_number: BlockNumber, from_chain: F, map_block: G) -> H
where F: Fn() -> H, G: Fn(&ClosedBlock) -> H { where F: Fn() -> H, G: Fn(&ClosedBlock) -> H {
let sealing_work = self.sealing_work.lock(); let sealing_work = self.sealing_work.lock();
@ -891,7 +950,7 @@ impl MinerService for Miner {
fn pending_receipts(&self, best_block: BlockNumber) -> BTreeMap<H256, Receipt> { fn pending_receipts(&self, best_block: BlockNumber) -> BTreeMap<H256, Receipt> {
self.from_pending_block( self.from_pending_block(
best_block, best_block,
|| BTreeMap::new(), BTreeMap::new,
|pending| { |pending| {
let hashes = pending.transactions() let hashes = pending.transactions()
.iter() .iter()
@ -1025,7 +1084,7 @@ impl MinerService for Miner {
tx.sender().expect("Transaction is in block, so sender has to be defined.") tx.sender().expect("Transaction is in block, so sender has to be defined.")
}) })
.collect::<HashSet<Address>>(); .collect::<HashSet<Address>>();
for sender in to_remove.into_iter() { for sender in to_remove {
transaction_queue.remove_all(sender, chain.latest_nonce(&sender)); transaction_queue.remove_all(sender, chain.latest_nonce(&sender));
} }
}); });
@ -1103,6 +1162,7 @@ mod tests {
pending_set: PendingSet::AlwaysSealing, pending_set: PendingSet::AlwaysSealing,
work_queue_size: 5, work_queue_size: 5,
enable_resubmission: true, enable_resubmission: true,
tx_queue_banning: Banning::Disabled,
}, },
GasPricer::new_fixed(0u64.into()), GasPricer::new_fixed(0u64.into()),
&Spec::new_test(), &Spec::new_test(),

View File

@ -44,11 +44,12 @@
mod miner; mod miner;
mod external; mod external;
mod transaction_queue; mod transaction_queue;
mod banning_queue;
mod work_notify; mod work_notify;
mod price_info; mod price_info;
pub use self::transaction_queue::{TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin}; pub use self::transaction_queue::{TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin};
pub use self::miner::{Miner, MinerOptions, PendingSet, GasPricer, GasPriceCalibratorOptions, GasLimit}; pub use self::miner::{Miner, MinerOptions, Banning, PendingSet, GasPricer, GasPriceCalibratorOptions, GasLimit};
pub use self::external::{ExternalMiner, ExternalMinerService}; pub use self::external::{ExternalMiner, ExternalMinerService};
pub use client::TransactionImportResult; pub use client::TransactionImportResult;

View File

@ -48,10 +48,11 @@
//! nonce: U256::from(10), //! nonce: U256::from(10),
//! balance: U256::from(1_000_000), //! balance: U256::from(1_000_000),
//! }; //! };
//! let gas_estimator = |_tx: &SignedTransaction| 2.into();
//! //!
//! let mut txq = TransactionQueue::default(); //! let mut txq = TransactionQueue::default();
//! txq.add(st2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); //! txq.add(st2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
//! txq.add(st1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); //! txq.add(st1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
//! //!
//! // Check status //! // Check status
//! assert_eq!(txq.status().pending, 2); //! assert_eq!(txq.status().pending, 2);
@ -446,6 +447,7 @@ pub struct AccountDetails {
const GAS_LIMIT_HYSTERESIS: usize = 10; // (100/GAS_LIMIT_HYSTERESIS) % const GAS_LIMIT_HYSTERESIS: usize = 10; // (100/GAS_LIMIT_HYSTERESIS) %
/// Describes the strategy used to prioritize transactions in the queue. /// Describes the strategy used to prioritize transactions in the queue.
#[cfg_attr(feature="dev", allow(enum_variant_names))]
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PrioritizationStrategy { pub enum PrioritizationStrategy {
/// Use only gas price. Disregards the actual computation cost of the transaction. /// Use only gas price. Disregards the actual computation cost of the transaction.
@ -592,9 +594,20 @@ impl TransactionQueue {
} }
} }
/// Add signed transaction to queue to be verified and imported /// Add signed transaction to queue to be verified and imported.
pub fn add<T>(&mut self, tx: SignedTransaction, fetch_account: &T, origin: TransactionOrigin) -> Result<TransactionImportResult, Error> ///
where T: Fn(&Address) -> AccountDetails { /// NOTE fetch_account and gas_estimator should be cheap to compute
/// otherwise it might open up an attack vector.
pub fn add<F, G>(
&mut self,
tx: SignedTransaction,
origin: TransactionOrigin,
fetch_account: &F,
gas_estimator: &G,
) -> Result<TransactionImportResult, Error> where
F: Fn(&Address) -> AccountDetails,
G: Fn(&SignedTransaction) -> U256,
{
if tx.gas_price < self.minimal_gas_price && origin != TransactionOrigin::Local { if tx.gas_price < self.minimal_gas_price && origin != TransactionOrigin::Local {
trace!(target: "txqueue", trace!(target: "txqueue",
@ -625,8 +638,6 @@ impl TransactionQueue {
})); }));
} }
try!(tx.check_low_s());
if tx.gas > self.gas_limit || tx.gas > self.tx_gas_limit { if tx.gas > self.gas_limit || tx.gas > self.tx_gas_limit {
trace!(target: "txqueue", trace!(target: "txqueue",
"Dropping transaction above gas limit: {:?} ({} > min({}, {}))", "Dropping transaction above gas limit: {:?} ({} > min({}, {}))",
@ -642,6 +653,24 @@ impl TransactionQueue {
})); }));
} }
let minimal_gas = gas_estimator(&tx);
if tx.gas < minimal_gas {
trace!(target: "txqueue",
"Dropping transaction with insufficient gas: {:?} ({} > {})",
tx.hash(),
tx.gas,
minimal_gas,
);
return Err(Error::Transaction(TransactionError::InsufficientGas {
minimal: minimal_gas,
got: tx.gas,
}));
}
// Verify signature
try!(tx.check_low_s());
let vtx = try!(VerifiedTransaction::new(tx, origin)); let vtx = try!(VerifiedTransaction::new(tx, origin));
let client_account = fetch_account(&vtx.sender()); let client_account = fetch_account(&vtx.sender());
@ -904,16 +933,6 @@ impl TransactionQueue {
let nonce = tx.nonce(); let nonce = tx.nonce();
let hash = tx.hash(); let hash = tx.hash();
{
// Rough size sanity check
let gas = &tx.transaction.gas;
if U256::from(tx.transaction.data.len()) > *gas {
// Droping transaction
trace!(target: "txqueue", "Dropping oversized transaction: {:?} (gas: {} < size {})", hash, gas, tx.transaction.data.len());
return Err(TransactionError::LimitReached);
}
}
// The transaction might be old, let's check that. // The transaction might be old, let's check that.
// This has to be the first test, otherwise calculating // This has to be the first test, otherwise calculating
// nonce height would result in overflow. // nonce height would result in overflow.
@ -1103,6 +1122,10 @@ mod test {
} }
} }
fn gas_estimator(_tx: &SignedTransaction) -> U256 {
U256::zero()
}
fn new_tx_pair(nonce: U256, gas_price: U256, nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) { fn new_tx_pair(nonce: U256, gas_price: U256, nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) {
let tx1 = new_unsigned_tx(nonce, default_gas_val(), gas_price); let tx1 = new_unsigned_tx(nonce, default_gas_val(), gas_price);
let tx2 = new_unsigned_tx(nonce + nonce_increment, default_gas_val(), gas_price + gas_price_increment); let tx2 = new_unsigned_tx(nonce + nonce_increment, default_gas_val(), gas_price + gas_price_increment);
@ -1154,14 +1177,14 @@ mod test {
let (tx1, tx2) = new_tx_pair(123.into(), 1.into(), 1.into(), 0.into()); let (tx1, tx2) = new_tx_pair(123.into(), 1.into(), 1.into(), 0.into());
let sender = tx1.sender().unwrap(); let sender = tx1.sender().unwrap();
let nonce = tx1.nonce; let nonce = tx1.nonce;
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
assert_eq!(txq.last_nonce(&sender), Some(nonce + 1.into())); assert_eq!(txq.last_nonce(&sender), Some(nonce + 1.into()));
// when // when
let tx = new_tx(123.into(), 1.into()); let tx = new_tx(123.into(), 1.into());
let res = txq.add(tx.clone(), &default_account_details, TransactionOrigin::External); let res = txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
// No longer the case as we don't even consider a transaction that isn't above a full // No longer the case as we don't even consider a transaction that isn't above a full
@ -1317,12 +1340,12 @@ mod test {
!U256::zero() }; !U256::zero() };
// First insert one transaction to future // First insert one transaction to future
let res = txq.add(tx, &prev_nonce, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &prev_nonce, &gas_estimator);
assert_eq!(res.unwrap(), TransactionImportResult::Future); assert_eq!(res.unwrap(), TransactionImportResult::Future);
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
// now import second transaction to current // now import second transaction to current
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External); let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// and then there should be only one transaction in current (the one with higher gas_price) // and then there should be only one transaction in current (the one with higher gas_price)
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1342,12 +1365,12 @@ mod test {
!U256::zero() }; !U256::zero() };
// First insert one transaction to future // First insert one transaction to future
let res = txq.add(tx.clone(), &prev_nonce, TransactionOrigin::External); let res = txq.add(tx.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator);
assert_eq!(res.unwrap(), TransactionImportResult::Future); assert_eq!(res.unwrap(), TransactionImportResult::Future);
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
// now import second transaction to current // now import second transaction to current
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External); let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1366,7 +1389,7 @@ mod test {
let tx = new_tx_default(); let tx = new_tx_default();
// when // when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1385,10 +1408,10 @@ mod test {
txq.set_minimal_gas_price(15.into()); txq.set_minimal_gas_price(15.into());
// when // when
let res1 = txq.add(tx1, &default_account_details, TransactionOrigin::External); let res1 = txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, &default_account_details, TransactionOrigin::External); let res2 = txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res3 = txq.add(tx3, &default_account_details, TransactionOrigin::External); let res3 = txq.add(tx3, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res4 = txq.add(tx4, &default_account_details, TransactionOrigin::External); let res4 = txq.add(tx4, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res1.unwrap(), TransactionImportResult::Current); assert_eq!(res1.unwrap(), TransactionImportResult::Current);
@ -1419,10 +1442,10 @@ mod test {
txq.set_minimal_gas_price(15.into()); txq.set_minimal_gas_price(15.into());
// when // when
let res1 = txq.add(tx1, &default_account_details, TransactionOrigin::External); let res1 = txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, &default_account_details, TransactionOrigin::External); let res2 = txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res3 = txq.add(tx3, &default_account_details, TransactionOrigin::External); let res3 = txq.add(tx3, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res4 = txq.add(tx4, &default_account_details, TransactionOrigin::External); let res4 = txq.add(tx4, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res1.unwrap(), TransactionImportResult::Current); assert_eq!(res1.unwrap(), TransactionImportResult::Current);
@ -1465,7 +1488,7 @@ mod test {
txq.set_gas_limit(limit); txq.set_gas_limit(limit);
// when // when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::GasLimitExceeded { assert_eq!(unwrap_tx_err(res), TransactionError::GasLimitExceeded {
@ -1489,7 +1512,7 @@ mod test {
}; };
// when // when
let res = txq.add(tx, &account, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &account, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientBalance { assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientBalance {
@ -1509,7 +1532,7 @@ mod test {
txq.set_minimal_gas_price(tx.gas_price + U256::one()); txq.set_minimal_gas_price(tx.gas_price + U256::one());
// when // when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice { assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice {
@ -1529,7 +1552,7 @@ mod test {
txq.set_minimal_gas_price(tx.gas_price + U256::one()); txq.set_minimal_gas_price(tx.gas_price + U256::one());
// when // when
let res = txq.add(tx, &default_account_details, TransactionOrigin::Local); let res = txq.add(tx, TransactionOrigin::Local, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1559,7 +1582,7 @@ mod test {
rlp::decode(s.as_raw()) rlp::decode(s.as_raw())
}; };
// when // when
let res = txq.add(stx, &default_account_details, TransactionOrigin::External); let res = txq.add(stx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert!(res.is_err()); assert!(res.is_err());
@ -1573,8 +1596,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1593,9 +1616,9 @@ mod test {
// when // when
// first insert the one with higher gas price // first insert the one with higher gas price
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then the one with lower gas price, but local // then the one with lower gas price, but local
txq.add(tx.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1614,9 +1637,9 @@ mod test {
// when // when
// first insert local one with higher gas price // first insert local one with higher gas price
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then the one with lower gas price, but from retracted block // then the one with lower gas price, but from retracted block
txq.add(tx.clone(), &default_account_details, TransactionOrigin::RetractedBlock).unwrap(); txq.add(tx.clone(), TransactionOrigin::RetractedBlock, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1632,8 +1655,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1652,10 +1675,10 @@ mod test {
let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into()); let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into());
// insert everything // insert everything
txq.add(txa.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(txa.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(txb.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(txb.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx1.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx2.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 4); assert_eq!(txq.status().future, 4);
@ -1681,10 +1704,10 @@ mod test {
let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into()); let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into());
// insert everything // insert everything
txq.add(txa.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(txa.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(txb.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(txb.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let top = txq.top_transactions(); let top = txq.top_transactions();
assert_eq!(top[0], tx1); assert_eq!(top[0], tx1);
@ -1713,8 +1736,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.pending_hashes(); let top = txq.pending_hashes();
@ -1731,8 +1754,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(2.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(2.into(), 0.into());
// when // when
let res1 = txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); let res1 = txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let res2 = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); let res2 = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
assert_eq!(res1, TransactionImportResult::Current); assert_eq!(res1, TransactionImportResult::Current);
@ -1755,8 +1778,8 @@ mod test {
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx2.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 2); assert_eq!(txq.status().future, 2);
// when // when
@ -1778,13 +1801,13 @@ mod test {
let tx1 = new_unsigned_tx(124.into(), default_gas_val(), 1.into()).sign(secret); let tx1 = new_unsigned_tx(124.into(), default_gas_val(), 1.into()).sign(secret);
let tx2 = new_unsigned_tx(125.into(), default_gas_val(), 1.into()).sign(secret); let tx2 = new_unsigned_tx(125.into(), default_gas_val(), 1.into()).sign(secret);
txq.add(tx, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 1); assert_eq!(txq.status().pending, 1);
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
// when // when
txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1800,8 +1823,8 @@ mod test {
// given // given
let mut txq2 = TransactionQueue::default(); let mut txq2 = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(3.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(3.into(), 0.into());
txq2.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq2.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq2.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq2.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq2.status().pending, 1); assert_eq!(txq2.status().pending, 1);
assert_eq!(txq2.status().future, 1); assert_eq!(txq2.status().future, 1);
@ -1822,10 +1845,10 @@ mod test {
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx3 = new_tx_default(); let tx3 = new_tx_default();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 3); assert_eq!(txq.status().pending, 3);
// when // when
@ -1844,8 +1867,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// add // add
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let stats = txq.status(); let stats = txq.status();
assert_eq!(stats.pending, 2); assert_eq!(stats.pending, 2);
@ -1864,11 +1887,11 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let sender = tx.sender().unwrap(); let sender = tx.sender().unwrap();
let nonce = tx.nonce; let nonce = tx.nonce;
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 1); assert_eq!(txq.status().pending, 1);
// when // when
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External); let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
let t = txq.top_transactions(); let t = txq.top_transactions();
@ -1885,14 +1908,14 @@ mod test {
txq.current.set_limit(10); txq.current.set_limit(10);
let (tx1, tx2) = new_tx_pair_default(4.into(), 1.into()); let (tx1, tx2) = new_tx_pair_default(4.into(), 1.into());
let (tx3, tx4) = new_tx_pair_default(4.into(), 2.into()); let (tx3, tx4) = new_tx_pair_default(4.into(), 2.into());
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
// when // when
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx4.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
@ -1903,11 +1926,11 @@ mod test {
let mut txq = TransactionQueue::with_limits(PrioritizationStrategy::GasPriceOnly, 100, default_gas_val() * U256::from(2), !U256::zero()); let mut txq = TransactionQueue::with_limits(PrioritizationStrategy::GasPriceOnly, 100, default_gas_val() * U256::from(2), !U256::zero());
let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1)); let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1));
let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2)); let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2));
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// limited by gas // limited by gas
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::External).unwrap_err(); txq.add(tx4.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap_err();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
} }
@ -1917,13 +1940,13 @@ mod test {
let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1)); let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1));
let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2)); let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2));
let (tx5, tx6) = new_tx_pair_default(U256::from(1), U256::from(2)); let (tx5, tx6) = new_tx_pair_default(U256::from(1), U256::from(2));
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx1.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx5.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx5.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// Not accepted because of limit // Not accepted because of limit
txq.add(tx6.clone(), &default_account_details, TransactionOrigin::External).unwrap_err(); txq.add(tx6.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap_err();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx3.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx4.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 4); assert_eq!(txq.status().pending, 4);
} }
@ -1935,7 +1958,7 @@ mod test {
let fetch_last_nonce = |_a: &Address| AccountDetails { nonce: last_nonce, balance: !U256::zero() }; let fetch_last_nonce = |_a: &Address| AccountDetails { nonce: last_nonce, balance: !U256::zero() };
// when // when
let res = txq.add(tx, &fetch_last_nonce, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &fetch_last_nonce, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::Old); assert_eq!(unwrap_tx_err(res), TransactionError::Old);
@ -1951,12 +1974,12 @@ mod test {
balance: !U256::zero() }; balance: !U256::zero() };
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (_tx1, tx2) = new_tx_pair_default(1.into(), 0.into()); let (_tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
assert_eq!(txq.status().pending, 0); assert_eq!(txq.status().pending, 0);
// when // when
let res = txq.add(tx2.clone(), &nonce, TransactionOrigin::External); let res = txq.add(tx2.clone(), TransactionOrigin::External, &nonce, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::AlreadyImported); assert_eq!(unwrap_tx_err(res), TransactionError::AlreadyImported);
@ -1970,15 +1993,15 @@ mod test {
// given // given
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
// when // when
txq.remove_invalid(&tx1.hash(), &default_account_details); txq.remove_invalid(&tx1.hash(), &default_account_details);
assert_eq!(txq.status().pending, 0); assert_eq!(txq.status().pending, 0);
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1992,10 +2015,10 @@ mod test {
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx3 = new_tx_default(); let tx3 = new_tx_default();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 3); assert_eq!(txq.status().pending, 3);
// when // when
@ -2022,8 +2045,8 @@ mod test {
}; };
// when // when
txq.add(tx, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -2050,10 +2073,10 @@ mod test {
}; };
// when // when
txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx0, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx0, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -2071,8 +2094,8 @@ mod test {
!U256::zero() }; !U256::zero() };
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx1.clone(), &previous_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &previous_nonce, &gas_estimator).unwrap();
txq.add(tx2, &previous_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2, TransactionOrigin::External, &previous_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 2); assert_eq!(txq.status().future, 2);
// when // when
@ -2103,7 +2126,7 @@ mod test {
let details = |_a: &Address| AccountDetails { nonce: nonce, balance: !U256::zero() }; let details = |_a: &Address| AccountDetails { nonce: nonce, balance: !U256::zero() };
// when // when
txq.add(tx, &details, TransactionOrigin::External).unwrap(); txq.add(tx, TransactionOrigin::External, &details, &gas_estimator).unwrap();
// then // then
assert_eq!(txq.last_nonce(&from), Some(nonce)); assert_eq!(txq.last_nonce(&from), Some(nonce));
@ -2118,7 +2141,7 @@ mod test {
let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() }; let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() };
// Insert first transaction // Insert first transaction
txq.add(tx1, &details1, TransactionOrigin::External).unwrap(); txq.add(tx1, TransactionOrigin::External, &details1, &gas_estimator).unwrap();
// when // when
txq.remove_all(tx2.sender().unwrap(), nonce2 + U256::one()); txq.remove_all(tx2.sender().unwrap(), nonce2 + U256::one());
@ -2138,9 +2161,9 @@ mod test {
// when // when
// Insert first transaction // Insert first transaction
assert_eq!(txq.add(tx1, &details1, TransactionOrigin::External).unwrap(), TransactionImportResult::Current); assert_eq!(txq.add(tx1, TransactionOrigin::External, &details1, &gas_estimator).unwrap(), TransactionImportResult::Current);
// Second should go to future // Second should go to future
assert_eq!(txq.add(tx2, &details1, TransactionOrigin::External).unwrap(), TransactionImportResult::Future); assert_eq!(txq.add(tx2, TransactionOrigin::External, &details1, &gas_estimator).unwrap(), TransactionImportResult::Future);
// Now block is imported // Now block is imported
txq.remove_all(sender, nonce2 - U256::from(1)); txq.remove_all(sender, nonce2 - U256::from(1));
// tx2 should be not be promoted to current // tx2 should be not be promoted to current
@ -2159,9 +2182,9 @@ mod test {
assert_eq!(txq.has_local_pending_transactions(), false); assert_eq!(txq.has_local_pending_transactions(), false);
// when // when
assert_eq!(txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap(), TransactionImportResult::Current); assert_eq!(txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap(), TransactionImportResult::Current);
assert_eq!(txq.has_local_pending_transactions(), false); assert_eq!(txq.has_local_pending_transactions(), false);
assert_eq!(txq.add(tx2, &default_account_details, TransactionOrigin::Local).unwrap(), TransactionImportResult::Current); assert_eq!(txq.add(tx2, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap(), TransactionImportResult::Current);
// then // then
assert_eq!(txq.has_local_pending_transactions(), true); assert_eq!(txq.has_local_pending_transactions(), true);
@ -2176,8 +2199,8 @@ mod test {
default_account_details(a).balance }; default_account_details(a).balance };
// when // when
assert_eq!(txq.add(tx2, &prev_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Future); assert_eq!(txq.add(tx2, TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap(), TransactionImportResult::Future);
assert_eq!(txq.add(tx1.clone(), &prev_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Future); assert_eq!(txq.add(tx1.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap(), TransactionImportResult::Future);
// then // then
assert_eq!(txq.future.by_priority.len(), 1); assert_eq!(txq.future.by_priority.len(), 1);
@ -2202,14 +2225,14 @@ mod test {
(tx.sign(secret), tx2.sign(secret), tx2_2.sign(secret), tx3.sign(secret)) (tx.sign(secret), tx2.sign(secret), tx2_2.sign(secret), tx3.sign(secret))
}; };
let sender = tx1.sender().unwrap(); let sender = tx1.sender().unwrap();
txq.add(tx1, &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx1, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx2, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3, &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx3, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.future.by_priority.len(), 0); assert_eq!(txq.future.by_priority.len(), 0);
assert_eq!(txq.current.by_priority.len(), 3); assert_eq!(txq.current.by_priority.len(), 3);
// when // when
let res = txq.add(tx2_2, &default_account_details, TransactionOrigin::Local); let res = txq.add(tx2_2, TransactionOrigin::Local, &default_account_details, &gas_estimator);
// then // then
assert_eq!(txq.last_nonce(&sender).unwrap(), 125.into()); assert_eq!(txq.last_nonce(&sender).unwrap(), 125.into());
@ -2217,4 +2240,24 @@ mod test {
assert_eq!(txq.current.by_priority.len(), 3); assert_eq!(txq.current.by_priority.len(), 3);
} }
#[test]
fn should_reject_transactions_below_bas_gas() {
// given
let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
let high_gas = |_: &SignedTransaction| 100_001.into();
// when
let res1 = txq.add(tx1, TransactionOrigin::Local, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, TransactionOrigin::Local, &default_account_details, &high_gas);
// then
assert_eq!(res1.unwrap(), TransactionImportResult::Current);
assert_eq!(unwrap_tx_err(res2), TransactionError::InsufficientGas {
minimal: 100_001.into(),
got: 100_000.into(),
});
}
} }

View File

@ -89,7 +89,7 @@ impl ClientService {
db_config.set_cache(::db::COL_STATE, size); db_config.set_cache(::db::COL_STATE, size);
} }
db_config.compaction = config.db_compaction.compaction_profile(&client_path); db_config.compaction = config.db_compaction.compaction_profile(client_path);
db_config.wal = config.db_wal; db_config.wal = config.db_wal;
let pruning = config.pruning; let pruning = config.pruning;

View File

@ -33,6 +33,12 @@ pub enum Error {
BlockNotFound(H256), BlockNotFound(H256),
/// Incomplete chain. /// Incomplete chain.
IncompleteChain, IncompleteChain,
/// Best block has wrong state root.
WrongStateRoot(H256, H256),
/// Wrong block hash.
WrongBlockHash(u64, H256, H256),
/// Too many blocks contained within the snapshot.
TooManyBlocks(u64, u64),
/// Old starting block in a pruned database. /// Old starting block in a pruned database.
OldBlockPrunedDB, OldBlockPrunedDB,
/// Missing code. /// Missing code.
@ -52,7 +58,11 @@ impl fmt::Display for Error {
match *self { match *self {
Error::InvalidStartingBlock(ref id) => write!(f, "Invalid starting block: {:?}", id), Error::InvalidStartingBlock(ref id) => write!(f, "Invalid starting block: {:?}", id),
Error::BlockNotFound(ref hash) => write!(f, "Block not found in chain: {}", hash), Error::BlockNotFound(ref hash) => write!(f, "Block not found in chain: {}", hash),
Error::IncompleteChain => write!(f, "Cannot create snapshot due to incomplete chain."), Error::IncompleteChain => write!(f, "Incomplete blockchain."),
Error::WrongStateRoot(ref expected, ref found) => write!(f, "Final block has wrong state root. Expected {:?}, got {:?}", expected, found),
Error::WrongBlockHash(ref num, ref expected, ref found) =>
write!(f, "Block {} had wrong hash. expected {:?}, got {:?}", num, expected, found),
Error::TooManyBlocks(ref expected, ref found) => write!(f, "Snapshot contained too many blocks. Expected {}, got {}", expected, found),
Error::OldBlockPrunedDB => write!(f, "Attempted to create a snapshot at an old block while using \ Error::OldBlockPrunedDB => write!(f, "Attempted to create a snapshot at an old block while using \
a pruned database. Please re-run with the --pruning archive flag."), a pruned database. Please re-run with the --pruning archive flag."),
Error::MissingCode(ref missing) => write!(f, "Incomplete snapshot: {} contract codes not found.", missing.len()), Error::MissingCode(ref missing) => write!(f, "Incomplete snapshot: {} contract codes not found.", missing.len()),

View File

@ -26,6 +26,7 @@ use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use account_db::{AccountDB, AccountDBMut}; use account_db::{AccountDB, AccountDBMut};
use blockchain::{BlockChain, BlockProvider}; use blockchain::{BlockChain, BlockProvider};
use engines::Engine; use engines::Engine;
use header::Header;
use ids::BlockID; use ids::BlockID;
use views::BlockView; use views::BlockView;
@ -202,7 +203,7 @@ impl<'a> BlockChunker<'a> {
// cut off the chunk if too large. // cut off the chunk if too large.
if new_loaded_size > PREFERRED_CHUNK_SIZE && self.rlps.len() > 0 { if new_loaded_size > PREFERRED_CHUNK_SIZE && !self.rlps.is_empty() {
try!(self.write_chunk(last)); try!(self.write_chunk(last));
loaded_size = pair.len(); loaded_size = pair.len();
} else { } else {
@ -528,6 +529,20 @@ fn rebuild_accounts(
/// Proportion of blocks which we will verify `PoW` for. /// Proportion of blocks which we will verify `PoW` for.
const POW_VERIFY_RATE: f32 = 0.02; const POW_VERIFY_RATE: f32 = 0.02;
/// Verify an old block with the given header, engine, blockchain, body. If `always` is set, it will perform
/// the fullest verification possible. If not, it will take a random sample to determine whether it will
/// do heavy or light verification.
pub fn verify_old_block(rng: &mut OsRng, header: &Header, engine: &Engine, chain: &BlockChain, body: Option<&[u8]>, always: bool) -> Result<(), ::error::Error> {
if always || rng.gen::<f32>() <= POW_VERIFY_RATE {
match chain.block_header(header.parent_hash()) {
Some(parent) => engine.verify_block_family(&header, &parent, body),
None => engine.verify_block_seal(&header),
}
} else {
engine.verify_block_basic(&header, body)
}
}
/// Rebuilds the blockchain from chunks. /// Rebuilds the blockchain from chunks.
/// ///
/// Does basic verification for all blocks, but `PoW` verification for some. /// Does basic verification for all blocks, but `PoW` verification for some.
@ -543,17 +558,23 @@ pub struct BlockRebuilder {
rng: OsRng, rng: OsRng,
disconnected: Vec<(u64, H256)>, disconnected: Vec<(u64, H256)>,
best_number: u64, best_number: u64,
best_hash: H256,
best_root: H256,
fed_blocks: u64,
} }
impl BlockRebuilder { impl BlockRebuilder {
/// Create a new BlockRebuilder. /// Create a new BlockRebuilder.
pub fn new(chain: BlockChain, db: Arc<Database>, best_number: u64) -> Result<Self, ::error::Error> { pub fn new(chain: BlockChain, db: Arc<Database>, manifest: &ManifestData) -> Result<Self, ::error::Error> {
Ok(BlockRebuilder { Ok(BlockRebuilder {
chain: chain, chain: chain,
db: db, db: db,
rng: try!(OsRng::new()), rng: try!(OsRng::new()),
disconnected: Vec::new(), disconnected: Vec::new(),
best_number: best_number, best_number: manifest.block_number,
best_hash: manifest.block_hash,
best_root: manifest.state_root,
fed_blocks: 0,
}) })
} }
@ -566,9 +587,14 @@ impl BlockRebuilder {
let rlp = UntrustedRlp::new(chunk); let rlp = UntrustedRlp::new(chunk);
let item_count = rlp.item_count(); let item_count = rlp.item_count();
let num_blocks = (item_count - 3) as u64;
trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3); trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3);
if self.fed_blocks + num_blocks > SNAPSHOT_BLOCKS {
return Err(Error::TooManyBlocks(SNAPSHOT_BLOCKS, self.fed_blocks).into())
}
// todo: assert here that these values are consistent with chunks being in order. // todo: assert here that these values are consistent with chunks being in order.
let mut cur_number = try!(rlp.val_at::<u64>(0)) + 1; let mut cur_number = try!(rlp.val_at::<u64>(0)) + 1;
let mut parent_hash = try!(rlp.val_at::<H256>(1)); let mut parent_hash = try!(rlp.val_at::<H256>(1));
@ -585,14 +611,27 @@ impl BlockRebuilder {
let block = try!(abridged_block.to_block(parent_hash, cur_number, receipts_root)); let block = try!(abridged_block.to_block(parent_hash, cur_number, receipts_root));
let block_bytes = block.rlp_bytes(With); let block_bytes = block.rlp_bytes(With);
let is_best = cur_number == self.best_number;
if self.rng.gen::<f32>() <= POW_VERIFY_RATE { if is_best {
try!(engine.verify_block_seal(&block.header)) if block.header.hash() != self.best_hash {
} else { return Err(Error::WrongBlockHash(cur_number, self.best_hash, block.header.hash()).into())
try!(engine.verify_block_basic(&block.header, Some(&block_bytes)));
} }
let is_best = cur_number == self.best_number; if block.header.state_root() != &self.best_root {
return Err(Error::WrongStateRoot(self.best_root, *block.header.state_root()).into())
}
}
try!(verify_old_block(
&mut self.rng,
&block.header,
engine,
&self.chain,
Some(&block_bytes),
is_best
));
let mut batch = self.db.transaction(); let mut batch = self.db.transaction();
// special-case the first block in each chunk. // special-case the first block in each chunk.
@ -610,11 +649,15 @@ impl BlockRebuilder {
cur_number += 1; cur_number += 1;
} }
Ok(item_count as u64 - 3) self.fed_blocks += num_blocks;
Ok(num_blocks)
} }
/// Glue together any disconnected chunks. To be called at the end. /// Glue together any disconnected chunks and check that the chain is complete.
pub fn glue_chunks(self) { pub fn finalize(self, canonical: HashMap<u64, H256>) -> Result<(), Error> {
let mut batch = self.db.transaction();
for (first_num, first_hash) in self.disconnected { for (first_num, first_hash) in self.disconnected {
let parent_num = first_num - 1; let parent_num = first_num - 1;
@ -623,8 +666,23 @@ impl BlockRebuilder {
// the first block of the first chunks has nothing to connect to. // the first block of the first chunks has nothing to connect to.
if let Some(parent_hash) = self.chain.block_hash(parent_num) { if let Some(parent_hash) = self.chain.block_hash(parent_num) {
// if so, add the child to it. // if so, add the child to it.
self.chain.add_child(parent_hash, first_hash); self.chain.add_child(&mut batch, parent_hash, first_hash);
}
}
self.db.write_buffered(batch);
let best_number = self.best_number;
for num in (0..self.fed_blocks).map(|x| best_number - x) {
let hash = try!(self.chain.block_hash(num).ok_or(Error::IncompleteChain));
if let Some(canon_hash) = canonical.get(&num).cloned() {
if canon_hash != hash {
return Err(Error::WrongBlockHash(num, canon_hash, hash));
} }
} }
} }
Ok(())
}
} }

View File

@ -16,7 +16,7 @@
//! Snapshot network service implementation. //! Snapshot network service implementation.
use std::collections::HashSet; use std::collections::{HashMap, HashSet};
use std::io::ErrorKind; use std::io::ErrorKind;
use std::fs; use std::fs;
use std::path::PathBuf; use std::path::PathBuf;
@ -74,6 +74,7 @@ struct Restoration {
snappy_buffer: Bytes, snappy_buffer: Bytes,
final_state_root: H256, final_state_root: H256,
guard: Guard, guard: Guard,
canonical_hashes: HashMap<u64, H256>,
db: Arc<Database>, db: Arc<Database>,
} }
@ -99,7 +100,7 @@ impl Restoration {
.map_err(UtilError::SimpleString))); .map_err(UtilError::SimpleString)));
let chain = BlockChain::new(Default::default(), params.genesis, raw_db.clone()); let chain = BlockChain::new(Default::default(), params.genesis, raw_db.clone());
let blocks = try!(BlockRebuilder::new(chain, raw_db.clone(), manifest.block_number)); let blocks = try!(BlockRebuilder::new(chain, raw_db.clone(), &manifest));
let root = manifest.state_root.clone(); let root = manifest.state_root.clone();
Ok(Restoration { Ok(Restoration {
@ -112,6 +113,7 @@ impl Restoration {
snappy_buffer: Vec::new(), snappy_buffer: Vec::new(),
final_state_root: root, final_state_root: root,
guard: params.guard, guard: params.guard,
canonical_hashes: HashMap::new(),
db: raw_db, db: raw_db,
}) })
} }
@ -145,6 +147,11 @@ impl Restoration {
Ok(()) Ok(())
} }
// note canonical hashes.
fn note_canonical(&mut self, hashes: &[(u64, H256)]) {
self.canonical_hashes.extend(hashes.iter().cloned());
}
// finish up restoration. // finish up restoration.
fn finalize(self) -> Result<(), Error> { fn finalize(self) -> Result<(), Error> {
use util::trie::TrieError; use util::trie::TrieError;
@ -161,8 +168,8 @@ impl Restoration {
// check for missing code. // check for missing code.
try!(self.state.check_missing()); try!(self.state.check_missing());
// connect out-of-order chunks. // connect out-of-order chunks and verify chain integrity.
self.blocks.glue_chunks(); try!(self.blocks.finalize(self.canonical_hashes));
if let Some(writer) = self.writer { if let Some(writer) = self.writer {
try!(writer.finish(self.manifest)); try!(writer.finish(self.manifest));
@ -352,7 +359,8 @@ impl Service {
// "Cancelled" is mincing words a bit -- what really happened // "Cancelled" is mincing words a bit -- what really happened
// is that the state we were snapshotting got pruned out // is that the state we were snapshotting got pruned out
// before we could finish. // before we could finish.
info!("Cancelled prematurely-started periodic snapshot."); info!("Periodic snapshot failed: block state pruned.\
Run with a longer `--pruning-history` or with `--no-periodic-snapshot`");
return Ok(()) return Ok(())
} else { } else {
return Err(e); return Err(e);
@ -580,6 +588,14 @@ impl SnapshotService for Service {
trace!("Error sending snapshot service message: {:?}", e); trace!("Error sending snapshot service message: {:?}", e);
} }
} }
fn provide_canon_hashes(&self, canonical: &[(u64, H256)]) {
let mut rest = self.restoration.lock();
if let Some(ref mut rest) = rest.as_mut() {
rest.note_canonical(canonical);
}
}
} }
impl Drop for Service { impl Drop for Service {

View File

@ -48,6 +48,10 @@ pub trait SnapshotService : Sync + Send {
/// Feed a raw block chunk to the service to be processed asynchronously. /// Feed a raw block chunk to the service to be processed asynchronously.
/// no-op if currently restoring. /// no-op if currently restoring.
fn restore_block_chunk(&self, hash: H256, chunk: Bytes); fn restore_block_chunk(&self, hash: H256, chunk: Bytes);
/// Give the restoration in-progress some canonical block hashes for
/// extra verification (performed at the end)
fn provide_canon_hashes(&self, canonical: &[(u64, H256)]);
} }
impl IpcConfig for SnapshotService { } impl IpcConfig for SnapshotService { }

View File

@ -26,6 +26,7 @@ use snapshot::io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter};
use util::{Mutex, snappy}; use util::{Mutex, snappy};
use util::kvdb::{Database, DatabaseConfig}; use util::kvdb::{Database, DatabaseConfig};
use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
fn chunk_and_restore(amount: u64) { fn chunk_and_restore(amount: u64) {
@ -58,18 +59,20 @@ fn chunk_and_restore(amount: u64) {
// snapshot it. // snapshot it.
let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap()); let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap());
let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap(); let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap();
writer.into_inner().finish(::snapshot::ManifestData { let manifest = ::snapshot::ManifestData {
state_hashes: Vec::new(), state_hashes: Vec::new(),
block_hashes: block_hashes, block_hashes: block_hashes,
state_root: Default::default(), state_root: ::util::sha3::SHA3_NULL_RLP,
block_number: amount, block_number: amount,
block_hash: best_hash, block_hash: best_hash,
}).unwrap(); };
writer.into_inner().finish(manifest.clone()).unwrap();
// restore it. // restore it.
let new_db = Arc::new(Database::open(&db_cfg, new_path.as_str()).unwrap()); let new_db = Arc::new(Database::open(&db_cfg, new_path.as_str()).unwrap());
let new_chain = BlockChain::new(Default::default(), &genesis, new_db.clone()); let new_chain = BlockChain::new(Default::default(), &genesis, new_db.clone());
let mut rebuilder = BlockRebuilder::new(new_chain, new_db.clone(), amount).unwrap(); let mut rebuilder = BlockRebuilder::new(new_chain, new_db.clone(), &manifest).unwrap();
let reader = PackedReader::new(&snapshot_path).unwrap().unwrap(); let reader = PackedReader::new(&snapshot_path).unwrap().unwrap();
let engine = ::engines::NullEngine::new(Default::default(), Default::default()); let engine = ::engines::NullEngine::new(Default::default(), Default::default());
for chunk_hash in &reader.manifest().block_hashes { for chunk_hash in &reader.manifest().block_hashes {
@ -78,7 +81,7 @@ fn chunk_and_restore(amount: u64) {
rebuilder.feed(&chunk, &engine).unwrap(); rebuilder.feed(&chunk, &engine).unwrap();
} }
rebuilder.glue_chunks(); rebuilder.finalize(HashMap::new()).unwrap();
// and test it. // and test it.
let new_chain = BlockChain::new(Default::default(), &genesis, new_db); let new_chain = BlockChain::new(Default::default(), &genesis, new_db);

View File

@ -247,23 +247,34 @@ impl Account {
} }
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code. /// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &HashDB) -> bool { pub fn cache_code(&mut self, db: &HashDB) -> Option<Arc<Bytes>> {
// TODO: fill out self.code_cache; // TODO: fill out self.code_cache;
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty()); trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.is_cached() ||
if self.is_cached() { return Some(self.code_cache.clone()) }
match db.get(&self.code_hash) { match db.get(&self.code_hash) {
Some(x) => { Some(x) => {
self.code_size = Some(x.len()); self.code_size = Some(x.len());
self.code_cache = Arc::new(x.to_vec()); self.code_cache = Arc::new(x.to_vec());
true Some(self.code_cache.clone())
}, },
_ => { _ => {
warn!("Failed reverse get of {}", self.code_hash); warn!("Failed reverse get of {}", self.code_hash);
false None
}, },
} }
} }
/// Provide code to cache. For correctness, should be the correct code for the
/// account.
pub fn cache_given_code(&mut self, code: Arc<Bytes>) {
trace!("Account::cache_given_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size = Some(code.len());
self.code_cache = code;
}
/// Provide a database to get `code_size`. Should not be called if it is a contract without code. /// Provide a database to get `code_size`. Should not be called if it is a contract without code.
pub fn cache_code_size(&mut self, db: &HashDB) -> bool { pub fn cache_code_size(&mut self, db: &HashDB) -> bool {
// TODO: fill out self.code_cache; // TODO: fill out self.code_cache;
@ -413,7 +424,7 @@ impl Account {
self.code_size = other.code_size; self.code_size = other.code_size;
self.address_hash = other.address_hash; self.address_hash = other.address_hash;
let mut cache = self.storage_cache.borrow_mut(); let mut cache = self.storage_cache.borrow_mut();
for (k, v) in other.storage_cache.into_inner().into_iter() { for (k, v) in other.storage_cache.into_inner() {
cache.insert(k.clone() , v.clone()); //TODO: cloning should not be required here cache.insert(k.clone() , v.clone()); //TODO: cloning should not be required here
} }
self.storage_changes = other.storage_changes; self.storage_changes = other.storage_changes;
@ -476,7 +487,7 @@ mod tests {
}; };
let mut a = Account::from_rlp(&rlp); let mut a = Account::from_rlp(&rlp);
assert!(a.cache_code(&db.immutable())); assert!(a.cache_code(&db.immutable()).is_some());
let mut a = Account::from_rlp(&rlp); let mut a = Account::from_rlp(&rlp);
assert_eq!(a.note_code(vec![0x55, 0x44, 0xffu8]), Ok(())); assert_eq!(a.note_code(vec![0x55, 0x44, 0xffu8]), Ok(()));

View File

@ -127,11 +127,10 @@ impl AccountEntry {
fn overwrite_with(&mut self, other: AccountEntry) { fn overwrite_with(&mut self, other: AccountEntry) {
self.state = other.state; self.state = other.state;
match other.account { match other.account {
Some(acc) => match self.account { Some(acc) => {
Some(ref mut ours) => { if let Some(ref mut ours) = self.account {
ours.overwrite_with(acc); ours.overwrite_with(acc);
}, }
None => {},
}, },
None => self.account = None, None => self.account = None,
} }
@ -281,13 +280,10 @@ impl State {
} }
}, },
None => { None => {
match self.cache.get_mut().entry(k) { if let Entry::Occupied(e) = self.cache.get_mut().entry(k) {
Entry::Occupied(e) => {
if e.get().is_dirty() { if e.get().is_dirty() {
e.remove(); e.remove();
} }
},
_ => {}
} }
} }
} }
@ -501,6 +497,7 @@ impl State {
/// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit. /// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit.
/// `accounts` is mutable because we may need to commit the code or storage and record that. /// `accounts` is mutable because we may need to commit the code or storage and record that.
#[cfg_attr(feature="dev", allow(match_ref_pats))] #[cfg_attr(feature="dev", allow(match_ref_pats))]
#[cfg_attr(feature="dev", allow(needless_borrow))]
fn commit_into( fn commit_into(
factories: &Factories, factories: &Factories,
db: &mut StateDB, db: &mut StateDB,
@ -509,18 +506,15 @@ impl State {
) -> Result<(), Error> { ) -> Result<(), Error> {
// first, commit the sub trees. // first, commit the sub trees.
for (address, ref mut a) in accounts.iter_mut().filter(|&(_, ref a)| a.is_dirty()) { for (address, ref mut a) in accounts.iter_mut().filter(|&(_, ref a)| a.is_dirty()) {
match a.account { if let Some(ref mut account) = a.account {
Some(ref mut account) => {
if !account.is_empty() { if !account.is_empty() {
db.note_account_bloom(&address); db.note_account_bloom(address);
} }
let addr_hash = account.address_hash(address); let addr_hash = account.address_hash(address);
let mut account_db = factories.accountdb.create(db.as_hashdb_mut(), addr_hash); let mut account_db = factories.accountdb.create(db.as_hashdb_mut(), addr_hash);
account.commit_storage(&factories.trie, account_db.as_hashdb_mut()); account.commit_storage(&factories.trie, account_db.as_hashdb_mut());
account.commit_code(account_db.as_hashdb_mut()); account.commit_code(account_db.as_hashdb_mut());
} }
_ => {}
}
} }
{ {
@ -586,7 +580,7 @@ impl State {
fn query_pod(&mut self, query: &PodState) { fn query_pod(&mut self, query: &PodState) {
for (address, pod_account) in query.get().into_iter() for (address, pod_account) in query.get().into_iter()
.filter(|&(ref a, _)| self.ensure_cached(a, RequireCache::Code, true, |a| a.is_some())) .filter(|&(a, _)| self.ensure_cached(a, RequireCache::Code, true, |a| a.is_some()))
{ {
// needs to be split into two parts for the refcell code here // needs to be split into two parts for the refcell code here
// to work. // to work.
@ -605,17 +599,33 @@ impl State {
pod_state::diff_pod(&state_pre.to_pod(), &pod_state_post) pod_state::diff_pod(&state_pre.to_pod(), &pod_state_post)
} }
fn update_account_cache(require: RequireCache, account: &mut Account, db: &HashDB) { // load required account data from the databases.
match require { fn update_account_cache(require: RequireCache, account: &mut Account, state_db: &StateDB, db: &HashDB) {
match (account.is_cached(), require) {
(true, _) | (false, RequireCache::None) => {}
(false, require) => {
// if there's already code in the global cache, always cache it
// locally.
let hash = account.code_hash();
match state_db.get_cached_code(&hash) {
Some(code) => account.cache_given_code(code),
None => match require {
RequireCache::None => {}, RequireCache::None => {},
RequireCache::Code => { RequireCache::Code => {
account.cache_code(db); if let Some(code) = account.cache_code(db) {
// propagate code loaded from the database to
// the global code cache.
state_db.cache_code(hash, code)
}
} }
RequireCache::CodeSize => { RequireCache::CodeSize => {
account.cache_code_size(db); account.cache_code_size(db);
} }
} }
} }
}
}
}
/// Check caches for required data /// Check caches for required data
/// First searches for account in the local, then the shared cache. /// First searches for account in the local, then the shared cache.
@ -626,7 +636,7 @@ impl State {
if let Some(ref mut maybe_acc) = self.cache.borrow_mut().get_mut(a) { if let Some(ref mut maybe_acc) = self.cache.borrow_mut().get_mut(a) {
if let Some(ref mut account) = maybe_acc.account { if let Some(ref mut account) = maybe_acc.account {
let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a)); let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a));
Self::update_account_cache(require, account, accountdb.as_hashdb()); Self::update_account_cache(require, account, &self.db, accountdb.as_hashdb());
return f(Some(account)); return f(Some(account));
} }
return f(None); return f(None);
@ -635,7 +645,7 @@ impl State {
let result = self.db.get_cached(a, |mut acc| { let result = self.db.get_cached(a, |mut acc| {
if let Some(ref mut account) = acc { if let Some(ref mut account) = acc {
let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a)); let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a));
Self::update_account_cache(require, account, accountdb.as_hashdb()); Self::update_account_cache(require, account, &self.db, accountdb.as_hashdb());
} }
f(acc.map(|a| &*a)) f(acc.map(|a| &*a))
}); });
@ -653,7 +663,7 @@ impl State {
}; };
if let Some(ref mut account) = maybe_acc.as_mut() { if let Some(ref mut account) = maybe_acc.as_mut() {
let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a)); let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a));
Self::update_account_cache(require, account, accountdb.as_hashdb()); Self::update_account_cache(require, account, &self.db, accountdb.as_hashdb());
} }
let r = f(maybe_acc.as_ref()); let r = f(maybe_acc.as_ref());
self.insert_cache(a, AccountEntry::new_clean(maybe_acc)); self.insert_cache(a, AccountEntry::new_clean(maybe_acc));
@ -679,14 +689,12 @@ impl State {
None => { None => {
let maybe_acc = if self.db.check_account_bloom(a) { let maybe_acc = if self.db.check_account_bloom(a) {
let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR); let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
let maybe_acc = match db.get(a) { match db.get(a) {
Ok(Some(acc)) => AccountEntry::new_clean(Some(Account::from_rlp(&acc))), Ok(Some(acc)) => AccountEntry::new_clean(Some(Account::from_rlp(&acc))),
Ok(None) => AccountEntry::new_clean(None), Ok(None) => AccountEntry::new_clean(None),
Err(e) => panic!("Potential DB corruption encountered: {}", e), Err(e) => panic!("Potential DB corruption encountered: {}", e),
};
maybe_acc
} }
else { } else {
AccountEntry::new_clean(None) AccountEntry::new_clean(None)
}; };
self.insert_cache(a, maybe_acc); self.insert_cache(a, maybe_acc);
@ -711,7 +719,7 @@ impl State {
if require_code { if require_code {
let addr_hash = account.address_hash(a); let addr_hash = account.address_hash(a);
let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), addr_hash); let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), addr_hash);
account.cache_code(accountdb.as_hashdb()); Self::update_account_cache(RequireCache::Code, account, &self.db, accountdb.as_hashdb());
} }
account account
}, },

View File

@ -16,6 +16,7 @@
use std::collections::{VecDeque, HashSet}; use std::collections::{VecDeque, HashSet};
use lru_cache::LruCache; use lru_cache::LruCache;
use util::cache::MemoryLruCache;
use util::journaldb::JournalDB; use util::journaldb::JournalDB;
use util::hash::{H256}; use util::hash::{H256};
use util::hashdb::HashDB; use util::hashdb::HashDB;
@ -33,12 +34,17 @@ pub const ACCOUNT_BLOOM_HASHCOUNT_KEY: &'static [u8] = b"account_hash_count";
const STATE_CACHE_BLOCKS: usize = 12; const STATE_CACHE_BLOCKS: usize = 12;
// The percentage of supplied cache size to go to accounts.
const ACCOUNT_CACHE_RATIO: usize = 90;
/// Shared canonical state cache. /// Shared canonical state cache.
struct AccountCache { struct AccountCache {
/// DB Account cache. `None` indicates that account is known to be missing. /// DB Account cache. `None` indicates that account is known to be missing.
// When changing the type of the values here, be sure to update `mem_used` and // When changing the type of the values here, be sure to update `mem_used` and
// `new`. // `new`.
accounts: LruCache<Address, Option<Account>>, accounts: LruCache<Address, Option<Account>>,
/// DB Code cache. Maps code hashes to shared bytes.
code: MemoryLruCache<H256, Arc<Vec<u8>>>,
/// Information on the modifications in recently committed blocks; specifically which addresses /// Information on the modifications in recently committed blocks; specifically which addresses
/// changed in which block. Ordered by block number. /// changed in which block. Ordered by block number.
modifications: VecDeque<BlockChanges>, modifications: VecDeque<BlockChanges>,
@ -111,12 +117,15 @@ impl StateDB {
// into the `AccountCache` structure as its own `LruCache<(Address, H256), H256>`. // into the `AccountCache` structure as its own `LruCache<(Address, H256), H256>`.
pub fn new(db: Box<JournalDB>, cache_size: usize) -> StateDB { pub fn new(db: Box<JournalDB>, cache_size: usize) -> StateDB {
let bloom = Self::load_bloom(db.backing()); let bloom = Self::load_bloom(db.backing());
let cache_items = cache_size / ::std::mem::size_of::<Option<Account>>(); let acc_cache_size = cache_size * ACCOUNT_CACHE_RATIO / 100;
let code_cache_size = cache_size - acc_cache_size;
let cache_items = acc_cache_size / ::std::mem::size_of::<Option<Account>>();
StateDB { StateDB {
db: db, db: db,
account_cache: Arc::new(Mutex::new(AccountCache { account_cache: Arc::new(Mutex::new(AccountCache {
accounts: LruCache::new(cache_items), accounts: LruCache::new(cache_items),
code: MemoryLruCache::new(code_cache_size),
modifications: VecDeque::new(), modifications: VecDeque::new(),
})), })),
local_cache: Vec::new(), local_cache: Vec::new(),
@ -170,7 +179,7 @@ impl StateDB {
pub fn commit_bloom(batch: &mut DBTransaction, journal: BloomJournal) -> Result<(), UtilError> { pub fn commit_bloom(batch: &mut DBTransaction, journal: BloomJournal) -> Result<(), UtilError> {
assert!(journal.hash_functions <= 255); assert!(journal.hash_functions <= 255);
batch.put(COL_ACCOUNT_BLOOM, ACCOUNT_BLOOM_HASHCOUNT_KEY, &vec![journal.hash_functions as u8]); batch.put(COL_ACCOUNT_BLOOM, ACCOUNT_BLOOM_HASHCOUNT_KEY, &[journal.hash_functions as u8]);
let mut key = [0u8; 8]; let mut key = [0u8; 8];
let mut val = [0u8; 8]; let mut val = [0u8; 8];
@ -216,7 +225,7 @@ impl StateDB {
let mut clear = false; let mut clear = false;
for block in enacted.iter().filter(|h| self.commit_hash.as_ref().map_or(true, |p| *h != p)) { for block in enacted.iter().filter(|h| self.commit_hash.as_ref().map_or(true, |p| *h != p)) {
clear = clear || { clear = clear || {
if let Some(ref mut m) = cache.modifications.iter_mut().find(|ref m| &m.hash == block) { if let Some(ref mut m) = cache.modifications.iter_mut().find(|m| &m.hash == block) {
trace!("Reverting enacted block {:?}", block); trace!("Reverting enacted block {:?}", block);
m.is_canon = true; m.is_canon = true;
for a in &m.accounts { for a in &m.accounts {
@ -232,7 +241,7 @@ impl StateDB {
for block in retracted { for block in retracted {
clear = clear || { clear = clear || {
if let Some(ref mut m) = cache.modifications.iter_mut().find(|ref m| &m.hash == block) { if let Some(ref mut m) = cache.modifications.iter_mut().find(|m| &m.hash == block) {
trace!("Retracting block {:?}", block); trace!("Retracting block {:?}", block);
m.is_canon = false; m.is_canon = false;
for a in &m.accounts { for a in &m.accounts {
@ -286,7 +295,7 @@ impl StateDB {
is_canon: is_best, is_canon: is_best,
parent: parent.clone(), parent: parent.clone(),
}; };
let insert_at = cache.modifications.iter().enumerate().find(|&(_, ref m)| m.number < *number).map(|(i, _)| i); let insert_at = cache.modifications.iter().enumerate().find(|&(_, m)| m.number < *number).map(|(i, _)| i);
trace!("inserting modifications at {:?}", insert_at); trace!("inserting modifications at {:?}", insert_at);
if let Some(insert_at) = insert_at { if let Some(insert_at) = insert_at {
cache.modifications.insert(insert_at, block_changes); cache.modifications.insert(insert_at, block_changes);
@ -342,7 +351,12 @@ impl StateDB {
/// Heap size used. /// Heap size used.
pub fn mem_used(&self) -> usize { pub fn mem_used(&self) -> usize {
// TODO: account for LRU-cache overhead; this is a close approximation. // TODO: account for LRU-cache overhead; this is a close approximation.
self.db.mem_used() + self.account_cache.lock().accounts.len() * ::std::mem::size_of::<Option<Account>>() self.db.mem_used() + {
let cache = self.account_cache.lock();
cache.code.current_size() +
cache.accounts.len() * ::std::mem::size_of::<Option<Account>>()
}
} }
/// Returns underlying `JournalDB`. /// Returns underlying `JournalDB`.
@ -362,6 +376,15 @@ impl StateDB {
}) })
} }
/// Add a global code cache entry. This doesn't need to worry about canonicality because
/// it simply maps hashes to raw code and will always be correct in the absence of
/// hash collisions.
pub fn cache_code(&self, hash: H256, code: Arc<Vec<u8>>) {
let mut cache = self.account_cache.lock();
cache.code.insert(hash, code);
}
/// Get basic copy of the cached account. Does not include storage. /// Get basic copy of the cached account. Does not include storage.
/// Returns 'None' if cache is disabled or if the account is not cached. /// Returns 'None' if cache is disabled or if the account is not cached.
pub fn get_cached_account(&self, addr: &Address) -> Option<Option<Account>> { pub fn get_cached_account(&self, addr: &Address) -> Option<Option<Account>> {
@ -369,7 +392,14 @@ impl StateDB {
if !Self::is_allowed(addr, &self.parent_hash, &cache.modifications) { if !Self::is_allowed(addr, &self.parent_hash, &cache.modifications) {
return None; return None;
} }
cache.accounts.get_mut(&addr).map(|a| a.as_ref().map(|a| a.clone_basic())) cache.accounts.get_mut(addr).map(|a| a.as_ref().map(|a| a.clone_basic()))
}
/// Get cached code based on hash.
pub fn get_cached_code(&self, hash: &H256) -> Option<Arc<Vec<u8>>> {
let mut cache = self.account_cache.lock();
cache.code.get_mut(hash).map(|code| code.clone())
} }
/// Get value from a cached account. /// Get value from a cached account.
@ -406,8 +436,7 @@ impl StateDB {
// We search for our parent in that list first and then for // We search for our parent in that list first and then for
// all its parent until we hit the canonical block, // all its parent until we hit the canonical block,
// checking against all the intermediate modifications. // checking against all the intermediate modifications.
let mut iter = modifications.iter(); for m in modifications {
while let Some(ref m) = iter.next() {
if &m.hash == parent { if &m.hash == parent {
if m.is_canon { if m.is_canon {
return true; return true;
@ -420,7 +449,7 @@ impl StateDB {
} }
} }
trace!("Cache lookup skipped for {:?}: parent hash is unknown", addr); trace!("Cache lookup skipped for {:?}: parent hash is unknown", addr);
return false; false
} }
} }

View File

@ -389,7 +389,7 @@ pub fn get_good_dummy_block_fork_seq(start_number: usize, count: usize, parent_h
r r
} }
pub fn get_good_dummy_block() -> Bytes { pub fn get_good_dummy_block_hash() -> (H256, Bytes) {
let mut block_header = Header::new(); let mut block_header = Header::new();
let test_spec = get_test_spec(); let test_spec = get_test_spec();
let test_engine = &test_spec.engine; let test_engine = &test_spec.engine;
@ -400,7 +400,12 @@ pub fn get_good_dummy_block() -> Bytes {
block_header.set_parent_hash(test_spec.genesis_header().hash()); block_header.set_parent_hash(test_spec.genesis_header().hash());
block_header.set_state_root(test_spec.genesis_header().state_root().clone()); block_header.set_state_root(test_spec.genesis_header().state_root().clone());
create_test_block(&block_header) (block_header.hash(), create_test_block(&block_header))
}
pub fn get_good_dummy_block() -> Bytes {
let (_, bytes) = get_good_dummy_block_hash();
bytes
} }
pub fn get_bad_state_dummy_block() -> Bytes { pub fn get_bad_state_dummy_block() -> Bytes {

View File

@ -285,7 +285,7 @@ impl<T> TraceDatabase for TraceDB<T> where T: DatabaseExtras {
let mut blooms = self.blooms.write(); let mut blooms = self.blooms.write();
batch.extend_with_cache(db::COL_TRACE, &mut *blooms, blooms_to_insert, CacheUpdatePolicy::Remove); batch.extend_with_cache(db::COL_TRACE, &mut *blooms, blooms_to_insert, CacheUpdatePolicy::Remove);
// note_used must be called after locking blooms to avoid cache/traces deadlock on garbage collection // note_used must be called after locking blooms to avoid cache/traces deadlock on garbage collection
for key in blooms_keys.into_iter() { for key in blooms_keys {
self.note_used(CacheID::Bloom(key)); self.note_used(CacheID::Bloom(key));
} }
} }

View File

@ -50,7 +50,7 @@ fn prefix_subtrace_addresses(mut traces: Vec<FlatTrace>) -> Vec<FlatTrace> {
// [1, 0] // [1, 0]
let mut current_subtrace_index = 0; let mut current_subtrace_index = 0;
let mut first = true; let mut first = true;
for trace in traces.iter_mut() { for trace in &mut traces {
match (first, trace.trace_address.is_empty()) { match (first, trace.trace_address.is_empty()) {
(true, _) => first = false, (true, _) => first = false,
(_, true) => current_subtrace_index += 1, (_, true) => current_subtrace_index += 1,

38
evmbin/Cargo.lock generated
View File

@ -155,7 +155,7 @@ name = "ethash"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"sha3 0.1.0", "sha3 0.1.0",
] ]
@ -208,6 +208,9 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-bloom-journal" name = "ethcore-bloom-journal"
version = "0.1.0" version = "0.1.0"
dependencies = [
"siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "ethcore-devtools" name = "ethcore-devtools"
@ -223,7 +226,7 @@ dependencies = [
"crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)", "mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -275,8 +278,9 @@ dependencies = [
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0", "rlp 0.1.0",
"rocksdb 0.4.5 (git+https://github.com/ethcore/rust-rocksdb)", "rocksdb 0.4.5 (git+https://github.com/ethcore/rust-rocksdb)",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
@ -334,6 +338,7 @@ dependencies = [
"itertools 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -632,13 +637,28 @@ name = "odds"
version = "0.2.23" version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "owning_ref"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
version = "0.2.8" version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"owning_ref 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parking_lot_core"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -867,6 +887,11 @@ dependencies = [
"gcc 0.3.37 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.37 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "siphasher"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "slab" name = "slab"
version = "0.1.3" version = "0.1.3"
@ -1179,7 +1204,9 @@ dependencies = [
"checksum num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a16a42856a256b39c6d3484f097f6713e14feacd9bfb02290917904fae46c81c" "checksum num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a16a42856a256b39c6d3484f097f6713e14feacd9bfb02290917904fae46c81c"
"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3" "checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
"checksum odds 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)" = "e04630a62b3f1cc8c58b4d8f2555a40136f02b420e158242936ef286a72d33a0" "checksum odds 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)" = "e04630a62b3f1cc8c58b4d8f2555a40136f02b420e158242936ef286a72d33a0"
"checksum parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "968f685642555d2f7e202c48b8b11de80569e9bfea817f7f12d7c61aac62d4e6" "checksum owning_ref 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8d91377085359426407a287ab16884a0111ba473aa6844ff01d4ec20ce3d75e7"
"checksum parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e1435e7a2a00dfebededd6c6bdbd54008001e94b4a2aadd6aef0dc4c56317621"
"checksum parking_lot_core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb1b97670a2ffadce7c397fb80a3d687c4f3060140b885621ef1653d0e5d5068"
"checksum primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0e31b86efadeaeb1235452171a66689682783149a6249ff334a2c5d8218d00a4" "checksum primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0e31b86efadeaeb1235452171a66689682783149a6249ff334a2c5d8218d00a4"
"checksum primal-bit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "464a91febc06166783d4f5ba3577b5ed8dda8e421012df80bfe48a971ed7be8f" "checksum primal-bit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "464a91febc06166783d4f5ba3577b5ed8dda8e421012df80bfe48a971ed7be8f"
"checksum primal-check 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "647c81b67bb9551a7b88d0bcd785ac35b7d0bf4b2f358683d7c2375d04daec51" "checksum primal-check 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "647c81b67bb9551a7b88d0bcd785ac35b7d0bf4b2f358683d7c2375d04daec51"
@ -1205,6 +1232,7 @@ dependencies = [
"checksum serde_codegen 0.8.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e575e583f7d162e163af117fb9791fbd2bd203c31023b3219617e12c5997a738" "checksum serde_codegen 0.8.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e575e583f7d162e163af117fb9791fbd2bd203c31023b3219617e12c5997a738"
"checksum serde_codegen_internals 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "318f7e77aa5187391d74aaf4553d2189f56b0ce25e963414c951b97877ffdcec" "checksum serde_codegen_internals 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "318f7e77aa5187391d74aaf4553d2189f56b0ce25e963414c951b97877ffdcec"
"checksum serde_json 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1cb6b19e74d9f65b9d03343730b643d729a446b29376785cd65efdff4675e2fc" "checksum serde_json 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1cb6b19e74d9f65b9d03343730b643d729a446b29376785cd65efdff4675e2fc"
"checksum siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c44e42fa187b5a8782489cf7740cc27c3125806be2bf33563cf5e02e9533fcd"
"checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e" "checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
"checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4" "checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
"checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410" "checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"

View File

@ -31,7 +31,7 @@ pub struct FakeExt {
impl Default for FakeExt { impl Default for FakeExt {
fn default() -> Self { fn default() -> Self {
FakeExt { FakeExt {
schedule: Schedule::new_homestead(), schedule: Schedule::new_homestead_gas_fix(),
store: HashMap::new(), store: HashMap::new(),
depth: 1, depth: 1,
} }

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 78 KiB

View File

@ -23,28 +23,37 @@ rm -rf ./.git
git init git init
# add local files and send it up # add local files and send it up
echo "Setting up GitHub config for js-precompiled"
setup_git_user setup_git_user
echo "Checking out $CI_BUILD_REF_NAME branch"
git remote add origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/js-precompiled.git git remote add origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/js-precompiled.git
git fetch origin 2>$GITLOG git fetch origin 2>$GITLOG
git checkout -b $CI_BUILD_REF_NAME git checkout -b $CI_BUILD_REF_NAME
echo "Committing compiled files for $UTCDATE"
git add . git add .
git commit -m "$UTCDATE [compiled]" git commit -m "$UTCDATE"
echo "Merging remote"
git merge origin/$CI_BUILD_REF_NAME -X ours --commit -m "$UTCDATE [release]" git merge origin/$CI_BUILD_REF_NAME -X ours --commit -m "$UTCDATE [release]"
git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG
PRECOMPILED_HASH=`git rev-parse HEAD`
# back to root # back to root
popd popd
# inti git with right origin echo "Setting up GitHub config for parity"
setup_git_user setup_git_user
git remote set-url origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/parity.git git remote set-url origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/parity.git
# at this point we have a detached head on GitLab, reset
git reset --hard origin/$CI_BUILD_REF_NAME 2>$GITLOG git reset --hard origin/$CI_BUILD_REF_NAME 2>$GITLOG
# bump js-precompiled, add, commit & push echo "Updating cargo package parity-ui-precompiled#$PRECOMPILED_HASH"
cargo update -p parity-ui-precompiled cargo update -p parity-ui-precompiled
git add . || true # --precise "$PRECOMPILED_HASH"
echo "Committing updated files"
git add .
git commit -m "[ci skip] js-precompiled $UTCDATE" git commit -m "[ci skip] js-precompiled $UTCDATE"
git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG

View File

@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { inAddress, inNumber10, inNumber16, inOptions } from '../../format/input'; import { inAddress, inHex, inNumber10, inNumber16, inOptions } from '../../format/input';
import { outAccountInfo, outAddress, outSignerRequest } from '../../format/output'; import { outAccountInfo, outAddress, outSignerRequest } from '../../format/output';
export default class Personal { export default class Personal {
@ -73,6 +73,12 @@ export default class Personal {
.then(outAddress); .then(outAddress);
} }
newAccountFromSecret (secret, password) {
return this._transport
.execute('personal_newAccountFromSecret', inHex(secret), password)
.then(outAddress);
}
newAccountFromWallet (json, password) { newAccountFromWallet (json, password) {
return this._transport return this._transport
.execute('personal_newAccountFromWallet', json, password) .execute('personal_newAccountFromWallet', json, password)

View File

@ -16,7 +16,7 @@
import React, { Component, PropTypes } from 'react'; import React, { Component, PropTypes } from 'react';
// import { api } from '../parity'; import { api } from '../parity';
import { attachInstances } from '../services'; import { attachInstances } from '../services';
import Header from './Header'; import Header from './Header';
@ -83,7 +83,7 @@ export default class Application extends Component {
Promise Promise
.all([ .all([
attachInstances(), attachInstances(),
null // api.personal.accountsInfo() api.personal.accountsInfo()
]) ])
.then(([{ managerInstance, registryInstance, tokenregInstance }, accountsInfo]) => { .then(([{ managerInstance, registryInstance, tokenregInstance }, accountsInfo]) => {
accountsInfo = accountsInfo || {}; accountsInfo = accountsInfo || {};

View File

@ -22,7 +22,7 @@
.account { .account {
margin: 0.5em !important; margin: 0.5em !important;
background: rgb(50, 100, 150) !important; background: #430 !important;
display: inline-block !important; display: inline-block !important;
} }

View File

@ -73,6 +73,7 @@ export default class ActionBuyIn extends Component {
if (complete) { if (complete) {
return ( return (
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Done' label='Done'
primary primary
onTouchTap={ this.props.onClose } /> onTouchTap={ this.props.onClose } />
@ -84,10 +85,12 @@ export default class ActionBuyIn extends Component {
return ([ return ([
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Cancel' label='Cancel'
primary primary
onTouchTap={ this.props.onClose } />, onTouchTap={ this.props.onClose } />,
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Buy' label='Buy'
primary primary
disabled={ hasError || sending } disabled={ hasError || sending }

View File

@ -72,6 +72,7 @@ export default class ActionRefund extends Component {
if (this.state.complete) { if (this.state.complete) {
return ( return (
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Done' label='Done'
primary primary
onTouchTap={ this.props.onClose } /> onTouchTap={ this.props.onClose } />
@ -82,10 +83,12 @@ export default class ActionRefund extends Component {
return ([ return ([
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Cancel' label='Cancel'
primary primary
onTouchTap={ this.props.onClose } />, onTouchTap={ this.props.onClose } />,
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Refund' label='Refund'
primary primary
disabled={ hasError || this.state.sending } disabled={ hasError || this.state.sending }

View File

@ -75,6 +75,7 @@ export default class ActionTransfer extends Component {
if (complete) { if (complete) {
return ( return (
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Done' label='Done'
primary primary
onTouchTap={ this.props.onClose } /> onTouchTap={ this.props.onClose } />
@ -85,10 +86,12 @@ export default class ActionTransfer extends Component {
return ([ return ([
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Cancel' label='Cancel'
primary primary
onTouchTap={ this.props.onClose } />, onTouchTap={ this.props.onClose } />,
<FlatButton <FlatButton
className={ styles.dlgbtn }
label='Transfer' label='Transfer'
primary primary
disabled={ hasError || sending } disabled={ hasError || sending }

View File

@ -16,7 +16,7 @@
*/ */
.actions { .actions {
text-align: center; text-align: center;
padding: 2em 2em 0 2em; padding: 2em 0 0 0;
width: 100%; width: 100%;
} }
@ -25,20 +25,31 @@
} }
.button button { .button button {
background-color: rgba(50, 100, 150, 1) !important; background-color: #430 !important;
height: 56px !important; height: 56px !important;
padding: 0 10px !important; padding: 0 10px !important;
} }
.button button[disabled] { .button button[disabled] {
background-color: rgba(50, 50, 50, 0.25) !important; opacity: 0.25;
}
.dlgbtn {
}
.dlgbtn span {
color: #430 !important;
}
.dlgbtn[disabled] {
opacity: 0.25;
} }
.dialog { .dialog {
} }
.dialog h3 { .dialog h3 {
color: rgba(50, 100, 150, 1) !important; color: #430 !important;
text-transform: uppercase; text-transform: uppercase;
} }

View File

@ -0,0 +1,21 @@
/* Copyright 2015, 2016 Ethcore (UK) Ltd.
/* This file is part of Parity.
/*
/* Parity is free software: you can redistribute it and/or modify
/* it under the terms of the GNU General Public License as published by
/* the Free Software Foundation, either version 3 of the License, or
/* (at your option) any later version.
/*
/* Parity is distributed in the hope that it will be useful,
/* but WITHOUT ANY WARRANTY; without even the implied warranty of
/* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
/* GNU General Public License for more details.
/*
/* You should have received a copy of the GNU General Public License
/* along with Parity. If not, see <http://www.gnu.org/licenses/>.
*/
.body {
background-size: cover;
background-repeat: no-repeat;
}

View File

@ -32,6 +32,13 @@ import Events from '../Events';
import Loading from '../Loading'; import Loading from '../Loading';
import Status from '../Status'; import Status from '../Status';
import styles from './application.css';
import bgimage from '../../../../assets/images/dapps/gavcoin-bg.jpg';
const bgstyle = {
backgroundImage: `url(${bgimage})`
};
const DIVISOR = 10 ** 6; const DIVISOR = 10 ** 6;
export default class Application extends Component { export default class Application extends Component {
@ -70,7 +77,7 @@ export default class Application extends Component {
} }
return ( return (
<div> <div className={ styles.body } style={ bgstyle }>
{ this.renderModals() } { this.renderModals() }
<Status <Status
address={ address } address={ address }
@ -206,7 +213,7 @@ export default class Application extends Component {
.all([ .all([
registry.getAddress.call({}, [api.util.sha3('gavcoin'), 'A']), registry.getAddress.call({}, [api.util.sha3('gavcoin'), 'A']),
api.eth.accounts(), api.eth.accounts(),
null // api.personal.accountsInfo() api.personal.accountsInfo()
]); ]);
}) })
.then(([address, addresses, infos]) => { .then(([address, addresses, infos]) => {

View File

@ -90,5 +90,5 @@
} }
.newtranch { .newtranch {
background: rgba(50, 250, 50, 0.1); background: rgba(255, 175, 0, 0.125); /*rgba(68, 51, 0, 0.15);*/
} }

View File

@ -15,8 +15,8 @@
/* along with Parity. If not, see <http://www.gnu.org/licenses/>. /* along with Parity. If not, see <http://www.gnu.org/licenses/>.
*/ */
.status { .status {
background: rgba(25, 75, 125, 1); background: rgba(255, 175, 0, 0.25);
color: rgba(255, 255, 255, 1); color: #430;
padding: 4em 0 2em 0; padding: 4em 0 2em 0;
display: flex; display: flex;
flex-wrap: wrap; flex-wrap: wrap;
@ -38,14 +38,16 @@
.byline { .byline {
font-size: 1.25em; font-size: 1.25em;
color: rgba(255, 255, 255, 0.7); color: #430;
opacity: 0.75;
} }
.heading { .heading {
text-transform: uppercase; text-transform: uppercase;
letter-spacing: 0.25em; letter-spacing: 0.25em;
font-size: 1.5em; font-size: 1.5em;
color: rgba(255, 255, 255, 0.7); color: #430;
opacity: 0.75;
} }
.hero { .hero {

View File

@ -29,7 +29,7 @@ export function attachInterface () {
.all([ .all([
registry.getAddress.call({}, [api.util.sha3('githubhint'), 'A']), registry.getAddress.call({}, [api.util.sha3('githubhint'), 'A']),
api.eth.accounts(), api.eth.accounts(),
null // api.personal.accountsInfo() api.personal.accountsInfo()
]); ]);
}) })
.then(([address, addresses, accountsInfo]) => { .then(([address, addresses, accountsInfo]) => {

View File

@ -22,12 +22,16 @@ export const fetch = () => (dispatch) => {
return Promise return Promise
.all([ .all([
api.eth.accounts(), api.eth.accounts(),
null // api.personal.accountsInfo() api.personal.accountsInfo()
]) ])
.then(([ accounts, data ]) => { .then(([ accounts, data ]) => {
const addresses = accounts.map((address) => { data = data || {};
return { address, isAccount: true }; const addresses = Object.keys(data)
}); .filter((address) => data[address] && !data[address].meta.deleted)
.map((address) => ({
...data[address], address,
isAccount: accounts.includes(address)
}));
dispatch(set(addresses)); dispatch(set(addresses));
}) })
.catch((error) => { .catch((error) => {

View File

@ -50,7 +50,7 @@ export function attachInterface (callback) {
.all([ .all([
registry.getAddress.call({}, [api.util.sha3('signaturereg'), 'A']), registry.getAddress.call({}, [api.util.sha3('signaturereg'), 'A']),
api.eth.accounts(), api.eth.accounts(),
null // api.personal.accountsInfo() api.personal.accountsInfo()
]); ]);
}) })
.then(([address, addresses, accountsInfo]) => { .then(([address, addresses, accountsInfo]) => {

View File

@ -38,7 +38,7 @@ export const loadAccounts = () => (dispatch) => {
Promise Promise
.all([ .all([
api.eth.accounts(), api.eth.accounts(),
null // api.personal.accountsInfo() api.personal.accountsInfo()
]) ])
.then(([ accounts, accountsInfo ]) => { .then(([ accounts, accountsInfo ]) => {
accountsInfo = accountsInfo || {}; accountsInfo = accountsInfo || {};

View File

@ -141,7 +141,7 @@ export default {
}, },
newAccountFromPhrase: { newAccountFromPhrase: {
desc: 'Creates a new account from a brainwallet passphrase', desc: 'Creates a new account from a recovery passphrase',
params: [ params: [
{ {
type: String, type: String,
@ -158,6 +158,24 @@ export default {
} }
}, },
newAccountFromSecret: {
desc: 'Creates a new account from a private ethstore secret key',
params: [
{
type: Data,
desc: 'Secret, 32-byte hex'
},
{
type: String,
desc: 'Password'
}
],
returns: {
type: Address,
desc: 'The created address'
}
},
newAccountFromWallet: { newAccountFromWallet: {
desc: 'Creates a new account from a JSON import', desc: 'Creates a new account from a JSON import',
params: [ params: [

View File

@ -32,19 +32,25 @@
border-radius: 16px; border-radius: 16px;
margin: 0.75em 0.5em 0 0; margin: 0.75em 0.5em 0 0;
max-height: 24px; max-height: 24px;
max-width: 100%;
display: flex;
align-items: center;
} }
.balance img { .balance img {
display: inline-block;
height: 32px; height: 32px;
margin: -4px 1em 0 0; margin: -4px 1em 0 0;
width: 32px; width: 32px;
} }
.balance div { .balanceValue {
display: inline-block;
/*font-family: 'Roboto Mono', monospace;*/
line-height: 24px;
margin: 0 1em 0 0; margin: 0 1em 0 0;
vertical-align: top; text-overflow: ellipsis;
white-space: nowrap;
overflow: hidden;
}
.balanceTag {
font-size: 0.85em;
padding-right: 0.75rem;
} }

View File

@ -56,7 +56,10 @@ class Balance extends Component {
<img <img
src={ imagesrc } src={ imagesrc }
alt={ token.name } /> alt={ token.name } />
<div>{ value }<small> { token.tag }</small></div> <div className={ styles.balanceValue }>
<span title={ value }> { value } </span>
</div>
<div className={ styles.balanceTag }> { token.tag } </div>
</div> </div>
); );
}); });

View File

@ -16,6 +16,9 @@
*/ */
.byline { .byline {
color: #aaa; color: #aaa;
text-overflow: ellipsis;
white-space: nowrap;
overflow: hidden;
} }
.title { .title {

View File

@ -40,7 +40,7 @@ export default class Title extends Component {
{ this.props.title } { this.props.title }
</h3> </h3>
<div className={ styles.byline }> <div className={ styles.byline }>
{ this.props.byline } <span title={ this.props.byline }>{ this.props.byline }</span>
</div> </div>
</div> </div>
); );

View File

@ -10,7 +10,7 @@ rustc-serialize = "0.3"
serde = "0.8" serde = "0.8"
serde_json = "0.8" serde_json = "0.8"
serde_macros = { version = "0.8", optional = true } serde_macros = { version = "0.8", optional = true }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
[build-dependencies] [build-dependencies]
serde_codegen = { version = "0.8", optional = true } serde_codegen = { version = "0.8", optional = true }

View File

@ -41,13 +41,13 @@ disable = false
port = 8545 port = 8545
interface = "local" interface = "local"
cors = "null" cors = "null"
apis = ["web3", "eth", "net", "ethcore", "traces", "rpc"] apis = ["web3", "eth", "net", "ethcore", "traces", "rpc", "personal_safe"]
hosts = ["none"] hosts = ["none"]
[ipc] [ipc]
disable = false disable = false
path = "$HOME/.parity/jsonrpc.ipc" path = "$HOME/.parity/jsonrpc.ipc"
apis = ["web3", "eth", "net", "ethcore", "traces", "rpc"] apis = ["web3", "eth", "net", "ethcore", "traces", "rpc", "personal_safe"]
[dapps] [dapps]
disable = false disable = false
@ -74,7 +74,10 @@ gas_cap = "6283184"
tx_queue_size = 1024 tx_queue_size = 1024
tx_queue_gas = "auto" tx_queue_gas = "auto"
tx_queue_strategy = "gas_factor" tx_queue_strategy = "gas_factor"
tx_queue_ban_count = 1
tx_queue_ban_time = 180 #s
tx_gas_limit = "6283184" tx_gas_limit = "6283184"
tx_time_limit = 100 #ms
extra_data = "Parity" extra_data = "Parity"
remove_solved = false remove_solved = false
notify_work = ["http://localhost:3001"] notify_work = ["http://localhost:3001"]

View File

@ -145,7 +145,7 @@ usage! {
or |c: &Config| otry!(c.rpc).interface.clone(), or |c: &Config| otry!(c.rpc).interface.clone(),
flag_jsonrpc_cors: Option<String> = None, flag_jsonrpc_cors: Option<String> = None,
or |c: &Config| otry!(c.rpc).cors.clone().map(Some), or |c: &Config| otry!(c.rpc).cors.clone().map(Some),
flag_jsonrpc_apis: String = "web3,eth,net,ethcore,traces,rpc", flag_jsonrpc_apis: String = "web3,eth,net,ethcore,traces,rpc,personal_safe",
or |c: &Config| otry!(c.rpc).apis.clone().map(|vec| vec.join(",")), or |c: &Config| otry!(c.rpc).apis.clone().map(|vec| vec.join(",")),
flag_jsonrpc_hosts: String = "none", flag_jsonrpc_hosts: String = "none",
or |c: &Config| otry!(c.rpc).hosts.clone().map(|vec| vec.join(",")), or |c: &Config| otry!(c.rpc).hosts.clone().map(|vec| vec.join(",")),
@ -155,7 +155,7 @@ usage! {
or |c: &Config| otry!(c.ipc).disable.clone(), or |c: &Config| otry!(c.ipc).disable.clone(),
flag_ipc_path: String = "$HOME/.parity/jsonrpc.ipc", flag_ipc_path: String = "$HOME/.parity/jsonrpc.ipc",
or |c: &Config| otry!(c.ipc).path.clone(), or |c: &Config| otry!(c.ipc).path.clone(),
flag_ipc_apis: String = "web3,eth,net,ethcore,traces,rpc", flag_ipc_apis: String = "web3,eth,net,ethcore,traces,rpc,personal_safe",
or |c: &Config| otry!(c.ipc).apis.clone().map(|vec| vec.join(",")), or |c: &Config| otry!(c.ipc).apis.clone().map(|vec| vec.join(",")),
// DAPPS // DAPPS
@ -187,6 +187,8 @@ usage! {
or |c: &Config| otry!(c.mining).work_queue_size.clone(), or |c: &Config| otry!(c.mining).work_queue_size.clone(),
flag_tx_gas_limit: Option<String> = None, flag_tx_gas_limit: Option<String> = None,
or |c: &Config| otry!(c.mining).tx_gas_limit.clone().map(Some), or |c: &Config| otry!(c.mining).tx_gas_limit.clone().map(Some),
flag_tx_time_limit: Option<u64> = None,
or |c: &Config| otry!(c.mining).tx_time_limit.clone().map(Some),
flag_relay_set: String = "cheap", flag_relay_set: String = "cheap",
or |c: &Config| otry!(c.mining).relay_set.clone(), or |c: &Config| otry!(c.mining).relay_set.clone(),
flag_usd_per_tx: String = "0", flag_usd_per_tx: String = "0",
@ -205,8 +207,12 @@ usage! {
or |c: &Config| otry!(c.mining).tx_queue_size.clone(), or |c: &Config| otry!(c.mining).tx_queue_size.clone(),
flag_tx_queue_gas: String = "auto", flag_tx_queue_gas: String = "auto",
or |c: &Config| otry!(c.mining).tx_queue_gas.clone(), or |c: &Config| otry!(c.mining).tx_queue_gas.clone(),
flag_tx_queue_strategy: String = "gas_factor", flag_tx_queue_strategy: String = "gas_price",
or |c: &Config| otry!(c.mining).tx_queue_strategy.clone(), or |c: &Config| otry!(c.mining).tx_queue_strategy.clone(),
flag_tx_queue_ban_count: u16 = 1u16,
or |c: &Config| otry!(c.mining).tx_queue_ban_count.clone(),
flag_tx_queue_ban_time: u16 = 180u16,
or |c: &Config| otry!(c.mining).tx_queue_ban_time.clone(),
flag_remove_solved: bool = false, flag_remove_solved: bool = false,
or |c: &Config| otry!(c.mining).remove_solved.clone(), or |c: &Config| otry!(c.mining).remove_solved.clone(),
flag_notify_work: Option<String> = None, flag_notify_work: Option<String> = None,
@ -361,6 +367,7 @@ struct Mining {
reseal_min_period: Option<u64>, reseal_min_period: Option<u64>,
work_queue_size: Option<usize>, work_queue_size: Option<usize>,
tx_gas_limit: Option<String>, tx_gas_limit: Option<String>,
tx_time_limit: Option<u64>,
relay_set: Option<String>, relay_set: Option<String>,
usd_per_tx: Option<String>, usd_per_tx: Option<String>,
usd_per_eth: Option<String>, usd_per_eth: Option<String>,
@ -371,6 +378,8 @@ struct Mining {
tx_queue_size: Option<usize>, tx_queue_size: Option<usize>,
tx_queue_gas: Option<String>, tx_queue_gas: Option<String>,
tx_queue_strategy: Option<String>, tx_queue_strategy: Option<String>,
tx_queue_ban_count: Option<u16>,
tx_queue_ban_time: Option<u16>,
remove_solved: Option<bool>, remove_solved: Option<bool>,
notify_work: Option<Vec<String>>, notify_work: Option<Vec<String>>,
} }
@ -445,6 +454,20 @@ mod tests {
assert_eq!(args.flag_chain, "xyz".to_owned()); assert_eq!(args.flag_chain, "xyz".to_owned());
} }
#[test]
fn should_use_config_if_cli_is_missing() {
let mut config = Config::default();
let mut footprint = Footprint::default();
footprint.pruning_history = Some(128);
config.footprint = Some(footprint);
// when
let args = Args::parse_with_config(&["parity"], config).unwrap();
// then
assert_eq!(args.flag_pruning_history, 128);
}
#[test] #[test]
fn should_parse_full_config() { fn should_parse_full_config() {
// given // given
@ -520,13 +543,13 @@ mod tests {
flag_jsonrpc_port: 8545u16, flag_jsonrpc_port: 8545u16,
flag_jsonrpc_interface: "local".into(), flag_jsonrpc_interface: "local".into(),
flag_jsonrpc_cors: Some("null".into()), flag_jsonrpc_cors: Some("null".into()),
flag_jsonrpc_apis: "web3,eth,net,ethcore,traces,rpc".into(), flag_jsonrpc_apis: "web3,eth,net,ethcore,traces,rpc,personal_safe".into(),
flag_jsonrpc_hosts: "none".into(), flag_jsonrpc_hosts: "none".into(),
// IPC // IPC
flag_no_ipc: false, flag_no_ipc: false,
flag_ipc_path: "$HOME/.parity/jsonrpc.ipc".into(), flag_ipc_path: "$HOME/.parity/jsonrpc.ipc".into(),
flag_ipc_apis: "web3,eth,net,ethcore,traces,rpc".into(), flag_ipc_apis: "web3,eth,net,ethcore,traces,rpc,personal_safe".into(),
// DAPPS // DAPPS
flag_no_dapps: false, flag_no_dapps: false,
@ -544,6 +567,7 @@ mod tests {
flag_reseal_min_period: 4000u64, flag_reseal_min_period: 4000u64,
flag_work_queue_size: 20usize, flag_work_queue_size: 20usize,
flag_tx_gas_limit: Some("6283184".into()), flag_tx_gas_limit: Some("6283184".into()),
flag_tx_time_limit: Some(100u64),
flag_relay_set: "cheap".into(), flag_relay_set: "cheap".into(),
flag_usd_per_tx: "0".into(), flag_usd_per_tx: "0".into(),
flag_usd_per_eth: "auto".into(), flag_usd_per_eth: "auto".into(),
@ -554,6 +578,8 @@ mod tests {
flag_tx_queue_size: 1024usize, flag_tx_queue_size: 1024usize,
flag_tx_queue_gas: "auto".into(), flag_tx_queue_gas: "auto".into(),
flag_tx_queue_strategy: "gas_factor".into(), flag_tx_queue_strategy: "gas_factor".into(),
flag_tx_queue_ban_count: 1u16,
flag_tx_queue_ban_time: 180u16,
flag_remove_solved: false, flag_remove_solved: false,
flag_notify_work: Some("http://localhost:3001".into()), flag_notify_work: Some("http://localhost:3001".into()),
@ -713,7 +739,10 @@ mod tests {
tx_queue_size: Some(1024), tx_queue_size: Some(1024),
tx_queue_gas: Some("auto".into()), tx_queue_gas: Some("auto".into()),
tx_queue_strategy: None, tx_queue_strategy: None,
tx_queue_ban_count: None,
tx_queue_ban_time: None,
tx_gas_limit: None, tx_gas_limit: None,
tx_time_limit: None,
extra_data: None, extra_data: None,
remove_solved: None, remove_solved: None,
notify_work: None, notify_work: None,

View File

@ -107,7 +107,7 @@ API and Console Options:
--jsonrpc-apis APIS Specify the APIs available through the JSONRPC --jsonrpc-apis APIS Specify the APIs available through the JSONRPC
interface. APIS is a comma-delimited list of API interface. APIS is a comma-delimited list of API
name. Possible name are web3, eth, net, personal, name. Possible name are web3, eth, net, personal,
ethcore, ethcore_set, traces, rpc. ethcore, ethcore_set, traces, rpc, personal_safe.
(default: {flag_jsonrpc_apis}). (default: {flag_jsonrpc_apis}).
--jsonrpc-hosts HOSTS List of allowed Host header values. This option will --jsonrpc-hosts HOSTS List of allowed Host header values. This option will
validate the Host header sent by the browser, it validate the Host header sent by the browser, it
@ -166,6 +166,11 @@ Sealing/Mining Options:
--tx-gas-limit GAS Apply a limit of GAS as the maximum amount of gas --tx-gas-limit GAS Apply a limit of GAS as the maximum amount of gas
a single transaction may have for it to be mined. a single transaction may have for it to be mined.
(default: {flag_tx_gas_limit:?}) (default: {flag_tx_gas_limit:?})
--tx-time-limit MS Maximal time for processing single transaction.
If enabled senders/recipients/code of transactions
offending the limit will be banned from being included
in transaction queue for 180 seconds.
(default: {flag_tx_time_limit:?})
--relay-set SET Set of transactions to relay. SET may be: --relay-set SET Set of transactions to relay. SET may be:
cheap - Relay any transaction in the queue (this cheap - Relay any transaction in the queue (this
may include invalid transactions); may include invalid transactions);
@ -203,6 +208,13 @@ Sealing/Mining Options:
gas_price - Prioritize txs with high gas price; gas_price - Prioritize txs with high gas price;
gas_factor - Prioritize txs using gas price gas_factor - Prioritize txs using gas price
and gas limit ratio (default: {flag_tx_queue_strategy}). and gas limit ratio (default: {flag_tx_queue_strategy}).
--tx-queue-ban-count C Number of times maximal time for execution (--tx-time-limit)
can be exceeded before banning sender/recipient/code.
(default: {flag_tx_queue_ban_count})
--tx-queue-ban-time SEC Banning time (in seconds) for offenders of specified
execution time limit. Also number of offending actions
have to reach the threshold within that time.
(default: {flag_tx_queue_ban_time} seconds)
--remove-solved Move solved blocks from the work package queue --remove-solved Move solved blocks from the work package queue
instead of cloning them. This gives a slightly instead of cloning them. This gives a slightly
faster import speed, but means that extra solutions faster import speed, but means that extra solutions
@ -225,7 +237,7 @@ Footprint Options:
auto - use the method most recently synced or auto - use the method most recently synced or
default to fast if none synced (default: {flag_pruning}). default to fast if none synced (default: {flag_pruning}).
--pruning-history NUM Set a number of recent states to keep when pruning --pruning-history NUM Set a number of recent states to keep when pruning
is active. [default: {flag_pruning_history}]. is active. (default: {flag_pruning_history}).
--cache-size-db MB Override database cache size (default: {flag_cache_size_db}). --cache-size-db MB Override database cache size (default: {flag_cache_size_db}).
--cache-size-blocks MB Specify the prefered size of the blockchain cache in --cache-size-blocks MB Specify the prefered size of the blockchain cache in
megabytes (default: {flag_cache_size_blocks}). megabytes (default: {flag_cache_size_blocks}).

View File

@ -24,7 +24,7 @@ use util::{Hashable, U256, Uint, Bytes, version_data, Secret, Address};
use util::log::Colour; use util::log::Colour;
use ethsync::{NetworkConfiguration, is_valid_node_url, AllowIP}; use ethsync::{NetworkConfiguration, is_valid_node_url, AllowIP};
use ethcore::client::{VMType, Mode}; use ethcore::client::{VMType, Mode};
use ethcore::miner::MinerOptions; use ethcore::miner::{MinerOptions, Banning};
use rpc::{IpcConfiguration, HttpConfiguration}; use rpc::{IpcConfiguration, HttpConfiguration};
use ethcore_rpc::NetworkSettings; use ethcore_rpc::NetworkSettings;
@ -387,6 +387,14 @@ impl Configuration {
reseal_min_period: Duration::from_millis(self.args.flag_reseal_min_period), reseal_min_period: Duration::from_millis(self.args.flag_reseal_min_period),
work_queue_size: self.args.flag_work_queue_size, work_queue_size: self.args.flag_work_queue_size,
enable_resubmission: !self.args.flag_remove_solved, enable_resubmission: !self.args.flag_remove_solved,
tx_queue_banning: match self.args.flag_tx_time_limit {
Some(limit) => Banning::Enabled {
min_offends: self.args.flag_tx_queue_ban_count,
offend_threshold: Duration::from_millis(limit),
ban_duration: Duration::from_secs(self.args.flag_tx_queue_ban_time as u64),
},
None => Banning::Disabled,
}
}; };
Ok(options) Ok(options)

View File

@ -33,7 +33,8 @@ pub enum Api {
Web3, Web3,
Net, Net,
Eth, Eth,
Personal, PersonalSafe,
PersonalAccounts,
Signer, Signer,
Ethcore, Ethcore,
EthcoreSet, EthcoreSet,
@ -51,7 +52,8 @@ impl FromStr for Api {
"web3" => Ok(Web3), "web3" => Ok(Web3),
"net" => Ok(Net), "net" => Ok(Net),
"eth" => Ok(Eth), "eth" => Ok(Eth),
"personal" => Ok(Personal), "personal" => Ok(PersonalAccounts),
"personal_safe" => Ok(PersonalSafe),
"signer" => Ok(Signer), "signer" => Ok(Signer),
"ethcore" => Ok(Ethcore), "ethcore" => Ok(Ethcore),
"ethcore_set" => Ok(EthcoreSet), "ethcore_set" => Ok(EthcoreSet),
@ -114,7 +116,8 @@ fn to_modules(apis: &[Api]) -> BTreeMap<String, String> {
Api::Web3 => ("web3", "1.0"), Api::Web3 => ("web3", "1.0"),
Api::Net => ("net", "1.0"), Api::Net => ("net", "1.0"),
Api::Eth => ("eth", "1.0"), Api::Eth => ("eth", "1.0"),
Api::Personal => ("personal", "1.0"), Api::PersonalSafe => ("personal_safe", "1.0"),
Api::PersonalAccounts => ("personal", "1.0"),
Api::Signer => ("signer", "1.0"), Api::Signer => ("signer", "1.0"),
Api::Ethcore => ("ethcore", "1.0"), Api::Ethcore => ("ethcore", "1.0"),
Api::EthcoreSet => ("ethcore_set", "1.0"), Api::EthcoreSet => ("ethcore_set", "1.0"),
@ -131,11 +134,11 @@ impl ApiSet {
match *self { match *self {
ApiSet::List(ref apis) => apis.clone(), ApiSet::List(ref apis) => apis.clone(),
ApiSet::UnsafeContext => { ApiSet::UnsafeContext => {
vec![Api::Web3, Api::Net, Api::Eth, Api::Ethcore, Api::Traces, Api::Rpc] vec![Api::Web3, Api::Net, Api::Eth, Api::Ethcore, Api::Traces, Api::Rpc, Api::PersonalSafe]
.into_iter().collect() .into_iter().collect()
}, },
ApiSet::SafeContext => { ApiSet::SafeContext => {
vec![Api::Web3, Api::Net, Api::Eth, Api::Personal, Api::Signer, Api::Ethcore, Api::EthcoreSet, Api::Traces, Api::Rpc] vec![Api::Web3, Api::Net, Api::Eth, Api::PersonalAccounts, Api::PersonalSafe, Api::Signer, Api::Ethcore, Api::EthcoreSet, Api::Traces, Api::Rpc]
.into_iter().collect() .into_iter().collect()
}, },
} }
@ -178,8 +181,11 @@ pub fn setup_rpc<T: Extendable>(server: T, deps: Arc<Dependencies>, apis: ApiSet
server.add_delegate(EthSigningUnsafeClient::new(&deps.client, &deps.secret_store, &deps.miner).to_delegate()); server.add_delegate(EthSigningUnsafeClient::new(&deps.client, &deps.secret_store, &deps.miner).to_delegate());
} }
}, },
Api::Personal => { Api::PersonalAccounts => {
server.add_delegate(PersonalClient::new(&deps.secret_store, &deps.client, &deps.miner, deps.geth_compatibility).to_delegate()); server.add_delegate(PersonalAccountsClient::new(&deps.secret_store, &deps.client, &deps.miner, deps.geth_compatibility).to_delegate());
},
Api::PersonalSafe => {
server.add_delegate(PersonalClient::new(&deps.secret_store, &deps.client).to_delegate());
}, },
Api::Signer => { Api::Signer => {
server.add_delegate(SignerClient::new(&deps.secret_store, &deps.client, &deps.miner, &deps.signer_service).to_delegate()); server.add_delegate(SignerClient::new(&deps.secret_store, &deps.client, &deps.miner, &deps.signer_service).to_delegate());
@ -224,7 +230,8 @@ mod test {
assert_eq!(Api::Web3, "web3".parse().unwrap()); assert_eq!(Api::Web3, "web3".parse().unwrap());
assert_eq!(Api::Net, "net".parse().unwrap()); assert_eq!(Api::Net, "net".parse().unwrap());
assert_eq!(Api::Eth, "eth".parse().unwrap()); assert_eq!(Api::Eth, "eth".parse().unwrap());
assert_eq!(Api::Personal, "personal".parse().unwrap()); assert_eq!(Api::PersonalAccounts, "personal".parse().unwrap());
assert_eq!(Api::PersonalSafe, "personal_safe".parse().unwrap());
assert_eq!(Api::Signer, "signer".parse().unwrap()); assert_eq!(Api::Signer, "signer".parse().unwrap());
assert_eq!(Api::Ethcore, "ethcore".parse().unwrap()); assert_eq!(Api::Ethcore, "ethcore".parse().unwrap());
assert_eq!(Api::EthcoreSet, "ethcore_set".parse().unwrap()); assert_eq!(Api::EthcoreSet, "ethcore_set".parse().unwrap());
@ -245,14 +252,14 @@ mod test {
#[test] #[test]
fn test_api_set_unsafe_context() { fn test_api_set_unsafe_context() {
let expected = vec![Api::Web3, Api::Net, Api::Eth, Api::Ethcore, Api::Traces, Api::Rpc] let expected = vec![Api::Web3, Api::Net, Api::Eth, Api::Ethcore, Api::Traces, Api::Rpc, Api::PersonalSafe]
.into_iter().collect(); .into_iter().collect();
assert_eq!(ApiSet::UnsafeContext.list_apis(), expected); assert_eq!(ApiSet::UnsafeContext.list_apis(), expected);
} }
#[test] #[test]
fn test_api_set_safe_context() { fn test_api_set_safe_context() {
let expected = vec![Api::Web3, Api::Net, Api::Eth, Api::Personal, Api::Signer, Api::Ethcore, Api::EthcoreSet, Api::Traces, Api::Rpc] let expected = vec![Api::Web3, Api::Net, Api::Eth, Api::PersonalAccounts, Api::PersonalSafe, Api::Signer, Api::Ethcore, Api::EthcoreSet, Api::Traces, Api::Rpc]
.into_iter().collect(); .into_iter().collect();
assert_eq!(ApiSet::SafeContext.list_apis(), expected); assert_eq!(ApiSet::SafeContext.list_apis(), expected);
} }

View File

@ -29,7 +29,7 @@ fetch = { path = "../util/fetch" }
rustc-serialize = "0.3" rustc-serialize = "0.3"
transient-hashmap = "0.1" transient-hashmap = "0.1"
serde_macros = { version = "0.8.0", optional = true } serde_macros = { version = "0.8.0", optional = true }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" } json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
ethcore-ipc = { path = "../ipc/rpc" } ethcore-ipc = { path = "../ipc/rpc" }
time = "0.1" time = "0.1"

View File

@ -221,6 +221,9 @@ pub fn from_transaction_error(error: EthcoreError) -> Error {
LimitReached => { LimitReached => {
"There are too many transactions in the queue. Your transaction was dropped due to limit. Try increasing the fee.".into() "There are too many transactions in the queue. Your transaction was dropped due to limit. Try increasing the fee.".into()
}, },
InsufficientGas { minimal, got } => {
format!("Transaction gas is too low. There is not enough gas to cover minimal cost of the transaction (minimal: {}, got: {}). Try increasing supplied gas.", minimal, got)
},
InsufficientGasPrice { minimal, got } => { InsufficientGasPrice { minimal, got } => {
format!("Transaction gas price is too low. It does not satisfy your node's minimal gas price (minimal: {}, got: {}). Try increasing the gas price.", minimal, got) format!("Transaction gas price is too low. It does not satisfy your node's minimal gas price (minimal: {}, got: {}). Try increasing the gas price.", minimal, got)
}, },
@ -231,6 +234,9 @@ pub fn from_transaction_error(error: EthcoreError) -> Error {
format!("Transaction cost exceeds current gas limit. Limit: {}, got: {}. Try decreasing supplied gas.", limit, got) format!("Transaction cost exceeds current gas limit. Limit: {}, got: {}. Try decreasing supplied gas.", limit, got)
}, },
InvalidGasLimit(_) => "Supplied gas is beyond limit.".into(), InvalidGasLimit(_) => "Supplied gas is beyond limit.".into(),
SenderBanned => "Sender is banned in local queue.".into(),
RecipientBanned => "Recipient is banned in local queue.".into(),
CodeBanned => "Code is banned in local queue.".into(),
}; };
Error { Error {
code: ErrorCode::ServerError(codes::TRANSACTION_ERROR), code: ErrorCode::ServerError(codes::TRANSACTION_ERROR),

View File

@ -334,4 +334,17 @@ impl<C, M, S: ?Sized, F> Ethcore for EthcoreClient<C, M, S, F> where
self.dapps_port self.dapps_port
.ok_or_else(|| errors::dapps_disabled()) .ok_or_else(|| errors::dapps_disabled())
} }
fn next_nonce(&self, address: H160) -> Result<U256, Error> {
try!(self.active());
let address: Address = address.into();
let miner = take_weak!(self.miner);
let client = take_weak!(self.client);
Ok(miner.last_nonce(&address)
.map(|n| n + 1.into())
.unwrap_or_else(|| client.latest_nonce(&address))
.into()
)
}
} }

View File

@ -32,6 +32,7 @@ mod ethcore;
mod ethcore_set; mod ethcore_set;
mod net; mod net;
mod personal; mod personal;
mod personal_accounts;
mod personal_signer; mod personal_signer;
mod rpc; mod rpc;
mod traces; mod traces;
@ -43,6 +44,7 @@ pub use self::eth_filter::EthFilterClient;
pub use self::eth_signing::{EthSigningUnsafeClient, EthSigningQueueClient}; pub use self::eth_signing::{EthSigningUnsafeClient, EthSigningQueueClient};
pub use self::net::NetClient; pub use self::net::NetClient;
pub use self::personal::PersonalClient; pub use self::personal::PersonalClient;
pub use self::personal_accounts::PersonalAccountsClient;
pub use self::personal_signer::SignerClient; pub use self::personal_signer::SignerClient;
pub use self::ethcore::EthcoreClient; pub use self::ethcore::EthcoreClient;
pub use self::ethcore_set::EthcoreSetClient; pub use self::ethcore_set::EthcoreSetClient;

View File

@ -17,34 +17,26 @@
//! Account management (personal) rpc implementation //! Account management (personal) rpc implementation
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use std::collections::{BTreeMap}; use std::collections::{BTreeMap};
use util::{Address};
use jsonrpc_core::*; use jsonrpc_core::*;
use ethkey::{Brain, Generator};
use v1::traits::Personal; use v1::traits::Personal;
use v1::types::{H160 as RpcH160, TransactionRequest}; use v1::types::{H160 as RpcH160};
use v1::helpers::errors; use v1::helpers::errors;
use v1::helpers::params::expect_no_params; use v1::helpers::params::expect_no_params;
use v1::helpers::dispatch::sign_and_dispatch;
use ethcore::account_provider::AccountProvider; use ethcore::account_provider::AccountProvider;
use ethcore::client::MiningBlockChainClient; use ethcore::client::MiningBlockChainClient;
use ethcore::miner::MinerService;
/// Account management (personal) rpc implementation. /// Account management (personal) rpc implementation.
pub struct PersonalClient<C, M> where C: MiningBlockChainClient, M: MinerService { pub struct PersonalClient<C> where C: MiningBlockChainClient {
accounts: Weak<AccountProvider>, accounts: Weak<AccountProvider>,
client: Weak<C>, client: Weak<C>,
miner: Weak<M>,
allow_perm_unlock: bool,
} }
impl<C, M> PersonalClient<C, M> where C: MiningBlockChainClient, M: MinerService { impl<C> PersonalClient<C> where C: MiningBlockChainClient {
/// Creates new PersonalClient /// Creates new PersonalClient
pub fn new(store: &Arc<AccountProvider>, client: &Arc<C>, miner: &Arc<M>, allow_perm_unlock: bool) -> Self { pub fn new(store: &Arc<AccountProvider>, client: &Arc<C>) -> Self {
PersonalClient { PersonalClient {
accounts: Arc::downgrade(store), accounts: Arc::downgrade(store),
client: Arc::downgrade(client), client: Arc::downgrade(client),
miner: Arc::downgrade(miner),
allow_perm_unlock: allow_perm_unlock,
} }
} }
@ -55,7 +47,7 @@ impl<C, M> PersonalClient<C, M> where C: MiningBlockChainClient, M: MinerService
} }
} }
impl<C: 'static, M: 'static> Personal for PersonalClient<C, M> where C: MiningBlockChainClient, M: MinerService { impl<C: 'static> Personal for PersonalClient<C> where C: MiningBlockChainClient {
fn accounts(&self, params: Params) -> Result<Value, Error> { fn accounts(&self, params: Params) -> Result<Value, Error> {
try!(self.active()); try!(self.active());
@ -66,125 +58,6 @@ impl<C: 'static, M: 'static> Personal for PersonalClient<C, M> where C: MiningBl
Ok(to_value(&accounts.into_iter().map(Into::into).collect::<Vec<RpcH160>>())) Ok(to_value(&accounts.into_iter().map(Into::into).collect::<Vec<RpcH160>>()))
} }
fn new_account(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
from_params::<(String, )>(params).and_then(
|(pass, )| {
let store = take_weak!(self.accounts);
match store.new_account(&pass) {
Ok(address) => Ok(to_value(&RpcH160::from(address))),
Err(e) => Err(errors::account("Could not create account.", e)),
}
}
)
}
fn new_account_from_phrase(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
from_params::<(String, String, )>(params).and_then(
|(phrase, pass, )| {
let store = take_weak!(self.accounts);
match store.insert_account(*Brain::new(phrase).generate().unwrap().secret(), &pass) {
Ok(address) => Ok(to_value(&RpcH160::from(address))),
Err(e) => Err(errors::account("Could not create account.", e)),
}
}
)
}
fn new_account_from_wallet(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
from_params::<(String, String, )>(params).and_then(
|(json, pass, )| {
let store = take_weak!(self.accounts);
match store.import_presale(json.as_bytes(), &pass).or_else(|_| store.import_wallet(json.as_bytes(), &pass)) {
Ok(address) => Ok(to_value(&RpcH160::from(address))),
Err(e) => Err(errors::account("Could not create account.", e)),
}
}
)
}
fn unlock_account(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
from_params::<(RpcH160, String, Option<u64>)>(params).and_then(
|(account, account_pass, duration)| {
let account: Address = account.into();
let store = take_weak!(self.accounts);
let r = match (self.allow_perm_unlock, duration) {
(false, _) => store.unlock_account_temporarily(account, account_pass),
(true, Some(0)) => store.unlock_account_permanently(account, account_pass),
(true, Some(d)) => store.unlock_account_timed(account, account_pass, d as u32 * 1000),
(true, None) => store.unlock_account_timed(account, account_pass, 300_000),
};
match r {
Ok(_) => Ok(Value::Bool(true)),
Err(_) => Ok(Value::Bool(false)),
}
}
)
}
fn test_password(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
from_params::<(RpcH160, String)>(params).and_then(
|(account, password)| {
let account: Address = account.into();
take_weak!(self.accounts)
.test_password(&account, password)
.map(|b| Value::Bool(b))
.map_err(|e| errors::account("Could not fetch account info.", e))
}
)
}
fn change_password(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
from_params::<(RpcH160, String, String)>(params).and_then(
|(account, password, new_password)| {
let account: Address = account.into();
take_weak!(self.accounts)
.change_password(&account, password, new_password)
.map(|_| Value::Null)
.map_err(|e| errors::account("Could not fetch account info.", e))
}
)
}
fn sign_and_send_transaction(&self, params: Params) -> Result<Value, Error> {
try!(self.active());
from_params::<(TransactionRequest, String)>(params)
.and_then(|(request, password)| {
sign_and_dispatch(
&*take_weak!(self.client),
&*take_weak!(self.miner),
&*take_weak!(self.accounts),
request.into(),
Some(password)
)
})
}
/// Sets a human-readable name for the account, falling back to the
/// address book when the address is not an owned account.
fn set_account_name(&self, params: Params) -> Result<Value, Error> {
	try!(self.active());
	let store = take_weak!(self.accounts);
	from_params::<(RpcH160, String)>(params).and_then(|(addr, name)| {
		let addr: Address = addr.into();
		if store.set_account_name(addr.clone(), name.clone()).is_err() {
			// Not an owned account; record the name in the address book instead.
			store.set_address_name(addr, name).expect("set_address_name always returns Ok; qed");
		}
		Ok(Value::Null)
	})
}
/// Sets the account's metadata string, falling back to the
/// address book when the address is not an owned account.
fn set_account_meta(&self, params: Params) -> Result<Value, Error> {
	try!(self.active());
	let store = take_weak!(self.accounts);
	from_params::<(RpcH160, String)>(params).and_then(|(addr, meta)| {
		let addr: Address = addr.into();
		if store.set_account_meta(addr.clone(), meta.clone()).is_err() {
			// Not an owned account; record the metadata in the address book instead.
			store.set_address_meta(addr, meta).expect("set_address_meta always returns Ok; qed");
		}
		Ok(Value::Null)
	})
}
fn accounts_info(&self, params: Params) -> Result<Value, Error> { fn accounts_info(&self, params: Params) -> Result<Value, Error> {
try!(self.active()); try!(self.active());
try!(expect_no_params(params)); try!(expect_no_params(params));
@ -204,21 +77,4 @@ impl<C: 'static, M: 'static> Personal for PersonalClient<C, M> where C: MiningBl
(format!("0x{}", a.hex()), Value::Object(m)) (format!("0x{}", a.hex()), Value::Object(m))
}).collect::<BTreeMap<_, _>>())) }).collect::<BTreeMap<_, _>>()))
} }
/// Lists the accounts available for importing from Geth.
fn geth_accounts(&self, params: Params) -> Result<Value, Error> {
	try!(self.active());
	try!(expect_no_params(params));
	let store = take_weak!(self.accounts);
	let addresses: Vec<RpcH160> = store
		.list_geth_accounts(false)
		.into_iter()
		.map(Into::into)
		.collect();
	Ok(to_value(&addresses))
}
/// Imports the listed Geth accounts into the local keystore and
/// returns the addresses that were imported.
fn import_geth_accounts(&self, params: Params) -> Result<Value, Error> {
	// Keep-alive ping, consistent with every other RPC method in this impl
	// (this was the only method missing it).
	try!(self.active());
	from_params::<(Vec<RpcH160>,)>(params).and_then(|(addresses,)| {
		let store = take_weak!(self.accounts);
		let imported = try!(store
			.import_geth_accounts(addresses.into_iter().map(Into::into).collect(), false)
			.map_err(|e| errors::account("Couldn't import Geth accounts", e)));
		Ok(to_value(&imported.into_iter().map(Into::into).collect::<Vec<RpcH160>>()))
	})
}
} }

View File

@ -0,0 +1,207 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Account management (personal) rpc implementation
use std::sync::{Arc, Weak};
use util::{Address};
use jsonrpc_core::*;
use ethkey::{Brain, Generator};
use v1::traits::PersonalAccounts;
use v1::types::{H160 as RpcH160, H256 as RpcH256, TransactionRequest};
use v1::helpers::errors;
use v1::helpers::params::expect_no_params;
use v1::helpers::dispatch::sign_and_dispatch;
use ethcore::account_provider::AccountProvider;
use ethcore::client::MiningBlockChainClient;
use ethcore::miner::MinerService;
/// Account management (personal) rpc implementation.
pub struct PersonalAccountsClient<C, M> where C: MiningBlockChainClient, M: MinerService {
// Weak handle to the account store; upgraded per call via `take_weak!`.
accounts: Weak<AccountProvider>,
// Weak handle to the blockchain client; used for keep-alive and dispatch.
client: Weak<C>,
// Weak handle to the miner service; used when dispatching signed transactions.
miner: Weak<M>,
// When true, `personal_unlockAccount` may unlock permanently or for a timed period.
allow_perm_unlock: bool,
}
impl<C, M> PersonalAccountsClient<C, M> where C: MiningBlockChainClient, M: MinerService {
	/// Creates a new `PersonalAccountsClient`, holding only weak references
	/// to the supplied services.
	pub fn new(store: &Arc<AccountProvider>, client: &Arc<C>, miner: &Arc<M>, allow_perm_unlock: bool) -> Self {
		let accounts = Arc::downgrade(store);
		let client = Arc::downgrade(client);
		let miner = Arc::downgrade(miner);
		PersonalAccountsClient {
			accounts: accounts,
			client: client,
			miner: miner,
			allow_perm_unlock: allow_perm_unlock,
		}
	}

	/// Pings the client to keep the connection alive.
	fn active(&self) -> Result<(), Error> {
		// TODO: only call every 30s at most.
		take_weak!(self.client).keep_alive();
		Ok(())
	}
}
impl<C: 'static, M: 'static> PersonalAccounts for PersonalAccountsClient<C, M> where C: MiningBlockChainClient, M: MinerService {

	/// Creates a new account protected by the given password; returns its address.
	fn new_account(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		from_params::<(String, )>(params).and_then(
			|(pass, )| {
				let store = take_weak!(self.accounts);
				match store.new_account(&pass) {
					Ok(address) => Ok(to_value(&RpcH160::from(address))),
					Err(e) => Err(errors::account("Could not create account.", e)),
				}
			}
		)
	}

	/// Creates a new account whose secret is derived from a brain-wallet phrase.
	fn new_account_from_phrase(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		from_params::<(String, String, )>(params).and_then(
			|(phrase, pass, )| {
				let store = take_weak!(self.accounts);
				match store.insert_account(*Brain::new(phrase).generate().unwrap().secret(), &pass) {
					Ok(address) => Ok(to_value(&RpcH160::from(address))),
					Err(e) => Err(errors::account("Could not create account.", e)),
				}
			}
		)
	}

	/// Imports an account from a wallet JSON blob; second parameter is the password.
	fn new_account_from_wallet(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		from_params::<(String, String, )>(params).and_then(
			|(json, pass, )| {
				let store = take_weak!(self.accounts);
				// Try the presale format first, then fall back to the standard wallet format.
				match store.import_presale(json.as_bytes(), &pass).or_else(|_| store.import_wallet(json.as_bytes(), &pass)) {
					Ok(address) => Ok(to_value(&RpcH160::from(address))),
					Err(e) => Err(errors::account("Could not create account.", e)),
				}
			}
		)
	}

	/// Creates a new account from a raw secret; second parameter is the password.
	fn new_account_from_secret(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		from_params::<(RpcH256, String, )>(params).and_then(
			|(secret, pass, )| {
				let store = take_weak!(self.accounts);
				match store.insert_account(secret.into(), &pass) {
					Ok(address) => Ok(to_value(&RpcH160::from(address))),
					Err(e) => Err(errors::account("Could not create account.", e)),
				}
			}
		)
	}

	/// Unlocks the given account; returns `true` on success, `false` otherwise.
	fn unlock_account(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		from_params::<(RpcH160, String, Option<u64>)>(params).and_then(
			|(account, account_pass, duration)| {
				let account: Address = account.into();
				let store = take_weak!(self.accounts);
				let r = match (self.allow_perm_unlock, duration) {
					// Permanent/timed unlocking disabled: single-use unlock only.
					(false, _) => store.unlock_account_temporarily(account, account_pass),
					// Explicit zero duration means "unlock permanently".
					(true, Some(0)) => store.unlock_account_permanently(account, account_pass),
					// NOTE(review): `d as u32 * 1000` can overflow for very large durations — confirm expected range.
					(true, Some(d)) => store.unlock_account_timed(account, account_pass, d as u32 * 1000),
					// Default unlock window: 5 minutes.
					(true, None) => store.unlock_account_timed(account, account_pass, 300_000),
				};
				match r {
					Ok(_) => Ok(Value::Bool(true)),
					Err(_) => Ok(Value::Bool(false)),
				}
			}
		)
	}

	/// Checks whether `password` matches the given account, without unlocking it.
	fn test_password(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		from_params::<(RpcH160, String)>(params).and_then(
			|(account, password)| {
				let account: Address = account.into();
				take_weak!(self.accounts)
					.test_password(&account, password)
					.map(|b| Value::Bool(b))
					.map_err(|e| errors::account("Could not fetch account info.", e))
			}
		)
	}

	/// Replaces the account's password, given the current one.
	fn change_password(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		from_params::<(RpcH160, String, String)>(params).and_then(
			|(account, password, new_password)| {
				let account: Address = account.into();
				take_weak!(self.accounts)
					.change_password(&account, password, new_password)
					.map(|_| Value::Null)
					.map_err(|e| errors::account("Could not fetch account info.", e))
			}
		)
	}

	/// Signs the transaction with the sender's key (unlocked via `password`)
	/// and dispatches it to the transaction queue.
	fn sign_and_send_transaction(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		from_params::<(TransactionRequest, String)>(params)
			.and_then(|(request, password)| {
				sign_and_dispatch(
					&*take_weak!(self.client),
					&*take_weak!(self.miner),
					&*take_weak!(self.accounts),
					request.into(),
					Some(password)
				)
			})
	}

	/// Sets a human-readable name for the account, falling back to the
	/// address book when the address is not an owned account.
	fn set_account_name(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		let store = take_weak!(self.accounts);
		from_params::<(RpcH160, String)>(params).and_then(|(addr, name)| {
			let addr: Address = addr.into();
			store.set_account_name(addr.clone(), name.clone()).or_else(|_| store.set_address_name(addr, name)).expect("set_address_name always returns Ok; qed");
			Ok(Value::Null)
		})
	}

	/// Sets the account's metadata string, falling back to the
	/// address book when the address is not an owned account.
	fn set_account_meta(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		let store = take_weak!(self.accounts);
		from_params::<(RpcH160, String)>(params).and_then(|(addr, meta)| {
			let addr: Address = addr.into();
			store.set_account_meta(addr.clone(), meta.clone()).or_else(|_| store.set_address_meta(addr, meta)).expect("set_address_meta always returns Ok; qed");
			Ok(Value::Null)
		})
	}

	/// Imports the listed Geth accounts and returns the imported addresses.
	fn import_geth_accounts(&self, params: Params) -> Result<Value, Error> {
		// Keep-alive ping, consistent with every other method of this impl
		// (this was the only method missing it).
		try!(self.active());
		from_params::<(Vec<RpcH160>,)>(params).and_then(|(addresses,)| {
			let store = take_weak!(self.accounts);
			Ok(to_value(&try!(store
				.import_geth_accounts(addresses.into_iter().map(Into::into).collect(), false)
				.map_err(|e| errors::account("Couldn't import Geth accounts", e))
			).into_iter().map(Into::into).collect::<Vec<RpcH160>>()))
		})
	}

	/// Lists the accounts available for importing from Geth.
	fn geth_accounts(&self, params: Params) -> Result<Value, Error> {
		try!(self.active());
		try!(expect_no_params(params));
		let store = take_weak!(self.accounts);
		Ok(to_value(&store.list_geth_accounts(false).into_iter().map(Into::into).collect::<Vec<RpcH160>>()))
	}
}

View File

@ -26,6 +26,6 @@ pub mod traits;
pub mod tests; pub mod tests;
pub mod types; pub mod types;
pub use self::traits::{Web3, Eth, EthFilter, EthSigning, Personal, PersonalSigner, Net, Ethcore, EthcoreSet, Traces, Rpc}; pub use self::traits::{Web3, Eth, EthFilter, EthSigning, Personal, PersonalAccounts, PersonalSigner, Net, Ethcore, EthcoreSet, Traces, Rpc};
pub use self::impls::*; pub use self::impls::*;
pub use self::helpers::{SigningQueue, SignerService, ConfirmationsQueue, NetworkSettings, block_import}; pub use self::helpers::{SigningQueue, SignerService, ConfirmationsQueue, NetworkSettings, block_import};

View File

@ -24,7 +24,7 @@ use ethcore::spec::{Genesis, Spec};
use ethcore::block::Block; use ethcore::block::Block;
use ethcore::views::BlockView; use ethcore::views::BlockView;
use ethcore::ethereum; use ethcore::ethereum;
use ethcore::miner::{MinerOptions, GasPricer, MinerService, ExternalMiner, Miner, PendingSet, PrioritizationStrategy, GasLimit}; use ethcore::miner::{MinerOptions, Banning, GasPricer, MinerService, ExternalMiner, Miner, PendingSet, PrioritizationStrategy, GasLimit};
use ethcore::account_provider::AccountProvider; use ethcore::account_provider::AccountProvider;
use devtools::RandomTempPath; use devtools::RandomTempPath;
use util::Hashable; use util::Hashable;
@ -61,6 +61,7 @@ fn miner_service(spec: &Spec, accounts: Arc<AccountProvider>) -> Arc<Miner> {
tx_gas_limit: !U256::zero(), tx_gas_limit: !U256::zero(),
tx_queue_strategy: PrioritizationStrategy::GasPriceOnly, tx_queue_strategy: PrioritizationStrategy::GasPriceOnly,
tx_queue_gas_limit: GasLimit::None, tx_queue_gas_limit: GasLimit::None,
tx_queue_banning: Banning::Disabled,
pending_set: PendingSet::SealingOrElseQueue, pending_set: PendingSet::SealingOrElseQueue,
reseal_min_period: Duration::from_secs(0), reseal_min_period: Duration::from_secs(0),
work_queue_size: 50, work_queue_size: 50,

View File

@ -16,7 +16,7 @@
use std::sync::Arc; use std::sync::Arc;
use util::log::RotatingLogger; use util::log::RotatingLogger;
use util::U256; use util::{U256, Address};
use ethsync::ManageNetwork; use ethsync::ManageNetwork;
use ethcore::client::{TestBlockChainClient}; use ethcore::client::{TestBlockChainClient};
use ethstore::ethkey::{Generator, Random}; use ethstore::ethkey::{Generator, Random};
@ -320,3 +320,25 @@ fn rpc_ethcore_dapps_port() {
assert_eq!(io1.handle_request_sync(request), Some(response1.to_owned())); assert_eq!(io1.handle_request_sync(request), Some(response1.to_owned()));
assert_eq!(io2.handle_request_sync(request), Some(response2.to_owned())); assert_eq!(io2.handle_request_sync(request), Some(response2.to_owned()));
} }
#[test]
fn rpc_ethcore_next_nonce() {
let deps = Dependencies::new();
let address = Address::default();
let io1 = deps.default_client();
let deps = Dependencies::new();
deps.miner.last_nonces.write().insert(address.clone(), 2.into());
let io2 = deps.default_client();
let request = r#"{
"jsonrpc": "2.0",
"method": "ethcore_nextNonce",
"params": [""#.to_owned() + &format!("0x{:?}", address) + r#""],
"id": 1
}"#;
let response1 = r#"{"jsonrpc":"2.0","result":"0x0","id":1}"#;
let response2 = r#"{"jsonrpc":"2.0","result":"0x3","id":1}"#;
assert_eq!(io1.handle_request_sync(&request), Some(response1.to_owned()));
assert_eq!(io2.handle_request_sync(&request), Some(response2.to_owned()));
}

View File

@ -19,7 +19,7 @@ use std::str::FromStr;
use jsonrpc_core::IoHandler; use jsonrpc_core::IoHandler;
use util::{U256, Uint, Address}; use util::{U256, Uint, Address};
use ethcore::account_provider::AccountProvider; use ethcore::account_provider::AccountProvider;
use v1::{PersonalClient, Personal}; use v1::{PersonalClient, PersonalAccountsClient, PersonalAccounts, Personal};
use v1::tests::helpers::TestMinerService; use v1::tests::helpers::TestMinerService;
use ethcore::client::TestBlockChainClient; use ethcore::client::TestBlockChainClient;
use ethcore::transaction::{Action, Transaction}; use ethcore::transaction::{Action, Transaction};
@ -50,10 +50,12 @@ fn setup() -> PersonalTester {
let accounts = accounts_provider(); let accounts = accounts_provider();
let client = blockchain_client(); let client = blockchain_client();
let miner = miner_service(); let miner = miner_service();
let personal = PersonalClient::new(&accounts, &client, &miner, false); let personal = PersonalClient::new(&accounts, &client);
let personal_accounts = PersonalAccountsClient::new(&accounts, &client, &miner, false);
let io = IoHandler::new(); let io = IoHandler::new();
io.add_delegate(personal.to_delegate()); io.add_delegate(personal.to_delegate());
io.add_delegate(personal_accounts.to_delegate());
let tester = PersonalTester { let tester = PersonalTester {
accounts: accounts, accounts: accounts,

View File

@ -125,5 +125,9 @@ build_rpc_trait! {
/// Returns current Dapps Server port or an error if dapps server is disabled. /// Returns current Dapps Server port or an error if dapps server is disabled.
#[rpc(name = "ethcore_dappsPort")] #[rpc(name = "ethcore_dappsPort")]
fn dapps_port(&self) -> Result<u16, Error>; fn dapps_port(&self) -> Result<u16, Error>;
/// Returns next nonce for particular sender. Should include all transactions in the queue.
#[rpc(name = "ethcore_nextNonce")]
fn next_nonce(&self, H160) -> Result<U256, Error>;
} }
} }

View File

@ -30,7 +30,7 @@ pub use self::web3::Web3;
pub use self::eth::{Eth, EthFilter}; pub use self::eth::{Eth, EthFilter};
pub use self::eth_signing::EthSigning; pub use self::eth_signing::EthSigning;
pub use self::net::Net; pub use self::net::Net;
pub use self::personal::{Personal, PersonalSigner}; pub use self::personal::{Personal, PersonalAccounts, PersonalSigner};
pub use self::ethcore::Ethcore; pub use self::ethcore::Ethcore;
pub use self::ethcore_set::EthcoreSet; pub use self::ethcore_set::EthcoreSet;
pub use self::traces::Traces; pub use self::traces::Traces;

View File

@ -18,12 +18,28 @@
use std::sync::Arc; use std::sync::Arc;
use jsonrpc_core::*; use jsonrpc_core::*;
/// Personal rpc interface. /// Personal rpc interface. Safe (read-only) functions.
pub trait Personal: Sized + Send + Sync + 'static { pub trait Personal: Sized + Send + Sync + 'static {
/// Lists all stored accounts /// Lists all stored accounts
fn accounts(&self, _: Params) -> Result<Value, Error>; fn accounts(&self, _: Params) -> Result<Value, Error>;
/// Returns accounts information.
fn accounts_info(&self, _: Params) -> Result<Value, Error>;
/// Should be used to convert object to io delegate.
fn to_delegate(self) -> IoDelegate<Self> {
let mut delegate = IoDelegate::new(Arc::new(self));
delegate.add_method("personal_listAccounts", Personal::accounts);
delegate.add_method("personal_accountsInfo", Personal::accounts_info);
delegate
}
}
/// Personal rpc methods altering stored accounts or their settings.
pub trait PersonalAccounts: Sized + Send + Sync + 'static {
/// Creates new account (it becomes new current unlocked account) /// Creates new account (it becomes new current unlocked account)
/// Param is the password for the account. /// Param is the password for the account.
fn new_account(&self, _: Params) -> Result<Value, Error>; fn new_account(&self, _: Params) -> Result<Value, Error>;
@ -36,6 +52,10 @@ pub trait Personal: Sized + Send + Sync + 'static {
/// Second parameter is password for the wallet and the new account. /// Second parameter is password for the wallet and the new account.
fn new_account_from_wallet(&self, params: Params) -> Result<Value, Error>; fn new_account_from_wallet(&self, params: Params) -> Result<Value, Error>;
/// Creates new account from the given raw secret.
/// Second parameter is password for the new account.
fn new_account_from_secret(&self, params: Params) -> Result<Value, Error>;
/// Unlocks specified account for use (can only be one unlocked account at one moment) /// Unlocks specified account for use (can only be one unlocked account at one moment)
fn unlock_account(&self, _: Params) -> Result<Value, Error>; fn unlock_account(&self, _: Params) -> Result<Value, Error>;
@ -56,31 +76,27 @@ pub trait Personal: Sized + Send + Sync + 'static {
/// Set an account's metadata string. /// Set an account's metadata string.
fn set_account_meta(&self, _: Params) -> Result<Value, Error>; fn set_account_meta(&self, _: Params) -> Result<Value, Error>;
/// Returns accounts information. /// Imports a number of Geth accounts, with the list provided as the argument.
fn accounts_info(&self, _: Params) -> Result<Value, Error>; fn import_geth_accounts(&self, _: Params) -> Result<Value, Error>;
/// Returns the accounts available for importing from Geth. /// Returns the accounts available for importing from Geth.
fn geth_accounts(&self, _: Params) -> Result<Value, Error>; fn geth_accounts(&self, _: Params) -> Result<Value, Error>;
/// Imports a number of Geth accounts, with the list provided as the argument.
fn import_geth_accounts(&self, _: Params) -> Result<Value, Error>;
/// Should be used to convert object to io delegate. /// Should be used to convert object to io delegate.
fn to_delegate(self) -> IoDelegate<Self> { fn to_delegate(self) -> IoDelegate<Self> {
let mut delegate = IoDelegate::new(Arc::new(self)); let mut delegate = IoDelegate::new(Arc::new(self));
delegate.add_method("personal_listAccounts", Personal::accounts); delegate.add_method("personal_newAccount", PersonalAccounts::new_account);
delegate.add_method("personal_newAccount", Personal::new_account); delegate.add_method("personal_newAccountFromPhrase", PersonalAccounts::new_account_from_phrase);
delegate.add_method("personal_newAccountFromPhrase", Personal::new_account_from_phrase); delegate.add_method("personal_newAccountFromWallet", PersonalAccounts::new_account_from_wallet);
delegate.add_method("personal_newAccountFromWallet", Personal::new_account_from_wallet); delegate.add_method("personal_newAccountFromSecret", PersonalAccounts::new_account_from_secret);
delegate.add_method("personal_unlockAccount", Personal::unlock_account); delegate.add_method("personal_unlockAccount", PersonalAccounts::unlock_account);
delegate.add_method("personal_testPassword", Personal::test_password); delegate.add_method("personal_testPassword", PersonalAccounts::test_password);
delegate.add_method("personal_changePassword", Personal::change_password); delegate.add_method("personal_changePassword", PersonalAccounts::change_password);
delegate.add_method("personal_signAndSendTransaction", Personal::sign_and_send_transaction); delegate.add_method("personal_signAndSendTransaction", PersonalAccounts::sign_and_send_transaction);
delegate.add_method("personal_setAccountName", Personal::set_account_name); delegate.add_method("personal_setAccountName", PersonalAccounts::set_account_name);
delegate.add_method("personal_setAccountMeta", Personal::set_account_meta); delegate.add_method("personal_setAccountMeta", PersonalAccounts::set_account_meta);
delegate.add_method("personal_accountsInfo", Personal::accounts_info); delegate.add_method("personal_importGethAccounts", PersonalAccounts::import_geth_accounts);
delegate.add_method("personal_listGethAccounts", Personal::geth_accounts); delegate.add_method("personal_listGethAccounts", PersonalAccounts::geth_accounts);
delegate.add_method("personal_importGethAccounts", Personal::import_geth_accounts);
delegate delegate
} }
@ -108,6 +124,7 @@ pub trait PersonalSigner: Sized + Send + Sync + 'static {
delegate.add_method("personal_confirmRequest", PersonalSigner::confirm_request); delegate.add_method("personal_confirmRequest", PersonalSigner::confirm_request);
delegate.add_method("personal_rejectRequest", PersonalSigner::reject_request); delegate.add_method("personal_rejectRequest", PersonalSigner::reject_request);
delegate.add_method("personal_generateAuthorizationToken", PersonalSigner::generate_token); delegate.add_method("personal_generateAuthorizationToken", PersonalSigner::generate_token);
delegate delegate
} }
} }

View File

@ -23,7 +23,7 @@ ethcore-rpc = { path = "../rpc" }
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }
parity-ui = { path = "../dapps/ui", version = "1.4", optional = true } parity-ui = { path = "../dapps/ui", version = "1.4", optional = true }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
[features] [features]
dev = ["clippy"] dev = ["clippy"]

View File

@ -99,6 +99,7 @@ impl<T: TimeProvider> AuthCodes<T> {
} }
/// Checks if given hash is correct identifier of `SignerUI` /// Checks if given hash is correct identifier of `SignerUI`
#[cfg_attr(feature="dev", allow(wrong_self_convention))]
pub fn is_valid(&mut self, hash: &H256, time: u64) -> bool { pub fn is_valid(&mut self, hash: &H256, time: u64) -> bool {
let now = self.now.now(); let now = self.now.now();
// check time // check time

View File

@ -17,7 +17,7 @@ ethcore-network = { path = "../util/network" }
ethcore-io = { path = "../util/io" } ethcore-io = { path = "../util/io" }
ethcore = { path = "../ethcore" } ethcore = { path = "../ethcore" }
rlp = { path = "../util/rlp" } rlp = { path = "../util/rlp" }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
log = "0.3" log = "0.3"
env_logger = "0.3" env_logger = "0.3"
time = "0.1.34" time = "0.1.34"

View File

@ -34,7 +34,7 @@ use std::str::FromStr;
use parking_lot::RwLock; use parking_lot::RwLock;
use chain::{ETH_PACKET_COUNT, SNAPSHOT_SYNC_PACKET_COUNT}; use chain::{ETH_PACKET_COUNT, SNAPSHOT_SYNC_PACKET_COUNT};
pub const WARP_SYNC_PROTOCOL_ID: ProtocolId = *b"bam"; pub const WARP_SYNC_PROTOCOL_ID: ProtocolId = *b"par";
/// Sync configuration /// Sync configuration
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]

View File

@ -114,7 +114,7 @@ impl BlockCollection {
/// Insert a set of headers into collection and advance subchain head pointers. /// Insert a set of headers into collection and advance subchain head pointers.
pub fn insert_headers(&mut self, headers: Vec<Bytes>) { pub fn insert_headers(&mut self, headers: Vec<Bytes>) {
for h in headers.into_iter() { for h in headers {
if let Err(e) = self.insert_header(h) { if let Err(e) = self.insert_header(h) {
trace!(target: "sync", "Ignored invalid header: {:?}", e); trace!(target: "sync", "Ignored invalid header: {:?}", e);
} }
@ -125,7 +125,7 @@ impl BlockCollection {
/// Insert a collection of block bodies for previously downloaded headers. /// Insert a collection of block bodies for previously downloaded headers.
pub fn insert_bodies(&mut self, bodies: Vec<Bytes>) -> usize { pub fn insert_bodies(&mut self, bodies: Vec<Bytes>) -> usize {
let mut inserted = 0; let mut inserted = 0;
for b in bodies.into_iter() { for b in bodies {
if let Err(e) = self.insert_body(b) { if let Err(e) = self.insert_body(b) {
trace!(target: "sync", "Ignored invalid body: {:?}", e); trace!(target: "sync", "Ignored invalid body: {:?}", e);
} else { } else {
@ -141,7 +141,7 @@ impl BlockCollection {
return 0; return 0;
} }
let mut inserted = 0; let mut inserted = 0;
for r in receipts.into_iter() { for r in receipts {
if let Err(e) = self.insert_receipt(r) { if let Err(e) = self.insert_receipt(r) {
trace!(target: "sync", "Ignored invalid receipt: {:?}", e); trace!(target: "sync", "Ignored invalid receipt: {:?}", e);
} else { } else {

View File

@ -381,7 +381,7 @@ impl ChainSync {
/// Returns information on peers connections /// Returns information on peers connections
pub fn peers(&self, io: &SyncIo) -> Vec<PeerInfoDigest> { pub fn peers(&self, io: &SyncIo) -> Vec<PeerInfoDigest> {
self.peers.iter() self.peers.iter()
.filter_map(|(&peer_id, ref peer_data)| .filter_map(|(&peer_id, peer_data)|
io.peer_session_info(peer_id).map(|session_info| io.peer_session_info(peer_id).map(|session_info|
PeerInfoDigest { PeerInfoDigest {
id: session_info.id.map(|id| id.hex()), id: session_info.id.map(|id| id.hex()),
@ -1017,7 +1017,7 @@ impl ChainSync {
return; return;
} }
let (peer_latest, peer_difficulty, peer_snapshot_number, peer_snapshot_hash) = { let (peer_latest, peer_difficulty, peer_snapshot_number, peer_snapshot_hash) = {
if let Some(ref peer) = self.peers.get_mut(&peer_id) { if let Some(peer) = self.peers.get_mut(&peer_id) {
if peer.asking != PeerAsking::Nothing || !peer.can_sync() { if peer.asking != PeerAsking::Nothing || !peer.can_sync() {
return; return;
} }
@ -1142,6 +1142,7 @@ impl ChainSync {
} }
/// Checks if there are blocks fully downloaded that can be imported into the blockchain and does the import. /// Checks if there are blocks fully downloaded that can be imported into the blockchain and does the import.
#[cfg_attr(feature="dev", allow(block_in_if_condition_stmt))]
fn collect_blocks(&mut self, io: &mut SyncIo, block_set: BlockSet) { fn collect_blocks(&mut self, io: &mut SyncIo, block_set: BlockSet) {
match block_set { match block_set {
BlockSet::NewBlocks => { BlockSet::NewBlocks => {
@ -1242,7 +1243,7 @@ impl ChainSync {
return true; return true;
} }
} }
return false; false
} }
/// Generic request sender /// Generic request sender
@ -1370,7 +1371,7 @@ impl ChainSync {
while number <= last && count < max_count { while number <= last && count < max_count {
if let Some(hdr) = overlay.get(&number) { if let Some(hdr) = overlay.get(&number) {
trace!(target: "sync", "{}: Returning cached fork header", peer_id); trace!(target: "sync", "{}: Returning cached fork header", peer_id);
data.extend(hdr); data.extend_from_slice(hdr);
count += 1; count += 1;
} else if let Some(mut hdr) = io.chain().block_header(BlockID::Number(number)) { } else if let Some(mut hdr) = io.chain().block_header(BlockID::Number(number)) {
data.append(&mut hdr); data.append(&mut hdr);
@ -1427,16 +1428,18 @@ impl ChainSync {
} }
count = min(count, MAX_NODE_DATA_TO_SEND); count = min(count, MAX_NODE_DATA_TO_SEND);
let mut added = 0usize; let mut added = 0usize;
let mut data = Bytes::new(); let mut data = Vec::new();
for i in 0..count { for i in 0..count {
if let Some(mut hdr) = io.chain().state_data(&try!(r.val_at::<H256>(i))) { if let Some(hdr) = io.chain().state_data(&try!(r.val_at::<H256>(i))) {
data.append(&mut hdr); data.push(hdr);
added += 1; added += 1;
} }
} }
trace!(target: "sync", "{} -> GetNodeData: return {} entries", peer_id, added); trace!(target: "sync", "{} -> GetNodeData: return {} entries", peer_id, added);
let mut rlp = RlpStream::new_list(added); let mut rlp = RlpStream::new_list(added);
rlp.append_raw(&data, added); for d in data.into_iter() {
rlp.append(&d);
}
Ok(Some((NODE_DATA_PACKET, rlp))) Ok(Some((NODE_DATA_PACKET, rlp)))
} }
@ -1707,7 +1710,7 @@ impl ChainSync {
self.send_packet(io, *peer_id, NEW_BLOCK_PACKET, rlp); self.send_packet(io, *peer_id, NEW_BLOCK_PACKET, rlp);
} }
} }
if let Some(ref mut peer) = self.peers.get_mut(&peer_id) { if let Some(ref mut peer) = self.peers.get_mut(peer_id) {
peer.latest_hash = chain_info.best_block_hash.clone(); peer.latest_hash = chain_info.best_block_hash.clone();
} }
sent += 1; sent += 1;
@ -1725,7 +1728,7 @@ impl ChainSync {
sent += match ChainSync::create_new_hashes_rlp(io.chain(), &last_parent, &chain_info.best_block_hash) { sent += match ChainSync::create_new_hashes_rlp(io.chain(), &last_parent, &chain_info.best_block_hash) {
Some(rlp) => { Some(rlp) => {
{ {
if let Some(ref mut peer) = self.peers.get_mut(&peer_id) { if let Some(ref mut peer) = self.peers.get_mut(peer_id) {
peer.latest_hash = chain_info.best_block_hash.clone(); peer.latest_hash = chain_info.best_block_hash.clone();
} }
} }
@ -1793,7 +1796,7 @@ impl ChainSync {
// Send RLPs // Send RLPs
let sent = lucky_peers.len(); let sent = lucky_peers.len();
if sent > 0 { if sent > 0 {
for (peer_id, rlp) in lucky_peers.into_iter() { for (peer_id, rlp) in lucky_peers {
self.send_packet(io, peer_id, TRANSACTIONS_PACKET, rlp); self.send_packet(io, peer_id, TRANSACTIONS_PACKET, rlp);
} }
@ -2025,7 +2028,9 @@ mod tests {
assert!(rlp_result.is_some()); assert!(rlp_result.is_some());
// the length of one rlp-encoded hash // the length of one rlp-encoded hash
assert_eq!(34, rlp_result.unwrap().1.out().len()); let rlp = rlp_result.unwrap().1.out();
let rlp = Rlp::new(&rlp);
assert_eq!(1, rlp.item_count());
io.sender = Some(2usize); io.sender = Some(2usize);

View File

@ -23,6 +23,7 @@ use super::helpers::*;
pub struct TestSnapshotService { pub struct TestSnapshotService {
manifest: Option<ManifestData>, manifest: Option<ManifestData>,
chunks: HashMap<H256, Bytes>, chunks: HashMap<H256, Bytes>,
canon_hashes: Mutex<HashMap<u64, H256>>,
restoration_manifest: Mutex<Option<ManifestData>>, restoration_manifest: Mutex<Option<ManifestData>>,
state_restoration_chunks: Mutex<HashMap<H256, Bytes>>, state_restoration_chunks: Mutex<HashMap<H256, Bytes>>,
@ -34,6 +35,7 @@ impl TestSnapshotService {
TestSnapshotService { TestSnapshotService {
manifest: None, manifest: None,
chunks: HashMap::new(), chunks: HashMap::new(),
canon_hashes: Mutex::new(HashMap::new()),
restoration_manifest: Mutex::new(None), restoration_manifest: Mutex::new(None),
state_restoration_chunks: Mutex::new(HashMap::new()), state_restoration_chunks: Mutex::new(HashMap::new()),
block_restoration_chunks: Mutex::new(HashMap::new()), block_restoration_chunks: Mutex::new(HashMap::new()),
@ -57,6 +59,7 @@ impl TestSnapshotService {
TestSnapshotService { TestSnapshotService {
manifest: Some(manifest), manifest: Some(manifest),
chunks: chunks, chunks: chunks,
canon_hashes: Mutex::new(HashMap::new()),
restoration_manifest: Mutex::new(None), restoration_manifest: Mutex::new(None),
state_restoration_chunks: Mutex::new(HashMap::new()), state_restoration_chunks: Mutex::new(HashMap::new()),
block_restoration_chunks: Mutex::new(HashMap::new()), block_restoration_chunks: Mutex::new(HashMap::new()),
@ -110,6 +113,10 @@ impl SnapshotService for TestSnapshotService {
self.block_restoration_chunks.lock().insert(hash, chunk); self.block_restoration_chunks.lock().insert(hash, chunk);
} }
} }
fn provide_canon_hashes(&self, hashes: &[(u64, H256)]) {
self.canon_hashes.lock().extend(hashes.iter().cloned());
}
} }
#[test] #[test]

View File

@ -23,7 +23,7 @@ rlp = { path = "rlp" }
heapsize = { version = "0.3", features = ["unstable"] } heapsize = { version = "0.3", features = ["unstable"] }
itertools = "0.4" itertools = "0.4"
sha3 = { path = "sha3" } sha3 = { path = "sha3" }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }
libc = "0.2.7" libc = "0.2.7"
vergen = "0.1" vergen = "0.1"
@ -36,6 +36,7 @@ ansi_term = "0.7"
tiny-keccak= "1.0" tiny-keccak= "1.0"
ethcore-bloom-journal = { path = "bloom" } ethcore-bloom-journal = { path = "bloom" }
regex = "0.1" regex = "0.1"
lru-cache = "0.1.0"
[features] [features]
default = [] default = []

View File

@ -14,7 +14,7 @@ time = "0.1.34"
tiny-keccak = "1.0" tiny-keccak = "1.0"
rust-crypto = "0.2.34" rust-crypto = "0.2.34"
slab = "0.2" slab = "0.2"
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
igd = "0.5.0" igd = "0.5.0"
libc = "0.2.7" libc = "0.2.7"
parking_lot = "0.3" parking_lot = "0.3"

View File

@ -395,7 +395,7 @@ impl Session {
PACKET_PEERS => Ok(SessionData::None), PACKET_PEERS => Ok(SessionData::None),
PACKET_USER ... PACKET_LAST => { PACKET_USER ... PACKET_LAST => {
let mut i = 0usize; let mut i = 0usize;
while packet_id > self.info.capabilities[i].id_offset + self.info.capabilities[i].packet_count { while packet_id >= self.info.capabilities[i].id_offset + self.info.capabilities[i].packet_count {
i += 1; i += 1;
if i == self.info.capabilities.len() { if i == self.info.capabilities.len() {
debug!(target: "network", "Unknown packet: {:?}", packet_id); debug!(target: "network", "Unknown packet: {:?}", packet_id);
@ -406,6 +406,7 @@ impl Session {
// map to protocol // map to protocol
let protocol = self.info.capabilities[i].protocol; let protocol = self.info.capabilities[i].protocol;
let pid = packet_id - self.info.capabilities[i].id_offset; let pid = packet_id - self.info.capabilities[i].id_offset;
trace!(target: "network", "Packet {} mapped to {:?}:{}, i={}, capabilities={:?}", packet_id, protocol, pid, i, self.info.capabilities);
Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } ) Ok(SessionData::Packet { data: packet.data, protocol: protocol, packet_id: pid } )
}, },
_ => { _ => {

79
util/src/cache.rs Normal file
View File

@ -0,0 +1,79 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Lru-cache related utilities as quick-and-dirty wrappers around the lru-cache
//! crate.
// TODO: push changes upstream in a clean way.
use heapsize::HeapSizeOf;
use lru_cache::LruCache;
use std::hash::Hash;
// Starting item capacity of the backing `LruCache`. The underlying cache is
// item-count based while this wrapper budgets by memory, so `insert` doubles
// this capacity on demand whenever the item slots fill up before the memory
// budget is reached.
const INITIAL_CAPACITY: usize = 4;
/// An LRU-cache which operates on memory used.
pub struct MemoryLruCache<K: Eq + Hash, V: HeapSizeOf> {
	// Item-count-based LRU cache holding the actual entries; its capacity is
	// grown on demand since eviction here is driven by memory, not item count.
	inner: LruCache<K, V>,
	// Currently accounted heap size, in bytes, of all stored values
	// (as reported by `HeapSizeOf::heap_size_of_children`).
	cur_size: usize,
	// Memory budget in bytes; `insert` evicts LRU entries until
	// `cur_size` is back under this limit.
	max_size: usize,
}
impl<K: Eq + Hash, V: HeapSizeOf> MemoryLruCache<K, V> {
	/// Create a new cache with a maximum size in bytes.
	pub fn new(max_size: usize) -> Self {
		MemoryLruCache {
			inner: LruCache::new(INITIAL_CAPACITY),
			max_size: max_size,
			cur_size: 0,
		}
	}

	/// Insert an item, evicting least-recently-used entries until the
	/// accounted memory usage is back under the configured maximum.
	pub fn insert(&mut self, key: K, val: V) {
		let cap = self.inner.capacity();

		// grow the cache as necessary; it operates on amount of items
		// but we're working based on memory usage.
		if self.inner.len() == cap && self.cur_size < self.max_size {
			self.inner.set_capacity(cap * 2);
		}

		// account for the memory used by the new value up front. this must
		// happen before `inner.insert` consumes `val`. without this increment
		// the budget is never enforced and the subtraction below can underflow
		// (the original code omitted it).
		self.cur_size += val.heap_size_of_children();

		// account for any element displaced from the cache.
		if let Some(lru) = self.inner.insert(key, val) {
			self.cur_size -= lru.heap_size_of_children();
		}

		// remove elements until we are below the memory target.
		while self.cur_size > self.max_size {
			match self.inner.remove_lru() {
				Some((_, v)) => self.cur_size -= v.heap_size_of_children(),
				_ => break,
			}
		}
	}

	/// Get a reference to an item in the cache. It is a logic error for its
	/// heap size to be altered while borrowed.
	pub fn get_mut(&mut self, key: &K) -> Option<&mut V> {
		self.inner.get_mut(key)
	}

	/// Currently-used size of values in bytes.
	pub fn current_size(&self) -> usize {
		self.cur_size
	}
}

View File

@ -78,7 +78,7 @@ impl HashDB for ArchiveDB {
ret.insert(h, 1); ret.insert(h, 1);
} }
for (key, refs) in self.overlay.keys().into_iter() { for (key, refs) in self.overlay.keys() {
let refs = *ret.get(&key).unwrap_or(&0) + refs; let refs = *ret.get(&key).unwrap_or(&0) + refs;
ret.insert(key, refs); ret.insert(key, refs);
} }
@ -152,7 +152,7 @@ impl JournalDB for ArchiveDB {
let mut inserts = 0usize; let mut inserts = 0usize;
let mut deletes = 0usize; let mut deletes = 0usize;
for i in self.overlay.drain().into_iter() { for i in self.overlay.drain() {
let (key, (value, rc)) = i; let (key, (value, rc)) = i;
if rc > 0 { if rc > 0 {
batch.put(self.column, &key, &value); batch.put(self.column, &key, &value);
@ -164,7 +164,7 @@ impl JournalDB for ArchiveDB {
} }
} }
for (mut key, value) in self.overlay.drain_aux().into_iter() { for (mut key, value) in self.overlay.drain_aux() {
key.push(AUX_FLAG); key.push(AUX_FLAG);
batch.put(self.column, &key, &value); batch.put(self.column, &key, &value);
} }
@ -185,7 +185,7 @@ impl JournalDB for ArchiveDB {
let mut inserts = 0usize; let mut inserts = 0usize;
let mut deletes = 0usize; let mut deletes = 0usize;
for i in self.overlay.drain().into_iter() { for i in self.overlay.drain() {
let (key, (value, rc)) = i; let (key, (value, rc)) = i;
if rc > 0 { if rc > 0 {
if try!(self.backing.get(self.column, &key)).is_some() { if try!(self.backing.get(self.column, &key)).is_some() {
@ -204,7 +204,7 @@ impl JournalDB for ArchiveDB {
} }
} }
for (mut key, value) in self.overlay.drain_aux().into_iter() { for (mut key, value) in self.overlay.drain_aux() {
key.push(AUX_FLAG); key.push(AUX_FLAG);
batch.put(self.column, &key, &value); batch.put(self.column, &key, &value);
} }

View File

@ -63,9 +63,11 @@ enum RemoveFrom {
/// the removals actually take effect. /// the removals actually take effect.
/// ///
/// journal format: /// journal format:
/// ```
/// [era, 0] => [ id, [insert_0, ...], [remove_0, ...] ] /// [era, 0] => [ id, [insert_0, ...], [remove_0, ...] ]
/// [era, 1] => [ id, [insert_0, ...], [remove_0, ...] ] /// [era, 1] => [ id, [insert_0, ...], [remove_0, ...] ]
/// [era, n] => [ ... ] /// [era, n] => [ ... ]
/// ```
/// ///
/// When we make a new commit, we make a journal of all blocks in the recent history and record /// When we make a new commit, we make a journal of all blocks in the recent history and record
/// all keys that were inserted and deleted. The journal is ordered by era; multiple commits can /// all keys that were inserted and deleted. The journal is ordered by era; multiple commits can
@ -80,6 +82,7 @@ enum RemoveFrom {
/// which includes an original key, if any. /// which includes an original key, if any.
/// ///
/// The semantics of the `counter` are: /// The semantics of the `counter` are:
/// ```
/// insert key k: /// insert key k:
/// counter already contains k: count += 1 /// counter already contains k: count += 1
/// counter doesn't contain k: /// counter doesn't contain k:
@ -91,9 +94,11 @@ enum RemoveFrom {
/// count == 1: remove counter /// count == 1: remove counter
/// count == 0: remove key from backing db /// count == 0: remove key from backing db
/// counter doesn't contain k: remove key from backing db /// counter doesn't contain k: remove key from backing db
/// ```
/// ///
/// Practically, this means that for each commit block turning from recent to ancient we do the /// Practically, this means that for each commit block turning from recent to ancient we do the
/// following: /// following:
/// ```
/// is_canonical: /// is_canonical:
/// inserts: Ignored (left alone in the backing database). /// inserts: Ignored (left alone in the backing database).
/// deletes: Enacted; however, recent history queue is checked for ongoing references. This is /// deletes: Enacted; however, recent history queue is checked for ongoing references. This is
@ -102,8 +107,9 @@ enum RemoveFrom {
/// inserts: Reverted; however, recent history queue is checked for ongoing references. This is /// inserts: Reverted; however, recent history queue is checked for ongoing references. This is
/// reduced as a preference to deletion from the backing database. /// reduced as a preference to deletion from the backing database.
/// deletes: Ignored (they were never inserted). /// deletes: Ignored (they were never inserted).
/// ```
/// ///
/// TODO: store_reclaim_period /// TODO: `store_reclaim_period`
pub struct EarlyMergeDB { pub struct EarlyMergeDB {
overlay: MemoryDB, overlay: MemoryDB,
backing: Arc<Database>, backing: Arc<Database>,
@ -310,7 +316,7 @@ impl HashDB for EarlyMergeDB {
ret.insert(h, 1); ret.insert(h, 1);
} }
for (key, refs) in self.overlay.keys().into_iter() { for (key, refs) in self.overlay.keys() {
let refs = *ret.get(&key).unwrap_or(&0) + refs; let refs = *ret.get(&key).unwrap_or(&0) + refs;
ret.insert(key, refs); ret.insert(key, refs);
} }

View File

@ -379,7 +379,7 @@ impl HashDB for OverlayRecentDB {
ret.insert(h, 1); ret.insert(h, 1);
} }
for (key, refs) in self.transaction_overlay.keys().into_iter() { for (key, refs) in self.transaction_overlay.keys() {
let refs = *ret.get(&key).unwrap_or(&0) + refs; let refs = *ret.get(&key).unwrap_or(&0) + refs;
ret.insert(key, refs); ret.insert(key, refs);
} }

View File

@ -36,12 +36,14 @@ use std::env;
/// the removals actually take effect. /// the removals actually take effect.
/// ///
/// journal format: /// journal format:
/// ```
/// [era, 0] => [ id, [insert_0, ...], [remove_0, ...] ] /// [era, 0] => [ id, [insert_0, ...], [remove_0, ...] ]
/// [era, 1] => [ id, [insert_0, ...], [remove_0, ...] ] /// [era, 1] => [ id, [insert_0, ...], [remove_0, ...] ]
/// [era, n] => [ ... ] /// [era, n] => [ ... ]
/// ```
/// ///
/// when we make a new commit, we journal the inserts and removes. /// when we make a new commit, we journal the inserts and removes.
/// for each end_era that we journaled that we are no passing by, /// for each `end_era` that we journaled that we are no passing by,
/// we remove all of its removes assuming it is canonical and all /// we remove all of its removes assuming it is canonical and all
/// of its inserts otherwise. /// of its inserts otherwise.
// TODO: store last_era, reclaim_period. // TODO: store last_era, reclaim_period.

Some files were not shown because too many files have changed in this diff Show More