Merge branch 'master' into les-impl

Robert Habermeier 2016-11-04 15:00:05 +01:00
commit 1bc124f980
362 changed files with 6375 additions and 5094 deletions

View File

@ -1,11 +1,11 @@
stages:
- build
- test
variables:
GIT_DEPTH: "3"
SIMPLECOV: "true"
SIMPLECOV: "true"
RUST_BACKTRACE: "1"
RUSTFLAGS: "-D warnings"
RUSTFLAGS: ""
CARGOFLAGS: ""
cache:
key: "$CI_BUILD_NAME/$CI_BUILD_REF_NAME"
untracked: true
@ -18,7 +18,7 @@ linux-stable:
- tags
- stable
script:
- cargo build --release --verbose
- cargo build --release $CARGOFLAGS
- strip target/release/parity
- md5sum target/release/parity >> parity.md5
- sh scripts/deb-build.sh amd64
@ -26,7 +26,7 @@ linux-stable:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
- md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5
@ -48,7 +48,7 @@ linux-stable-14.04:
- tags
- stable
script:
- cargo build --release --verbose
- cargo build --release $CARGOFLAGS
- strip target/release/parity
- md5sum target/release/parity >> parity.md5
- sh scripts/deb-build.sh amd64
@ -56,7 +56,7 @@ linux-stable-14.04:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
- md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity.md5 --body parity.md5
@ -78,7 +78,7 @@ linux-beta:
- tags
- stable
script:
- cargo build --release --verbose
- cargo build --release $CARGOFLAGS
- strip target/release/parity
tags:
- rust
@ -97,7 +97,7 @@ linux-nightly:
- tags
- stable
script:
- cargo build --release --verbose
- cargo build --release $CARGOFLAGS
- strip target/release/parity
tags:
- rust
@ -118,10 +118,10 @@ linux-centos:
script:
- export CXX="g++"
- export CC="gcc"
- cargo build --release --verbose
- cargo build --release $CARGOFLAGS
- strip target/release/parity
- md5sum target/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity.md5 --body parity.md5
@ -136,16 +136,21 @@ linux-armv7:
stage: build
image: ethcore/rust-armv7:latest
only:
- master
- beta
- tags
- stable
script:
- export CC=arm-linux-gnueabihf-gcc
- export CXX=arm-linux-gnueabihf-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo
- mkdir -p .cargo
- echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config
- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
- cat .cargo/config
- cargo build --target armv7-unknown-linux-gnueabihf --release --verbose
- cargo build --target armv7-unknown-linux-gnueabihf --release $CARGOFLAGS
- arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity
- md5sum target/armv7-unknown-linux-gnueabihf/release/parity >> parity.md5
- sh scripts/deb-build.sh armhf
@ -153,7 +158,7 @@ linux-armv7:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
- md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity --body target/armv7-unknown-linux-gnueabihf/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity.md5 --body parity.md5
@ -171,16 +176,21 @@ linux-arm:
stage: build
image: ethcore/rust-arm:latest
only:
- master
- beta
- tags
- stable
script:
- export CC=arm-linux-gnueabihf-gcc
- export CXX=arm-linux-gnueabihf-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo
- mkdir -p .cargo
- echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config
- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
- cat .cargo/config
- cargo build --target arm-unknown-linux-gnueabihf --release --verbose
- cargo build --target arm-unknown-linux-gnueabihf --release $CARGOFLAGS
- arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity
- md5sum target/arm-unknown-linux-gnueabihf/release/parity >> parity.md5
- sh scripts/deb-build.sh armhf
@ -188,7 +198,7 @@ linux-arm:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
- md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity --body target/arm-unknown-linux-gnueabihf/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity.md5 --body parity.md5
@ -210,15 +220,19 @@ linux-armv6:
- tags
- stable
script:
- export CC=arm-linux-gnueabi-gcc
- export CXX=arm-linux-gnueabi-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo
- mkdir -p .cargo
- echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config
- echo "linker= \"arm-linux-gnueabi-gcc\"" >> .cargo/config
- cat .cargo/config
- cargo build --target arm-unknown-linux-gnueabi --release --verbose
- cargo build --target arm-unknown-linux-gnueabi --release $CARGOFLAGS
- arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity
- md5sum target/arm-unknown-linux-gnueabi/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key
- md5sum target/arm-unknown-linux-gnueabi/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity --body target/arm-unknown-linux-gnueabi/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity.md5 --body parity.md5
@ -234,16 +248,21 @@ linux-aarch64:
stage: build
image: ethcore/rust-aarch64:latest
only:
- master
- beta
- tags
- stable
script:
- export CC=aarch64-linux-gnu-gcc
- export CXX=aarch64-linux-gnu-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo
- mkdir -p .cargo
- echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config
- echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config
- cat .cargo/config
- cargo build --target aarch64-unknown-linux-gnu --release --verbose
- cargo build --target aarch64-unknown-linux-gnu --release $CARGOFLAGS
- aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity
- md5sum target/aarch64-unknown-linux-gnu/release/parity >> parity.md5
- sh scripts/deb-build.sh arm64
@ -251,7 +270,7 @@ linux-aarch64:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_arm64.deb"
- md5sum "parity_"$VER"_arm64.deb" >> "parity_"$VER"_arm64.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity --body target/aarch64-unknown-linux-gnu/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity.md5 --body parity.md5
@ -273,9 +292,10 @@ darwin:
- tags
- stable
script:
- cargo build --release --verbose
- cargo build --release $CARGOFLAGS
- rm -rf parity.md5
- md5sum target/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-apple-darwin/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-apple-darwin/parity.md5 --body parity.md5
@ -296,9 +316,9 @@ windows:
- set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt
- set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
- set RUST_BACKTRACE=1
- set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off -D warnings
- set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off
- rustup default stable-x86_64-pc-windows-msvc
- cargo build --release --verbose
- cargo build --release %CARGOFLAGS%
- curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
- curl -sL --url "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe
- signtool sign /f %keyfile% /p %certpass% target\release\parity.exe
@ -333,25 +353,39 @@ windows:
- target/release/parity.pdb
- nsis/InstallParity.exe
name: "x86_64-pc-windows-msvc_parity"
#test-darwin:
# stage: build
# before_script:
# - git submodule update --init --recursive
# script:
# - export RUST_BACKTRACE=1
# - ./test.sh $CARGOFLAGS --no-release
# tags:
# - osx
#test-windows:
# stage: build
# before_script:
# - git submodule update --init --recursive
# script:
# - set RUST_BACKTRACE=1
# - cargo test --features json-tests -p rlp -p ethash -p ethcore -p ethcore-bigint -p ethcore-dapps -p ethcore-rpc -p ethcore-signer -p ethcore-util -p ethcore-network -p ethcore-io -p ethkey -p ethstore -p ethsync -p ethcore-ipc -p ethcore-ipc-tests -p ethcore-ipc-nano -p parity %CARGOFLAGS% --verbose --release
# tags:
# - rust-windows
# allow_failure: true
test-linux:
stage: test
stage: build
before_script:
- git submodule update --init --recursive
script:
- export RUST_BACKTRACE=1
- ./test.sh --verbose
- ./test.sh $CARGOFLAGS --no-release
tags:
- rust-test
dependencies:
- linux-stable
js-release:
stage: build
image: ethcore/javascript:latest
only:
- master
- beta
- tags
- stable
before_script:
- ./js/scripts/install-deps.sh
script:
@ -360,7 +394,7 @@ js-release:
tags:
- javascript
js-lint:
stage: test
stage: build
image: ethcore/javascript:latest
before_script:
- ./js/scripts/install-deps.sh
@ -369,7 +403,7 @@ js-lint:
tags:
- javascript-test
js-test:
stage: test
stage: build
image: ethcore/javascript:latest
before_script:
- ./js/scripts/install-deps.sh
@ -378,7 +412,7 @@ js-test:
tags:
- javascript-test
js-pack:
stage: test
stage: build
image: ethcore/javascript:latest
before_script:
- ./js/scripts/install-deps.sh

View File

@ -32,6 +32,7 @@ env:
- RUN_DOCS="false"
- TEST_OPTIONS=""
- RUSTFLAGS="-D warnings"
- TRAVIS_NODE_VERSION="6"
# GH_TOKEN for documentation
- secure: bumJASbZSU8bxJ0EyPUJmu16AiV9EXOpyOj86Jlq/Ty9CfwGqsSXt96uDyE+OUJf34RUFQMsw0nk37/zC4lcn6kqk2wpuH3N/o85Zo/cVZY/NusBWLQqtT5VbYWsV+u2Ua4Tmmsw8yVYQhYwU2ZOejNpflL+Cs9XGgORp1L+/gMRMC2y5Se6ZhwnKPQlRJ8LGsG1dzjQULxzADIt3/zuspNBS8a2urJwlHfGMkvHDoUWCviP/GXoSqw3TZR7FmKyxE19I8n9+iSvm9+oZZquvcgfUxMHn8Gq/b44UbPvjtFOg2yam4xdWXF/RyWCHdc/R9EHorSABeCbefIsm+zcUF3/YQxwpSxM4IZEeH2rTiC7dcrsKw3XsO16xFQz5YI5Bay+CT/wTdMmJd7DdYz7Dyf+pOvcM9WOf/zorxYWSBOMYy0uzbusU2iyIghQ82s7E/Ahg+WARtPgkuTLSB5aL1oCTBKHqQscMr7lo5Ti6RpWLxEdTQMBznc+bMr+6dEtkEcG9zqc6cE9XX+ox3wTU6+HVMfQ1ltCntJ4UKcw3A6INEbw9wgocQa812CIASQ2fE+SCAbz6JxBjIAlFUnD1lUB7S8PdMPwn9plfQgKQ2A5YZqg6FnBdf0rQXIJYxQWKHXj/rBHSUCT0tHACDlzTA+EwWggvkP5AGIxRxm8jhw=
- KCOV_CMD="./kcov-master/tmp/usr/local/bin/kcov"
@ -41,6 +42,7 @@ cache:
directories:
- $TRAVIS_BUILD_DIR/target
- $TRAVIS_BUILD_DIR/kcov-master
- $TRAVIS_BUILD_DIR/js/node_modules
- $HOME/.cargo
addons:
@ -64,9 +66,14 @@ install:
make && make install DESTDIR=../tmp &&
cd
)
- nvm install $TRAVIS_NODE_VERSION && nvm use $TRAVIS_NODE_VERSION && ./js/scripts/install-deps.sh
script:
- if [ "$RUN_TESTS" = "true" ]; then ./test.sh $TEST_OPTIONS --verbose; fi
- if [ "$RUN_TESTS" = "true" ]; then
./js/scripts/lint.sh &&
./js/scripts/test.sh &&
./test.sh $TEST_OPTIONS --verbose;
fi
- if [ "$RUN_COVERAGE" = "true" ]; then ./scripts/cov.sh "$KCOV_CMD"; fi
after_success: |

Cargo.lock generated
View File

@ -3,7 +3,7 @@ name = "parity"
version = "1.4.0"
dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
@ -145,15 +145,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "clippy"
version = "0.0.90"
version = "0.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"clippy_lints 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy_lints 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "clippy_lints"
version = "0.0.90"
version = "0.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -222,8 +222,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "elastic-array"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
version = "0.6.0"
source = "git+https://github.com/ethcore/elastic-array#70e4012e691b732c7c4cb04e9232799e6aa268bc"
dependencies = [
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "env_logger"
@ -276,7 +279,7 @@ dependencies = [
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.4.0",
@ -295,7 +298,7 @@ dependencies = [
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.0.7 (git+https://github.com/contain-rs/lru-cache)",
"lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -304,6 +307,7 @@ dependencies = [
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"transient-hashmap 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -327,7 +331,7 @@ dependencies = [
name = "ethcore-dapps"
version = "1.4.0"
dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0",
@ -366,7 +370,7 @@ version = "1.4.0"
dependencies = [
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"mio 0.6.0 (git+https://github.com/carllerche/mio)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -447,6 +451,7 @@ name = "ethcore-network"
version = "1.4.0"
dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0",
"ethcore-io 1.4.0",
"ethcore-util 1.4.0",
@ -455,7 +460,7 @@ dependencies = [
"igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"mio 0.6.0 (git+https://github.com/carllerche/mio)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0",
@ -470,7 +475,7 @@ dependencies = [
name = "ethcore-rpc"
version = "1.4.0"
dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.4.0",
"ethcore 1.4.0",
"ethcore-devtools 1.4.0",
@ -500,7 +505,7 @@ dependencies = [
name = "ethcore-signer"
version = "1.4.0"
dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0",
"ethcore-io 1.4.0",
@ -512,7 +517,7 @@ dependencies = [
"parity-ui 1.4.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
"ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
]
[[package]]
@ -539,8 +544,8 @@ version = "1.4.0"
dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
"ethcore-bigint 0.1.1",
@ -551,6 +556,7 @@ dependencies = [
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)",
@ -628,7 +634,7 @@ dependencies = [
name = "ethsync"
version = "1.4.0"
dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.4.0",
"ethcore-io 1.4.0",
@ -881,6 +887,11 @@ name = "lazy_static"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lazycell"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "libc"
version = "0.2.15"
@ -903,8 +914,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lru-cache"
version = "0.0.7"
source = "git+https://github.com/contain-rs/lru-cache#13255e33c45ceb69a4b143f235a4322df5fb580e"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -985,7 +996,7 @@ dependencies = [
[[package]]
name = "mio"
version = "0.6.0-dev"
source = "git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d#62ec763c9cc34d8a452ed0392c575c50ddd5fc8d"
source = "git+https://github.com/ethcore/mio?branch=timer-fix#31eccc40ece3d47abaefaf23bb2114033175b972"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
@ -997,6 +1008,22 @@ dependencies = [
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "mio"
version = "0.6.0"
source = "git+https://github.com/carllerche/mio#9f17b70d6fecbf912168267ea74cf536f2cba705"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
"nix 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "miow"
version = "0.1.3"
@ -1208,7 +1235,7 @@ dependencies = [
[[package]]
name = "parity-ui-precompiled"
version = "1.4.0"
source = "git+https://github.com/ethcore/js-precompiled.git#6be42e2bcf15db125797097df7a2dcfbf7d1e1d2"
source = "git+https://github.com/ethcore/js-precompiled.git#8e8432d2986c29e9ff4c338cb4d2a11bc9c3c557"
dependencies = [
"parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1421,7 +1448,7 @@ dependencies = [
name = "rlp"
version = "0.1.0"
dependencies = [
"elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
"ethcore-bigint 0.1.1",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1588,6 +1615,11 @@ name = "slab"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "slab"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "smallvec"
version = "0.1.8"
@ -1879,13 +1911,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "ws"
version = "0.5.2"
source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#00bd2134b07b4bc8ea47b7f6c7afce16bbe34c8f"
version = "0.5.3"
source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#0cd6c5e3e9d5e61a37d53eb8dcbad523dcc69314"
dependencies = [
"bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)",
"httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.6.0-dev (git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d)",
"mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)",
@ -1945,8 +1977,8 @@ dependencies = [
"checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27"
"checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>"
"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
"checksum clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "d19bda68c3db98e3a780342f6101b44312fef20a5f13ce756d1202a35922b01b"
"checksum clippy_lints 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "3d4ed67c69b9bb35169be2538691d290a3aa0cbfd4b9f0bfb7c221fc1d399a96"
"checksum clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)" = "6eacf01b0aad84a0817703498f72d252df7c0faf6a5b86d0be4265f1829e459f"
"checksum clippy_lints 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)" = "a49960c9aab544ce86b004dcb61620e8b898fea5fc0f697a028f460f48221ed6"
"checksum cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90266f45846f14a1e986c77d1e9c2626b8c342ed806fe60241ec38cc8697b245"
"checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc"
"checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "<none>"
@ -1954,7 +1986,7 @@ dependencies = [
"checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
"checksum docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc0acb4ce0828c6a5a11d47baa432fe885881c27428c3a4e473e454ffe57a76"
"checksum dtoa 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0dd841b58510c9618291ffa448da2e4e0f699d984d436122372f446dae62263d"
"checksum elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4bc9250a632e7c001b741eb0ec6cee93c9a5b6d5f1879696a4b94d62b012210a"
"checksum elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)" = "<none>"
"checksum env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "aba65b63ffcc17ffacd6cf5aa843da7c5a25e3bd4bbe0b7def8b214e411250e5"
"checksum eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)" = "<none>"
"checksum ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0c53453517f620847be51943db329276ae52f2e210cfc659e81182864be2f"
@ -1979,11 +2011,12 @@ dependencies = [
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a"
"checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
"checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
"checksum libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)" = "23e3757828fa702a20072c37ff47938e9dd331b92fac6e223d26d4b7a55f7ee2"
"checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48"
"checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd"
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
"checksum lru-cache 0.0.7 (git+https://github.com/contain-rs/lru-cache)" = "<none>"
"checksum lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "656fa4dfcb02bcf1063c592ba3ff6a5303ee1f2afe98c8a889e8b1a77c6dfdb7"
"checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e"
"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
"checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2"
@ -1991,7 +2024,8 @@ dependencies = [
"checksum miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d1f4d337a01c32e1f2122510fed46393d53ca35a7f429cb0450abaedfa3ed54"
"checksum mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)" = "<none>"
"checksum mio 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a637d1ca14eacae06296a008fa7ad955347e34efcb5891cfd8ba05491a37907e"
"checksum mio 0.6.0-dev (git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d)" = "<none>"
"checksum mio 0.6.0 (git+https://github.com/carllerche/mio)" = "<none>"
"checksum mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)" = "<none>"
"checksum miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d5bfc6782530ac8ace97af10a540054a37126b63b0702ddaaa243b73b5745b9a"
"checksum msdos_time 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c04b68cc63a8480fb2550343695f7be72effdec953a9d4508161c3e69041c7d8"
"checksum nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)" = "<none>"
@ -2057,6 +2091,7 @@ dependencies = [
"checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
"checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>"
"checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
"checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
"checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"
"checksum solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "172382bac9424588d7840732b250faeeef88942e37b6e35317dce98cafdd75b2"
"checksum spmc 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "93bdab61c1a413e591c4d17388ffa859eaff2df27f1e13a5ec8b716700605adf"
@ -2094,7 +2129,7 @@ dependencies = [
"checksum webpki 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "813503a5985585e0812d430cd1328ee322f47f66629c8ed4ecab939cf9e92f91"
"checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
"checksum ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
"checksum ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
"checksum xml-rs 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "65e74b96bd3179209dc70a980da6df843dff09e46eee103a0376c0949257e3ef"
"checksum xmltree 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "472a9d37c7c53ab2391161df5b89b1f3bf76dab6ab150d7941ecbdd832282082"

View File

@ -46,7 +46,7 @@ ethcore-logger = { path = "logger" }
rlp = { path = "util/rlp" }
ethcore-stratum = { path = "stratum" }
ethcore-dapps = { path = "dapps", optional = true }
clippy = { version = "0.0.90", optional = true}
clippy = { version = "0.0.96", optional = true}
[target.'cfg(windows)'.dependencies]
winapi = "0.2"
@ -73,6 +73,7 @@ ipc = ["ethcore/ipc", "ethsync/ipc"]
jit = ["ethcore/jit"]
dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev", "ethcore-rpc/dev", "ethcore-dapps/dev", "ethcore-signer/dev"]
json-tests = ["ethcore/json-tests"]
test-heavy = ["ethcore/test-heavy"]
stratum = ["ipc"]
ethkey-cli = ["ethcore/ethkey-cli"]
ethstore-cli = ["ethcore/ethstore-cli"]

View File

@ -1,6 +1,8 @@
# [Parity](https://ethcore.io/parity.html)
### Fast, light, and robust Ethereum implementation
[![Join the chat at https://gitter.im/ethcore/parity.js](https://badges.gitter.im/ethcore/parity.js.svg)](https://gitter.im/ethcore/parity.js?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[![Build Status][travis-image]][travis-url] [![Coverage Status][coveralls-image]][coveralls-url] [![Join the chat at https://gitter.im/ethcore/parity][gitter-image]][gitter-url] [![GPLv3][license-image]][license-url]
[Internal Documentation][doc-url]

View File

@ -33,7 +33,7 @@ fetch = { path = "../util/fetch" }
parity-ui = { path = "./ui" }
mime_guess = { version = "1.6.1" }
clippy = { version = "0.0.90", optional = true}
clippy = { version = "0.0.96", optional = true}
[build-dependencies]
serde_codegen = { version = "0.8", optional = true }

View File

@ -105,8 +105,12 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
trace!(target: "dapps", "Resolving to fetchable content.");
self.fetch.to_async_handler(path.clone(), control)
},
// NOTE [todr] /home is redirected to home page since some users may have the redirection cached
// (in the past we used 301 instead of 302)
// It should be safe to remove it in (near) future.
//
// 404 for non-existent content
(Some(_), _) if *req.method() == hyper::method::Method::Get => {
(Some(ref path), _) if *req.method() == hyper::Method::Get && path.app_id != "home" => {
trace!(target: "dapps", "Resolving to 404.");
Box::new(ContentHandler::error(
StatusCode::NotFound,
@ -116,7 +120,7 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
))
},
// Redirect any other GET request to signer.
_ if *req.method() == hyper::method::Method::Get => {
_ if *req.method() == hyper::Method::Get => {
if let Some(port) = self.signer_port {
trace!(target: "dapps", "Redirecting to signer interface.");
Redirection::boxed(&format!("http://{}", signer_address(port)))

View File

@ -56,6 +56,26 @@ fn should_redirect_to_home_when_trailing_slash_is_missing() {
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
}
#[test]
fn should_redirect_to_home_for_users_with_cached_redirection() {
// given
let server = serve();
// when
let response = request(server,
"\
GET /home/ HTTP/1.1\r\n\
Host: 127.0.0.1:8080\r\n\
Connection: close\r\n\
\r\n\
"
);
// then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned());
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
}
#[test]
fn should_display_404_on_invalid_dapp() {
// given

View File

@ -11,7 +11,7 @@ build = "build.rs"
ethcore-ipc-codegen = { path = "../ipc/codegen" }
[dependencies]
clippy = { version = "0.0.90", optional = true}
clippy = { version = "0.0.96", optional = true}
ethcore-devtools = { path = "../devtools" }
ethcore-ipc = { path = "../ipc/rpc" }
rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }

View File

@ -69,14 +69,19 @@ impl EthashManager {
Some(ref e) if *e == epoch => lights.recent.clone(),
_ => match lights.prev_epoch.clone() {
Some(e) if e == epoch => {
// swap
let t = lights.prev_epoch;
lights.prev_epoch = lights.recent_epoch;
lights.recent_epoch = t;
let t = lights.prev.clone();
lights.prev = lights.recent.clone();
lights.recent = t;
lights.recent.clone()
// don't swap if recent is newer.
if lights.recent_epoch > lights.prev_epoch {
None
} else {
// swap
let t = lights.prev_epoch;
lights.prev_epoch = lights.recent_epoch;
lights.recent_epoch = t;
let t = lights.prev.clone();
lights.prev = lights.recent.clone();
lights.recent = t;
lights.recent.clone()
}
}
_ => None,
},
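
The guard added above only swaps the previous light cache back into the recent slot when the recent cache is not newer. A minimal standalone sketch of that idea in Rust, using hypothetical field and type names rather than the real EthashManager internals:

use std::mem;
use std::sync::Arc;

// Hypothetical stand-in for the cached DAG light data.
type Light = Arc<Vec<u8>>;

struct LightCache {
    recent_epoch: Option<u64>,
    recent: Option<Light>,
    prev_epoch: Option<u64>,
    prev: Option<Light>,
}

impl LightCache {
    // Return the light cached in the `prev` slot for `epoch`, swapping it back into
    // `recent` -- but only when `recent` is not newer ("don't swap if recent is newer").
    fn promote_prev(&mut self, epoch: u64) -> Option<Light> {
        match self.prev_epoch {
            Some(e) if e == epoch => {
                if self.recent_epoch > self.prev_epoch {
                    None
                } else {
                    mem::swap(&mut self.prev_epoch, &mut self.recent_epoch);
                    mem::swap(&mut self.prev, &mut self.recent);
                    self.recent.clone()
                }
            }
            _ => None,
        }
    }
}

fn main() {
    let mut cache = LightCache {
        recent_epoch: Some(8), recent: Some(Arc::new(vec![8])),
        prev_epoch: Some(9), prev: Some(Arc::new(vec![9])),
    };
    assert!(cache.promote_prev(9).is_some());  // prev is newer, so the swap happens
    assert_eq!(cache.recent_epoch, Some(9));
    assert!(cache.promote_prev(8).is_none());  // recent is now newer, so no swap back
}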

View File

@ -24,8 +24,11 @@ rayon = "0.4.2"
semver = "0.2"
bit-set = "0.4"
time = "0.1"
rand = "0.3"
byteorder = "0.5"
transient-hashmap = "0.1"
evmjit = { path = "../evmjit", optional = true }
clippy = { version = "0.0.90", optional = true}
clippy = { version = "0.0.96", optional = true}
ethash = { path = "../ethash" }
ethcore-util = { path = "../util" }
ethcore-io = { path = "../util/io" }
@ -36,10 +39,8 @@ ethstore = { path = "../ethstore" }
ethkey = { path = "../ethkey" }
ethcore-ipc-nano = { path = "../ipc/nano" }
rlp = { path = "../util/rlp" }
rand = "0.3"
lru-cache = { git = "https://github.com/contain-rs/lru-cache" }
lru-cache = "0.1.0"
ethcore-bloom-journal = { path = "../util/bloom" }
byteorder = "0.5"
[dependencies.hyper]
git = "https://github.com/ethcore/hyper"

View File

@ -39,10 +39,18 @@
"stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
},
"nodes": [
"enode://08c7ee6a4f861ff0664a49532bcc86de1363acd608999d1b76609bb9bc278649906f069057630fd9493924a368b5d1dc9b8f8bf13ac26df72512f6d1fabd8c95@45.32.7.81:30303",
"enode://e809c4a2fec7daed400e5e28564e23693b23b2cc5a019b612505631bbe7b9ccf709c1796d2a3d29ef2b045f210caf51e3c4f5b6d3587d43ad5d6397526fa6179@174.112.32.157:30303",
"enode://687be94c3a7beaa3d2fde82fa5046cdeb3e8198354e05b29d6e0d4e276713e3707ac10f784a7904938b06b46c764875c241b0337dd853385a4d8bfcbf8190647@95.183.51.229:30303",
"enode://6e538e7c1280f0a31ff08b382db5302480f775480b8e68f8febca0ceff81e4b19153c6f8bf60313b93bef2cc34d34e1df41317de0ce613a201d1660a788a03e2@52.206.67.235:30303",
"enode://217ebe27e89bf4fec8ce06509323ff095b1014378deb75ab2e5f6759a4e8750a3bd8254b8c6833136e4d5e58230d65ee8ab34a5db5abf0640408c4288af3c8a7@188.138.1.237:30303"
"enode://ca5ae4eca09ba6787e29cf6d86f7634d07aae6b9e6317a59aff675851c0bf445068173208cf8ef7f5cd783d8e29b85b2fa3fa358124cf0546823149724f9bde1@138.68.1.16:30303",
"enode://217ebe27e89bf4fec8ce06509323ff095b1014378deb75ab2e5f6759a4e8750a3bd8254b8c6833136e4d5e58230d65ee8ab34a5db5abf0640408c4288af3c8a7@188.138.1.237:30303",
"enode://fa20444ef991596ce99b81652ac4e61de1eddc4ff21d3cd42762abd7ed47e7cf044d3c9ccddaf6035d39725e4eb372806787829ccb9a08ec7cb71883cb8c3abd@50.149.116.182:30303",
"enode://4bd6a4df3612c718333eb5ea7f817923a8cdf1bed89cee70d1710b45a0b6b77b2819846440555e451a9b602ad2efa2d2facd4620650249d8468008946887820a@71.178.232.20:30304",
"enode://921cf8e4c345fe8db913c53964f9cadc667644e7f20195a0b7d877bd689a5934e146ff2c2259f1bae6817b6585153a007ceb67d260b720fa3e6fc4350df25c7f@51.255.49.170:30303",
"enode://ffea3b01c000cdd89e1e9229fea3e80e95b646f9b2aa55071fc865e2f19543c9b06045cc2e69453e6b78100a119e66be1b5ad50b36f2ffd27293caa28efdd1b2@128.199.93.177:3030",
"enode://ee3da491ce6a155eb132708eb0e8d04b0637926ec0ae1b79e63fc97cb9fc3818f49250a0ae0d7f79ed62b66ec677f408c4e01741504dc7a051e274f1e803d454@91.121.65.179:40404",
"enode://48e063a6cf5f335b1ef2ed98126bf522cf254396f850c7d442fe2edbbc23398787e14cd4de7968a00175a82762de9cbe9e1407d8ccbcaeca5004d65f8398d759@159.203.255.59:30303"
],
"accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },

View File

@ -158,14 +158,21 @@
"stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
},
"nodes": [
"enode://cd6611461840543d5b9c56fbf088736154c699c43973b3a1a32390cf27106f87e58a818a606ccb05f3866de95a4fe860786fea71bf891ea95f234480d3022aa3@136.243.154.245:30303",
"enode://efe4f2493f4aff2d641b1db8366b96ddacfe13e7a6e9c8f8f8cf49f9cdba0fdf3258d8c8f8d0c5db529f8123c8f1d95f36d54d590ca1bb366a5818b9a4ba521c@163.172.187.252:30303",
"enode://cd6611461840543d5b9c56fbf088736154c699c43973b3a1a32390cf27106f87e58a818a606ccb05f3866de95a4fe860786fea71bf891ea95f234480d3022aa3@163.172.157.114:30303",
"enode://bcc7240543fe2cf86f5e9093d05753dd83343f8fda7bf0e833f65985c73afccf8f981301e13ef49c4804491eab043647374df1c4adf85766af88a624ecc3330e@136.243.154.244:30303",
"enode://ed4227681ca8c70beb2277b9e870353a9693f12e7c548c35df6bca6a956934d6f659999c2decb31f75ce217822eefca149ace914f1cbe461ed5a2ebaf9501455@88.212.206.70:30303",
"enode://cadc6e573b6bc2a9128f2f635ac0db3353e360b56deef239e9be7e7fce039502e0ec670b595f6288c0d2116812516ad6b6ff8d5728ff45eba176989e40dead1e@37.128.191.230:30303",
"enode://595a9a06f8b9bc9835c8723b6a82105aea5d55c66b029b6d44f229d6d135ac3ecdd3e9309360a961ea39d7bee7bac5d03564077a4e08823acc723370aace65ec@46.20.235.22:30303",
"enode://029178d6d6f9f8026fc0bc17d5d1401aac76ec9d86633bba2320b5eed7b312980c0a210b74b20c4f9a8b0b2bf884b111fa9ea5c5f916bb9bbc0e0c8640a0f56c@216.158.85.185:30303",
"enode://84f5d5957b4880a8b0545e32e05472318898ad9fc8ebe1d56c90c12334a98e12351eccfdf3a2bf72432ac38b57e9d348400d17caa083879ade3822390f89773f@10.1.52.78:30303",
"enode://f90dc9b9bf7b8db97726b7849e175f1eb2707f3d8f281c929336e398dd89b0409fc6aeceb89e846278e9d3ecc3857cebfbe6758ff352ece6fe5d42921ee761db@10.1.173.87:30303",
"enode://6a868ced2dec399c53f730261173638a93a40214cf299ccf4d42a76e3fa54701db410669e8006347a4b3a74fa090bb35af0320e4bc8d04cf5b7f582b1db285f5@10.3.149.199:30303",
"enode://fdd1b9bb613cfbc200bba17ce199a9490edc752a833f88d4134bf52bb0d858aa5524cb3ec9366c7a4ef4637754b8b15b5dc913e4ed9fdb6022f7512d7b63f181@212.47.247.103:30303",
"enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303",
"enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303",
"enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303",
"enode://4cd540b2c3292e17cff39922e864094bf8b0741fcc8c5dcea14957e389d7944c70278d872902e3d0345927f621547efa659013c400865485ab4bfa0c6596936f@zero.parity.io:30303",
"enode://cc92c4c40d612a10c877ca023ef0496c843fbc92b6c6c0d55ce0b863d51d821c4bd70daebb54324a6086374e6dc05708fed39862b275f169cb678e655da9d07d@136.243.154.246:30303"
"enode://4cd540b2c3292e17cff39922e864094bf8b0741fcc8c5dcea14957e389d7944c70278d872902e3d0345927f621547efa659013c400865485ab4bfa0c6596936f@138.201.144.135:30303"
],
"accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },

View File

@ -96,9 +96,9 @@ impl<'db> HashDB for AccountDB<'db>{
unimplemented!()
}
fn get(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
@ -114,7 +114,7 @@ impl<'db> HashDB for AccountDB<'db>{
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: Bytes) {
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
@ -122,7 +122,7 @@ impl<'db> HashDB for AccountDB<'db>{
unimplemented!()
}
fn get_aux(&self, hash: &[u8]) -> Option<Vec<u8>> {
fn get_aux(&self, hash: &[u8]) -> Option<DBValue> {
self.db.get_aux(hash)
}
}
@ -158,9 +158,9 @@ impl<'db> HashDB for AccountDBMut<'db>{
unimplemented!()
}
fn get(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.db.get(&combine_key(&self.address_hash, key))
}
@ -178,16 +178,16 @@ impl<'db> HashDB for AccountDBMut<'db>{
}
let k = value.sha3();
let ak = combine_key(&self.address_hash, &k);
self.db.emplace(ak, value.to_vec());
self.db.emplace(ak, DBValue::from_slice(value));
k
}
fn emplace(&mut self, key: H256, value: Bytes) {
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, &key);
self.db.emplace(key, value.to_vec())
self.db.emplace(key, value)
}
fn remove(&mut self, key: &H256) {
@ -202,7 +202,7 @@ impl<'db> HashDB for AccountDBMut<'db>{
self.db.insert_aux(hash, value);
}
fn get_aux(&self, hash: &[u8]) -> Option<Vec<u8>> {
fn get_aux(&self, hash: &[u8]) -> Option<DBValue> {
self.db.get_aux(hash)
}
@ -218,9 +218,9 @@ impl<'db> HashDB for Wrapping<'db> {
unimplemented!()
}
fn get(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
@ -236,7 +236,7 @@ impl<'db> HashDB for Wrapping<'db> {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: Bytes) {
fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!()
}
@ -252,9 +252,9 @@ impl<'db> HashDB for WrappingMut<'db>{
unimplemented!()
}
fn get(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
return Some(DBValue::from_slice(&NULL_RLP_STATIC));
}
self.0.get(key)
}
@ -273,7 +273,7 @@ impl<'db> HashDB for WrappingMut<'db>{
self.0.insert(value)
}
fn emplace(&mut self, key: H256, value: Bytes) {
fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP {
return;
}
@ -286,4 +286,4 @@ impl<'db> HashDB for WrappingMut<'db>{
}
self.0.remove(key)
}
}
}
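
The AccountDB changes above follow the HashDB interface moving from borrowed slices to an owned DBValue. A minimal sketch of the shape of that change, treating DBValue as a plain owned byte buffer and using a toy in-memory store (both simplifying assumptions, not the actual trait or type):

use std::collections::HashMap;

// Hypothetical stand-in for DBValue: just an owned byte buffer here.
type DBValue = Vec<u8>;

struct MemoryDb {
    data: HashMap<[u8; 32], DBValue>,
}

impl MemoryDb {
    // New shape: `get` returns an owned value instead of Option<&[u8]>, so the
    // result no longer borrows from the store for its whole lifetime.
    fn get(&self, key: &[u8; 32]) -> Option<DBValue> {
        self.data.get(key).cloned()
    }

    // `emplace` likewise takes the owned value type directly.
    fn emplace(&mut self, key: [u8; 32], value: DBValue) {
        self.data.insert(key, value);
    }
}

fn main() {
    let mut db = MemoryDb { data: HashMap::new() };
    db.emplace([0u8; 32], b"hello".to_vec());
    let value = db.get(&[0u8; 32]); // owned copy, independent of `db`
    assert_eq!(value.as_deref(), Some(&b"hello"[..]));
}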

View File

@ -36,7 +36,7 @@ enum Unlock {
/// Use with caution.
Perm,
/// Account unlocked with a timeout
Timed((Instant, u32)),
Timed(Instant),
}
/// Data associated with account.
@ -267,17 +267,17 @@ impl AccountProvider {
/// Returns `true` if the password for `account` is `password`. `false` if not.
pub fn test_password(&self, account: &Address, password: String) -> Result<bool, Error> {
match self.sstore.sign(&account, &password, &Default::default()) {
match self.sstore.sign(account, &password, &Default::default()) {
Ok(_) => Ok(true),
Err(SSError::InvalidPassword) => Ok(false),
Err(e) => Err(Error::SStore(e)),
}
}
}
/// Changes the password of `account` from `password` to `new_password`. Fails if incorrect `password` given.
pub fn change_password(&self, account: &Address, password: String, new_password: String) -> Result<(), Error> {
self.sstore.change_password(&account, &password, &new_password).map_err(Error::SStore)
}
self.sstore.change_password(account, &password, &new_password).map_err(Error::SStore)
}
/// Helper method used for unlocking accounts.
fn unlock_account(&self, account: Address, password: String, unlock: Unlock) -> Result<(), Error> {
@ -308,8 +308,8 @@ impl AccountProvider {
if let Unlock::Temp = data.unlock {
unlocked.remove(account).expect("data exists: so key must exist: qed");
}
if let Unlock::Timed((ref start, ref duration)) = data.unlock {
if start.elapsed() > Duration::from_millis(*duration as u64) {
if let Unlock::Timed(ref end) = data.unlock {
if Instant::now() > *end {
unlocked.remove(account).expect("data exists: so key must exist: qed");
return Err(Error::NotUnlocked);
}
@ -329,7 +329,7 @@ impl AccountProvider {
/// Unlocks account temporarily with a timeout.
pub fn unlock_account_timed(&self, account: Address, password: String, duration_ms: u32) -> Result<(), Error> {
self.unlock_account(account, password, Unlock::Timed((Instant::now(), duration_ms)))
self.unlock_account(account, password, Unlock::Timed(Instant::now() + Duration::from_millis(duration_ms as u64)))
}
/// Checks if given account is unlocked
@ -363,11 +363,11 @@ impl AccountProvider {
#[cfg(test)]
mod tests {
use super::{AccountProvider, AddressBook};
use super::{AccountProvider, AddressBook, Unlock};
use std::collections::HashMap;
use std::time::Instant;
use ethjson::misc::AccountMeta;
use ethstore::ethkey::{Generator, Random};
use std::time::Duration;
use devtools::RandomTempPath;
#[test]
@ -411,10 +411,10 @@ mod tests {
let kp = Random.generate().unwrap();
let ap = AccountProvider::transient_provider();
assert!(ap.insert_account(kp.secret().clone(), "test").is_ok());
assert!(ap.unlock_account_timed(kp.address(), "test1".into(), 2000).is_err());
assert!(ap.unlock_account_timed(kp.address(), "test".into(), 2000).is_ok());
assert!(ap.unlock_account_timed(kp.address(), "test1".into(), 60000).is_err());
assert!(ap.unlock_account_timed(kp.address(), "test".into(), 60000).is_ok());
assert!(ap.sign(kp.address(), None, Default::default()).is_ok());
::std::thread::sleep(Duration::from_millis(2000));
ap.unlocked.lock().get_mut(&kp.address()).unwrap().unlock = Unlock::Timed(Instant::now());
assert!(ap.sign(kp.address(), None, Default::default()).is_err());
}
}
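
The timed-unlock change above stores an absolute expiry Instant instead of a start time plus a millisecond duration, so the expiry check becomes a single comparison. A small sketch of that check, with hypothetical names rather than the real AccountProvider API:

use std::time::{Duration, Instant};

// Mirrors the enum change in the diff: the variant carries the expiry deadline.
enum Unlock {
    Perm,
    Timed(Instant),
}

fn is_expired(unlock: &Unlock) -> bool {
    match *unlock {
        Unlock::Perm => false,
        // One comparison against the precomputed deadline, no elapsed() arithmetic.
        Unlock::Timed(end) => Instant::now() > end,
    }
}

fn main() {
    let duration_ms: u32 = 50;
    // Deadline computed once at unlock time, as in unlock_account_timed above.
    let unlock = Unlock::Timed(Instant::now() + Duration::from_millis(duration_ms as u64));
    assert!(!is_expired(&unlock));
    std::thread::sleep(Duration::from_millis(60));
    assert!(is_expired(&unlock));
}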

View File

@ -542,7 +542,7 @@ pub fn enact(
Ok(b.close_and_lock())
}
#[inline(always)]
#[inline]
#[cfg(not(feature = "slow-blocks"))]
fn push_transactions(block: &mut OpenBlock, transactions: &[SignedTransaction]) -> Result<(), Error> {
for t in transactions {

View File

@ -196,6 +196,7 @@ pub struct BlockChain {
pending_best_block: RwLock<Option<BestBlock>>,
pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>,
pending_block_details: RwLock<HashMap<H256, BlockDetails>>,
pending_transaction_addresses: RwLock<HashMap<H256, Option<TransactionAddress>>>,
}
@ -414,6 +415,7 @@ impl<'a> Iterator for AncestryIter<'a> {
}
impl BlockChain {
#[cfg_attr(feature="dev", allow(useless_let_if_seq))]
/// Create new instance of blockchain from given Genesis
pub fn new(config: Config, genesis: &[u8], db: Arc<Database>) -> BlockChain {
// 400 is the average size of the key
@ -438,6 +440,7 @@ impl BlockChain {
cache_man: Mutex::new(cache_man),
pending_best_block: RwLock::new(None),
pending_block_hashes: RwLock::new(HashMap::new()),
pending_block_details: RwLock::new(HashMap::new()),
pending_transaction_addresses: RwLock::new(HashMap::new()),
};
@ -565,7 +568,7 @@ impl BlockChain {
let range = extras.number as bc::Number .. extras.number as bc::Number;
let chain = bc::group::BloomGroupChain::new(self.blooms_config, self);
let changes = chain.replace(&range, vec![]);
for (k, v) in changes.into_iter() {
for (k, v) in changes {
batch.write(db::COL_EXTRA, &LogGroupPosition::from(k), &BloomGroup::from(v));
}
batch.put(db::COL_EXTRA, b"best", &hash);
@ -789,11 +792,10 @@ impl BlockChain {
/// the chain and the child's parent is this block.
///
/// Used in snapshots to glue the chunks together at the end.
pub fn add_child(&self, block_hash: H256, child_hash: H256) {
pub fn add_child(&self, batch: &mut DBTransaction, block_hash: H256, child_hash: H256) {
let mut parent_details = self.block_details(&block_hash)
.unwrap_or_else(|| panic!("Invalid block hash: {:?}", block_hash));
let mut batch = self.db.transaction();
parent_details.children.push(child_hash);
let mut update = HashMap::new();
@ -804,8 +806,6 @@ impl BlockChain {
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update, CacheUpdatePolicy::Overwrite);
self.cache_man.lock().note_used(CacheID::BlockDetails(block_hash));
self.db.write(batch).unwrap();
}
#[cfg_attr(feature="dev", allow(similar_names))]
@ -894,17 +894,6 @@ impl BlockChain {
/// Prepares extras update.
fn prepare_update(&self, batch: &mut DBTransaction, update: ExtrasUpdate, is_best: bool) {
{
let block_hashes: Vec<_> = update.block_details.keys().cloned().collect();
let mut write_details = self.block_details.write();
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update.block_details, CacheUpdatePolicy::Overwrite);
let mut cache_man = self.cache_man.lock();
for hash in block_hashes {
cache_man.note_used(CacheID::BlockDetails(hash));
}
}
{
let mut write_receipts = self.block_receipts.write();
@ -916,7 +905,7 @@ impl BlockChain {
batch.extend_with_cache(db::COL_EXTRA, &mut *write_blocks_blooms, update.blocks_blooms, CacheUpdatePolicy::Remove);
}
// These cached values must be updated last with all three locks taken to avoid
// These cached values must be updated last with all four locks taken to avoid
// cache decoherence
{
let mut best_block = self.pending_best_block.write();
@ -934,8 +923,10 @@ impl BlockChain {
},
}
let mut write_hashes = self.pending_block_hashes.write();
let mut write_details = self.pending_block_details.write();
let mut write_txs = self.pending_transaction_addresses.write();
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update.block_details, CacheUpdatePolicy::Overwrite);
batch.extend_with_cache(db::COL_EXTRA, &mut *write_hashes, update.block_hashes, CacheUpdatePolicy::Overwrite);
batch.extend_with_option_cache(db::COL_EXTRA, &mut *write_txs, update.transactions_addresses, CacheUpdatePolicy::Overwrite);
}
@ -945,9 +936,11 @@ impl BlockChain {
pub fn commit(&self) {
let mut pending_best_block = self.pending_best_block.write();
let mut pending_write_hashes = self.pending_block_hashes.write();
let mut pending_block_details = self.pending_block_details.write();
let mut pending_write_txs = self.pending_transaction_addresses.write();
let mut best_block = self.best_block.write();
let mut write_block_details = self.block_details.write();
let mut write_hashes = self.block_hashes.write();
let mut write_txs = self.transaction_addresses.write();
// update best block
@ -960,9 +953,11 @@ impl BlockChain {
let pending_hashes_keys: Vec<_> = pending_write_hashes.keys().cloned().collect();
let enacted_txs_keys: Vec<_> = enacted_txs.keys().cloned().collect();
let pending_block_hashes: Vec<_> = pending_block_details.keys().cloned().collect();
write_hashes.extend(mem::replace(&mut *pending_write_hashes, HashMap::new()));
write_txs.extend(enacted_txs.into_iter().map(|(k, v)| (k, v.expect("Transactions were partitioned; qed"))));
write_block_details.extend(mem::replace(&mut *pending_block_details, HashMap::new()));
for hash in retracted_txs.keys() {
write_txs.remove(hash);
@ -976,6 +971,10 @@ impl BlockChain {
for hash in enacted_txs_keys {
cache_man.note_used(CacheID::TransactionAddresses(hash));
}
for hash in pending_block_hashes {
cache_man.note_used(CacheID::BlockDetails(hash));
}
}
/// Iterator that lists `first` and then all of `first`'s ancestors, by hash.
@ -1296,6 +1295,11 @@ impl BlockChain {
ancient_block_number: best_ancient_block.as_ref().map(|b| b.number),
}
}
#[cfg(test)]
pub fn db(&self) -> &Arc<Database> {
&self.db
}
}
#[cfg(test)]

View File

@ -66,7 +66,7 @@ impl<T> CacheManager<T> where T: Eq + Hash {
}
fn rotate_cache_if_needed(&mut self) {
if self.cache_usage.len() == 0 { return }
if self.cache_usage.is_empty() { return }
if self.cache_usage[0].len() * self.bytes_per_cache_entry > self.pref_cache_size / COLLECTION_QUEUE_SIZE {
if let Some(cache) = self.cache_usage.pop_back() {

View File

@ -45,6 +45,7 @@ use block::*;
use transaction::{LocalizedTransaction, SignedTransaction, Action};
use blockchain::extras::TransactionAddress;
use types::filter::Filter;
use types::mode::Mode as IpcMode;
use log_entry::LocalizedLogEntry;
use verification::queue::BlockQueue;
use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute};
@ -60,12 +61,13 @@ use receipt::LocalizedReceipt;
use trace::{TraceDB, ImportRequest as TraceImportRequest, LocalizedTrace, Database as TraceDatabase};
use trace;
use trace::FlatTransactionTraces;
use evm::Factory as EvmFactory;
use evm::{Factory as EvmFactory, Schedule};
use miner::{Miner, MinerService};
use snapshot::{self, io as snapshot_io};
use factory::Factories;
use rlp::{View, UntrustedRlp};
use state_db::StateDB;
use rand::OsRng;
// re-export
pub use types::blockchain_info::BlockChainInfo;
@ -122,7 +124,7 @@ impl SleepState {
/// Blockchain database client backed by a persistent database. Owns and manages a blockchain and a block queue.
/// Call `import_block()` to import a block asynchronously; `flush_queue()` flushes the queue.
pub struct Client {
mode: Mode,
mode: Mutex<Mode>,
chain: RwLock<Arc<BlockChain>>,
tracedb: RwLock<TraceDB<BlockChain>>,
engine: Arc<Engine>,
@ -138,12 +140,13 @@ pub struct Client {
miner: Arc<Miner>,
sleep_state: Mutex<SleepState>,
liveness: AtomicBool,
io_channel: IoChannel<ClientIoMessage>,
io_channel: Mutex<IoChannel<ClientIoMessage>>,
notify: RwLock<Vec<Weak<ChainNotify>>>,
queue_transactions: AtomicUsize,
last_hashes: RwLock<VecDeque<H256>>,
factories: Factories,
history: u64,
rng: Mutex<OsRng>,
}
impl Client {
@ -219,7 +222,7 @@ impl Client {
let client = Client {
sleep_state: Mutex::new(SleepState::new(awake)),
liveness: AtomicBool::new(awake),
mode: config.mode.clone(),
mode: Mutex::new(config.mode.clone()),
chain: RwLock::new(chain),
tracedb: tracedb,
engine: engine,
@ -233,12 +236,13 @@ impl Client {
import_lock: Mutex::new(()),
panic_handler: panic_handler,
miner: miner,
io_channel: message_channel,
io_channel: Mutex::new(message_channel),
notify: RwLock::new(Vec::new()),
queue_transactions: AtomicUsize::new(0),
last_hashes: RwLock::new(VecDeque::new()),
factories: factories,
history: history,
rng: Mutex::new(try!(OsRng::new().map_err(::util::UtilError::StdIo))),
};
Ok(Arc::new(client))
}
@ -314,7 +318,7 @@ impl Client {
if let Some(parent) = chain_has_parent {
// Enact Verified Block
let last_hashes = self.build_last_hashes(header.parent_hash().clone());
let db = self.state_db.lock().boxed_clone_canon(&header.parent_hash());
let db = self.state_db.lock().boxed_clone_canon(header.parent_hash());
let enact_result = enact_verified(block, engine, self.tracedb.read().tracing_enabled(), db, &parent, last_hashes, self.factories.clone());
let locked_block = try!(enact_result.map_err(|e| {
@ -434,14 +438,26 @@ impl Client {
/// Import a block with transaction receipts.
/// The block is guaranteed to be the next best block in the first block sequence.
/// Does no sealing or transaction validation.
fn import_old_block(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> H256 {
fn import_old_block(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> Result<H256, ::error::Error> {
let block = BlockView::new(&block_bytes);
let hash = block.header().hash();
let header = block.header();
let hash = header.hash();
let _import_lock = self.import_lock.lock();
{
let _timer = PerfTimer::new("import_old_block");
let mut rng = self.rng.lock();
let chain = self.chain.read();
// verify block.
try!(::snapshot::verify_old_block(
&mut *rng,
&header,
&*self.engine,
&*chain,
Some(&block_bytes),
false,
));
// Commit results
let receipts = ::rlp::decode(&receipts_bytes);
let mut batch = DBTransaction::new(&self.db.read());
@ -451,7 +467,7 @@ impl Client {
chain.commit();
}
self.db.read().flush().expect("DB flush failed.");
hash
Ok(hash)
}
fn commit_block<B>(&self, block: B, hash: &H256, block_data: &[u8]) -> ImportRoute where B: IsBlock + Drain {
@ -599,7 +615,8 @@ impl Client {
self.block_queue.collect_garbage();
self.tracedb.read().collect_garbage();
match self.mode {
let mode = self.mode.lock().clone();
match mode {
Mode::Dark(timeout) => {
let mut ss = self.sleep_state.lock();
if let Some(t) = ss.last_activity {
@ -751,7 +768,7 @@ impl BlockChainClient for Client {
fn call(&self, t: &SignedTransaction, block: BlockID, analytics: CallAnalytics) -> Result<Executed, CallError> {
let header = try!(self.block_header(block).ok_or(CallError::StatePruned));
let view = HeaderView::new(&header);
let last_hashes = self.build_last_hashes(view.hash());
let last_hashes = self.build_last_hashes(view.parent_hash());
let env_info = EnvInfo {
number: view.number(),
author: view.author(),
@ -823,12 +840,24 @@ impl BlockChainClient for Client {
}
fn keep_alive(&self) {
if self.mode != Mode::Active {
let mode = self.mode.lock().clone();
if mode != Mode::Active {
self.wake_up();
(*self.sleep_state.lock()).last_activity = Some(Instant::now());
}
}
fn mode(&self) -> IpcMode { self.mode.lock().clone().into() }
fn set_mode(&self, mode: IpcMode) {
*self.mode.lock() = mode.clone().into();
match mode {
IpcMode::Active => self.wake_up(),
IpcMode::Off => self.sleep(),
_ => {(*self.sleep_state.lock()).last_activity = Some(Instant::now()); }
}
}
fn best_block_header(&self) -> Bytes {
self.chain.read().best_block_header()
}
@ -1036,7 +1065,7 @@ impl BlockChainClient for Client {
return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash())));
}
}
Ok(self.import_old_block(block_bytes, receipts_bytes))
self.import_old_block(block_bytes, receipts_bytes).map_err(Into::into)
}
fn queue_info(&self) -> BlockQueueInfo {
@ -1124,7 +1153,7 @@ impl BlockChainClient for Client {
debug!("Ignoring {} transactions: queue is full", transactions.len());
} else {
let len = transactions.len();
match self.io_channel.send(ClientIoMessage::NewTransactions(transactions)) {
match self.io_channel.lock().send(ClientIoMessage::NewTransactions(transactions)) {
Ok(_) => {
self.queue_transactions.fetch_add(len, AtomicOrdering::SeqCst);
}
@ -1141,6 +1170,23 @@ impl BlockChainClient for Client {
}
impl MiningBlockChainClient for Client {
fn latest_schedule(&self) -> Schedule {
let header_data = self.best_block_header();
let view = HeaderView::new(&header_data);
let env_info = EnvInfo {
number: view.number(),
author: view.author(),
timestamp: view.timestamp(),
difficulty: view.difficulty(),
last_hashes: self.build_last_hashes(view.hash()),
gas_used: U256::default(),
gas_limit: view.gas_limit(),
};
self.engine.schedule(&env_info)
}
fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock {
let engine = &*self.engine;
let chain = self.chain.read();
@ -1216,3 +1262,33 @@ impl MayPanic for Client {
self.panic_handler.on_panic(closure);
}
}
#[test]
fn should_not_cache_details_before_commit() {
use tests::helpers::*;
use std::thread;
use std::time::Duration;
use std::sync::atomic::{AtomicBool, Ordering};
let client = generate_dummy_client(0);
let genesis = client.chain_info().best_block_hash;
let (new_hash, new_block) = get_good_dummy_block_hash();
let go = {
// Separate thread with an uncommitted transaction
let go = Arc::new(AtomicBool::new(false));
let go_thread = go.clone();
let another_client = client.reference().clone();
thread::spawn(move || {
let mut batch = DBTransaction::new(&*another_client.chain.read().db().clone());
another_client.chain.read().insert_block(&mut batch, &new_block, Vec::new());
go_thread.store(true, Ordering::SeqCst);
});
go
};
while !go.load(Ordering::SeqCst) { thread::park_timeout(Duration::from_millis(5)); }
assert!(client.tree_route(&genesis, &new_hash).is_none());
}

View File

@ -76,6 +76,8 @@ pub enum Mode {
/// Goes offline after RLP is inactive for some (given) time and
/// stays inactive.
Dark(Duration),
/// Always off.
Off,
}
impl Default for Mode {

View File

@ -34,9 +34,10 @@ use log_entry::LocalizedLogEntry;
use receipt::{Receipt, LocalizedReceipt};
use blockchain::extras::BlockReceipts;
use error::{ImportResult};
use evm::{Factory as EvmFactory, VMType};
use evm::{Factory as EvmFactory, VMType, Schedule};
use miner::{Miner, MinerService, TransactionImportResult};
use spec::Spec;
use types::mode::Mode;
use verification::queue::QueueInfo;
use block::{OpenBlock, SealedBlock};
@ -83,6 +84,10 @@ pub struct TestBlockChainClient {
pub vm_factory: EvmFactory,
/// Timestamp assigned to latest sealed block
pub latest_block_timestamp: RwLock<u64>,
/// Ancient block info.
pub ancient_block: RwLock<Option<(H256, u64)>>,
/// First block info.
pub first_block: RwLock<Option<(H256, u64)>>,
}
#[derive(Clone)]
@ -132,12 +137,14 @@ impl TestBlockChainClient {
spec: spec,
vm_factory: EvmFactory::new(VMType::Interpreter, 1024 * 1024),
latest_block_timestamp: RwLock::new(10_000_000),
ancient_block: RwLock::new(None),
first_block: RwLock::new(None),
};
client.add_blocks(1, EachBlockWith::Nothing); // add genesis block
client.genesis_hash = client.last_hash.read().clone();
client
}
/// Set the transaction receipt result
pub fn set_transaction_receipt(&self, id: TransactionID, receipt: LocalizedReceipt) {
self.receipts.write().insert(id, receipt);
@ -306,6 +313,10 @@ pub fn get_temp_state_db() -> GuardedTempResult<StateDB> {
}
impl MiningBlockChainClient for TestBlockChainClient {
fn latest_schedule(&self) -> Schedule {
Schedule::new_homestead_gas_fix()
}
fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock {
let engine = &*self.spec.engine;
let genesis_header = self.spec.genesis_header();
@ -589,10 +600,10 @@ impl BlockChainClient for TestBlockChainClient {
genesis_hash: self.genesis_hash.clone(),
best_block_hash: self.last_hash.read().clone(),
best_block_number: self.blocks.read().len() as BlockNumber - 1,
first_block_hash: None,
first_block_number: None,
ancient_block_hash: None,
ancient_block_number: None,
first_block_hash: self.first_block.read().as_ref().map(|x| x.0),
first_block_number: self.first_block.read().as_ref().map(|x| x.1),
ancient_block_hash: self.ancient_block.read().as_ref().map(|x| x.0),
ancient_block_number: self.ancient_block.read().as_ref().map(|x| x.1)
}
}
@ -621,4 +632,8 @@ impl BlockChainClient for TestBlockChainClient {
fn pending_transactions(&self) -> Vec<SignedTransaction> {
self.miner.pending_transactions(self.chain_info().best_block_number)
}
fn mode(&self) -> Mode { Mode::Active }
fn set_mode(&self, _: Mode) { unimplemented!(); }
}

View File

@ -16,6 +16,7 @@
use std::collections::BTreeMap;
use util::{U256, Address, H256, H2048, Bytes, Itertools};
use util::stats::Histogram;
use blockchain::TreeRoute;
use verification::queue::QueueInfo as BlockQueueInfo;
use block::{OpenBlock, SealedBlock};
@ -27,7 +28,7 @@ use views::{BlockView};
use error::{ImportResult, CallError};
use receipt::LocalizedReceipt;
use trace::LocalizedTrace;
use evm::Factory as EvmFactory;
use evm::{Factory as EvmFactory, Schedule};
use types::ids::*;
use types::trace_filter::Filter as TraceFilter;
use executive::Executed;
@ -37,6 +38,7 @@ use block_import_error::BlockImportError;
use ipc::IpcConfig;
use types::blockchain_info::BlockChainInfo;
use types::block_status::BlockStatus;
use types::mode::Mode;
#[ipc(client_ident="RemoteClient")]
/// Blockchain database client. Owns and manages a blockchain and a block queue.
@ -190,8 +192,8 @@ pub trait BlockChainClient : Sync + Send {
/// list all transactions
fn pending_transactions(&self) -> Vec<SignedTransaction>;
/// Get the gas price distribution.
fn gas_price_statistics(&self, sample_size: usize, distribution_size: usize) -> Result<Vec<U256>, ()> {
/// Sorted list of transaction gas prices from at least the last `sample_size` blocks.
fn gas_price_corpus(&self, sample_size: usize) -> Vec<U256> {
let mut h = self.chain_info().best_block_hash;
let mut corpus = Vec::new();
while corpus.is_empty() {
@ -200,26 +202,34 @@ pub trait BlockChainClient : Sync + Send {
let block = BlockView::new(&block_bytes);
let header = block.header_view();
if header.number() == 0 {
if corpus.is_empty() {
corpus.push(20_000_000_000u64.into()); // we have literally no information - it's as good a number as any.
}
break;
return corpus;
}
block.transaction_views().iter().foreach(|t| corpus.push(t.gas_price()));
h = header.parent_hash().clone();
}
}
corpus.sort();
let n = corpus.len();
if n > 0 {
Ok((0..(distribution_size + 1))
.map(|i| corpus[i * (n - 1) / distribution_size])
.collect::<Vec<_>>()
)
} else {
Err(())
}
corpus
}
/// Calculate median gas price from recent blocks if they have any transactions.
fn gas_price_median(&self, sample_size: usize) -> Option<U256> {
let corpus = self.gas_price_corpus(sample_size);
corpus.get(corpus.len()/2).cloned()
}
/// Get the gas price distribution based on recent blocks if they have any transactions.
fn gas_price_histogram(&self, sample_size: usize, bucket_number: usize) -> Option<Histogram> {
let raw_corpus = self.gas_price_corpus(sample_size);
let raw_len = raw_corpus.len();
// Throw out outliers.
let (corpus, _) = raw_corpus.split_at(raw_len-raw_len/40);
Histogram::new(corpus, bucket_number)
}
fn mode(&self) -> Mode;
fn set_mode(&self, mode: Mode);
}
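
For orientation, a minimal standalone sketch (hypothetical helpers over plain u64 values rather than U256) of the selection step in gas_price_median and the outlier trim in gas_price_histogram, both applied to the sorted corpus returned by gas_price_corpus:

// Sketch only: the corpus is assumed to be sorted ascending, as gas_price_corpus guarantees.
fn median(corpus: &[u64]) -> Option<u64> {
    corpus.get(corpus.len() / 2).cloned()
}

// Drop the top len/40 entries (~2.5%) before bucketing, mirroring the trim above.
fn trim_outliers(corpus: &[u64]) -> &[u64] {
    let len = corpus.len();
    &corpus[..len - len / 40]
}

fn main() {
    let corpus = vec![1u64, 2, 3, 4, 100];
    assert_eq!(median(&corpus), Some(3));
    // For fewer than 40 samples len/40 == 0, so nothing is trimmed.
    assert_eq!(trim_outliers(&corpus).len(), 5);
}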
/// Extended client interface used for mining
@ -236,6 +246,9 @@ pub trait MiningBlockChainClient : BlockChainClient {
/// Import sealed block. Skips all verifications.
fn import_sealed_block(&self, block: SealedBlock) -> ImportResult;
/// Returns latest schedule.
fn latest_schedule(&self) -> Schedule;
}
impl IpcConfig for BlockChainClient { }

View File

@ -114,7 +114,7 @@ pub trait Writable {
R: Deref<Target = [u8]> {
match policy {
CacheUpdatePolicy::Overwrite => {
for (key, value) in values.into_iter() {
for (key, value) in values {
self.write(col, &key, &value);
cache.insert(key, value);
}
@ -135,7 +135,7 @@ pub trait Writable {
R: Deref<Target = [u8]> {
match policy {
CacheUpdatePolicy::Overwrite => {
for (key, value) in values.into_iter() {
for (key, value) in values {
match value {
Some(ref v) => self.write(col, &key, v),
None => self.delete(col, &key),
@ -144,7 +144,7 @@ pub trait Writable {
}
},
CacheUpdatePolicy::Remove => {
for (key, value) in values.into_iter() {
for (key, value) in values {
match value {
Some(v) => self.write(col, &key, &v),
None => self.delete(col, &key),

View File

@ -47,6 +47,13 @@ pub enum TransactionError {
/// Transaction gas price
got: U256,
},
/// Transaction's gas is below the currently set minimal gas requirement.
InsufficientGas {
/// Minimal expected gas
minimal: U256,
/// Transaction gas
got: U256,
},
/// Sender doesn't have enough funds to pay for this transaction
InsufficientBalance {
/// Senders balance
@ -63,6 +70,12 @@ pub enum TransactionError {
},
/// Transaction's gas limit (aka gas) is invalid.
InvalidGasLimit(OutOfBounds<U256>),
/// Transaction sender is banned.
SenderBanned,
/// Transaction recipient is banned.
RecipientBanned,
/// Contract creation code is banned.
CodeBanned,
}
impl fmt::Display for TransactionError {
@ -75,12 +88,17 @@ impl fmt::Display for TransactionError {
LimitReached => "Transaction limit reached".into(),
InsufficientGasPrice { minimal, got } =>
format!("Insufficient gas price. Min={}, Given={}", minimal, got),
InsufficientGas { minimal, got } =>
format!("Insufficient gas. Min={}, Given={}", minimal, got),
InsufficientBalance { balance, cost } =>
format!("Insufficient balance for transaction. Balance={}, Cost={}",
balance, cost),
GasLimitExceeded { limit, got } =>
format!("Gas limit exceeded. Limit={}, Given={}", limit, got),
InvalidGasLimit(ref err) => format!("Invalid gas limit. {}", err),
SenderBanned => "Sender is temporarily banned.".into(),
RecipientBanned => "Recipient is temporarily banned.".into(),
CodeBanned => "Contract code is temporarily banned.".into(),
};
f.write_fmt(format_args!("Transaction error ({})", msg))

View File

@ -173,7 +173,7 @@ lazy_static! {
arr[SIGNEXTEND as usize] = InstructionInfo::new("SIGNEXTEND", 0, 2, 1, false, GasPriceTier::Low);
arr[SHA3 as usize] = InstructionInfo::new("SHA3", 0, 2, 1, false, GasPriceTier::Special);
arr[ADDRESS as usize] = InstructionInfo::new("ADDRESS", 0, 0, 1, false, GasPriceTier::Base);
arr[BALANCE as usize] = InstructionInfo::new("BALANCE", 0, 1, 1, false, GasPriceTier::Ext);
arr[BALANCE as usize] = InstructionInfo::new("BALANCE", 0, 1, 1, false, GasPriceTier::Special);
arr[ORIGIN as usize] = InstructionInfo::new("ORIGIN", 0, 0, 1, false, GasPriceTier::Base);
arr[CALLER as usize] = InstructionInfo::new("CALLER", 0, 0, 1, false, GasPriceTier::Base);
arr[CALLVALUE as usize] = InstructionInfo::new("CALLVALUE", 0, 0, 1, false, GasPriceTier::Base);
@ -183,8 +183,8 @@ lazy_static! {
arr[CODESIZE as usize] = InstructionInfo::new("CODESIZE", 0, 0, 1, false, GasPriceTier::Base);
arr[CODECOPY as usize] = InstructionInfo::new("CODECOPY", 0, 3, 0, true, GasPriceTier::VeryLow);
arr[GASPRICE as usize] = InstructionInfo::new("GASPRICE", 0, 0, 1, false, GasPriceTier::Base);
arr[EXTCODESIZE as usize] = InstructionInfo::new("EXTCODESIZE", 0, 1, 1, false, GasPriceTier::Ext);
arr[EXTCODECOPY as usize] = InstructionInfo::new("EXTCODECOPY", 0, 4, 0, true, GasPriceTier::Ext);
arr[EXTCODESIZE as usize] = InstructionInfo::new("EXTCODESIZE", 0, 1, 1, false, GasPriceTier::Special);
arr[EXTCODECOPY as usize] = InstructionInfo::new("EXTCODECOPY", 0, 4, 0, true, GasPriceTier::Special);
arr[BLOCKHASH as usize] = InstructionInfo::new("BLOCKHASH", 0, 1, 1, false, GasPriceTier::Ext);
arr[COINBASE as usize] = InstructionInfo::new("COINBASE", 0, 0, 1, false, GasPriceTier::Base);
arr[TIMESTAMP as usize] = InstructionInfo::new("TIMESTAMP", 0, 0, 1, false, GasPriceTier::Base);
@ -277,7 +277,7 @@ lazy_static! {
arr[CALLCODE as usize] = InstructionInfo::new("CALLCODE", 0, 7, 1, true, GasPriceTier::Special);
arr[RETURN as usize] = InstructionInfo::new("RETURN", 0, 2, 0, true, GasPriceTier::Zero);
arr[DELEGATECALL as usize] = InstructionInfo::new("DELEGATECALL", 0, 6, 1, true, GasPriceTier::Special);
arr[SUICIDE as usize] = InstructionInfo::new("SUICIDE", 0, 1, 0, true, GasPriceTier::Zero);
arr[SUICIDE as usize] = InstructionInfo::new("SUICIDE", 0, 1, 0, true, GasPriceTier::Special);
arr
};
}

View File

@ -30,12 +30,20 @@ macro_rules! overflowing {
}
#[cfg_attr(feature="dev", allow(enum_variant_names))]
enum InstructionCost<Cost: CostType> {
enum Request<Cost: CostType> {
Gas(Cost),
GasMem(Cost, Cost, Option<Cost>),
GasMem(Cost, Cost),
GasMemProvide(Cost, Cost, Option<U256>),
GasMemCopy(Cost, Cost, Cost)
}
pub struct InstructionRequirements<Cost: CostType> {
pub gas_cost: Cost,
pub provide_gas: Option<Cost>,
pub memory_total_gas: Cost,
pub memory_required_size: usize,
}
pub struct Gasometer<Gas: CostType> {
pub current_gas: Gas,
pub current_mem_gas: Gas,
@ -59,11 +67,19 @@ impl<Gas: CostType> Gasometer<Gas> {
/// How much gas is provided to a CALL/CREATE, given that we need to deduct `needed` for this operation
/// and that we `requested` some.
pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<evm::Result<Gas>>) -> evm::Result<Gas> {
pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<U256>) -> evm::Result<Gas> {
// Try converting requested gas to `Gas` (`U256/u64`)
// but in EIP150 even if we request more we should never fail from OOG
let requested = requested.map(Gas::from_u256);
match schedule.sub_gas_cap_divisor {
Some(cap_divisor) if self.current_gas >= needed => {
let gas_remaining = self.current_gas - needed;
let max_gas_provided = gas_remaining - gas_remaining / Gas::from(cap_divisor);
let max_gas_provided = match cap_divisor {
64 => gas_remaining - (gas_remaining >> 6),
cap_divisor => gas_remaining - gas_remaining / Gas::from(cap_divisor),
};
if let Some(Ok(r)) = requested {
Ok(min(r, max_gas_provided))
} else {
@ -78,7 +94,7 @@ impl<Gas: CostType> Gasometer<Gas> {
} else {
Ok(0.into())
}
}
},
}
}
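
A minimal standalone sketch of the cap computed by the fast path above; the special-cased 64 divisor is just a cheaper spelling of the same division (helper name and values are illustrative, not taken from a Schedule):

fn max_gas_provided(gas_remaining: u64, cap_divisor: u64) -> u64 {
    match cap_divisor {
        // EIP-150 "all but one 64th": identical to the division, but cheaper.
        64 => gas_remaining - (gas_remaining >> 6),
        d => gas_remaining - gas_remaining / d,
    }
}

fn main() {
    let remaining = 1_000_000u64;
    // Both forms agree: 1_000_000 - 15_625 = 984_375.
    assert_eq!(max_gas_provided(remaining, 64), 984_375);
    assert_eq!(remaining - remaining / 64, 984_375);
}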
@ -88,21 +104,21 @@ impl<Gas: CostType> Gasometer<Gas> {
/// We guarantee that `provide_gas` in the returned requirements will be `Some`
/// iff the `instruction` is `CREATE`, or any of the `CALL` variants. In this case,
/// it will be the amount of gas that the current context provides to the child context.
pub fn get_gas_cost_mem(
pub fn requirements(
&mut self,
ext: &evm::Ext,
instruction: Instruction,
info: &InstructionInfo,
stack: &Stack<U256>,
current_mem_size: usize,
) -> evm::Result<(Gas, Gas, usize, Option<Gas>)> {
) -> evm::Result<InstructionRequirements<Gas>> {
let schedule = ext.schedule();
let tier = instructions::get_tier_idx(info.tier);
let default_gas = Gas::from(schedule.tier_step_gas[tier]);
let cost = match instruction {
instructions::JUMPDEST => {
InstructionCost::Gas(Gas::from(1))
Request::Gas(Gas::from(1))
},
instructions::SSTORE => {
let address = H256::from(stack.peek(0));
@ -116,16 +132,16 @@ impl<Gas: CostType> Gasometer<Gas> {
// !is_zero(&val) && is_zero(newval)
schedule.sstore_reset_gas
};
InstructionCost::Gas(Gas::from(gas))
Request::Gas(Gas::from(gas))
},
instructions::SLOAD => {
InstructionCost::Gas(Gas::from(schedule.sload_gas))
Request::Gas(Gas::from(schedule.sload_gas))
},
instructions::BALANCE => {
InstructionCost::Gas(Gas::from(schedule.balance_gas))
Request::Gas(Gas::from(schedule.balance_gas))
},
instructions::EXTCODESIZE => {
InstructionCost::Gas(Gas::from(schedule.extcodesize_gas))
Request::Gas(Gas::from(schedule.extcodesize_gas))
},
instructions::SUICIDE => {
let mut gas = Gas::from(schedule.suicide_gas);
@ -135,28 +151,28 @@ impl<Gas: CostType> Gasometer<Gas> {
gas = overflowing!(gas.overflow_add(schedule.suicide_to_new_account_cost.into()));
}
InstructionCost::Gas(gas)
Request::Gas(gas)
},
instructions::MSTORE | instructions::MLOAD => {
InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 32)), None)
Request::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 32)))
},
instructions::MSTORE8 => {
InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 1)), None)
Request::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 1)))
},
instructions::RETURN => {
InstructionCost::GasMem(default_gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None)
Request::GasMem(default_gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
},
instructions::SHA3 => {
let w = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(1))), 31));
let words = w >> 5;
let gas = Gas::from(schedule.sha3_gas) + (Gas::from(schedule.sha3_word_gas) * words);
InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None)
Request::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
},
instructions::CALLDATACOPY | instructions::CODECOPY => {
InstructionCost::GasMemCopy(default_gas, try!(mem_needed(stack.peek(0), stack.peek(2))), try!(Gas::from_u256(*stack.peek(2))))
Request::GasMemCopy(default_gas, try!(mem_needed(stack.peek(0), stack.peek(2))), try!(Gas::from_u256(*stack.peek(2))))
},
instructions::EXTCODECOPY => {
InstructionCost::GasMemCopy(schedule.extcodecopy_base_gas.into(), try!(mem_needed(stack.peek(1), stack.peek(3))), try!(Gas::from_u256(*stack.peek(3))))
Request::GasMemCopy(schedule.extcodecopy_base_gas.into(), try!(mem_needed(stack.peek(1), stack.peek(3))), try!(Gas::from_u256(*stack.peek(3))))
},
instructions::LOG0...instructions::LOG4 => {
let no_of_topics = instructions::get_log_topics(instruction);
@ -164,7 +180,7 @@ impl<Gas: CostType> Gasometer<Gas> {
let data_gas = overflowing!(try!(Gas::from_u256(*stack.peek(1))).overflow_mul(Gas::from(schedule.log_data_gas)));
let gas = overflowing!(data_gas.overflow_add(Gas::from(log_gas)));
InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None)
Request::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
},
instructions::CALL | instructions::CALLCODE => {
let mut gas = Gas::from(schedule.call_gas);
@ -183,70 +199,82 @@ impl<Gas: CostType> Gasometer<Gas> {
gas = overflowing!(gas.overflow_add(schedule.call_value_transfer_gas.into()));
};
// TODO: refactor to avoid duplicate calculation here and later on.
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let requested = Gas::from_u256(*stack.peek(0));
let provided = try!(self.gas_provided(schedule, cost_so_far, Some(requested)));
gas = overflowing!(gas.overflow_add(provided));
let requested = *stack.peek(0);
InstructionCost::GasMem(gas, mem, Some(provided))
Request::GasMemProvide(gas, mem, Some(requested))
},
instructions::DELEGATECALL => {
let mut gas = Gas::from(schedule.call_gas);
let gas = Gas::from(schedule.call_gas);
let mem = cmp::max(
try!(mem_needed(stack.peek(4), stack.peek(5))),
try!(mem_needed(stack.peek(2), stack.peek(3)))
);
let requested = *stack.peek(0);
// TODO: refactor to avoid duplicate calculation here and later on.
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let requested = Gas::from_u256(*stack.peek(0));
let provided = try!(self.gas_provided(schedule, cost_so_far, Some(requested)));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
Request::GasMemProvide(gas, mem, Some(requested))
},
instructions::CREATE => {
let mut gas = Gas::from(schedule.create_gas);
let gas = Gas::from(schedule.create_gas);
let mem = try!(mem_needed(stack.peek(1), stack.peek(2)));
// TODO: refactor to avoid duplicate calculation here and later on.
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let provided = try!(self.gas_provided(schedule, cost_so_far, None));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
Request::GasMemProvide(gas, mem, None)
},
instructions::EXP => {
let expon = stack.peek(1);
let bytes = ((expon.bits() + 7) / 8) as usize;
let gas = Gas::from(schedule.exp_gas + schedule.exp_byte_gas * bytes);
InstructionCost::Gas(gas)
Request::Gas(gas)
},
_ => InstructionCost::Gas(default_gas),
_ => Request::Gas(default_gas),
};
match cost {
InstructionCost::Gas(gas) => {
Ok((gas, self.current_mem_gas, 0, None))
Ok(match cost {
Request::Gas(gas) => {
InstructionRequirements {
gas_cost: gas,
provide_gas: None,
memory_required_size: 0,
memory_total_gas: self.current_mem_gas,
}
},
InstructionCost::GasMem(gas, mem_size, provided) => {
Request::GasMem(gas, mem_size) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let gas = overflowing!(gas.overflow_add(mem_gas_cost));
Ok((gas, new_mem_gas, new_mem_size, provided))
InstructionRequirements {
gas_cost: gas,
provide_gas: None,
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
}
},
InstructionCost::GasMemCopy(gas, mem_size, copy) => {
Request::GasMemProvide(gas, mem_size, requested) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let gas = overflowing!(gas.overflow_add(mem_gas_cost));
let provided = try!(self.gas_provided(schedule, gas, requested));
let total_gas = overflowing!(gas.overflow_add(provided));
InstructionRequirements {
gas_cost: total_gas,
provide_gas: Some(provided),
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
}
},
Request::GasMemCopy(gas, mem_size, copy) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let copy = overflowing!(add_gas_usize(copy, 31)) >> 5;
let copy_gas = Gas::from(schedule.copy_gas) * copy;
let gas = overflowing!(gas.overflow_add(copy_gas));
let gas = overflowing!(gas.overflow_add(mem_gas_cost));
Ok((gas, new_mem_gas, new_mem_size, None))
}
}
InstructionRequirements {
gas_cost: gas,
provide_gas: None,
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
}
},
})
}
fn mem_gas_cost(&self, schedule: &evm::Schedule, current_mem_size: usize, mem_size: &Gas) -> evm::Result<(Gas, Gas, usize)> {
@ -256,7 +284,7 @@ impl<Gas: CostType> Gasometer<Gas> {
let a = overflowing!(s.overflow_mul(Gas::from(schedule.memory_gas)));
// Calculate s*s/quad_coeff_div
debug_assert_eq!(schedule.quad_coeff_div, 512);
assert_eq!(schedule.quad_coeff_div, 512);
let b = overflowing!(s.overflow_mul_shr(s, 9));
Ok(overflowing!(a.overflow_add(b)))
};
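
A worked standalone sketch of the quadratic memory cost this closure computes, assuming the constants asserted above (memory_gas = 3, quad_coeff_div = 512; overflow_mul_shr(s, 9) is the s*s/512 term):

// Memory expansion cost for `words` 32-byte words: a + b = words * 3 + words^2 / 512.
fn mem_gas(words: u64) -> u64 {
    let memory_gas = 3;        // schedule.memory_gas (assumed Homestead value)
    let quad_coeff_div = 512;  // schedule.quad_coeff_div, asserted above
    words * memory_gas + (words * words) / quad_coeff_div
}

fn main() {
    assert_eq!(mem_gas(1), 3);    // 32 bytes
    assert_eq!(mem_gas(32), 98);  // 1 KiB: 96 linear + 2 quadratic
}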
@ -328,3 +356,4 @@ fn test_calculate_mem_cost() {
assert_eq!(new_mem_gas, 3);
assert_eq!(mem_size, 32);
}

View File

@ -54,14 +54,14 @@ const TWO_POW_248: U256 = U256([0, 0, 0, 0x100000000000000]); //0x1 00000000 000
/// Abstraction over raw vector of Bytes. Easier state management of PC.
struct CodeReader<'a> {
position: ProgramCounter,
code: &'a Bytes
code: &'a [u8]
}
#[cfg_attr(feature="dev", allow(len_without_is_empty))]
impl<'a> CodeReader<'a> {
/// Create new code reader - starting at position 0.
fn new(code: &'a Bytes) -> Self {
fn new(code: &'a [u8]) -> Self {
CodeReader {
position: 0,
code: code,
@ -120,14 +120,14 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
try!(self.verify_instruction(ext, instruction, info, &stack));
// Calculate gas cost
let (gas_cost, mem_gas, mem_size, provided) = try!(gasometer.get_gas_cost_mem(ext, instruction, info, &stack, self.mem.size()));
let requirements = try!(gasometer.requirements(ext, instruction, info, &stack, self.mem.size()));
// TODO: make compile-time removable if too much of a performance hit.
let trace_executed = ext.trace_prepare_execute(reader.position - 1, instruction, &gas_cost.as_u256());
let trace_executed = ext.trace_prepare_execute(reader.position - 1, instruction, &requirements.gas_cost.as_u256());
try!(gasometer.verify_gas(&gas_cost));
self.mem.expand(mem_size);
gasometer.current_mem_gas = mem_gas;
gasometer.current_gas = gasometer.current_gas - gas_cost;
try!(gasometer.verify_gas(&requirements.gas_cost));
self.mem.expand(requirements.memory_required_size);
gasometer.current_mem_gas = requirements.memory_total_gas;
gasometer.current_gas = gasometer.current_gas - requirements.gas_cost;
evm_debug!({ informant.before_instruction(reader.position, instruction, info, &gasometer.current_gas, &stack) });
@ -138,7 +138,7 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
// Execute instruction
let result = try!(self.exec_instruction(
gasometer.current_gas, &params, ext, instruction, &mut reader, &mut stack, provided
gasometer.current_gas, &params, ext, instruction, &mut reader, &mut stack, requirements.provide_gas
));
evm_debug!({ informant.after_instruction(instruction) });

View File

@ -15,20 +15,27 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use lru_cache::LruCache;
use util::{H256, Mutex};
use util::{H256, HeapSizeOf, Mutex};
use util::sha3::*;
use util::cache::MemoryLruCache;
use bit_set::BitSet;
use super::super::instructions;
const INITIAL_CAPACITY: usize = 32;
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
// stub for a HeapSizeOf implementation.
struct Bits(Arc<BitSet>);
impl HeapSizeOf for Bits {
fn heap_size_of_children(&self) -> usize {
// dealing in bits here
self.0.capacity() * 8
}
}
/// Global cache for EVM interpreter
pub struct SharedCache {
jump_destinations: Mutex<LruCache<H256, Arc<BitSet>>>,
max_size: usize,
cur_size: Mutex<usize>,
jump_destinations: Mutex<MemoryLruCache<H256, Bits>>,
}
impl SharedCache {
@ -36,9 +43,7 @@ impl SharedCache {
/// to cache.
pub fn new(max_size: usize) -> Self {
SharedCache {
jump_destinations: Mutex::new(LruCache::new(INITIAL_CAPACITY)),
max_size: max_size * 8, // dealing with bits here.
cur_size: Mutex::new(0),
jump_destinations: Mutex::new(MemoryLruCache::new(max_size)),
}
}
@ -49,37 +54,11 @@ impl SharedCache {
}
if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) {
return d.clone();
return d.0.clone();
}
let d = Self::find_jump_destinations(code);
{
let mut cur_size = self.cur_size.lock();
*cur_size += d.capacity();
let mut jump_dests = self.jump_destinations.lock();
let cap = jump_dests.capacity();
// grow the cache as necessary; it operates on amount of items
// but we're working based on memory usage.
if jump_dests.len() == cap && *cur_size < self.max_size {
jump_dests.set_capacity(cap * 2);
}
// account for any element displaced from the cache.
if let Some(lru) = jump_dests.insert(code_hash.clone(), d.clone()) {
*cur_size -= lru.capacity();
}
// remove elements until we are below the memory target.
while *cur_size > self.max_size {
match jump_dests.remove_lru() {
Some((_, v)) => *cur_size -= v.capacity(),
_ => break,
}
}
}
self.jump_destinations.lock().insert(code_hash.clone(), Bits(d.clone()));
d
}

View File

@ -37,6 +37,5 @@ declare_test!{BlockchainTests_Homestead_bcUncleTest, "BlockchainTests/Homestead/
declare_test!{BlockchainTests_Homestead_bcValidBlockTest, "BlockchainTests/Homestead/bcValidBlockTest"}
declare_test!{BlockchainTests_Homestead_bcWalletTest, "BlockchainTests/Homestead/bcWalletTest"}
declare_test!{BlockchainTests_Homestead_bcShanghaiLove, "BlockchainTests/Homestead/bcShanghaiLove"}
// TODO [ToDr] uncomment as soon as eip150 tests are merged to develop branch of ethereum/tests
// declare_test!{BlockchainTests_Homestead_bcSuicideIssue, "BlockchainTests/Homestead/bcSuicideIssue"}
declare_test!{BlockchainTests_Homestead_bcSuicideIssue, "BlockchainTests/Homestead/bcSuicideIssue"}
declare_test!{BlockchainTests_Homestead_bcExploitTest, "BlockchainTests/Homestead/bcExploitTest"}

View File

@ -101,6 +101,7 @@ extern crate bit_set;
extern crate rlp;
extern crate ethcore_bloom_journal as bloom_journal;
extern crate byteorder;
extern crate transient_hashmap;
#[macro_use]
extern crate log;

View File

@ -154,7 +154,7 @@ impl OverlayRecentV7 {
// and commit the altered entries.
fn migrate_journal(&self, source: Arc<Database>, mut batch: Batch, dest: &mut Database) -> Result<(), Error> {
if let Some(val) = try!(source.get(None, V7_LATEST_ERA_KEY).map_err(Error::Custom)) {
try!(batch.insert(V7_LATEST_ERA_KEY.into(), val.to_owned(), dest));
try!(batch.insert(V7_LATEST_ERA_KEY.into(), val.clone().to_vec(), dest));
let mut era = decode::<u64>(&val);
loop {

View File

@ -61,7 +61,7 @@ pub fn generate_bloom(source: Arc<Database>, dest: &mut Database) -> Result<(),
let account_trie = try!(TrieDB::new(state_db.as_hashdb(), &state_root).map_err(|e| Error::Custom(format!("Cannot open trie: {:?}", e))));
for item in try!(account_trie.iter().map_err(|_| Error::MigrationImpossible)) {
let (ref account_key, _) = try!(item.map_err(|_| Error::MigrationImpossible));
let account_key_hash = H256::from_slice(&account_key);
let account_key_hash = H256::from_slice(account_key);
bloom.set(&*account_key_hash);
}

View File

@ -0,0 +1,339 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Banning Queue
//! Transaction Queue wrapper maintaining additional list of banned senders and contract hashes.
use std::time::Duration;
use std::ops::{Deref, DerefMut};
use std::cell::Cell;
use transaction::{SignedTransaction, Action};
use transient_hashmap::TransientHashMap;
use miner::{TransactionQueue, TransactionImportResult, TransactionOrigin, AccountDetails};
use error::{Error, TransactionError};
use util::{Uint, U256, H256, Address, Hashable};
type Count = u16;
/// Auto-Banning threshold
pub enum Threshold {
/// Should ban after given number of misbehaves reported.
BanAfter(Count),
/// Should never ban anything
NeverBan
}
impl Default for Threshold {
fn default() -> Self {
Threshold::NeverBan
}
}
/// Transaction queue with banlist.
pub struct BanningTransactionQueue {
queue: TransactionQueue,
ban_threshold: Threshold,
senders_bans: TransientHashMap<Address, Cell<Count>>,
recipients_bans: TransientHashMap<Address, Cell<Count>>,
codes_bans: TransientHashMap<H256, Cell<Count>>,
}
impl BanningTransactionQueue {
/// Creates new banlisting transaction queue
pub fn new(queue: TransactionQueue, ban_threshold: Threshold, ban_lifetime: Duration) -> Self {
let ban_lifetime_sec = ban_lifetime.as_secs();
assert!(ban_lifetime_sec > 0, "Lifetime has to be specified in seconds.");
BanningTransactionQueue {
queue: queue,
ban_threshold: ban_threshold,
senders_bans: TransientHashMap::new(ban_lifetime_sec),
recipients_bans: TransientHashMap::new(ban_lifetime_sec),
codes_bans: TransientHashMap::new(ban_lifetime_sec),
}
}
/// Borrows internal queue.
/// NOTE: you can insert transactions into the queue even
/// if they would otherwise be rejected because of a ban.
/// But you probably shouldn't.
pub fn queue(&mut self) -> &mut TransactionQueue {
&mut self.queue
}
/// Add to the queue taking bans into consideration.
/// May reject a transaction because of the banlist.
pub fn add_with_banlist<F, G>(
&mut self,
transaction: SignedTransaction,
account_details: &F,
gas_estimator: &G,
) -> Result<TransactionImportResult, Error> where
F: Fn(&Address) -> AccountDetails,
G: Fn(&SignedTransaction) -> U256,
{
if let Threshold::BanAfter(threshold) = self.ban_threshold {
// NOTE In all checks use direct query to avoid increasing ban timeout.
// Check sender
if let Ok(sender) = transaction.sender() {
let count = self.senders_bans.direct().get(&sender).map(|v| v.get()).unwrap_or(0);
if count > threshold {
debug!(target: "txqueue", "Ignoring transaction {:?} because sender is banned.", transaction.hash());
return Err(Error::Transaction(TransactionError::SenderBanned));
}
}
// Check recipient
if let Action::Call(recipient) = transaction.action {
let count = self.recipients_bans.direct().get(&recipient).map(|v| v.get()).unwrap_or(0);
if count > threshold {
debug!(target: "txqueue", "Ignoring transaction {:?} because recipient is banned.", transaction.hash());
return Err(Error::Transaction(TransactionError::RecipientBanned));
}
}
// Check code
if let Action::Create = transaction.action {
let code_hash = transaction.data.sha3();
let count = self.codes_bans.direct().get(&code_hash).map(|v| v.get()).unwrap_or(0);
if count > threshold {
debug!(target: "txqueue", "Ignoring transaction {:?} because code is banned.", transaction.hash());
return Err(Error::Transaction(TransactionError::CodeBanned));
}
}
}
self.queue.add(transaction, TransactionOrigin::External, account_details, gas_estimator)
}
/// Ban transaction with given hash.
/// Transaction has to be in the queue.
///
/// Bans sender and recipient/code and returns `true` when any ban has reached threshold.
pub fn ban_transaction(&mut self, hash: &H256) -> bool {
let transaction = self.queue.find(hash);
match transaction {
Some(transaction) => {
let sender = transaction.sender().expect("Transaction is in queue, so the sender is already validated; qed");
// Ban sender
let sender_banned = self.ban_sender(sender);
// Ban recipient and codehash
let recipient_or_code_banned = match transaction.action {
Action::Call(recipient) => {
self.ban_recipient(recipient)
},
Action::Create => {
self.ban_codehash(transaction.data.sha3())
},
};
sender_banned || recipient_or_code_banned
},
None => false,
}
}
/// Ban given sender.
/// If bans threshold is reached all subsequent transactions from this sender will be rejected.
/// Reaching bans threshold also removes all existing transactions from this sender that are already in the
/// queue.
fn ban_sender(&mut self, address: Address) -> bool {
let count = {
let mut count = self.senders_bans.entry(address).or_insert_with(|| Cell::new(0));
*count.get_mut() = count.get().saturating_add(1);
count.get()
};
match self.ban_threshold {
Threshold::BanAfter(threshold) if count > threshold => {
// Banlist the sender.
// Remove all transactions from the queue.
self.remove_all(address, !U256::zero());
true
},
_ => false
}
}
/// Ban given recipient.
/// If bans threshold is reached all subsequent transactions to this address will be rejected.
/// Returns true if bans threshold has been reached.
fn ban_recipient(&mut self, address: Address) -> bool {
let count = {
let mut count = self.recipients_bans.entry(address).or_insert_with(|| Cell::new(0));
*count.get_mut() = count.get().saturating_add(1);
count.get()
};
match self.ban_threshold {
// TODO [ToDr] Consider removing other transactions to the same recipient from the queue?
Threshold::BanAfter(threshold) if count > threshold => true,
_ => false
}
}
/// Ban given codehash.
/// If bans threshold is reached all subsequent transactions to contracts with this codehash will be rejected.
/// Returns true if bans threshold has been reached.
fn ban_codehash(&mut self, code_hash: H256) -> bool {
let mut count = self.codes_bans.entry(code_hash).or_insert_with(|| Cell::new(0));
*count.get_mut() = count.get().saturating_add(1);
match self.ban_threshold {
// TODO [ToDr] Consider removing other transactions with the same code from the queue?
Threshold::BanAfter(threshold) if count.get() > threshold => true,
_ => false,
}
}
}
impl Deref for BanningTransactionQueue {
type Target = TransactionQueue;
fn deref(&self) -> &Self::Target {
&self.queue
}
}
impl DerefMut for BanningTransactionQueue {
fn deref_mut(&mut self) -> &mut Self::Target {
self.queue()
}
}
#[cfg(test)]
mod tests {
use std::time::Duration;
use super::{BanningTransactionQueue, Threshold};
use ethkey::{Random, Generator};
use transaction::{Transaction, SignedTransaction, Action};
use error::{Error, TransactionError};
use client::TransactionImportResult;
use miner::{TransactionQueue, TransactionOrigin, AccountDetails};
use util::{Uint, U256, Address, FromHex, Hashable};
fn queue() -> BanningTransactionQueue {
BanningTransactionQueue::new(TransactionQueue::default(), Threshold::BanAfter(1), Duration::from_secs(180))
}
fn default_account_details(_address: &Address) -> AccountDetails {
AccountDetails {
nonce: U256::zero(),
balance: !U256::zero(),
}
}
fn gas_required(_tx: &SignedTransaction) -> U256 {
0.into()
}
fn transaction(action: Action) -> SignedTransaction {
let keypair = Random.generate().unwrap();
Transaction {
action: action,
value: U256::from(100),
data: "3331600055".from_hex().unwrap(),
gas: U256::from(100_000),
gas_price: U256::from(10),
nonce: U256::from(0),
}.sign(keypair.secret())
}
fn unwrap_err(res: Result<TransactionImportResult, Error>) -> TransactionError {
match res {
Err(Error::Transaction(e)) => e,
Ok(x) => panic!("Expected error, got: Ok({:?})", x),
Err(e) => panic!("Unexpected error type returned by queue: {:?}", e),
}
}
#[test]
fn should_allow_to_borrow_the_queue() {
// given
let tx = transaction(Action::Create);
let mut txq = queue();
// when
txq.queue().add(tx, TransactionOrigin::External, &default_account_details, &gas_required).unwrap();
// then
// should also deref to queue
assert_eq!(txq.status().pending, 1);
}
#[test]
fn should_not_accept_transactions_from_banned_sender() {
// given
let tx = transaction(Action::Create);
let mut txq = queue();
// Banlist once (threshold not reached)
let banlist1 = txq.ban_sender(tx.sender().unwrap());
assert!(!banlist1, "Threshold not reached yet.");
// Insert once
let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
assert_eq!(import1, TransactionImportResult::Current);
// when
let banlist2 = txq.ban_sender(tx.sender().unwrap());
let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);
// then
assert!(banlist2, "Threshold should be reached - banned.");
assert_eq!(unwrap_err(import2), TransactionError::SenderBanned);
// Should also remove the transaction from the queue
assert_eq!(txq.find(&tx.hash()), None);
}
#[test]
fn should_not_accept_transactions_to_banned_recipient() {
// given
let recipient = Address::default();
let tx = transaction(Action::Call(recipient));
let mut txq = queue();
// Banlist once (threshold not reached)
let banlist1 = txq.ban_recipient(recipient);
assert!(!banlist1, "Threshold not reached yet.");
// Insert once
let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
assert_eq!(import1, TransactionImportResult::Current);
// when
let banlist2 = txq.ban_recipient(recipient);
let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);
// then
assert!(banlist2, "Threshold should be reached - banned.");
assert_eq!(unwrap_err(import2), TransactionError::RecipientBanned);
}
#[test]
fn should_not_accept_transactions_with_banned_code() {
// given
let tx = transaction(Action::Create);
let codehash = tx.data.sha3();
let mut txq = queue();
// Banlist once (threshold not reached)
let banlist1 = txq.ban_codehash(codehash);
assert!(!banlist1, "Threshold not reached yet.");
// Insert once
let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
assert_eq!(import1, TransactionImportResult::Current);
// when
let banlist2 = txq.ban_codehash(codehash);
let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);
// then
assert!(banlist2, "Threshold should be reached - banned.");
assert_eq!(unwrap_err(import2), TransactionError::CodeBanned);
}
}

View File

@ -31,6 +31,7 @@ use receipt::{Receipt, RichReceipt};
use spec::Spec;
use engines::Engine;
use miner::{MinerService, MinerStatus, TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin};
use miner::banning_queue::{BanningTransactionQueue, Threshold};
use miner::work_notify::WorkPoster;
use client::TransactionImportResult;
use miner::price_info::PriceInfo;
@ -59,6 +60,22 @@ pub enum GasLimit {
Fixed(U256),
}
/// Transaction queue banning settings.
#[derive(Debug, PartialEq, Clone)]
pub enum Banning {
/// Banning in transaction queue is disabled
Disabled,
/// Banning in transaction queue is enabled
Enabled {
/// Upper limit of transaction processing time before banning.
offend_threshold: Duration,
/// Number of similar offending transactions before banning.
min_offends: u16,
/// Duration for which the offender is banned.
ban_duration: Duration,
},
}
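
An illustrative sketch (hypothetical helper, arbitrary values) of populating the new banning settings; the field names come from the enum above:

fn example_banning(enable: bool) -> Banning {
    use std::time::Duration;
    if enable {
        Banning::Enabled {
            // Ban once a transaction takes longer than this to execute...
            offend_threshold: Duration::from_millis(50),
            // ...and the same party has offended at least this many times...
            min_offends: 3,
            // ...keeping the ban in force for this long.
            ban_duration: Duration::from_secs(180),
        }
    } else {
        Banning::Disabled
    }
}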
/// Configures the behaviour of the miner.
#[derive(Debug, PartialEq)]
pub struct MinerOptions {
@ -86,6 +103,8 @@ pub struct MinerOptions {
pub enable_resubmission: bool,
/// Global gas limit for all transaction in the queue except for local and retracted.
pub tx_queue_gas_limit: GasLimit,
/// Banning settings
pub tx_queue_banning: Banning,
}
impl Default for MinerOptions {
@ -98,11 +117,12 @@ impl Default for MinerOptions {
tx_gas_limit: !U256::zero(),
tx_queue_size: 1024,
tx_queue_gas_limit: GasLimit::Auto,
tx_queue_strategy: PrioritizationStrategy::GasFactorAndGasPrice,
tx_queue_strategy: PrioritizationStrategy::GasPriceOnly,
pending_set: PendingSet::AlwaysQueue,
reseal_min_period: Duration::from_secs(2),
work_queue_size: 20,
enable_resubmission: true,
tx_queue_banning: Banning::Disabled,
}
}
}
@ -186,7 +206,7 @@ struct SealingWork {
/// Handles preparing work for "work sealing" or seals "internally" if Engine does not require work.
pub struct Miner {
// NOTE [ToDr] When locking always lock in this order!
transaction_queue: Arc<Mutex<TransactionQueue>>,
transaction_queue: Arc<Mutex<BanningTransactionQueue>>,
sealing_work: Mutex<SealingWork>,
next_allowed_reseal: Mutex<Instant>,
sealing_block_last_request: Mutex<u64>,
@ -215,11 +235,18 @@ impl Miner {
GasLimit::Fixed(ref limit) => *limit,
_ => !U256::zero(),
};
let txq = Arc::new(Mutex::new(TransactionQueue::with_limits(
options.tx_queue_strategy, options.tx_queue_size, gas_limit, options.tx_gas_limit
)));
let txq = TransactionQueue::with_limits(options.tx_queue_strategy, options.tx_queue_size, gas_limit, options.tx_gas_limit);
let txq = match options.tx_queue_banning {
Banning::Disabled => BanningTransactionQueue::new(txq, Threshold::NeverBan, Duration::from_secs(180)),
Banning::Enabled { ban_duration, min_offends, .. } => BanningTransactionQueue::new(
txq,
Threshold::BanAfter(min_offends),
ban_duration,
),
};
Miner {
transaction_queue: txq,
transaction_queue: Arc::new(Mutex::new(txq)),
next_allowed_reseal: Mutex::new(Instant::now()),
sealing_block_last_request: Mutex::new(0),
sealing_work: Mutex::new(SealingWork{
@ -318,10 +345,31 @@ impl Miner {
let mut invalid_transactions = HashSet::new();
let mut transactions_to_penalize = HashSet::new();
let block_number = open_block.block().fields().header.number();
// TODO: push new uncles, too.
// TODO Push new uncles too.
for tx in transactions {
let hash = tx.hash();
match open_block.push_transaction(tx, None) {
let start = Instant::now();
let result = open_block.push_transaction(tx, None);
let took = start.elapsed();
// Check for heavy transactions
match self.options.tx_queue_banning {
Banning::Enabled { ref offend_threshold, .. } if &took > offend_threshold => {
match self.transaction_queue.lock().ban_transaction(&hash) {
true => {
warn!(target: "miner", "Detected heavy transaction. Banning the sender and recipient/code.");
},
false => {
transactions_to_penalize.insert(hash);
debug!(target: "miner", "Detected heavy transaction. Penalizing sender.")
}
}
},
_ => {},
}
match result {
Err(Error::Execution(ExecutionError::BlockGasLimitReached { gas_limit, gas_used, gas })) => {
debug!(target: "miner", "Skipping adding transaction to block because of gas limit: {:?} (limit: {:?}, used: {:?}, gas: {:?})", hash, gas_limit, gas_used, gas);
@ -362,7 +410,7 @@ impl Miner {
{
let mut queue = self.transaction_queue.lock();
for hash in invalid_transactions.into_iter() {
for hash in invalid_transactions {
queue.remove_invalid(&hash, &fetch_account);
}
for hash in transactions_to_penalize {
@ -506,7 +554,7 @@ impl Miner {
prepare_new
}
fn add_transactions_to_queue(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, origin: TransactionOrigin, transaction_queue: &mut TransactionQueue) ->
fn add_transactions_to_queue(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, origin: TransactionOrigin, transaction_queue: &mut BanningTransactionQueue) ->
Vec<Result<TransactionImportResult, Error>> {
let fetch_account = |a: &Address| AccountDetails {
@ -514,14 +562,25 @@ impl Miner {
balance: chain.latest_balance(a),
};
let schedule = chain.latest_schedule();
let gas_required = |tx: &SignedTransaction| tx.gas_required(&schedule).into();
transactions.into_iter()
.map(|tx| transaction_queue.add(tx, &fetch_account, origin))
.map(|tx| match origin {
TransactionOrigin::Local | TransactionOrigin::RetractedBlock => {
transaction_queue.add(tx, origin, &fetch_account, &gas_required)
},
TransactionOrigin::External => {
transaction_queue.add_with_banlist(tx, &fetch_account, &gas_required)
}
})
.collect()
}
/// Are we allowed to do a non-mandatory reseal?
fn tx_reseal_allowed(&self) -> bool { Instant::now() > *self.next_allowed_reseal.lock() }
#[cfg_attr(feature="dev", allow(wrong_self_convention))]
#[cfg_attr(feature="dev", allow(redundant_closure))]
fn from_pending_block<H, F, G>(&self, latest_block_number: BlockNumber, from_chain: F, map_block: G) -> H
where F: Fn() -> H, G: Fn(&ClosedBlock) -> H {
let sealing_work = self.sealing_work.lock();
@ -885,7 +944,7 @@ impl MinerService for Miner {
fn pending_receipts(&self, best_block: BlockNumber) -> BTreeMap<H256, Receipt> {
self.from_pending_block(
best_block,
|| BTreeMap::new(),
BTreeMap::new,
|pending| {
let hashes = pending.transactions()
.iter()
@ -1019,7 +1078,7 @@ impl MinerService for Miner {
tx.sender().expect("Transaction is in block, so sender has to be defined.")
})
.collect::<HashSet<Address>>();
for sender in to_remove.into_iter() {
for sender in to_remove {
transaction_queue.remove_all(sender, chain.latest_nonce(&sender));
}
});
@ -1097,6 +1156,7 @@ mod tests {
pending_set: PendingSet::AlwaysSealing,
work_queue_size: 5,
enable_resubmission: true,
tx_queue_banning: Banning::Disabled,
},
GasPricer::new_fixed(0u64.into()),
&Spec::new_test(),

View File

@ -44,11 +44,12 @@
mod miner;
mod external;
mod transaction_queue;
mod banning_queue;
mod work_notify;
mod price_info;
pub use self::transaction_queue::{TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin};
pub use self::miner::{Miner, MinerOptions, PendingSet, GasPricer, GasPriceCalibratorOptions, GasLimit};
pub use self::miner::{Miner, MinerOptions, Banning, PendingSet, GasPricer, GasPriceCalibratorOptions, GasLimit};
pub use self::external::{ExternalMiner, ExternalMinerService};
pub use client::TransactionImportResult;
@ -157,7 +158,7 @@ pub trait MinerService : Send + Sync {
fn is_sealing(&self) -> bool;
/// Suggested gas price.
fn sensible_gas_price(&self) -> U256 { 20000000000u64.into() }
fn sensible_gas_price(&self) -> U256;
/// Suggested gas limit.
fn sensible_gas_limit(&self) -> U256 { 21000.into() }

View File

@ -48,10 +48,11 @@
//! nonce: U256::from(10),
//! balance: U256::from(1_000_000),
//! };
//! let gas_estimator = |_tx: &SignedTransaction| 2.into();
//!
//! let mut txq = TransactionQueue::default();
//! txq.add(st2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
//! txq.add(st1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
//! txq.add(st2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
//! txq.add(st1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
//!
//! // Check status
//! assert_eq!(txq.status().pending, 2);
@ -109,6 +110,7 @@ impl PartialOrd for TransactionOrigin {
}
impl Ord for TransactionOrigin {
#[cfg_attr(feature="dev", allow(match_same_arms))]
fn cmp(&self, other: &TransactionOrigin) -> Ordering {
if *other == *self {
return Ordering::Equal;
@ -446,6 +448,7 @@ pub struct AccountDetails {
const GAS_LIMIT_HYSTERESIS: usize = 10; // (100/GAS_LIMIT_HYSTERESIS) %
/// Describes the strategy used to prioritize transactions in the queue.
#[cfg_attr(feature="dev", allow(enum_variant_names))]
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PrioritizationStrategy {
/// Use only gas price. Disregards the actual computation cost of the transaction.
@ -592,9 +595,20 @@ impl TransactionQueue {
}
}
/// Add signed transaction to queue to be verified and imported
pub fn add<T>(&mut self, tx: SignedTransaction, fetch_account: &T, origin: TransactionOrigin) -> Result<TransactionImportResult, Error>
where T: Fn(&Address) -> AccountDetails {
/// Add signed transaction to queue to be verified and imported.
///
/// NOTE fetch_account and gas_estimator should be cheap to compute
/// otherwise it might open up an attack vector.
pub fn add<F, G>(
&mut self,
tx: SignedTransaction,
origin: TransactionOrigin,
fetch_account: &F,
gas_estimator: &G,
) -> Result<TransactionImportResult, Error> where
F: Fn(&Address) -> AccountDetails,
G: Fn(&SignedTransaction) -> U256,
{
if tx.gas_price < self.minimal_gas_price && origin != TransactionOrigin::Local {
trace!(target: "txqueue",
@ -625,8 +639,6 @@ impl TransactionQueue {
}));
}
try!(tx.check_low_s());
if tx.gas > self.gas_limit || tx.gas > self.tx_gas_limit {
trace!(target: "txqueue",
"Dropping transaction above gas limit: {:?} ({} > min({}, {}))",
@ -642,6 +654,24 @@ impl TransactionQueue {
}));
}
let minimal_gas = gas_estimator(&tx);
if tx.gas < minimal_gas {
trace!(target: "txqueue",
"Dropping transaction with insufficient gas: {:?} ({} > {})",
tx.hash(),
tx.gas,
minimal_gas,
);
return Err(Error::Transaction(TransactionError::InsufficientGas {
minimal: minimal_gas,
got: tx.gas,
}));
}
// Verify signature
try!(tx.check_low_s());
let vtx = try!(VerifiedTransaction::new(tx, origin));
let client_account = fetch_account(&vtx.sender());
@ -904,16 +934,6 @@ impl TransactionQueue {
let nonce = tx.nonce();
let hash = tx.hash();
{
// Rough size sanity check
let gas = &tx.transaction.gas;
if U256::from(tx.transaction.data.len()) > *gas {
// Dropping transaction
trace!(target: "txqueue", "Dropping oversized transaction: {:?} (gas: {} < size {})", hash, gas, tx.transaction.data.len());
return Err(TransactionError::LimitReached);
}
}
// The transaction might be old, let's check that.
// This has to be the first test, otherwise calculating
// nonce height would result in overflow.
@ -1103,6 +1123,10 @@ mod test {
}
}
fn gas_estimator(_tx: &SignedTransaction) -> U256 {
U256::zero()
}
fn new_tx_pair(nonce: U256, gas_price: U256, nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) {
let tx1 = new_unsigned_tx(nonce, default_gas_val(), gas_price);
let tx2 = new_unsigned_tx(nonce + nonce_increment, default_gas_val(), gas_price + gas_price_increment);
@ -1154,14 +1178,14 @@ mod test {
let (tx1, tx2) = new_tx_pair(123.into(), 1.into(), 1.into(), 0.into());
let sender = tx1.sender().unwrap();
let nonce = tx1.nonce;
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2);
assert_eq!(txq.last_nonce(&sender), Some(nonce + 1.into()));
// when
let tx = new_tx(123.into(), 1.into());
let res = txq.add(tx.clone(), &default_account_details, TransactionOrigin::External);
let res = txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
// No longer the case as we don't even consider a transaction that isn't above a full
@ -1317,12 +1341,12 @@ mod test {
!U256::zero() };
// First insert one transaction to future
let res = txq.add(tx, &prev_nonce, TransactionOrigin::External);
let res = txq.add(tx, TransactionOrigin::External, &prev_nonce, &gas_estimator);
assert_eq!(res.unwrap(), TransactionImportResult::Future);
assert_eq!(txq.status().future, 1);
// now import second transaction to current
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External);
let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// and then there should be only one transaction in current (the one with higher gas_price)
assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1342,12 +1366,12 @@ mod test {
!U256::zero() };
// First insert one transaction to future
let res = txq.add(tx.clone(), &prev_nonce, TransactionOrigin::External);
let res = txq.add(tx.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator);
assert_eq!(res.unwrap(), TransactionImportResult::Future);
assert_eq!(txq.status().future, 1);
// now import second transaction to current
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External);
let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1366,7 +1390,7 @@ mod test {
let tx = new_tx_default();
// when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External);
let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1385,10 +1409,10 @@ mod test {
txq.set_minimal_gas_price(15.into());
// when
let res1 = txq.add(tx1, &default_account_details, TransactionOrigin::External);
let res2 = txq.add(tx2, &default_account_details, TransactionOrigin::External);
let res3 = txq.add(tx3, &default_account_details, TransactionOrigin::External);
let res4 = txq.add(tx4, &default_account_details, TransactionOrigin::External);
let res1 = txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res3 = txq.add(tx3, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res4 = txq.add(tx4, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
assert_eq!(res1.unwrap(), TransactionImportResult::Current);
@ -1419,10 +1443,10 @@ mod test {
txq.set_minimal_gas_price(15.into());
// when
let res1 = txq.add(tx1, &default_account_details, TransactionOrigin::External);
let res2 = txq.add(tx2, &default_account_details, TransactionOrigin::External);
let res3 = txq.add(tx3, &default_account_details, TransactionOrigin::External);
let res4 = txq.add(tx4, &default_account_details, TransactionOrigin::External);
let res1 = txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res3 = txq.add(tx3, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res4 = txq.add(tx4, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
assert_eq!(res1.unwrap(), TransactionImportResult::Current);
@ -1465,7 +1489,7 @@ mod test {
txq.set_gas_limit(limit);
// when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External);
let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
assert_eq!(unwrap_tx_err(res), TransactionError::GasLimitExceeded {
@ -1489,7 +1513,7 @@ mod test {
};
// when
let res = txq.add(tx, &account, TransactionOrigin::External);
let res = txq.add(tx, TransactionOrigin::External, &account, &gas_estimator);
// then
assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientBalance {
@ -1509,7 +1533,7 @@ mod test {
txq.set_minimal_gas_price(tx.gas_price + U256::one());
// when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External);
let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice {
@ -1529,7 +1553,7 @@ mod test {
txq.set_minimal_gas_price(tx.gas_price + U256::one());
// when
let res = txq.add(tx, &default_account_details, TransactionOrigin::Local);
let res = txq.add(tx, TransactionOrigin::Local, &default_account_details, &gas_estimator);
// then
assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1559,7 +1583,7 @@ mod test {
rlp::decode(s.as_raw())
};
// when
let res = txq.add(stx, &default_account_details, TransactionOrigin::External);
let res = txq.add(stx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
assert!(res.is_err());
@ -1573,8 +1597,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then
let top = txq.top_transactions();
@ -1593,9 +1617,9 @@ mod test {
// when
// first insert the one with higher gas price
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then the one with lower gas price, but local
txq.add(tx.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then
let top = txq.top_transactions();
@ -1614,9 +1638,9 @@ mod test {
// when
// first insert local one with higher gas price
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then the one with lower gas price, but from retracted block
txq.add(tx.clone(), &default_account_details, TransactionOrigin::RetractedBlock).unwrap();
txq.add(tx.clone(), TransactionOrigin::RetractedBlock, &default_account_details, &gas_estimator).unwrap();
// then
let top = txq.top_transactions();
@ -1632,8 +1656,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then
let top = txq.top_transactions();
@ -1652,10 +1676,10 @@ mod test {
let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into());
// insert everything
txq.add(txa.clone(), &prev_nonce, TransactionOrigin::External).unwrap();
txq.add(txb.clone(), &prev_nonce, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), &prev_nonce, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &prev_nonce, TransactionOrigin::External).unwrap();
txq.add(txa.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(txb.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx1.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 4);
@ -1681,10 +1705,10 @@ mod test {
let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into());
// insert everything
txq.add(txa.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(txb.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(txa.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(txb.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let top = txq.top_transactions();
assert_eq!(top[0], tx1);
@ -1713,8 +1737,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then
let top = txq.pending_hashes();
@ -1731,8 +1755,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(2.into(), 0.into());
// when
let res1 = txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
let res2 = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
let res1 = txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let res2 = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then
assert_eq!(res1, TransactionImportResult::Current);
@ -1755,8 +1779,8 @@ mod test {
let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx.clone(), &prev_nonce, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &prev_nonce, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 2);
// when
@ -1778,13 +1802,13 @@ mod test {
let tx1 = new_unsigned_tx(124.into(), default_gas_val(), 1.into()).sign(secret);
let tx2 = new_unsigned_tx(125.into(), default_gas_val(), 1.into()).sign(secret);
txq.add(tx, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 1);
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1);
// when
txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then
let stats = txq.status();
@ -1800,8 +1824,8 @@ mod test {
// given
let mut txq2 = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(3.into(), 0.into());
txq2.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq2.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq2.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq2.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq2.status().pending, 1);
assert_eq!(txq2.status().future, 1);
@ -1822,10 +1846,10 @@ mod test {
let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx3 = new_tx_default();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1);
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 3);
// when
@ -1844,8 +1868,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// add
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let stats = txq.status();
assert_eq!(stats.pending, 2);
@ -1864,11 +1888,11 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let sender = tx.sender().unwrap();
let nonce = tx.nonce;
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 1);
// when
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External);
let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then
let t = txq.top_transactions();
@ -1885,14 +1909,14 @@ mod test {
txq.current.set_limit(10);
let (tx1, tx2) = new_tx_pair_default(4.into(), 1.into());
let (tx3, tx4) = new_tx_pair_default(4.into(), 2.into());
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2);
// when
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1);
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx4.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then
assert_eq!(txq.status().future, 1);
@ -1903,11 +1927,11 @@ mod test {
let mut txq = TransactionQueue::with_limits(PrioritizationStrategy::GasPriceOnly, 100, default_gas_val() * U256::from(2), !U256::zero());
let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1));
let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2));
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// limited by gas
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::External).unwrap_err();
txq.add(tx4.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap_err();
assert_eq!(txq.status().pending, 2);
}
@ -1917,13 +1941,13 @@ mod test {
let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1));
let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2));
let (tx5, tx6) = new_tx_pair_default(U256::from(1), U256::from(2));
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx5.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx5.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// Not accepted because of limit
txq.add(tx6.clone(), &default_account_details, TransactionOrigin::External).unwrap_err();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx6.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap_err();
txq.add(tx3.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx4.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 4);
}
@ -1935,7 +1959,7 @@ mod test {
let fetch_last_nonce = |_a: &Address| AccountDetails { nonce: last_nonce, balance: !U256::zero() };
// when
let res = txq.add(tx, &fetch_last_nonce, TransactionOrigin::External);
let res = txq.add(tx, TransactionOrigin::External, &fetch_last_nonce, &gas_estimator);
// then
assert_eq!(unwrap_tx_err(res), TransactionError::Old);
@ -1951,12 +1975,12 @@ mod test {
balance: !U256::zero() };
let mut txq = TransactionQueue::default();
let (_tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1);
assert_eq!(txq.status().pending, 0);
// when
let res = txq.add(tx2.clone(), &nonce, TransactionOrigin::External);
let res = txq.add(tx2.clone(), TransactionOrigin::External, &nonce, &gas_estimator);
// then
assert_eq!(unwrap_tx_err(res), TransactionError::AlreadyImported);
@ -1970,15 +1994,15 @@ mod test {
// given
let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2);
// when
txq.remove_invalid(&tx1.hash(), &default_account_details);
assert_eq!(txq.status().pending, 0);
assert_eq!(txq.status().future, 1);
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then
let stats = txq.status();
@ -1992,10 +2016,10 @@ mod test {
let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx3 = new_tx_default();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1);
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 3);
// when
@ -2022,8 +2046,8 @@ mod test {
};
// when
txq.add(tx, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then
let stats = txq.status();
@ -2050,10 +2074,10 @@ mod test {
};
// when
txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1);
txq.add(tx0, &default_account_details, TransactionOrigin::External).unwrap();
txq.add(tx0, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then
let stats = txq.status();
@ -2071,8 +2095,8 @@ mod test {
!U256::zero() };
let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx1.clone(), &previous_nonce, TransactionOrigin::External).unwrap();
txq.add(tx2, &previous_nonce, TransactionOrigin::External).unwrap();
txq.add(tx1.clone(), TransactionOrigin::External, &previous_nonce, &gas_estimator).unwrap();
txq.add(tx2, TransactionOrigin::External, &previous_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 2);
// when
@ -2103,7 +2127,7 @@ mod test {
let details = |_a: &Address| AccountDetails { nonce: nonce, balance: !U256::zero() };
// when
txq.add(tx, &details, TransactionOrigin::External).unwrap();
txq.add(tx, TransactionOrigin::External, &details, &gas_estimator).unwrap();
// then
assert_eq!(txq.last_nonce(&from), Some(nonce));
@ -2118,7 +2142,7 @@ mod test {
let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() };
// Insert first transaction
txq.add(tx1, &details1, TransactionOrigin::External).unwrap();
txq.add(tx1, TransactionOrigin::External, &details1, &gas_estimator).unwrap();
// when
txq.remove_all(tx2.sender().unwrap(), nonce2 + U256::one());
@ -2138,9 +2162,9 @@ mod test {
// when
// Insert first transaction
assert_eq!(txq.add(tx1, &details1, TransactionOrigin::External).unwrap(), TransactionImportResult::Current);
assert_eq!(txq.add(tx1, TransactionOrigin::External, &details1, &gas_estimator).unwrap(), TransactionImportResult::Current);
// Second should go to future
assert_eq!(txq.add(tx2, &details1, TransactionOrigin::External).unwrap(), TransactionImportResult::Future);
assert_eq!(txq.add(tx2, TransactionOrigin::External, &details1, &gas_estimator).unwrap(), TransactionImportResult::Future);
// Now block is imported
txq.remove_all(sender, nonce2 - U256::from(1));
// tx2 should be not be promoted to current
@ -2159,9 +2183,9 @@ mod test {
assert_eq!(txq.has_local_pending_transactions(), false);
// when
assert_eq!(txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap(), TransactionImportResult::Current);
assert_eq!(txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap(), TransactionImportResult::Current);
assert_eq!(txq.has_local_pending_transactions(), false);
assert_eq!(txq.add(tx2, &default_account_details, TransactionOrigin::Local).unwrap(), TransactionImportResult::Current);
assert_eq!(txq.add(tx2, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap(), TransactionImportResult::Current);
// then
assert_eq!(txq.has_local_pending_transactions(), true);
@ -2176,8 +2200,8 @@ mod test {
default_account_details(a).balance };
// when
assert_eq!(txq.add(tx2, &prev_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Future);
assert_eq!(txq.add(tx1.clone(), &prev_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Future);
assert_eq!(txq.add(tx2, TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap(), TransactionImportResult::Future);
assert_eq!(txq.add(tx1.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap(), TransactionImportResult::Future);
// then
assert_eq!(txq.future.by_priority.len(), 1);
@ -2202,14 +2226,14 @@ mod test {
(tx.sign(secret), tx2.sign(secret), tx2_2.sign(secret), tx3.sign(secret))
};
let sender = tx1.sender().unwrap();
txq.add(tx1, &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx3, &default_account_details, TransactionOrigin::Local).unwrap();
txq.add(tx1, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.future.by_priority.len(), 0);
assert_eq!(txq.current.by_priority.len(), 3);
// when
let res = txq.add(tx2_2, &default_account_details, TransactionOrigin::Local);
let res = txq.add(tx2_2, TransactionOrigin::Local, &default_account_details, &gas_estimator);
// then
assert_eq!(txq.last_nonce(&sender).unwrap(), 125.into());
@ -2217,4 +2241,24 @@ mod test {
assert_eq!(txq.current.by_priority.len(), 3);
}
#[test]
fn should_reject_transactions_below_base_gas() {
// given
let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
let high_gas = |_: &SignedTransaction| 100_001.into();
// when
let res1 = txq.add(tx1, TransactionOrigin::Local, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, TransactionOrigin::Local, &default_account_details, &high_gas);
// then
assert_eq!(res1.unwrap(), TransactionImportResult::Current);
assert_eq!(unwrap_tx_err(res2), TransactionError::InsufficientGas {
minimal: 100_001.into(),
got: 100_000.into(),
});
}
}
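For orientation, here is a minimal caller-side sketch of the reworked `add` signature, which now takes the transaction origin before the account fetcher plus a gas-estimator closure. This is not part of the patch: the 21 000 base cost and the 68-gas-per-byte figure are assumptions used purely for illustration, and `client`, `txq` and `signed_tx` stand in for bindings the caller already has.
// Hypothetical wiring of the new TransactionQueue::add signature.
let fetch_account = |a: &Address| AccountDetails {
	nonce: client.latest_nonce(a),      // assumed accessor on the client
	balance: client.latest_balance(a),  // assumed accessor on the client
};
// Rough minimal-gas estimate: base transaction cost plus a per-byte data cost (illustrative constants).
let gas_estimator = |tx: &SignedTransaction| {
	U256::from(21_000) + U256::from(tx.data.len()) * U256::from(68)
};
let res = txq.add(signed_tx, TransactionOrigin::Local, &fetch_account, &gas_estimator);
A transaction whose `gas` falls below the closure's estimate is now rejected with `TransactionError::InsufficientGas`, as exercised by the test above.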

View File

@ -87,7 +87,7 @@ impl ClientService {
db_config.set_cache(::db::COL_STATE, size);
}
db_config.compaction = config.db_compaction.compaction_profile(&client_path);
db_config.compaction = config.db_compaction.compaction_profile(client_path);
db_config.wal = config.db_wal;
let pruning = config.pruning;
@ -188,6 +188,8 @@ impl IoHandler<ClientIoMessage> for ClientIoHandler {
#[cfg_attr(feature="dev", allow(single_match))]
fn message(&self, _io: &IoContext<ClientIoMessage>, net_message: &ClientIoMessage) {
use std::thread;
match *net_message {
ClientIoMessage::BlockVerified => { self.client.import_verified_blocks(); }
ClientIoMessage::NewTransactions(ref transactions) => { self.client.import_queued_transactions(transactions); }
@ -199,9 +201,19 @@ impl IoHandler<ClientIoMessage> for ClientIoHandler {
ClientIoMessage::FeedStateChunk(ref hash, ref chunk) => self.snapshot.feed_state_chunk(*hash, chunk),
ClientIoMessage::FeedBlockChunk(ref hash, ref chunk) => self.snapshot.feed_block_chunk(*hash, chunk),
ClientIoMessage::TakeSnapshot(num) => {
if let Err(e) = self.snapshot.take_snapshot(&*self.client, num) {
warn!("Failed to take snapshot at block #{}: {}", num, e);
let client = self.client.clone();
let snapshot = self.snapshot.clone();
let res = thread::Builder::new().name("Periodic Snapshot".into()).spawn(move || {
if let Err(e) = snapshot.take_snapshot(&*client, num) {
warn!("Failed to take snapshot at block #{}: {}", num, e);
}
});
if let Err(e) = res {
debug!(target: "snapshot", "Failed to initialize periodic snapshot thread: {:?}", e);
}
}
_ => {} // ignore other messages
}

View File

@ -19,7 +19,7 @@
use account_db::{AccountDB, AccountDBMut};
use snapshot::Error;
use util::{U256, FixedHash, H256, Bytes, HashDB, SHA3_EMPTY, SHA3_NULL_RLP};
use util::{U256, FixedHash, H256, Bytes, HashDB, DBValue, SHA3_EMPTY, SHA3_NULL_RLP};
use util::trie::{TrieDB, Trie};
use rlp::{Rlp, RlpStream, Stream, UntrustedRlp, View};
@ -112,7 +112,7 @@ impl Account {
let mut stream = RlpStream::new_list(pairs.len());
for (k, v) in pairs {
stream.begin_list(2).append(&k).append(&v);
stream.begin_list(2).append(&k).append(&&*v);
}
let pairs_rlp = stream.out();
@ -130,7 +130,7 @@ impl Account {
match acct_db.get(&self.code_hash) {
Some(c) => {
used_code.insert(self.code_hash.clone());
account_stream.append(&CodeState::Inline.raw()).append(&c);
account_stream.append(&CodeState::Inline.raw()).append(&&*c);
}
None => {
warn!("code lookup failed during snapshot");
@ -178,7 +178,7 @@ impl Account {
CodeState::Hash => {
let code_hash = try!(rlp.val_at(3));
if let Some(code) = code_map.get(&code_hash) {
acct_db.emplace(code_hash.clone(), code.clone());
acct_db.emplace(code_hash.clone(), DBValue::from_slice(code));
}
(code_hash, None)
@ -226,7 +226,7 @@ mod tests {
use snapshot::tests::helpers::fill_storage;
use util::sha3::{SHA3_EMPTY, SHA3_NULL_RLP};
use util::{Address, FixedHash, H256, HashDB};
use util::{Address, FixedHash, H256, HashDB, DBValue};
use rlp::{UntrustedRlp, View};
use std::collections::{HashSet, HashMap};
@ -292,7 +292,7 @@ mod tests {
{
let mut acct_db = AccountDBMut::new(db.as_hashdb_mut(), &addr2);
acct_db.emplace(code_hash.clone(), b"this is definitely code".to_vec());
acct_db.emplace(code_hash.clone(), DBValue::from_slice(b"this is definitely code"));
}
let account1 = Account {

View File

@ -33,6 +33,12 @@ pub enum Error {
BlockNotFound(H256),
/// Incomplete chain.
IncompleteChain,
/// Best block has wrong state root.
WrongStateRoot(H256, H256),
/// Wrong block hash.
WrongBlockHash(u64, H256, H256),
/// Too many blocks contained within the snapshot.
TooManyBlocks(u64, u64),
/// Old starting block in a pruned database.
OldBlockPrunedDB,
/// Missing code.
@ -52,7 +58,11 @@ impl fmt::Display for Error {
match *self {
Error::InvalidStartingBlock(ref id) => write!(f, "Invalid starting block: {:?}", id),
Error::BlockNotFound(ref hash) => write!(f, "Block not found in chain: {}", hash),
Error::IncompleteChain => write!(f, "Cannot create snapshot due to incomplete chain."),
Error::IncompleteChain => write!(f, "Incomplete blockchain."),
Error::WrongStateRoot(ref expected, ref found) => write!(f, "Final block has wrong state root. Expected {:?}, got {:?}", expected, found),
Error::WrongBlockHash(ref num, ref expected, ref found) =>
write!(f, "Block {} had wrong hash. expected {:?}, got {:?}", num, expected, found),
Error::TooManyBlocks(ref expected, ref found) => write!(f, "Snapshot contained too many blocks. Expected {}, got {}", expected, found),
Error::OldBlockPrunedDB => write!(f, "Attempted to create a snapshot at an old block while using \
a pruned database. Please re-run with the --pruning archive flag."),
Error::MissingCode(ref missing) => write!(f, "Incomplete snapshot: {} contract codes not found.", missing.len()),

View File

@ -26,11 +26,11 @@ use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use account_db::{AccountDB, AccountDBMut};
use blockchain::{BlockChain, BlockProvider};
use engines::Engine;
use header::Header;
use ids::BlockID;
use views::BlockView;
use util::{Bytes, Hashable, HashDB, snappy, U256, Uint};
use util::memorydb::MemoryDB;
use util::{Bytes, Hashable, HashDB, DBValue, snappy, U256, Uint};
use util::Mutex;
use util::hash::{FixedHash, H256};
use util::journaldb::{self, Algorithm, JournalDB};
@ -47,7 +47,7 @@ use self::io::SnapshotWriter;
use super::state_db::StateDB;
use super::state::Account as StateAccount;
use crossbeam::{scope, ScopedJoinHandle};
use crossbeam::scope;
use rand::{Rng, OsRng};
pub use self::error::Error;
@ -203,7 +203,7 @@ impl<'a> BlockChunker<'a> {
// cut off the chunk if too large.
if new_loaded_size > PREFERRED_CHUNK_SIZE && self.rlps.len() > 0 {
if new_loaded_size > PREFERRED_CHUNK_SIZE && !self.rlps.is_empty() {
try!(self.write_chunk(last));
loaded_size = pair.len();
} else {
@ -369,7 +369,7 @@ pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter +
// account_key here is the address' hash.
for item in try!(account_trie.iter()) {
let (account_key, account_data) = try!(item);
let account = Account::from_thin_rlp(account_data);
let account = Account::from_thin_rlp(&*account_data);
let account_key_hash = H256::from_slice(&account_key);
let account_db = AccountDB::from_hash(db, account_key_hash);
@ -421,43 +421,19 @@ impl StateRebuilder {
// new code contained within this chunk.
let mut chunk_code = HashMap::new();
// build account tries in parallel.
// Todo [rob] keep a thread pool around so we don't do this per-chunk.
try!(scope(|scope| {
let mut handles = Vec::new();
for (account_chunk, out_pairs_chunk) in account_fat_rlps.chunks(chunk_size).zip(pairs.chunks_mut(chunk_size)) {
let code_map = &self.code_map;
let handle: ScopedJoinHandle<Result<_, ::error::Error>> = scope.spawn(move || {
let mut db = MemoryDB::new();
let status = try!(rebuild_accounts(&mut db, account_chunk, out_pairs_chunk, code_map));
trace!(target: "snapshot", "thread rebuilt {} account tries", account_chunk.len());
Ok((db, status))
});
handles.push(handle);
for (account_chunk, out_pairs_chunk) in account_fat_rlps.chunks(chunk_size).zip(pairs.chunks_mut(chunk_size)) {
let code_map = &self.code_map;
let status = try!(rebuild_accounts(self.db.as_hashdb_mut(), account_chunk, out_pairs_chunk, code_map));
chunk_code.extend(status.new_code);
for (addr_hash, code_hash) in status.missing_code {
self.missing_code.entry(code_hash).or_insert_with(Vec::new).push(addr_hash);
}
// consolidate all edits into the main overlay.
for handle in handles {
let (thread_db, status): (MemoryDB, _) = try!(handle.join());
self.db.consolidate(thread_db);
chunk_code.extend(status.new_code);
for (addr_hash, code_hash) in status.missing_code {
self.missing_code.entry(code_hash).or_insert_with(Vec::new).push(addr_hash);
}
}
Ok::<_, ::error::Error>(())
}));
}
// patch up all missing code. must be done after collecting all new missing code entries.
for (code_hash, code) in chunk_code {
for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) {
let mut db = AccountDBMut::from_hash(self.db.as_hashdb_mut(), addr_hash);
db.emplace(code_hash, code.clone());
db.emplace(code_hash, DBValue::from_slice(&code));
}
self.code_map.insert(code_hash, code);
@ -553,6 +529,20 @@ fn rebuild_accounts(
/// Proportion of blocks which we will verify `PoW` for.
const POW_VERIFY_RATE: f32 = 0.02;
/// Verify an old block with the given header, engine, blockchain, body. If `always` is set, it will perform
/// the fullest verification possible. If not, it will take a random sample to determine whether it will
/// do heavy or light verification.
pub fn verify_old_block(rng: &mut OsRng, header: &Header, engine: &Engine, chain: &BlockChain, body: Option<&[u8]>, always: bool) -> Result<(), ::error::Error> {
if always || rng.gen::<f32>() <= POW_VERIFY_RATE {
match chain.block_header(header.parent_hash()) {
Some(parent) => engine.verify_block_family(&header, &parent, body),
None => engine.verify_block_seal(&header),
}
} else {
engine.verify_block_basic(&header, body)
}
}
/// Rebuilds the blockchain from chunks.
///
/// Does basic verification for all blocks, but `PoW` verification for some.
@ -568,17 +558,23 @@ pub struct BlockRebuilder {
rng: OsRng,
disconnected: Vec<(u64, H256)>,
best_number: u64,
best_hash: H256,
best_root: H256,
fed_blocks: u64,
}
impl BlockRebuilder {
/// Create a new BlockRebuilder.
pub fn new(chain: BlockChain, db: Arc<Database>, best_number: u64) -> Result<Self, ::error::Error> {
pub fn new(chain: BlockChain, db: Arc<Database>, manifest: &ManifestData) -> Result<Self, ::error::Error> {
Ok(BlockRebuilder {
chain: chain,
db: db,
rng: try!(OsRng::new()),
disconnected: Vec::new(),
best_number: best_number,
best_number: manifest.block_number,
best_hash: manifest.block_hash,
best_root: manifest.state_root,
fed_blocks: 0,
})
}
@ -591,9 +587,14 @@ impl BlockRebuilder {
let rlp = UntrustedRlp::new(chunk);
let item_count = rlp.item_count();
let num_blocks = (item_count - 3) as u64;
trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3);
if self.fed_blocks + num_blocks > SNAPSHOT_BLOCKS {
return Err(Error::TooManyBlocks(SNAPSHOT_BLOCKS, self.fed_blocks).into())
}
// todo: assert here that these values are consistent with chunks being in order.
let mut cur_number = try!(rlp.val_at::<u64>(0)) + 1;
let mut parent_hash = try!(rlp.val_at::<H256>(1));
@ -610,14 +611,27 @@ impl BlockRebuilder {
let block = try!(abridged_block.to_block(parent_hash, cur_number, receipts_root));
let block_bytes = block.rlp_bytes(With);
let is_best = cur_number == self.best_number;
if self.rng.gen::<f32>() <= POW_VERIFY_RATE {
try!(engine.verify_block_seal(&block.header))
} else {
try!(engine.verify_block_basic(&block.header, Some(&block_bytes)));
if is_best {
if block.header.hash() != self.best_hash {
return Err(Error::WrongBlockHash(cur_number, self.best_hash, block.header.hash()).into())
}
if block.header.state_root() != &self.best_root {
return Err(Error::WrongStateRoot(self.best_root, *block.header.state_root()).into())
}
}
let is_best = cur_number == self.best_number;
try!(verify_old_block(
&mut self.rng,
&block.header,
engine,
&self.chain,
Some(&block_bytes),
is_best
));
let mut batch = self.db.transaction();
// special-case the first block in each chunk.
@ -635,11 +649,15 @@ impl BlockRebuilder {
cur_number += 1;
}
Ok(item_count as u64 - 3)
self.fed_blocks += num_blocks;
Ok(num_blocks)
}
/// Glue together any disconnected chunks. To be called at the end.
pub fn glue_chunks(self) {
/// Glue together any disconnected chunks and check that the chain is complete.
pub fn finalize(self, canonical: HashMap<u64, H256>) -> Result<(), Error> {
let mut batch = self.db.transaction();
for (first_num, first_hash) in self.disconnected {
let parent_num = first_num - 1;
@ -648,8 +666,23 @@ impl BlockRebuilder {
// the first block of the first chunk has nothing to connect to.
if let Some(parent_hash) = self.chain.block_hash(parent_num) {
// if so, add the child to it.
self.chain.add_child(parent_hash, first_hash);
self.chain.add_child(&mut batch, parent_hash, first_hash);
}
}
self.db.write_buffered(batch);
let best_number = self.best_number;
for num in (0..self.fed_blocks).map(|x| best_number - x) {
let hash = try!(self.chain.block_hash(num).ok_or(Error::IncompleteChain));
if let Some(canon_hash) = canonical.get(&num).cloned() {
if canon_hash != hash {
return Err(Error::WrongBlockHash(num, canon_hash, hash));
}
}
}
Ok(())
}
}
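As a rough sketch of the new two-step contract (feed every chunk, then `finalize` with whatever canonical hashes the caller trusts), assuming `reader`, `rebuilder` and `engine` are set up as in the block-chunk test further down; `known_canon_hashes` and the `reader.chunk` accessor are illustrative, and decompression is omitted.
// Hypothetical restore loop: feed all block chunks, then cross-check the rebuilt chain.
let mut canonical: HashMap<u64, H256> = HashMap::new();
canonical.extend(known_canon_hashes);            // e.g. (number, hash) pairs from trusted headers
for chunk_hash in &manifest.block_hashes {
	let chunk = try!(reader.chunk(*chunk_hash)); // assumed reader accessor; decompression omitted
	try!(rebuilder.feed(&chunk, &engine));
}
// Errors with WrongBlockHash / IncompleteChain if the restored chain disagrees.
try!(rebuilder.finalize(canonical));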

View File

@ -16,7 +16,7 @@
//! Snapshot network service implementation.
use std::collections::HashSet;
use std::collections::{HashMap, HashSet};
use std::io::ErrorKind;
use std::fs;
use std::path::PathBuf;
@ -74,6 +74,7 @@ struct Restoration {
snappy_buffer: Bytes,
final_state_root: H256,
guard: Guard,
canonical_hashes: HashMap<u64, H256>,
db: Arc<Database>,
}
@ -99,7 +100,7 @@ impl Restoration {
.map_err(UtilError::SimpleString)));
let chain = BlockChain::new(Default::default(), params.genesis, raw_db.clone());
let blocks = try!(BlockRebuilder::new(chain, raw_db.clone(), manifest.block_number));
let blocks = try!(BlockRebuilder::new(chain, raw_db.clone(), &manifest));
let root = manifest.state_root.clone();
Ok(Restoration {
@ -112,6 +113,7 @@ impl Restoration {
snappy_buffer: Vec::new(),
final_state_root: root,
guard: params.guard,
canonical_hashes: HashMap::new(),
db: raw_db,
})
}
@ -138,13 +140,18 @@ impl Restoration {
try!(self.blocks.feed(&self.snappy_buffer[..len], engine));
if let Some(ref mut writer) = self.writer.as_mut() {
try!(writer.write_block_chunk(hash, chunk));
try!(writer.write_block_chunk(hash, chunk));
}
}
Ok(())
}
// note canonical hashes.
fn note_canonical(&mut self, hashes: &[(u64, H256)]) {
self.canonical_hashes.extend(hashes.iter().cloned());
}
// finish up restoration.
fn finalize(self) -> Result<(), Error> {
use util::trie::TrieError;
@ -161,8 +168,8 @@ impl Restoration {
// check for missing code.
try!(self.state.check_missing());
// connect out-of-order chunks.
self.blocks.glue_chunks();
// connect out-of-order chunks and verify chain integrity.
try!(self.blocks.finalize(self.canonical_hashes));
if let Some(writer) = self.writer {
try!(writer.finish(self.manifest));
@ -206,7 +213,7 @@ pub struct Service {
restoration: Mutex<Option<Restoration>>,
snapshot_root: PathBuf,
db_config: DatabaseConfig,
io_channel: Channel,
io_channel: Mutex<Channel>,
pruning: Algorithm,
status: Mutex<RestorationStatus>,
reader: RwLock<Option<LooseReader>>,
@ -226,7 +233,7 @@ impl Service {
restoration: Mutex::new(None),
snapshot_root: params.snapshot_root,
db_config: params.db_config,
io_channel: params.channel,
io_channel: Mutex::new(params.channel),
pruning: params.pruning,
status: Mutex::new(RestorationStatus::Inactive),
reader: RwLock::new(None),
@ -352,7 +359,8 @@ impl Service {
// "Cancelled" is mincing words a bit -- what really happened
// is that the state we were snapshotting got pruned out
// before we could finish.
info!("Cancelled prematurely-started periodic snapshot.");
info!("Periodic snapshot failed: block state pruned.\
Run with a longer `--pruning-history` or with `--no-periodic-snapshot`");
return Ok(())
} else {
return Err(e);
@ -497,7 +505,8 @@ impl Service {
match is_done {
true => {
try!(db.flush().map_err(::util::UtilError::SimpleString));
self.finalize_restoration(&mut *restoration)
drop(db);
return self.finalize_restoration(&mut *restoration);
},
false => Ok(())
}
@ -558,7 +567,7 @@ impl SnapshotService for Service {
}
fn begin_restore(&self, manifest: ManifestData) {
if let Err(e) = self.io_channel.send(ClientIoMessage::BeginRestoration(manifest)) {
if let Err(e) = self.io_channel.lock().send(ClientIoMessage::BeginRestoration(manifest)) {
trace!("Error sending snapshot service message: {:?}", e);
}
}
@ -569,16 +578,24 @@ impl SnapshotService for Service {
}
fn restore_state_chunk(&self, hash: H256, chunk: Bytes) {
if let Err(e) = self.io_channel.send(ClientIoMessage::FeedStateChunk(hash, chunk)) {
if let Err(e) = self.io_channel.lock().send(ClientIoMessage::FeedStateChunk(hash, chunk)) {
trace!("Error sending snapshot service message: {:?}", e);
}
}
fn restore_block_chunk(&self, hash: H256, chunk: Bytes) {
if let Err(e) = self.io_channel.send(ClientIoMessage::FeedBlockChunk(hash, chunk)) {
if let Err(e) = self.io_channel.lock().send(ClientIoMessage::FeedBlockChunk(hash, chunk)) {
trace!("Error sending snapshot service message: {:?}", e);
}
}
fn provide_canon_hashes(&self, canonical: &[(u64, H256)]) {
let mut rest = self.restoration.lock();
if let Some(ref mut rest) = rest.as_mut() {
rest.note_canonical(canonical);
}
}
}
impl Drop for Service {

View File

@ -48,6 +48,10 @@ pub trait SnapshotService : Sync + Send {
/// Feed a raw block chunk to the service to be processed asynchronously.
/// no-op if currently restoring.
fn restore_block_chunk(&self, hash: H256, chunk: Bytes);
/// Give the restoration in-progress some canonical block hashes for
/// extra verification (performed at the end)
fn provide_canon_hashes(&self, canonical: &[(u64, H256)]);
}
impl IpcConfig for SnapshotService { }

View File

@ -26,6 +26,7 @@ use snapshot::io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter};
use util::{Mutex, snappy};
use util::kvdb::{Database, DatabaseConfig};
use std::collections::HashMap;
use std::sync::Arc;
fn chunk_and_restore(amount: u64) {
@ -58,18 +59,20 @@ fn chunk_and_restore(amount: u64) {
// snapshot it.
let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap());
let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap();
writer.into_inner().finish(::snapshot::ManifestData {
let manifest = ::snapshot::ManifestData {
state_hashes: Vec::new(),
block_hashes: block_hashes,
state_root: Default::default(),
state_root: ::util::sha3::SHA3_NULL_RLP,
block_number: amount,
block_hash: best_hash,
}).unwrap();
};
writer.into_inner().finish(manifest.clone()).unwrap();
// restore it.
let new_db = Arc::new(Database::open(&db_cfg, new_path.as_str()).unwrap());
let new_chain = BlockChain::new(Default::default(), &genesis, new_db.clone());
let mut rebuilder = BlockRebuilder::new(new_chain, new_db.clone(), amount).unwrap();
let mut rebuilder = BlockRebuilder::new(new_chain, new_db.clone(), &manifest).unwrap();
let reader = PackedReader::new(&snapshot_path).unwrap().unwrap();
let engine = ::engines::NullEngine::new(Default::default(), Default::default());
for chunk_hash in &reader.manifest().block_hashes {
@ -78,7 +81,7 @@ fn chunk_and_restore(amount: u64) {
rebuilder.feed(&chunk, &engine).unwrap();
}
rebuilder.glue_chunks();
rebuilder.finalize(HashMap::new()).unwrap();
// and test it.
let new_chain = BlockChain::new(Default::default(), &genesis, new_db);

View File

@ -21,6 +21,7 @@ use account_db::AccountDBMut;
use rand::Rng;
use snapshot::account::Account;
use util::DBValue;
use util::hash::{FixedHash, H256};
use util::hashdb::HashDB;
use util::trie::{Alphabet, StandardMap, SecTrieDBMut, TrieMut, ValueMode};
@ -66,7 +67,7 @@ impl StateProducer {
let mut account = Account::from_thin_rlp(&*account_data);
let acct_db = AccountDBMut::from_hash(db, *address_hash);
fill_storage(acct_db, account.storage_root_mut(), &mut self.storage_seed);
*account_data = account.to_thin_rlp();
*account_data = DBValue::from_vec(account.to_thin_rlp());
}
// sweep again to alter account trie.

View File

@ -16,6 +16,7 @@
//! Watcher for snapshot-related chain events.
use util::Mutex;
use client::{BlockChainClient, Client, ChainNotify};
use ids::BlockID;
use service::ClientIoMessage;
@ -55,7 +56,7 @@ trait Broadcast: Send + Sync {
fn take_at(&self, num: Option<u64>);
}
impl Broadcast for IoChannel<ClientIoMessage> {
impl Broadcast for Mutex<IoChannel<ClientIoMessage>> {
fn take_at(&self, num: Option<u64>) {
let num = match num {
Some(n) => n,
@ -64,7 +65,7 @@ impl Broadcast for IoChannel<ClientIoMessage> {
trace!(target: "snapshot_watcher", "broadcast: {}", num);
if let Err(e) = self.send(ClientIoMessage::TakeSnapshot(num)) {
if let Err(e) = self.lock().send(ClientIoMessage::TakeSnapshot(num)) {
warn!("Snapshot watcher disconnected from IoService: {}", e);
}
}
@ -91,7 +92,7 @@ impl Watcher {
client: client,
sync_status: sync_status,
}),
broadcast: Box::new(channel),
broadcast: Box::new(Mutex::new(channel)),
period: period,
history: history,
}

View File

@ -172,7 +172,7 @@ impl Account {
using it will not fail.");
let item: U256 = match db.get(key){
Ok(x) => x.map_or_else(U256::zero, decode),
Ok(x) => x.map_or_else(U256::zero, |v| decode(&*v)),
Err(e) => panic!("Encountered potential DB corruption: {}", e),
};
let value: H256 = item.into();
@ -247,23 +247,34 @@ impl Account {
}
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &HashDB) -> bool {
pub fn cache_code(&mut self, db: &HashDB) -> Option<Arc<Bytes>> {
// TODO: fill out self.code_cache;
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.is_cached() ||
if self.is_cached() { return Some(self.code_cache.clone()) }
match db.get(&self.code_hash) {
Some(x) => {
self.code_cache = Arc::new(x.to_vec());
self.code_size = Some(x.len());
true
self.code_cache = Arc::new(x.to_vec());
Some(self.code_cache.clone())
},
_ => {
warn!("Failed reverse get of {}", self.code_hash);
false
None
},
}
}
/// Provide code to cache. For correctness, should be the correct code for the
/// account.
pub fn cache_given_code(&mut self, code: Arc<Bytes>) {
trace!("Account::cache_given_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size = Some(code.len());
self.code_cache = code;
}
/// Provide a database to get `code_size`. Should not be called if it is a contract without code.
pub fn cache_code_size(&mut self, db: &HashDB) -> bool {
// TODO: fill out self.code_cache;
@ -351,7 +362,7 @@ impl Account {
self.code_filth = Filth::Clean;
},
(true, false) => {
db.emplace(self.code_hash.clone(), (*self.code_cache).clone());
db.emplace(self.code_hash.clone(), DBValue::from_slice(&*self.code_cache));
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Clean;
},
@ -413,7 +424,7 @@ impl Account {
self.code_size = other.code_size;
self.address_hash = other.address_hash;
let mut cache = self.storage_cache.borrow_mut();
for (k, v) in other.storage_cache.into_inner().into_iter() {
for (k, v) in other.storage_cache.into_inner() {
cache.insert(k.clone() , v.clone()); //TODO: cloning should not be required here
}
self.storage_changes = other.storage_changes;
@ -476,7 +487,7 @@ mod tests {
};
let mut a = Account::from_rlp(&rlp);
assert!(a.cache_code(&db.immutable()));
assert!(a.cache_code(&db.immutable()).is_some());
let mut a = Account::from_rlp(&rlp);
assert_eq!(a.note_code(vec![0x55, 0x44, 0xffu8]), Ok(()));

View File

@ -127,11 +127,10 @@ impl AccountEntry {
fn overwrite_with(&mut self, other: AccountEntry) {
self.state = other.state;
match other.account {
Some(acc) => match self.account {
Some(ref mut ours) => {
Some(acc) => {
if let Some(ref mut ours) = self.account {
ours.overwrite_with(acc);
},
None => {},
}
},
None => self.account = None,
}
@ -281,13 +280,10 @@ impl State {
}
},
None => {
match self.cache.get_mut().entry(k) {
Entry::Occupied(e) => {
if e.get().is_dirty() {
e.remove();
}
},
_ => {}
if let Entry::Occupied(e) = self.cache.get_mut().entry(k) {
if e.get().is_dirty() {
e.remove();
}
}
}
}
@ -408,7 +404,7 @@ impl State {
// account is not found in the global cache, get from the DB and insert into local
let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
let maybe_acc = match db.get(address) {
Ok(acc) => acc.map(Account::from_rlp),
Ok(acc) => acc.map(|v| Account::from_rlp(&v)),
Err(e) => panic!("Potential DB corruption encountered: {}", e),
};
let r = maybe_acc.as_ref().map_or(H256::new(), |a| {
@ -501,6 +497,7 @@ impl State {
/// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit.
/// `accounts` is mutable because we may need to commit the code or storage and record that.
#[cfg_attr(feature="dev", allow(match_ref_pats))]
#[cfg_attr(feature="dev", allow(needless_borrow))]
fn commit_into(
factories: &Factories,
db: &mut StateDB,
@ -509,17 +506,14 @@ impl State {
) -> Result<(), Error> {
// first, commit the sub trees.
for (address, ref mut a) in accounts.iter_mut().filter(|&(_, ref a)| a.is_dirty()) {
match a.account {
Some(ref mut account) => {
if !account.is_empty() {
db.note_account_bloom(&address);
}
let addr_hash = account.address_hash(address);
let mut account_db = factories.accountdb.create(db.as_hashdb_mut(), addr_hash);
account.commit_storage(&factories.trie, account_db.as_hashdb_mut());
account.commit_code(account_db.as_hashdb_mut());
if let Some(ref mut account) = a.account {
if !account.is_empty() {
db.note_account_bloom(address);
}
_ => {}
let addr_hash = account.address_hash(address);
let mut account_db = factories.accountdb.create(db.as_hashdb_mut(), addr_hash);
account.commit_storage(&factories.trie, account_db.as_hashdb_mut());
account.commit_code(account_db.as_hashdb_mut());
}
}
@ -586,7 +580,7 @@ impl State {
fn query_pod(&mut self, query: &PodState) {
for (address, pod_account) in query.get().into_iter()
.filter(|&(ref a, _)| self.ensure_cached(a, RequireCache::Code, true, |a| a.is_some()))
.filter(|&(a, _)| self.ensure_cached(a, RequireCache::Code, true, |a| a.is_some()))
{
// needs to be split into two parts for the refcell code here
// to work.
@ -605,14 +599,30 @@ impl State {
pod_state::diff_pod(&state_pre.to_pod(), &pod_state_post)
}
fn update_account_cache(require: RequireCache, account: &mut Account, db: &HashDB) {
match require {
RequireCache::None => {},
RequireCache::Code => {
account.cache_code(db);
}
RequireCache::CodeSize => {
account.cache_code_size(db);
// load required account data from the databases.
fn update_account_cache(require: RequireCache, account: &mut Account, state_db: &StateDB, db: &HashDB) {
match (account.is_cached(), require) {
(true, _) | (false, RequireCache::None) => {}
(false, require) => {
// if there's already code in the global cache, always cache it
// locally.
let hash = account.code_hash();
match state_db.get_cached_code(&hash) {
Some(code) => account.cache_given_code(code),
None => match require {
RequireCache::None => {},
RequireCache::Code => {
if let Some(code) = account.cache_code(db) {
// propagate code loaded from the database to
// the global code cache.
state_db.cache_code(hash, code)
}
}
RequireCache::CodeSize => {
account.cache_code_size(db);
}
}
}
}
}
}
@ -626,7 +636,7 @@ impl State {
if let Some(ref mut maybe_acc) = self.cache.borrow_mut().get_mut(a) {
if let Some(ref mut account) = maybe_acc.account {
let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a));
Self::update_account_cache(require, account, accountdb.as_hashdb());
Self::update_account_cache(require, account, &self.db, accountdb.as_hashdb());
return f(Some(account));
}
return f(None);
@ -635,7 +645,7 @@ impl State {
let result = self.db.get_cached(a, |mut acc| {
if let Some(ref mut account) = acc {
let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a));
Self::update_account_cache(require, account, accountdb.as_hashdb());
Self::update_account_cache(require, account, &self.db, accountdb.as_hashdb());
}
f(acc.map(|a| &*a))
});
@ -648,12 +658,12 @@ impl State {
// not found in the global cache, get from the DB and insert into local
let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
let mut maybe_acc = match db.get(a) {
Ok(acc) => acc.map(Account::from_rlp),
Ok(acc) => acc.map(|v| Account::from_rlp(&v)),
Err(e) => panic!("Potential DB corruption encountered: {}", e),
};
if let Some(ref mut account) = maybe_acc.as_mut() {
let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), account.address_hash(a));
Self::update_account_cache(require, account, accountdb.as_hashdb());
Self::update_account_cache(require, account, &self.db, accountdb.as_hashdb());
}
let r = f(maybe_acc.as_ref());
self.insert_cache(a, AccountEntry::new_clean(maybe_acc));
@ -679,14 +689,12 @@ impl State {
None => {
let maybe_acc = if self.db.check_account_bloom(a) {
let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
let maybe_acc = match db.get(a) {
Ok(Some(acc)) => AccountEntry::new_clean(Some(Account::from_rlp(acc))),
match db.get(a) {
Ok(Some(acc)) => AccountEntry::new_clean(Some(Account::from_rlp(&acc))),
Ok(None) => AccountEntry::new_clean(None),
Err(e) => panic!("Potential DB corruption encountered: {}", e),
};
maybe_acc
}
else {
}
} else {
AccountEntry::new_clean(None)
};
self.insert_cache(a, maybe_acc);
@ -711,7 +719,7 @@ impl State {
if require_code {
let addr_hash = account.address_hash(a);
let accountdb = self.factories.accountdb.readonly(self.db.as_hashdb(), addr_hash);
account.cache_code(accountdb.as_hashdb());
Self::update_account_cache(RequireCache::Code, account, &self.db, accountdb.as_hashdb());
}
account
},

View File

@ -16,6 +16,7 @@
use std::collections::{VecDeque, HashSet};
use lru_cache::LruCache;
use util::cache::MemoryLruCache;
use util::journaldb::JournalDB;
use util::hash::{H256};
use util::hashdb::HashDB;
@ -33,6 +34,9 @@ pub const ACCOUNT_BLOOM_HASHCOUNT_KEY: &'static [u8] = b"account_hash_count";
const STATE_CACHE_BLOCKS: usize = 12;
// The percentage of supplied cache size to go to accounts.
const ACCOUNT_CACHE_RATIO: usize = 90;
/// Shared canonical state cache.
struct AccountCache {
/// DB Account cache. `None` indicates that account is known to be missing.
@ -89,6 +93,8 @@ pub struct StateDB {
db: Box<JournalDB>,
/// Shared canonical state cache.
account_cache: Arc<Mutex<AccountCache>>,
/// DB Code cache. Maps code hashes to shared bytes.
code_cache: Arc<Mutex<MemoryLruCache<H256, Arc<Vec<u8>>>>>,
/// Local dirty cache.
local_cache: Vec<CacheQueueItem>,
/// Shared account bloom. Does not handle chain reorganizations.
@ -111,7 +117,9 @@ impl StateDB {
// into the `AccountCache` structure as its own `LruCache<(Address, H256), H256>`.
pub fn new(db: Box<JournalDB>, cache_size: usize) -> StateDB {
let bloom = Self::load_bloom(db.backing());
let cache_items = cache_size / ::std::mem::size_of::<Option<Account>>();
let acc_cache_size = cache_size * ACCOUNT_CACHE_RATIO / 100;
let code_cache_size = cache_size - acc_cache_size;
let cache_items = acc_cache_size / ::std::mem::size_of::<Option<Account>>();
StateDB {
db: db,
@ -119,6 +127,7 @@ impl StateDB {
accounts: LruCache::new(cache_items),
modifications: VecDeque::new(),
})),
code_cache: Arc::new(Mutex::new(MemoryLruCache::new(code_cache_size))),
local_cache: Vec::new(),
account_bloom: Arc::new(Mutex::new(bloom)),
cache_size: cache_size,
@ -170,7 +179,7 @@ impl StateDB {
pub fn commit_bloom(batch: &mut DBTransaction, journal: BloomJournal) -> Result<(), UtilError> {
assert!(journal.hash_functions <= 255);
batch.put(COL_ACCOUNT_BLOOM, ACCOUNT_BLOOM_HASHCOUNT_KEY, &vec![journal.hash_functions as u8]);
batch.put(COL_ACCOUNT_BLOOM, ACCOUNT_BLOOM_HASHCOUNT_KEY, &[journal.hash_functions as u8]);
let mut key = [0u8; 8];
let mut val = [0u8; 8];
@ -216,7 +225,7 @@ impl StateDB {
let mut clear = false;
for block in enacted.iter().filter(|h| self.commit_hash.as_ref().map_or(true, |p| *h != p)) {
clear = clear || {
if let Some(ref mut m) = cache.modifications.iter_mut().find(|ref m| &m.hash == block) {
if let Some(ref mut m) = cache.modifications.iter_mut().find(|m| &m.hash == block) {
trace!("Reverting enacted block {:?}", block);
m.is_canon = true;
for a in &m.accounts {
@ -232,7 +241,7 @@ impl StateDB {
for block in retracted {
clear = clear || {
if let Some(ref mut m) = cache.modifications.iter_mut().find(|ref m| &m.hash == block) {
if let Some(ref mut m) = cache.modifications.iter_mut().find(|m| &m.hash == block) {
trace!("Retracting block {:?}", block);
m.is_canon = false;
for a in &m.accounts {
@ -286,7 +295,7 @@ impl StateDB {
is_canon: is_best,
parent: parent.clone(),
};
let insert_at = cache.modifications.iter().enumerate().find(|&(_, ref m)| m.number < *number).map(|(i, _)| i);
let insert_at = cache.modifications.iter().enumerate().find(|&(_, m)| m.number < *number).map(|(i, _)| i);
trace!("inserting modifications at {:?}", insert_at);
if let Some(insert_at) = insert_at {
cache.modifications.insert(insert_at, block_changes);
@ -311,6 +320,7 @@ impl StateDB {
StateDB {
db: self.db.boxed_clone(),
account_cache: self.account_cache.clone(),
code_cache: self.code_cache.clone(),
local_cache: Vec::new(),
account_bloom: self.account_bloom.clone(),
cache_size: self.cache_size,
@ -325,6 +335,7 @@ impl StateDB {
StateDB {
db: self.db.boxed_clone(),
account_cache: self.account_cache.clone(),
code_cache: self.code_cache.clone(),
local_cache: Vec::new(),
account_bloom: self.account_bloom.clone(),
cache_size: self.cache_size,
@ -342,7 +353,11 @@ impl StateDB {
/// Heap size used.
pub fn mem_used(&self) -> usize {
// TODO: account for LRU-cache overhead; this is a close approximation.
self.db.mem_used() + self.account_cache.lock().accounts.len() * ::std::mem::size_of::<Option<Account>>()
self.db.mem_used() + {
let accounts = self.account_cache.lock().accounts.len();
let code_size = self.code_cache.lock().current_size();
code_size + accounts * ::std::mem::size_of::<Option<Account>>()
}
}
/// Returns underlying `JournalDB`.
@ -362,6 +377,15 @@ impl StateDB {
})
}
/// Add a global code cache entry. This doesn't need to worry about canonicality because
/// it simply maps hashes to raw code and will always be correct in the absence of
/// hash collisions.
pub fn cache_code(&self, hash: H256, code: Arc<Vec<u8>>) {
let mut cache = self.code_cache.lock();
cache.insert(hash, code);
}
/// Get basic copy of the cached account. Does not include storage.
/// Returns 'None' if cache is disabled or if the account is not cached.
pub fn get_cached_account(&self, addr: &Address) -> Option<Option<Account>> {
@ -369,7 +393,14 @@ impl StateDB {
if !Self::is_allowed(addr, &self.parent_hash, &cache.modifications) {
return None;
}
cache.accounts.get_mut(&addr).map(|a| a.as_ref().map(|a| a.clone_basic()))
cache.accounts.get_mut(addr).map(|a| a.as_ref().map(|a| a.clone_basic()))
}
/// Get cached code based on hash.
pub fn get_cached_code(&self, hash: &H256) -> Option<Arc<Vec<u8>>> {
let mut cache = self.code_cache.lock();
cache.get_mut(hash).map(|code| code.clone())
}
/// Get value from a cached account.
@ -406,8 +437,7 @@ impl StateDB {
// We search for our parent in that list first and then for
// all its parent until we hit the canonical block,
// checking against all the intermediate modifications.
let mut iter = modifications.iter();
while let Some(ref m) = iter.next() {
for m in modifications {
if &m.hash == parent {
if m.is_canon {
return true;
@ -420,7 +450,7 @@ impl StateDB {
}
}
trace!("Cache lookup skipped for {:?}: parent hash is unknown", addr);
return false;
false
}
}
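The code cache added in this file maps code hashes to shared byte buffers and is cloned along with the StateDB, so every copy sees the same entries. A rough, self-contained sketch of that shape, with a plain HashMap standing in for MemoryLruCache (illustrative only, not the Parity API):

use std::collections::HashMap;
use std::sync::{Arc, Mutex};

// Code is keyed by its hash, so entries never need invalidation on
// reorganizations (absent hash collisions).
#[derive(Clone)]
struct CodeCache {
    inner: Arc<Mutex<HashMap<[u8; 32], Arc<Vec<u8>>>>>,
}

impl CodeCache {
    fn new() -> Self {
        CodeCache { inner: Arc::new(Mutex::new(HashMap::new())) }
    }

    fn cache_code(&self, hash: [u8; 32], code: Arc<Vec<u8>>) {
        self.inner.lock().unwrap().insert(hash, code);
    }

    fn get_cached_code(&self, hash: &[u8; 32]) -> Option<Arc<Vec<u8>>> {
        self.inner.lock().unwrap().get(hash).cloned()
    }
}

fn main() {
    let cache = CodeCache::new();
    let clone = cache.clone(); // clones share the same underlying map
    cache.cache_code([1u8; 32], Arc::new(vec![0x60, 0x60]));
    assert!(clone.get_cached_code(&[1u8; 32]).is_some());
}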

View File

@ -26,6 +26,7 @@ use miner::Miner;
use rlp::{Rlp, View};
use spec::Spec;
use views::BlockView;
use util::stats::Histogram;
#[test]
fn imports_from_empty() {
@ -198,19 +199,37 @@ fn can_collect_garbage() {
assert!(client.blockchain_cache_info().blocks < 100 * 1024);
}
#[test]
#[cfg_attr(feature="dev", allow(useless_vec))]
fn can_generate_gas_price_statistics() {
let client_result = generate_dummy_client_with_data(16, 1, &vec_into![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]);
fn can_generate_gas_price_median() {
let client_result = generate_dummy_client_with_data(3, 1, &vec_into![1, 2, 3]);
let client = client_result.reference();
let s = client.gas_price_statistics(8, 8).unwrap();
assert_eq!(s, vec_into![8, 8, 9, 10, 11, 12, 13, 14, 15]);
let s = client.gas_price_statistics(16, 8).unwrap();
assert_eq!(s, vec_into![0, 1, 3, 5, 7, 9, 11, 13, 15]);
let s = client.gas_price_statistics(32, 8).unwrap();
assert_eq!(s, vec_into![0, 1, 3, 5, 7, 9, 11, 13, 15]);
assert_eq!(Some(U256::from(2)), client.gas_price_median(3));
let client_result = generate_dummy_client_with_data(4, 1, &vec_into![1, 4, 3, 2]);
let client = client_result.reference();
assert_eq!(Some(U256::from(3)), client.gas_price_median(4));
}
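The expectations above amount to an upper-median convention: the even-length corpus 1, 4, 3, 2 yields 3 rather than 2.5. A standalone sketch of that convention (just the arithmetic the test relies on, not the client API):

// Upper-median: sort, then take the element at index len / 2, so an
// even-length corpus returns the higher of its two middle values.
fn upper_median(mut prices: Vec<u64>) -> Option<u64> {
    if prices.is_empty() {
        return None;
    }
    prices.sort();
    Some(prices[prices.len() / 2])
}

fn main() {
    assert_eq!(upper_median(vec![1, 2, 3]), Some(2));
    assert_eq!(upper_median(vec![1, 4, 3, 2]), Some(3));
    assert_eq!(upper_median(vec![]), None);
}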
#[test]
fn can_generate_gas_price_histogram() {
let client_result = generate_dummy_client_with_data(20, 1, &vec_into![6354,8593,6065,4842,7845,7002,689,4958,4250,6098,5804,4320,643,8895,2296,8589,7145,2000,2512,1408]);
let client = client_result.reference();
let hist = client.gas_price_histogram(20, 5).unwrap();
let correct_hist = Histogram { bucket_bounds: vec_into![643,2293,3943,5593,7243,8893], counts: vec![4,2,4,6,3] };
assert_eq!(hist, correct_hist);
}
#[test]
fn empty_gas_price_histogram() {
let client_result = generate_dummy_client_with_data(20, 0, &vec_into![]);
let client = client_result.reference();
assert!(client.gas_price_histogram(20, 5).is_none());
}
#[test]
fn can_handle_long_fork() {
let client_result = generate_dummy_client(1200);

View File

@ -388,7 +388,7 @@ pub fn get_good_dummy_block_fork_seq(start_number: usize, count: usize, parent_h
r
}
pub fn get_good_dummy_block() -> Bytes {
pub fn get_good_dummy_block_hash() -> (H256, Bytes) {
let mut block_header = Header::new();
let test_spec = get_test_spec();
let test_engine = &test_spec.engine;
@ -399,7 +399,12 @@ pub fn get_good_dummy_block() -> Bytes {
block_header.set_parent_hash(test_spec.genesis_header().hash());
block_header.set_state_root(test_spec.genesis_header().state_root().clone());
create_test_block(&block_header)
(block_header.hash(), create_test_block(&block_header))
}
pub fn get_good_dummy_block() -> Bytes {
let (_, bytes) = get_good_dummy_block_hash();
bytes
}
pub fn get_bad_state_dummy_block() -> Bytes {

View File

@ -285,7 +285,7 @@ impl<T> TraceDatabase for TraceDB<T> where T: DatabaseExtras {
let mut blooms = self.blooms.write();
batch.extend_with_cache(db::COL_TRACE, &mut *blooms, blooms_to_insert, CacheUpdatePolicy::Remove);
// note_used must be called after locking blooms to avoid cache/traces deadlock on garbage collection
for key in blooms_keys.into_iter() {
for key in blooms_keys {
self.note_used(CacheID::Bloom(key));
}
}

View File

@ -50,12 +50,12 @@ fn prefix_subtrace_addresses(mut traces: Vec<FlatTrace>) -> Vec<FlatTrace> {
// [1, 0]
let mut current_subtrace_index = 0;
let mut first = true;
for trace in traces.iter_mut() {
for trace in &mut traces {
match (first, trace.trace_address.is_empty()) {
(true, _) => first = false,
(_, true) => current_subtrace_index += 1,
_ => {}
}
}
trace.trace_address.push_front(current_subtrace_index);
}
traces
@ -78,7 +78,7 @@ fn should_prefix_address_properly() {
let t = vec![vec![], vec![0], vec![0, 0], vec![0], vec![], vec![], vec![0], vec![]].into_iter().map(&f).collect();
let t = prefix_subtrace_addresses(t);
assert_eq!(t, vec![vec![0], vec![0, 0], vec![0, 0, 0], vec![0, 0], vec![1], vec![2], vec![2, 0], vec![3]].into_iter().map(&f).collect::<Vec<_>>());
}
}
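A self-contained version of the prefixing logic exercised by this test, using bare VecDeque<usize> addresses in place of FlatTrace and the helper f; it reproduces the same input/output mapping asserted above:

use std::collections::VecDeque;

// Each top-level trace (empty address) after the first starts a new subtrace;
// every address then gets the current subtrace index pushed onto the front.
fn prefix_subtrace_addresses(mut traces: Vec<VecDeque<usize>>) -> Vec<VecDeque<usize>> {
    let mut current_subtrace_index = 0;
    let mut first = true;
    for trace in &mut traces {
        match (first, trace.is_empty()) {
            (true, _) => first = false,
            (_, true) => current_subtrace_index += 1,
            _ => {}
        }
        trace.push_front(current_subtrace_index);
    }
    traces
}

fn main() {
    let input: Vec<VecDeque<usize>> =
        vec![vec![], vec![0], vec![0, 0], vec![0], vec![], vec![], vec![0], vec![]]
            .into_iter().map(|v| VecDeque::from(v)).collect();
    let expected: Vec<VecDeque<usize>> =
        vec![vec![0], vec![0, 0], vec![0, 0, 0], vec![0, 0], vec![1], vec![2], vec![2, 0], vec![3]]
            .into_iter().map(|v| VecDeque::from(v)).collect();
    assert_eq!(prefix_subtrace_addresses(input), expected);
}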
impl Tracer for ExecutiveTracer {
fn prepare_trace_call(&self, params: &ActionParams) -> Option<Call> {

View File

@ -33,4 +33,5 @@ pub mod transaction_import;
pub mod block_import_error;
pub mod restoration_status;
pub mod snapshot_manifest;
pub mod proof_request;
pub mod proof_request;
pub mod mode;

55
ethcore/src/types/mode.rs Normal file
View File

@ -0,0 +1,55 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Mode type
pub use std::time::Duration;
use client::Mode as ClientMode;
/// IPC-capable shadow-type for client::config::Mode
#[derive(Clone, Binary)]
pub enum Mode {
/// Same as ClientMode::Off.
Off,
/// Same as ClientMode::Dark; values in seconds.
Dark(u64),
/// Same as ClientMode::Passive; values in seconds.
Passive(u64, u64),
/// Same as ClientMode::Active.
Active,
}
impl From<ClientMode> for Mode {
fn from(mode: ClientMode) -> Self {
match mode {
ClientMode::Off => Mode::Off,
ClientMode::Dark(timeout) => Mode::Dark(timeout.as_secs()),
ClientMode::Passive(timeout, alarm) => Mode::Passive(timeout.as_secs(), alarm.as_secs()),
ClientMode::Active => Mode::Active,
}
}
}
impl From<Mode> for ClientMode {
fn from(mode: Mode) -> Self {
match mode {
Mode::Off => ClientMode::Off,
Mode::Dark(timeout) => ClientMode::Dark(Duration::from_secs(timeout)),
Mode::Passive(timeout, alarm) => ClientMode::Passive(Duration::from_secs(timeout), Duration::from_secs(alarm)),
Mode::Active => ClientMode::Active,
}
}
}
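A round trip through the two conversions defined above; both enums are re-declared locally so the sketch compiles on its own, and only whole seconds are expected to survive the trip:

use std::time::Duration;

// Local stand-ins: `ClientMode` for client::Mode, `Mode` for the IPC shadow type.
#[derive(Debug, PartialEq)]
enum ClientMode { Off, Dark(Duration), Passive(Duration, Duration), Active }

#[derive(Debug, PartialEq)]
enum Mode { Off, Dark(u64), Passive(u64, u64), Active }

impl From<ClientMode> for Mode {
    fn from(mode: ClientMode) -> Self {
        match mode {
            ClientMode::Off => Mode::Off,
            ClientMode::Dark(t) => Mode::Dark(t.as_secs()),
            ClientMode::Passive(t, a) => Mode::Passive(t.as_secs(), a.as_secs()),
            ClientMode::Active => Mode::Active,
        }
    }
}

impl From<Mode> for ClientMode {
    fn from(mode: Mode) -> Self {
        match mode {
            Mode::Off => ClientMode::Off,
            Mode::Dark(t) => ClientMode::Dark(Duration::from_secs(t)),
            Mode::Passive(t, a) => ClientMode::Passive(Duration::from_secs(t), Duration::from_secs(a)),
            Mode::Active => ClientMode::Active,
        }
    }
}

fn main() {
    // Whole seconds survive the round trip; sub-second precision would not.
    let original = ClientMode::Passive(Duration::from_secs(300), Duration::from_secs(3600));
    let shadow: Mode = ClientMode::Passive(Duration::from_secs(300), Duration::from_secs(3600)).into();
    assert_eq!(ClientMode::from(shadow), original);
}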

View File

@ -109,7 +109,7 @@ pub struct VerificationQueue<K: Kind> {
struct QueueSignal {
deleting: Arc<AtomicBool>,
signalled: AtomicBool,
message_channel: IoChannel<ClientIoMessage>,
message_channel: Mutex<IoChannel<ClientIoMessage>>,
}
impl QueueSignal {
@ -121,7 +121,8 @@ impl QueueSignal {
}
if self.signalled.compare_and_swap(false, true, AtomicOrdering::Relaxed) == false {
if let Err(e) = self.message_channel.send_sync(ClientIoMessage::BlockVerified) {
let channel = self.message_channel.lock().clone();
if let Err(e) = channel.send_sync(ClientIoMessage::BlockVerified) {
debug!("Error sending BlockVerified message: {:?}", e);
}
}
@ -135,7 +136,8 @@ impl QueueSignal {
}
if self.signalled.compare_and_swap(false, true, AtomicOrdering::Relaxed) == false {
if let Err(e) = self.message_channel.send(ClientIoMessage::BlockVerified) {
let channel = self.message_channel.lock().clone();
if let Err(e) = channel.send(ClientIoMessage::BlockVerified) {
debug!("Error sending BlockVerified message: {:?}", e);
}
}
@ -178,7 +180,7 @@ impl<K: Kind> VerificationQueue<K> {
let ready_signal = Arc::new(QueueSignal {
deleting: deleting.clone(),
signalled: AtomicBool::new(false),
message_channel: message_channel
message_channel: Mutex::new(message_channel),
});
let empty = Arc::new(SCondvar::new());
let panic_handler = PanicHandler::new_in_arc();
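The sending pattern above takes the lock only long enough to clone the channel, then sends on the clone so the lock is not held across a potentially blocking send. A standalone sketch of that pattern, with a std mpsc Sender standing in for IoChannel (illustrative only):

use std::sync::{Arc, Mutex};
use std::sync::mpsc::{channel, Sender};
use std::thread;

// The sender sits behind a Mutex so it can be shared across threads; the
// guard is dropped at the end of the `let` statement, before the send.
struct Signal {
    message_channel: Mutex<Sender<&'static str>>,
}

impl Signal {
    fn set(&self) {
        let channel = self.message_channel.lock().unwrap().clone();
        if let Err(e) = channel.send("BlockVerified") {
            println!("Error sending BlockVerified message: {:?}", e);
        }
    }
}

fn main() {
    let (tx, rx) = channel();
    let signal = Arc::new(Signal { message_channel: Mutex::new(tx) });
    let s = signal.clone();
    thread::spawn(move || s.set()).join().unwrap();
    assert_eq!(rx.recv().unwrap(), "BlockVerified");
}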

View File

@ -15,7 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use keccak::Keccak256;
use super::{KeyPair, Error, Generator, Secret};
use super::{KeyPair, Error, Generator};
/// Simple brainwallet.
pub struct Brain(String);
@ -38,9 +38,9 @@ impl Generator for Brain {
match i > 16384 {
false => i += 1,
true => {
let result = KeyPair::from_secret(Secret::from(secret.clone()));
if result.is_ok() {
return result
let result = KeyPair::from_secret(secret.clone().into());
if result.as_ref().ok().map_or(false, |r| r.address()[0] == 0) {
return result;
}
},
}
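The reworked loop keeps deriving keypairs until the resulting address begins with a zero byte instead of returning the first valid one. A toy, dependency-free sketch of that kind of search loop, with DefaultHasher standing in for the real key derivation (illustrative only, not the actual brainwallet scheme):

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Toy stand-in for "derive an address from a secret": hash and take the bytes.
fn derive_address(secret: u64) -> [u8; 8] {
    let mut h = DefaultHasher::new();
    secret.hash(&mut h);
    h.finish().to_be_bytes()
}

// Keep re-deriving until the first address byte is zero, mirroring the shape
// of the loop above (bounded here so it always terminates).
fn search(seed: u64) -> Option<(u64, [u8; 8])> {
    let mut secret = seed;
    for _ in 0..16384 {
        let addr = derive_address(secret);
        if addr[0] == 0 {
            return Some((secret, addr));
        }
        // Re-hash the secret to obtain the next candidate.
        let mut h = DefaultHasher::new();
        secret.hash(&mut h);
        secret = h.finish();
    }
    None
}

fn main() {
    match search(42) {
        Some((secret, addr)) => println!("found secret {} with address {:02x?}", secret, addr),
        None => println!("no match within the iteration bound"),
    }
}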

38
evmbin/Cargo.lock generated
View File

@ -155,7 +155,7 @@ name = "ethash"
version = "1.4.0"
dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"sha3 0.1.0",
]
@ -208,6 +208,9 @@ dependencies = [
[[package]]
name = "ethcore-bloom-journal"
version = "0.1.0"
dependencies = [
"siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "ethcore-devtools"
@ -223,7 +226,7 @@ dependencies = [
"crossbeam 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -275,8 +278,9 @@ dependencies = [
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.77 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0",
"rocksdb 0.4.5 (git+https://github.com/ethcore/rust-rocksdb)",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
@ -334,6 +338,7 @@ dependencies = [
"itertools 0.4.19 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -632,13 +637,28 @@ name = "odds"
version = "0.2.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "owning_ref"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "parking_lot"
version = "0.2.8"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"owning_ref 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parking_lot_core"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -867,6 +887,11 @@ dependencies = [
"gcc 0.3.37 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "siphasher"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "slab"
version = "0.1.3"
@ -1179,7 +1204,9 @@ dependencies = [
"checksum num-traits 0.1.36 (registry+https://github.com/rust-lang/crates.io-index)" = "a16a42856a256b39c6d3484f097f6713e14feacd9bfb02290917904fae46c81c"
"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3"
"checksum odds 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)" = "e04630a62b3f1cc8c58b4d8f2555a40136f02b420e158242936ef286a72d33a0"
"checksum parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "968f685642555d2f7e202c48b8b11de80569e9bfea817f7f12d7c61aac62d4e6"
"checksum owning_ref 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8d91377085359426407a287ab16884a0111ba473aa6844ff01d4ec20ce3d75e7"
"checksum parking_lot 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e1435e7a2a00dfebededd6c6bdbd54008001e94b4a2aadd6aef0dc4c56317621"
"checksum parking_lot_core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb1b97670a2ffadce7c397fb80a3d687c4f3060140b885621ef1653d0e5d5068"
"checksum primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0e31b86efadeaeb1235452171a66689682783149a6249ff334a2c5d8218d00a4"
"checksum primal-bit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "464a91febc06166783d4f5ba3577b5ed8dda8e421012df80bfe48a971ed7be8f"
"checksum primal-check 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "647c81b67bb9551a7b88d0bcd785ac35b7d0bf4b2f358683d7c2375d04daec51"
@ -1205,6 +1232,7 @@ dependencies = [
"checksum serde_codegen 0.8.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e575e583f7d162e163af117fb9791fbd2bd203c31023b3219617e12c5997a738"
"checksum serde_codegen_internals 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "318f7e77aa5187391d74aaf4553d2189f56b0ce25e963414c951b97877ffdcec"
"checksum serde_json 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1cb6b19e74d9f65b9d03343730b643d729a446b29376785cd65efdff4675e2fc"
"checksum siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c44e42fa187b5a8782489cf7740cc27c3125806be2bf33563cf5e02e9533fcd"
"checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
"checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
"checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"

View File

@ -31,7 +31,7 @@ pub struct FakeExt {
impl Default for FakeExt {
fn default() -> Self {
FakeExt {
schedule: Schedule::new_homestead(),
schedule: Schedule::new_homestead_gas_fix(),
store: HashMap::new(),
depth: 1,
}

1
js/.gitignore vendored
View File

@ -5,3 +5,4 @@ build
.coverage
.dist
.happypack
.npmjs

Binary image files changed in this range are not shown by the diff viewer; one further file diff was suppressed because one or more lines are too long.

View File

@ -1,157 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 19.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Слой_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 83.609 73.184" enable-background="new 0 0 83.609 73.184" xml:space="preserve">
<g id="XMLID_2_">
<path id="XMLID_3_" fill="#213654" d="M44.782,73.184c-3.33,0-6.614-0.446-9.764-1.325l0.443-1.586
c3.004,0.84,6.141,1.265,9.32,1.265c2.181,0,4.902-0.338,7.314-0.791c-1.933-1.169-3.504-3.376-4.303-6.29
c-0.674-2.458-0.595-4.833,0.202-6.665c-2.754,0.904-5.73,1.375-8.768,1.375l-0.524-0.005l0.029-1.646l0.495,0.005
c3.953,0,7.791-0.843,11.107-2.439c0.515-0.302,1.085-0.521,1.702-0.653c0.459-0.099,0.942-0.142,1.381-0.143l-0.016,1.646
c-0.336,0.008-0.679,0.032-1.021,0.105c-0.449,0.096-0.857,0.252-1.224,0.468l0.002,0.005c-0.019,0.01-0.037,0.019-0.056,0.027
c-0.521,0.319-0.956,0.763-1.298,1.325c-0.927,1.523-1.082,3.769-0.426,6.159c0.997,3.64,3.34,5.964,5.905,6.012
c0.085-0.022,0.169-0.046,0.251-0.068l0.018,0.065c0.244-0.011,0.489-0.042,0.735-0.095c1.212-0.258,2.106-0.996,2.66-2.194
l1.494,0.689c-0.772,1.672-2.09,2.748-3.812,3.114c-0.383,0.082-0.764,0.127-1.141,0.136
C52.543,72.447,48.078,73.184,44.782,73.184z M23.554,66.171c-7.101-5.453-11.173-13.35-11.173-21.668
c0-2.19,0.281-4.376,0.835-6.497l1.592,0.416c-0.518,1.985-0.78,4.031-0.78,6.081c0,7.802,3.837,15.224,10.528,20.363
L23.554,66.171z M30.185,65.251c-0.496-3.824,1.038-4.895,2.392-5.841c0.542-0.378,1.054-0.735,1.486-1.276
c0.905-1.134,0.361-1.996-1.427-4.17c-0.342-0.416-0.699-0.852-1.054-1.314c-0.63-0.824-0.858-1.736-0.661-2.639
c0.43-1.96,2.681-3.202,2.936-3.338l0.772,1.454c-0.508,0.271-1.876,1.204-2.101,2.239c-0.092,0.425,0.026,0.845,0.362,1.283
c0.341,0.447,0.688,0.867,1.017,1.269c1.604,1.95,3.261,3.966,1.441,6.243c-0.581,0.728-1.244,1.19-1.829,1.599
c-1.246,0.87-2.069,1.445-1.701,4.28L30.185,65.251z M24.476,60.15c-0.498-3.825,1.037-4.896,2.391-5.842
c0.542-0.378,1.054-0.734,1.485-1.276c0.907-1.135,0.363-1.996-1.424-4.17c-0.342-0.416-0.7-0.852-1.055-1.314
c-0.63-0.824-0.859-1.737-0.661-2.64c0.199-0.909,0.79-1.664,1.393-2.226c-2.809-1.872-2.855-4.946-2.855-4.991h1.646h-0.823
l0.823-0.005c0.001,0.142,0.071,2.605,2.665,3.933c0.039-0.021,0.153,0.075,0.153,0.075c0.897,0.428,2.079,0.721,3.639,0.752
c6.379-0.128,6.453-4.563,6.453-4.755h1.646c0,0.016-0.006,0.413-0.14,1.004c0.722-0.037,1.959-0.263,2.47-0.519
c-0.36-1.032-2.216-4.104-3.907-6.63l0.244-1.153l0.581-0.368c-0.252-1.127-0.806-3.661-1.078-5.328
c-5.859-0.881-10.078-0.201-12.548,2.033c-2.386,2.157-2.244,5.016-2.237,5.136l-1.642,0.121c-0.031-0.38-0.297-0.778-0.617-0.928
c-0.284-0.133-0.617-0.067-0.991,0.195c-0.584,0.413-0.792,0.936-0.655,1.646c0.243,1.262,1.494,2.646,2.471,3.25
c0.137-0.083,0.236-0.137,0.279-0.16l0.215,0.406c0.048,0.017,0.102,0.191,0.102,0.191l0.454,0.856
c-0.182,0.097-0.473,0.278-0.782,0.519c0,0-0.058,0.066-0.077,0.062c-0.535,0.428-1.102,1.023-1.24,1.658
c-0.092,0.426,0.026,0.846,0.361,1.285c0.341,0.446,0.688,0.867,1.018,1.269c1.603,1.949,3.259,3.964,1.441,6.241
c-0.582,0.728-1.245,1.19-1.83,1.6c-1.246,0.87-2.069,1.444-1.7,4.279l-1.633,0.213c-0.498-3.826,1.037-4.896,2.391-5.842
c0.541-0.378,1.053-0.735,1.486-1.277c0.904-1.134,0.36-1.995-1.427-4.168c-0.342-0.416-0.7-0.853-1.055-1.316
c-0.629-0.825-0.857-1.737-0.66-2.64c0.188-0.854,0.722-1.573,1.285-2.123c-1.237-0.933-2.423-2.471-2.716-3.992
c-0.26-1.344,0.21-2.516,1.322-3.301c0.864-0.611,1.802-0.732,2.636-0.344c0.066,0.03,0.129,0.063,0.191,0.1
c0.322-1.246,1.014-2.798,2.479-4.134c2.843-2.591,7.506-3.422,13.866-2.472c2.637-3.356,0.348-5.941-0.428-6.66
c-2.225-2.067-6.116-2.676-8.784-0.119l-1.374-0.424c-0.45-2.123-2.957-3.178-5.154-3.457c-3.329-0.417-7.678,0.653-9.351,4.017
c-2.24,4.505,0.714,6.775,0.744,6.798l-0.536,1.481c-3.291-0.177-5.506,0.357-6.576,1.599c-1.38,1.599-0.814,4.36-0.36,6.58
c0.385,1.881,0.689,3.367-0.074,4.262c-1.182,1.387-2.969,1.769-4.556,0.97c-1.34-0.675-2.081-1.984-1.804-3.185l1.604,0.371
c-0.102,0.444,0.302,1.021,0.941,1.344c0.342,0.171,1.534,0.638,2.563-0.568c0.241-0.283-0.089-1.898-0.287-2.863
c-0.526-2.572-1.181-5.773,0.728-7.985c1.201-1.394,3.201-2.117,6.084-2.196c-0.891-1.452-1.64-3.93,0.056-7.339
c2.031-4.084,7.142-5.418,11.033-4.917c2.924,0.371,5.152,1.69,6.124,3.55c3.286-2.304,7.567-1.477,10.101,0.877
c2.02,1.876,3.364,5.275,0.674,8.79c0.277,1.952,1.246,6.197,1.257,6.242l-0.363,0.879l-0.359,0.228
c4.242,6.423,3.866,7.153,3.661,7.552c-0.375,0.73-1.675,1.153-2.877,1.336c1.418,0.983,2.757,2.646,2.659,5.126
c-0.078,2.006-1.592,3.617-3.057,5.175c-1.937,2.061-3.07,3.405-2.016,5.004l-1.375,0.906c-1.823-2.766,0.31-5.035,2.191-7.037
c1.257-1.338,2.557-2.721,2.611-4.112c0.108-2.769-2.197-3.966-3.157-4.343c-1.015,1.511-2.966,3.008-6.718,3.145v0.011
c-0.096,0-0.189-0.001-0.283-0.003c-0.094,0.002-0.188,0.003-0.283,0.003v-0.011c-1.332-0.048-2.438-0.269-3.354-0.6
c-0.576,0.434-1.244,1.082-1.395,1.778c-0.093,0.425,0.025,0.845,0.361,1.284c0.342,0.447,0.688,0.868,1.018,1.27
c1.603,1.949,3.259,3.965,1.439,6.242c-0.58,0.728-1.244,1.19-1.829,1.599c-1.246,0.87-2.069,1.445-1.7,4.279L24.476,60.15z
M5.457,42.863c-0.305,0-0.621-0.013-0.951-0.037l0.121-1.641c1.847,0.134,3.15-0.171,3.892-0.91
c1.213-1.21,1.039-3.611,0.871-5.934c-0.079-1.104-0.155-2.148-0.09-3.101c0.146-2.149,1.784-4.662,8.723-4.395L17.96,28.49
c-3.1-0.114-6.842,0.284-7.018,2.862c-0.058,0.838,0.014,1.825,0.089,2.871c0.186,2.566,0.396,5.475-1.35,7.217
C8.726,42.395,7.338,42.863,5.457,42.863z M29.865,32.328l-0.621-1.524l5.17-2.109l0.614-0.003l3.086,1.225l-0.607,1.529
l-2.778-1.102L29.865,32.328z M19.314,25.852c-0.91,0-1.826-0.459-2.441-1.283c-0.887-1.187-0.828-2.714,0.149-3.891l1.266,1.051
c-0.704,0.849-0.303,1.579-0.097,1.854c0.336,0.448,0.966,0.778,1.512,0.544c0.96-0.414,1.107-0.628,1.918-1.819
c0.443-0.65,1.051-1.541,2.038-2.816c1.016-1.312,2.184-2.004,3.472-2.055c2.058-0.094,3.574,1.559,3.639,1.63l-1.219,1.107
l0.609-0.554l-0.607,0.556c-0.012-0.011-1.09-1.126-2.367-1.095c-0.775,0.034-1.524,0.512-2.226,1.418
c-0.956,1.235-1.547,2.102-1.979,2.735c-0.868,1.274-1.228,1.802-2.628,2.404C20.019,25.782,19.667,25.852,19.314,25.852z"/>
</g>
<g id="XMLID_11_">
<path id="XMLID_12_" fill="#213654" d="M34.195,32.441c0-0.503,0.463-0.911,1.033-0.911s1.033,0.408,1.033,0.911
c0,0.503-0.463,0.911-1.033,0.911S34.195,32.944,34.195,32.441"/>
</g>
<g id="XMLID_13_">
<path id="XMLID_14_" fill="#213654" d="M69.862,50.537c-0.047,0-0.094,0-0.142-0.002l-1.418-0.026L44.59,50.51v-1.426l25.162,0.027
c0.772-0.012,1.068-0.171,1.119-0.285c0.074-0.167-0.056-0.563-0.394-0.913l-4.377-4.139c-1.043-1.025-2.743-1.046-3.803-0.055
l-3.212,3.01c-1.606,1.506-4.201,1.488-5.788-0.036l-3.071-2.951c-1.033-0.992-2.698-1.08-3.641-0.197l-0.975-1.039
c1.505-1.409,4.017-1.315,5.602,0.209l3.072,2.951c1.05,1.007,2.767,1.019,3.826,0.024l3.212-3.01c0.71-0.666,1.61-1.031,2.53-1.1
l-7.112-7.095l-3.018,2.734c-0.615,0.532-0.96,1.282-0.96,2.07c0,1.504,1.237,2.728,2.759,2.728c1.398,0,2.375-0.81,2.375-1.97
c0.007-0.102,0.052-0.603-0.312-1.005c-0.355-0.393-1.03-0.6-1.949-0.6v-1.426c1.37,0,2.391,0.369,3.031,1.098
c0.771,0.877,0.665,1.908,0.65,2.023c0.006,1.845-1.628,3.305-3.795,3.305c-2.308,0-4.185-1.863-4.185-4.153
c0-1.202,0.525-2.346,1.441-3.138l3.508-3.178l0.981,0.023l9.296,9.272l-0.023,0.024c0.193,0.133,0.377,0.284,0.55,0.453
l4.39,4.151c0.759,0.783,1.023,1.756,0.697,2.499C71.946,49.924,71.358,50.537,69.862,50.537z"/>
</g>
<g id="XMLID_15_">
<path id="XMLID_16_" fill="#213654" d="M58.721,60.635c-0.793-2.043-1.708-3.035-1.717-3.045c-0.073-0.081-1.451-1.592-3.732-1.491
l-0.08-1.645c3.064-0.155,4.947,1.936,5.026,2.024c0.025,0.026,1.116,1.186,2.038,3.561L58.721,60.635z"/>
</g>
<g id="XMLID_17_">
<path id="XMLID_18_" fill="#213654" d="M43.486,21.351c-0.696,0-1.263-0.585-1.263-1.304c0-0.72,0.566-1.305,1.263-1.305
c0.695,0,1.261,0.585,1.261,1.305C44.747,20.766,44.182,21.351,43.486,21.351z"/>
</g>
<g id="XMLID_20_">
<path id="XMLID_21_" fill="#213654" d="M61.776,17.014c-0.33,0-0.596-0.285-0.596-0.636c0-0.353,0.266-0.639,0.596-0.639
c0.327,0,0.594,0.286,0.594,0.639C62.369,16.729,62.103,17.014,61.776,17.014"/>
</g>
<g id="XMLID_22_">
<path id="XMLID_23_" fill="#213654" d="M70.143,25.702c-0.751,0-1.361-0.628-1.361-1.399s0.61-1.399,1.361-1.399
c0.749,0,1.357,0.627,1.357,1.399S70.892,25.702,70.143,25.702z M70.143,24.147c-0.105,0-0.197,0.072-0.197,0.155
c0.001,0.169,0.391,0.173,0.391,0C70.336,24.219,70.247,24.147,70.143,24.147z M61.871,17.871c-0.75,0-1.359-0.627-1.359-1.398
c0-0.773,0.609-1.4,1.359-1.4c0.749,0,1.358,0.628,1.358,1.4C63.23,17.244,62.62,17.871,61.871,17.871z M61.871,16.316
c-0.107,0-0.196,0.071-0.196,0.156c0,0.165,0.392,0.167,0.392,0C62.067,16.388,61.977,16.316,61.871,16.316z"/>
</g>
<g id="XMLID_28_">
<path id="XMLID_29_" fill="#213654" d="M63.998,31.065c-0.328,0-0.596-0.284-0.596-0.638c0-0.351,0.268-0.636,0.596-0.636
c0.328,0,0.594,0.285,0.594,0.636C64.592,30.781,64.326,31.065,63.998,31.065"/>
</g>
<g id="XMLID_30_">
<path id="XMLID_31_" fill="#213654" d="M63.998,31.732c-0.696,0-1.263-0.585-1.263-1.304c0-0.72,0.566-1.305,1.263-1.305
c0.696,0,1.262,0.585,1.262,1.305C65.26,31.147,64.694,31.732,63.998,31.732z M63.998,30.178c-0.159,0-0.293,0.115-0.293,0.251
c0,0.271,0.585,0.272,0.585,0C64.29,30.293,64.156,30.178,63.998,30.178z"/>
</g>
<g id="XMLID_34_">
<path id="XMLID_35_" fill="#213654" d="M59.842,22.463c-0.329,0-0.596-0.284-0.596-0.637c0-0.351,0.267-0.637,0.596-0.637
c0.328,0,0.595,0.285,0.595,0.637C60.437,22.179,60.17,22.463,59.842,22.463"/>
</g>
<g id="XMLID_36_">
<path id="XMLID_37_" fill="#213654" d="M59.842,23.13c-0.696,0-1.263-0.585-1.263-1.304c0-0.719,0.566-1.303,1.263-1.303
s1.262,0.584,1.262,1.303C61.104,22.545,60.538,23.13,59.842,23.13z M59.842,21.575c-0.159,0-0.293,0.115-0.293,0.251
c0,0.273,0.585,0.274,0.585,0C60.134,21.69,60,21.575,59.842,21.575z"/>
</g>
<g id="XMLID_40_">
<path id="XMLID_41_" fill="#213654" d="M49.209,32.226c-0.328,0-0.595-0.286-0.595-0.638c0-0.352,0.267-0.637,0.595-0.637
c0.328,0,0.596,0.285,0.596,0.637C49.805,31.939,49.537,32.226,49.209,32.226"/>
</g>
<g id="XMLID_42_">
<path id="XMLID_43_" fill="#213654" d="M49.209,32.893c-0.696,0-1.262-0.585-1.262-1.305c0-0.72,0.565-1.305,1.262-1.305
s1.263,0.585,1.263,1.305C50.472,32.308,49.905,32.893,49.209,32.893z M49.209,31.338c-0.158,0-0.292,0.114-0.292,0.25
c0,0.271,0.585,0.271,0.585,0C49.502,31.452,49.368,31.338,49.209,31.338z"/>
</g>
<g id="XMLID_46_">
<path id="XMLID_47_" fill="#213654" d="M53.801,27.296c-0.33,0-0.596-0.285-0.596-0.637c0-0.351,0.266-0.636,0.596-0.636
c0.328,0,0.595,0.284,0.595,0.636C54.396,27.011,54.129,27.296,53.801,27.296"/>
</g>
<g id="XMLID_48_">
<path id="XMLID_49_" fill="#213654" d="M53.801,27.963c-0.696,0-1.263-0.585-1.263-1.304c0-0.719,0.566-1.304,1.263-1.304
s1.262,0.585,1.262,1.304C55.063,27.378,54.497,27.963,53.801,27.963z M53.801,26.408c-0.159,0-0.293,0.115-0.293,0.251
c0,0.271,0.585,0.274,0.585,0C54.093,26.523,53.959,26.408,53.801,26.408z"/>
</g>
<g id="XMLID_52_">
<path id="XMLID_53_" fill="#213654" d="M43.29,19.365l10.143-4.444l0.385-0.054l7.977,1.116l-0.094,0.674l0.579-0.619l8.377,7.845
L70,25.095l-8.679-2.136l3.292,7.111l-0.882,0.973l-9.77-3.426l-4.181,4.392l-1.154-0.175l-5.636-11.392l0.489-0.242l0.106-0.159
M45.373,22.042l4.079,8.246l3.212-3.373L45.373,22.042z M55.202,26.542l7.41,2.599l-2.899-6.264L55.202,26.542z M45.083,20.135
l8.64,5.773l5.663-4.602l0.619-0.139l0.486,0.119l0.046-0.021l0.017,0.037l7.14,1.757l-6.091-5.703l-0.005,0.039l-7.777-1.089
L45.083,20.135z"/>
</g>
<g id="XMLID_58_">
<path id="XMLID_59_" fill="#213654" d="M65.19,67.118l-0.801-1.438c3.859-2.15,7.216-4.971,9.976-8.38
c4.899-6.054,7.598-13.684,7.598-21.486c0-18.841-15.324-34.168-34.161-34.168c-5.173,0-10.14,1.125-14.763,3.346l-0.713-1.484
C37.173,1.181,42.379,0,47.801,0c19.744,0,35.808,16.066,35.808,35.815c0,8.178-2.829,16.177-7.965,22.521
C72.751,61.91,69.235,64.865,65.19,67.118z M54.461,66.359L54.1,64.754c13.359-3.012,23.056-15.153,23.056-28.872
c0-16.319-13.274-29.596-29.59-29.596c-5.931,0-11.651,1.747-16.544,5.054L30.1,9.977C35.266,6.485,41.305,4.64,47.566,4.64
c17.224,0,31.236,14.016,31.236,31.242C78.802,50.363,68.566,63.181,54.461,66.359z"/>
</g>
<g id="XMLID_62_">
<polygon id="XMLID_63_" fill="#213654" points="59.634,22.086 53.083,16.162 45.122,11.763 45.812,10.515 53.978,15.05
60.589,21.029 "/>
</g>
<g id="XMLID_64_">
<path id="XMLID_65_" fill="#213654" d="M53.59,16.806c-0.696,0-1.263-0.584-1.263-1.303c0-0.72,0.567-1.305,1.263-1.305
c0.696,0,1.262,0.585,1.262,1.305C54.852,16.222,54.286,16.806,53.59,16.806z M53.59,15.251c-0.159,0-0.293,0.115-0.293,0.252
c0.001,0.271,0.586,0.272,0.585,0C53.882,15.366,53.748,15.251,53.59,15.251z"/>
</g>
<g id="XMLID_68_">
<path id="XMLID_69_" fill="#213654" d="M45.555,12.443c-0.695,0-1.262-0.584-1.262-1.303c0-0.72,0.566-1.305,1.262-1.305
c0.695,0,1.262,0.585,1.262,1.305C46.817,11.859,46.25,12.443,45.555,12.443z"/>
</g>
</svg>


View File

@ -1,3 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" x="0px" y="0px" viewBox="0 0 233 133.3" xml:space="preserve"><style type="text/css">
.st0{fill:#AA0E20;}
</style><polygon class="st0" points="137.3 74.6 116.5 114.9 89.6 63.9 89.7 63.7 84.2 53.8 78.2 63.9 116.5 133.3 154.8 63.9 148.8 53.8 "/><polygon class="st0" points="116.5 0 111.8 7.5 90.1 43.9 95.6 54.3 116.5 21 137 54.3 142.6 43.4 124.4 12.9 "/><path class="st0" d="M116.5 19.8C52.2 19.8 0 67.7 0 67.8c0 0 52.2 48 116.5 48s116.5-48 116.5-48C233 67.9 180.8 19.8 116.5 19.8zM116.5 105.2C66.4 105.2 9.8 67.8 9.8 67.8c0 0 56.6-37.4 106.7-37.4s106.7 36.5 106.7 37.4C223.2 68.7 166.6 105.2 116.5 105.2z"/></svg>

Remaining binary image changes (removed and added files) are not shown by the diff viewer.

Some files were not shown because too many files have changed in this diff.