Merge branch 'master' into ng-accounts-backup

Nicolas Gotchac 2016-11-14 20:31:10 +01:00
commit b9f3c4b266
614 changed files with 20356 additions and 7948 deletions

2
.gitignore vendored

@ -30,3 +30,5 @@
# Build artifacts # Build artifacts
out/ out/
.vscode

.gitlab-ci.yml

@ -1,31 +1,34 @@
stages: stages:
- build
- test - test
- js-build
- build
variables: variables:
GIT_DEPTH: "3" GIT_DEPTH: "3"
SIMPLECOV: "true" SIMPLECOV: "true"
RUST_BACKTRACE: "1" RUST_BACKTRACE: "1"
RUSTFLAGS: "-D warnings" RUSTFLAGS: ""
CARGOFLAGS: ""
NIGHTLY: "nigtly"
cache: cache:
key: "$CI_BUILD_NAME/$CI_BUILD_REF_NAME" key: "$CI_BUILD_STAGE/$CI_BUILD_REF_NAME"
untracked: true untracked: true
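
Note: the cache key above moves from per-job ($CI_BUILD_NAME) to per-stage ($CI_BUILD_STAGE). A minimal shell sketch of how the two keys expand, using hypothetical values for the CI variables:

# Hypothetical values; real runs substitute whatever GitLab provides.
CI_BUILD_NAME=linux-stable CI_BUILD_STAGE=build CI_BUILD_REF_NAME=master
echo "old key: $CI_BUILD_NAME/$CI_BUILD_REF_NAME"     # -> linux-stable/master (one cache per job)
echo "new key: $CI_BUILD_STAGE/$CI_BUILD_REF_NAME"    # -> build/master (shared across the whole stage)
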
linux-stable: linux-stable:
stage: build stage: build
image: ethcore/rust:stable image: ethcore/rust:stable
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- cargo build --release --verbose - cargo build --release $CARGOFLAGS
- strip target/release/parity - strip target/release/parity
- md5sum target/release/parity >> parity.md5 - md5sum target/release/parity > parity.md5
- sh scripts/deb-build.sh amd64 - sh scripts/deb-build.sh amd64
- cp target/release/parity deb/usr/bin/parity - cp target/release/parity deb/usr/bin/parity
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_amd64.deb" - dpkg-deb -b deb "parity_"$VER"_amd64.deb"
- md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5" - md5sum "parity_"$VER"_amd64.deb" > "parity_"$VER"_amd64.deb.md5"
- aws configure set aws_access_key_id $s3_key - aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret - aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity --body target/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity --body target/release/parity
@ -39,46 +42,16 @@ linux-stable:
paths: paths:
- target/release/parity - target/release/parity
name: "stable-x86_64-unknown-linux-gnu_parity" name: "stable-x86_64-unknown-linux-gnu_parity"
linux-stable-14.04:
stage: build
image: ethcore/rust-14.04:latest
only:
- master
- beta
- tags
- stable
script:
- cargo build --release --verbose
- strip target/release/parity
- md5sum target/release/parity >> parity.md5
- sh scripts/deb-build.sh amd64
- cp target/release/parity deb/usr/bin/parity
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
- md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity.md5 --body parity.md5
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
tags:
- rust
- rust-14.04
artifacts:
paths:
- target/release/parity
name: "stable-x86_64-unknown-ubuntu_14_04-gnu_parity"
linux-beta: linux-beta:
stage: build stage: build
image: ethcore/rust:beta image: ethcore/rust:beta
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- cargo build --release --verbose - cargo build --release $CARGOFLAGS
- strip target/release/parity - strip target/release/parity
tags: tags:
- rust - rust
@ -92,12 +65,12 @@ linux-nightly:
stage: build stage: build
image: ethcore/rust:nightly image: ethcore/rust:nightly
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- cargo build --release --verbose - cargo build --release $CARGOFLAGS
- strip target/release/parity - strip target/release/parity
tags: tags:
- rust - rust
@ -111,16 +84,16 @@ linux-centos:
stage: build stage: build
image: ethcore/rust-centos:latest image: ethcore/rust-centos:latest
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- export CXX="g++" - export CXX="g++"
- export CC="gcc" - export CC="gcc"
- cargo build --release --verbose - cargo build --release $CARGOFLAGS
- strip target/release/parity - strip target/release/parity
- md5sum target/release/parity >> parity.md5 - md5sum target/release/parity > parity.md5
- aws configure set aws_access_key_id $s3_key - aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret - aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity
@ -132,6 +105,39 @@ linux-centos:
paths: paths:
- target/release/parity - target/release/parity
name: "x86_64-unknown-centos-gnu_parity" name: "x86_64-unknown-centos-gnu_parity"
linux-i686:
stage: build
image: ethcore/rust-i686:latest
only:
- beta
- tags
- stable
- triggers
script:
- export HOST_CC=gcc
- export HOST_CXX=g++
- cargo build --target i686-unknown-linux-gnu --release $CARGOFLAGS
- strip target/i686-unknown-linux-gnu/release/parity
- md5sum target/i686-unknown-linux-gnu/release/parity > parity.md5
- sh scripts/deb-build.sh i386
- cp target/i686-unknown-linux-gnu/release/parity deb/usr/bin/parity
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_i386.deb"
- md5sum "parity_"$VER"_i386.deb" > "parity_"$VER"_i386.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/i686-unknown-linux-gnu/parity --body target/i686-unknown-linux-gnu/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/i686-unknown-linux-gnu/parity.md5 --body parity.md5
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/i686-unknown-linux-gnu/"parity_"$VER"_i386.deb" --body "parity_"$VER"_i386.deb"
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/i686-unknown-linux-gnu/"parity_"$VER"_i386.deb.md5" --body "parity_"$VER"_i386.deb.md5"
tags:
- rust
- rust-i686
artifacts:
paths:
- target/i686-unknown-linux-gnu/release/parity
name: "i686-unknown-linux-gnu"
allow_failure: true
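
Note: the new i686 job splits the toolchain into HOST_CC/HOST_CXX (for code that runs on the build machine, such as build scripts) and the cross toolchain for the target. A minimal sketch of the pattern, based on the job above and not a drop-in replacement for it:

# Cross-build sketch (illustrative only).
export HOST_CC=gcc      # compiles helpers/build scripts that execute on the CI machine itself
export HOST_CXX=g++
cargo build --target i686-unknown-linux-gnu --release $CARGOFLAGS
strip target/i686-unknown-linux-gnu/release/parity   # binaries land under the target-triple directory
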
linux-armv7: linux-armv7:
stage: build stage: build
image: ethcore/rust-armv7:latest image: ethcore/rust-armv7:latest
@ -139,20 +145,25 @@ linux-armv7:
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- export CC=arm-linux-gnueabihf-gcc
- export CXX=arm-linux-gnueabihf-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo - rm -rf .cargo
- mkdir -p .cargo - mkdir -p .cargo
- echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config - echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config
- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config - echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
- cat .cargo/config - cat .cargo/config
- cargo build --target armv7-unknown-linux-gnueabihf --release --verbose - cargo build --target armv7-unknown-linux-gnueabihf --release $CARGOFLAGS
- arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity - arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity
- md5sum target/armv7-unknown-linux-gnueabihf/release/parity >> parity.md5 - md5sum target/armv7-unknown-linux-gnueabihf/release/parity > parity.md5
- sh scripts/deb-build.sh armhf - sh scripts/deb-build.sh armhf
- cp target/armv7-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity - cp target/armv7-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_armhf.deb" - dpkg-deb -b deb "parity_"$VER"_armhf.deb"
- md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5" - md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5"
- aws configure set aws_access_key_id $s3_key - aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret - aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity --body target/armv7-unknown-linux-gnueabihf/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity --body target/armv7-unknown-linux-gnueabihf/release/parity
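
For reference, the rm/mkdir/echo sequence in the armv7 job above leaves a .cargo/config equivalent to this heredoc sketch (same content, different shell idiom):

# Equivalent to the echo pair above; heredoc form shown for readability.
rm -rf .cargo && mkdir -p .cargo
cat > .cargo/config <<'EOF'
[target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc"
EOF
cat .cargo/config    # the job prints it so the linker override is visible in the build log
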
@ -174,20 +185,25 @@ linux-arm:
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- export CC=arm-linux-gnueabihf-gcc
- export CXX=arm-linux-gnueabihf-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo - rm -rf .cargo
- mkdir -p .cargo - mkdir -p .cargo
- echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config - echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config
- echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config - echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config
- cat .cargo/config - cat .cargo/config
- cargo build --target arm-unknown-linux-gnueabihf --release --verbose - cargo build --target arm-unknown-linux-gnueabihf --release $CARGOFLAGS
- arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity - arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity
- md5sum target/arm-unknown-linux-gnueabihf/release/parity >> parity.md5 - md5sum target/arm-unknown-linux-gnueabihf/release/parity > parity.md5
- sh scripts/deb-build.sh armhf - sh scripts/deb-build.sh armhf
- cp target/arm-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity - cp target/arm-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_armhf.deb" - dpkg-deb -b deb "parity_"$VER"_armhf.deb"
- md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5" - md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5"
- aws configure set aws_access_key_id $s3_key - aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret - aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity --body target/arm-unknown-linux-gnueabihf/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity --body target/arm-unknown-linux-gnueabihf/release/parity
@ -209,15 +225,20 @@ linux-armv6:
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- export CC=arm-linux-gnueabi-gcc
- export CXX=arm-linux-gnueabi-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo - rm -rf .cargo
- mkdir -p .cargo - mkdir -p .cargo
- echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config - echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config
- echo "linker= \"arm-linux-gnueabi-gcc\"" >> .cargo/config - echo "linker= \"arm-linux-gnueabi-gcc\"" >> .cargo/config
- cat .cargo/config - cat .cargo/config
- cargo build --target arm-unknown-linux-gnueabi --release --verbose - cargo build --target arm-unknown-linux-gnueabi --release $CARGOFLAGS
- arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity - arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity
- md5sum target/arm-unknown-linux-gnueabi/release/parity >> parity.md5 - md5sum target/arm-unknown-linux-gnueabi/release/parity > parity.md5
- aws configure set aws_access_key_id $s3_key - aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret - aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity --body target/arm-unknown-linux-gnueabi/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity --body target/arm-unknown-linux-gnueabi/release/parity
@ -237,20 +258,25 @@ linux-aarch64:
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- export CC=aarch64-linux-gnu-gcc
- export CXX=aarch64-linux-gnu-g++
- export HOST_CC=gcc
- export HOST_CXX=g++
- rm -rf .cargo - rm -rf .cargo
- mkdir -p .cargo - mkdir -p .cargo
- echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config - echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config
- echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config - echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config
- cat .cargo/config - cat .cargo/config
- cargo build --target aarch64-unknown-linux-gnu --release --verbose - cargo build --target aarch64-unknown-linux-gnu --release $CARGOFLAGS
- aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity - aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity
- md5sum target/aarch64-unknown-linux-gnu/release/parity >> parity.md5 - md5sum target/aarch64-unknown-linux-gnu/release/parity > parity.md5
- sh scripts/deb-build.sh arm64 - sh scripts/deb-build.sh arm64
- cp target/aarch64-unknown-linux-gnu/release/parity deb/usr/bin/parity - cp target/aarch64-unknown-linux-gnu/release/parity deb/usr/bin/parity
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_arm64.deb" - dpkg-deb -b deb "parity_"$VER"_arm64.deb"
- md5sum "parity_"$VER"_arm64.deb" >> "parity_"$VER"_arm64.deb.md5" - md5sum "parity_"$VER"_arm64.deb" > "parity_"$VER"_arm64.deb.md5"
- aws configure set aws_access_key_id $s3_key - aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret - aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity --body target/aarch64-unknown-linux-gnu/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity --body target/aarch64-unknown-linux-gnu/release/parity
@ -268,13 +294,14 @@ linux-aarch64:
darwin: darwin:
stage: build stage: build
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- cargo build --release --verbose - cargo build --release $CARGOFLAGS
- md5sum target/release/parity >> parity.md5 - rm -rf parity.md5
- md5sum target/release/parity > parity.md5
- aws configure set aws_access_key_id $s3_key - aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret - aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-apple-darwin/parity --body target/release/parity - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-apple-darwin/parity --body target/release/parity
@ -286,22 +313,27 @@ darwin:
- target/release/parity - target/release/parity
name: "x86_64-apple-darwin_parity" name: "x86_64-apple-darwin_parity"
windows: windows:
cache:
key: "%CI_BUILD_STAGE%/%CI_BUILD_REF_NAME%"
untracked: true
stage: build stage: build
only: only:
- master
- beta - beta
- tags - tags
- stable - stable
- triggers
script: script:
- set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt - set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt
- set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64 - set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
- set RUST_BACKTRACE=1 - set RUST_BACKTRACE=1
- set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off -D warnings - set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off
- rustup default stable-x86_64-pc-windows-msvc - rustup default stable-x86_64-pc-windows-msvc
- cargo build --release --verbose - cargo build --release %CARGOFLAGS%
- curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll - curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
- curl -sL --url "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe - curl -sL --url "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe
- signtool sign /f %keyfile% /p %certpass% target\release\parity.exe - signtool sign /f %keyfile% /p %certpass% target\release\parity.exe
- msbuild windows\ptray\ptray.vcxproj /p:Platform=x64 /p:Configuration=Release
- signtool sign /f %keyfile% /p %certpass% windows\ptray\x64\release\ptray.exe
- cd nsis - cd nsis
- makensis.exe installer.nsi - makensis.exe installer.nsi
- copy installer.exe InstallParity.exe - copy installer.exe InstallParity.exe
@ -333,56 +365,103 @@ windows:
- target/release/parity.pdb - target/release/parity.pdb
- nsis/InstallParity.exe - nsis/InstallParity.exe
name: "x86_64-pc-windows-msvc_parity" name: "x86_64-pc-windows-msvc_parity"
test-linux: test-darwin:
stage: test stage: test
only:
- triggers
before_script: before_script:
- git submodule update --init --recursive - git submodule update --init --recursive
script: script:
- export RUST_BACKTRACE=1 - export RUST_BACKTRACE=1
- ./test.sh --verbose - ./test.sh $CARGOFLAGS --no-release
tags: tags:
- rust-test - osx
dependencies: allow_failure: true
- linux-stable test-windows:
js-release:
stage: build
image: ethcore/javascript:latest
only:
- master
- beta
- tags
- stable
before_script:
- ./js/scripts/install-deps.sh
script:
- ./js/scripts/build.sh
- ./js/scripts/release.sh
tags:
- javascript
js-lint:
stage: test stage: test
image: ethcore/javascript:latest only:
- triggers
before_script:
- git submodule update --init --recursive
script:
- set RUST_BACKTRACE=1
- cargo test --features json-tests -p rlp -p ethash -p ethcore -p ethcore-bigint -p ethcore-dapps -p ethcore-rpc -p ethcore-signer -p ethcore-util -p ethcore-network -p ethcore-io -p ethkey -p ethstore -p ethsync -p ethcore-ipc -p ethcore-ipc-tests -p ethcore-ipc-nano -p parity %CARGOFLAGS% --verbose --release
tags:
- rust-windows
allow_failure: true
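
The test-windows command above names every workspace crate with -p; a trimmed sketch of the same selection pattern, with two crates standing in for the full list:

# Same idea with only two package selectors; the real job enumerates every workspace member.
cargo test --features json-tests -p ethkey -p ethstore $CARGOFLAGS --verbose --release
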
test-rust-stable:
stage: test
image: ethcore/rust:stable
before_script:
- git submodule update --init --recursive
- export JS_FILES_MODIFIED=$(git --no-pager diff --name-only CI_BUILD_REF CI_BUILD_REF@{1} | grep \.js | wc -l)
- echo $JS_FILES_MODIFIED
- if [ -z $JS_FILES_MODIFIED ]; then echo "skip js test"; fi
script:
- export RUST_BACKTRACE=1
- echo $JS_FILES_MODIFIED
- if [ -z $JS_FILES_MODIFIED ]; then echo "skip js test"; else ./test.sh $CARGOFLAGS --no-release; fi
tags:
- rust
- rust-stable
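
The test-rust-* jobs gate the full test run on whether any .js file changed between the two most recent commits. A standalone sketch of that check; HEAD~1..HEAD stands in here for the job's CI_BUILD_REF references, and the numeric test is an assumption (the job itself uses -z):

# Sketch only: count .js paths changed by the latest commit, then decide whether to run the tests.
JS_FILES_MODIFIED=$(git --no-pager diff --name-only HEAD~1 HEAD | grep '\.js' | wc -l)
if [ "$JS_FILES_MODIFIED" -eq 0 ]; then
  echo "skip js test"                      # nothing JS-related changed
else
  ./test.sh $CARGOFLAGS --no-release       # JS touched; run the combined test script
fi
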
test-rust-beta:
stage: test
only:
- triggers
image: ethcore/rust:beta
before_script:
- git submodule update --init --recursive
- export JS_FILES_MODIFIED=$(git --no-pager diff --name-only CI_BUILD_REF CI_BUILD_REF@{1} | grep \.js | wc -l)
- echo $JS_FILES_MODIFIED
- if [ -z $JS_FILES_MODIFIED ]; then echo "skip js test"; fi
script:
- export RUST_BACKTRACE=1
- echo $JS_FILES_MODIFIED
- if [ -z $JS_FILES_MODIFIED ]; then echo "skip js test"; else ./test.sh $CARGOFLAGS --no-release; fi
tags:
- rust
- rust-beta
allow_failure: true
test-rust-nightly:
stage: test
only:
- triggers
image: ethcore/rust:nightly
before_script:
- git submodule update --init --recursive
- export JS_FILES_MODIFIED=$(git --no-pager diff --name-only CI_BUILD_REF CI_BUILD_REF@{1} | grep \.js | wc -l)
- echo $JS_FILES_MODIFIED
- if [ -z $JS_FILES_MODIFIED ]; then echo "skip js test"; fi
script:
- export RUST_BACKTRACE=1
- echo $JS_FILES_MODIFIED
- if [ -z $JS_FILES_MODIFIED ]; then echo "skip js test"; else ./test.sh $CARGOFLAGS --no-release; fi
tags:
- rust
- rust-nightly
allow_failure: true
js-tests:
stage: test
image: ethcore/rust:stable
before_script: before_script:
- ./js/scripts/install-deps.sh - ./js/scripts/install-deps.sh
script: script:
- ./js/scripts/lint.sh - ./js/scripts/lint.sh
tags:
- javascript-test
js-test:
stage: test
image: ethcore/javascript:latest
before_script:
- ./js/scripts/install-deps.sh
script:
- ./js/scripts/test.sh - ./js/scripts/test.sh
tags:
- javascript-test
js-pack:
stage: test
image: ethcore/javascript:latest
before_script:
- ./js/scripts/install-deps.sh
script:
- ./js/scripts/build.sh - ./js/scripts/build.sh
tags: tags:
- javascript-test - javascript-test
js-release:
stage: js-build
only:
- master
- beta
- stable
image: ethcore/rust:stable
before_script:
- if [[ $NIGHTLY != "master" ]]; then ./js/scripts/install-deps.sh; fi
script:
- if [[ $NIGHTLY != "master" ]]; then ./js/scripts/build.sh; fi
- if [[ $NIGHTLY != "master" ]]; then ./js/scripts/release.sh; fi
tags:
- javascript
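
The js-release job repeats the same NIGHTLY guard on each line; a compact sketch of the equivalent logic as one conditional (NIGHTLY comes from the variables: block at the top of the file):

# Equivalent grouping of the three guarded steps in js-release (sketch, not the job's literal YAML).
if [ "$NIGHTLY" != "master" ]; then
  ./js/scripts/install-deps.sh
  ./js/scripts/build.sh
  ./js/scripts/release.sh
fi
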

.travis.yml

@ -17,7 +17,7 @@ matrix:
include: include:
- rust: stable - rust: stable
env: RUN_TESTS="true" TEST_OPTIONS="--no-release" env: RUN_TESTS="true" TEST_OPTIONS="--no-release"
- rust: beta - rust: stable
env: RUN_COVERAGE="true" env: RUN_COVERAGE="true"
- rust: stable - rust: stable
env: RUN_DOCS="true" env: RUN_DOCS="true"
@ -32,6 +32,7 @@ env:
- RUN_DOCS="false" - RUN_DOCS="false"
- TEST_OPTIONS="" - TEST_OPTIONS=""
- RUSTFLAGS="-D warnings" - RUSTFLAGS="-D warnings"
- TRAVIS_NODE_VERSION="6"
# GH_TOKEN for documentation # GH_TOKEN for documentation
- secure: bumJASbZSU8bxJ0EyPUJmu16AiV9EXOpyOj86Jlq/Ty9CfwGqsSXt96uDyE+OUJf34RUFQMsw0nk37/zC4lcn6kqk2wpuH3N/o85Zo/cVZY/NusBWLQqtT5VbYWsV+u2Ua4Tmmsw8yVYQhYwU2ZOejNpflL+Cs9XGgORp1L+/gMRMC2y5Se6ZhwnKPQlRJ8LGsG1dzjQULxzADIt3/zuspNBS8a2urJwlHfGMkvHDoUWCviP/GXoSqw3TZR7FmKyxE19I8n9+iSvm9+oZZquvcgfUxMHn8Gq/b44UbPvjtFOg2yam4xdWXF/RyWCHdc/R9EHorSABeCbefIsm+zcUF3/YQxwpSxM4IZEeH2rTiC7dcrsKw3XsO16xFQz5YI5Bay+CT/wTdMmJd7DdYz7Dyf+pOvcM9WOf/zorxYWSBOMYy0uzbusU2iyIghQ82s7E/Ahg+WARtPgkuTLSB5aL1oCTBKHqQscMr7lo5Ti6RpWLxEdTQMBznc+bMr+6dEtkEcG9zqc6cE9XX+ox3wTU6+HVMfQ1ltCntJ4UKcw3A6INEbw9wgocQa812CIASQ2fE+SCAbz6JxBjIAlFUnD1lUB7S8PdMPwn9plfQgKQ2A5YZqg6FnBdf0rQXIJYxQWKHXj/rBHSUCT0tHACDlzTA+EwWggvkP5AGIxRxm8jhw= - secure: bumJASbZSU8bxJ0EyPUJmu16AiV9EXOpyOj86Jlq/Ty9CfwGqsSXt96uDyE+OUJf34RUFQMsw0nk37/zC4lcn6kqk2wpuH3N/o85Zo/cVZY/NusBWLQqtT5VbYWsV+u2Ua4Tmmsw8yVYQhYwU2ZOejNpflL+Cs9XGgORp1L+/gMRMC2y5Se6ZhwnKPQlRJ8LGsG1dzjQULxzADIt3/zuspNBS8a2urJwlHfGMkvHDoUWCviP/GXoSqw3TZR7FmKyxE19I8n9+iSvm9+oZZquvcgfUxMHn8Gq/b44UbPvjtFOg2yam4xdWXF/RyWCHdc/R9EHorSABeCbefIsm+zcUF3/YQxwpSxM4IZEeH2rTiC7dcrsKw3XsO16xFQz5YI5Bay+CT/wTdMmJd7DdYz7Dyf+pOvcM9WOf/zorxYWSBOMYy0uzbusU2iyIghQ82s7E/Ahg+WARtPgkuTLSB5aL1oCTBKHqQscMr7lo5Ti6RpWLxEdTQMBznc+bMr+6dEtkEcG9zqc6cE9XX+ox3wTU6+HVMfQ1ltCntJ4UKcw3A6INEbw9wgocQa812CIASQ2fE+SCAbz6JxBjIAlFUnD1lUB7S8PdMPwn9plfQgKQ2A5YZqg6FnBdf0rQXIJYxQWKHXj/rBHSUCT0tHACDlzTA+EwWggvkP5AGIxRxm8jhw=
- KCOV_CMD="./kcov-master/tmp/usr/local/bin/kcov" - KCOV_CMD="./kcov-master/tmp/usr/local/bin/kcov"
@ -41,6 +42,7 @@ cache:
directories: directories:
- $TRAVIS_BUILD_DIR/target - $TRAVIS_BUILD_DIR/target
- $TRAVIS_BUILD_DIR/kcov-master - $TRAVIS_BUILD_DIR/kcov-master
- $TRAVIS_BUILD_DIR/js/node_modules
- $HOME/.cargo - $HOME/.cargo
addons: addons:
@ -64,9 +66,14 @@ install:
make && make install DESTDIR=../tmp && make && make install DESTDIR=../tmp &&
cd cd
) )
- nvm install $TRAVIS_NODE_VERSION && nvm use $TRAVIS_NODE_VERSION && ./js/scripts/install-deps.sh
script: script:
- if [ "$RUN_TESTS" = "true" ]; then ./test.sh $TEST_OPTIONS --verbose; fi - if [ "$RUN_TESTS" = "true" ]; then
./js/scripts/lint.sh &&
./js/scripts/test.sh &&
./test.sh $TEST_OPTIONS --verbose;
fi
- if [ "$RUN_COVERAGE" = "true" ]; then ./scripts/cov.sh "$KCOV_CMD"; fi - if [ "$RUN_COVERAGE" = "true" ]; then ./scripts/cov.sh "$KCOV_CMD"; fi
after_success: | after_success: |

259
Cargo.lock generated

@ -1,28 +1,28 @@
[root] [root]
name = "parity" name = "parity"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)", "ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.4.0", "ethcore 1.5.0",
"ethcore-dapps 1.4.0", "ethcore-dapps 1.5.0",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-io 1.4.0", "ethcore-io 1.5.0",
"ethcore-ipc 1.4.0", "ethcore-ipc 1.4.0",
"ethcore-ipc-codegen 1.4.0", "ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-hypervisor 1.2.0", "ethcore-ipc-hypervisor 1.2.0",
"ethcore-ipc-nano 1.4.0", "ethcore-ipc-nano 1.4.0",
"ethcore-ipc-tests 0.1.0", "ethcore-ipc-tests 0.1.0",
"ethcore-logger 1.4.0", "ethcore-logger 1.5.0",
"ethcore-rpc 1.4.0", "ethcore-rpc 1.5.0",
"ethcore-signer 1.4.0", "ethcore-signer 1.5.0",
"ethcore-stratum 1.4.0", "ethcore-stratum 1.4.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"ethsync 1.4.0", "ethsync 1.5.0",
"fdlimit 0.1.0", "fdlimit 0.1.0",
"hyper 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
"isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -145,15 +145,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "clippy" name = "clippy"
version = "0.0.90" version = "0.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"clippy_lints 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy_lints 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "clippy_lints" name = "clippy_lints"
version = "0.0.90" version = "0.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -185,7 +185,7 @@ version = "1.1.1"
source = "git+https://github.com/ethcore/rust-ctrlc.git#f4927770f89eca80ec250911eea3adcbf579ac48" source = "git+https://github.com/ethcore/rust-ctrlc.git#f4927770f89eca80ec250911eea3adcbf579ac48"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -194,7 +194,7 @@ name = "daemonize"
version = "0.2.2" version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -244,7 +244,7 @@ source = "git+https://github.com/ethcore/rust-secp256k1#a9a0b1be1f39560ca86e8fc8
dependencies = [ dependencies = [
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -274,22 +274,22 @@ dependencies = [
[[package]] [[package]]
name = "ethcore" name = "ethcore"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.4.0", "ethash 1.4.0",
"ethcore-bloom-journal 0.1.0", "ethcore-bloom-journal 0.1.0",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-io 1.4.0", "ethcore-io 1.5.0",
"ethcore-ipc 1.4.0", "ethcore-ipc 1.4.0",
"ethcore-ipc-codegen 1.4.0", "ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-nano 1.4.0", "ethcore-ipc-nano 1.4.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"ethjson 0.1.0", "ethjson 0.1.0",
"ethkey 0.2.0", "ethkey 0.2.0",
"ethstore 0.1.0", "ethstore 0.1.0",
@ -298,7 +298,7 @@ dependencies = [
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)", "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.0.7 (git+https://github.com/contain-rs/lru-cache)", "lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -307,11 +307,12 @@ dependencies = [
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"transient-hashmap 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "ethcore-bigint" name = "ethcore-bigint"
version = "0.1.1" version = "0.1.2"
dependencies = [ dependencies = [
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
@ -328,14 +329,14 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-dapps" name = "ethcore-dapps"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-rpc 1.4.0", "ethcore-rpc 1.5.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"fetch 0.1.0", "fetch 0.1.0",
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)", "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -351,6 +352,7 @@ dependencies = [
"serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_codegen 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde_codegen 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"zip 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)", "zip 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
@ -365,11 +367,11 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-io" name = "ethcore-io"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)", "mio 0.6.1 (git+https://github.com/carllerche/mio)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -379,7 +381,7 @@ name = "ethcore-ipc"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)", "nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -426,7 +428,7 @@ dependencies = [
"ethcore-ipc 1.4.0", "ethcore-ipc 1.4.0",
"ethcore-ipc-codegen 1.4.0", "ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-nano 1.4.0", "ethcore-ipc-nano 1.4.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)", "nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -434,10 +436,10 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-logger" name = "ethcore-logger"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -447,18 +449,19 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-network" name = "ethcore-network"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-io 1.4.0", "ethcore-io 1.5.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"ethcrypto 0.1.0", "ethcrypto 0.1.0",
"ethkey 0.2.0", "ethkey 0.2.0",
"igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)", "mio 0.6.1 (git+https://github.com/carllerche/mio)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0", "rlp 0.1.0",
@ -471,20 +474,20 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-rpc" name = "ethcore-rpc"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.4.0", "ethash 1.4.0",
"ethcore 1.4.0", "ethcore 1.5.0",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-io 1.4.0", "ethcore-io 1.5.0",
"ethcore-ipc 1.4.0", "ethcore-ipc 1.4.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"ethcrypto 0.1.0", "ethcrypto 0.1.0",
"ethjson 0.1.0", "ethjson 0.1.0",
"ethkey 0.2.0", "ethkey 0.2.0",
"ethstore 0.1.0", "ethstore 0.1.0",
"ethsync 1.4.0", "ethsync 1.5.0",
"fetch 0.1.0", "fetch 0.1.0",
"json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)", "json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -501,21 +504,21 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-signer" name = "ethcore-signer"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-io 1.4.0", "ethcore-io 1.5.0",
"ethcore-rpc 1.4.0", "ethcore-rpc 1.5.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-ui 1.4.0", "parity-ui 1.4.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)", "ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
] ]
[[package]] [[package]]
@ -527,7 +530,7 @@ dependencies = [
"ethcore-ipc 1.4.0", "ethcore-ipc 1.4.0",
"ethcore-ipc-codegen 1.4.0", "ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-nano 1.4.0", "ethcore-ipc-nano 1.4.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"json-tcp-server 0.1.0 (git+https://github.com/ethcore/json-tcp-server)", "json-tcp-server 0.1.0 (git+https://github.com/ethcore/json-tcp-server)",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -538,22 +541,23 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-util" name = "ethcore-util"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)", "elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)", "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
"ethcore-bigint 0.1.1", "ethcore-bigint 0.1.2",
"ethcore-bloom-journal 0.1.0", "ethcore-bloom-journal 0.1.0",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)",
@ -576,7 +580,7 @@ name = "ethcrypto"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)", "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
"ethcore-bigint 0.1.1", "ethcore-bigint 0.1.2",
"ethkey 0.2.0", "ethkey 0.2.0",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -586,7 +590,7 @@ dependencies = [
name = "ethjson" name = "ethjson"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_codegen 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde_codegen 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -599,7 +603,7 @@ version = "0.2.0"
dependencies = [ dependencies = [
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)", "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
"ethcore-bigint 0.1.1", "ethcore-bigint 0.1.2",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -615,7 +619,7 @@ dependencies = [
"ethkey 0.2.0", "ethkey 0.2.0",
"itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
@ -629,17 +633,17 @@ dependencies = [
[[package]] [[package]]
name = "ethsync" name = "ethsync"
version = "1.4.0" version = "1.5.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.4.0", "ethcore 1.5.0",
"ethcore-io 1.4.0", "ethcore-io 1.5.0",
"ethcore-ipc 1.4.0", "ethcore-ipc 1.4.0",
"ethcore-ipc-codegen 1.4.0", "ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-nano 1.4.0", "ethcore-ipc-nano 1.4.0",
"ethcore-network 1.4.0", "ethcore-network 1.5.0",
"ethcore-util 1.4.0", "ethcore-util 1.5.0",
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -660,7 +664,7 @@ dependencies = [
name = "fdlimit" name = "fdlimit"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -678,7 +682,7 @@ name = "flate2"
version = "0.2.14" version = "0.2.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -799,7 +803,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -884,9 +888,14 @@ name = "lazy_static"
version = "0.2.1" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lazycell"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.15" version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -906,8 +915,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "lru-cache" name = "lru-cache"
version = "0.0.7" version = "0.1.0"
source = "git+https://github.com/contain-rs/lru-cache#13255e33c45ceb69a4b143f235a4322df5fb580e" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -922,7 +931,7 @@ name = "memchr"
version = "0.1.11" version = "0.1.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -950,7 +959,7 @@ version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -959,7 +968,7 @@ version = "0.5.1"
source = "git+https://github.com/ethcore/mio?branch=v0.5.x#3842d3b250ffd7bd9b16f9586b875ddcbac2b0dd" source = "git+https://github.com/ethcore/mio?branch=v0.5.x#3842d3b250ffd7bd9b16f9586b875ddcbac2b0dd"
dependencies = [ dependencies = [
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)", "net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
@ -975,7 +984,7 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)", "net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
@ -988,10 +997,10 @@ dependencies = [
[[package]] [[package]]
name = "mio" name = "mio"
version = "0.6.0-dev" version = "0.6.0-dev"
source = "git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d#62ec763c9cc34d8a452ed0392c575c50ddd5fc8d" source = "git+https://github.com/ethcore/mio?branch=timer-fix#31eccc40ece3d47abaefaf23bb2114033175b972"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)", "net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1000,6 +1009,22 @@ dependencies = [
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "mio"
version = "0.6.1"
source = "git+https://github.com/carllerche/mio#56f8663510196fdca04bdf7c5f4d60b24297826f"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
"nix 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "miow" name = "miow"
version = "0.1.3" version = "0.1.3"
@ -1026,7 +1051,7 @@ name = "nanomsg"
version = "0.5.1" version = "0.5.1"
source = "git+https://github.com/ethcore/nanomsg.rs.git#c40fe442c9afaea5b38009a3d992ca044dcceb00" source = "git+https://github.com/ethcore/nanomsg.rs.git#c40fe442c9afaea5b38009a3d992ca044dcceb00"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"nanomsg-sys 0.5.0 (git+https://github.com/ethcore/nanomsg.rs.git)", "nanomsg-sys 0.5.0 (git+https://github.com/ethcore/nanomsg.rs.git)",
] ]
@ -1036,7 +1061,7 @@ version = "0.5.0"
source = "git+https://github.com/ethcore/nanomsg.rs.git#c40fe442c9afaea5b38009a3d992ca044dcceb00" source = "git+https://github.com/ethcore/nanomsg.rs.git#c40fe442c9afaea5b38009a3d992ca044dcceb00"
dependencies = [ dependencies = [
"gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1046,7 +1071,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1057,7 +1082,7 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1067,7 +1092,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)",
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "nix"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)",
"void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1157,7 +1195,7 @@ name = "num_cpus"
version = "0.2.11" version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1211,7 +1249,7 @@ dependencies = [
[[package]] [[package]]
name = "parity-ui-precompiled" name = "parity-ui-precompiled"
version = "1.4.0" version = "1.4.0"
source = "git+https://github.com/ethcore/js-precompiled.git#9f8baa9d0e54056c41a842b351597d0565beda98" source = "git+https://github.com/ethcore/js-precompiled.git#610876fb2bb01705b81de14dd7e04d6a3cdf80ab"
dependencies = [ dependencies = [
"parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1222,7 +1260,7 @@ version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1242,7 +1280,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1381,7 +1419,7 @@ name = "rand"
version = "0.3.14" version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1425,7 +1463,7 @@ name = "rlp"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)", "elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
"ethcore-bigint 0.1.1", "ethcore-bigint 0.1.2",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1435,7 +1473,7 @@ name = "rocksdb"
version = "0.4.5" version = "0.4.5"
source = "git+https://github.com/ethcore/rust-rocksdb#64c63ccbe1f62c2e2b39262486f9ba813793af58" source = "git+https://github.com/ethcore/rust-rocksdb#64c63ccbe1f62c2e2b39262486f9ba813793af58"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"rocksdb-sys 0.3.0 (git+https://github.com/ethcore/rust-rocksdb)", "rocksdb-sys 0.3.0 (git+https://github.com/ethcore/rust-rocksdb)",
] ]
@ -1445,7 +1483,7 @@ version = "0.3.0"
source = "git+https://github.com/ethcore/rust-rocksdb#64c63ccbe1f62c2e2b39262486f9ba813793af58" source = "git+https://github.com/ethcore/rust-rocksdb#64c63ccbe1f62c2e2b39262486f9ba813793af58"
dependencies = [ dependencies = [
"gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1465,7 +1503,7 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"termios 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "termios 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1476,7 +1514,7 @@ version = "0.2.36"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1591,6 +1629,11 @@ name = "slab"
version = "0.2.0" version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "slab"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "smallvec" name = "smallvec"
version = "0.1.8" version = "0.1.8"
@ -1642,7 +1685,7 @@ name = "syntex_errors"
version = "0.42.0" version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"syntex_pos 0.42.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_pos 0.42.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1664,7 +1707,7 @@ version = "0.33.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1677,7 +1720,7 @@ version = "0.42.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"syntex_errors 0.42.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_errors 0.42.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1718,7 +1761,7 @@ name = "termios"
version = "0.2.2" version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1727,7 +1770,7 @@ version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1744,7 +1787,7 @@ version = "0.1.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1882,13 +1925,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "ws" name = "ws"
version = "0.5.2" version = "0.5.3"
source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#00bd2134b07b4bc8ea47b7f6c7afce16bbe34c8f" source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#f5c0b35d660244d1b7500693c8cc28277ce1d418"
dependencies = [ dependencies = [
"bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)", "bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)",
"httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.6.0-dev (git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d)", "mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)", "slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)",
@ -1948,8 +1991,8 @@ dependencies = [
"checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27" "checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27"
"checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>" "checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>"
"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c" "checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
"checksum clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "d19bda68c3db98e3a780342f6101b44312fef20a5f13ce756d1202a35922b01b" "checksum clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)" = "6eacf01b0aad84a0817703498f72d252df7c0faf6a5b86d0be4265f1829e459f"
"checksum clippy_lints 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)" = "3d4ed67c69b9bb35169be2538691d290a3aa0cbfd4b9f0bfb7c221fc1d399a96" "checksum clippy_lints 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)" = "a49960c9aab544ce86b004dcb61620e8b898fea5fc0f697a028f460f48221ed6"
"checksum cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90266f45846f14a1e986c77d1e9c2626b8c342ed806fe60241ec38cc8697b245" "checksum cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90266f45846f14a1e986c77d1e9c2626b8c342ed806fe60241ec38cc8697b245"
"checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc" "checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc"
"checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "<none>" "checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "<none>"
@ -1982,11 +2025,12 @@ dependencies = [
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" "checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a"
"checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f" "checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
"checksum libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)" = "23e3757828fa702a20072c37ff47938e9dd331b92fac6e223d26d4b7a55f7ee2" "checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
"checksum libc 0.2.16 (registry+https://github.com/rust-lang/crates.io-index)" = "408014cace30ee0f767b1c4517980646a573ec61a57957aeeabcac8ac0a02e8d"
"checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48" "checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48"
"checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd" "checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd"
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054" "checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
"checksum lru-cache 0.0.7 (git+https://github.com/contain-rs/lru-cache)" = "<none>" "checksum lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "656fa4dfcb02bcf1063c592ba3ff6a5303ee1f2afe98c8a889e8b1a77c6dfdb7"
"checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e" "checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e"
"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" "checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
"checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2" "checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2"
@ -1994,7 +2038,8 @@ dependencies = [
"checksum miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d1f4d337a01c32e1f2122510fed46393d53ca35a7f429cb0450abaedfa3ed54" "checksum miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d1f4d337a01c32e1f2122510fed46393d53ca35a7f429cb0450abaedfa3ed54"
"checksum mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)" = "<none>" "checksum mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)" = "<none>"
"checksum mio 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a637d1ca14eacae06296a008fa7ad955347e34efcb5891cfd8ba05491a37907e" "checksum mio 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a637d1ca14eacae06296a008fa7ad955347e34efcb5891cfd8ba05491a37907e"
"checksum mio 0.6.0-dev (git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d)" = "<none>" "checksum mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)" = "<none>"
"checksum mio 0.6.1 (git+https://github.com/carllerche/mio)" = "<none>"
"checksum miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d5bfc6782530ac8ace97af10a540054a37126b63b0702ddaaa243b73b5745b9a" "checksum miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d5bfc6782530ac8ace97af10a540054a37126b63b0702ddaaa243b73b5745b9a"
"checksum msdos_time 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c04b68cc63a8480fb2550343695f7be72effdec953a9d4508161c3e69041c7d8" "checksum msdos_time 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c04b68cc63a8480fb2550343695f7be72effdec953a9d4508161c3e69041c7d8"
"checksum nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)" = "<none>" "checksum nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)" = "<none>"
@ -2002,6 +2047,7 @@ dependencies = [
"checksum net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)" = "6a816012ca11cb47009693c1e0c6130e26d39e4d97ee2a13c50e868ec83e3204" "checksum net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)" = "6a816012ca11cb47009693c1e0c6130e26d39e4d97ee2a13c50e868ec83e3204"
"checksum nix 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f05c2fc965fc1cd6b73fa57fa7b89f288178737f2f3ce9e63e4a6a141189000e" "checksum nix 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f05c2fc965fc1cd6b73fa57fa7b89f288178737f2f3ce9e63e4a6a141189000e"
"checksum nix 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a7bb1da2be7da3cbffda73fc681d509ffd9e665af478d2bee1907cee0bc64b2" "checksum nix 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a7bb1da2be7da3cbffda73fc681d509ffd9e665af478d2bee1907cee0bc64b2"
"checksum nix 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a0d95c5fa8b641c10ad0b8887454ebaafa3c92b5cd5350f8fc693adafd178e7b"
"checksum nodrop 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4d9a22dbcebdeef7bf275cbf444d6521d4e7a2fee187b72d80dba0817120dd8f" "checksum nodrop 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4d9a22dbcebdeef7bf275cbf444d6521d4e7a2fee187b72d80dba0817120dd8f"
"checksum nom 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6caab12c5f97aa316cb249725aa32115118e1522b445e26c257dd77cad5ffd4e" "checksum nom 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6caab12c5f97aa316cb249725aa32115118e1522b445e26c257dd77cad5ffd4e"
"checksum num 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "c04bd954dbf96f76bab6e5bd6cef6f1ce1262d15268ce4f926d2b5b778fa7af2" "checksum num 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "c04bd954dbf96f76bab6e5bd6cef6f1ce1262d15268ce4f926d2b5b778fa7af2"
@ -2060,6 +2106,7 @@ dependencies = [
"checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e" "checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
"checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>" "checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>"
"checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4" "checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
"checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
"checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410" "checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"
"checksum solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "172382bac9424588d7840732b250faeeef88942e37b6e35317dce98cafdd75b2" "checksum solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "172382bac9424588d7840732b250faeeef88942e37b6e35317dce98cafdd75b2"
"checksum spmc 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "93bdab61c1a413e591c4d17388ffa859eaff2df27f1e13a5ec8b716700605adf" "checksum spmc 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "93bdab61c1a413e591c4d17388ffa859eaff2df27f1e13a5ec8b716700605adf"
@ -2097,7 +2144,7 @@ dependencies = [
"checksum webpki 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "813503a5985585e0812d430cd1328ee322f47f66629c8ed4ecab939cf9e92f91" "checksum webpki 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "813503a5985585e0812d430cd1328ee322f47f66629c8ed4ecab939cf9e92f91"
"checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4" "checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
"checksum ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>" "checksum ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" "checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
"checksum xml-rs 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "65e74b96bd3179209dc70a980da6df843dff09e46eee103a0376c0949257e3ef" "checksum xml-rs 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "65e74b96bd3179209dc70a980da6df843dff09e46eee103a0376c0949257e3ef"
"checksum xmltree 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "472a9d37c7c53ab2391161df5b89b1f3bf76dab6ab150d7941ecbdd832282082" "checksum xmltree 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "472a9d37c7c53ab2391161df5b89b1f3bf76dab6ab150d7941ecbdd832282082"
View File
@ -1,7 +1,7 @@
[package] [package]
description = "Ethcore client." description = "Ethcore client."
name = "parity" name = "parity"
version = "1.4.0" version = "1.5.0"
license = "GPL-3.0" license = "GPL-3.0"
authors = ["Ethcore <admin@ethcore.io>"] authors = ["Ethcore <admin@ethcore.io>"]
build = "build.rs" build = "build.rs"
@ -46,7 +46,7 @@ ethcore-logger = { path = "logger" }
rlp = { path = "util/rlp" } rlp = { path = "util/rlp" }
ethcore-stratum = { path = "stratum" } ethcore-stratum = { path = "stratum" }
ethcore-dapps = { path = "dapps", optional = true } ethcore-dapps = { path = "dapps", optional = true }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
winapi = "0.2" winapi = "0.2"
View File
@ -1,7 +1,12 @@
# [Parity](https://ethcore.io/parity.html) # [Parity](https://ethcore.io/parity.html)
### Fast, light, and robust Ethereum implementation ### Fast, light, and robust Ethereum implementation
[![Build Status][travis-image]][travis-url] [![Coverage Status][coveralls-image]][coveralls-url] [![Join the chat at https://gitter.im/ethcore/parity][gitter-image]][gitter-url] [![GPLv3][license-image]][license-url] [![Build Status][travis-image]][travis-url] [![build status](https://gitlab.ethcore.io/Mirrors/ethcore-parity/badges/master/build.svg)](https://gitlab.ethcore.io/Mirrors/ethcore-parity/commits/master) [![Coverage Status][coveralls-image]][coveralls-url] [![GPLv3][license-image]][license-url]
### Join the chat!
Parity [![Join the chat at https://gitter.im/ethcore/parity][gitter-image]][gitter-url] and
parity.js [![Join the chat at https://gitter.im/ethcore/parity.js](https://badges.gitter.im/ethcore/parity.js.svg)](https://gitter.im/ethcore/parity.js?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[Internal Documentation][doc-url] [Internal Documentation][doc-url]
@ -19,7 +24,7 @@ Be sure to check out [our wiki][wiki-url] for more information.
[doc-url]: https://ethcore.github.io/parity/ethcore/index.html [doc-url]: https://ethcore.github.io/parity/ethcore/index.html
[wiki-url]: https://github.com/ethcore/parity/wiki [wiki-url]: https://github.com/ethcore/parity/wiki
**Requires Rust version 1.12.0 to build** **Parity requires Rust version 1.12.0 to build**
---- ----
@ -29,12 +34,15 @@ Be sure to check out [our wiki][wiki-url] for more information.
Parity's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity using the sophisticated and Parity's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity using the sophisticated and
cutting-edge Rust programming language. Parity is licensed under the GPLv3, and can be used for all your Ethereum needs. cutting-edge Rust programming language. Parity is licensed under the GPLv3, and can be used for all your Ethereum needs.
By default, Parity will run a JSONRPC server on `127.0.0.1:8545`. This is fully configurable and supports a number Parity comes with a built-in wallet. To access [Parity Wallet](http://127.0.0.1:8080/), simply go to http://127.0.0.1:8080/. It
of RPC APIs. includes various functionality allowing you to:
- create and manage your Ethereum accounts;
- manage your Ether and any Ethereum tokens;
- create and register your own tokens;
- and much more.
Parity also runs a server for running decentralized apps, or "Dapps", on `http://127.0.0.1:8080`. By default, Parity will also run a JSONRPC server on `127.0.0.1:8545`. This is fully configurable and supports a number
This includes a few useful Dapps, including Ethereum Wallet, Maker OTC, and a node status page. of RPC APIs.
In a near-future release, it will be easy to install Dapps and use them through this web interface.
If you run into an issue while using parity, feel free to file one in this repository If you run into an issue while using parity, feel free to file one in this repository
or hop on our [gitter chat room][gitter-url] to ask a question. We are glad to help! or hop on our [gitter chat room][gitter-url] to ask a question. We are glad to help!
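The JSONRPC endpoint mentioned above accepts plain HTTP POSTs, so it can be exercised with any HTTP client; a minimal sketch using the standard `eth_blockNumber` method (nothing here is specific to this release):

```bash
# Ask a locally running Parity node for the latest block number over JSONRPC
curl -X POST -H "Content-Type: application/json" \
  --data '{"jsonrpc":"2.0","method":"eth_blockNumber","params":[],"id":1}' \
  http://127.0.0.1:8545
```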
@ -55,6 +63,13 @@ We recommend installing Rust through [rustup](https://www.rustup.rs/). If you do
$ curl https://sh.rustup.rs -sSf | sh $ curl https://sh.rustup.rs -sSf | sh
``` ```
Parity also requires the `gcc`, `g++` and `make` packages to be installed.
- OSX:
```bash
$ curl https://sh.rustup.rs -sSf | sh
```
`clang` and `make` are required. These come with the Xcode command-line tools or can be installed with Homebrew.
- Windows - Windows
Make sure you have Visual Studio 2015 with C++ support installed. Next, download and run the rustup installer from Make sure you have Visual Studio 2015 with C++ support installed. Next, download and run the rustup installer from
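With the toolchain in place (rustup plus the compilers listed above), building from source follows the usual Cargo flow; a minimal sketch, assuming the ethcore/parity repository referenced elsewhere in this README:

```bash
# Clone the sources and build an optimized binary; the result lands in target/release/parity
git clone https://github.com/ethcore/parity
cd parity
cargo build --release
```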
View File
@ -38,6 +38,8 @@ after_test:
- cargo build --verbose --release - cargo build --verbose --release
- ps: if($env:cert) { Start-FileDownload $env:cert -FileName $env:keyfile } - ps: if($env:cert) { Start-FileDownload $env:cert -FileName $env:keyfile }
- ps: if($env:cert) { signtool sign /f $env:keyfile /p $env:certpass target\release\parity.exe } - ps: if($env:cert) { signtool sign /f $env:keyfile /p $env:certpass target\release\parity.exe }
- msbuild windows\ptray\ptray.vcxproj /p:Platform=x64 /p:Configuration=Release
- ps: if($env:cert) { signtool sign /f $env:keyfile /p $env:certpass windows\ptray\x64\release\ptray.exe }
- makensis.exe nsis\installer.nsi - makensis.exe nsis\installer.nsi
- ps: if($env:cert) { signtool sign /f $env:keyfile /p $env:certpass nsis\installer.exe } - ps: if($env:cert) { signtool sign /f $env:keyfile /p $env:certpass nsis\installer.exe }
View File
@ -1,7 +1,7 @@
[package] [package]
description = "Parity Dapps crate" description = "Parity Dapps crate"
name = "ethcore-dapps" name = "ethcore-dapps"
version = "1.4.0" version = "1.5.0"
license = "GPL-3.0" license = "GPL-3.0"
authors = ["Ethcore <admin@ethcore.io"] authors = ["Ethcore <admin@ethcore.io"]
build = "build.rs" build = "build.rs"
@ -24,6 +24,7 @@ ethabi = "0.2.2"
linked-hash-map = "0.3" linked-hash-map = "0.3"
parity-dapps-glue = "1.4" parity-dapps-glue = "1.4"
mime = "0.2" mime = "0.2"
time = "0.1.35"
serde_macros = { version = "0.8", optional = true } serde_macros = { version = "0.8", optional = true }
zip = { version = "0.1", default-features = false } zip = { version = "0.1", default-features = false }
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }
@ -33,7 +34,7 @@ fetch = { path = "../util/fetch" }
parity-ui = { path = "./ui" } parity-ui = { path = "./ui" }
mime_guess = { version = "1.6.1" } mime_guess = { version = "1.6.1" }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
[build-dependencies] [build-dependencies]
serde_codegen = { version = "0.8", optional = true } serde_codegen = { version = "0.8", optional = true }
View File
@ -32,21 +32,22 @@ use random_filename;
use SyncStatus; use SyncStatus;
use util::{Mutex, H256}; use util::{Mutex, H256};
use util::sha3::sha3; use util::sha3::sha3;
use page::LocalPageEndpoint; use page::{LocalPageEndpoint, PageCache};
use handlers::{ContentHandler, ContentFetcherHandler, ContentValidator}; use handlers::{ContentHandler, ContentFetcherHandler, ContentValidator};
use endpoint::{Endpoint, EndpointPath, Handler}; use endpoint::{Endpoint, EndpointPath, Handler};
use apps::cache::{ContentCache, ContentStatus}; use apps::cache::{ContentCache, ContentStatus};
use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest, serialize_manifest, Manifest}; use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest, serialize_manifest, Manifest};
use apps::urlhint::{URLHintContract, URLHint, URLHintResult}; use apps::urlhint::{URLHintContract, URLHint, URLHintResult};
const MAX_CACHED_DAPPS: usize = 10; /// Limit of cached dapps/content
const MAX_CACHED_DAPPS: usize = 20;
pub struct ContentFetcher<R: URLHint = URLHintContract> { pub struct ContentFetcher<R: URLHint = URLHintContract> {
dapps_path: PathBuf, dapps_path: PathBuf,
resolver: R, resolver: R,
cache: Arc<Mutex<ContentCache>>, cache: Arc<Mutex<ContentCache>>,
sync: Arc<SyncStatus>, sync: Arc<SyncStatus>,
embeddable_at: Option<u16>, embeddable_on: Option<(String, u16)>,
} }
impl<R: URLHint> Drop for ContentFetcher<R> { impl<R: URLHint> Drop for ContentFetcher<R> {
@ -58,7 +59,7 @@ impl<R: URLHint> Drop for ContentFetcher<R> {
impl<R: URLHint> ContentFetcher<R> { impl<R: URLHint> ContentFetcher<R> {
pub fn new(resolver: R, sync_status: Arc<SyncStatus>, embeddable_at: Option<u16>) -> Self { pub fn new(resolver: R, sync_status: Arc<SyncStatus>, embeddable_on: Option<(String, u16)>) -> Self {
let mut dapps_path = env::temp_dir(); let mut dapps_path = env::temp_dir();
dapps_path.push(random_filename()); dapps_path.push(random_filename());
@ -67,16 +68,17 @@ impl<R: URLHint> ContentFetcher<R> {
resolver: resolver, resolver: resolver,
sync: sync_status, sync: sync_status,
cache: Arc::new(Mutex::new(ContentCache::default())), cache: Arc::new(Mutex::new(ContentCache::default())),
embeddable_at: embeddable_at, embeddable_on: embeddable_on,
} }
} }
fn still_syncing() -> Box<Handler> { fn still_syncing(address: Option<(String, u16)>) -> Box<Handler> {
Box::new(ContentHandler::error( Box::new(ContentHandler::error(
StatusCode::ServiceUnavailable, StatusCode::ServiceUnavailable,
"Sync In Progress", "Sync In Progress",
"Your node is still syncing. We cannot resolve any content before it's fully synced.", "Your node is still syncing. We cannot resolve any content before it's fully synced.",
Some("<a href=\"javascript:window.location.reload()\">Refresh</a>") Some("<a href=\"javascript:window.location.reload()\">Refresh</a>"),
address,
)) ))
} }
@ -143,19 +145,19 @@ impl<R: URLHint> ContentFetcher<R> {
match content { match content {
// Don't serve dapps if we are still syncing (but serve content) // Don't serve dapps if we are still syncing (but serve content)
Some(URLHintResult::Dapp(_)) if self.sync.is_major_importing() => { Some(URLHintResult::Dapp(_)) if self.sync.is_major_importing() => {
(None, Self::still_syncing()) (None, Self::still_syncing(self.embeddable_on.clone()))
}, },
Some(URLHintResult::Dapp(dapp)) => { Some(URLHintResult::Dapp(dapp)) => {
let (handler, fetch_control) = ContentFetcherHandler::new( let (handler, fetch_control) = ContentFetcherHandler::new(
dapp.url(), dapp.url(),
control, control,
path.using_dapps_domains,
DappInstaller { DappInstaller {
id: content_id.clone(), id: content_id.clone(),
dapps_path: self.dapps_path.clone(), dapps_path: self.dapps_path.clone(),
on_done: Box::new(on_done), on_done: Box::new(on_done),
embeddable_at: self.embeddable_at, embeddable_on: self.embeddable_on.clone(),
} },
self.embeddable_on.clone(),
); );
(Some(ContentStatus::Fetching(fetch_control)), Box::new(handler) as Box<Handler>) (Some(ContentStatus::Fetching(fetch_control)), Box::new(handler) as Box<Handler>)
@ -164,19 +166,19 @@ impl<R: URLHint> ContentFetcher<R> {
let (handler, fetch_control) = ContentFetcherHandler::new( let (handler, fetch_control) = ContentFetcherHandler::new(
content.url, content.url,
control, control,
path.using_dapps_domains,
ContentInstaller { ContentInstaller {
id: content_id.clone(), id: content_id.clone(),
mime: content.mime, mime: content.mime,
content_path: self.dapps_path.clone(), content_path: self.dapps_path.clone(),
on_done: Box::new(on_done), on_done: Box::new(on_done),
} },
self.embeddable_on.clone(),
); );
(Some(ContentStatus::Fetching(fetch_control)), Box::new(handler) as Box<Handler>) (Some(ContentStatus::Fetching(fetch_control)), Box::new(handler) as Box<Handler>)
}, },
None if self.sync.is_major_importing() => { None if self.sync.is_major_importing() => {
(None, Self::still_syncing()) (None, Self::still_syncing(self.embeddable_on.clone()))
}, },
None => { None => {
// This may happen when sync status changes in between // This may happen when sync status changes in between
@ -185,7 +187,8 @@ impl<R: URLHint> ContentFetcher<R> {
StatusCode::NotFound, StatusCode::NotFound,
"Resource Not Found", "Resource Not Found",
"Requested resource was not found.", "Requested resource was not found.",
None None,
self.embeddable_on.clone(),
)) as Box<Handler>) )) as Box<Handler>)
}, },
} }
@ -255,6 +258,17 @@ impl ContentValidator for ContentInstaller {
// Create dir // Create dir
try!(fs::create_dir_all(&self.content_path)); try!(fs::create_dir_all(&self.content_path));
// Validate hash
let mut file_reader = io::BufReader::new(try!(fs::File::open(&path)));
let hash = try!(sha3(&mut file_reader));
let id = try!(self.id.as_str().parse().map_err(|_| ValidationError::InvalidContentId));
if id != hash {
return Err(ValidationError::HashMismatch {
expected: id,
got: hash,
});
}
// And prepare path for a file // And prepare path for a file
let filename = path.file_name().expect("We always fetch a file."); let filename = path.file_name().expect("We always fetch a file.");
let mut content_path = self.content_path.clone(); let mut content_path = self.content_path.clone();
@ -266,7 +280,7 @@ impl ContentValidator for ContentInstaller {
try!(fs::copy(&path, &content_path)); try!(fs::copy(&path, &content_path));
Ok((self.id.clone(), LocalPageEndpoint::single_file(content_path, self.mime.clone()))) Ok((self.id.clone(), LocalPageEndpoint::single_file(content_path, self.mime.clone(), PageCache::Enabled)))
} }
fn done(&self, endpoint: Option<LocalPageEndpoint>) { fn done(&self, endpoint: Option<LocalPageEndpoint>) {
@ -279,7 +293,7 @@ struct DappInstaller {
id: String, id: String,
dapps_path: PathBuf, dapps_path: PathBuf,
on_done: Box<Fn(String, Option<LocalPageEndpoint>) + Send>, on_done: Box<Fn(String, Option<LocalPageEndpoint>) + Send>,
embeddable_at: Option<u16>, embeddable_on: Option<(String, u16)>,
} }
impl DappInstaller { impl DappInstaller {
@ -372,7 +386,7 @@ impl ContentValidator for DappInstaller {
try!(manifest_file.write_all(manifest_str.as_bytes())); try!(manifest_file.write_all(manifest_str.as_bytes()));
// Create endpoint // Create endpoint
let app = LocalPageEndpoint::new(target, manifest.clone().into(), self.embeddable_at); let app = LocalPageEndpoint::new(target, manifest.clone().into(), PageCache::Enabled, self.embeddable_on.clone());
// Return modified app manifest // Return modified app manifest
Ok((manifest.id.clone(), app)) Ok((manifest.id.clone(), app))
@ -412,7 +426,7 @@ mod tests {
version: "".into(), version: "".into(),
author: "".into(), author: "".into(),
icon_url: "".into(), icon_url: "".into(),
}, None); }, Default::default(), None);
// when // when
fetcher.set_status("test", ContentStatus::Ready(handler)); fetcher.set_status("test", ContentStatus::Ready(handler));
View File
@ -18,7 +18,7 @@ use std::io;
use std::io::Read; use std::io::Read;
use std::fs; use std::fs;
use std::path::PathBuf; use std::path::PathBuf;
use page::LocalPageEndpoint; use page::{LocalPageEndpoint, PageCache};
use endpoint::{Endpoints, EndpointInfo}; use endpoint::{Endpoints, EndpointInfo};
use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest}; use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest};
@ -97,12 +97,12 @@ fn read_manifest(name: &str, mut path: PathBuf) -> EndpointInfo {
}) })
} }
pub fn local_endpoints(dapps_path: String, signer_port: Option<u16>) -> Endpoints { pub fn local_endpoints(dapps_path: String, signer_address: Option<(String, u16)>) -> Endpoints {
let mut pages = Endpoints::new(); let mut pages = Endpoints::new();
for dapp in local_dapps(dapps_path) { for dapp in local_dapps(dapps_path) {
pages.insert( pages.insert(
dapp.id, dapp.id,
Box::new(LocalPageEndpoint::new(dapp.path, dapp.info, signer_port)) Box::new(LocalPageEndpoint::new(dapp.path, dapp.info, PageCache::Disabled, signer_address.clone()))
); );
} }
pages pages
View File
@ -33,38 +33,30 @@ pub const RPC_PATH : &'static str = "rpc";
pub const API_PATH : &'static str = "api"; pub const API_PATH : &'static str = "api";
pub const UTILS_PATH : &'static str = "parity-utils"; pub const UTILS_PATH : &'static str = "parity-utils";
pub fn redirection_address(using_dapps_domains: bool, app_id: &str) -> String {
if using_dapps_domains {
format!("http://{}{}/", app_id, DAPPS_DOMAIN)
} else {
format!("/{}/", app_id)
}
}
pub fn utils() -> Box<Endpoint> { pub fn utils() -> Box<Endpoint> {
Box::new(PageEndpoint::with_prefix(parity_ui::App::default(), UTILS_PATH.to_owned())) Box::new(PageEndpoint::with_prefix(parity_ui::App::default(), UTILS_PATH.to_owned()))
} }
pub fn all_endpoints(dapps_path: String, signer_port: Option<u16>) -> Endpoints { pub fn all_endpoints(dapps_path: String, signer_address: Option<(String, u16)>) -> Endpoints {
// fetch fs dapps at first to avoid overwriting builtins // fetch fs dapps at first to avoid overwriting builtins
let mut pages = fs::local_endpoints(dapps_path, signer_port); let mut pages = fs::local_endpoints(dapps_path, signer_address.clone());
// NOTE [ToDr] Dapps will be currently embedded on 8180 // NOTE [ToDr] Dapps will be currently embedded on 8180
insert::<parity_ui::App>(&mut pages, "ui", Embeddable::Yes(signer_port)); insert::<parity_ui::App>(&mut pages, "ui", Embeddable::Yes(signer_address.clone()));
pages.insert("proxy".into(), ProxyPac::boxed(signer_port)); pages.insert("proxy".into(), ProxyPac::boxed(signer_address));
pages pages
} }
fn insert<T : WebApp + Default + 'static>(pages: &mut Endpoints, id: &str, embed_at: Embeddable) { fn insert<T : WebApp + Default + 'static>(pages: &mut Endpoints, id: &str, embed_at: Embeddable) {
pages.insert(id.to_owned(), Box::new(match embed_at { pages.insert(id.to_owned(), Box::new(match embed_at {
Embeddable::Yes(port) => PageEndpoint::new_safe_to_embed(T::default(), port), Embeddable::Yes(address) => PageEndpoint::new_safe_to_embed(T::default(), address),
Embeddable::No => PageEndpoint::new(T::default()), Embeddable::No => PageEndpoint::new(T::default()),
})); }));
} }
enum Embeddable { enum Embeddable {
Yes(Option<u16>), Yes(Option<(String, u16)>),
#[allow(dead_code)] #[allow(dead_code)]
No, No,
} }
View File
@ -32,7 +32,7 @@ pub struct ContentHandler {
content: String, content: String,
mimetype: Mime, mimetype: Mime,
write_pos: usize, write_pos: usize,
safe_to_embed_at_port: Option<u16>, safe_to_embed_on: Option<(String, u16)>,
} }
impl ContentHandler { impl ContentHandler {
@ -44,35 +44,31 @@ impl ContentHandler {
Self::new(StatusCode::NotFound, content, mimetype) Self::new(StatusCode::NotFound, content, mimetype)
} }
pub fn html(code: StatusCode, content: String, embeddable_at: Option<u16>) -> Self { pub fn html(code: StatusCode, content: String, embeddable_on: Option<(String, u16)>) -> Self {
Self::new_embeddable(code, content, mime!(Text/Html), embeddable_at) Self::new_embeddable(code, content, mime!(Text/Html), embeddable_on)
} }
pub fn error(code: StatusCode, title: &str, message: &str, details: Option<&str>) -> Self { pub fn error(code: StatusCode, title: &str, message: &str, details: Option<&str>, embeddable_on: Option<(String, u16)>) -> Self {
Self::error_embeddable(code, title, message, details, None)
}
pub fn error_embeddable(code: StatusCode, title: &str, message: &str, details: Option<&str>, embeddable_at: Option<u16>) -> Self {
Self::html(code, format!( Self::html(code, format!(
include_str!("../error_tpl.html"), include_str!("../error_tpl.html"),
title=title, title=title,
message=message, message=message,
details=details.unwrap_or_else(|| ""), details=details.unwrap_or_else(|| ""),
version=version(), version=version(),
), embeddable_at) ), embeddable_on)
} }
pub fn new(code: StatusCode, content: String, mimetype: Mime) -> Self { pub fn new(code: StatusCode, content: String, mimetype: Mime) -> Self {
Self::new_embeddable(code, content, mimetype, None) Self::new_embeddable(code, content, mimetype, None)
} }
pub fn new_embeddable(code: StatusCode, content: String, mimetype: Mime, embeddable_at: Option<u16>) -> Self { pub fn new_embeddable(code: StatusCode, content: String, mimetype: Mime, embeddable_on: Option<(String, u16)>) -> Self {
ContentHandler { ContentHandler {
code: code, code: code,
content: content, content: content,
mimetype: mimetype, mimetype: mimetype,
write_pos: 0, write_pos: 0,
safe_to_embed_at_port: embeddable_at, safe_to_embed_on: embeddable_on,
} }
} }
} }
@ -89,7 +85,7 @@ impl server::Handler<HttpStream> for ContentHandler {
fn on_response(&mut self, res: &mut server::Response) -> Next { fn on_response(&mut self, res: &mut server::Response) -> Next {
res.set_status(self.code); res.set_status(self.code);
res.headers_mut().set(header::ContentType(self.mimetype.clone())); res.headers_mut().set(header::ContentType(self.mimetype.clone()));
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_at_port.clone()); add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on.clone());
Next::write() Next::write()
} }
View File
@ -22,14 +22,14 @@ use std::sync::{mpsc, Arc};
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{Instant, Duration}; use std::time::{Instant, Duration};
use util::Mutex; use util::Mutex;
use url::Url;
use fetch::{Client, Fetch, FetchResult}; use fetch::{Client, Fetch, FetchResult};
use hyper::{server, Decoder, Encoder, Next, Method, Control}; use hyper::{server, Decoder, Encoder, Next, Method, Control};
use hyper::net::HttpStream; use hyper::net::HttpStream;
use hyper::status::StatusCode; use hyper::status::StatusCode;
use handlers::{ContentHandler, Redirection}; use handlers::{ContentHandler, Redirection, extract_url};
use apps::redirection_address;
use page::LocalPageEndpoint; use page::LocalPageEndpoint;
const FETCH_TIMEOUT: u64 = 30; const FETCH_TIMEOUT: u64 = 30;
@ -136,8 +136,9 @@ pub struct ContentFetcherHandler<H: ContentValidator> {
control: Option<Control>, control: Option<Control>,
status: FetchState, status: FetchState,
client: Option<Client>, client: Option<Client>,
using_dapps_domains: bool,
installer: H, installer: H,
request_url: Option<Url>,
embeddable_on: Option<(String, u16)>,
} }
impl<H: ContentValidator> Drop for ContentFetcherHandler<H> { impl<H: ContentValidator> Drop for ContentFetcherHandler<H> {
@ -155,8 +156,9 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
pub fn new( pub fn new(
url: String, url: String,
control: Control, control: Control,
using_dapps_domains: bool, handler: H,
handler: H) -> (Self, Arc<FetchControl>) { embeddable_on: Option<(String, u16)>,
) -> (Self, Arc<FetchControl>) {
let fetch_control = Arc::new(FetchControl::default()); let fetch_control = Arc::new(FetchControl::default());
let client = Client::default(); let client = Client::default();
@ -165,8 +167,9 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
control: Some(control), control: Some(control),
client: Some(client), client: Some(client),
status: FetchState::NotStarted(url), status: FetchState::NotStarted(url),
using_dapps_domains: using_dapps_domains,
installer: handler, installer: handler,
request_url: None,
embeddable_on: embeddable_on,
}; };
(handler, fetch_control) (handler, fetch_control)
@ -189,6 +192,7 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<H> { impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<H> {
fn on_request(&mut self, request: server::Request<HttpStream>) -> Next { fn on_request(&mut self, request: server::Request<HttpStream>) -> Next {
self.request_url = extract_url(&request);
let status = if let FetchState::NotStarted(ref url) = self.status { let status = if let FetchState::NotStarted(ref url) = self.status {
Some(match *request.method() { Some(match *request.method() {
// Start fetching content // Start fetching content
@ -204,6 +208,7 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
"Unable To Start Dapp Download", "Unable To Start Dapp Download",
"Could not initialize download of the dapp. It might be a problem with the remote server.", "Could not initialize download of the dapp. It might be a problem with the remote server.",
Some(&format!("{}", e)), Some(&format!("{}", e)),
self.embeddable_on.clone(),
)), )),
} }
}, },
@ -213,6 +218,7 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
"Method Not Allowed", "Method Not Allowed",
"Only <code>GET</code> requests are allowed.", "Only <code>GET</code> requests are allowed.",
None, None,
self.embeddable_on.clone(),
)), )),
}) })
} else { None }; } else { None };
@ -234,7 +240,8 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
StatusCode::GatewayTimeout, StatusCode::GatewayTimeout,
"Download Timeout", "Download Timeout",
&format!("Could not fetch content within {} seconds.", FETCH_TIMEOUT), &format!("Could not fetch content within {} seconds.", FETCH_TIMEOUT),
None None,
self.embeddable_on.clone(),
); );
Self::close_client(&mut self.client); Self::close_client(&mut self.client);
(Some(FetchState::Error(timeout)), Next::write()) (Some(FetchState::Error(timeout)), Next::write())
@ -255,12 +262,15 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
StatusCode::BadGateway, StatusCode::BadGateway,
"Invalid Dapp", "Invalid Dapp",
"Downloaded bundle does not contain a valid content.", "Downloaded bundle does not contain a valid content.",
Some(&format!("{:?}", e)) Some(&format!("{:?}", e)),
self.embeddable_on.clone(),
)) ))
}, },
Ok((id, result)) => { Ok((id, result)) => {
let address = redirection_address(self.using_dapps_domains, &id); let url: String = self.request_url.take()
FetchState::Done(id, result, Redirection::new(&address)) .map(|url| url.raw.into_string())
.expect("Request URL always read in on_request; qed");
FetchState::Done(id, result, Redirection::new(&url))
}, },
}; };
// Remove temporary zip file // Remove temporary zip file
@ -274,6 +284,7 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
"Download Error", "Download Error",
"There was an error when fetching the content.", "There was an error when fetching the content.",
Some(&format!("{:?}", e)), Some(&format!("{:?}", e)),
self.embeddable_on.clone(),
); );
(Some(FetchState::Error(error)), Next::write()) (Some(FetchState::Error(error)), Next::write())
}, },


@ -30,18 +30,18 @@ pub use self::fetch::{ContentFetcherHandler, ContentValidator, FetchControl};
use url::Url; use url::Url;
use hyper::{server, header, net, uri}; use hyper::{server, header, net, uri};
use signer_address; use address;
/// Adds security-related headers to the Response. /// Adds security-related headers to the Response.
pub fn add_security_headers(headers: &mut header::Headers, embeddable_at: Option<u16>) { pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Option<(String, u16)>) {
headers.set_raw("X-XSS-Protection", vec![b"1; mode=block".to_vec()]); headers.set_raw("X-XSS-Protection", vec![b"1; mode=block".to_vec()]);
headers.set_raw("X-Content-Type-Options", vec![b"nosniff".to_vec()]); headers.set_raw("X-Content-Type-Options", vec![b"nosniff".to_vec()]);
// Embedding header: // Embedding header:
if let Some(port) = embeddable_at { if let Some(embeddable_on) = embeddable_on {
headers.set_raw( headers.set_raw(
"X-Frame-Options", "X-Frame-Options",
vec![format!("ALLOW-FROM http://{}", signer_address(port)).into_bytes()] vec![format!("ALLOW-FROM http://{}", address(embeddable_on)).into_bytes()]
); );
} else { } else {
// TODO [ToDr] Should we be more strict here (DENY?)? // TODO [ToDr] Should we be more strict here (DENY?)?
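For reference, the value the new `embeddable_on: Option<(String, u16)>` signature puts into X-Frame-Options is just the joined host/port behind ALLOW-FROM. A minimal standalone sketch of that formatting (editorial illustration, not part of this diff; uses only std and mirrors the `address()` helper introduced further down):

fn allow_from(embeddable_on: (String, u16)) -> String {
    // Joins host and port the same way the `address()` helper does.
    format!("ALLOW-FROM http://{}:{}", embeddable_on.0, embeddable_on.1)
}

fn main() {
    assert_eq!(allow_from(("127.0.0.1".into(), 18180)), "ALLOW-FROM http://127.0.0.1:18180");
}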


@ -44,6 +44,7 @@
#![cfg_attr(feature="nightly", plugin(clippy))] #![cfg_attr(feature="nightly", plugin(clippy))]
extern crate hyper; extern crate hyper;
extern crate time;
extern crate url as url_lib; extern crate url as url_lib;
extern crate unicase; extern crate unicase;
extern crate serde; extern crate serde;
@ -111,7 +112,7 @@ pub struct ServerBuilder {
handler: Arc<IoHandler>, handler: Arc<IoHandler>,
registrar: Arc<ContractClient>, registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>, sync_status: Arc<SyncStatus>,
signer_port: Option<u16>, signer_address: Option<(String, u16)>,
} }
impl Extendable for ServerBuilder { impl Extendable for ServerBuilder {
@ -128,7 +129,7 @@ impl ServerBuilder {
handler: Arc::new(IoHandler::new()), handler: Arc::new(IoHandler::new()),
registrar: registrar, registrar: registrar,
sync_status: Arc::new(|| false), sync_status: Arc::new(|| false),
signer_port: None, signer_address: None,
} }
} }
@ -138,8 +139,8 @@ impl ServerBuilder {
} }
/// Change default signer port. /// Change default signer port.
pub fn with_signer_port(&mut self, signer_port: Option<u16>) { pub fn with_signer_address(&mut self, signer_address: Option<(String, u16)>) {
self.signer_port = signer_port; self.signer_address = signer_address;
} }
/// Asynchronously start server with no authentication, /// Asynchronously start server with no authentication,
@ -151,7 +152,7 @@ impl ServerBuilder {
NoAuth, NoAuth,
self.handler.clone(), self.handler.clone(),
self.dapps_path.clone(), self.dapps_path.clone(),
self.signer_port.clone(), self.signer_address.clone(),
self.registrar.clone(), self.registrar.clone(),
self.sync_status.clone(), self.sync_status.clone(),
) )
@ -166,7 +167,7 @@ impl ServerBuilder {
HttpBasicAuth::single_user(username, password), HttpBasicAuth::single_user(username, password),
self.handler.clone(), self.handler.clone(),
self.dapps_path.clone(), self.dapps_path.clone(),
self.signer_port.clone(), self.signer_address.clone(),
self.registrar.clone(), self.registrar.clone(),
self.sync_status.clone(), self.sync_status.clone(),
) )
@ -196,11 +197,11 @@ impl Server {
} }
/// Returns a list of CORS domains for API endpoint. /// Returns a list of CORS domains for API endpoint.
fn cors_domains(signer_port: Option<u16>) -> Vec<String> { fn cors_domains(signer_address: Option<(String, u16)>) -> Vec<String> {
match signer_port { match signer_address {
Some(port) => vec![ Some(signer_address) => vec![
format!("http://{}{}", HOME_PAGE, DAPPS_DOMAIN), format!("http://{}{}", HOME_PAGE, DAPPS_DOMAIN),
format!("http://{}", signer_address(port)), format!("http://{}", address(signer_address)),
], ],
None => vec![], None => vec![],
} }
@ -212,15 +213,15 @@ impl Server {
authorization: A, authorization: A,
handler: Arc<IoHandler>, handler: Arc<IoHandler>,
dapps_path: String, dapps_path: String,
signer_port: Option<u16>, signer_address: Option<(String, u16)>,
registrar: Arc<ContractClient>, registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>, sync_status: Arc<SyncStatus>,
) -> Result<Server, ServerError> { ) -> Result<Server, ServerError> {
let panic_handler = Arc::new(Mutex::new(None)); let panic_handler = Arc::new(Mutex::new(None));
let authorization = Arc::new(authorization); let authorization = Arc::new(authorization);
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status, signer_port)); let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status, signer_address.clone()));
let endpoints = Arc::new(apps::all_endpoints(dapps_path, signer_port.clone())); let endpoints = Arc::new(apps::all_endpoints(dapps_path, signer_address.clone()));
let cors_domains = Self::cors_domains(signer_port); let cors_domains = Self::cors_domains(signer_address.clone());
let special = Arc::new({ let special = Arc::new({
let mut special = HashMap::new(); let mut special = HashMap::new();
@ -237,7 +238,7 @@ impl Server {
try!(hyper::Server::http(addr)) try!(hyper::Server::http(addr))
.handle(move |ctrl| router::Router::new( .handle(move |ctrl| router::Router::new(
ctrl, ctrl,
signer_port.clone(), signer_address.clone(),
content_fetcher.clone(), content_fetcher.clone(),
endpoints.clone(), endpoints.clone(),
special.clone(), special.clone(),
@ -301,8 +302,8 @@ pub fn random_filename() -> String {
rng.gen_ascii_chars().take(12).collect() rng.gen_ascii_chars().take(12).collect()
} }
fn signer_address(port: u16) -> String { fn address(address: (String, u16)) -> String {
format!("127.0.0.1:{}", port) format!("{}:{}", address.0, address.1)
} }
#[cfg(test)] #[cfg(test)]
@ -331,7 +332,7 @@ mod util_tests {
// when // when
let none = Server::cors_domains(None); let none = Server::cors_domains(None);
let some = Server::cors_domains(Some(18180)); let some = Server::cors_domains(Some(("127.0.0.1".into(), 18180)));
// then // then
assert_eq!(none, Vec::<String>::new()); assert_eq!(none, Vec::<String>::new());


@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use page::handler; use page::{handler, PageCache};
use std::sync::Arc; use std::sync::Arc;
use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler}; use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler};
use parity_dapps::{WebApp, File, Info}; use parity_dapps::{WebApp, File, Info};
@ -25,7 +25,7 @@ pub struct PageEndpoint<T : WebApp + 'static> {
/// Prefix to strip from the path (when `None` deducted from `app_id`) /// Prefix to strip from the path (when `None` deducted from `app_id`)
pub prefix: Option<String>, pub prefix: Option<String>,
/// Safe to be loaded in frame by other origin. (use wisely!) /// Safe to be loaded in frame by other origin. (use wisely!)
safe_to_embed_at_port: Option<u16>, safe_to_embed_on: Option<(String, u16)>,
info: EndpointInfo, info: EndpointInfo,
} }
@ -36,7 +36,7 @@ impl<T: WebApp + 'static> PageEndpoint<T> {
PageEndpoint { PageEndpoint {
app: Arc::new(app), app: Arc::new(app),
prefix: None, prefix: None,
safe_to_embed_at_port: None, safe_to_embed_on: None,
info: EndpointInfo::from(info), info: EndpointInfo::from(info),
} }
} }
@ -49,7 +49,7 @@ impl<T: WebApp + 'static> PageEndpoint<T> {
PageEndpoint { PageEndpoint {
app: Arc::new(app), app: Arc::new(app),
prefix: Some(prefix), prefix: Some(prefix),
safe_to_embed_at_port: None, safe_to_embed_on: None,
info: EndpointInfo::from(info), info: EndpointInfo::from(info),
} }
} }
@ -57,12 +57,12 @@ impl<T: WebApp + 'static> PageEndpoint<T> {
/// Creates new `PageEndpoint` which can be safely used in iframe /// Creates new `PageEndpoint` which can be safely used in iframe
/// even from different origin. It might be dangerous (clickjacking). /// even from different origin. It might be dangerous (clickjacking).
/// Use wisely! /// Use wisely!
pub fn new_safe_to_embed(app: T, port: Option<u16>) -> Self { pub fn new_safe_to_embed(app: T, address: Option<(String, u16)>) -> Self {
let info = app.info(); let info = app.info();
PageEndpoint { PageEndpoint {
app: Arc::new(app), app: Arc::new(app),
prefix: None, prefix: None,
safe_to_embed_at_port: port, safe_to_embed_on: address,
info: EndpointInfo::from(info), info: EndpointInfo::from(info),
} }
} }
@ -79,8 +79,9 @@ impl<T: WebApp> Endpoint for PageEndpoint<T> {
app: BuiltinDapp::new(self.app.clone()), app: BuiltinDapp::new(self.app.clone()),
prefix: self.prefix.clone(), prefix: self.prefix.clone(),
path: path, path: path,
file: handler::ServedFile::new(self.safe_to_embed_at_port.clone()), file: handler::ServedFile::new(self.safe_to_embed_on.clone()),
safe_to_embed_at_port: self.safe_to_embed_at_port.clone(), cache: PageCache::Disabled,
safe_to_embed_on: self.safe_to_embed_on.clone(),
}) })
} }
} }


@ -15,6 +15,8 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::io::Write; use std::io::Write;
use time::{self, Duration};
use hyper::header; use hyper::header;
use hyper::server; use hyper::server;
use hyper::uri::RequestUri; use hyper::uri::RequestUri;
@ -58,17 +60,30 @@ pub enum ServedFile<T: Dapp> {
} }
impl<T: Dapp> ServedFile<T> { impl<T: Dapp> ServedFile<T> {
pub fn new(embeddable_at: Option<u16>) -> Self { pub fn new(embeddable_on: Option<(String, u16)>) -> Self {
ServedFile::Error(ContentHandler::error_embeddable( ServedFile::Error(ContentHandler::error(
StatusCode::NotFound, StatusCode::NotFound,
"404 Not Found", "404 Not Found",
"Requested dapp resource was not found.", "Requested dapp resource was not found.",
None, None,
embeddable_at, embeddable_on,
)) ))
} }
} }
/// Defines what cache headers should be appended to returned resources.
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum PageCache {
Enabled,
Disabled,
}
impl Default for PageCache {
fn default() -> Self {
PageCache::Disabled
}
}
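A note on what the two variants translate to further down in this file: `Enabled` makes the handler attach `Cache-Control: public, max-age=<one year>` plus a matching `Expires` header, while `Disabled` leaves the response uncached. A quick standalone check of that max-age arithmetic (editorial sketch, using the same `time` crate call as the handler):

extern crate time;

fn main() {
    // One non-leap year, as used for the Cache-Control max-age and the Expires offset.
    let validity = time::Duration::days(365);
    assert_eq!(validity.num_seconds(), 31_536_000); // 365 * 24 * 60 * 60
}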
/// A handler for a single webapp. /// A handler for a single webapp.
/// Resolves correct paths and serves as a plumbing code between /// Resolves correct paths and serves as a plumbing code between
/// hyper server and dapp. /// hyper server and dapp.
@ -82,7 +97,9 @@ pub struct PageHandler<T: Dapp> {
/// Requested path. /// Requested path.
pub path: EndpointPath, pub path: EndpointPath,
/// Flag indicating if the file can be safely embedded (put in an iframe). /// Flag indicating if the file can be safely embedded (put in an iframe).
pub safe_to_embed_at_port: Option<u16>, pub safe_to_embed_on: Option<(String, u16)>,
/// Cache settings for this page.
pub cache: PageCache,
} }
impl<T: Dapp> PageHandler<T> { impl<T: Dapp> PageHandler<T> {
@ -116,7 +133,7 @@ impl<T: Dapp> server::Handler<HttpStream> for PageHandler<T> {
self.app.file(&self.extract_path(url.path())) self.app.file(&self.extract_path(url.path()))
}, },
_ => None, _ => None,
}.map_or_else(|| ServedFile::new(self.safe_to_embed_at_port.clone()), |f| ServedFile::File(f)); }.map_or_else(|| ServedFile::new(self.safe_to_embed_on.clone()), |f| ServedFile::File(f));
Next::write() Next::write()
} }
@ -129,13 +146,23 @@ impl<T: Dapp> server::Handler<HttpStream> for PageHandler<T> {
ServedFile::File(ref f) => { ServedFile::File(ref f) => {
res.set_status(StatusCode::Ok); res.set_status(StatusCode::Ok);
if let PageCache::Enabled = self.cache {
let mut headers = res.headers_mut();
let validity = Duration::days(365);
headers.set(header::CacheControl(vec![
header::CacheDirective::Public,
header::CacheDirective::MaxAge(validity.num_seconds() as u32),
]));
headers.set(header::Expires(header::HttpDate(time::now() + validity)));
}
match f.content_type().parse() { match f.content_type().parse() {
Ok(mime) => res.headers_mut().set(header::ContentType(mime)), Ok(mime) => res.headers_mut().set(header::ContentType(mime)),
Err(()) => debug!(target: "page_handler", "invalid MIME type: {}", f.content_type()), Err(()) => debug!(target: "dapps", "invalid MIME type: {}", f.content_type()),
} }
// Security headers: // Security headers:
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_at_port); add_security_headers(&mut res.headers_mut(), self.safe_to_embed_on.clone());
Next::write() Next::write()
}, },
ServedFile::Error(ref mut handler) => { ServedFile::Error(ref mut handler) => {
@ -218,7 +245,8 @@ fn should_extract_path_with_appid() {
using_dapps_domains: true, using_dapps_domains: true,
}, },
file: ServedFile::new(None), file: ServedFile::new(None),
safe_to_embed_at_port: None, cache: Default::default(),
safe_to_embed_on: None,
}; };
// when // when


@ -18,7 +18,7 @@ use mime_guess;
use std::io::{Seek, Read, SeekFrom}; use std::io::{Seek, Read, SeekFrom};
use std::fs; use std::fs;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use page::handler; use page::handler::{self, PageCache};
use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler}; use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -26,25 +26,28 @@ pub struct LocalPageEndpoint {
path: PathBuf, path: PathBuf,
mime: Option<String>, mime: Option<String>,
info: Option<EndpointInfo>, info: Option<EndpointInfo>,
embeddable_at: Option<u16>, cache: PageCache,
embeddable_on: Option<(String, u16)>,
} }
impl LocalPageEndpoint { impl LocalPageEndpoint {
pub fn new(path: PathBuf, info: EndpointInfo, embeddable_at: Option<u16>) -> Self { pub fn new(path: PathBuf, info: EndpointInfo, cache: PageCache, embeddable_on: Option<(String, u16)>) -> Self {
LocalPageEndpoint { LocalPageEndpoint {
path: path, path: path,
mime: None, mime: None,
info: Some(info), info: Some(info),
embeddable_at: embeddable_at, cache: cache,
embeddable_on: embeddable_on,
} }
} }
pub fn single_file(path: PathBuf, mime: String) -> Self { pub fn single_file(path: PathBuf, mime: String, cache: PageCache) -> Self {
LocalPageEndpoint { LocalPageEndpoint {
path: path, path: path,
mime: Some(mime), mime: Some(mime),
info: None, info: None,
embeddable_at: None, cache: cache,
embeddable_on: None,
} }
} }
@ -65,7 +68,8 @@ impl Endpoint for LocalPageEndpoint {
prefix: None, prefix: None,
path: path, path: path,
file: handler::ServedFile::new(None), file: handler::ServedFile::new(None),
safe_to_embed_at_port: self.embeddable_at, safe_to_embed_on: self.embeddable_on.clone(),
cache: self.cache,
}) })
} else { } else {
Box::new(handler::PageHandler { Box::new(handler::PageHandler {
@ -73,7 +77,8 @@ impl Endpoint for LocalPageEndpoint {
prefix: None, prefix: None,
path: path, path: path,
file: handler::ServedFile::new(None), file: handler::ServedFile::new(None),
safe_to_embed_at_port: self.embeddable_at, safe_to_embed_on: self.embeddable_on.clone(),
cache: self.cache,
}) })
} }
} }
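Call sites now have to pass the cache policy (and the embedding address) explicitly. A hedged sketch of the two updated constructors as they would be called from elsewhere in this crate (the paths and the `build_endpoints` helper are hypothetical; `EndpointInfo` comes from the endpoint module):

use std::path::PathBuf;
use endpoint::EndpointInfo;
use page::{LocalPageEndpoint, PageCache};

fn build_endpoints(info: EndpointInfo) -> (LocalPageEndpoint, LocalPageEndpoint) {
    // A dapp directory: no caching, embeddable on the signer interface.
    let dapp = LocalPageEndpoint::new(
        PathBuf::from("/tmp/my-dapp"), // hypothetical path
        info,
        PageCache::Disabled,
        Some(("127.0.0.1".into(), 18180)),
    );
    // A single static file: cached aggressively, not embeddable.
    let asset = LocalPageEndpoint::single_file(
        PathBuf::from("/tmp/bundle.js"), // hypothetical path
        "application/javascript".into(),
        PageCache::Enabled,
    );
    (dapp, asset)
}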


@ -21,4 +21,5 @@ mod handler;
pub use self::local::LocalPageEndpoint; pub use self::local::LocalPageEndpoint;
pub use self::builtin::PageEndpoint; pub use self::builtin::PageEndpoint;
pub use self::handler::PageCache;


@ -19,24 +19,24 @@
use endpoint::{Endpoint, Handler, EndpointPath}; use endpoint::{Endpoint, Handler, EndpointPath};
use handlers::ContentHandler; use handlers::ContentHandler;
use apps::{HOME_PAGE, DAPPS_DOMAIN}; use apps::{HOME_PAGE, DAPPS_DOMAIN};
use signer_address; use address;
pub struct ProxyPac { pub struct ProxyPac {
signer_port: Option<u16>, signer_address: Option<(String, u16)>,
} }
impl ProxyPac { impl ProxyPac {
pub fn boxed(signer_port: Option<u16>) -> Box<Endpoint> { pub fn boxed(signer_address: Option<(String, u16)>) -> Box<Endpoint> {
Box::new(ProxyPac { Box::new(ProxyPac {
signer_port: signer_port signer_address: signer_address
}) })
} }
} }
impl Endpoint for ProxyPac { impl Endpoint for ProxyPac {
fn to_handler(&self, path: EndpointPath) -> Box<Handler> { fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
let signer = self.signer_port let signer = self.signer_address.clone()
.map(signer_address) .map(address)
.unwrap_or_else(|| format!("{}:{}", path.host, path.port)); .unwrap_or_else(|| format!("{}:{}", path.host, path.port));
let content = format!( let content = format!(


@ -59,7 +59,8 @@ impl Authorization for HttpBasicAuth {
status::StatusCode::Unauthorized, status::StatusCode::Unauthorized,
"Unauthorized", "Unauthorized",
"You need to provide valid credentials to access this page.", "You need to provide valid credentials to access this page.",
None None,
None,
))) )))
}, },
Access::AuthRequired => { Access::AuthRequired => {


@ -41,6 +41,7 @@ pub fn host_invalid_response() -> Box<server::Handler<HttpStream> + Send> {
Box::new(ContentHandler::error(StatusCode::Forbidden, Box::new(ContentHandler::error(StatusCode::Forbidden,
"Current Host Is Disallowed", "Current Host Is Disallowed",
"You are trying to access your node using incorrect address.", "You are trying to access your node using incorrect address.",
Some("Use allowed URL or specify different <code>hosts</code> CLI options.") Some("Use allowed URL or specify different <code>hosts</code> CLI options."),
None,
)) ))
} }


@ -20,7 +20,7 @@
pub mod auth; pub mod auth;
mod host_validation; mod host_validation;
use signer_address; use address;
use std::sync::Arc; use std::sync::Arc;
use std::collections::HashMap; use std::collections::HashMap;
use url::{Url, Host}; use url::{Url, Host};
@ -43,7 +43,7 @@ pub enum SpecialEndpoint {
pub struct Router<A: Authorization + 'static> { pub struct Router<A: Authorization + 'static> {
control: Option<Control>, control: Option<Control>,
signer_port: Option<u16>, signer_address: Option<(String, u16)>,
endpoints: Arc<Endpoints>, endpoints: Arc<Endpoints>,
fetch: Arc<ContentFetcher>, fetch: Arc<ContentFetcher>,
special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>, special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
@ -105,28 +105,34 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
trace!(target: "dapps", "Resolving to fetchable content."); trace!(target: "dapps", "Resolving to fetchable content.");
self.fetch.to_async_handler(path.clone(), control) self.fetch.to_async_handler(path.clone(), control)
}, },
// NOTE [todr] /home is redirected to home page since some users may have the redirection cached
// (in the past we used 301 instead of 302)
// It should be safe to remove it in (near) future.
//
// 404 for non-existent content // 404 for non-existent content
(Some(_), _) if *req.method() == hyper::method::Method::Get => { (Some(ref path), _) if *req.method() == hyper::Method::Get && path.app_id != "home" => {
trace!(target: "dapps", "Resolving to 404."); trace!(target: "dapps", "Resolving to 404.");
Box::new(ContentHandler::error( Box::new(ContentHandler::error(
StatusCode::NotFound, StatusCode::NotFound,
"404 Not Found", "404 Not Found",
"Requested content was not found.", "Requested content was not found.",
None, None,
self.signer_address.clone(),
)) ))
}, },
// Redirect any other GET request to signer. // Redirect any other GET request to signer.
_ if *req.method() == hyper::method::Method::Get => { _ if *req.method() == hyper::Method::Get => {
if let Some(port) = self.signer_port { if let Some(signer_address) = self.signer_address.clone() {
trace!(target: "dapps", "Redirecting to signer interface."); trace!(target: "dapps", "Redirecting to signer interface.");
Redirection::boxed(&format!("http://{}", signer_address(port))) Redirection::boxed(&format!("http://{}", address(signer_address)))
} else { } else {
trace!(target: "dapps", "Signer disabled, returning 404."); trace!(target: "dapps", "Signer disabled, returning 404.");
Box::new(ContentHandler::error( Box::new(ContentHandler::error(
StatusCode::NotFound, StatusCode::NotFound,
"404 Not Found", "404 Not Found",
"Your homepage is not available when Trusted Signer is disabled.", "Your homepage is not available when Trusted Signer is disabled.",
Some("You can still access dapps by writing a correct address, though. Re-enabled Signer to get your homepage back."), Some("You can still access dapps by writing a correct address, though. Re-enable Signer to get your homepage back."),
self.signer_address.clone(),
)) ))
} }
}, },
@ -162,7 +168,7 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
impl<A: Authorization> Router<A> { impl<A: Authorization> Router<A> {
pub fn new( pub fn new(
control: Control, control: Control,
signer_port: Option<u16>, signer_address: Option<(String, u16)>,
content_fetcher: Arc<ContentFetcher>, content_fetcher: Arc<ContentFetcher>,
endpoints: Arc<Endpoints>, endpoints: Arc<Endpoints>,
special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>, special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
@ -175,7 +181,7 @@ impl<A: Authorization> Router<A> {
.to_handler(EndpointPath::default()); .to_handler(EndpointPath::default());
Router { Router {
control: Some(control), control: Some(control),
signer_port: signer_port, signer_address: signer_address,
endpoints: endpoints, endpoints: endpoints,
fetch: content_fetcher, fetch: content_fetcher,
special: special, special: special,


@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use tests::helpers::{serve_with_registrar, serve_with_registrar_and_sync, request, assert_security_headers}; use tests::helpers::{serve_with_registrar, serve_with_registrar_and_sync, request, assert_security_headers_for_embed};
#[test] #[test]
fn should_resolve_dapp() { fn should_resolve_dapp() {
@ -34,7 +34,7 @@ fn should_resolve_dapp() {
// then // then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned()); assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_eq!(registrar.calls.lock().len(), 2); assert_eq!(registrar.calls.lock().len(), 2);
assert_security_headers(&response.headers); assert_security_headers_for_embed(&response.headers);
} }
#[test] #[test]
@ -63,5 +63,5 @@ fn should_return_503_when_syncing_but_should_make_the_calls() {
// then // then
assert_eq!(response.status, "HTTP/1.1 503 Service Unavailable".to_owned()); assert_eq!(response.status, "HTTP/1.1 503 Service Unavailable".to_owned());
assert_eq!(registrar.calls.lock().len(), 4); assert_eq!(registrar.calls.lock().len(), 4);
assert_security_headers(&response.headers); assert_security_headers_for_embed(&response.headers);
} }


@ -76,7 +76,7 @@ pub fn init_server(hosts: Option<Vec<String>>, is_syncing: bool) -> (Server, Arc
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading"); dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let mut builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar.clone()); let mut builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar.clone());
builder.with_sync_status(Arc::new(move || is_syncing)); builder.with_sync_status(Arc::new(move || is_syncing));
builder.with_signer_port(Some(SIGNER_PORT)); builder.with_signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)));
( (
builder.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), hosts).unwrap(), builder.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), hosts).unwrap(),
registrar, registrar,
@ -89,7 +89,7 @@ pub fn serve_with_auth(user: &str, pass: &str) -> Server {
let mut dapps_path = env::temp_dir(); let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading"); dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let mut builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar); let mut builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar);
builder.with_signer_port(Some(SIGNER_PORT)); builder.with_signer_address(Some(("127.0.0.1".into(), SIGNER_PORT)));
builder.start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), None, user, pass).unwrap() builder.start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), None, user, pass).unwrap()
} }


@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use tests::helpers::{serve, request, assert_security_headers}; use tests::helpers::{serve, request, assert_security_headers, assert_security_headers_for_embed};
#[test] #[test]
fn should_redirect_to_home() { fn should_redirect_to_home() {
@ -56,6 +56,26 @@ fn should_redirect_to_home_when_trailing_slash_is_missing() {
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180"); assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
} }
#[test]
fn should_redirect_to_home_for_users_with_cached_redirection() {
// given
let server = serve();
// when
let response = request(server,
"\
GET /home/ HTTP/1.1\r\n\
Host: 127.0.0.1:8080\r\n\
Connection: close\r\n\
\r\n\
"
);
// then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned());
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
}
#[test] #[test]
fn should_display_404_on_invalid_dapp() { fn should_display_404_on_invalid_dapp() {
// given // given
@ -73,7 +93,7 @@ fn should_display_404_on_invalid_dapp() {
// then // then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned()); assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_security_headers(&response.headers); assert_security_headers_for_embed(&response.headers);
} }
#[test] #[test]
@ -93,7 +113,7 @@ fn should_display_404_on_invalid_dapp_with_domain() {
// then // then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned()); assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_security_headers(&response.headers); assert_security_headers_for_embed(&response.headers);
} }
#[test] #[test]


@ -11,7 +11,7 @@ build = "build.rs"
ethcore-ipc-codegen = { path = "../ipc/codegen" } ethcore-ipc-codegen = { path = "../ipc/codegen" }
[dependencies] [dependencies]
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }
ethcore-ipc = { path = "../ipc/rpc" } ethcore-ipc = { path = "../ipc/rpc" }
rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" } rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }


@ -14,6 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::thread;
use std::time::Duration; use std::time::Duration;
use std::io::{Read, Write}; use std::io::{Read, Write};
use std::str::{self, Lines}; use std::str::{self, Lines};
@ -42,8 +43,28 @@ pub fn read_block(lines: &mut Lines, all: bool) -> String {
block block
} }
fn connect(address: &SocketAddr) -> TcpStream {
let mut retries = 0;
let mut last_error = None;
while retries < 10 {
retries += 1;
let res = TcpStream::connect(address);
match res {
Ok(stream) => {
return stream;
},
Err(e) => {
last_error = Some(e);
thread::sleep(Duration::from_millis(retries * 10));
}
}
}
panic!("Unable to connect to the server. Last error: {:?}", last_error);
}
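The retry helper above sleeps `retries * 10` milliseconds after each failed attempt, so a connect that never succeeds gives up after roughly half a second of waiting. A one-line check of that arithmetic (editorial sketch):

fn main() {
    // 10ms + 20ms + ... + 100ms across the 10 attempts in `connect`.
    let total_ms: u64 = (1..11).map(|n| n * 10).sum();
    assert_eq!(total_ms, 550);
}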
pub fn request(address: &SocketAddr, request: &str) -> Response { pub fn request(address: &SocketAddr, request: &str) -> Response {
let mut req = TcpStream::connect(address).unwrap(); let mut req = connect(address);
req.set_read_timeout(Some(Duration::from_secs(1))).unwrap(); req.set_read_timeout(Some(Duration::from_secs(1))).unwrap();
req.write_all(request.as_bytes()).unwrap(); req.write_all(request.as_bytes()).unwrap();


@ -69,14 +69,19 @@ impl EthashManager {
Some(ref e) if *e == epoch => lights.recent.clone(), Some(ref e) if *e == epoch => lights.recent.clone(),
_ => match lights.prev_epoch.clone() { _ => match lights.prev_epoch.clone() {
Some(e) if e == epoch => { Some(e) if e == epoch => {
// swap // don't swap if recent is newer.
let t = lights.prev_epoch; if lights.recent_epoch > lights.prev_epoch {
lights.prev_epoch = lights.recent_epoch; None
lights.recent_epoch = t; } else {
let t = lights.prev.clone(); // swap
lights.prev = lights.recent.clone(); let t = lights.prev_epoch;
lights.recent = t; lights.prev_epoch = lights.recent_epoch;
lights.recent.clone() lights.recent_epoch = t;
let t = lights.prev.clone();
lights.prev = lights.recent.clone();
lights.recent = t;
lights.recent.clone()
}
} }
_ => None, _ => None,
}, },
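The fix above only swaps the cached lights when `prev` is not older than `recent`. A simplified sketch of the same guard on plain `Option`s (field names kept, but the types are hypothetical and this is not the real EthashManager state):

use std::mem;

struct Lights {
    recent_epoch: Option<u64>,
    prev_epoch: Option<u64>,
}

// Returns true if `prev` was promoted to `recent` for the requested epoch.
fn promote_prev(lights: &mut Lights, epoch: u64) -> bool {
    match lights.prev_epoch {
        // Only swap when `prev` holds the requested epoch and is not older
        // than whatever `recent` currently holds.
        Some(e) if e == epoch && lights.recent_epoch <= lights.prev_epoch => {
            mem::swap(&mut lights.prev_epoch, &mut lights.recent_epoch);
            true
        }
        _ => false,
    }
}

fn main() {
    let mut l = Lights { recent_epoch: Some(10), prev_epoch: Some(9) };
    assert!(!promote_prev(&mut l, 9)); // recent is newer: keep it
    let mut l = Lights { recent_epoch: Some(8), prev_epoch: Some(9) };
    assert!(promote_prev(&mut l, 9)); // prev is newer: swap
    assert_eq!(l.recent_epoch, Some(9));
}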


@ -3,7 +3,7 @@ description = "Ethcore library"
homepage = "http://ethcore.io" homepage = "http://ethcore.io"
license = "GPL-3.0" license = "GPL-3.0"
name = "ethcore" name = "ethcore"
version = "1.4.0" version = "1.5.0"
authors = ["Ethcore <admin@ethcore.io>"] authors = ["Ethcore <admin@ethcore.io>"]
build = "build.rs" build = "build.rs"
@ -24,8 +24,11 @@ rayon = "0.4.2"
semver = "0.2" semver = "0.2"
bit-set = "0.4" bit-set = "0.4"
time = "0.1" time = "0.1"
rand = "0.3"
byteorder = "0.5"
transient-hashmap = "0.1"
evmjit = { path = "../evmjit", optional = true } evmjit = { path = "../evmjit", optional = true }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.96", optional = true}
ethash = { path = "../ethash" } ethash = { path = "../ethash" }
ethcore-util = { path = "../util" } ethcore-util = { path = "../util" }
ethcore-io = { path = "../util/io" } ethcore-io = { path = "../util/io" }
@ -36,10 +39,8 @@ ethstore = { path = "../ethstore" }
ethkey = { path = "../ethkey" } ethkey = { path = "../ethkey" }
ethcore-ipc-nano = { path = "../ipc/nano" } ethcore-ipc-nano = { path = "../ipc/nano" }
rlp = { path = "../util/rlp" } rlp = { path = "../util/rlp" }
rand = "0.3" lru-cache = "0.1.0"
lru-cache = { git = "https://github.com/contain-rs/lru-cache" }
ethcore-bloom-journal = { path = "../util/bloom" } ethcore-bloom-journal = { path = "../util/bloom" }
byteorder = "0.5"
[dependencies.hyper] [dependencies.hyper]
git = "https://github.com/ethcore/hyper" git = "https://github.com/ethcore/hyper"

ethcore/light/Cargo.toml Normal file (16 lines)

@ -0,0 +1,16 @@
[package]
description = "Parity LES primitives"
homepage = "https://ethcore.io"
license = "GPL-3.0"
name = "ethcore-light"
version = "1.5.0"
authors = ["Ethcore <admin@ethcore.io>"]
[dependencies]
log = "0.3"
ethcore = { path = ".." }
ethcore-util = { path = "../../util" }
ethcore-network = { path = "../../util/network" }
ethcore-io = { path = "../../util/io" }
rlp = { path = "../../util/rlp" }
time = "0.1"

ethcore/light/src/client.rs Normal file (115 lines)

@ -0,0 +1,115 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Light client implementation. Used for raw data queries as well as the header
//! sync.
use std::sync::Arc;
use ethcore::engines::Engine;
use ethcore::ids::BlockID;
use ethcore::service::ClientIoMessage;
use ethcore::block_import_error::BlockImportError;
use ethcore::block_status::BlockStatus;
use ethcore::verification::queue::{HeaderQueue, QueueInfo};
use ethcore::transaction::SignedTransaction;
use ethcore::blockchain_info::BlockChainInfo;
use io::IoChannel;
use util::hash::H256;
use util::{Bytes, Mutex};
use provider::Provider;
use request;
/// Light client implementation.
pub struct Client {
engine: Arc<Engine>,
header_queue: HeaderQueue,
message_channel: Mutex<IoChannel<ClientIoMessage>>,
}
impl Client {
/// Import a header as rlp-encoded bytes.
pub fn import_header(&self, bytes: Bytes) -> Result<H256, BlockImportError> {
let header = ::rlp::decode(&bytes);
self.header_queue.import(header).map_err(Into::into)
}
/// Whether the block is already known (but not necessarily part of the canonical chain)
pub fn is_known(&self, _id: BlockID) -> bool {
false
}
/// Fetch a vector of all pending transactions.
pub fn pending_transactions(&self) -> Vec<SignedTransaction> {
vec![]
}
/// Inquire about the status of a given block.
pub fn status(&self, _id: BlockID) -> BlockStatus {
BlockStatus::Unknown
}
/// Get the header queue info.
pub fn queue_info(&self) -> QueueInfo {
self.header_queue.queue_info()
}
}
// dummy implementation -- may draw from canonical cache further on.
impl Provider for Client {
fn chain_info(&self) -> BlockChainInfo {
unimplemented!()
}
fn reorg_depth(&self, _a: &H256, _b: &H256) -> Option<u64> {
None
}
fn earliest_state(&self) -> Option<u64> {
None
}
fn block_headers(&self, _req: request::Headers) -> Vec<Bytes> {
Vec::new()
}
fn block_bodies(&self, _req: request::Bodies) -> Vec<Bytes> {
Vec::new()
}
fn receipts(&self, _req: request::Receipts) -> Vec<Bytes> {
Vec::new()
}
fn proofs(&self, _req: request::StateProofs) -> Vec<Bytes> {
Vec::new()
}
fn code(&self, _req: request::ContractCodes) -> Vec<Bytes> {
Vec::new()
}
fn header_proofs(&self, _req: request::HeaderProofs) -> Vec<Bytes> {
Vec::new()
}
fn pending_transactions(&self) -> Vec<SignedTransaction> {
Vec::new()
}
}

ethcore/light/src/lib.rs Normal file (47 lines)

@ -0,0 +1,47 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Light client logic and implementation.
//!
//! A "light" client stores very little chain-related data locally
//! unlike a full node, which stores all blocks, headers, receipts, and more.
//!
//! This enables the client to have a much lower resource footprint in
//! exchange for the cost of having to ask the network for state data
//! while responding to queries. This makes a light client unsuitable for
//! low-latency applications, but perfectly suitable for simple everyday
//! use-cases like sending transactions from a personal account.
//!
//! It starts by performing a header-only sync, verifying random samples
//! of members of the chain to varying degrees.
// TODO: remove when integrating with parity.
#![allow(dead_code)]
pub mod client;
pub mod net;
pub mod provider;
pub mod request;
extern crate ethcore_util as util;
extern crate ethcore_network as network;
extern crate ethcore_io as io;
extern crate ethcore;
extern crate rlp;
extern crate time;
#[macro_use]
extern crate log;


@ -0,0 +1,264 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! LES buffer flow management.
//!
//! Every request in the LES protocol leads to a reduction
//! of the requester's buffer value as a rate-limiting mechanism.
//! This buffer value will recharge at a set rate.
//!
//! This module provides an interface for configuration of buffer
//! flow costs and recharge rates.
use request;
use super::packet;
use super::error::Error;
use rlp::*;
use util::U256;
use time::{Duration, SteadyTime};
/// A request cost specification.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Cost(pub U256, pub U256);
/// Buffer value.
///
/// Produced and recharged using `FlowParams`.
/// Definitive updates can be made as well -- these will reset the recharge
/// point to the time of the update.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Buffer {
estimate: U256,
recharge_point: SteadyTime,
}
impl Buffer {
/// Get the current buffer value.
pub fn current(&self) -> U256 { self.estimate.clone() }
/// Make a definitive update.
/// This will be the value obtained after receiving
/// a response to a request.
pub fn update_to(&mut self, value: U256) {
self.estimate = value;
self.recharge_point = SteadyTime::now();
}
/// Attempt to apply the given cost to the buffer.
///
/// If successful, the cost will be deducted from the buffer.
///
/// If unsuccessful, the structure will be unaltered and an
/// error will be produced.
pub fn deduct_cost(&mut self, cost: U256) -> Result<(), Error> {
match cost > self.estimate {
true => Err(Error::BufferEmpty),
false => {
self.estimate = self.estimate - cost;
Ok(())
}
}
}
}
/// A cost table, mapping requests to base and per-request costs.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CostTable {
headers: Cost,
bodies: Cost,
receipts: Cost,
state_proofs: Cost,
contract_codes: Cost,
header_proofs: Cost,
}
impl Default for CostTable {
fn default() -> Self {
// arbitrarily chosen constants.
CostTable {
headers: Cost(100000.into(), 10000.into()),
bodies: Cost(150000.into(), 15000.into()),
receipts: Cost(50000.into(), 5000.into()),
state_proofs: Cost(250000.into(), 25000.into()),
contract_codes: Cost(200000.into(), 20000.into()),
header_proofs: Cost(150000.into(), 15000.into()),
}
}
}
impl RlpEncodable for CostTable {
fn rlp_append(&self, s: &mut RlpStream) {
fn append_cost(s: &mut RlpStream, msg_id: u8, cost: &Cost) {
s.begin_list(3)
.append(&msg_id)
.append(&cost.0)
.append(&cost.1);
}
s.begin_list(6);
append_cost(s, packet::GET_BLOCK_HEADERS, &self.headers);
append_cost(s, packet::GET_BLOCK_BODIES, &self.bodies);
append_cost(s, packet::GET_RECEIPTS, &self.receipts);
append_cost(s, packet::GET_PROOFS, &self.state_proofs);
append_cost(s, packet::GET_CONTRACT_CODES, &self.contract_codes);
append_cost(s, packet::GET_HEADER_PROOFS, &self.header_proofs);
}
}
impl RlpDecodable for CostTable {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
let rlp = decoder.as_rlp();
let mut headers = None;
let mut bodies = None;
let mut receipts = None;
let mut state_proofs = None;
let mut contract_codes = None;
let mut header_proofs = None;
for row in rlp.iter() {
let msg_id: u8 = try!(row.val_at(0));
let cost = {
let base = try!(row.val_at(1));
let per = try!(row.val_at(2));
Cost(base, per)
};
match msg_id {
packet::GET_BLOCK_HEADERS => headers = Some(cost),
packet::GET_BLOCK_BODIES => bodies = Some(cost),
packet::GET_RECEIPTS => receipts = Some(cost),
packet::GET_PROOFS => state_proofs = Some(cost),
packet::GET_CONTRACT_CODES => contract_codes = Some(cost),
packet::GET_HEADER_PROOFS => header_proofs = Some(cost),
_ => return Err(DecoderError::Custom("Unrecognized message in cost table")),
}
}
Ok(CostTable {
headers: try!(headers.ok_or(DecoderError::Custom("No headers cost specified"))),
bodies: try!(bodies.ok_or(DecoderError::Custom("No bodies cost specified"))),
receipts: try!(receipts.ok_or(DecoderError::Custom("No receipts cost specified"))),
state_proofs: try!(state_proofs.ok_or(DecoderError::Custom("No proofs cost specified"))),
contract_codes: try!(contract_codes.ok_or(DecoderError::Custom("No contract codes specified"))),
header_proofs: try!(header_proofs.ok_or(DecoderError::Custom("No header proofs cost specified"))),
})
}
}
/// A buffer-flow manager handles costs, recharge, and limits.
#[derive(Debug, Clone, PartialEq)]
pub struct FlowParams {
costs: CostTable,
limit: U256,
recharge: U256,
}
impl FlowParams {
/// Create new flow parameters from a request cost table,
/// buffer limit, and (minimum) rate of recharge.
pub fn new(limit: U256, costs: CostTable, recharge: U256) -> Self {
FlowParams {
costs: costs,
limit: limit,
recharge: recharge,
}
}
/// Get a reference to the buffer limit.
pub fn limit(&self) -> &U256 { &self.limit }
/// Get a reference to the cost table.
pub fn cost_table(&self) -> &CostTable { &self.costs }
/// Get a reference to the recharge rate.
pub fn recharge_rate(&self) -> &U256 { &self.recharge }
/// Compute the actual cost of a request, given the kind of request
/// and number of requests made.
pub fn compute_cost(&self, kind: request::Kind, amount: usize) -> U256 {
let cost = match kind {
request::Kind::Headers => &self.costs.headers,
request::Kind::Bodies => &self.costs.bodies,
request::Kind::Receipts => &self.costs.receipts,
request::Kind::StateProofs => &self.costs.state_proofs,
request::Kind::Codes => &self.costs.contract_codes,
request::Kind::HeaderProofs => &self.costs.header_proofs,
};
let amount: U256 = amount.into();
cost.0 + (amount * cost.1)
}
/// Create initial buffer parameter.
pub fn create_buffer(&self) -> Buffer {
Buffer {
estimate: self.limit,
recharge_point: SteadyTime::now(),
}
}
/// Recharge the buffer based on time passed since last
/// update.
pub fn recharge(&self, buf: &mut Buffer) {
let now = SteadyTime::now();
// recompute and update only in terms of full seconds elapsed
// in order to keep the estimate as an underestimate.
let elapsed = (now - buf.recharge_point).num_seconds();
buf.recharge_point = buf.recharge_point + Duration::seconds(elapsed);
let elapsed: U256 = elapsed.into();
buf.estimate = ::std::cmp::min(self.limit, buf.estimate + (elapsed * self.recharge));
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn should_serialize_cost_table() {
let costs = CostTable::default();
let serialized = ::rlp::encode(&costs);
let new_costs: CostTable = ::rlp::decode(&*serialized);
assert_eq!(costs, new_costs);
}
#[test]
fn buffer_mechanism() {
use std::thread;
use std::time::Duration;
let flow_params = FlowParams::new(100.into(), Default::default(), 20.into());
let mut buffer = flow_params.create_buffer();
assert!(buffer.deduct_cost(101.into()).is_err());
assert!(buffer.deduct_cost(10.into()).is_ok());
thread::sleep(Duration::from_secs(1));
flow_params.recharge(&mut buffer);
assert_eq!(buffer.estimate, 100.into());
}
}
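One more sanity check on the cost arithmetic above: `compute_cost` is base plus amount times the per-item cost. A test-style sketch in the same vein as the tests in this module (it assumes it lives alongside them, so `FlowParams` and the default `CostTable` constants are in scope):

#[cfg(test)]
mod cost_arithmetic_sketch {
    use super::*;
    use request;

    #[test]
    fn compute_cost_is_base_plus_per_item() {
        // headers default: Cost(100000, 10000) => 100000 + 10 * 10000 = 200000
        let flow_params = FlowParams::new(1_000_000.into(), Default::default(), 20.into());
        assert_eq!(flow_params.compute_cost(request::Kind::Headers, 10), 200_000.into());
    }
}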


@ -0,0 +1,94 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Defines error types and levels of punishment to use upon
//! encountering them.
use rlp::DecoderError;
use network::NetworkError;
use std::fmt;
/// Levels of punishment.
///
/// Currently just encompasses two different kinds of disconnect and
/// no punishment, but this is where reputation systems might come into play.
// In ascending order
#[derive(Debug, PartialEq, Eq)]
pub enum Punishment {
/// Perform no punishment.
None,
/// Disconnect the peer, but don't prevent them from reconnecting.
Disconnect,
/// Disconnect the peer and prevent them from reconnecting.
Disable,
}
/// Kinds of errors which can be encountered in the course of LES.
#[derive(Debug)]
pub enum Error {
/// An RLP decoding error.
Rlp(DecoderError),
/// A network error.
Network(NetworkError),
/// Out of buffer.
BufferEmpty,
/// Unrecognized packet code.
UnrecognizedPacket(u8),
/// Unexpected handshake.
UnexpectedHandshake,
/// Peer on wrong network (wrong NetworkId or genesis hash)
WrongNetwork,
}
impl Error {
/// What level of punishment does this error warrant?
pub fn punishment(&self) -> Punishment {
match *self {
Error::Rlp(_) => Punishment::Disable,
Error::Network(_) => Punishment::None,
Error::BufferEmpty => Punishment::Disable,
Error::UnrecognizedPacket(_) => Punishment::Disconnect,
Error::UnexpectedHandshake => Punishment::Disconnect,
Error::WrongNetwork => Punishment::Disable,
}
}
}
impl From<DecoderError> for Error {
fn from(err: DecoderError) -> Self {
Error::Rlp(err)
}
}
impl From<NetworkError> for Error {
fn from(err: NetworkError) -> Self {
Error::Network(err)
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::Rlp(ref err) => err.fmt(f),
Error::Network(ref err) => err.fmt(f),
Error::BufferEmpty => write!(f, "Out of buffer"),
Error::UnrecognizedPacket(code) => write!(f, "Unrecognized packet: 0x{:x}", code),
Error::UnexpectedHandshake => write!(f, "Unexpected handshake"),
Error::WrongNetwork => write!(f, "Wrong network"),
}
}
}
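Because both `DecoderError` and `NetworkError` convert into `Error`, packet handlers can bubble either kind up with `try!`. A minimal sketch of that pattern (the `read_request_id` helper is hypothetical; `UntrustedRlp`/`View` come from the rlp crate as used elsewhere in this protocol implementation):

use rlp::{UntrustedRlp, View};

// DecoderError from `val_at` converts into Error::Rlp via the From impl above.
fn read_request_id(rlp: &UntrustedRlp) -> Result<u64, Error> {
    Ok(try!(rlp.val_at(0)))
}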


@ -0,0 +1,506 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! LES Protocol Version 1 implementation.
//!
//! This uses a "Provider" to answer requests.
//! See https://github.com/ethcore/parity/wiki/Light-Ethereum-Subprotocol-(LES)
use io::TimerToken;
use network::{NetworkProtocolHandler, NetworkContext, NetworkError, PeerId};
use rlp::{RlpStream, Stream, UntrustedRlp, View};
use util::hash::H256;
use util::RwLock;
use std::collections::{HashMap, HashSet};
use std::sync::atomic::AtomicUsize;
use provider::Provider;
use request::{self, Request};
use self::buffer_flow::{Buffer, FlowParams};
use self::error::{Error, Punishment};
use self::status::{Status, Capabilities};
mod buffer_flow;
mod error;
mod status;
pub use self::status::Announcement;
const TIMEOUT: TimerToken = 0;
const TIMEOUT_INTERVAL_MS: u64 = 1000;
// LPV1
const PROTOCOL_VERSION: u32 = 1;
// TODO [rob] make configurable.
const PROTOCOL_ID: [u8; 3] = *b"les";
// packet ID definitions.
mod packet {
// the status packet.
pub const STATUS: u8 = 0x00;
// announcement of new block hashes or capabilities.
pub const ANNOUNCE: u8 = 0x01;
// request and response for block headers
pub const GET_BLOCK_HEADERS: u8 = 0x02;
pub const BLOCK_HEADERS: u8 = 0x03;
// request and response for block bodies
pub const GET_BLOCK_BODIES: u8 = 0x04;
pub const BLOCK_BODIES: u8 = 0x05;
// request and response for transaction receipts.
pub const GET_RECEIPTS: u8 = 0x06;
pub const RECEIPTS: u8 = 0x07;
// request and response for merkle proofs.
pub const GET_PROOFS: u8 = 0x08;
pub const PROOFS: u8 = 0x09;
// request and response for contract code.
pub const GET_CONTRACT_CODES: u8 = 0x0a;
pub const CONTRACT_CODES: u8 = 0x0b;
// relay transactions to peers.
pub const SEND_TRANSACTIONS: u8 = 0x0c;
// request and response for header proofs in a CHT.
pub const GET_HEADER_PROOFS: u8 = 0x0d;
pub const HEADER_PROOFS: u8 = 0x0e;
}
// A pending peer: one we've sent our status to but
// may not have received one for.
struct PendingPeer {
sent_head: H256,
}
// data about each peer.
struct Peer {
local_buffer: Buffer, // their buffer relative to us
remote_buffer: Buffer, // our buffer relative to them
current_asking: HashSet<usize>, // pending request ids.
status: Status,
capabilities: Capabilities,
remote_flow: FlowParams,
sent_head: H256, // last head we've given them.
}
/// This is an implementation of the light ethereum network protocol, abstracted
/// over a `Provider` of data and a p2p network.
///
/// This is simply designed for request-response purposes. Higher level uses
/// of the protocol, such as synchronization, will function as wrappers around
/// this system.
pub struct LightProtocol {
provider: Box<Provider>,
genesis_hash: H256,
network_id: status::NetworkId,
pending_peers: RwLock<HashMap<PeerId, PendingPeer>>,
peers: RwLock<HashMap<PeerId, Peer>>,
pending_requests: RwLock<HashMap<usize, Request>>,
capabilities: RwLock<Capabilities>,
flow_params: FlowParams, // assumed static and same for every peer.
req_id: AtomicUsize,
}
impl LightProtocol {
/// Make an announcement of new chain head and capabilities to all peers.
/// The announcement is expected to be valid.
pub fn make_announcement(&self, mut announcement: Announcement, io: &NetworkContext) {
let mut reorgs_map = HashMap::new();
// calculate reorg info and send packets
for (peer_id, peer_info) in self.peers.write().iter_mut() {
let reorg_depth = reorgs_map.entry(peer_info.sent_head)
.or_insert_with(|| {
match self.provider.reorg_depth(&announcement.head_hash, &peer_info.sent_head) {
Some(depth) => depth,
None => {
// both values will always originate locally -- this means something
// has gone really wrong
debug!(target: "les", "couldn't compute reorganization depth between {:?} and {:?}",
&announcement.head_hash, &peer_info.sent_head);
0
}
}
});
peer_info.sent_head = announcement.head_hash;
announcement.reorg_depth = *reorg_depth;
if let Err(e) = io.send(*peer_id, packet::ANNOUNCE, status::write_announcement(&announcement)) {
debug!(target: "les", "Error sending to peer {}: {}", peer_id, e);
}
}
}
}
impl LightProtocol {
// called when a peer connects.
fn on_connect(&self, peer: &PeerId, io: &NetworkContext) {
let peer = *peer;
match self.send_status(peer, io) {
Ok(pending_peer) => {
self.pending_peers.write().insert(peer, pending_peer);
}
Err(e) => {
trace!(target: "les", "Error while sending status: {}", e);
io.disconnect_peer(peer);
}
}
}
// called when a peer disconnects.
fn on_disconnect(&self, peer: PeerId) {
// TODO: reassign all requests assigned to this peer.
self.pending_peers.write().remove(&peer);
self.peers.write().remove(&peer);
}
// send status to a peer.
fn send_status(&self, peer: PeerId, io: &NetworkContext) -> Result<PendingPeer, NetworkError> {
let chain_info = self.provider.chain_info();
// TODO: could update capabilities here.
let status = Status {
head_td: chain_info.total_difficulty,
head_hash: chain_info.best_block_hash,
head_num: chain_info.best_block_number,
genesis_hash: chain_info.genesis_hash,
protocol_version: PROTOCOL_VERSION,
network_id: self.network_id,
last_head: None,
};
let capabilities = self.capabilities.read().clone();
let status_packet = status::write_handshake(&status, &capabilities, &self.flow_params);
try!(io.send(peer, packet::STATUS, status_packet));
Ok(PendingPeer {
sent_head: chain_info.best_block_hash,
})
}
// Handle status message from peer.
fn status(&self, peer: &PeerId, data: UntrustedRlp) -> Result<(), Error> {
let pending = match self.pending_peers.write().remove(peer) {
Some(pending) => pending,
None => {
return Err(Error::UnexpectedHandshake);
}
};
let (status, capabilities, flow_params) = try!(status::parse_handshake(data));
trace!(target: "les", "Connected peer with chain head {:?}", (status.head_hash, status.head_num));
if (status.network_id, status.genesis_hash) != (self.network_id, self.genesis_hash) {
return Err(Error::WrongNetwork);
}
self.peers.write().insert(*peer, Peer {
local_buffer: self.flow_params.create_buffer(),
remote_buffer: flow_params.create_buffer(),
current_asking: HashSet::new(),
status: status,
capabilities: capabilities,
remote_flow: flow_params,
sent_head: pending.sent_head,
});
Ok(())
}
// Handle an announcement.
fn announcement(&self, peer: &PeerId, data: UntrustedRlp) -> Result<(), Error> {
if !self.peers.read().contains_key(peer) {
debug!(target: "les", "Ignoring announcement from unknown peer");
return Ok(())
}
let announcement = try!(status::parse_announcement(data));
let mut peers = self.peers.write();
let peer_info = match peers.get_mut(peer) {
Some(info) => info,
None => return Ok(()),
};
// update status.
{
// TODO: punish peer if they've moved backwards.
let status = &mut peer_info.status;
let last_head = status.head_hash;
status.head_hash = announcement.head_hash;
status.head_td = announcement.head_td;
status.head_num = announcement.head_num;
status.last_head = Some((last_head, announcement.reorg_depth));
}
// update capabilities.
{
let caps = &mut peer_info.capabilities;
caps.serve_headers = caps.serve_headers || announcement.serve_headers;
caps.serve_state_since = caps.serve_state_since.or(announcement.serve_state_since);
caps.serve_chain_since = caps.serve_chain_since.or(announcement.serve_chain_since);
caps.tx_relay = caps.tx_relay || announcement.tx_relay;
}
// TODO: notify listeners if new best block.
Ok(())
}
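The capability merge above is deliberately monotonic: booleans can only switch on, and the `*_since` fields can only gain a value, so a peer never appears to lose a capability between announcements. A simplified sketch of that merge with hypothetical field types (the real `Capabilities` struct lives in the status module, which is not part of this excerpt):

struct Caps {
    serve_headers: bool,
    serve_state_since: Option<u64>,
}

fn merge(current: &mut Caps, announced: &Caps) {
    // `||` can only turn a capability on; `.or` only fills a missing value.
    current.serve_headers = current.serve_headers || announced.serve_headers;
    current.serve_state_since = current.serve_state_since.or(announced.serve_state_since);
}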
// Handle a request for block headers.
fn get_block_headers(&self, peer: &PeerId, io: &NetworkContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_HEADERS: usize = 512;
let mut present_buffer = match self.peers.read().get(peer) {
Some(peer) => peer.local_buffer.clone(),
None => {
debug!(target: "les", "Ignoring announcement from unknown peer");
return Ok(())
}
};
self.flow_params.recharge(&mut present_buffer);
let req_id: u64 = try!(data.val_at(0));
let req = request::Headers {
block: {
let rlp = try!(data.at(1));
(try!(rlp.val_at(0)), try!(rlp.val_at(1)))
},
max: ::std::cmp::min(MAX_HEADERS, try!(data.val_at(2))),
skip: try!(data.val_at(3)),
reverse: try!(data.val_at(4)),
};
let max_cost = self.flow_params.compute_cost(request::Kind::Headers, req.max);
try!(present_buffer.deduct_cost(max_cost));
let response = self.provider.block_headers(req);
let actual_cost = self.flow_params.compute_cost(request::Kind::Headers, response.len());
let cur_buffer = match self.peers.write().get_mut(peer) {
Some(peer) => {
self.flow_params.recharge(&mut peer.local_buffer);
try!(peer.local_buffer.deduct_cost(actual_cost));
peer.local_buffer.current()
}
None => {
debug!(target: "les", "peer disconnected during serving of request.");
return Ok(())
}
};
io.respond(packet::BLOCK_HEADERS, {
let mut stream = RlpStream::new_list(response.len() + 2);
stream.append(&req_id).append(&cur_buffer);
for header in response {
stream.append_raw(&header, 1);
}
stream.out()
}).map_err(Into::into)
}
// Receive a response for block headers.
fn block_headers(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Handle a request for block bodies.
fn get_block_bodies(&self, peer: &PeerId, io: &NetworkContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_BODIES: usize = 256;
let mut present_buffer = match self.peers.read().get(peer) {
Some(peer) => peer.local_buffer.clone(),
None => {
debug!(target: "les", "Ignoring announcement from unknown peer");
return Ok(())
}
};
self.flow_params.recharge(&mut present_buffer);
let req_id: u64 = try!(data.val_at(0));
let req = request::Bodies {
block_hashes: try!(data.iter().skip(1).take(MAX_BODIES).map(|x| x.as_val()).collect())
};
let max_cost = self.flow_params.compute_cost(request::Kind::Bodies, req.block_hashes.len());
try!(present_buffer.deduct_cost(max_cost));
let response = self.provider.block_bodies(req);
let response_len = response.iter().filter(|x| &x[..] != &::rlp::EMPTY_LIST_RLP).count();
let actual_cost = self.flow_params.compute_cost(request::Kind::Bodies, response_len);
let cur_buffer = match self.peers.write().get_mut(peer) {
Some(peer) => {
self.flow_params.recharge(&mut peer.local_buffer);
try!(peer.local_buffer.deduct_cost(actual_cost));
peer.local_buffer.current()
}
None => {
debug!(target: "les", "peer disconnected during serving of request.");
return Ok(())
}
};
io.respond(packet::BLOCK_BODIES, {
let mut stream = RlpStream::new_list(response.len() + 2);
stream.append(&req_id).append(&cur_buffer);
for body in response {
stream.append_raw(&body, 1);
}
stream.out()
}).map_err(Into::into)
}
// Receive a response for block bodies.
fn block_bodies(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Handle a request for receipts.
fn get_receipts(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Receive a response for receipts.
fn receipts(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Handle a request for proofs.
fn get_proofs(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Receive a response for proofs.
fn proofs(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Handle a request for contract code.
fn get_contract_code(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Receive a response for contract code.
fn contract_code(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Handle a request for header proofs
fn get_header_proofs(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Receive a response for header proofs
fn header_proofs(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
// Receive a set of transactions to relay.
fn relay_transactions(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> {
unimplemented!()
}
}
impl NetworkProtocolHandler for LightProtocol {
fn initialize(&self, io: &NetworkContext) {
io.register_timer(TIMEOUT, TIMEOUT_INTERVAL_MS).expect("Error registering sync timer.");
}
fn read(&self, io: &NetworkContext, peer: &PeerId, packet_id: u8, data: &[u8]) {
let rlp = UntrustedRlp::new(data);
// handle the packet
let res = match packet_id {
packet::STATUS => self.status(peer, rlp),
packet::ANNOUNCE => self.announcement(peer, rlp),
packet::GET_BLOCK_HEADERS => self.get_block_headers(peer, io, rlp),
packet::BLOCK_HEADERS => self.block_headers(peer, io, rlp),
packet::GET_BLOCK_BODIES => self.get_block_bodies(peer, io, rlp),
packet::BLOCK_BODIES => self.block_bodies(peer, io, rlp),
packet::GET_RECEIPTS => self.get_receipts(peer, io, rlp),
packet::RECEIPTS => self.receipts(peer, io, rlp),
packet::GET_PROOFS => self.get_proofs(peer, io, rlp),
packet::PROOFS => self.proofs(peer, io, rlp),
packet::GET_CONTRACT_CODES => self.get_contract_code(peer, io, rlp),
packet::CONTRACT_CODES => self.contract_code(peer, io, rlp),
packet::GET_HEADER_PROOFS => self.get_header_proofs(peer, io, rlp),
packet::HEADER_PROOFS => self.header_proofs(peer, io, rlp),
packet::SEND_TRANSACTIONS => self.relay_transactions(peer, io, rlp),
other => {
Err(Error::UnrecognizedPacket(other))
}
};
// if something went wrong, figure out how much to punish the peer.
if let Err(e) = res {
match e.punishment() {
Punishment::None => {}
Punishment::Disconnect => {
debug!(target: "les", "Disconnecting peer {}: {}", peer, e);
io.disconnect_peer(*peer)
}
Punishment::Disable => {
debug!(target: "les", "Disabling peer {}: {}", peer, e);
io.disable_peer(*peer)
}
}
}
}
fn connected(&self, io: &NetworkContext, peer: &PeerId) {
self.on_connect(peer, io);
}
fn disconnected(&self, _io: &NetworkContext, peer: &PeerId) {
self.on_disconnect(*peer);
}
fn timeout(&self, _io: &NetworkContext, timer: TimerToken) {
match timer {
TIMEOUT => {
// broadcast transactions to peers.
}
_ => warn!(target: "les", "received timeout on unknown token {}", timer),
}
}
}

View File

@ -0,0 +1,539 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Peer status and capabilities.
use rlp::{DecoderError, RlpDecodable, RlpEncodable, RlpStream, Stream, UntrustedRlp, View};
use util::{H256, U256};
use super::buffer_flow::FlowParams;
// recognized handshake/announcement keys.
// unknown keys are to be skipped, known keys have a defined order.
// their string values are defined in the LES spec.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd)]
enum Key {
ProtocolVersion,
NetworkId,
HeadTD,
HeadHash,
HeadNum,
GenesisHash,
ServeHeaders,
ServeChainSince,
ServeStateSince,
TxRelay,
BufferLimit,
BufferCostTable,
BufferRechargeRate,
}
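// NB: `Ord` is derived, so keys compare in declaration order; `parse_announcement`
// below relies on this when rejecting out-of-order keys.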
impl Key {
// get the string value of this key.
fn as_str(&self) -> &'static str {
match *self {
Key::ProtocolVersion => "protocolVersion",
Key::NetworkId => "networkId",
Key::HeadTD => "headTd",
Key::HeadHash => "headHash",
Key::HeadNum => "headNum",
Key::GenesisHash => "genesisHash",
Key::ServeHeaders => "serveHeaders",
Key::ServeChainSince => "serveChainSince",
Key::ServeStateSince => "serveStateSince",
Key::TxRelay => "txRelay",
Key::BufferLimit => "flowControl/BL",
Key::BufferCostTable => "flowControl/MRC",
Key::BufferRechargeRate => "flowControl/MRR",
}
}
// try to parse the key value from a string.
fn from_str(s: &str) -> Option<Self> {
match s {
"protocolVersion" => Some(Key::ProtocolVersion),
"networkId" => Some(Key::NetworkId),
"headTd" => Some(Key::HeadTD),
"headHash" => Some(Key::HeadHash),
"headNum" => Some(Key::HeadNum),
"genesisHash" => Some(Key::GenesisHash),
"serveHeaders" => Some(Key::ServeHeaders),
"serveChainSince" => Some(Key::ServeChainSince),
"serveStateSince" => Some(Key::ServeStateSince),
"txRelay" => Some(Key::TxRelay),
"flowControl/BL" => Some(Key::BufferLimit),
"flowControl/MRC" => Some(Key::BufferCostTable),
"flowControl/MRR" => Some(Key::BufferRechargeRate),
_ => None
}
}
}
/// Network ID structure.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u32)]
pub enum NetworkId {
/// ID for the mainnet
Mainnet = 1,
/// ID for the testnet
Testnet = 0,
}
impl NetworkId {
fn from_raw(raw: u32) -> Option<Self> {
match raw {
0 => Some(NetworkId::Testnet),
1 => Some(NetworkId::Mainnet),
_ => None,
}
}
}
// helper for decoding key-value pairs in the handshake or an announcement.
struct Parser<'a> {
pos: usize,
rlp: UntrustedRlp<'a>,
}
impl<'a> Parser<'a> {
// expect a specific next key, and decode the value.
// error on unexpected key or invalid value.
fn expect<T: RlpDecodable>(&mut self, key: Key) -> Result<T, DecoderError> {
self.expect_raw(key).and_then(|item| item.as_val())
}
// expect a specific next key, and get the value's RLP.
// if the key isn't found, the position isn't advanced.
fn expect_raw(&mut self, key: Key) -> Result<UntrustedRlp<'a>, DecoderError> {
let pre_pos = self.pos;
if let Some((k, val)) = try!(self.get_next()) {
if k == key { return Ok(val) }
}
self.pos = pre_pos;
Err(DecoderError::Custom("Missing expected key"))
}
// get the next key and value RLP.
fn get_next(&mut self) -> Result<Option<(Key, UntrustedRlp<'a>)>, DecoderError> {
while self.pos < self.rlp.item_count() {
let pair = try!(self.rlp.at(self.pos));
let k: String = try!(pair.val_at(0));
self.pos += 1;
match Key::from_str(&k) {
Some(key) => return Ok(Some((key, try!(pair.at(1))))),
None => continue,
}
}
Ok(None)
}
}
// Helper for encoding a key-value pair
fn encode_pair<T: RlpEncodable>(key: Key, val: &T) -> Vec<u8> {
let mut s = RlpStream::new_list(2);
s.append(&key.as_str()).append(val);
s.out()
}
// Helper for encoding a flag.
fn encode_flag(key: Key) -> Vec<u8> {
let mut s = RlpStream::new_list(2);
s.append(&key.as_str()).append_empty_data();
s.out()
}
/// A peer status message.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Status {
/// Protocol version.
pub protocol_version: u32,
/// Network id of this peer.
pub network_id: NetworkId,
/// Total difficulty of the head of the chain.
pub head_td: U256,
/// Hash of the best block.
pub head_hash: H256,
/// Number of the best block.
pub head_num: u64,
/// Genesis hash
pub genesis_hash: H256,
/// Last announced chain head and reorg depth to common ancestor.
pub last_head: Option<(H256, u64)>,
}
/// Peer capabilities.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Capabilities {
/// Whether this peer can serve headers
pub serve_headers: bool,
/// Earliest block number it can serve block/receipt requests for.
pub serve_chain_since: Option<u64>,
/// Earliest block number it can serve state requests for.
pub serve_state_since: Option<u64>,
/// Whether it can relay transactions to the eth network.
pub tx_relay: bool,
}
impl Default for Capabilities {
fn default() -> Self {
Capabilities {
serve_headers: true,
serve_chain_since: None,
serve_state_since: None,
tx_relay: false,
}
}
}
/// Attempt to parse a handshake message into its three parts:
/// - chain status
/// - serving capabilities
/// - buffer flow parameters
pub fn parse_handshake(rlp: UntrustedRlp) -> Result<(Status, Capabilities, FlowParams), DecoderError> {
let mut parser = Parser {
pos: 0,
rlp: rlp,
};
let status = Status {
protocol_version: try!(parser.expect(Key::ProtocolVersion)),
network_id: try!(parser.expect(Key::NetworkId)
.and_then(|id: u32| NetworkId::from_raw(id).ok_or(DecoderError::Custom("Invalid network ID")))),
head_td: try!(parser.expect(Key::HeadTD)),
head_hash: try!(parser.expect(Key::HeadHash)),
head_num: try!(parser.expect(Key::HeadNum)),
genesis_hash: try!(parser.expect(Key::GenesisHash)),
last_head: None,
};
let capabilities = Capabilities {
serve_headers: parser.expect_raw(Key::ServeHeaders).is_ok(),
serve_chain_since: parser.expect(Key::ServeChainSince).ok(),
serve_state_since: parser.expect(Key::ServeStateSince).ok(),
tx_relay: parser.expect_raw(Key::TxRelay).is_ok(),
};
let flow_params = FlowParams::new(
try!(parser.expect(Key::BufferLimit)),
try!(parser.expect(Key::BufferCostTable)),
try!(parser.expect(Key::BufferRechargeRate)),
);
Ok((status, capabilities, flow_params))
}
/// Write a handshake, given status, capabilities, and flow parameters.
pub fn write_handshake(status: &Status, capabilities: &Capabilities, flow_params: &FlowParams) -> Vec<u8> {
let mut pairs = Vec::new();
pairs.push(encode_pair(Key::ProtocolVersion, &status.protocol_version));
pairs.push(encode_pair(Key::NetworkId, &(status.network_id as u32)));
pairs.push(encode_pair(Key::HeadTD, &status.head_td));
pairs.push(encode_pair(Key::HeadHash, &status.head_hash));
pairs.push(encode_pair(Key::HeadNum, &status.head_num));
pairs.push(encode_pair(Key::GenesisHash, &status.genesis_hash));
if capabilities.serve_headers {
pairs.push(encode_flag(Key::ServeHeaders));
}
if let Some(ref serve_chain_since) = capabilities.serve_chain_since {
pairs.push(encode_pair(Key::ServeChainSince, serve_chain_since));
}
if let Some(ref serve_state_since) = capabilities.serve_state_since {
pairs.push(encode_pair(Key::ServeStateSince, serve_state_since));
}
if capabilities.tx_relay {
pairs.push(encode_flag(Key::TxRelay));
}
pairs.push(encode_pair(Key::BufferLimit, flow_params.limit()));
pairs.push(encode_pair(Key::BufferCostTable, flow_params.cost_table()));
pairs.push(encode_pair(Key::BufferRechargeRate, flow_params.recharge_rate()));
let mut stream = RlpStream::new_list(pairs.len());
for pair in pairs {
stream.append_raw(&pair, 1);
}
stream.out()
}
/// An announcement of new chain head or capabilities made by a peer.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Announcement {
/// Hash of the best block.
pub head_hash: H256,
/// Number of the best block.
pub head_num: u64,
/// Head total difficulty
pub head_td: U256,
/// reorg depth to common ancestor of last announced head.
pub reorg_depth: u64,
/// optional new header-serving capability. false means "no change"
pub serve_headers: bool,
/// optional new state-serving capability
pub serve_state_since: Option<u64>,
/// optional new chain-serving capability
pub serve_chain_since: Option<u64>,
/// optional new transaction-relay capability. false means "no change"
pub tx_relay: bool,
// TODO: changes in buffer flow?
}
/// Parse an announcement.
pub fn parse_announcement(rlp: UntrustedRlp) -> Result<Announcement, DecoderError> {
let mut last_key = None;
let mut announcement = Announcement {
head_hash: try!(rlp.val_at(0)),
head_num: try!(rlp.val_at(1)),
head_td: try!(rlp.val_at(2)),
reorg_depth: try!(rlp.val_at(3)),
serve_headers: false,
serve_state_since: None,
serve_chain_since: None,
tx_relay: false,
};
let mut parser = Parser {
pos: 4,
rlp: rlp,
};
while let Some((key, item)) = try!(parser.get_next()) {
if Some(key) <= last_key { return Err(DecoderError::Custom("Invalid announcement key ordering")) }
last_key = Some(key);
match key {
Key::ServeHeaders => announcement.serve_headers = true,
Key::ServeStateSince => announcement.serve_state_since = Some(try!(item.as_val())),
Key::ServeChainSince => announcement.serve_chain_since = Some(try!(item.as_val())),
Key::TxRelay => announcement.tx_relay = true,
_ => return Err(DecoderError::Custom("Nonsensical key in announcement")),
}
}
Ok(announcement)
}
/// Write an announcement out.
pub fn write_announcement(announcement: &Announcement) -> Vec<u8> {
let mut pairs = Vec::new();
if announcement.serve_headers {
pairs.push(encode_flag(Key::ServeHeaders));
}
if let Some(ref serve_chain_since) = announcement.serve_chain_since {
pairs.push(encode_pair(Key::ServeChainSince, serve_chain_since));
}
if let Some(ref serve_state_since) = announcement.serve_state_since {
pairs.push(encode_pair(Key::ServeStateSince, serve_state_since));
}
if announcement.tx_relay {
pairs.push(encode_flag(Key::TxRelay));
}
let mut stream = RlpStream::new_list(4 + pairs.len());
stream
.append(&announcement.head_hash)
.append(&announcement.head_num)
.append(&announcement.head_td)
.append(&announcement.reorg_depth);
for item in pairs {
stream.append_raw(&item, 1);
}
stream.out()
}
#[cfg(test)]
mod tests {
use super::*;
use super::super::buffer_flow::FlowParams;
use util::{U256, H256, FixedHash};
use rlp::{RlpStream, Stream ,UntrustedRlp, View};
#[test]
fn full_handshake() {
let status = Status {
protocol_version: 1,
network_id: NetworkId::Mainnet,
head_td: U256::default(),
head_hash: H256::default(),
head_num: 10,
genesis_hash: H256::zero(),
last_head: None,
};
let capabilities = Capabilities {
serve_headers: true,
serve_chain_since: Some(5),
serve_state_since: Some(8),
tx_relay: true,
};
let flow_params = FlowParams::new(
1_000_000.into(),
Default::default(),
1000.into(),
);
let handshake = write_handshake(&status, &capabilities, &flow_params);
let (read_status, read_capabilities, read_flow)
= parse_handshake(UntrustedRlp::new(&handshake)).unwrap();
assert_eq!(read_status, status);
assert_eq!(read_capabilities, capabilities);
assert_eq!(read_flow, flow_params);
}
#[test]
fn partial_handshake() {
let status = Status {
protocol_version: 1,
network_id: NetworkId::Mainnet,
head_td: U256::default(),
head_hash: H256::default(),
head_num: 10,
genesis_hash: H256::zero(),
last_head: None,
};
let capabilities = Capabilities {
serve_headers: false,
serve_chain_since: Some(5),
serve_state_since: None,
tx_relay: true,
};
let flow_params = FlowParams::new(
1_000_000.into(),
Default::default(),
1000.into(),
);
let handshake = write_handshake(&status, &capabilities, &flow_params);
let (read_status, read_capabilities, read_flow)
= parse_handshake(UntrustedRlp::new(&handshake)).unwrap();
assert_eq!(read_status, status);
assert_eq!(read_capabilities, capabilities);
assert_eq!(read_flow, flow_params);
}
#[test]
fn skip_unknown_keys() {
let status = Status {
protocol_version: 1,
network_id: NetworkId::Mainnet,
head_td: U256::default(),
head_hash: H256::default(),
head_num: 10,
genesis_hash: H256::zero(),
last_head: None,
};
let capabilities = Capabilities {
serve_headers: false,
serve_chain_since: Some(5),
serve_state_since: None,
tx_relay: true,
};
let flow_params = FlowParams::new(
1_000_000.into(),
Default::default(),
1000.into(),
);
let handshake = write_handshake(&status, &capabilities, &flow_params);
let interleaved = {
let handshake = UntrustedRlp::new(&handshake);
let mut stream = RlpStream::new_list(handshake.item_count() * 3);
for item in handshake.iter() {
stream.append_raw(item.as_raw(), 1);
let (mut s1, mut s2) = (RlpStream::new_list(2), RlpStream::new_list(2));
s1.append(&"foo").append_empty_data();
s2.append(&"bar").append_empty_data();
stream.append_raw(&s1.out(), 1);
stream.append_raw(&s2.out(), 1);
}
stream.out()
};
let (read_status, read_capabilities, read_flow)
= parse_handshake(UntrustedRlp::new(&interleaved)).unwrap();
assert_eq!(read_status, status);
assert_eq!(read_capabilities, capabilities);
assert_eq!(read_flow, flow_params);
}
#[test]
fn announcement_roundtrip() {
let announcement = Announcement {
head_hash: H256::random(),
head_num: 100_000,
head_td: 1_000_000.into(),
reorg_depth: 4,
serve_headers: false,
serve_state_since: Some(99_000),
serve_chain_since: Some(1),
tx_relay: true,
};
let serialized = write_announcement(&announcement);
let read = parse_announcement(UntrustedRlp::new(&serialized)).unwrap();
assert_eq!(read, announcement);
}
#[test]
fn keys_out_of_order() {
use super::{Key, encode_pair, encode_flag};
let mut stream = RlpStream::new_list(6);
stream
.append(&H256::zero())
.append(&10u64)
.append(&100_000u64)
.append(&2u64)
.append_raw(&encode_pair(Key::ServeStateSince, &44u64), 1)
.append_raw(&encode_flag(Key::ServeHeaders), 1);
let out = stream.drain();
assert!(parse_announcement(UntrustedRlp::new(&out)).is_err());
let mut stream = RlpStream::new_list(6);
stream
.append(&H256::zero())
.append(&10u64)
.append(&100_000u64)
.append(&2u64)
.append_raw(&encode_flag(Key::ServeHeaders), 1)
.append_raw(&encode_pair(Key::ServeStateSince, &44u64), 1);
let out = stream.drain();
assert!(parse_announcement(UntrustedRlp::new(&out)).is_ok());
}
}

View File

@ -0,0 +1,71 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! A provider for the LES protocol. This is typically a full node, which can
//! give as much data as necessary to its peers.
use ethcore::transaction::SignedTransaction;
use ethcore::blockchain_info::BlockChainInfo;
use util::{Bytes, H256};
use request;
/// Defines the operations that a provider for `LES` must fulfill.
///
/// These are defined at [1], but may be subject to change.
/// Requests which can't be fulfilled should return an empty RLP list.
///
/// [1]: https://github.com/ethcore/parity/wiki/Light-Ethereum-Subprotocol-(LES)
pub trait Provider: Send + Sync {
/// Provide current blockchain info.
fn chain_info(&self) -> BlockChainInfo;
/// Find the depth of a common ancestor between two blocks.
fn reorg_depth(&self, a: &H256, b: &H256) -> Option<u64>;
/// Earliest state.
fn earliest_state(&self) -> Option<u64>;
/// Provide a list of headers starting at the requested block,
/// possibly in reverse and skipping `skip` at a time.
///
/// The returned vector may have any length in the range [0, `max`], but the
/// results within must adhere to the `skip` and `reverse` parameters.
fn block_headers(&self, req: request::Headers) -> Vec<Bytes>;
/// Provide as many as possible of the requested blocks (minus the headers) encoded
/// in RLP format.
fn block_bodies(&self, req: request::Bodies) -> Vec<Bytes>;
/// Provide as many receipts as possible for the requested blocks.
/// Returns a vector of RLP-encoded lists of receipts.
fn receipts(&self, req: request::Receipts) -> Vec<Bytes>;
/// Provide a set of merkle proofs, as requested. Each request is a
/// block hash and request parameters.
///
/// Returns a vector of RLP-encoded lists satisfying the requests.
fn proofs(&self, req: request::StateProofs) -> Vec<Bytes>;
/// Provide contract code for the specified (block_hash, account_hash) pairs.
fn code(&self, req: request::ContractCodes) -> Vec<Bytes>;
/// Provide header proofs from the Canonical Hash Tries.
fn header_proofs(&self, req: request::HeaderProofs) -> Vec<Bytes>;
/// Provide pending transactions.
fn pending_transactions(&self) -> Vec<SignedTransaction>;
}
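// Illustrative only (not part of this change): a minimal stub showing the shape
// of a `Provider` implementation, assuming the imports of this module. Data that
// cannot be provided is simply absent, and unserveable requests yield empty lists,
// as the trait documentation above allows.
struct NullProvider;

impl Provider for NullProvider {
    fn chain_info(&self) -> BlockChainInfo { unimplemented!() }

    fn reorg_depth(&self, _a: &H256, _b: &H256) -> Option<u64> { None }

    fn earliest_state(&self) -> Option<u64> { None }

    fn block_headers(&self, _req: request::Headers) -> Vec<Bytes> { Vec::new() }

    fn block_bodies(&self, _req: request::Bodies) -> Vec<Bytes> { Vec::new() }

    fn receipts(&self, _req: request::Receipts) -> Vec<Bytes> { Vec::new() }

    fn proofs(&self, _req: request::StateProofs) -> Vec<Bytes> { Vec::new() }

    fn code(&self, _req: request::ContractCodes) -> Vec<Bytes> { Vec::new() }

    fn header_proofs(&self, _req: request::HeaderProofs) -> Vec<Bytes> { Vec::new() }

    fn pending_transactions(&self) -> Vec<SignedTransaction> { Vec::new() }
}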

View File

@ -0,0 +1,145 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! LES request types.
// TODO: make IPC compatible.
use util::H256;
/// A request for block headers.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Headers {
/// Block information for the request being made.
pub block: (u64, H256),
/// The maximum amount of headers which can be returned.
pub max: usize,
/// The amount of headers to skip between each response entry.
pub skip: usize,
/// Whether the headers should proceed in falling number from the initial block.
pub reverse: bool,
}
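// Example (illustrative): a request for up to 192 headers walking backwards
// from block #1_000_000, skipping none. `start_hash` stands in for that
// block's hash:
//
//     let req = Headers {
//         block: (1_000_000, start_hash),
//         max: 192,
//         skip: 0,
//         reverse: true,
//     };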
/// A request for specific block bodies.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Bodies {
/// Hashes which bodies are being requested for.
pub block_hashes: Vec<H256>
}
/// A request for transaction receipts.
///
/// This request is answered with a list of transaction receipts for each block
/// requested.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Receipts {
/// Block hashes to return receipts for.
pub block_hashes: Vec<H256>,
}
/// A request for a state proof
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StateProof {
/// Block hash to query state from.
pub block: H256,
/// Key of the state trie -- corresponds to account hash.
pub key1: H256,
/// Key in that account's storage trie; if empty, then the account RLP should be
/// returned.
pub key2: Option<H256>,
/// if greater than zero, trie nodes beyond this level may be omitted.
pub from_level: u32, // could even safely be u8; a trie with a 32-byte key can be at most 64 levels deep.
}
/// A request for state proofs.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct StateProofs {
/// All the proof requests.
pub requests: Vec<StateProof>,
}
/// A request for contract code.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct ContractCodes {
/// Block hash and account key (== sha3(address)) pairs to fetch code for.
pub code_requests: Vec<(H256, H256)>,
}
/// A request for a header proof from the Canonical Hash Trie.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct HeaderProof {
/// Number of the CHT.
pub cht_number: u64,
/// Block number requested.
pub block_number: u64,
/// If greater than zero, trie nodes beyond this level may be omitted.
pub from_level: u32,
}
/// A request for header proofs from the CHT.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct HeaderProofs {
/// All the proof requests.
pub requests: Vec<HeaderProof>,
}
/// Kinds of requests.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Kind {
/// Requesting headers.
Headers,
/// Requesting block bodies.
Bodies,
/// Requesting transaction receipts.
Receipts,
/// Requesting proofs of state trie nodes.
StateProofs,
/// Requesting contract code by hash.
Codes,
/// Requesting header proofs (from the CHT).
HeaderProofs,
}
/// Encompasses all possible types of requests in a single structure.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum Request {
/// Requesting headers.
Headers(Headers),
/// Requesting block bodies.
Bodies(Bodies),
/// Requesting transaction receipts.
Receipts(Receipts),
/// Requesting state proofs.
StateProofs(StateProofs),
/// Requesting contract codes.
Codes(ContractCodes),
/// Requesting header proofs.
HeaderProofs(HeaderProofs),
}
impl Request {
/// Get the kind of request this is.
pub fn kind(&self) -> Kind {
match *self {
Request::Headers(_) => Kind::Headers,
Request::Bodies(_) => Kind::Bodies,
Request::Receipts(_) => Kind::Receipts,
Request::StateProofs(_) => Kind::StateProofs,
Request::Codes(_) => Kind::Codes,
Request::HeaderProofs(_) => Kind::HeaderProofs,
}
}
}
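// For example, `Request::Bodies(Bodies { block_hashes: Vec::new() }).kind()`
// yields `Kind::Bodies`; the same holds for every variant.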

View File

@ -11,7 +11,13 @@
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"homesteadTransition": "0x118c30", "homesteadTransition": "0x118c30",
"eip150Transition": "0x2625a0" "eip150Transition": "0x2625a0",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff",
"ecip1010PauseTransition": "0x2dc6c0",
"ecip1010ContinueTransition": "0x4c4b40"
} }
} }
}, },
@ -39,10 +45,18 @@
"stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544" "stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
}, },
"nodes": [ "nodes": [
"enode://08c7ee6a4f861ff0664a49532bcc86de1363acd608999d1b76609bb9bc278649906f069057630fd9493924a368b5d1dc9b8f8bf13ac26df72512f6d1fabd8c95@45.32.7.81:30303",
"enode://e809c4a2fec7daed400e5e28564e23693b23b2cc5a019b612505631bbe7b9ccf709c1796d2a3d29ef2b045f210caf51e3c4f5b6d3587d43ad5d6397526fa6179@174.112.32.157:30303", "enode://e809c4a2fec7daed400e5e28564e23693b23b2cc5a019b612505631bbe7b9ccf709c1796d2a3d29ef2b045f210caf51e3c4f5b6d3587d43ad5d6397526fa6179@174.112.32.157:30303",
"enode://687be94c3a7beaa3d2fde82fa5046cdeb3e8198354e05b29d6e0d4e276713e3707ac10f784a7904938b06b46c764875c241b0337dd853385a4d8bfcbf8190647@95.183.51.229:30303", "enode://687be94c3a7beaa3d2fde82fa5046cdeb3e8198354e05b29d6e0d4e276713e3707ac10f784a7904938b06b46c764875c241b0337dd853385a4d8bfcbf8190647@95.183.51.229:30303",
"enode://6e538e7c1280f0a31ff08b382db5302480f775480b8e68f8febca0ceff81e4b19153c6f8bf60313b93bef2cc34d34e1df41317de0ce613a201d1660a788a03e2@52.206.67.235:30303", "enode://6e538e7c1280f0a31ff08b382db5302480f775480b8e68f8febca0ceff81e4b19153c6f8bf60313b93bef2cc34d34e1df41317de0ce613a201d1660a788a03e2@52.206.67.235:30303",
"enode://217ebe27e89bf4fec8ce06509323ff095b1014378deb75ab2e5f6759a4e8750a3bd8254b8c6833136e4d5e58230d65ee8ab34a5db5abf0640408c4288af3c8a7@188.138.1.237:30303" "enode://ca5ae4eca09ba6787e29cf6d86f7634d07aae6b9e6317a59aff675851c0bf445068173208cf8ef7f5cd783d8e29b85b2fa3fa358124cf0546823149724f9bde1@138.68.1.16:30303",
"enode://217ebe27e89bf4fec8ce06509323ff095b1014378deb75ab2e5f6759a4e8750a3bd8254b8c6833136e4d5e58230d65ee8ab34a5db5abf0640408c4288af3c8a7@188.138.1.237:30303",
"enode://fa20444ef991596ce99b81652ac4e61de1eddc4ff21d3cd42762abd7ed47e7cf044d3c9ccddaf6035d39725e4eb372806787829ccb9a08ec7cb71883cb8c3abd@50.149.116.182:30303",
"enode://4bd6a4df3612c718333eb5ea7f817923a8cdf1bed89cee70d1710b45a0b6b77b2819846440555e451a9b602ad2efa2d2facd4620650249d8468008946887820a@71.178.232.20:30304",
"enode://921cf8e4c345fe8db913c53964f9cadc667644e7f20195a0b7d877bd689a5934e146ff2c2259f1bae6817b6585153a007ceb67d260b720fa3e6fc4350df25c7f@51.255.49.170:30303",
"enode://ffea3b01c000cdd89e1e9229fea3e80e95b646f9b2aa55071fc865e2f19543c9b06045cc2e69453e6b78100a119e66be1b5ad50b36f2ffd27293caa28efdd1b2@128.199.93.177:3030",
"enode://ee3da491ce6a155eb132708eb0e8d04b0637926ec0ae1b79e63fc97cb9fc3818f49250a0ae0d7f79ed62b66ec677f408c4e01741504dc7a051e274f1e803d454@91.121.65.179:40404",
"enode://48e063a6cf5f335b1ef2ed98126bf522cf254396f850c7d442fe2edbbc23398787e14cd4de7968a00175a82762de9cbe9e1407d8ccbcaeca5004d65f8398d759@159.203.255.59:30303"
], ],
"accounts": { "accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } }, "0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },

View File

@ -10,7 +10,11 @@
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"homesteadTransition": "0x0", "homesteadTransition": "0x0",
"eip150Transition": "0x0" "eip150Transition": "0x0",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -0,0 +1,47 @@
{
"name": "Homestead (Test)",
"engine": {
"Ethash": {
"params": {
"gasLimitBoundDivisor": "0x0400",
"minimumDifficulty": "0x020000",
"difficultyBoundDivisor": "0x0800",
"durationLimit": "0x0d",
"blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"homesteadTransition": "0x0",
"eip150Transition": "0x0",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x0",
"eip161abcTransition": "0x0",
"eip161dTransition": "0x0"
}
}
},
"params": {
"accountStartNonce": "0x00",
"maximumExtraDataSize": "0x20",
"minGasLimit": "0x1388",
"networkID" : "0x1"
},
"genesis": {
"seal": {
"ethereum": {
"nonce": "0x0000000000000042",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
}
},
"difficulty": "0x400000000",
"author": "0x0000000000000000000000000000000000000000",
"timestamp": "0x00",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"extraData": "0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa",
"gasLimit": "0x1388"
},
"accounts": {
"0000000000000000000000000000000000000001": { "balance": "1", "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
"0000000000000000000000000000000000000002": { "balance": "1", "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } },
"0000000000000000000000000000000000000003": { "balance": "1", "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
"0000000000000000000000000000000000000004": { "balance": "1", "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } }
}
}

View File

@ -15,7 +15,11 @@
"difficultyHardforkTransition": "0x59d9", "difficultyHardforkTransition": "0x59d9",
"difficultyHardforkBoundDivisor": "0x0200", "difficultyHardforkBoundDivisor": "0x0200",
"bombDefuseTransition": "0x30d40", "bombDefuseTransition": "0x30d40",
"eip150Transition": "0x7fffffffffffffff" "eip150Transition": "0x7fffffffffffffff",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -130,7 +130,11 @@
"0xbb9bc244d798123fde783fcc1c72d3bb8c189413", "0xbb9bc244d798123fde783fcc1c72d3bb8c189413",
"0x807640a13483f8ac783c557fcdf27be11ea4ac7a" "0x807640a13483f8ac783c557fcdf27be11ea4ac7a"
], ],
"eip150Transition": "0x259518" "eip150Transition": "0x259518",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },
@ -158,13 +162,27 @@
"stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544" "stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
}, },
"nodes": [ "nodes": [
"enode://cd6611461840543d5b9c56fbf088736154c699c43973b3a1a32390cf27106f87e58a818a606ccb05f3866de95a4fe860786fea71bf891ea95f234480d3022aa3@136.243.154.245:30303", "enode://efe4f2493f4aff2d641b1db8366b96ddacfe13e7a6e9c8f8f8cf49f9cdba0fdf3258d8c8f8d0c5db529f8123c8f1d95f36d54d590ca1bb366a5818b9a4ba521c@163.172.187.252:30303",
"enode://cd6611461840543d5b9c56fbf088736154c699c43973b3a1a32390cf27106f87e58a818a606ccb05f3866de95a4fe860786fea71bf891ea95f234480d3022aa3@163.172.157.114:30303",
"enode://bcc7240543fe2cf86f5e9093d05753dd83343f8fda7bf0e833f65985c73afccf8f981301e13ef49c4804491eab043647374df1c4adf85766af88a624ecc3330e@136.243.154.244:30303", "enode://bcc7240543fe2cf86f5e9093d05753dd83343f8fda7bf0e833f65985c73afccf8f981301e13ef49c4804491eab043647374df1c4adf85766af88a624ecc3330e@136.243.154.244:30303",
"enode://ed4227681ca8c70beb2277b9e870353a9693f12e7c548c35df6bca6a956934d6f659999c2decb31f75ce217822eefca149ace914f1cbe461ed5a2ebaf9501455@88.212.206.70:30303", "enode://ed4227681ca8c70beb2277b9e870353a9693f12e7c548c35df6bca6a956934d6f659999c2decb31f75ce217822eefca149ace914f1cbe461ed5a2ebaf9501455@88.212.206.70:30303",
"enode://cadc6e573b6bc2a9128f2f635ac0db3353e360b56deef239e9be7e7fce039502e0ec670b595f6288c0d2116812516ad6b6ff8d5728ff45eba176989e40dead1e@37.128.191.230:30303",
"enode://595a9a06f8b9bc9835c8723b6a82105aea5d55c66b029b6d44f229d6d135ac3ecdd3e9309360a961ea39d7bee7bac5d03564077a4e08823acc723370aace65ec@46.20.235.22:30303",
"enode://029178d6d6f9f8026fc0bc17d5d1401aac76ec9d86633bba2320b5eed7b312980c0a210b74b20c4f9a8b0b2bf884b111fa9ea5c5f916bb9bbc0e0c8640a0f56c@216.158.85.185:30303",
"enode://84f5d5957b4880a8b0545e32e05472318898ad9fc8ebe1d56c90c12334a98e12351eccfdf3a2bf72432ac38b57e9d348400d17caa083879ade3822390f89773f@10.1.52.78:30303",
"enode://f90dc9b9bf7b8db97726b7849e175f1eb2707f3d8f281c929336e398dd89b0409fc6aeceb89e846278e9d3ecc3857cebfbe6758ff352ece6fe5d42921ee761db@10.1.173.87:30303",
"enode://6a868ced2dec399c53f730261173638a93a40214cf299ccf4d42a76e3fa54701db410669e8006347a4b3a74fa090bb35af0320e4bc8d04cf5b7f582b1db285f5@10.3.149.199:30303",
"enode://fdd1b9bb613cfbc200bba17ce199a9490edc752a833f88d4134bf52bb0d858aa5524cb3ec9366c7a4ef4637754b8b15b5dc913e4ed9fdb6022f7512d7b63f181@212.47.247.103:30303",
"enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303", "enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303",
"enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303", "enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303",
"enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303", "enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303",
"enode://4cd540b2c3292e17cff39922e864094bf8b0741fcc8c5dcea14957e389d7944c70278d872902e3d0345927f621547efa659013c400865485ab4bfa0c6596936f@138.201.144.135:30303" "enode://4cd540b2c3292e17cff39922e864094bf8b0741fcc8c5dcea14957e389d7944c70278d872902e3d0345927f621547efa659013c400865485ab4bfa0c6596936f@138.201.144.135:30303",
"enode://89d5dc2a81e574c19d0465f497c1af96732d1b61a41de89c2a37f35707689ac416529fae1038809852b235c2d30fd325abdc57c122feeefbeaaf802cc7e9580d@45.55.33.62:30303",
"enode://605e04a43b1156966b3a3b66b980c87b7f18522f7f712035f84576016be909a2798a438b2b17b1a8c58db314d88539a77419ca4be36148c086900fba487c9d39@188.166.255.12:30303",
"enode://016b20125f447a3b203a3cae953b2ede8ffe51290c071e7599294be84317635730c397b8ff74404d6be412d539ee5bb5c3c700618723d3b53958c92bd33eaa82@159.203.210.80:30303",
"enode://01f76fa0561eca2b9a7e224378dd854278735f1449793c46ad0c4e79e8775d080c21dcc455be391e90a98153c3b05dcc8935c8440de7b56fe6d67251e33f4e3c@10.6.6.117:30303",
"enode://fe11ef89fc5ac9da358fc160857855f25bbf9e332c79b9ca7089330c02b728b2349988c6062f10982041702110745e203d26975a6b34bcc97144f9fe439034e8@10.1.72.117:30303"
], ],
"accounts": { "accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } }, "0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },

View File

@ -130,7 +130,11 @@
"0xbb9bc244d798123fde783fcc1c72d3bb8c189413", "0xbb9bc244d798123fde783fcc1c72d3bb8c189413",
"0x807640a13483f8ac783c557fcdf27be11ea4ac7a" "0x807640a13483f8ac783c557fcdf27be11ea4ac7a"
], ],
"eip150Transition": "0x7fffffffffffffff" "eip150Transition": "0x7fffffffffffffff",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -10,7 +10,11 @@
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"homesteadTransition": "0x7fffffffffffffff", "homesteadTransition": "0x7fffffffffffffff",
"eip150Transition": "0x7fffffffffffffff" "eip150Transition": "0x7fffffffffffffff",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -10,7 +10,11 @@
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"homesteadTransition": "0x0", "homesteadTransition": "0x0",
"eip150Transition": "0x7fffffffffffffff" "eip150Transition": "0x7fffffffffffffff",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -10,7 +10,11 @@
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar": "0x52dff57a8a1532e6afb3dc07e2af58bb9eb05b3d", "registrar": "0x52dff57a8a1532e6afb3dc07e2af58bb9eb05b3d",
"homesteadTransition": "0x789b0", "homesteadTransition": "0x789b0",
"eip150Transition": "0x1b34d8" "eip150Transition": "0x1b34d8",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -10,7 +10,11 @@
"blockReward": "0x14D1120D7B160000", "blockReward": "0x14D1120D7B160000",
"registrar": "5e70c0bbcd5636e0f9f9316e9f8633feb64d4050", "registrar": "5e70c0bbcd5636e0f9f9316e9f8633feb64d4050",
"homesteadTransition": "0x7fffffffffffffff", "homesteadTransition": "0x7fffffffffffffff",
"eip150Transition": "0x7fffffffffffffff" "eip150Transition": "0x7fffffffffffffff",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },

@ -1 +1 @@
Subproject commit 97066e40ccd061f727deb5cd860e4d9135aa2551 Subproject commit 9028c4801fd39fbb71a9796979182549a24e81c8

View File

@ -130,7 +130,11 @@
"0xbb9bc244d798123fde783fcc1c72d3bb8c189413", "0xbb9bc244d798123fde783fcc1c72d3bb8c189413",
"0x807640a13483f8ac783c557fcdf27be11ea4ac7a" "0x807640a13483f8ac783c557fcdf27be11ea4ac7a"
], ],
"eip150Transition": "0xa" "eip150Transition": "0xa",
"eip155Transition": "0x7fffffffffffffff",
"eip160Transition": "0x7fffffffffffffff",
"eip161abcTransition": "0x7fffffffffffffff",
"eip161dTransition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -1,5 +1,5 @@
{ {
"name": "TestInstantSeal", "name": "DevelopmentChain",
"engine": { "engine": {
"InstantSeal": null "InstantSeal": null
}, },
@ -28,6 +28,6 @@
"0000000000000000000000000000000000000002": { "balance": "1", "nonce": "1048576", "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } }, "0000000000000000000000000000000000000002": { "balance": "1", "nonce": "1048576", "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } },
"0000000000000000000000000000000000000003": { "balance": "1", "nonce": "1048576", "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } }, "0000000000000000000000000000000000000003": { "balance": "1", "nonce": "1048576", "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
"0000000000000000000000000000000000000004": { "balance": "1", "nonce": "1048576", "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } }, "0000000000000000000000000000000000000004": { "balance": "1", "nonce": "1048576", "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } },
"102e61f5d8f9bc71d0ad4a084df4e65e05ce0e1c": { "balance": "1606938044258990275541962092341162602522202993782792835301376", "nonce": "1048576" } "0x00a329c0648769a73afac7f9381e08fb43dbea72": { "balance": "1606938044258990275541962092341162602522202993782792835301376", "nonce": "1048576" }
} }
} }

View File

@ -36,7 +36,7 @@ enum Unlock {
/// Use with caution. /// Use with caution.
Perm, Perm,
/// Account unlocked with a timeout /// Account unlocked with a timeout
Timed((Instant, u32)), Timed(Instant),
} }
/// Data associated with account. /// Data associated with account.
@ -95,6 +95,7 @@ impl KeyDirectory for NullDir {
struct AddressBook { struct AddressBook {
path: PathBuf, path: PathBuf,
cache: HashMap<Address, AccountMeta>, cache: HashMap<Address, AccountMeta>,
transient: bool,
} }
impl AddressBook { impl AddressBook {
@ -106,11 +107,18 @@ impl AddressBook {
let mut r = AddressBook { let mut r = AddressBook {
path: path, path: path,
cache: HashMap::new(), cache: HashMap::new(),
transient: false,
}; };
r.revert(); r.revert();
r r
} }
pub fn transient() -> Self {
let mut book = AddressBook::new(Default::default());
book.transient = true;
book
}
pub fn get(&self) -> HashMap<Address, AccountMeta> { pub fn get(&self) -> HashMap<Address, AccountMeta> {
self.cache.clone() self.cache.clone()
} }
@ -134,6 +142,7 @@ impl AddressBook {
} }
fn revert(&mut self) { fn revert(&mut self) {
if self.transient { return; }
trace!(target: "addressbook", "revert"); trace!(target: "addressbook", "revert");
let _ = fs::File::open(self.path.clone()) let _ = fs::File::open(self.path.clone())
.map_err(|e| trace!(target: "addressbook", "Couldn't open address book: {}", e)) .map_err(|e| trace!(target: "addressbook", "Couldn't open address book: {}", e))
@ -144,6 +153,7 @@ impl AddressBook {
} }
fn save(&mut self) { fn save(&mut self) {
if self.transient { return; }
trace!(target: "addressbook", "save"); trace!(target: "addressbook", "save");
let _ = fs::File::create(self.path.clone()) let _ = fs::File::create(self.path.clone())
.map_err(|e| warn!(target: "addressbook", "Couldn't open address book for writing: {}", e)) .map_err(|e| warn!(target: "addressbook", "Couldn't open address book for writing: {}", e))
@ -175,7 +185,7 @@ impl AccountProvider {
pub fn transient_provider() -> Self { pub fn transient_provider() -> Self {
AccountProvider { AccountProvider {
unlocked: Mutex::new(HashMap::new()), unlocked: Mutex::new(HashMap::new()),
address_book: Mutex::new(AddressBook::new(Default::default())), address_book: Mutex::new(AddressBook::transient()),
sstore: Box::new(EthStore::open(Box::new(NullDir::default())) sstore: Box::new(EthStore::open(Box::new(NullDir::default()))
.expect("NullDir load always succeeds; qed")) .expect("NullDir load always succeeds; qed"))
} }
@ -267,7 +277,7 @@ impl AccountProvider {
/// Returns `true` if the password for `account` is `password`. `false` if not. /// Returns `true` if the password for `account` is `password`. `false` if not.
pub fn test_password(&self, account: &Address, password: String) -> Result<bool, Error> { pub fn test_password(&self, account: &Address, password: String) -> Result<bool, Error> {
match self.sstore.sign(&account, &password, &Default::default()) { match self.sstore.sign(account, &password, &Default::default()) {
Ok(_) => Ok(true), Ok(_) => Ok(true),
Err(SSError::InvalidPassword) => Ok(false), Err(SSError::InvalidPassword) => Ok(false),
Err(e) => Err(Error::SStore(e)), Err(e) => Err(Error::SStore(e)),
@ -276,7 +286,7 @@ impl AccountProvider {
/// Changes the password of `account` from `password` to `new_password`. Fails if incorrect `password` given. /// Changes the password of `account` from `password` to `new_password`. Fails if incorrect `password` given.
pub fn change_password(&self, account: &Address, password: String, new_password: String) -> Result<(), Error> { pub fn change_password(&self, account: &Address, password: String, new_password: String) -> Result<(), Error> {
self.sstore.change_password(&account, &password, &new_password).map_err(Error::SStore) self.sstore.change_password(account, &password, &new_password).map_err(Error::SStore)
} }
/// Helper method used for unlocking accounts. /// Helper method used for unlocking accounts.
@ -308,8 +318,8 @@ impl AccountProvider {
if let Unlock::Temp = data.unlock { if let Unlock::Temp = data.unlock {
unlocked.remove(account).expect("data exists: so key must exist: qed"); unlocked.remove(account).expect("data exists: so key must exist: qed");
} }
if let Unlock::Timed((ref start, ref duration)) = data.unlock { if let Unlock::Timed(ref end) = data.unlock {
if start.elapsed() > Duration::from_millis(*duration as u64) { if Instant::now() > *end {
unlocked.remove(account).expect("data exists: so key must exist: qed"); unlocked.remove(account).expect("data exists: so key must exist: qed");
return Err(Error::NotUnlocked); return Err(Error::NotUnlocked);
} }
@ -329,7 +339,7 @@ impl AccountProvider {
/// Unlocks account temporarily with a timeout. /// Unlocks account temporarily with a timeout.
pub fn unlock_account_timed(&self, account: Address, password: String, duration_ms: u32) -> Result<(), Error> { pub fn unlock_account_timed(&self, account: Address, password: String, duration_ms: u32) -> Result<(), Error> {
self.unlock_account(account, password, Unlock::Timed((Instant::now(), duration_ms))) self.unlock_account(account, password, Unlock::Timed(Instant::now() + Duration::from_millis(duration_ms as u64)))
} }
/// Checks if given account is unlocked /// Checks if given account is unlocked
@ -363,11 +373,11 @@ impl AccountProvider {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::{AccountProvider, AddressBook}; use super::{AccountProvider, AddressBook, Unlock};
use std::collections::HashMap; use std::collections::HashMap;
use std::time::Instant;
use ethjson::misc::AccountMeta; use ethjson::misc::AccountMeta;
use ethstore::ethkey::{Generator, Random}; use ethstore::ethkey::{Generator, Random};
use std::time::Duration;
use devtools::RandomTempPath; use devtools::RandomTempPath;
#[test] #[test]
@ -411,10 +421,10 @@ mod tests {
let kp = Random.generate().unwrap(); let kp = Random.generate().unwrap();
let ap = AccountProvider::transient_provider(); let ap = AccountProvider::transient_provider();
assert!(ap.insert_account(kp.secret().clone(), "test").is_ok()); assert!(ap.insert_account(kp.secret().clone(), "test").is_ok());
assert!(ap.unlock_account_timed(kp.address(), "test1".into(), 2000).is_err()); assert!(ap.unlock_account_timed(kp.address(), "test1".into(), 60000).is_err());
assert!(ap.unlock_account_timed(kp.address(), "test".into(), 2000).is_ok()); assert!(ap.unlock_account_timed(kp.address(), "test".into(), 60000).is_ok());
assert!(ap.sign(kp.address(), None, Default::default()).is_ok()); assert!(ap.sign(kp.address(), None, Default::default()).is_ok());
::std::thread::sleep(Duration::from_millis(2000)); ap.unlocked.lock().get_mut(&kp.address()).unwrap().unlock = Unlock::Timed(Instant::now());
assert!(ap.sign(kp.address(), None, Default::default()).is_err()); assert!(ap.sign(kp.address(), None, Default::default()).is_err());
} }
} }

View File

@ -542,7 +542,7 @@ pub fn enact(
Ok(b.close_and_lock()) Ok(b.close_and_lock())
} }
#[inline(always)] #[inline]
#[cfg(not(feature = "slow-blocks"))] #[cfg(not(feature = "slow-blocks"))]
fn push_transactions(block: &mut OpenBlock, transactions: &[SignedTransaction]) -> Result<(), Error> { fn push_transactions(block: &mut OpenBlock, transactions: &[SignedTransaction]) -> Result<(), Error> {
for t in transactions { for t in transactions {

View File

@ -196,6 +196,7 @@ pub struct BlockChain {
pending_best_block: RwLock<Option<BestBlock>>, pending_best_block: RwLock<Option<BestBlock>>,
pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>, pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>,
pending_block_details: RwLock<HashMap<H256, BlockDetails>>,
pending_transaction_addresses: RwLock<HashMap<H256, Option<TransactionAddress>>>, pending_transaction_addresses: RwLock<HashMap<H256, Option<TransactionAddress>>>,
} }
@ -414,6 +415,7 @@ impl<'a> Iterator for AncestryIter<'a> {
} }
impl BlockChain { impl BlockChain {
#[cfg_attr(feature="dev", allow(useless_let_if_seq))]
/// Create new instance of blockchain from given Genesis /// Create new instance of blockchain from given Genesis
pub fn new(config: Config, genesis: &[u8], db: Arc<Database>) -> BlockChain { pub fn new(config: Config, genesis: &[u8], db: Arc<Database>) -> BlockChain {
// 400 is the average size of the key // 400 is the average size of the key
@ -438,6 +440,7 @@ impl BlockChain {
cache_man: Mutex::new(cache_man), cache_man: Mutex::new(cache_man),
pending_best_block: RwLock::new(None), pending_best_block: RwLock::new(None),
pending_block_hashes: RwLock::new(HashMap::new()), pending_block_hashes: RwLock::new(HashMap::new()),
pending_block_details: RwLock::new(HashMap::new()),
pending_transaction_addresses: RwLock::new(HashMap::new()), pending_transaction_addresses: RwLock::new(HashMap::new()),
}; };
@ -565,7 +568,7 @@ impl BlockChain {
let range = extras.number as bc::Number .. extras.number as bc::Number; let range = extras.number as bc::Number .. extras.number as bc::Number;
let chain = bc::group::BloomGroupChain::new(self.blooms_config, self); let chain = bc::group::BloomGroupChain::new(self.blooms_config, self);
let changes = chain.replace(&range, vec![]); let changes = chain.replace(&range, vec![]);
for (k, v) in changes.into_iter() { for (k, v) in changes {
batch.write(db::COL_EXTRA, &LogGroupPosition::from(k), &BloomGroup::from(v)); batch.write(db::COL_EXTRA, &LogGroupPosition::from(k), &BloomGroup::from(v));
} }
batch.put(db::COL_EXTRA, b"best", &hash); batch.put(db::COL_EXTRA, b"best", &hash);
@ -789,11 +792,10 @@ impl BlockChain {
/// the chain and the child's parent is this block. /// the chain and the child's parent is this block.
/// ///
/// Used in snapshots to glue the chunks together at the end. /// Used in snapshots to glue the chunks together at the end.
pub fn add_child(&self, block_hash: H256, child_hash: H256) { pub fn add_child(&self, batch: &mut DBTransaction, block_hash: H256, child_hash: H256) {
let mut parent_details = self.block_details(&block_hash) let mut parent_details = self.block_details(&block_hash)
.unwrap_or_else(|| panic!("Invalid block hash: {:?}", block_hash)); .unwrap_or_else(|| panic!("Invalid block hash: {:?}", block_hash));
let mut batch = self.db.transaction();
parent_details.children.push(child_hash); parent_details.children.push(child_hash);
let mut update = HashMap::new(); let mut update = HashMap::new();
@ -804,8 +806,6 @@ impl BlockChain {
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update, CacheUpdatePolicy::Overwrite); batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update, CacheUpdatePolicy::Overwrite);
self.cache_man.lock().note_used(CacheID::BlockDetails(block_hash)); self.cache_man.lock().note_used(CacheID::BlockDetails(block_hash));
self.db.write(batch).unwrap();
} }
#[cfg_attr(feature="dev", allow(similar_names))] #[cfg_attr(feature="dev", allow(similar_names))]
@ -894,17 +894,6 @@ impl BlockChain {
/// Prepares extras update. /// Prepares extras update.
fn prepare_update(&self, batch: &mut DBTransaction, update: ExtrasUpdate, is_best: bool) { fn prepare_update(&self, batch: &mut DBTransaction, update: ExtrasUpdate, is_best: bool) {
{
let block_hashes: Vec<_> = update.block_details.keys().cloned().collect();
let mut write_details = self.block_details.write();
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update.block_details, CacheUpdatePolicy::Overwrite);
let mut cache_man = self.cache_man.lock();
for hash in block_hashes {
cache_man.note_used(CacheID::BlockDetails(hash));
}
}
{ {
let mut write_receipts = self.block_receipts.write(); let mut write_receipts = self.block_receipts.write();
@ -916,7 +905,7 @@ impl BlockChain {
batch.extend_with_cache(db::COL_EXTRA, &mut *write_blocks_blooms, update.blocks_blooms, CacheUpdatePolicy::Remove); batch.extend_with_cache(db::COL_EXTRA, &mut *write_blocks_blooms, update.blocks_blooms, CacheUpdatePolicy::Remove);
} }
// These cached values must be updated last with all three locks taken to avoid // These cached values must be updated last with all four locks taken to avoid
// cache decoherence // cache decoherence
{ {
let mut best_block = self.pending_best_block.write(); let mut best_block = self.pending_best_block.write();
@ -934,8 +923,10 @@ impl BlockChain {
}, },
} }
let mut write_hashes = self.pending_block_hashes.write(); let mut write_hashes = self.pending_block_hashes.write();
let mut write_details = self.pending_block_details.write();
let mut write_txs = self.pending_transaction_addresses.write(); let mut write_txs = self.pending_transaction_addresses.write();
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update.block_details, CacheUpdatePolicy::Overwrite);
batch.extend_with_cache(db::COL_EXTRA, &mut *write_hashes, update.block_hashes, CacheUpdatePolicy::Overwrite); batch.extend_with_cache(db::COL_EXTRA, &mut *write_hashes, update.block_hashes, CacheUpdatePolicy::Overwrite);
batch.extend_with_option_cache(db::COL_EXTRA, &mut *write_txs, update.transactions_addresses, CacheUpdatePolicy::Overwrite); batch.extend_with_option_cache(db::COL_EXTRA, &mut *write_txs, update.transactions_addresses, CacheUpdatePolicy::Overwrite);
} }
@ -945,9 +936,11 @@ impl BlockChain {
pub fn commit(&self) { pub fn commit(&self) {
let mut pending_best_block = self.pending_best_block.write(); let mut pending_best_block = self.pending_best_block.write();
let mut pending_write_hashes = self.pending_block_hashes.write(); let mut pending_write_hashes = self.pending_block_hashes.write();
let mut pending_block_details = self.pending_block_details.write();
let mut pending_write_txs = self.pending_transaction_addresses.write(); let mut pending_write_txs = self.pending_transaction_addresses.write();
let mut best_block = self.best_block.write(); let mut best_block = self.best_block.write();
let mut write_block_details = self.block_details.write();
let mut write_hashes = self.block_hashes.write(); let mut write_hashes = self.block_hashes.write();
let mut write_txs = self.transaction_addresses.write(); let mut write_txs = self.transaction_addresses.write();
// update best block // update best block
@ -960,9 +953,11 @@ impl BlockChain {
let pending_hashes_keys: Vec<_> = pending_write_hashes.keys().cloned().collect(); let pending_hashes_keys: Vec<_> = pending_write_hashes.keys().cloned().collect();
let enacted_txs_keys: Vec<_> = enacted_txs.keys().cloned().collect(); let enacted_txs_keys: Vec<_> = enacted_txs.keys().cloned().collect();
let pending_block_hashes: Vec<_> = pending_block_details.keys().cloned().collect();
write_hashes.extend(mem::replace(&mut *pending_write_hashes, HashMap::new())); write_hashes.extend(mem::replace(&mut *pending_write_hashes, HashMap::new()));
write_txs.extend(enacted_txs.into_iter().map(|(k, v)| (k, v.expect("Transactions were partitioned; qed")))); write_txs.extend(enacted_txs.into_iter().map(|(k, v)| (k, v.expect("Transactions were partitioned; qed"))));
write_block_details.extend(mem::replace(&mut *pending_block_details, HashMap::new()));
for hash in retracted_txs.keys() { for hash in retracted_txs.keys() {
write_txs.remove(hash); write_txs.remove(hash);
@ -976,6 +971,10 @@ impl BlockChain {
for hash in enacted_txs_keys { for hash in enacted_txs_keys {
cache_man.note_used(CacheID::TransactionAddresses(hash)); cache_man.note_used(CacheID::TransactionAddresses(hash));
} }
for hash in pending_block_hashes {
cache_man.note_used(CacheID::BlockDetails(hash));
}
} }
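The change above stops writing `block_details` straight into the live cache during `prepare_update`; the details now go through the `pending_block_details` map, and `commit` drains it into the live cache together with the other pending maps, so readers never observe a half-applied extras update. A minimal sketch of that two-phase pattern, with hypothetical names (`PendingCache`, `prepare`, `commit`) and `std::sync::RwLock` in place of the crate's own lock types:

use std::collections::HashMap;
use std::hash::Hash;
use std::mem;
use std::sync::RwLock;

// Two-phase extras cache: `prepare` stages updates, `commit` promotes them
// while holding both locks, so readers of `live` never see a partial batch.
struct PendingCache<K: Eq + Hash + Clone, V> {
    pending: RwLock<HashMap<K, V>>,
    live: RwLock<HashMap<K, V>>,
}

impl<K: Eq + Hash + Clone, V> PendingCache<K, V> {
    fn prepare(&self, updates: HashMap<K, V>) {
        // Stage the updates; readers of `live` still see the old state.
        self.pending.write().unwrap().extend(updates);
    }

    fn commit(&self) -> Vec<K> {
        let mut pending = self.pending.write().unwrap();
        let mut live = self.live.write().unwrap();
        let touched: Vec<K> = pending.keys().cloned().collect();
        live.extend(mem::replace(&mut *pending, HashMap::new()));
        touched // caller can note these as recently used in its cache manager
    }
}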
/// Iterator that lists `first` and then all of `first`'s ancestors, by hash. /// Iterator that lists `first` and then all of `first`'s ancestors, by hash.
@ -1296,6 +1295,11 @@ impl BlockChain {
ancient_block_number: best_ancient_block.as_ref().map(|b| b.number), ancient_block_number: best_ancient_block.as_ref().map(|b| b.number),
} }
} }
#[cfg(test)]
pub fn db(&self) -> &Arc<Database> {
&self.db
}
} }
#[cfg(test)] #[cfg(test)]
@ -1464,7 +1468,7 @@ mod tests {
action: Action::Create, action: Action::Create,
value: 100.into(), value: 100.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3(), None);
let b1a = canon_chain let b1a = canon_chain
@ -1528,7 +1532,7 @@ mod tests {
action: Action::Create, action: Action::Create,
value: 100.into(), value: 100.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3(), None);
let t2 = Transaction { let t2 = Transaction {
nonce: 1.into(), nonce: 1.into(),
@ -1537,7 +1541,7 @@ mod tests {
action: Action::Create, action: Action::Create,
value: 100.into(), value: 100.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3(), None);
let t3 = Transaction { let t3 = Transaction {
nonce: 2.into(), nonce: 2.into(),
@ -1546,7 +1550,7 @@ mod tests {
action: Action::Create, action: Action::Create,
value: 100.into(), value: 100.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3(), None);
let b1a = canon_chain let b1a = canon_chain
.with_transaction(t1.clone()) .with_transaction(t1.clone())
@ -1852,7 +1856,7 @@ mod tests {
action: Action::Create, action: Action::Create,
value: 101.into(), value: 101.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3(), None);
let t2 = Transaction { let t2 = Transaction {
nonce: 0.into(), nonce: 0.into(),
gas_price: 0.into(), gas_price: 0.into(),
@ -1860,7 +1864,7 @@ mod tests {
action: Action::Create, action: Action::Create,
value: 102.into(), value: 102.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3(), None);
let t3 = Transaction { let t3 = Transaction {
nonce: 0.into(), nonce: 0.into(),
gas_price: 0.into(), gas_price: 0.into(),
@ -1868,7 +1872,7 @@ mod tests {
action: Action::Create, action: Action::Create,
value: 103.into(), value: 103.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3(), None);
let tx_hash1 = t1.hash(); let tx_hash1 = t1.hash();
let tx_hash2 = t2.hash(); let tx_hash2 = t2.hash();
let tx_hash3 = t3.hash(); let tx_hash3 = t3.hash();

View File

@ -66,7 +66,7 @@ impl<T> CacheManager<T> where T: Eq + Hash {
} }
fn rotate_cache_if_needed(&mut self) { fn rotate_cache_if_needed(&mut self) {
if self.cache_usage.len() == 0 { return } if self.cache_usage.is_empty() { return }
if self.cache_usage[0].len() * self.bytes_per_cache_entry > self.pref_cache_size / COLLECTION_QUEUE_SIZE { if self.cache_usage[0].len() * self.bytes_per_cache_entry > self.pref_cache_size / COLLECTION_QUEUE_SIZE {
if let Some(cache) = self.cache_usage.pop_back() { if let Some(cache) = self.cache_usage.pop_back() {
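For context, the cache manager tracks usage in generations: fresh notes land in the front collection and, once the front grows past its per-generation budget, the oldest generation is dropped. A rough self-contained sketch of that rotation idea (a simplification, not the actual `CacheManager`):

use std::collections::{HashSet, VecDeque};
use std::hash::Hash;

struct GenerationalCache<T: Eq + Hash> {
    generations: VecDeque<HashSet<T>>,
    per_generation_budget: usize,
}

impl<T: Eq + Hash> GenerationalCache<T> {
    fn note_used(&mut self, item: T) {
        if let Some(front) = self.generations.front_mut() {
            front.insert(item);
        }
        self.rotate_if_needed();
    }

    fn rotate_if_needed(&mut self) {
        if self.generations.is_empty() { return }
        if self.generations[0].len() > self.per_generation_budget {
            // Drop the oldest generation entirely and start a fresh one.
            self.generations.pop_back();
            self.generations.push_front(HashSet::new());
        }
    }
}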

View File

@ -33,7 +33,7 @@ use io::*;
use views::{HeaderView, BodyView, BlockView}; use views::{HeaderView, BodyView, BlockView};
use error::{ImportError, ExecutionError, CallError, BlockError, ImportResult, Error as EthcoreError}; use error::{ImportError, ExecutionError, CallError, BlockError, ImportResult, Error as EthcoreError};
use header::BlockNumber; use header::BlockNumber;
use state::State; use state::{State, CleanupMode};
use spec::Spec; use spec::Spec;
use basic_types::Seal; use basic_types::Seal;
use engines::Engine; use engines::Engine;
@ -45,6 +45,7 @@ use block::*;
use transaction::{LocalizedTransaction, SignedTransaction, Action}; use transaction::{LocalizedTransaction, SignedTransaction, Action};
use blockchain::extras::TransactionAddress; use blockchain::extras::TransactionAddress;
use types::filter::Filter; use types::filter::Filter;
use types::mode::Mode as IpcMode;
use log_entry::LocalizedLogEntry; use log_entry::LocalizedLogEntry;
use verification::queue::BlockQueue; use verification::queue::BlockQueue;
use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute}; use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute};
@ -60,12 +61,13 @@ use receipt::LocalizedReceipt;
use trace::{TraceDB, ImportRequest as TraceImportRequest, LocalizedTrace, Database as TraceDatabase}; use trace::{TraceDB, ImportRequest as TraceImportRequest, LocalizedTrace, Database as TraceDatabase};
use trace; use trace;
use trace::FlatTransactionTraces; use trace::FlatTransactionTraces;
use evm::Factory as EvmFactory; use evm::{Factory as EvmFactory, Schedule};
use miner::{Miner, MinerService}; use miner::{Miner, MinerService};
use snapshot::{self, io as snapshot_io}; use snapshot::{self, io as snapshot_io};
use factory::Factories; use factory::Factories;
use rlp::{View, UntrustedRlp}; use rlp::{decode, View, UntrustedRlp};
use state_db::StateDB; use state_db::StateDB;
use rand::OsRng;
// re-export // re-export
pub use types::blockchain_info::BlockChainInfo; pub use types::blockchain_info::BlockChainInfo;
@ -122,7 +124,7 @@ impl SleepState {
/// Blockchain database client backed by a persistent database. Owns and manages a blockchain and a block queue. /// Blockchain database client backed by a persistent database. Owns and manages a blockchain and a block queue.
/// Call `import_block()` to import a block asynchronously; `flush_queue()` flushes the queue. /// Call `import_block()` to import a block asynchronously; `flush_queue()` flushes the queue.
pub struct Client { pub struct Client {
mode: Mode, mode: Mutex<Mode>,
chain: RwLock<Arc<BlockChain>>, chain: RwLock<Arc<BlockChain>>,
tracedb: RwLock<TraceDB<BlockChain>>, tracedb: RwLock<TraceDB<BlockChain>>,
engine: Arc<Engine>, engine: Arc<Engine>,
@ -138,12 +140,14 @@ pub struct Client {
miner: Arc<Miner>, miner: Arc<Miner>,
sleep_state: Mutex<SleepState>, sleep_state: Mutex<SleepState>,
liveness: AtomicBool, liveness: AtomicBool,
io_channel: IoChannel<ClientIoMessage>, io_channel: Mutex<IoChannel<ClientIoMessage>>,
notify: RwLock<Vec<Weak<ChainNotify>>>, notify: RwLock<Vec<Weak<ChainNotify>>>,
queue_transactions: AtomicUsize, queue_transactions: AtomicUsize,
last_hashes: RwLock<VecDeque<H256>>, last_hashes: RwLock<VecDeque<H256>>,
factories: Factories, factories: Factories,
history: u64, history: u64,
rng: Mutex<OsRng>,
on_mode_change: Mutex<Option<Box<FnMut(&Mode) + 'static + Send>>>,
} }
impl Client { impl Client {
@ -208,7 +212,7 @@ impl Client {
let panic_handler = PanicHandler::new_in_arc(); let panic_handler = PanicHandler::new_in_arc();
panic_handler.forward_from(&block_queue); panic_handler.forward_from(&block_queue);
let awake = match config.mode { Mode::Dark(..) => false, _ => true }; let awake = match config.mode { Mode::Dark(..) | Mode::Off => false, _ => true };
let factories = Factories { let factories = Factories {
vm: EvmFactory::new(config.vm_type.clone(), config.jump_table_size), vm: EvmFactory::new(config.vm_type.clone(), config.jump_table_size),
@ -219,7 +223,7 @@ impl Client {
let client = Client { let client = Client {
sleep_state: Mutex::new(SleepState::new(awake)), sleep_state: Mutex::new(SleepState::new(awake)),
liveness: AtomicBool::new(awake), liveness: AtomicBool::new(awake),
mode: config.mode.clone(), mode: Mutex::new(config.mode.clone()),
chain: RwLock::new(chain), chain: RwLock::new(chain),
tracedb: tracedb, tracedb: tracedb,
engine: engine, engine: engine,
@ -233,12 +237,14 @@ impl Client {
import_lock: Mutex::new(()), import_lock: Mutex::new(()),
panic_handler: panic_handler, panic_handler: panic_handler,
miner: miner, miner: miner,
io_channel: message_channel, io_channel: Mutex::new(message_channel),
notify: RwLock::new(Vec::new()), notify: RwLock::new(Vec::new()),
queue_transactions: AtomicUsize::new(0), queue_transactions: AtomicUsize::new(0),
last_hashes: RwLock::new(VecDeque::new()), last_hashes: RwLock::new(VecDeque::new()),
factories: factories, factories: factories,
history: history, history: history,
rng: Mutex::new(try!(OsRng::new().map_err(::util::UtilError::StdIo))),
on_mode_change: Mutex::new(None),
}; };
Ok(Arc::new(client)) Ok(Arc::new(client))
} }
@ -256,6 +262,11 @@ impl Client {
} }
} }
/// Register an action to be done if a mode change happens.
pub fn on_mode_change<F>(&self, f: F) where F: 'static + FnMut(&Mode) + Send {
*self.on_mode_change.lock() = Some(Box::new(f));
}
/// Flush the block import queue. /// Flush the block import queue.
pub fn flush_queue(&self) { pub fn flush_queue(&self) {
self.block_queue.flush(); self.block_queue.flush();
@ -264,6 +275,22 @@ impl Client {
} }
} }
/// The env info as of the best block.
fn latest_env_info(&self) -> EnvInfo {
let header_data = self.best_block_header();
let view = HeaderView::new(&header_data);
EnvInfo {
number: view.number(),
author: view.author(),
timestamp: view.timestamp(),
difficulty: view.difficulty(),
last_hashes: self.build_last_hashes(view.hash()),
gas_used: U256::default(),
gas_limit: view.gas_limit(),
}
}
fn build_last_hashes(&self, parent_hash: H256) -> Arc<LastHashes> { fn build_last_hashes(&self, parent_hash: H256) -> Arc<LastHashes> {
{ {
let hashes = self.last_hashes.read(); let hashes = self.last_hashes.read();
@ -314,7 +341,7 @@ impl Client {
if let Some(parent) = chain_has_parent { if let Some(parent) = chain_has_parent {
// Enact Verified Block // Enact Verified Block
let last_hashes = self.build_last_hashes(header.parent_hash().clone()); let last_hashes = self.build_last_hashes(header.parent_hash().clone());
let db = self.state_db.lock().boxed_clone_canon(&header.parent_hash()); let db = self.state_db.lock().boxed_clone_canon(header.parent_hash());
let enact_result = enact_verified(block, engine, self.tracedb.read().tracing_enabled(), db, &parent, last_hashes, self.factories.clone()); let enact_result = enact_verified(block, engine, self.tracedb.read().tracing_enabled(), db, &parent, last_hashes, self.factories.clone());
let locked_block = try!(enact_result.map_err(|e| { let locked_block = try!(enact_result.map_err(|e| {
@ -434,14 +461,26 @@ impl Client {
/// Import a block with transaction receipts. /// Import a block with transaction receipts.
/// The block is guaranteed to be the next best blocks in the first block sequence. /// The block is guaranteed to be the next best blocks in the first block sequence.
/// Does no sealing or transaction validation. /// Does no sealing or transaction validation.
fn import_old_block(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> H256 { fn import_old_block(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> Result<H256, ::error::Error> {
let block = BlockView::new(&block_bytes); let block = BlockView::new(&block_bytes);
let hash = block.header().hash(); let header = block.header();
let hash = header.hash();
let _import_lock = self.import_lock.lock(); let _import_lock = self.import_lock.lock();
{ {
let _timer = PerfTimer::new("import_old_block"); let _timer = PerfTimer::new("import_old_block");
let mut rng = self.rng.lock();
let chain = self.chain.read(); let chain = self.chain.read();
// verify block.
try!(::snapshot::verify_old_block(
&mut *rng,
&header,
&*self.engine,
&*chain,
Some(&block_bytes),
false,
));
// Commit results // Commit results
let receipts = ::rlp::decode(&receipts_bytes); let receipts = ::rlp::decode(&receipts_bytes);
let mut batch = DBTransaction::new(&self.db.read()); let mut batch = DBTransaction::new(&self.db.read());
@ -451,7 +490,7 @@ impl Client {
chain.commit(); chain.commit();
} }
self.db.read().flush().expect("DB flush failed."); self.db.read().flush().expect("DB flush failed.");
hash Ok(hash)
} }
fn commit_block<B>(&self, block: B, hash: &H256, block_data: &[u8]) -> ImportRoute where B: IsBlock + Drain { fn commit_block<B>(&self, block: B, hash: &H256, block_data: &[u8]) -> ImportRoute where B: IsBlock + Drain {
@ -599,7 +638,8 @@ impl Client {
self.block_queue.collect_garbage(); self.block_queue.collect_garbage();
self.tracedb.read().collect_garbage(); self.tracedb.read().collect_garbage();
match self.mode { let mode = self.mode.lock().clone();
match mode {
Mode::Dark(timeout) => { Mode::Dark(timeout) => {
let mut ss = self.sleep_state.lock(); let mut ss = self.sleep_state.lock();
if let Some(t) = ss.last_activity { if let Some(t) = ss.last_activity {
@ -751,7 +791,7 @@ impl BlockChainClient for Client {
fn call(&self, t: &SignedTransaction, block: BlockID, analytics: CallAnalytics) -> Result<Executed, CallError> { fn call(&self, t: &SignedTransaction, block: BlockID, analytics: CallAnalytics) -> Result<Executed, CallError> {
let header = try!(self.block_header(block).ok_or(CallError::StatePruned)); let header = try!(self.block_header(block).ok_or(CallError::StatePruned));
let view = HeaderView::new(&header); let view = HeaderView::new(&header);
let last_hashes = self.build_last_hashes(view.hash()); let last_hashes = self.build_last_hashes(view.parent_hash());
let env_info = EnvInfo { let env_info = EnvInfo {
number: view.number(), number: view.number(),
author: view.author(), author: view.author(),
@ -773,7 +813,7 @@ impl BlockChainClient for Client {
let needed_balance = t.value + t.gas * t.gas_price; let needed_balance = t.value + t.gas * t.gas_price;
if balance < needed_balance { if balance < needed_balance {
// give the sender a sufficient balance // give the sender a sufficient balance
state.add_balance(&sender, &(needed_balance - balance)); state.add_balance(&sender, &(needed_balance - balance), CleanupMode::NoEmpty);
} }
let options = TransactOptions { tracing: analytics.transaction_tracing, vm_tracing: analytics.vm_tracing, check_nonce: false }; let options = TransactOptions { tracing: analytics.transaction_tracing, vm_tracing: analytics.vm_tracing, check_nonce: false };
let mut ret = try!(Executive::new(&mut state, &env_info, &*self.engine, &self.factories.vm).transact(t, options)); let mut ret = try!(Executive::new(&mut state, &env_info, &*self.engine, &self.factories.vm).transact(t, options));
@ -823,12 +863,43 @@ impl BlockChainClient for Client {
} }
fn keep_alive(&self) { fn keep_alive(&self) {
if self.mode != Mode::Active { let should_wake = match &*self.mode.lock() {
&Mode::Dark(..) | &Mode::Passive(..) => true,
_ => false,
};
if should_wake {
self.wake_up(); self.wake_up();
(*self.sleep_state.lock()).last_activity = Some(Instant::now()); (*self.sleep_state.lock()).last_activity = Some(Instant::now());
} }
} }
fn mode(&self) -> IpcMode {
let r = self.mode.lock().clone().into();
trace!(target: "mode", "Asked for mode = {:?}. returning {:?}", &*self.mode.lock(), r);
r
}
fn set_mode(&self, new_mode: IpcMode) {
trace!(target: "mode", "Client::set_mode({:?})", new_mode);
{
let mut mode = self.mode.lock();
*mode = new_mode.clone().into();
trace!(target: "mode", "Mode now {:?}", &*mode);
match *self.on_mode_change.lock() {
Some(ref mut f) => {
trace!(target: "mode", "Making callback...");
f(&*mode)
},
_ => {}
}
}
match new_mode {
IpcMode::Active => self.wake_up(),
IpcMode::Off => self.sleep(),
_ => {(*self.sleep_state.lock()).last_activity = Some(Instant::now()); }
}
}
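`mode` is now stored behind a `Mutex`, and `set_mode` both updates it and fires the `on_mode_change` callback registered earlier, so the outer run loop can react (for example by persisting the new mode). A minimal sketch of that holder-plus-callback pattern, with illustrative names (`ModeHolder`, `set`) rather than the actual client API:

use std::sync::Mutex;

#[derive(Clone, Debug)]
enum Mode { Active, Passive, Dark, Off }

struct ModeHolder {
    mode: Mutex<Mode>,
    on_change: Mutex<Option<Box<dyn FnMut(&Mode) + Send>>>,
}

impl ModeHolder {
    // Register the action to run whenever the mode changes.
    fn on_mode_change<F>(&self, f: F) where F: FnMut(&Mode) + Send + 'static {
        *self.on_change.lock().unwrap() = Some(Box::new(f));
    }

    fn set(&self, new_mode: Mode) {
        let mut mode = self.mode.lock().unwrap();
        *mode = new_mode;
        // Notify the listener while the mode lock is held, so the callback
        // always observes exactly the mode it was told about.
        if let Some(ref mut f) = *self.on_change.lock().unwrap() {
            f(&*mode);
        }
    }
}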
fn best_block_header(&self) -> Bytes { fn best_block_header(&self) -> Bytes {
self.chain.read().best_block_header() self.chain.read().best_block_header()
} }
@ -979,7 +1050,9 @@ impl BlockChainClient for Client {
transaction_hash: transaction_hash.clone(), transaction_hash: transaction_hash.clone(),
transaction_index: transaction_index, transaction_index: transaction_index,
log_index: i log_index: i
}).collect() }).collect(),
log_bloom: receipt.log_bloom,
state_root: receipt.state_root,
}) })
}, },
_ => None _ => None
@ -1036,7 +1109,7 @@ impl BlockChainClient for Client {
return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash()))); return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash())));
} }
} }
Ok(self.import_old_block(block_bytes, receipts_bytes)) self.import_old_block(block_bytes, receipts_bytes).map_err(Into::into)
} }
fn queue_info(&self) -> BlockQueueInfo { fn queue_info(&self) -> BlockQueueInfo {
@ -1124,7 +1197,7 @@ impl BlockChainClient for Client {
debug!("Ignoring {} transactions: queue is full", transactions.len()); debug!("Ignoring {} transactions: queue is full", transactions.len());
} else { } else {
let len = transactions.len(); let len = transactions.len();
match self.io_channel.send(ClientIoMessage::NewTransactions(transactions)) { match self.io_channel.lock().send(ClientIoMessage::NewTransactions(transactions)) {
Ok(_) => { Ok(_) => {
self.queue_transactions.fetch_add(len, AtomicOrdering::SeqCst); self.queue_transactions.fetch_add(len, AtomicOrdering::SeqCst);
} }
@ -1138,9 +1211,29 @@ impl BlockChainClient for Client {
fn pending_transactions(&self) -> Vec<SignedTransaction> { fn pending_transactions(&self) -> Vec<SignedTransaction> {
self.miner.pending_transactions(self.chain.read().best_block_number()) self.miner.pending_transactions(self.chain.read().best_block_number())
} }
fn signing_network_id(&self) -> Option<u8> {
self.engine.signing_network_id(&self.latest_env_info())
}
fn block_extra_info(&self, id: BlockID) -> Option<BTreeMap<String, String>> {
self.block_header(id)
.map(|block| decode(&block))
.map(|header| self.engine.extra_info(&header))
}
fn uncle_extra_info(&self, id: UncleID) -> Option<BTreeMap<String, String>> {
self.uncle(id)
.map(|header| self.engine.extra_info(&decode(&header)))
}
} }
impl MiningBlockChainClient for Client { impl MiningBlockChainClient for Client {
fn latest_schedule(&self) -> Schedule {
self.engine.schedule(&self.latest_env_info())
}
fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock { fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock {
let engine = &*self.engine; let engine = &*self.engine;
let chain = self.chain.read(); let chain = self.chain.read();
@ -1216,3 +1309,33 @@ impl MayPanic for Client {
self.panic_handler.on_panic(closure); self.panic_handler.on_panic(closure);
} }
} }
#[test]
fn should_not_cache_details_before_commit() {
use tests::helpers::*;
use std::thread;
use std::time::Duration;
use std::sync::atomic::{AtomicBool, Ordering};
let client = generate_dummy_client(0);
let genesis = client.chain_info().best_block_hash;
let (new_hash, new_block) = get_good_dummy_block_hash();
let go = {
// Separate thread inserts the block but never commits the transaction
let go = Arc::new(AtomicBool::new(false));
let go_thread = go.clone();
let another_client = client.reference().clone();
thread::spawn(move || {
let mut batch = DBTransaction::new(&*another_client.chain.read().db().clone());
another_client.chain.read().insert_block(&mut batch, &new_block, Vec::new());
go_thread.store(true, Ordering::SeqCst);
});
go
};
while !go.load(Ordering::SeqCst) { thread::park_timeout(Duration::from_millis(5)); }
assert!(client.tree_route(&genesis, &new_hash).is_none());
}

View File

@ -16,6 +16,7 @@
use std::str::FromStr; use std::str::FromStr;
use std::path::Path; use std::path::Path;
use std::fmt::{Display, Formatter, Error as FmtError};
pub use std::time::Duration; pub use std::time::Duration;
pub use blockchain::Config as BlockChainConfig; pub use blockchain::Config as BlockChainConfig;
pub use trace::Config as TraceConfig; pub use trace::Config as TraceConfig;
@ -76,6 +77,8 @@ pub enum Mode {
/// Goes offline after RLP is inactive for some (given) time and /// Goes offline after RLP is inactive for some (given) time and
/// stays inactive. /// stays inactive.
Dark(Duration), Dark(Duration),
/// Always off.
Off,
} }
impl Default for Mode { impl Default for Mode {
@ -84,6 +87,17 @@ impl Default for Mode {
} }
} }
impl Display for Mode {
fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
match *self {
Mode::Active => write!(f, "active"),
Mode::Passive(..) => write!(f, "passive"),
Mode::Dark(..) => write!(f, "dark"),
Mode::Off => write!(f, "offline"),
}
}
}
/// Client configuration. Includes configs for all sub-systems. /// Client configuration. Includes configs for all sub-systems.
#[derive(Debug, PartialEq, Default)] #[derive(Debug, PartialEq, Default)]
pub struct ClientConfig { pub struct ClientConfig {

View File

@ -34,9 +34,11 @@ use log_entry::LocalizedLogEntry;
use receipt::{Receipt, LocalizedReceipt}; use receipt::{Receipt, LocalizedReceipt};
use blockchain::extras::BlockReceipts; use blockchain::extras::BlockReceipts;
use error::{ImportResult}; use error::{ImportResult};
use evm::{Factory as EvmFactory, VMType}; use evm::{Factory as EvmFactory, VMType, Schedule};
use miner::{Miner, MinerService, TransactionImportResult}; use miner::{Miner, MinerService, TransactionImportResult};
use spec::Spec; use spec::Spec;
use types::mode::Mode;
use views::BlockView;
use verification::queue::QueueInfo; use verification::queue::QueueInfo;
use block::{OpenBlock, SealedBlock}; use block::{OpenBlock, SealedBlock};
@ -83,6 +85,10 @@ pub struct TestBlockChainClient {
pub vm_factory: EvmFactory, pub vm_factory: EvmFactory,
/// Timestamp assigned to latest sealed block /// Timestamp assigned to latest sealed block
pub latest_block_timestamp: RwLock<u64>, pub latest_block_timestamp: RwLock<u64>,
/// Ancient block info.
pub ancient_block: RwLock<Option<(H256, u64)>>,
/// First block info.
pub first_block: RwLock<Option<(H256, u64)>>,
} }
#[derive(Clone)] #[derive(Clone)]
@ -132,6 +138,8 @@ impl TestBlockChainClient {
spec: spec, spec: spec,
vm_factory: EvmFactory::new(VMType::Interpreter, 1024 * 1024), vm_factory: EvmFactory::new(VMType::Interpreter, 1024 * 1024),
latest_block_timestamp: RwLock::new(10_000_000), latest_block_timestamp: RwLock::new(10_000_000),
ancient_block: RwLock::new(None),
first_block: RwLock::new(None),
}; };
client.add_blocks(1, EachBlockWith::Nothing); // add genesis block client.add_blocks(1, EachBlockWith::Nothing); // add genesis block
client.genesis_hash = client.last_hash.read().clone(); client.genesis_hash = client.last_hash.read().clone();
@ -220,7 +228,7 @@ impl TestBlockChainClient {
gas_price: U256::one(), gas_price: U256::one(),
nonce: U256::zero() nonce: U256::zero()
}; };
let signed_tx = tx.sign(keypair.secret()); let signed_tx = tx.sign(keypair.secret(), None);
txs.append(&signed_tx); txs.append(&signed_tx);
txs.out() txs.out()
}, },
@ -286,7 +294,7 @@ impl TestBlockChainClient {
gas_price: U256::one(), gas_price: U256::one(),
nonce: U256::zero() nonce: U256::zero()
}; };
let signed_tx = tx.sign(keypair.secret()); let signed_tx = tx.sign(keypair.secret(), None);
self.set_balance(signed_tx.sender().unwrap(), 10_000_000.into()); self.set_balance(signed_tx.sender().unwrap(), 10_000_000.into());
let res = self.miner.import_external_transactions(self, vec![signed_tx]); let res = self.miner.import_external_transactions(self, vec![signed_tx]);
let res = res.into_iter().next().unwrap().expect("Successful import"); let res = res.into_iter().next().unwrap().expect("Successful import");
@ -306,6 +314,10 @@ pub fn get_temp_state_db() -> GuardedTempResult<StateDB> {
} }
impl MiningBlockChainClient for TestBlockChainClient { impl MiningBlockChainClient for TestBlockChainClient {
fn latest_schedule(&self) -> Schedule {
Schedule::new_post_eip150(true, true, true)
}
fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock { fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock {
let engine = &*self.spec.engine; let engine = &*self.spec.engine;
let genesis_header = self.spec.genesis_header(); let genesis_header = self.spec.genesis_header();
@ -406,6 +418,10 @@ impl BlockChainClient for TestBlockChainClient {
None // Simple default. None // Simple default.
} }
fn uncle_extra_info(&self, _id: UncleID) -> Option<BTreeMap<String, String>> {
None
}
fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt> { fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt> {
self.receipts.read().get(&id).cloned() self.receipts.read().get(&id).cloned()
} }
@ -448,6 +464,13 @@ impl BlockChainClient for TestBlockChainClient {
self.block_hash(id).and_then(|hash| self.blocks.read().get(&hash).cloned()) self.block_hash(id).and_then(|hash| self.blocks.read().get(&hash).cloned())
} }
fn block_extra_info(&self, id: BlockID) -> Option<BTreeMap<String, String>> {
self.block(id)
.map(|block| BlockView::new(&block).header())
.map(|header| self.spec.engine.extra_info(&header))
}
fn block_status(&self, id: BlockID) -> BlockStatus { fn block_status(&self, id: BlockID) -> BlockStatus {
match id { match id {
BlockID::Number(number) if (number as usize) < self.blocks.read().len() => BlockStatus::InChain, BlockID::Number(number) if (number as usize) < self.blocks.read().len() => BlockStatus::InChain,
@ -589,10 +612,10 @@ impl BlockChainClient for TestBlockChainClient {
genesis_hash: self.genesis_hash.clone(), genesis_hash: self.genesis_hash.clone(),
best_block_hash: self.last_hash.read().clone(), best_block_hash: self.last_hash.read().clone(),
best_block_number: self.blocks.read().len() as BlockNumber - 1, best_block_number: self.blocks.read().len() as BlockNumber - 1,
first_block_hash: None, first_block_hash: self.first_block.read().as_ref().map(|x| x.0),
first_block_number: None, first_block_number: self.first_block.read().as_ref().map(|x| x.1),
ancient_block_hash: None, ancient_block_hash: self.ancient_block.read().as_ref().map(|x| x.0),
ancient_block_number: None, ancient_block_number: self.ancient_block.read().as_ref().map(|x| x.1)
} }
} }
@ -621,4 +644,10 @@ impl BlockChainClient for TestBlockChainClient {
fn pending_transactions(&self) -> Vec<SignedTransaction> { fn pending_transactions(&self) -> Vec<SignedTransaction> {
self.miner.pending_transactions(self.chain_info().best_block_number) self.miner.pending_transactions(self.chain_info().best_block_number)
} }
fn signing_network_id(&self) -> Option<u8> { None }
fn mode(&self) -> Mode { Mode::Active }
fn set_mode(&self, _: Mode) { unimplemented!(); }
} }

View File

@ -16,6 +16,7 @@
use std::collections::BTreeMap; use std::collections::BTreeMap;
use util::{U256, Address, H256, H2048, Bytes, Itertools}; use util::{U256, Address, H256, H2048, Bytes, Itertools};
use util::stats::Histogram;
use blockchain::TreeRoute; use blockchain::TreeRoute;
use verification::queue::QueueInfo as BlockQueueInfo; use verification::queue::QueueInfo as BlockQueueInfo;
use block::{OpenBlock, SealedBlock}; use block::{OpenBlock, SealedBlock};
@ -27,16 +28,17 @@ use views::{BlockView};
use error::{ImportResult, CallError}; use error::{ImportResult, CallError};
use receipt::LocalizedReceipt; use receipt::LocalizedReceipt;
use trace::LocalizedTrace; use trace::LocalizedTrace;
use evm::Factory as EvmFactory; use evm::{Factory as EvmFactory, Schedule};
use types::ids::*;
use types::trace_filter::Filter as TraceFilter;
use executive::Executed; use executive::Executed;
use env_info::LastHashes; use env_info::LastHashes;
use types::call_analytics::CallAnalytics;
use block_import_error::BlockImportError; use block_import_error::BlockImportError;
use ipc::IpcConfig; use ipc::IpcConfig;
use types::ids::*;
use types::trace_filter::Filter as TraceFilter;
use types::call_analytics::CallAnalytics;
use types::blockchain_info::BlockChainInfo; use types::blockchain_info::BlockChainInfo;
use types::block_status::BlockStatus; use types::block_status::BlockStatus;
use types::mode::Mode;
#[ipc(client_ident="RemoteClient")] #[ipc(client_ident="RemoteClient")]
/// Blockchain database client. Owns and manages a blockchain and a block queue. /// Blockchain database client. Owns and manages a blockchain and a block queue.
@ -190,8 +192,8 @@ pub trait BlockChainClient : Sync + Send {
/// list all transactions /// list all transactions
fn pending_transactions(&self) -> Vec<SignedTransaction>; fn pending_transactions(&self) -> Vec<SignedTransaction>;
/// Get the gas price distribution. /// Sorted list of transaction gas prices from at least last sample_size blocks.
fn gas_price_statistics(&self, sample_size: usize, distribution_size: usize) -> Result<Vec<U256>, ()> { fn gas_price_corpus(&self, sample_size: usize) -> Vec<U256> {
let mut h = self.chain_info().best_block_hash; let mut h = self.chain_info().best_block_hash;
let mut corpus = Vec::new(); let mut corpus = Vec::new();
while corpus.is_empty() { while corpus.is_empty() {
@ -200,26 +202,45 @@ pub trait BlockChainClient : Sync + Send {
let block = BlockView::new(&block_bytes); let block = BlockView::new(&block_bytes);
let header = block.header_view(); let header = block.header_view();
if header.number() == 0 { if header.number() == 0 {
if corpus.is_empty() { return corpus;
corpus.push(20_000_000_000u64.into()); // we have literally no information - it's as good a number as any.
}
break;
} }
block.transaction_views().iter().foreach(|t| corpus.push(t.gas_price())); block.transaction_views().iter().foreach(|t| corpus.push(t.gas_price()));
h = header.parent_hash().clone(); h = header.parent_hash().clone();
} }
} }
corpus.sort(); corpus.sort();
let n = corpus.len(); corpus
if n > 0 {
Ok((0..(distribution_size + 1))
.map(|i| corpus[i * (n - 1) / distribution_size])
.collect::<Vec<_>>()
)
} else {
Err(())
}
} }
/// Calculate median gas price from recent blocks if they have any transactions.
fn gas_price_median(&self, sample_size: usize) -> Option<U256> {
let corpus = self.gas_price_corpus(sample_size);
corpus.get(corpus.len() / 2).cloned()
}
/// Get the gas price distribution based on recent blocks if they have any transactions.
fn gas_price_histogram(&self, sample_size: usize, bucket_number: usize) -> Option<Histogram> {
let raw_corpus = self.gas_price_corpus(sample_size);
let raw_len = raw_corpus.len();
// Throw out outliers.
let (corpus, _) = raw_corpus.split_at(raw_len - raw_len / 40);
Histogram::new(corpus, bucket_number)
}
/// Get the preferred network ID to sign on
fn signing_network_id(&self) -> Option<u8>;
/// Get the mode.
fn mode(&self) -> Mode;
/// Set the mode.
fn set_mode(&self, mode: Mode);
/// Returns engine-related extra info for `BlockID`.
fn block_extra_info(&self, id: BlockID) -> Option<BTreeMap<String, String>>;
/// Returns engine-related extra info for `UncleID`.
fn uncle_extra_info(&self, id: UncleID) -> Option<BTreeMap<String, String>>;
} }
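`gas_price_statistics` is replaced here by a sorted `gas_price_corpus` plus two derived views: `gas_price_median` takes the middle element, and `gas_price_histogram` buckets the corpus after discarding the top 1/40 of samples as outliers. A self-contained sketch of those derivations over plain u64 prices, using a simple equal-width histogram in place of `util::stats::Histogram` (whose exact API is not shown in this diff):

/// Median of a sorted corpus; `None` when no samples were collected.
fn median(sorted: &[u64]) -> Option<u64> {
    sorted.get(sorted.len() / 2).cloned()
}

/// Equal-width histogram over the corpus with the top 1/40 dropped as outliers.
/// Returns (bucket_start, bucket_end, count) triples.
fn histogram(sorted: &[u64], buckets: usize) -> Option<Vec<(u64, u64, usize)>> {
    let trimmed = &sorted[..sorted.len() - sorted.len() / 40];
    let (min, max) = (*trimmed.first()?, *trimmed.last()?);
    if buckets == 0 || max == min {
        return None;
    }
    let width = (max - min + buckets as u64 - 1) / buckets as u64; // round up
    let mut counts = vec![0usize; buckets];
    for &price in trimmed {
        let idx = ((price - min) / width).min(buckets as u64 - 1) as usize;
        counts[idx] += 1;
    }
    Some(counts
        .into_iter()
        .enumerate()
        .map(|(i, c)| (min + i as u64 * width, min + (i as u64 + 1) * width, c))
        .collect())
}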
/// Extended client interface used for mining /// Extended client interface used for mining
@ -236,6 +257,9 @@ pub trait MiningBlockChainClient : BlockChainClient {
/// Import sealed block. Skips all verifications. /// Import sealed block. Skips all verifications.
fn import_sealed_block(&self, block: SealedBlock) -> ImportResult; fn import_sealed_block(&self, block: SealedBlock) -> ImportResult;
/// Returns latest schedule.
fn latest_schedule(&self) -> Schedule;
} }
impl IpcConfig for BlockChainClient { } impl IpcConfig for BlockChainClient { }

View File

@ -114,7 +114,7 @@ pub trait Writable {
R: Deref<Target = [u8]> { R: Deref<Target = [u8]> {
match policy { match policy {
CacheUpdatePolicy::Overwrite => { CacheUpdatePolicy::Overwrite => {
for (key, value) in values.into_iter() { for (key, value) in values {
self.write(col, &key, &value); self.write(col, &key, &value);
cache.insert(key, value); cache.insert(key, value);
} }
@ -135,7 +135,7 @@ pub trait Writable {
R: Deref<Target = [u8]> { R: Deref<Target = [u8]> {
match policy { match policy {
CacheUpdatePolicy::Overwrite => { CacheUpdatePolicy::Overwrite => {
for (key, value) in values.into_iter() { for (key, value) in values {
match value { match value {
Some(ref v) => self.write(col, &key, v), Some(ref v) => self.write(col, &key, v),
None => self.delete(col, &key), None => self.delete(col, &key),
@ -144,7 +144,7 @@ pub trait Writable {
} }
}, },
CacheUpdatePolicy::Remove => { CacheUpdatePolicy::Remove => {
for (key, value) in values.into_iter() { for (key, value) in values {
match value { match value {
Some(v) => self.write(col, &key, &v), Some(v) => self.write(col, &key, &v),
None => self.delete(col, &key), None => self.delete(col, &key),

View File

@ -81,7 +81,7 @@ impl Engine for BasicAuthority {
fn builtins(&self) -> &BTreeMap<Address, Builtin> { &self.builtins } fn builtins(&self) -> &BTreeMap<Address, Builtin> { &self.builtins }
/// Additional engine-specific information for the user/developer concerning `header`. /// Additional engine-specific information for the user/developer concerning `header`.
fn extra_info(&self, _header: &Header) -> HashMap<String, String> { hash_map!["signature".to_owned() => "TODO".to_owned()] } fn extra_info(&self, _header: &Header) -> BTreeMap<String, String> { map!["signature".to_owned() => "TODO".to_owned()] }
fn schedule(&self, _env_info: &EnvInfo) -> Schedule { fn schedule(&self, _env_info: &EnvInfo) -> Schedule {
Schedule::new_homestead() Schedule::new_homestead()

View File

@ -55,7 +55,7 @@ impl Engine for InstantSeal {
} }
fn schedule(&self, _env_info: &EnvInfo) -> Schedule { fn schedule(&self, _env_info: &EnvInfo) -> Schedule {
Schedule::new_homestead() Schedule::new_post_eip150(false, false, false)
} }
fn is_sealer(&self, _author: &Address) -> Option<bool> { Some(true) } fn is_sealer(&self, _author: &Address) -> Option<bool> { Some(true) }
@ -79,7 +79,7 @@ mod tests {
let tap = AccountProvider::transient_provider(); let tap = AccountProvider::transient_provider();
let addr = tap.insert_account("".sha3(), "").unwrap(); let addr = tap.insert_account("".sha3(), "").unwrap();
let spec = Spec::new_test_instant(); let spec = Spec::new_instant();
let engine = &*spec.engine; let engine = &*spec.engine;
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_state_db(); let mut db_result = get_temp_state_db();
@ -95,7 +95,7 @@ mod tests {
#[test] #[test]
fn instant_cant_verify() { fn instant_cant_verify() {
let engine = Spec::new_test_instant().engine; let engine = Spec::new_instant().engine;
let mut header: Header = Header::default(); let mut header: Header = Header::default();
assert!(engine.verify_block_basic(&header, None).is_ok()); assert!(engine.verify_block_basic(&header, None).is_ok());

View File

@ -47,7 +47,7 @@ pub trait Engine : Sync + Send {
fn seal_fields(&self) -> usize { 0 } fn seal_fields(&self) -> usize { 0 }
/// Additional engine-specific information for the user/developer concerning `header`. /// Additional engine-specific information for the user/developer concerning `header`.
fn extra_info(&self, _header: &Header) -> HashMap<String, String> { HashMap::new() } fn extra_info(&self, _header: &Header) -> BTreeMap<String, String> { BTreeMap::new() }
/// Additional information. /// Additional information.
fn additional_params(&self) -> HashMap<String, String> { HashMap::new() } fn additional_params(&self) -> HashMap<String, String> { HashMap::new() }
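`extra_info` now returns a `BTreeMap` rather than a `HashMap` across the engines (see the BasicAuthority and Ethash hunks as well), which gives the key/value pairs a stable iteration order. A small illustrative sketch of building such a map, assuming the ordering is wanted for deterministic output; the `map!` macro used above is not shown in this diff:

use std::collections::BTreeMap;

// BTreeMap iterates in key order, so serialized engine metadata comes out
// the same way every time.
fn extra_info(nonce: u64, mix_hash: &str) -> BTreeMap<String, String> {
    let mut info = BTreeMap::new();
    info.insert("nonce".to_owned(), format!("0x{:x}", nonce));
    info.insert("mixHash".to_owned(), mix_hash.to_owned());
    info
}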
@ -108,6 +108,9 @@ pub trait Engine : Sync + Send {
/// Verify a particular transaction is valid. /// Verify a particular transaction is valid.
fn verify_transaction(&self, _t: &SignedTransaction, _header: &Header) -> Result<(), Error> { Ok(()) } fn verify_transaction(&self, _t: &SignedTransaction, _header: &Header) -> Result<(), Error> { Ok(()) }
/// The network ID that transactions should be signed with.
fn signing_network_id(&self, _env_info: &EnvInfo) -> Option<u8> { None }
/// Verify the seal of a block. This is an auxiliary method that actually just calls other `verify_` methods /// Verify the seal of a block. This is an auxiliary method that actually just calls other `verify_` methods
/// to get the job done. By default it must pass `verify_basic` and `verify_block_unordered`. If more or fewer /// to get the job done. By default it must pass `verify_basic` and `verify_block_unordered`. If more or fewer
/// methods are needed for an Engine, this may be overridden. /// methods are needed for an Engine, this may be overridden.

View File

@ -47,6 +47,13 @@ pub enum TransactionError {
/// Transaction gas price /// Transaction gas price
got: U256, got: U256,
}, },
/// Transaction's gas is below currently set minimal gas requirement.
InsufficientGas {
/// Minimal expected gas
minimal: U256,
/// Transaction gas
got: U256,
},
/// Sender doesn't have enough funds to pay for this transaction /// Sender doesn't have enough funds to pay for this transaction
InsufficientBalance { InsufficientBalance {
/// Senders balance /// Senders balance
@ -63,6 +70,14 @@ pub enum TransactionError {
}, },
/// Transaction's gas limit (aka gas) is invalid. /// Transaction's gas limit (aka gas) is invalid.
InvalidGasLimit(OutOfBounds<U256>), InvalidGasLimit(OutOfBounds<U256>),
/// Transaction sender is banned.
SenderBanned,
/// Transaction recipient is banned.
RecipientBanned,
/// Contract creation code is banned.
CodeBanned,
/// Invalid network ID given.
InvalidNetworkId,
} }
impl fmt::Display for TransactionError { impl fmt::Display for TransactionError {
@ -75,12 +90,18 @@ impl fmt::Display for TransactionError {
LimitReached => "Transaction limit reached".into(), LimitReached => "Transaction limit reached".into(),
InsufficientGasPrice { minimal, got } => InsufficientGasPrice { minimal, got } =>
format!("Insufficient gas price. Min={}, Given={}", minimal, got), format!("Insufficient gas price. Min={}, Given={}", minimal, got),
InsufficientGas { minimal, got } =>
format!("Insufficient gas. Min={}, Given={}", minimal, got),
InsufficientBalance { balance, cost } => InsufficientBalance { balance, cost } =>
format!("Insufficient balance for transaction. Balance={}, Cost={}", format!("Insufficient balance for transaction. Balance={}, Cost={}",
balance, cost), balance, cost),
GasLimitExceeded { limit, got } => GasLimitExceeded { limit, got } =>
format!("Gas limit exceeded. Limit={}, Given={}", limit, got), format!("Gas limit exceeded. Limit={}, Given={}", limit, got),
InvalidGasLimit(ref err) => format!("Invalid gas limit. {}", err), InvalidGasLimit(ref err) => format!("Invalid gas limit. {}", err),
SenderBanned => "Sender is temporarily banned.".into(),
RecipientBanned => "Recipient is temporarily banned.".into(),
CodeBanned => "Contract code is temporarily banned.".into(),
InvalidNetworkId => "Transaction of this network ID is not allowed on this chain.".into(),
}; };
f.write_fmt(format_args!("Transaction error ({})", msg)) f.write_fmt(format_args!("Transaction error ({})", msg))

View File

@ -14,13 +14,14 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use ethash::{quick_get_difficulty, slow_get_seedhash, EthashManager, H256 as EH256}; use ethash::{quick_get_difficulty, slow_get_seedhash, EthashManager};
use util::*; use util::*;
use block::*; use block::*;
use builtin::Builtin; use builtin::Builtin;
use env_info::EnvInfo; use env_info::EnvInfo;
use error::{BlockError, Error}; use error::{BlockError, TransactionError, Error};
use header::Header; use header::Header;
use state::CleanupMode;
use spec::CommonParams; use spec::CommonParams;
use transaction::SignedTransaction; use transaction::SignedTransaction;
use engines::Engine; use engines::Engine;
@ -59,8 +60,20 @@ pub struct EthashParams {
pub difficulty_hardfork_bound_divisor: U256, pub difficulty_hardfork_bound_divisor: U256,
/// Block on which there is no additional difficulty from the exponential bomb. /// Block on which there is no additional difficulty from the exponential bomb.
pub bomb_defuse_transition: u64, pub bomb_defuse_transition: u64,
/// Bad gas transition block number. /// Number of first block where EIP-150 rules begin.
pub eip150_transition: u64, pub eip150_transition: u64,
/// Number of first block where EIP-155 rules begin.
pub eip155_transition: u64,
/// Number of first block where EIP-160 rules begin.
pub eip160_transition: u64,
/// Number of first block where EIP-161.abc begin.
pub eip161abc_transition: u64,
/// Number of first block where EIP-161.d begins.
pub eip161d_transition: u64,
/// Number of first block where ECIP-1010 begins.
pub ecip1010_pause_transition: u64,
/// Number of first block where ECIP-1010 ends.
pub ecip1010_continue_transition: u64
} }
impl From<ethjson::spec::EthashParams> for EthashParams { impl From<ethjson::spec::EthashParams> for EthashParams {
@ -81,6 +94,12 @@ impl From<ethjson::spec::EthashParams> for EthashParams {
difficulty_hardfork_bound_divisor: p.difficulty_hardfork_bound_divisor.map_or(p.difficulty_bound_divisor.into(), Into::into), difficulty_hardfork_bound_divisor: p.difficulty_hardfork_bound_divisor.map_or(p.difficulty_bound_divisor.into(), Into::into),
bomb_defuse_transition: p.bomb_defuse_transition.map_or(0x7fffffffffffffff, Into::into), bomb_defuse_transition: p.bomb_defuse_transition.map_or(0x7fffffffffffffff, Into::into),
eip150_transition: p.eip150_transition.map_or(0, Into::into), eip150_transition: p.eip150_transition.map_or(0, Into::into),
eip155_transition: p.eip155_transition.map_or(0, Into::into),
eip160_transition: p.eip160_transition.map_or(0, Into::into),
eip161abc_transition: p.eip161abc_transition.map_or(0, Into::into),
eip161d_transition: p.eip161d_transition.map_or(0x7fffffffffffffff, Into::into),
ecip1010_pause_transition: p.ecip1010_pause_transition.map_or(0x7fffffffffffffff, Into::into),
ecip1010_continue_transition: p.ecip1010_continue_transition.map_or(0x7fffffffffffffff, Into::into),
} }
} }
} }
@ -120,8 +139,8 @@ impl Engine for Ethash {
} }
/// Additional engine-specific information for the user/developer concerning `header`. /// Additional engine-specific information for the user/developer concerning `header`.
fn extra_info(&self, header: &Header) -> HashMap<String, String> { fn extra_info(&self, header: &Header) -> BTreeMap<String, String> {
hash_map!["nonce".to_owned() => format!("0x{}", header.nonce().hex()), "mixHash".to_owned() => format!("0x{}", header.mix_hash().hex())] map!["nonce".to_owned() => format!("0x{}", header.nonce().hex()), "mixHash".to_owned() => format!("0x{}", header.mix_hash().hex())]
} }
fn schedule(&self, env_info: &EnvInfo) -> Schedule { fn schedule(&self, env_info: &EnvInfo) -> Schedule {
@ -132,7 +151,19 @@ impl Engine for Ethash {
} else if env_info.number < self.ethash_params.eip150_transition { } else if env_info.number < self.ethash_params.eip150_transition {
Schedule::new_homestead() Schedule::new_homestead()
} else { } else {
Schedule::new_homestead_gas_fix() Schedule::new_post_eip150(
env_info.number >= self.ethash_params.eip160_transition,
env_info.number >= self.ethash_params.eip161abc_transition,
env_info.number >= self.ethash_params.eip161d_transition
)
}
}
fn signing_network_id(&self, env_info: &EnvInfo) -> Option<u8> {
if env_info.number >= self.ethash_params.eip155_transition && self.params().network_id < 127 {
Some(self.params().network_id as u8)
} else {
None
} }
} }
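`signing_network_id` exposes the chain's network id for EIP-155 signing once `eip155_transition` is reached (and only while it fits in a u8), which is what the new `Option` parameter on the `.sign(secret, None)` call sites carries. For reference, EIP-155 folds the chain id into the signature's recovery value; a small illustrative helper, not the crate's actual signing code:

// EIP-155: with a chain id, v = recovery_id + chain_id * 2 + 35;
// without one, the legacy v = recovery_id + 27 is kept.
fn signature_v(recovery_id: u8, chain_id: Option<u8>) -> u64 {
    match chain_id {
        Some(id) => recovery_id as u64 + id as u64 * 2 + 35,
        None => recovery_id as u64 + 27,
    }
}

// e.g. recovery id 0 with chain id 1 (the EIP-155 example chain) gives v = 37.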
@ -169,7 +200,7 @@ impl Engine for Ethash {
let mut state = block.fields_mut().state; let mut state = block.fields_mut().state;
for child in &self.ethash_params.dao_hardfork_accounts { for child in &self.ethash_params.dao_hardfork_accounts {
let b = state.balance(child); let b = state.balance(child);
state.transfer_balance(child, &self.ethash_params.dao_hardfork_beneficiary, &b); state.transfer_balance(child, &self.ethash_params.dao_hardfork_beneficiary, &b, CleanupMode::NoEmpty);
} }
// } // }
} }
@ -182,12 +213,12 @@ impl Engine for Ethash {
let fields = block.fields_mut(); let fields = block.fields_mut();
// Bestow block reward // Bestow block reward
fields.state.add_balance(fields.header.author(), &(reward + reward / U256::from(32) * U256::from(fields.uncles.len()))); fields.state.add_balance(fields.header.author(), &(reward + reward / U256::from(32) * U256::from(fields.uncles.len())), CleanupMode::NoEmpty);
// Bestow uncle rewards // Bestow uncle rewards
let current_number = fields.header.number(); let current_number = fields.header.number();
for u in fields.uncles.iter() { for u in fields.uncles.iter() {
fields.state.add_balance(u.author(), &(reward * U256::from(8 + u.number() - current_number) / U256::from(8))); fields.state.add_balance(u.author(), &(reward * U256::from(8 + u.number() - current_number) / U256::from(8)), CleanupMode::NoEmpty);
} }
// Commit state so that we can actually figure out the state root. // Commit state so that we can actually figure out the state root.
@ -212,10 +243,10 @@ impl Engine for Ethash {
return Err(From::from(BlockError::DifficultyOutOfBounds(OutOfBounds { min: Some(min_difficulty), max: None, found: header.difficulty().clone() }))) return Err(From::from(BlockError::DifficultyOutOfBounds(OutOfBounds { min: Some(min_difficulty), max: None, found: header.difficulty().clone() })))
} }
let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(quick_get_difficulty( let difficulty = Ethash::boundary_to_difficulty(&H256(quick_get_difficulty(
&Ethash::to_ethash(header.bare_hash()), &header.bare_hash().0,
header.nonce().low_u64(), header.nonce().low_u64(),
&Ethash::to_ethash(header.mix_hash()) &header.mix_hash().0
))); )));
if &difficulty < header.difficulty() { if &difficulty < header.difficulty() {
return Err(From::from(BlockError::InvalidProofOfWork(OutOfBounds { min: Some(header.difficulty().clone()), max: None, found: difficulty }))); return Err(From::from(BlockError::InvalidProofOfWork(OutOfBounds { min: Some(header.difficulty().clone()), max: None, found: difficulty })));
@ -240,10 +271,10 @@ impl Engine for Ethash {
Mismatch { expected: self.seal_fields(), found: header.seal().len() } Mismatch { expected: self.seal_fields(), found: header.seal().len() }
))); )));
} }
let result = self.pow.compute_light(header.number() as u64, &Ethash::to_ethash(header.bare_hash()), header.nonce().low_u64()); let result = self.pow.compute_light(header.number() as u64, &header.bare_hash().0, header.nonce().low_u64());
let mix = Ethash::from_ethash(result.mix_hash); let mix = H256(result.mix_hash);
let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(result.value)); let difficulty = Ethash::boundary_to_difficulty(&H256(result.value));
trace!(target: "miner", "num: {}, seed: {}, h: {}, non: {}, mix: {}, res: {}" , header.number() as u64, Ethash::from_ethash(slow_get_seedhash(header.number() as u64)), header.bare_hash(), header.nonce().low_u64(), Ethash::from_ethash(result.mix_hash), Ethash::from_ethash(result.value)); trace!(target: "miner", "num: {}, seed: {}, h: {}, non: {}, mix: {}, res: {}" , header.number() as u64, H256(slow_get_seedhash(header.number() as u64)), header.bare_hash(), header.nonce().low_u64(), H256(result.mix_hash), H256(result.value));
if mix != header.mix_hash() { if mix != header.mix_hash() {
return Err(From::from(BlockError::MismatchedH256SealElement(Mismatch { expected: mix, found: header.mix_hash() }))); return Err(From::from(BlockError::MismatchedH256SealElement(Mismatch { expected: mix, found: header.mix_hash() })));
} }
@ -277,6 +308,13 @@ impl Engine for Ethash {
if header.number() >= self.ethash_params.homestead_transition { if header.number() >= self.ethash_params.homestead_transition {
try!(t.check_low_s()); try!(t.check_low_s());
} }
if let Some(n) = t.network_id() {
if header.number() < self.ethash_params.eip155_transition || n as usize != self.params().network_id {
return Err(TransactionError::InvalidNetworkId.into())
}
}
Ok(()) Ok(())
} }
@ -285,7 +323,7 @@ impl Engine for Ethash {
} }
} }
#[cfg_attr(feature="dev", allow(wrong_self_convention))] // to_ethash should take self #[cfg_attr(feature="dev", allow(wrong_self_convention))]
impl Ethash { impl Ethash {
fn calculate_difficulty(&self, header: &Header, parent: &Header) -> U256 { fn calculate_difficulty(&self, header: &Header, parent: &Header) -> U256 {
const EXP_DIFF_PERIOD: u64 = 100000; const EXP_DIFF_PERIOD: u64 = 100000;
@ -321,9 +359,20 @@ impl Ethash {
}; };
target = max(min_difficulty, target); target = max(min_difficulty, target);
if header.number() < self.ethash_params.bomb_defuse_transition { if header.number() < self.ethash_params.bomb_defuse_transition {
let period = ((parent.number() + 1) / EXP_DIFF_PERIOD) as usize; if header.number() < self.ethash_params.ecip1010_pause_transition {
if period > 1 { let period = ((parent.number() + 1) / EXP_DIFF_PERIOD) as usize;
target = max(min_difficulty, target + (U256::from(1) << (period - 2))); if period > 1 {
target = max(min_difficulty, target + (U256::from(1) << (period - 2)));
}
}
else if header.number() < self.ethash_params.ecip1010_continue_transition {
let fixed_difficulty = ((self.ethash_params.ecip1010_pause_transition / EXP_DIFF_PERIOD) - 2) as usize;
target = max(min_difficulty, target + (U256::from(1) << fixed_difficulty));
}
else {
let period = ((parent.number() + 1) / EXP_DIFF_PERIOD) as usize;
let delay = ((self.ethash_params.ecip1010_continue_transition - self.ethash_params.ecip1010_pause_transition) / EXP_DIFF_PERIOD) as usize;
target = max(min_difficulty, target + (U256::from(1) << (period - delay - 2)));
} }
} }
target target
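The branches above change how the exponential difficulty bomb is applied: before the ECIP-1010 pause block the usual 2^(period - 2) addend grows with the block number, between the pause and continue blocks it is frozen at the exponent reached at the pause block, and afterwards it resumes shifted back by the length of the pause. A rough sketch of just that addend under those rules (EXP_DIFF_PERIOD = 100_000 as in the original; the checked_sub calls stand in for the `period > 1` guard):

const EXP_DIFF_PERIOD: u64 = 100_000;

// Bomb addend for the child of `parent_number`, given the ECIP-1010 pause
// and continue transition blocks; returns 0 while the exponent is negative.
fn bomb_addend(parent_number: u64, pause: u64, cont: u64) -> u128 {
    let block = parent_number + 1;
    let period = (block / EXP_DIFF_PERIOD) as u32;
    let exponent = if block < pause {
        period.checked_sub(2)
    } else if block < cont {
        ((pause / EXP_DIFF_PERIOD) as u32).checked_sub(2)
    } else {
        let delay = ((cont - pause) / EXP_DIFF_PERIOD) as u32;
        period.checked_sub(delay + 2)
    };
    exponent.map_or(0, |e| 1u128 << e)
}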
@ -347,14 +396,6 @@ impl Ethash {
(((U256::one() << 255) / *difficulty) << 1).into() (((U256::one() << 255) / *difficulty) << 1).into()
} }
} }
fn to_ethash(hash: H256) -> EH256 {
unsafe { mem::transmute(hash) }
}
fn from_ethash(hash: EH256) -> H256 {
unsafe { mem::transmute(hash) }
}
} }
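The unsafe `mem::transmute` bridges between the util and ethash hash types are removed above; the call sites now pass the inner byte array (`header.bare_hash().0`) and rebuild with the tuple constructor (`H256(result.mix_hash)`). A tiny sketch of that safe newtype round-trip, assuming both sides agree on a public `[u8; 32]` representation:

// Newtype over the raw 32-byte hash. Conversions go through the public
// field instead of `mem::transmute`, so no `unsafe` is needed.
pub struct H256(pub [u8; 32]);

fn to_raw(hash: &H256) -> &[u8; 32] {
    &hash.0
}

fn from_raw(raw: [u8; 32]) -> H256 {
    H256(raw)
}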
impl Header { impl Header {
@ -382,8 +423,8 @@ mod tests {
use env_info::EnvInfo; use env_info::EnvInfo;
use error::{BlockError, Error}; use error::{BlockError, Error};
use header::Header; use header::Header;
use super::super::new_morden; use super::super::{new_morden, new_homestead_test};
use super::Ethash; use super::{Ethash, EthashParams};
use rlp; use rlp;
#[test] #[test]
@ -605,5 +646,122 @@ mod tests {
assert_eq!(Ethash::difficulty_to_boundary(&U256::from(32)), H256::from_str("0800000000000000000000000000000000000000000000000000000000000000").unwrap()); assert_eq!(Ethash::difficulty_to_boundary(&U256::from(32)), H256::from_str("0800000000000000000000000000000000000000000000000000000000000000").unwrap());
} }
// TODO: difficulty test #[test]
fn difficulty_frontier() {
let spec = new_homestead_test();
let ethparams = get_default_ethash_params();
let ethash = Ethash::new(spec.params, ethparams, BTreeMap::new());
let mut parent_header = Header::default();
parent_header.set_number(1000000);
parent_header.set_difficulty(U256::from_str("b69de81a22b").unwrap());
parent_header.set_timestamp(1455404053);
let mut header = Header::default();
header.set_number(parent_header.number() + 1);
header.set_timestamp(1455404058);
let difficulty = ethash.calculate_difficulty(&header, &parent_header);
assert_eq!(U256::from_str("b6b4bbd735f").unwrap(), difficulty);
}
#[test]
fn difficulty_homestead() {
let spec = new_homestead_test();
let ethparams = get_default_ethash_params();
let ethash = Ethash::new(spec.params, ethparams, BTreeMap::new());
let mut parent_header = Header::default();
parent_header.set_number(1500000);
parent_header.set_difficulty(U256::from_str("1fd0fd70792b").unwrap());
parent_header.set_timestamp(1463003133);
let mut header = Header::default();
header.set_number(parent_header.number() + 1);
header.set_timestamp(1463003177);
let difficulty = ethash.calculate_difficulty(&header, &parent_header);
assert_eq!(U256::from_str("1fc50f118efe").unwrap(), difficulty);
}
#[test]
fn difficulty_classic_bomb_delay() {
let spec = new_homestead_test();
let ethparams = EthashParams {
ecip1010_pause_transition: 3000000,
..get_default_ethash_params()
};
let ethash = Ethash::new(spec.params, ethparams, BTreeMap::new());
let mut parent_header = Header::default();
parent_header.set_number(3500000);
parent_header.set_difficulty(U256::from_str("6F62EAF8D3C").unwrap());
parent_header.set_timestamp(1452838500);
let mut header = Header::default();
header.set_number(parent_header.number() + 1);
header.set_timestamp(parent_header.timestamp() + 20);
assert_eq!(
U256::from_str("6F55FE9B74B").unwrap(),
ethash.calculate_difficulty(&header, &parent_header)
);
header.set_timestamp(parent_header.timestamp() + 5);
assert_eq!(
U256::from_str("6F71D75632D").unwrap(),
ethash.calculate_difficulty(&header, &parent_header)
);
header.set_timestamp(parent_header.timestamp() + 80);
assert_eq!(
U256::from_str("6F02746B3A5").unwrap(),
ethash.calculate_difficulty(&header, &parent_header)
);
}
#[test]
fn test_difficulty_bomb_continue() {
let spec = new_homestead_test();
let ethparams = EthashParams {
ecip1010_pause_transition: 3000000,
ecip1010_continue_transition: 5000000,
..get_default_ethash_params()
};
let ethash = Ethash::new(spec.params, ethparams, BTreeMap::new());
let mut parent_header = Header::default();
parent_header.set_number(5000102);
parent_header.set_difficulty(U256::from_str("14944397EE8B").unwrap());
parent_header.set_timestamp(1513175023);
let mut header = Header::default();
header.set_number(parent_header.number() + 1);
header.set_timestamp(parent_header.timestamp() + 6);
assert_eq!(
U256::from_str("1496E6206188").unwrap(),
ethash.calculate_difficulty(&header, &parent_header)
);
parent_header.set_number(5100123);
parent_header.set_difficulty(U256::from_str("14D24B39C7CF").unwrap());
parent_header.set_timestamp(1514609324);
header.set_number(parent_header.number() + 1);
header.set_timestamp(parent_header.timestamp() + 41);
assert_eq!(
U256::from_str("14CA9C5D9227").unwrap(),
ethash.calculate_difficulty(&header, &parent_header)
);
parent_header.set_number(6150001);
parent_header.set_difficulty(U256::from_str("305367B57227").unwrap());
parent_header.set_timestamp(1529664575);
header.set_number(parent_header.number() + 1);
header.set_timestamp(parent_header.timestamp() + 105);
assert_eq!(
U256::from_str("309D09E0C609").unwrap(),
ethash.calculate_difficulty(&header, &parent_header)
);
parent_header.set_number(8000000);
parent_header.set_difficulty(U256::from_str("1180B36D4CE5B6A").unwrap());
parent_header.set_timestamp(1535431724);
header.set_number(parent_header.number() + 1);
header.set_timestamp(parent_header.timestamp() + 420);
assert_eq!(
U256::from_str("5126FFD5BCBB9E7").unwrap(),
ethash.calculate_difficulty(&header, &parent_header)
);
}
} }

View File

@ -54,6 +54,9 @@ pub fn new_homestead_test() -> Spec { load(include_bytes!("../../res/ethereum/ho
/// Create a new Homestead-EIP150 chain spec as though it never changed from Homestead/Frontier. /// Create a new Homestead-EIP150 chain spec as though it never changed from Homestead/Frontier.
pub fn new_eip150_test() -> Spec { load(include_bytes!("../../res/ethereum/eip150_test.json")) } pub fn new_eip150_test() -> Spec { load(include_bytes!("../../res/ethereum/eip150_test.json")) }
/// Create a new Homestead-EIP150 chain spec as though it never changed from Homestead/Frontier.
pub fn new_eip161_test() -> Spec { load(include_bytes!("../../res/ethereum/eip161_test.json")) }
/// Create a new Frontier/Homestead/DAO chain spec with transition points at #5 and #8. /// Create a new Frontier/Homestead/DAO chain spec with transition points at #5 and #8.
pub fn new_transition_test() -> Spec { load(include_bytes!("../../res/ethereum/transition_test.json")) } pub fn new_transition_test() -> Spec { load(include_bytes!("../../res/ethereum/transition_test.json")) }

View File

@ -52,6 +52,12 @@ pub trait Ext {
/// Determine whether an account exists. /// Determine whether an account exists.
fn exists(&self, address: &Address) -> bool; fn exists(&self, address: &Address) -> bool;
/// Determine whether an account exists and is not null (zero balance/nonce, no code).
fn exists_and_not_null(&self, address: &Address) -> bool;
/// Balance of the origin account.
fn origin_balance(&self) -> U256;
/// Returns address balance. /// Returns address balance.
fn balance(&self, address: &Address) -> U256; fn balance(&self, address: &Address) -> U256;
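To make the new predicate concrete, here is a minimal illustration (toy types, not the crate's State) of the distinction it draws against plain exists(): an account can exist in the trie yet still be "null" in the EIP-161 sense.

struct Acct { balance: u64, nonce: u64, code: Vec<u8> }

fn exists(acct: Option<&Acct>) -> bool {
    acct.is_some()
}

fn exists_and_not_null(acct: Option<&Acct>) -> bool {
    // "Null" per the doc comment above: zero balance, zero nonce and no code.
    acct.map_or(false, |a| a.balance != 0 || a.nonce != 0 || !a.code.is_empty())
}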

View File

@ -173,7 +173,7 @@ lazy_static! {
arr[SIGNEXTEND as usize] = InstructionInfo::new("SIGNEXTEND", 0, 2, 1, false, GasPriceTier::Low); arr[SIGNEXTEND as usize] = InstructionInfo::new("SIGNEXTEND", 0, 2, 1, false, GasPriceTier::Low);
arr[SHA3 as usize] = InstructionInfo::new("SHA3", 0, 2, 1, false, GasPriceTier::Special); arr[SHA3 as usize] = InstructionInfo::new("SHA3", 0, 2, 1, false, GasPriceTier::Special);
arr[ADDRESS as usize] = InstructionInfo::new("ADDRESS", 0, 0, 1, false, GasPriceTier::Base); arr[ADDRESS as usize] = InstructionInfo::new("ADDRESS", 0, 0, 1, false, GasPriceTier::Base);
arr[BALANCE as usize] = InstructionInfo::new("BALANCE", 0, 1, 1, false, GasPriceTier::Ext); arr[BALANCE as usize] = InstructionInfo::new("BALANCE", 0, 1, 1, false, GasPriceTier::Special);
arr[ORIGIN as usize] = InstructionInfo::new("ORIGIN", 0, 0, 1, false, GasPriceTier::Base); arr[ORIGIN as usize] = InstructionInfo::new("ORIGIN", 0, 0, 1, false, GasPriceTier::Base);
arr[CALLER as usize] = InstructionInfo::new("CALLER", 0, 0, 1, false, GasPriceTier::Base); arr[CALLER as usize] = InstructionInfo::new("CALLER", 0, 0, 1, false, GasPriceTier::Base);
arr[CALLVALUE as usize] = InstructionInfo::new("CALLVALUE", 0, 0, 1, false, GasPriceTier::Base); arr[CALLVALUE as usize] = InstructionInfo::new("CALLVALUE", 0, 0, 1, false, GasPriceTier::Base);
@ -183,8 +183,8 @@ lazy_static! {
arr[CODESIZE as usize] = InstructionInfo::new("CODESIZE", 0, 0, 1, false, GasPriceTier::Base); arr[CODESIZE as usize] = InstructionInfo::new("CODESIZE", 0, 0, 1, false, GasPriceTier::Base);
arr[CODECOPY as usize] = InstructionInfo::new("CODECOPY", 0, 3, 0, true, GasPriceTier::VeryLow); arr[CODECOPY as usize] = InstructionInfo::new("CODECOPY", 0, 3, 0, true, GasPriceTier::VeryLow);
arr[GASPRICE as usize] = InstructionInfo::new("GASPRICE", 0, 0, 1, false, GasPriceTier::Base); arr[GASPRICE as usize] = InstructionInfo::new("GASPRICE", 0, 0, 1, false, GasPriceTier::Base);
arr[EXTCODESIZE as usize] = InstructionInfo::new("EXTCODESIZE", 0, 1, 1, false, GasPriceTier::Ext); arr[EXTCODESIZE as usize] = InstructionInfo::new("EXTCODESIZE", 0, 1, 1, false, GasPriceTier::Special);
arr[EXTCODECOPY as usize] = InstructionInfo::new("EXTCODECOPY", 0, 4, 0, true, GasPriceTier::Ext); arr[EXTCODECOPY as usize] = InstructionInfo::new("EXTCODECOPY", 0, 4, 0, true, GasPriceTier::Special);
arr[BLOCKHASH as usize] = InstructionInfo::new("BLOCKHASH", 0, 1, 1, false, GasPriceTier::Ext); arr[BLOCKHASH as usize] = InstructionInfo::new("BLOCKHASH", 0, 1, 1, false, GasPriceTier::Ext);
arr[COINBASE as usize] = InstructionInfo::new("COINBASE", 0, 0, 1, false, GasPriceTier::Base); arr[COINBASE as usize] = InstructionInfo::new("COINBASE", 0, 0, 1, false, GasPriceTier::Base);
arr[TIMESTAMP as usize] = InstructionInfo::new("TIMESTAMP", 0, 0, 1, false, GasPriceTier::Base); arr[TIMESTAMP as usize] = InstructionInfo::new("TIMESTAMP", 0, 0, 1, false, GasPriceTier::Base);
@ -277,7 +277,7 @@ lazy_static! {
arr[CALLCODE as usize] = InstructionInfo::new("CALLCODE", 0, 7, 1, true, GasPriceTier::Special); arr[CALLCODE as usize] = InstructionInfo::new("CALLCODE", 0, 7, 1, true, GasPriceTier::Special);
arr[RETURN as usize] = InstructionInfo::new("RETURN", 0, 2, 0, true, GasPriceTier::Zero); arr[RETURN as usize] = InstructionInfo::new("RETURN", 0, 2, 0, true, GasPriceTier::Zero);
arr[DELEGATECALL as usize] = InstructionInfo::new("DELEGATECALL", 0, 6, 1, true, GasPriceTier::Special); arr[DELEGATECALL as usize] = InstructionInfo::new("DELEGATECALL", 0, 6, 1, true, GasPriceTier::Special);
arr[SUICIDE as usize] = InstructionInfo::new("SUICIDE", 0, 1, 0, true, GasPriceTier::Zero); arr[SUICIDE as usize] = InstructionInfo::new("SUICIDE", 0, 1, 0, true, GasPriceTier::Special);
arr arr
}; };
} }

View File

@ -30,12 +30,20 @@ macro_rules! overflowing {
} }
#[cfg_attr(feature="dev", allow(enum_variant_names))] #[cfg_attr(feature="dev", allow(enum_variant_names))]
enum InstructionCost<Cost: CostType> { enum Request<Cost: CostType> {
Gas(Cost), Gas(Cost),
GasMem(Cost, Cost, Option<Cost>), GasMem(Cost, Cost),
GasMemProvide(Cost, Cost, Option<U256>),
GasMemCopy(Cost, Cost, Cost) GasMemCopy(Cost, Cost, Cost)
} }
pub struct InstructionRequirements<Cost: CostType> {
pub gas_cost: Cost,
pub provide_gas: Option<Cost>,
pub memory_total_gas: Cost,
pub memory_required_size: usize,
}
pub struct Gasometer<Gas: CostType> { pub struct Gasometer<Gas: CostType> {
pub current_gas: Gas, pub current_gas: Gas,
pub current_mem_gas: Gas, pub current_mem_gas: Gas,
@ -59,11 +67,19 @@ impl<Gas: CostType> Gasometer<Gas> {
/// How much gas is provided to a CALL/CREATE, given that we need to deduct `needed` for this operation /// How much gas is provided to a CALL/CREATE, given that we need to deduct `needed` for this operation
/// and that we `requested` some. /// and that we `requested` some.
pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<evm::Result<Gas>>) -> evm::Result<Gas> { pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<U256>) -> evm::Result<Gas> {
// Try converting requested gas to `Gas` (`U256/u64`)
// but in EIP150 even if we request more we should never fail from OOG
let requested = requested.map(Gas::from_u256);
match schedule.sub_gas_cap_divisor { match schedule.sub_gas_cap_divisor {
Some(cap_divisor) if self.current_gas >= needed => { Some(cap_divisor) if self.current_gas >= needed => {
let gas_remaining = self.current_gas - needed; let gas_remaining = self.current_gas - needed;
let max_gas_provided = gas_remaining - gas_remaining / Gas::from(cap_divisor); let max_gas_provided = match cap_divisor {
64 => gas_remaining - (gas_remaining >> 6),
cap_divisor => gas_remaining - gas_remaining / Gas::from(cap_divisor),
};
if let Some(Ok(r)) = requested { if let Some(Ok(r)) = requested {
Ok(min(r, max_gas_provided)) Ok(min(r, max_gas_provided))
} else { } else {
@ -78,7 +94,7 @@ impl<Gas: CostType> Gasometer<Gas> {
} else { } else {
Ok(0.into()) Ok(0.into())
} }
} },
} }
} }
@ -88,21 +104,21 @@ impl<Gas: CostType> Gasometer<Gas> {
/// We guarantee that the final element of the returned tuple (`provided`) will be `Some` /// We guarantee that the final element of the returned tuple (`provided`) will be `Some`
/// iff the `instruction` is one of `CREATE`, or any of the `CALL` variants. In this case, /// iff the `instruction` is one of `CREATE`, or any of the `CALL` variants. In this case,
/// it will be the amount of gas that the current context provides to the child context. /// it will be the amount of gas that the current context provides to the child context.
pub fn get_gas_cost_mem( pub fn requirements(
&mut self, &mut self,
ext: &evm::Ext, ext: &evm::Ext,
instruction: Instruction, instruction: Instruction,
info: &InstructionInfo, info: &InstructionInfo,
stack: &Stack<U256>, stack: &Stack<U256>,
current_mem_size: usize, current_mem_size: usize,
) -> evm::Result<(Gas, Gas, usize, Option<Gas>)> { ) -> evm::Result<InstructionRequirements<Gas>> {
let schedule = ext.schedule(); let schedule = ext.schedule();
let tier = instructions::get_tier_idx(info.tier); let tier = instructions::get_tier_idx(info.tier);
let default_gas = Gas::from(schedule.tier_step_gas[tier]); let default_gas = Gas::from(schedule.tier_step_gas[tier]);
let cost = match instruction { let cost = match instruction {
instructions::JUMPDEST => { instructions::JUMPDEST => {
InstructionCost::Gas(Gas::from(1)) Request::Gas(Gas::from(1))
}, },
instructions::SSTORE => { instructions::SSTORE => {
let address = H256::from(stack.peek(0)); let address = H256::from(stack.peek(0));
@ -116,47 +132,52 @@ impl<Gas: CostType> Gasometer<Gas> {
// !is_zero(&val) && is_zero(newval) // !is_zero(&val) && is_zero(newval)
schedule.sstore_reset_gas schedule.sstore_reset_gas
}; };
InstructionCost::Gas(Gas::from(gas)) Request::Gas(Gas::from(gas))
}, },
instructions::SLOAD => { instructions::SLOAD => {
InstructionCost::Gas(Gas::from(schedule.sload_gas)) Request::Gas(Gas::from(schedule.sload_gas))
}, },
instructions::BALANCE => { instructions::BALANCE => {
InstructionCost::Gas(Gas::from(schedule.balance_gas)) Request::Gas(Gas::from(schedule.balance_gas))
}, },
instructions::EXTCODESIZE => { instructions::EXTCODESIZE => {
InstructionCost::Gas(Gas::from(schedule.extcodesize_gas)) Request::Gas(Gas::from(schedule.extcodesize_gas))
}, },
instructions::SUICIDE => { instructions::SUICIDE => {
let mut gas = Gas::from(schedule.suicide_gas); let mut gas = Gas::from(schedule.suicide_gas);
let is_value_transfer = !ext.origin_balance().is_zero();
let address = u256_to_address(stack.peek(0)); let address = u256_to_address(stack.peek(0));
if !ext.exists(&address) { if (
!schedule.no_empty && !ext.exists(&address)
) || (
schedule.no_empty && is_value_transfer && !ext.exists_and_not_null(&address)
) {
gas = overflowing!(gas.overflow_add(schedule.suicide_to_new_account_cost.into())); gas = overflowing!(gas.overflow_add(schedule.suicide_to_new_account_cost.into()));
} }
InstructionCost::Gas(gas) Request::Gas(gas)
}, },
instructions::MSTORE | instructions::MLOAD => { instructions::MSTORE | instructions::MLOAD => {
InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 32)), None) Request::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 32)))
}, },
instructions::MSTORE8 => { instructions::MSTORE8 => {
InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 1)), None) Request::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 1)))
}, },
instructions::RETURN => { instructions::RETURN => {
InstructionCost::GasMem(default_gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None) Request::GasMem(default_gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
}, },
instructions::SHA3 => { instructions::SHA3 => {
let w = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(1))), 31)); let w = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(1))), 31));
let words = w >> 5; let words = w >> 5;
let gas = Gas::from(schedule.sha3_gas) + (Gas::from(schedule.sha3_word_gas) * words); let gas = Gas::from(schedule.sha3_gas) + (Gas::from(schedule.sha3_word_gas) * words);
InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None) Request::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
}, },
instructions::CALLDATACOPY | instructions::CODECOPY => { instructions::CALLDATACOPY | instructions::CODECOPY => {
InstructionCost::GasMemCopy(default_gas, try!(mem_needed(stack.peek(0), stack.peek(2))), try!(Gas::from_u256(*stack.peek(2)))) Request::GasMemCopy(default_gas, try!(mem_needed(stack.peek(0), stack.peek(2))), try!(Gas::from_u256(*stack.peek(2))))
}, },
instructions::EXTCODECOPY => { instructions::EXTCODECOPY => {
InstructionCost::GasMemCopy(schedule.extcodecopy_base_gas.into(), try!(mem_needed(stack.peek(1), stack.peek(3))), try!(Gas::from_u256(*stack.peek(3)))) Request::GasMemCopy(schedule.extcodecopy_base_gas.into(), try!(mem_needed(stack.peek(1), stack.peek(3))), try!(Gas::from_u256(*stack.peek(3))))
}, },
instructions::LOG0...instructions::LOG4 => { instructions::LOG0...instructions::LOG4 => {
let no_of_topics = instructions::get_log_topics(instruction); let no_of_topics = instructions::get_log_topics(instruction);
@ -164,7 +185,7 @@ impl<Gas: CostType> Gasometer<Gas> {
let data_gas = overflowing!(try!(Gas::from_u256(*stack.peek(1))).overflow_mul(Gas::from(schedule.log_data_gas))); let data_gas = overflowing!(try!(Gas::from_u256(*stack.peek(1))).overflow_mul(Gas::from(schedule.log_data_gas)));
let gas = overflowing!(data_gas.overflow_add(Gas::from(log_gas))); let gas = overflowing!(data_gas.overflow_add(Gas::from(log_gas)));
InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None) Request::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))))
}, },
instructions::CALL | instructions::CALLCODE => { instructions::CALL | instructions::CALLCODE => {
let mut gas = Gas::from(schedule.call_gas); let mut gas = Gas::from(schedule.call_gas);
@ -174,79 +195,98 @@ impl<Gas: CostType> Gasometer<Gas> {
); );
let address = u256_to_address(stack.peek(1)); let address = u256_to_address(stack.peek(1));
let is_value_transfer = !stack.peek(2).is_zero();
if instruction == instructions::CALL && !ext.exists(&address) { if instruction == instructions::CALL {
gas = overflowing!(gas.overflow_add(schedule.call_new_account_gas.into())); if (
!schedule.no_empty && !ext.exists(&address)
) || (
schedule.no_empty && is_value_transfer && !ext.exists_and_not_null(&address)
) {
gas = overflowing!(gas.overflow_add(schedule.call_new_account_gas.into()));
}
}; };
if !stack.peek(2).is_zero() { if is_value_transfer {
gas = overflowing!(gas.overflow_add(schedule.call_value_transfer_gas.into())); gas = overflowing!(gas.overflow_add(schedule.call_value_transfer_gas.into()));
}; };
// TODO: refactor to avoid duplicate calculation here and later on. let requested = *stack.peek(0);
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let requested = Gas::from_u256(*stack.peek(0));
let provided = try!(self.gas_provided(schedule, cost_so_far, Some(requested)));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided)) Request::GasMemProvide(gas, mem, Some(requested))
}, },
instructions::DELEGATECALL => { instructions::DELEGATECALL => {
let mut gas = Gas::from(schedule.call_gas); let gas = Gas::from(schedule.call_gas);
let mem = cmp::max( let mem = cmp::max(
try!(mem_needed(stack.peek(4), stack.peek(5))), try!(mem_needed(stack.peek(4), stack.peek(5))),
try!(mem_needed(stack.peek(2), stack.peek(3))) try!(mem_needed(stack.peek(2), stack.peek(3)))
); );
let requested = *stack.peek(0);
// TODO: refactor to avoid duplicate calculation here and later on. Request::GasMemProvide(gas, mem, Some(requested))
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let requested = Gas::from_u256(*stack.peek(0));
let provided = try!(self.gas_provided(schedule, cost_so_far, Some(requested)));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
}, },
instructions::CREATE => { instructions::CREATE => {
let mut gas = Gas::from(schedule.create_gas); let gas = Gas::from(schedule.create_gas);
let mem = try!(mem_needed(stack.peek(1), stack.peek(2))); let mem = try!(mem_needed(stack.peek(1), stack.peek(2)));
// TODO: refactor to avoid duplicate calculation here and later on. Request::GasMemProvide(gas, mem, None)
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let provided = try!(self.gas_provided(schedule, cost_so_far, None));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
}, },
instructions::EXP => { instructions::EXP => {
let expon = stack.peek(1); let expon = stack.peek(1);
let bytes = ((expon.bits() + 7) / 8) as usize; let bytes = ((expon.bits() + 7) / 8) as usize;
let gas = Gas::from(schedule.exp_gas + schedule.exp_byte_gas * bytes); let gas = Gas::from(schedule.exp_gas + schedule.exp_byte_gas * bytes);
InstructionCost::Gas(gas) Request::Gas(gas)
}, },
_ => InstructionCost::Gas(default_gas), _ => Request::Gas(default_gas),
}; };
match cost { Ok(match cost {
InstructionCost::Gas(gas) => { Request::Gas(gas) => {
Ok((gas, self.current_mem_gas, 0, None)) InstructionRequirements {
gas_cost: gas,
provide_gas: None,
memory_required_size: 0,
memory_total_gas: self.current_mem_gas,
}
}, },
InstructionCost::GasMem(gas, mem_size, provided) => { Request::GasMem(gas, mem_size) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size)); let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let gas = overflowing!(gas.overflow_add(mem_gas_cost)); let gas = overflowing!(gas.overflow_add(mem_gas_cost));
Ok((gas, new_mem_gas, new_mem_size, provided)) InstructionRequirements {
gas_cost: gas,
provide_gas: None,
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
}
}, },
InstructionCost::GasMemCopy(gas, mem_size, copy) => { Request::GasMemProvide(gas, mem_size, requested) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let gas = overflowing!(gas.overflow_add(mem_gas_cost));
let provided = try!(self.gas_provided(schedule, gas, requested));
let total_gas = overflowing!(gas.overflow_add(provided));
InstructionRequirements {
gas_cost: total_gas,
provide_gas: Some(provided),
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
}
},
Request::GasMemCopy(gas, mem_size, copy) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size)); let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let copy = overflowing!(add_gas_usize(copy, 31)) >> 5; let copy = overflowing!(add_gas_usize(copy, 31)) >> 5;
let copy_gas = Gas::from(schedule.copy_gas) * copy; let copy_gas = Gas::from(schedule.copy_gas) * copy;
let gas = overflowing!(gas.overflow_add(copy_gas)); let gas = overflowing!(gas.overflow_add(copy_gas));
let gas = overflowing!(gas.overflow_add(mem_gas_cost)); let gas = overflowing!(gas.overflow_add(mem_gas_cost));
Ok((gas, new_mem_gas, new_mem_size, None))
} InstructionRequirements {
} gas_cost: gas,
provide_gas: None,
memory_required_size: new_mem_size,
memory_total_gas: new_mem_gas,
}
},
})
} }
fn mem_gas_cost(&self, schedule: &evm::Schedule, current_mem_size: usize, mem_size: &Gas) -> evm::Result<(Gas, Gas, usize)> { fn mem_gas_cost(&self, schedule: &evm::Schedule, current_mem_size: usize, mem_size: &Gas) -> evm::Result<(Gas, Gas, usize)> {
@ -256,7 +296,7 @@ impl<Gas: CostType> Gasometer<Gas> {
let a = overflowing!(s.overflow_mul(Gas::from(schedule.memory_gas))); let a = overflowing!(s.overflow_mul(Gas::from(schedule.memory_gas)));
// Calculate s*s/quad_coeff_div // Calculate s*s/quad_coeff_div
debug_assert_eq!(schedule.quad_coeff_div, 512); assert_eq!(schedule.quad_coeff_div, 512);
let b = overflowing!(s.overflow_mul_shr(s, 9)); let b = overflowing!(s.overflow_mul_shr(s, 9));
Ok(overflowing!(a.overflow_add(b))) Ok(overflowing!(a.overflow_add(b)))
}; };
@ -328,3 +368,4 @@ fn test_calculate_mem_cost() {
assert_eq!(new_mem_gas, 3); assert_eq!(new_mem_gas, 3);
assert_eq!(mem_size, 32); assert_eq!(mem_size, 32);
} }
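The new GasMemProvide path above funnels CALL/CREATE through gas_provided, which applies the EIP-150 "all but one 64th" cap. A minimal sketch of that rule, assuming a plain u64 gas counter and simplifying the no-divisor fallback (the real Gasometer is generic over its cost type and also handles conversion failures):

fn provide_gas(current_gas: u64, needed: u64, requested: Option<u64>, cap_divisor: Option<u64>) -> u64 {
    match cap_divisor {
        Some(div) if current_gas >= needed => {
            let remaining = current_gas - needed;
            // EIP-150: at most all-but-one-1/div of the remaining gas may be passed on.
            let max_provided = remaining - remaining / div;
            requested.map_or(max_provided, |r| r.min(max_provided))
        }
        // No cap divisor (pre-EIP-150 schedules, simplified): hand over the request, or everything.
        _ => requested.unwrap_or(current_gas),
    }
}

// provide_gas(100_000, 700, Some(90_000), Some(64)) == 90_000
//   (the cap is 99_300 - 99_300/64 = 97_749, so the request fits)
// provide_gas(100_000, 700, Some(99_000), Some(64)) == 97_749 (the request is capped)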

View File

@ -54,14 +54,14 @@ const TWO_POW_248: U256 = U256([0, 0, 0, 0x100000000000000]); //0x1 00000000 000
/// Abstraction over raw vector of Bytes. Easier state management of PC. /// Abstraction over raw vector of Bytes. Easier state management of PC.
struct CodeReader<'a> { struct CodeReader<'a> {
position: ProgramCounter, position: ProgramCounter,
code: &'a Bytes code: &'a [u8]
} }
#[cfg_attr(feature="dev", allow(len_without_is_empty))] #[cfg_attr(feature="dev", allow(len_without_is_empty))]
impl<'a> CodeReader<'a> { impl<'a> CodeReader<'a> {
/// Create new code reader - starting at position 0. /// Create new code reader - starting at position 0.
fn new(code: &'a Bytes) -> Self { fn new(code: &'a [u8]) -> Self {
CodeReader { CodeReader {
position: 0, position: 0,
code: code, code: code,
@ -120,14 +120,14 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
try!(self.verify_instruction(ext, instruction, info, &stack)); try!(self.verify_instruction(ext, instruction, info, &stack));
// Calculate gas cost // Calculate gas cost
let (gas_cost, mem_gas, mem_size, provided) = try!(gasometer.get_gas_cost_mem(ext, instruction, info, &stack, self.mem.size())); let requirements = try!(gasometer.requirements(ext, instruction, info, &stack, self.mem.size()));
// TODO: make compile-time removable if too much of a performance hit. // TODO: make compile-time removable if too much of a performance hit.
let trace_executed = ext.trace_prepare_execute(reader.position - 1, instruction, &gas_cost.as_u256()); let trace_executed = ext.trace_prepare_execute(reader.position - 1, instruction, &requirements.gas_cost.as_u256());
try!(gasometer.verify_gas(&gas_cost)); try!(gasometer.verify_gas(&requirements.gas_cost));
self.mem.expand(mem_size); self.mem.expand(requirements.memory_required_size);
gasometer.current_mem_gas = mem_gas; gasometer.current_mem_gas = requirements.memory_total_gas;
gasometer.current_gas = gasometer.current_gas - gas_cost; gasometer.current_gas = gasometer.current_gas - requirements.gas_cost;
evm_debug!({ informant.before_instruction(reader.position, instruction, info, &gasometer.current_gas, &stack) }); evm_debug!({ informant.before_instruction(reader.position, instruction, info, &gasometer.current_gas, &stack) });
@ -138,7 +138,7 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
// Execute instruction // Execute instruction
let result = try!(self.exec_instruction( let result = try!(self.exec_instruction(
gasometer.current_gas, &params, ext, instruction, &mut reader, &mut stack, provided gasometer.current_gas, &params, ext, instruction, &mut reader, &mut stack, requirements.provide_gas
)); ));
evm_debug!({ informant.after_instruction(instruction) }); evm_debug!({ informant.after_instruction(instruction) });

View File

@ -15,20 +15,27 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc; use std::sync::Arc;
use lru_cache::LruCache; use util::{H256, HeapSizeOf, Mutex};
use util::{H256, Mutex};
use util::sha3::*; use util::sha3::*;
use util::cache::MemoryLruCache;
use bit_set::BitSet; use bit_set::BitSet;
use super::super::instructions; use super::super::instructions;
const INITIAL_CAPACITY: usize = 32;
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024; const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
// stub for a HeapSizeOf implementation.
struct Bits(Arc<BitSet>);
impl HeapSizeOf for Bits {
fn heap_size_of_children(&self) -> usize {
// dealing in bits here
self.0.capacity() * 8
}
}
/// Global cache for EVM interpreter /// Global cache for EVM interpreter
pub struct SharedCache { pub struct SharedCache {
jump_destinations: Mutex<LruCache<H256, Arc<BitSet>>>, jump_destinations: Mutex<MemoryLruCache<H256, Bits>>,
max_size: usize,
cur_size: Mutex<usize>,
} }
impl SharedCache { impl SharedCache {
@ -36,9 +43,7 @@ impl SharedCache {
/// to cache. /// to cache.
pub fn new(max_size: usize) -> Self { pub fn new(max_size: usize) -> Self {
SharedCache { SharedCache {
jump_destinations: Mutex::new(LruCache::new(INITIAL_CAPACITY)), jump_destinations: Mutex::new(MemoryLruCache::new(max_size)),
max_size: max_size * 8, // dealing with bits here.
cur_size: Mutex::new(0),
} }
} }
@ -49,37 +54,11 @@ impl SharedCache {
} }
if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) { if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) {
return d.clone(); return d.0.clone();
} }
let d = Self::find_jump_destinations(code); let d = Self::find_jump_destinations(code);
self.jump_destinations.lock().insert(code_hash.clone(), Bits(d.clone()));
{
let mut cur_size = self.cur_size.lock();
*cur_size += d.capacity();
let mut jump_dests = self.jump_destinations.lock();
let cap = jump_dests.capacity();
// grow the cache as necessary; it operates on amount of items
// but we're working based on memory usage.
if jump_dests.len() == cap && *cur_size < self.max_size {
jump_dests.set_capacity(cap * 2);
}
// account for any element displaced from the cache.
if let Some(lru) = jump_dests.insert(code_hash.clone(), d.clone()) {
*cur_size -= lru.capacity();
}
// remove elements until we are below the memory target.
while *cur_size > self.max_size {
match jump_dests.remove_lru() {
Some((_, v)) => *cur_size -= v.capacity(),
_ => break,
}
}
}
d d
} }
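The rewrite above swaps the hand-rolled capacity bookkeeping for MemoryLruCache, which is constructed with a byte budget and, via HeapSizeOf, evicts by memory use rather than by entry count. A toy sketch of that byte-budgeted idea, with hypothetical types and a simple FIFO eviction order for brevity (not the util crate's implementation):

use std::collections::VecDeque;

struct SizedCache<V> {
    max_bytes: usize,
    cur_bytes: usize,
    items: VecDeque<(u64, V, usize)>, // (key, value, reported heap size)
}

impl<V> SizedCache<V> {
    fn insert(&mut self, key: u64, value: V, size: usize) {
        self.items.push_back((key, value, size));
        self.cur_bytes += size;
        // Evict the oldest entries until we are back under the memory budget.
        while self.cur_bytes > self.max_bytes {
            match self.items.pop_front() {
                Some((_, _, s)) => self.cur_bytes -= s,
                None => break,
            }
        }
    }
}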

View File

@ -93,6 +93,10 @@ pub struct Schedule {
/// If Some(x): let limit = GAS * (x - 1) / x; let CALL's gas = min(requested, limit). let CREATE's gas = limit. /// If Some(x): let limit = GAS * (x - 1) / x; let CALL's gas = min(requested, limit). let CREATE's gas = limit.
/// If None: let CALL's gas = (requested > GAS ? [OOG] : GAS). let CREATE's gas = GAS /// If None: let CALL's gas = (requested > GAS ? [OOG] : GAS). let CREATE's gas = GAS
pub sub_gas_cap_divisor: Option<usize>, pub sub_gas_cap_divisor: Option<usize>,
/// Don't ever make empty accounts; contracts start with nonce=1. Also, don't charge 25k when sending/suiciding with zero value.
pub no_empty: bool,
/// Kill empty accounts if touched.
pub kill_empty: bool,
} }
impl Schedule { impl Schedule {
@ -106,16 +110,16 @@ impl Schedule {
Self::new(true, true, 53000) Self::new(true, true, 53000)
} }
/// Schedule for the Homestead-era of the Ethereum main net. /// Schedule for the post-EIP-150-era of the Ethereum main net.
pub fn new_homestead_gas_fix() -> Schedule { pub fn new_post_eip150(fix_exp: bool, no_empty: bool, kill_empty: bool) -> Schedule {
Schedule{ Schedule {
exceptional_failed_code_deposit: true, exceptional_failed_code_deposit: true,
have_delegate_call: true, have_delegate_call: true,
stack_limit: 1024, stack_limit: 1024,
max_depth: 1024, max_depth: 1024,
tier_step_gas: [0, 2, 3, 5, 8, 10, 20, 0], tier_step_gas: [0, 2, 3, 5, 8, 10, 20, 0],
exp_gas: 10, exp_gas: 10,
exp_byte_gas: 10, exp_byte_gas: if fix_exp {50} else {10},
sha3_gas: 30, sha3_gas: 30,
sha3_word_gas: 6, sha3_word_gas: 6,
sload_gas: 200, sload_gas: 200,
@ -146,11 +150,13 @@ impl Schedule {
suicide_gas: 5000, suicide_gas: 5000,
suicide_to_new_account_cost: 25000, suicide_to_new_account_cost: 25000,
sub_gas_cap_divisor: Some(64), sub_gas_cap_divisor: Some(64),
no_empty: no_empty,
kill_empty: kill_empty,
} }
} }
fn new(efcd: bool, hdc: bool, tcg: usize) -> Schedule { fn new(efcd: bool, hdc: bool, tcg: usize) -> Schedule {
Schedule{ Schedule {
exceptional_failed_code_deposit: efcd, exceptional_failed_code_deposit: efcd,
have_delegate_call: hdc, have_delegate_call: hdc,
stack_limit: 1024, stack_limit: 1024,
@ -188,6 +194,8 @@ impl Schedule {
suicide_gas: 0, suicide_gas: 0,
suicide_to_new_account_cost: 0, suicide_to_new_account_cost: 0,
sub_gas_cap_divisor: None, sub_gas_cap_divisor: None,
no_empty: false,
kill_empty: false,
} }
} }
} }
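The no_empty flag added above is what drives the CALL/SUICIDE new-account surcharge in the gasometer changes earlier. Boiled down to a predicate over plain booleans (hypothetical helper, not the crate's API), the rule reads:

fn charges_new_account_gas(no_empty: bool, exists: bool, exists_and_not_null: bool, transfers_value: bool) -> bool {
    // Pre-EIP-161 (no_empty == false): pay for any call/suicide to a non-existent account.
    // Post-EIP-161 (no_empty == true): pay only when value actually flows into a dead account.
    (!no_empty && !exists) || (no_empty && transfers_value && !exists_and_not_null)
}

In other words, the 25k surcharge used to apply whenever the destination was absent; with the EIP-161 schedule it applies only when a value transfer would bring a non-existent or null account to life.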

View File

@ -94,6 +94,14 @@ impl Ext for FakeExt {
self.balances.contains_key(address) self.balances.contains_key(address)
} }
fn exists_and_not_null(&self, address: &Address) -> bool {
self.balances.get(address).map_or(false, |b| !b.is_zero())
}
fn origin_balance(&self) -> U256 {
unimplemented!()
}
fn balance(&self, address: &Address) -> U256 { fn balance(&self, address: &Address) -> U256 {
*self.balances.get(address).unwrap() *self.balances.get(address).unwrap()
} }

View File

@ -17,7 +17,7 @@
//! Transaction Execution environment. //! Transaction Execution environment.
use util::*; use util::*;
use action_params::{ActionParams, ActionValue}; use action_params::{ActionParams, ActionValue};
use state::{State, Substate}; use state::{State, Substate, CleanupMode};
use engines::Engine; use engines::Engine;
use types::executed::CallType; use types::executed::CallType;
use env_info::EnvInfo; use env_info::EnvInfo;
@ -256,9 +256,11 @@ impl<'a> Executive<'a> {
// backup used in case of running out of gas // backup used in case of running out of gas
self.state.checkpoint(); self.state.checkpoint();
let schedule = self.engine.schedule(self.info);
// at first, transfer value to destination // at first, transfer value to destination
if let ActionValue::Transfer(val) = params.value { if let ActionValue::Transfer(val) = params.value {
self.state.transfer_balance(&params.sender, &params.address, &val); self.state.transfer_balance(&params.sender, &params.address, &val, substate.to_cleanup_mode(&schedule));
} }
trace!("Executive::call(params={:?}) self.env_info={:?}", params, self.info); trace!("Executive::call(params={:?}) self.env_info={:?}", params, self.info);
@ -364,12 +366,14 @@ impl<'a> Executive<'a> {
let mut unconfirmed_substate = Substate::new(); let mut unconfirmed_substate = Substate::new();
// create contract and transfer value to it if necessary // create contract and transfer value to it if necessary
let schedule = self.engine.schedule(self.info);
let nonce_offset = if schedule.no_empty {1} else {0}.into();
let prev_bal = self.state.balance(&params.address); let prev_bal = self.state.balance(&params.address);
if let ActionValue::Transfer(val) = params.value { if let ActionValue::Transfer(val) = params.value {
self.state.sub_balance(&params.sender, &val); self.state.sub_balance(&params.sender, &val);
self.state.new_contract(&params.address, val + prev_bal); self.state.new_contract(&params.address, val + prev_bal, nonce_offset);
} else { } else {
self.state.new_contract(&params.address, prev_bal); self.state.new_contract(&params.address, prev_bal, nonce_offset);
} }
let trace_info = tracer.prepare_trace_create(&params); let trace_info = tracer.prepare_trace_create(&params);
@ -405,7 +409,7 @@ impl<'a> Executive<'a> {
fn finalize( fn finalize(
&mut self, &mut self,
t: &SignedTransaction, t: &SignedTransaction,
substate: Substate, mut substate: Substate,
result: evm::Result<U256>, result: evm::Result<U256>,
output: Bytes, output: Bytes,
trace: Vec<FlatTrace>, trace: Vec<FlatTrace>,
@ -440,15 +444,23 @@ impl<'a> Executive<'a> {
}; };
trace!("exec::finalize: Refunding refund_value={}, sender={}\n", refund_value, sender); trace!("exec::finalize: Refunding refund_value={}, sender={}\n", refund_value, sender);
self.state.add_balance(&sender, &refund_value); // Below: NoEmpty is safe since the sender must already be non-null to have sent this transaction
self.state.add_balance(&sender, &refund_value, CleanupMode::NoEmpty);
trace!("exec::finalize: Compensating author: fees_value={}, author={}\n", fees_value, &self.info.author); trace!("exec::finalize: Compensating author: fees_value={}, author={}\n", fees_value, &self.info.author);
self.state.add_balance(&self.info.author, &fees_value); self.state.add_balance(&self.info.author, &fees_value, substate.to_cleanup_mode(&schedule));
// perform suicides // perform suicides
for address in &substate.suicides { for address in &substate.suicides {
self.state.kill_account(address); self.state.kill_account(address);
} }
// perform garbage-collection
for address in &substate.garbage {
if self.state.exists(address) && !self.state.exists_and_not_null(address) {
self.state.kill_account(address);
}
}
match result { match result {
Err(evm::Error::Internal) => Err(ExecutionError::Internal), Err(evm::Error::Internal) => Err(ExecutionError::Internal),
Err(_) => { Err(_) => {
@ -509,7 +521,7 @@ mod tests {
use env_info::EnvInfo; use env_info::EnvInfo;
use evm::{Factory, VMType}; use evm::{Factory, VMType};
use error::ExecutionError; use error::ExecutionError;
use state::Substate; use state::{Substate, CleanupMode};
use tests::helpers::*; use tests::helpers::*;
use trace::trace; use trace::trace;
use trace::{FlatTrace, Tracer, NoopTracer, ExecutiveTracer}; use trace::{FlatTrace, Tracer, NoopTracer, ExecutiveTracer};
@ -538,7 +550,7 @@ mod tests {
params.value = ActionValue::Transfer(U256::from(0x7)); params.value = ActionValue::Transfer(U256::from(0x7));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(0x100u64)); state.add_balance(&sender, &U256::from(0x100u64), CleanupMode::NoEmpty);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
let mut substate = Substate::new(); let mut substate = Substate::new();
@ -597,7 +609,7 @@ mod tests {
params.value = ActionValue::Transfer(U256::from(100)); params.value = ActionValue::Transfer(U256::from(100));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(100)); state.add_balance(&sender, &U256::from(100), CleanupMode::NoEmpty);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
let mut substate = Substate::new(); let mut substate = Substate::new();
@ -656,7 +668,7 @@ mod tests {
params.call_type = CallType::Call; params.call_type = CallType::Call;
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(100)); state.add_balance(&sender, &U256::from(100), CleanupMode::NoEmpty);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(5); let engine = TestEngine::new(5);
let mut substate = Substate::new(); let mut substate = Substate::new();
@ -767,7 +779,7 @@ mod tests {
params.value = ActionValue::Transfer(100.into()); params.value = ActionValue::Transfer(100.into());
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(100)); state.add_balance(&sender, &U256::from(100), CleanupMode::NoEmpty);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(5); let engine = TestEngine::new(5);
let mut substate = Substate::new(); let mut substate = Substate::new();
@ -855,7 +867,7 @@ mod tests {
params.value = ActionValue::Transfer(U256::from(100)); params.value = ActionValue::Transfer(U256::from(100));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(100)); state.add_balance(&sender, &U256::from(100), CleanupMode::NoEmpty);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
let mut substate = Substate::new(); let mut substate = Substate::new();
@ -907,7 +919,7 @@ mod tests {
params.value = ActionValue::Transfer(U256::from(100)); params.value = ActionValue::Transfer(U256::from(100));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(100)); state.add_balance(&sender, &U256::from(100), CleanupMode::NoEmpty);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(1024); let engine = TestEngine::new(1024);
let mut substate = Substate::new(); let mut substate = Substate::new();
@ -967,7 +979,7 @@ mod tests {
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.init_code(&address_a, code_a.clone()); state.init_code(&address_a, code_a.clone());
state.init_code(&address_b, code_b.clone()); state.init_code(&address_b, code_b.clone());
state.add_balance(&sender, &U256::from(100_000)); state.add_balance(&sender, &U256::from(100_000), CleanupMode::NoEmpty);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
@ -1040,13 +1052,13 @@ mod tests {
gas: U256::from(100_000), gas: U256::from(100_000),
gas_price: U256::zero(), gas_price: U256::zero(),
nonce: U256::zero() nonce: U256::zero()
}.sign(keypair.secret()); }.sign(keypair.secret(), None);
let sender = t.sender().unwrap(); let sender = t.sender().unwrap();
let contract = contract_address(&sender, &U256::zero()); let contract = contract_address(&sender, &U256::zero());
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(18)); state.add_balance(&sender, &U256::from(18), CleanupMode::NoEmpty);
let mut info = EnvInfo::default(); let mut info = EnvInfo::default();
info.gas_limit = U256::from(100_000); info.gas_limit = U256::from(100_000);
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
@ -1107,12 +1119,12 @@ mod tests {
gas: U256::from(100_000), gas: U256::from(100_000),
gas_price: U256::zero(), gas_price: U256::zero(),
nonce: U256::one() nonce: U256::one()
}.sign(keypair.secret()); }.sign(keypair.secret(), None);
let sender = t.sender().unwrap(); let sender = t.sender().unwrap();
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(17)); state.add_balance(&sender, &U256::from(17), CleanupMode::NoEmpty);
let mut info = EnvInfo::default(); let mut info = EnvInfo::default();
info.gas_limit = U256::from(100_000); info.gas_limit = U256::from(100_000);
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
@ -1140,12 +1152,12 @@ mod tests {
gas: U256::from(80_001), gas: U256::from(80_001),
gas_price: U256::zero(), gas_price: U256::zero(),
nonce: U256::zero() nonce: U256::zero()
}.sign(keypair.secret()); }.sign(keypair.secret(), None);
let sender = t.sender().unwrap(); let sender = t.sender().unwrap();
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(17)); state.add_balance(&sender, &U256::from(17), CleanupMode::NoEmpty);
let mut info = EnvInfo::default(); let mut info = EnvInfo::default();
info.gas_used = U256::from(20_000); info.gas_used = U256::from(20_000);
info.gas_limit = U256::from(100_000); info.gas_limit = U256::from(100_000);
@ -1175,12 +1187,12 @@ mod tests {
gas: U256::from(100_000), gas: U256::from(100_000),
gas_price: U256::one(), gas_price: U256::one(),
nonce: U256::zero() nonce: U256::zero()
}.sign(keypair.secret()); }.sign(keypair.secret(), None);
let sender = t.sender().unwrap(); let sender = t.sender().unwrap();
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from(100_017)); state.add_balance(&sender, &U256::from(100_017), CleanupMode::NoEmpty);
let mut info = EnvInfo::default(); let mut info = EnvInfo::default();
info.gas_limit = U256::from(100_000); info.gas_limit = U256::from(100_000);
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
@ -1215,7 +1227,7 @@ mod tests {
params.value = ActionValue::Transfer(U256::from_str("0de0b6b3a7640000").unwrap()); params.value = ActionValue::Transfer(U256::from_str("0de0b6b3a7640000").unwrap());
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.add_balance(&sender, &U256::from_str("152d02c7e14af6800000").unwrap()); state.add_balance(&sender, &U256::from_str("152d02c7e14af6800000").unwrap(), CleanupMode::NoEmpty);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
let mut substate = Substate::new(); let mut substate = Substate::new();
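The finalize() changes above add a sweep over substate.garbage so that empty accounts touched during execution are removed when the schedule's kill_empty behaviour applies. A minimal sketch of that sweep, assuming a toy account map keyed by String (hypothetical types, not the crate's State API):

use std::collections::HashMap;

#[derive(Default)]
struct ToyAccount { balance: u64, nonce: u64, has_code: bool }

fn is_null(a: &ToyAccount) -> bool { a.balance == 0 && a.nonce == 0 && !a.has_code }

fn sweep_touched(state: &mut HashMap<String, ToyAccount>, touched: &[String]) {
    for addr in touched {
        // Mirrors the finalize() loop: an account is removed only if it exists and is null.
        if state.get(addr).map_or(false, is_null) {
            state.remove(addr);
        }
    }
}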

View File

@ -114,6 +114,12 @@ impl<'a, T, V> Ext for Externalities<'a, T, V> where T: 'a + Tracer, V: 'a + VMT
self.state.exists(address) self.state.exists(address)
} }
fn exists_and_not_null(&self, address: &Address) -> bool {
self.state.exists_and_not_null(address)
}
fn origin_balance(&self) -> U256 { self.balance(&self.origin_info.address) }
fn balance(&self, address: &Address) -> U256 { fn balance(&self, address: &Address) -> U256 {
self.state.balance(address) self.state.balance(address)
} }
@ -269,11 +275,11 @@ impl<'a, T, V> Ext for Externalities<'a, T, V> where T: 'a + Tracer, V: 'a + VMT
let address = self.origin_info.address.clone(); let address = self.origin_info.address.clone();
let balance = self.balance(&address); let balance = self.balance(&address);
if &address == refund_address { if &address == refund_address {
// TODO [todr] To be consisted with CPP client we set balance to 0 in that case. // TODO [todr] To be consistent with CPP client we set balance to 0 in that case.
self.state.sub_balance(&address, &balance); self.state.sub_balance(&address, &balance);
} else { } else {
trace!("Suiciding {} -> {} (xfer: {})", address, refund_address, balance); trace!(target: "ext", "Suiciding {} -> {} (xfer: {})", address, refund_address, balance);
self.state.transfer_balance(&address, refund_address, &balance); self.state.transfer_balance(&address, refund_address, &balance, self.substate.to_cleanup_mode(&self.schedule));
} }
self.tracer.trace_suicide(address, balance, refund_address.clone()); self.tracer.trace_suicide(address, balance, refund_address.clone());

View File

@ -49,6 +49,7 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
ChainEra::Frontier => ethereum::new_frontier_test(), ChainEra::Frontier => ethereum::new_frontier_test(),
ChainEra::Homestead => ethereum::new_homestead_test(), ChainEra::Homestead => ethereum::new_homestead_test(),
ChainEra::Eip150 => ethereum::new_eip150_test(), ChainEra::Eip150 => ethereum::new_eip150_test(),
ChainEra::Eip161 => ethereum::new_eip161_test(),
ChainEra::TransitionTest => ethereum::new_transition_test(), ChainEra::TransitionTest => ethereum::new_transition_test(),
}; };
spec.set_genesis_state(state); spec.set_genesis_state(state);

View File

@ -0,0 +1,51 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use super::test_common::*;
use tests::helpers::*;
use super::state::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Eip161)
}
declare_test!{StateTests_EIP158_stEIP158SpecificTest, "StateTests/EIP158/stEIP158SpecificTest"}
declare_test!{StateTests_EIP158_stNonZeroCallsTest, "StateTests/EIP158/stNonZeroCallsTest"}
declare_test!{StateTests_EIP158_stZeroCallsTest, "StateTests/EIP158/stZeroCallsTest"}
declare_test!{StateTests_EIP158_EIP150_stMemExpandingEIPCalls, "StateTests/EIP158/EIP150/stMemExpandingEIPCalls"}
declare_test!{StateTests_EIP158_EIP150_stEIPSpecificTest, "StateTests/EIP158/EIP150/stEIPSpecificTest"}
declare_test!{StateTests_EIP158_EIP150_stEIPsingleCodeGasPrices, "StateTests/EIP158/EIP150/stEIPsingleCodeGasPrices"}
declare_test!{StateTests_EIP158_EIP150_stChangedTests, "StateTests/EIP158/EIP150/stChangedTests"}
declare_test!{StateTests_EIP158_Homestead_stBoundsTest, "StateTests/EIP158/Homestead/stBoundsTest"}
declare_test!{StateTests_EIP158_Homestead_stCallCodes, "StateTests/EIP158/Homestead/stCallCodes"}
declare_test!{StateTests_EIP158_Homestead_stCallCreateCallCodeTest, "StateTests/EIP158/Homestead/stCallCreateCallCodeTest"}
declare_test!{StateTests_EIP158_Homestead_stCallDelegateCodes, "StateTests/EIP158/Homestead/stCallDelegateCodes"}
declare_test!{StateTests_EIP158_Homestead_stCallDelegateCodesCallCode, "StateTests/EIP158/Homestead/stCallDelegateCodesCallCode"}
declare_test!{StateTests_EIP158_Homestead_stDelegatecallTest, "StateTests/EIP158/Homestead/stDelegatecallTest"}
declare_test!{StateTests_EIP158_Homestead_stHomeSteadSpecific, "StateTests/EIP158/Homestead/stHomeSteadSpecific"}
declare_test!{StateTests_EIP158_Homestead_stInitCodeTest, "StateTests/EIP158/Homestead/stInitCodeTest"}
declare_test!{StateTests_EIP158_Homestead_stLogTests, "StateTests/EIP158/Homestead/stLogTests"}
declare_test!{heavy => StateTests_EIP158_Homestead_stMemoryTest, "StateTests/EIP158/Homestead/stMemoryTest"}
declare_test!{StateTests_EIP158_Homestead_stPreCompiledContracts, "StateTests/EIP158/Homestead/stPreCompiledContracts"}
declare_test!{heavy => StateTests_EIP158_Homestead_stQuadraticComplexityTest, "StateTests/EIP158/Homestead/stQuadraticComplexityTest"}
declare_test!{StateTests_EIP158_Homestead_stRecursiveCreate, "StateTests/EIP158/Homestead/stRecursiveCreate"}
declare_test!{StateTests_EIP158_Homestead_stRefundTest, "StateTests/EIP158/Homestead/stRefundTest"}
declare_test!{StateTests_EIP158_Homestead_stSpecialTest, "StateTests/EIP158/Homestead/stSpecialTest"}
declare_test!{StateTests_EIP158_Homestead_stSystemOperationsTest, "StateTests/EIP158/Homestead/stSystemOperationsTest"}
declare_test!{StateTests_EIP158_Homestead_stTransactionTest, "StateTests/EIP158/Homestead/stTransactionTest"}
declare_test!{StateTests_EIP158_Homestead_stWalletTest, "StateTests/EIP158/Homestead/stWalletTest"}

View File

@ -92,10 +92,18 @@ impl<'a, T, V> Ext for TestExt<'a, T, V> where T: Tracer, V: VMTracer {
self.ext.exists(address) self.ext.exists(address)
} }
fn exists_and_not_null(&self, address: &Address) -> bool {
self.ext.exists_and_not_null(address)
}
fn balance(&self, address: &Address) -> U256 { fn balance(&self, address: &Address) -> U256 {
self.ext.balance(address) self.ext.balance(address)
} }
fn origin_balance(&self) -> U256 {
self.ext.origin_balance()
}
fn blockhash(&self, number: &U256) -> H256 { fn blockhash(&self, number: &U256) -> H256 {
self.ext.blockhash(number) self.ext.blockhash(number)
} }

View File

@ -24,4 +24,5 @@ mod chain;
mod homestead_state; mod homestead_state;
mod homestead_chain; mod homestead_chain;
mod eip150_state; mod eip150_state;
mod eip161_state;
mod trie; mod trie;

View File

@ -29,6 +29,7 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
ChainEra::Frontier => ethereum::new_mainnet_like().engine, ChainEra::Frontier => ethereum::new_mainnet_like().engine,
ChainEra::Homestead => ethereum::new_homestead_test().engine, ChainEra::Homestead => ethereum::new_homestead_test().engine,
ChainEra::Eip150 => ethereum::new_eip150_test().engine, ChainEra::Eip150 => ethereum::new_eip150_test().engine,
ChainEra::Eip161 => ethereum::new_eip161_test().engine,
ChainEra::TransitionTest => ethereum::new_transition_test().engine, ChainEra::TransitionTest => ethereum::new_transition_test().engine,
}; };

View File

@ -26,8 +26,7 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
let old_schedule = evm::Schedule::new_frontier(); let old_schedule = evm::Schedule::new_frontier();
let new_schedule = evm::Schedule::new_homestead(); let new_schedule = evm::Schedule::new_homestead();
for (name, test) in tests.into_iter() { for (name, test) in tests.into_iter() {
let mut fail = false; let mut fail_unless = |cond: bool, title: &str| if !cond { failed.push(name.clone()); println!("Transaction failed: {:?}: {:?}", name, title); };
let mut fail_unless = |cond: bool| if !cond && !fail { failed.push(name.clone()); println!("Transaction failed: {:?}", name); fail = true };
let number: Option<u64> = test.block_number.map(Into::into); let number: Option<u64> = test.block_number.map(Into::into);
let schedule = match number { let schedule = match number {
@ -35,27 +34,34 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
Some(x) if x < 1_150_000 => &old_schedule, Some(x) if x < 1_150_000 => &old_schedule,
Some(_) => &new_schedule Some(_) => &new_schedule
}; };
let allow_network_id_of_one = number.map_or(false, |n| n >= 3_500_000);
let rlp: Vec<u8> = test.rlp.into(); let rlp: Vec<u8> = test.rlp.into();
let res = UntrustedRlp::new(&rlp) let res = UntrustedRlp::new(&rlp)
.as_val() .as_val()
.map_err(From::from) .map_err(From::from)
.and_then(|t: SignedTransaction| t.validate(schedule, schedule.have_delegate_call)); .and_then(|t: SignedTransaction| t.validate(schedule, schedule.have_delegate_call, allow_network_id_of_one));
fail_unless(test.transaction.is_none() == res.is_err()); fail_unless(test.transaction.is_none() == res.is_err(), "Validity different");
if let (Some(tx), Some(sender)) = (test.transaction, test.sender) { if let (Some(tx), Some(sender)) = (test.transaction, test.sender) {
let t = res.unwrap(); let t = res.unwrap();
fail_unless(t.sender().unwrap() == sender.into()); fail_unless(t.sender().unwrap() == sender.into(), "sender mismatch");
let is_acceptable_network_id = match t.network_id() {
None => true,
Some(1) if allow_network_id_of_one => true,
_ => false,
};
fail_unless(is_acceptable_network_id, "Network ID unacceptable");
let data: Vec<u8> = tx.data.into(); let data: Vec<u8> = tx.data.into();
fail_unless(t.data == data); fail_unless(t.data == data, "data mismatch");
fail_unless(t.gas_price == tx.gas_price.into()); fail_unless(t.gas_price == tx.gas_price.into(), "gas_price mismatch");
fail_unless(t.nonce == tx.nonce.into()); fail_unless(t.nonce == tx.nonce.into(), "nonce mismatch");
fail_unless(t.value == tx.value.into()); fail_unless(t.value == tx.value.into(), "value mismatch");
let to: Option<ethjson::hash::Address> = tx.to.into(); let to: Option<ethjson::hash::Address> = tx.to.into();
let to: Option<Address> = to.map(Into::into); let to: Option<Address> = to.map(Into::into);
match t.action { match t.action {
Action::Call(dest) => fail_unless(Some(dest) == to), Action::Call(dest) => fail_unless(Some(dest) == to, "call/destination mismatch"),
Action::Create => fail_unless(None == to), Action::Create => fail_unless(None == to, "create mismatch"),
} }
} }
} }
@ -73,3 +79,7 @@ declare_test!{TransactionTests_Homestead_ttTransactionTest, "TransactionTests/Ho
declare_test!{heavy => TransactionTests_Homestead_tt10mbDataField, "TransactionTests/Homestead/tt10mbDataField"} declare_test!{heavy => TransactionTests_Homestead_tt10mbDataField, "TransactionTests/Homestead/tt10mbDataField"}
declare_test!{TransactionTests_Homestead_ttWrongRLPTransaction, "TransactionTests/Homestead/ttWrongRLPTransaction"} declare_test!{TransactionTests_Homestead_ttWrongRLPTransaction, "TransactionTests/Homestead/ttWrongRLPTransaction"}
declare_test!{TransactionTests_RandomTests_tr201506052141PYTHON, "TransactionTests/RandomTests/tr201506052141PYTHON"} declare_test!{TransactionTests_RandomTests_tr201506052141PYTHON, "TransactionTests/RandomTests/tr201506052141PYTHON"}
declare_test!{TransactionTests_Homestead_ttTransactionTestEip155VitaliksTests, "TransactionTests/Homestead/ttTransactionTestEip155VitaliksTests"}
declare_test!{TransactionTests_EIP155_ttTransactionTest, "TransactionTests/EIP155/ttTransactionTest"}
declare_test!{TransactionTests_EIP155_ttTransactionTestEip155VitaliksTests, "TransactionTests/EIP155/ttTransactionTestEip155VitaliksTests"}
declare_test!{TransactionTests_EIP155_ttTransactionTestVRule, "TransactionTests/EIP155/ttTransactionTestVRule"}
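The EIP-155 check added in the test harness above boils down to a small acceptance rule over the transaction's optional network id; a sketch with a hypothetical helper name (the flag comes from the fixture's block number crossing 3,500,000):

fn network_id_acceptable(network_id: Option<u64>, allow_network_id_of_one: bool) -> bool {
    match network_id {
        // Pre-EIP-155 signatures carry no network id and remain valid.
        None => true,
        // A network id of 1 is accepted only once replay protection is active for the block.
        Some(1) if allow_network_id_of_one => true,
        _ => false,
    }
}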

View File

@ -101,6 +101,7 @@ extern crate bit_set;
extern crate rlp; extern crate rlp;
extern crate ethcore_bloom_journal as bloom_journal; extern crate ethcore_bloom_journal as bloom_journal;
extern crate byteorder; extern crate byteorder;
extern crate transient_hashmap;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
@ -136,6 +137,7 @@ pub mod miner;
pub mod snapshot; pub mod snapshot;
pub mod action_params; pub mod action_params;
pub mod db; pub mod db;
pub mod verification;
#[macro_use] pub mod evm; #[macro_use] pub mod evm;
mod cache_manager; mod cache_manager;
@ -149,7 +151,6 @@ mod account_db;
mod builtin; mod builtin;
mod executive; mod executive;
mod externalities; mod externalities;
mod verification;
mod blockchain; mod blockchain;
mod types; mod types;
mod factory; mod factory;

View File

@ -61,7 +61,7 @@ pub fn generate_bloom(source: Arc<Database>, dest: &mut Database) -> Result<(),
let account_trie = try!(TrieDB::new(state_db.as_hashdb(), &state_root).map_err(|e| Error::Custom(format!("Cannot open trie: {:?}", e)))); let account_trie = try!(TrieDB::new(state_db.as_hashdb(), &state_root).map_err(|e| Error::Custom(format!("Cannot open trie: {:?}", e))));
for item in try!(account_trie.iter().map_err(|_| Error::MigrationImpossible)) { for item in try!(account_trie.iter().map_err(|_| Error::MigrationImpossible)) {
let (ref account_key, _) = try!(item.map_err(|_| Error::MigrationImpossible)); let (ref account_key, _) = try!(item.map_err(|_| Error::MigrationImpossible));
let account_key_hash = H256::from_slice(&account_key); let account_key_hash = H256::from_slice(account_key);
bloom.set(&*account_key_hash); bloom.set(&*account_key_hash);
} }

View File

@ -0,0 +1,339 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Banning Queue
//! Transaction Queue wrapper maintaining an additional list of banned senders and contract hashes.
use std::time::Duration;
use std::ops::{Deref, DerefMut};
use std::cell::Cell;
use transaction::{SignedTransaction, Action};
use transient_hashmap::TransientHashMap;
use miner::{TransactionQueue, TransactionImportResult, TransactionOrigin, AccountDetails};
use error::{Error, TransactionError};
use util::{Uint, U256, H256, Address, Hashable};
type Count = u16;
/// Auto-Banning threshold
pub enum Threshold {
/// Should ban after a given number of misbehaviours has been reported.
BanAfter(Count),
/// Should never ban anything
NeverBan
}
impl Default for Threshold {
fn default() -> Self {
Threshold::NeverBan
}
}
/// Transaction queue with banlist.
pub struct BanningTransactionQueue {
queue: TransactionQueue,
ban_threshold: Threshold,
senders_bans: TransientHashMap<Address, Cell<Count>>,
recipients_bans: TransientHashMap<Address, Cell<Count>>,
codes_bans: TransientHashMap<H256, Cell<Count>>,
}
impl BanningTransactionQueue {
/// Creates new banlisting transaction queue
pub fn new(queue: TransactionQueue, ban_threshold: Threshold, ban_lifetime: Duration) -> Self {
let ban_lifetime_sec = ban_lifetime.as_secs();
assert!(ban_lifetime_sec > 0, "Lifetime has to be specified in seconds.");
BanningTransactionQueue {
queue: queue,
ban_threshold: ban_threshold,
senders_bans: TransientHashMap::new(ban_lifetime_sec),
recipients_bans: TransientHashMap::new(ban_lifetime_sec),
codes_bans: TransientHashMap::new(ban_lifetime_sec),
}
}
/// Borrows internal queue.
/// NOTE: you can insert transactions to the queue even
/// if they would be rejected because of ban otherwise.
/// But probably you shouldn't.
pub fn queue(&mut self) -> &mut TransactionQueue {
&mut self.queue
}
/// Add to the queue taking bans into consideration.
/// May reject transaction because of the banlist.
pub fn add_with_banlist<F, G>(
&mut self,
transaction: SignedTransaction,
account_details: &F,
gas_estimator: &G,
) -> Result<TransactionImportResult, Error> where
F: Fn(&Address) -> AccountDetails,
G: Fn(&SignedTransaction) -> U256,
{
if let Threshold::BanAfter(threshold) = self.ban_threshold {
// NOTE In all checks use direct query to avoid increasing ban timeout.
// Check sender
if let Ok(sender) = transaction.sender() {
let count = self.senders_bans.direct().get(&sender).map(|v| v.get()).unwrap_or(0);
if count > threshold {
debug!(target: "txqueue", "Ignoring transaction {:?} because sender is banned.", transaction.hash());
return Err(Error::Transaction(TransactionError::SenderBanned));
}
}
// Check recipient
if let Action::Call(recipient) = transaction.action {
let count = self.recipients_bans.direct().get(&recipient).map(|v| v.get()).unwrap_or(0);
if count > threshold {
debug!(target: "txqueue", "Ignoring transaction {:?} because recipient is banned.", transaction.hash());
return Err(Error::Transaction(TransactionError::RecipientBanned));
}
}
// Check code
if let Action::Create = transaction.action {
let code_hash = transaction.data.sha3();
let count = self.codes_bans.direct().get(&code_hash).map(|v| v.get()).unwrap_or(0);
if count > threshold {
debug!(target: "txqueue", "Ignoring transaction {:?} because code is banned.", transaction.hash());
return Err(Error::Transaction(TransactionError::CodeBanned));
}
}
}
self.queue.add(transaction, TransactionOrigin::External, account_details, gas_estimator)
}
/// Ban the transaction with the given hash.
/// The transaction has to be in the queue.
///
/// Bans the sender and the recipient/code and returns `true` when any ban has reached the threshold.
pub fn ban_transaction(&mut self, hash: &H256) -> bool {
let transaction = self.queue.find(hash);
match transaction {
Some(transaction) => {
let sender = transaction.sender().expect("Transaction is in queue, so the sender is already validated; qed");
// Ban sender
let sender_banned = self.ban_sender(sender);
// Ban recipient and codehash
let recipient_or_code_banned = match transaction.action {
Action::Call(recipient) => {
self.ban_recipient(recipient)
},
Action::Create => {
self.ban_codehash(transaction.data.sha3())
},
};
sender_banned || recipient_or_code_banned
},
None => false,
}
}
/// Ban the given sender.
/// If the ban threshold is reached, all subsequent transactions from this sender will be rejected.
/// Reaching the ban threshold also removes all existing transactions from this sender that are already in the
/// queue.
fn ban_sender(&mut self, address: Address) -> bool {
let count = {
let mut count = self.senders_bans.entry(address).or_insert_with(|| Cell::new(0));
*count.get_mut() = count.get().saturating_add(1);
count.get()
};
match self.ban_threshold {
Threshold::BanAfter(threshold) if count > threshold => {
// Banlist the sender.
// Remove all transactions from the queue.
self.remove_all(address, !U256::zero());
true
},
_ => false
}
}
/// Ban the given recipient.
/// If the ban threshold is reached, all subsequent transactions to this address will be rejected.
/// Returns true if the ban threshold has been reached.
fn ban_recipient(&mut self, address: Address) -> bool {
let count = {
let mut count = self.recipients_bans.entry(address).or_insert_with(|| Cell::new(0));
*count.get_mut() = count.get().saturating_add(1);
count.get()
};
match self.ban_threshold {
// TODO [ToDr] Consider removing other transactions to the same recipient from the queue?
Threshold::BanAfter(threshold) if count > threshold => true,
_ => false
}
}
/// Ban the given codehash.
/// If the ban threshold is reached, all subsequent transactions to contracts with this codehash will be rejected.
/// Returns true if the ban threshold has been reached.
fn ban_codehash(&mut self, code_hash: H256) -> bool {
let mut count = self.codes_bans.entry(code_hash).or_insert_with(|| Cell::new(0));
*count.get_mut() = count.get().saturating_add(1);
match self.ban_threshold {
// TODO [ToDr] Consider removing other transactions with the same code from the queue?
Threshold::BanAfter(threshold) if count.get() > threshold => true,
_ => false,
}
}
}
impl Deref for BanningTransactionQueue {
type Target = TransactionQueue;
fn deref(&self) -> &Self::Target {
&self.queue
}
}
impl DerefMut for BanningTransactionQueue {
fn deref_mut(&mut self) -> &mut Self::Target {
self.queue()
}
}
#[cfg(test)]
mod tests {
use std::time::Duration;
use super::{BanningTransactionQueue, Threshold};
use ethkey::{Random, Generator};
use transaction::{Transaction, SignedTransaction, Action};
use error::{Error, TransactionError};
use client::TransactionImportResult;
use miner::{TransactionQueue, TransactionOrigin, AccountDetails};
use util::{Uint, U256, Address, FromHex, Hashable};
fn queue() -> BanningTransactionQueue {
BanningTransactionQueue::new(TransactionQueue::default(), Threshold::BanAfter(1), Duration::from_secs(180))
}
fn default_account_details(_address: &Address) -> AccountDetails {
AccountDetails {
nonce: U256::zero(),
balance: !U256::zero(),
}
}
fn gas_required(_tx: &SignedTransaction) -> U256 {
0.into()
}
fn transaction(action: Action) -> SignedTransaction {
let keypair = Random.generate().unwrap();
Transaction {
action: action,
value: U256::from(100),
data: "3331600055".from_hex().unwrap(),
gas: U256::from(100_000),
gas_price: U256::from(10),
nonce: U256::from(0),
}.sign(keypair.secret(), None)
}
fn unwrap_err(res: Result<TransactionImportResult, Error>) -> TransactionError {
match res {
Err(Error::Transaction(e)) => e,
Ok(x) => panic!("Expected error, got: Ok({:?})", x),
Err(e) => panic!("Unexpected error type returned by queue: {:?}", e),
}
}
#[test]
fn should_allow_to_borrow_the_queue() {
// given
let tx = transaction(Action::Create);
let mut txq = queue();
// when
txq.queue().add(tx, TransactionOrigin::External, &default_account_details, &gas_required).unwrap();
// then
// should also deref to queue
assert_eq!(txq.status().pending, 1);
}
#[test]
fn should_not_accept_transactions_from_banned_sender() {
// given
let tx = transaction(Action::Create);
let mut txq = queue();
// Banlist once (threshold not reached)
let banlist1 = txq.ban_sender(tx.sender().unwrap());
assert!(!banlist1, "Threshold not reached yet.");
// Insert once
let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
assert_eq!(import1, TransactionImportResult::Current);
// when
let banlist2 = txq.ban_sender(tx.sender().unwrap());
let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);
// then
assert!(banlist2, "Threshold should be reached - banned.");
assert_eq!(unwrap_err(import2), TransactionError::SenderBanned);
// Should also remove the transaction from the queue
assert_eq!(txq.find(&tx.hash()), None);
}
#[test]
fn should_not_accept_transactions_to_banned_recipient() {
// given
let recipient = Address::default();
let tx = transaction(Action::Call(recipient));
let mut txq = queue();
// Banlist once (threshold not reached)
let banlist1 = txq.ban_recipient(recipient);
assert!(!banlist1, "Threshold not reached yet.");
// Insert once
let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
assert_eq!(import1, TransactionImportResult::Current);
// when
let banlist2 = txq.ban_recipient(recipient);
let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);
// then
assert!(banlist2, "Threshold should be reached - banned.");
assert_eq!(unwrap_err(import2), TransactionError::RecipientBanned);
}
#[test]
fn should_not_accept_transactions_with_banned_code() {
// given
let tx = transaction(Action::Create);
let codehash = tx.data.sha3();
let mut txq = queue();
// Banlist once (threshold not reached)
let banlist1 = txq.ban_codehash(codehash);
assert!(!banlist1, "Threshold not reached yet.");
// Insert once
let import1 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required).unwrap();
assert_eq!(import1, TransactionImportResult::Current);
// when
let banlist2 = txq.ban_codehash(codehash);
let import2 = txq.add_with_banlist(tx.clone(), &default_account_details, &gas_required);
// then
assert!(banlist2, "Threshold should be reached - banned.");
assert_eq!(unwrap_err(import2), TransactionError::CodeBanned);
}
}
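For reference, a minimal sketch of how a caller is expected to drive this wrapper from the outside. It reuses the test imports and the `transaction` fixture defined above; the threshold and lifetime values are arbitrary examples, not recommended defaults.

	// Illustrative only: wiring the banlisting queue and reporting an offender.
	fn example_usage() {
		let mut txq = BanningTransactionQueue::new(
			TransactionQueue::default(),
			Threshold::BanAfter(3),    // start rejecting after 3 reported offences
			Duration::from_secs(180),  // bans expire after 3 minutes
		);

		let account_details = |_a: &Address| AccountDetails { nonce: U256::zero(), balance: !U256::zero() };
		let gas_required = |_tx: &SignedTransaction| U256::zero();

		// External transactions are vetted against the banlist on insertion.
		let tx = transaction(Action::Create);
		txq.add_with_banlist(tx.clone(), &account_details, &gas_required).unwrap();

		// A misbehaving transaction can be reported; once the threshold is reached,
		// `ban_transaction` returns true and later submissions are rejected.
		let reached = txq.ban_transaction(&tx.hash());
		assert!(!reached, "a single report does not reach BanAfter(3)");
	}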

View File

@ -21,7 +21,7 @@ use util::*;
use util::using_queue::{UsingQueue, GetAction}; use util::using_queue::{UsingQueue, GetAction};
use account_provider::AccountProvider; use account_provider::AccountProvider;
use views::{BlockView, HeaderView}; use views::{BlockView, HeaderView};
use state::State; use state::{State, CleanupMode};
use client::{MiningBlockChainClient, Executive, Executed, EnvInfo, TransactOptions, BlockID, CallAnalytics}; use client::{MiningBlockChainClient, Executive, Executed, EnvInfo, TransactOptions, BlockID, CallAnalytics};
use executive::contract_address; use executive::contract_address;
use block::{ClosedBlock, SealedBlock, IsBlock, Block}; use block::{ClosedBlock, SealedBlock, IsBlock, Block};
@ -31,6 +31,7 @@ use receipt::{Receipt, RichReceipt};
use spec::Spec; use spec::Spec;
use engines::Engine; use engines::Engine;
use miner::{MinerService, MinerStatus, TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin}; use miner::{MinerService, MinerStatus, TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin};
use miner::banning_queue::{BanningTransactionQueue, Threshold};
use miner::work_notify::WorkPoster; use miner::work_notify::WorkPoster;
use client::TransactionImportResult; use client::TransactionImportResult;
use miner::price_info::PriceInfo; use miner::price_info::PriceInfo;
@ -59,6 +60,22 @@ pub enum GasLimit {
Fixed(U256), Fixed(U256),
} }
/// Transaction queue banning settings.
#[derive(Debug, PartialEq, Clone)]
pub enum Banning {
/// Banning in transaction queue is disabled
Disabled,
/// Banning in transaction queue is enabled
Enabled {
/// Upper limit of transaction processing time before banning.
offend_threshold: Duration,
/// Number of similar offending transactions before banning.
min_offends: u16,
/// Number of seconds the offender is banned for.
ban_duration: Duration,
},
}
/// Configures the behaviour of the miner. /// Configures the behaviour of the miner.
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct MinerOptions { pub struct MinerOptions {
@ -86,6 +103,8 @@ pub struct MinerOptions {
pub enable_resubmission: bool, pub enable_resubmission: bool,
/// Global gas limit for all transaction in the queue except for local and retracted. /// Global gas limit for all transaction in the queue except for local and retracted.
pub tx_queue_gas_limit: GasLimit, pub tx_queue_gas_limit: GasLimit,
/// Banning settings
pub tx_queue_banning: Banning,
} }
impl Default for MinerOptions { impl Default for MinerOptions {
@ -98,11 +117,12 @@ impl Default for MinerOptions {
tx_gas_limit: !U256::zero(), tx_gas_limit: !U256::zero(),
tx_queue_size: 1024, tx_queue_size: 1024,
tx_queue_gas_limit: GasLimit::Auto, tx_queue_gas_limit: GasLimit::Auto,
tx_queue_strategy: PrioritizationStrategy::GasFactorAndGasPrice, tx_queue_strategy: PrioritizationStrategy::GasPriceOnly,
pending_set: PendingSet::AlwaysQueue, pending_set: PendingSet::AlwaysQueue,
reseal_min_period: Duration::from_secs(2), reseal_min_period: Duration::from_secs(2),
work_queue_size: 20, work_queue_size: 20,
enable_resubmission: true, enable_resubmission: true,
tx_queue_banning: Banning::Disabled,
} }
} }
} }
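To make the new `tx_queue_banning` option concrete, a caller opting into banning would build the miner options roughly as below. This is only a sketch: the function name, durations and offence count are placeholder values, not defaults shipped by the miner.

	// Illustrative only: enabling transaction-queue banning via MinerOptions.
	fn options_with_banning() -> MinerOptions {
		use std::time::Duration;

		MinerOptions {
			tx_queue_banning: Banning::Enabled {
				offend_threshold: Duration::from_millis(100), // execution longer than this counts as an offence
				min_offends: 3,                               // offences required before the ban applies
				ban_duration: Duration::from_secs(180),       // how long the offender stays banned
			},
			..Default::default()
		}
	}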
@ -186,7 +206,7 @@ struct SealingWork {
/// Handles preparing work for "work sealing" or seals "internally" if Engine does not require work. /// Handles preparing work for "work sealing" or seals "internally" if Engine does not require work.
pub struct Miner { pub struct Miner {
// NOTE [ToDr] When locking always lock in this order! // NOTE [ToDr] When locking always lock in this order!
transaction_queue: Arc<Mutex<TransactionQueue>>, transaction_queue: Arc<Mutex<BanningTransactionQueue>>,
sealing_work: Mutex<SealingWork>, sealing_work: Mutex<SealingWork>,
next_allowed_reseal: Mutex<Instant>, next_allowed_reseal: Mutex<Instant>,
sealing_block_last_request: Mutex<u64>, sealing_block_last_request: Mutex<u64>,
@ -215,11 +235,18 @@ impl Miner {
GasLimit::Fixed(ref limit) => *limit, GasLimit::Fixed(ref limit) => *limit,
_ => !U256::zero(), _ => !U256::zero(),
}; };
let txq = Arc::new(Mutex::new(TransactionQueue::with_limits(
options.tx_queue_strategy, options.tx_queue_size, gas_limit, options.tx_gas_limit let txq = TransactionQueue::with_limits(options.tx_queue_strategy, options.tx_queue_size, gas_limit, options.tx_gas_limit);
))); let txq = match options.tx_queue_banning {
Banning::Disabled => BanningTransactionQueue::new(txq, Threshold::NeverBan, Duration::from_secs(180)),
Banning::Enabled { ban_duration, min_offends, .. } => BanningTransactionQueue::new(
txq,
Threshold::BanAfter(min_offends),
ban_duration,
),
};
Miner { Miner {
transaction_queue: txq, transaction_queue: Arc::new(Mutex::new(txq)),
next_allowed_reseal: Mutex::new(Instant::now()), next_allowed_reseal: Mutex::new(Instant::now()),
sealing_block_last_request: Mutex::new(0), sealing_block_last_request: Mutex::new(0),
sealing_work: Mutex::new(SealingWork{ sealing_work: Mutex::new(SealingWork{
@ -318,10 +345,31 @@ impl Miner {
let mut invalid_transactions = HashSet::new(); let mut invalid_transactions = HashSet::new();
let mut transactions_to_penalize = HashSet::new(); let mut transactions_to_penalize = HashSet::new();
let block_number = open_block.block().fields().header.number(); let block_number = open_block.block().fields().header.number();
// TODO: push new uncles, too.
// TODO Push new uncles too.
for tx in transactions { for tx in transactions {
let hash = tx.hash(); let hash = tx.hash();
match open_block.push_transaction(tx, None) { let start = Instant::now();
let result = open_block.push_transaction(tx, None);
let took = start.elapsed();
// Check for heavy transactions
match self.options.tx_queue_banning {
Banning::Enabled { ref offend_threshold, .. } if &took > offend_threshold => {
match self.transaction_queue.lock().ban_transaction(&hash) {
true => {
warn!(target: "miner", "Detected heavy transaction. Banning the sender and recipient/code.");
},
false => {
transactions_to_penalize.insert(hash);
debug!(target: "miner", "Detected heavy transaction. Penalizing sender.")
}
}
},
_ => {},
}
match result {
Err(Error::Execution(ExecutionError::BlockGasLimitReached { gas_limit, gas_used, gas })) => { Err(Error::Execution(ExecutionError::BlockGasLimitReached { gas_limit, gas_used, gas })) => {
debug!(target: "miner", "Skipping adding transaction to block because of gas limit: {:?} (limit: {:?}, used: {:?}, gas: {:?})", hash, gas_limit, gas_used, gas); debug!(target: "miner", "Skipping adding transaction to block because of gas limit: {:?} (limit: {:?}, used: {:?}, gas: {:?})", hash, gas_limit, gas_used, gas);
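The heavy-transaction detection above boils down to timing each `push_transaction` call and comparing the elapsed time to the configured `offend_threshold`. A standalone sketch of that pattern, where `run` and the threshold are stand-ins for the real call and for `Banning::Enabled { offend_threshold, .. }`:

	use std::time::{Duration, Instant};

	// Returns true when executing `run` took longer than the allowed threshold.
	fn exceeded_threshold<F: FnOnce()>(run: F, offend_threshold: Duration) -> bool {
		let start = Instant::now();
		run();
		start.elapsed() > offend_threshold
	}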
@ -362,7 +410,7 @@ impl Miner {
{ {
let mut queue = self.transaction_queue.lock(); let mut queue = self.transaction_queue.lock();
for hash in invalid_transactions.into_iter() { for hash in invalid_transactions {
queue.remove_invalid(&hash, &fetch_account); queue.remove_invalid(&hash, &fetch_account);
} }
for hash in transactions_to_penalize { for hash in transactions_to_penalize {
@ -506,7 +554,7 @@ impl Miner {
prepare_new prepare_new
} }
fn add_transactions_to_queue(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, origin: TransactionOrigin, transaction_queue: &mut TransactionQueue) -> fn add_transactions_to_queue(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, origin: TransactionOrigin, transaction_queue: &mut BanningTransactionQueue) ->
Vec<Result<TransactionImportResult, Error>> { Vec<Result<TransactionImportResult, Error>> {
let fetch_account = |a: &Address| AccountDetails { let fetch_account = |a: &Address| AccountDetails {
@ -514,14 +562,25 @@ impl Miner {
balance: chain.latest_balance(a), balance: chain.latest_balance(a),
}; };
let schedule = chain.latest_schedule();
let gas_required = |tx: &SignedTransaction| tx.gas_required(&schedule).into();
transactions.into_iter() transactions.into_iter()
.map(|tx| transaction_queue.add(tx, &fetch_account, origin)) .map(|tx| match origin {
TransactionOrigin::Local | TransactionOrigin::RetractedBlock => {
transaction_queue.add(tx, origin, &fetch_account, &gas_required)
},
TransactionOrigin::External => {
transaction_queue.add_with_banlist(tx, &fetch_account, &gas_required)
}
})
.collect() .collect()
} }
/// Are we allowed to do a non-mandatory reseal? /// Are we allowed to do a non-mandatory reseal?
fn tx_reseal_allowed(&self) -> bool { Instant::now() > *self.next_allowed_reseal.lock() } fn tx_reseal_allowed(&self) -> bool { Instant::now() > *self.next_allowed_reseal.lock() }
#[cfg_attr(feature="dev", allow(wrong_self_convention))]
#[cfg_attr(feature="dev", allow(redundant_closure))]
fn from_pending_block<H, F, G>(&self, latest_block_number: BlockNumber, from_chain: F, map_block: G) -> H fn from_pending_block<H, F, G>(&self, latest_block_number: BlockNumber, from_chain: F, map_block: G) -> H
where F: Fn() -> H, G: Fn(&ClosedBlock) -> H { where F: Fn() -> H, G: Fn(&ClosedBlock) -> H {
let sealing_work = self.sealing_work.lock(); let sealing_work = self.sealing_work.lock();
@ -591,7 +650,7 @@ impl MinerService for Miner {
let needed_balance = t.value + t.gas * t.gas_price; let needed_balance = t.value + t.gas * t.gas_price;
if balance < needed_balance { if balance < needed_balance {
// give the sender a sufficient balance // give the sender a sufficient balance
state.add_balance(&sender, &(needed_balance - balance)); state.add_balance(&sender, &(needed_balance - balance), CleanupMode::NoEmpty);
} }
let options = TransactOptions { tracing: analytics.transaction_tracing, vm_tracing: analytics.vm_tracing, check_nonce: false }; let options = TransactOptions { tracing: analytics.transaction_tracing, vm_tracing: analytics.vm_tracing, check_nonce: false };
let mut ret = try!(Executive::new(&mut state, &env_info, &*self.engine, chain.vm_factory()).transact(t, options)); let mut ret = try!(Executive::new(&mut state, &env_info, &*self.engine, chain.vm_factory()).transact(t, options));
@ -876,6 +935,8 @@ impl MinerService for Miner {
} }
}, },
logs: receipt.logs.clone(), logs: receipt.logs.clone(),
log_bloom: receipt.log_bloom,
state_root: receipt.state_root,
} }
}) })
} }
@ -885,7 +946,7 @@ impl MinerService for Miner {
fn pending_receipts(&self, best_block: BlockNumber) -> BTreeMap<H256, Receipt> { fn pending_receipts(&self, best_block: BlockNumber) -> BTreeMap<H256, Receipt> {
self.from_pending_block( self.from_pending_block(
best_block, best_block,
|| BTreeMap::new(), BTreeMap::new,
|pending| { |pending| {
let hashes = pending.transactions() let hashes = pending.transactions()
.iter() .iter()
@ -1019,7 +1080,7 @@ impl MinerService for Miner {
tx.sender().expect("Transaction is in block, so sender has to be defined.") tx.sender().expect("Transaction is in block, so sender has to be defined.")
}) })
.collect::<HashSet<Address>>(); .collect::<HashSet<Address>>();
for sender in to_remove.into_iter() { for sender in to_remove {
transaction_queue.remove_all(sender, chain.latest_nonce(&sender)); transaction_queue.remove_all(sender, chain.latest_nonce(&sender));
} }
}); });
@ -1097,6 +1158,7 @@ mod tests {
pending_set: PendingSet::AlwaysSealing, pending_set: PendingSet::AlwaysSealing,
work_queue_size: 5, work_queue_size: 5,
enable_resubmission: true, enable_resubmission: true,
tx_queue_banning: Banning::Disabled,
}, },
GasPricer::new_fixed(0u64.into()), GasPricer::new_fixed(0u64.into()),
&Spec::new_test(), &Spec::new_test(),
@ -1113,7 +1175,7 @@ mod tests {
gas: U256::from(100_000), gas: U256::from(100_000),
gas_price: U256::zero(), gas_price: U256::zero(),
nonce: U256::zero(), nonce: U256::zero(),
}.sign(keypair.secret()) }.sign(keypair.secret(), None)
} }
#[test] #[test]
@ -1189,7 +1251,7 @@ mod tests {
#[test] #[test]
fn internal_seals_without_work() { fn internal_seals_without_work() {
let miner = Miner::with_spec(&Spec::new_test_instant()); let miner = Miner::with_spec(&Spec::new_instant());
let c = generate_dummy_client(2); let c = generate_dummy_client(2);
let client = c.reference().as_ref(); let client = c.reference().as_ref();

View File

@ -44,11 +44,12 @@
mod miner; mod miner;
mod external; mod external;
mod transaction_queue; mod transaction_queue;
mod banning_queue;
mod work_notify; mod work_notify;
mod price_info; mod price_info;
pub use self::transaction_queue::{TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin}; pub use self::transaction_queue::{TransactionQueue, PrioritizationStrategy, AccountDetails, TransactionOrigin};
pub use self::miner::{Miner, MinerOptions, PendingSet, GasPricer, GasPriceCalibratorOptions, GasLimit}; pub use self::miner::{Miner, MinerOptions, Banning, PendingSet, GasPricer, GasPriceCalibratorOptions, GasLimit};
pub use self::external::{ExternalMiner, ExternalMinerService}; pub use self::external::{ExternalMiner, ExternalMinerService};
pub use client::TransactionImportResult; pub use client::TransactionImportResult;
@ -157,7 +158,7 @@ pub trait MinerService : Send + Sync {
fn is_sealing(&self) -> bool; fn is_sealing(&self) -> bool;
/// Suggested gas price. /// Suggested gas price.
fn sensible_gas_price(&self) -> U256 { 20000000000u64.into() } fn sensible_gas_price(&self) -> U256;
/// Suggested gas limit. /// Suggested gas limit.
fn sensible_gas_limit(&self) -> U256 { 21000.into() } fn sensible_gas_limit(&self) -> U256 { 21000.into() }

View File

@ -42,16 +42,17 @@
//! let t2 = Transaction { action: Action::Create, value: U256::from(100), data: "3331600055".from_hex().unwrap(), //! let t2 = Transaction { action: Action::Create, value: U256::from(100), data: "3331600055".from_hex().unwrap(),
//! gas: U256::from(100_000), gas_price: U256::one(), nonce: U256::from(11) }; //! gas: U256::from(100_000), gas_price: U256::one(), nonce: U256::from(11) };
//! //!
//! let st1 = t1.sign(&key.secret()); //! let st1 = t1.sign(&key.secret(), None);
//! let st2 = t2.sign(&key.secret()); //! let st2 = t2.sign(&key.secret(), None);
//! let default_account_details = |_a: &Address| AccountDetails { //! let default_account_details = |_a: &Address| AccountDetails {
//! nonce: U256::from(10), //! nonce: U256::from(10),
//! balance: U256::from(1_000_000), //! balance: U256::from(1_000_000),
//! }; //! };
//! let gas_estimator = |_tx: &SignedTransaction| 2.into();
//! //!
//! let mut txq = TransactionQueue::default(); //! let mut txq = TransactionQueue::default();
//! txq.add(st2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); //! txq.add(st2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
//! txq.add(st1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); //! txq.add(st1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
//! //!
//! // Check status //! // Check status
//! assert_eq!(txq.status().pending, 2); //! assert_eq!(txq.status().pending, 2);
@ -109,6 +110,7 @@ impl PartialOrd for TransactionOrigin {
} }
impl Ord for TransactionOrigin { impl Ord for TransactionOrigin {
#[cfg_attr(feature="dev", allow(match_same_arms))]
fn cmp(&self, other: &TransactionOrigin) -> Ordering { fn cmp(&self, other: &TransactionOrigin) -> Ordering {
if *other == *self { if *other == *self {
return Ordering::Equal; return Ordering::Equal;
@ -446,6 +448,7 @@ pub struct AccountDetails {
const GAS_LIMIT_HYSTERESIS: usize = 10; // (100/GAS_LIMIT_HYSTERESIS) % const GAS_LIMIT_HYSTERESIS: usize = 10; // (100/GAS_LIMIT_HYSTERESIS) %
/// Describes the strategy used to prioritize transactions in the queue. /// Describes the strategy used to prioritize transactions in the queue.
#[cfg_attr(feature="dev", allow(enum_variant_names))]
#[derive(Debug, Copy, Clone, PartialEq, Eq)] #[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PrioritizationStrategy { pub enum PrioritizationStrategy {
/// Use only gas price. Disregards the actual computation cost of the transaction. /// Use only gas price. Disregards the actual computation cost of the transaction.
@ -592,9 +595,20 @@ impl TransactionQueue {
} }
} }
/// Add signed transaction to queue to be verified and imported /// Add signed transaction to queue to be verified and imported.
pub fn add<T>(&mut self, tx: SignedTransaction, fetch_account: &T, origin: TransactionOrigin) -> Result<TransactionImportResult, Error> ///
where T: Fn(&Address) -> AccountDetails { /// NOTE fetch_account and gas_estimator should be cheap to compute
/// otherwise it might open up an attack vector.
pub fn add<F, G>(
&mut self,
tx: SignedTransaction,
origin: TransactionOrigin,
fetch_account: &F,
gas_estimator: &G,
) -> Result<TransactionImportResult, Error> where
F: Fn(&Address) -> AccountDetails,
G: Fn(&SignedTransaction) -> U256,
{
if tx.gas_price < self.minimal_gas_price && origin != TransactionOrigin::Local { if tx.gas_price < self.minimal_gas_price && origin != TransactionOrigin::Local {
trace!(target: "txqueue", trace!(target: "txqueue",
@ -625,8 +639,6 @@ impl TransactionQueue {
})); }));
} }
try!(tx.check_low_s());
if tx.gas > self.gas_limit || tx.gas > self.tx_gas_limit { if tx.gas > self.gas_limit || tx.gas > self.tx_gas_limit {
trace!(target: "txqueue", trace!(target: "txqueue",
"Dropping transaction above gas limit: {:?} ({} > min({}, {}))", "Dropping transaction above gas limit: {:?} ({} > min({}, {}))",
@ -642,6 +654,24 @@ impl TransactionQueue {
})); }));
} }
let minimal_gas = gas_estimator(&tx);
if tx.gas < minimal_gas {
trace!(target: "txqueue",
"Dropping transaction with insufficient gas: {:?} ({} < {})",
tx.hash(),
tx.gas,
minimal_gas,
);
return Err(Error::Transaction(TransactionError::InsufficientGas {
minimal: minimal_gas,
got: tx.gas,
}));
}
// Verify signature
try!(tx.check_low_s());
let vtx = try!(VerifiedTransaction::new(tx, origin)); let vtx = try!(VerifiedTransaction::new(tx, origin));
let client_account = fetch_account(&vtx.sender()); let client_account = fetch_account(&vtx.sender());
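To make the new `gas_estimator` parameter of `add` concrete, here are a couple of closures a caller could pass. These are illustrative assumptions: the zero estimator mirrors the unit-test fixture below, and the 21,000 figure is only the conventional base transaction cost, not something this queue enforces.

	// Accept anything (what the unit tests use).
	let permissive = |_tx: &SignedTransaction| U256::zero();
	// Reject transactions declaring less than a flat base cost.
	let flat_base = |_tx: &SignedTransaction| U256::from(21_000);

	// The miner instead derives the estimate from the live schedule, as in miner.rs:
	//     let gas_required = |tx: &SignedTransaction| tx.gas_required(&schedule).into();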
@ -904,16 +934,6 @@ impl TransactionQueue {
let nonce = tx.nonce(); let nonce = tx.nonce();
let hash = tx.hash(); let hash = tx.hash();
{
// Rough size sanity check
let gas = &tx.transaction.gas;
if U256::from(tx.transaction.data.len()) > *gas {
// Droping transaction
trace!(target: "txqueue", "Dropping oversized transaction: {:?} (gas: {} < size {})", hash, gas, tx.transaction.data.len());
return Err(TransactionError::LimitReached);
}
}
// The transaction might be old, let's check that. // The transaction might be old, let's check that.
// This has to be the first test, otherwise calculating // This has to be the first test, otherwise calculating
// nonce height would result in overflow. // nonce height would result in overflow.
@ -1084,12 +1104,12 @@ mod test {
fn new_tx(nonce: U256, gas_price: U256) -> SignedTransaction { fn new_tx(nonce: U256, gas_price: U256) -> SignedTransaction {
let keypair = Random.generate().unwrap(); let keypair = Random.generate().unwrap();
new_unsigned_tx(nonce, default_gas_val(), gas_price).sign(keypair.secret()) new_unsigned_tx(nonce, default_gas_val(), gas_price).sign(keypair.secret(), None)
} }
fn new_tx_with_gas(gas: U256, gas_price: U256) -> SignedTransaction { fn new_tx_with_gas(gas: U256, gas_price: U256) -> SignedTransaction {
let keypair = Random.generate().unwrap(); let keypair = Random.generate().unwrap();
new_unsigned_tx(default_nonce(), gas, gas_price).sign(keypair.secret()) new_unsigned_tx(default_nonce(), gas, gas_price).sign(keypair.secret(), None)
} }
fn new_tx_default() -> SignedTransaction { fn new_tx_default() -> SignedTransaction {
@ -1103,13 +1123,17 @@ mod test {
} }
} }
fn gas_estimator(_tx: &SignedTransaction) -> U256 {
U256::zero()
}
fn new_tx_pair(nonce: U256, gas_price: U256, nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) { fn new_tx_pair(nonce: U256, gas_price: U256, nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) {
let tx1 = new_unsigned_tx(nonce, default_gas_val(), gas_price); let tx1 = new_unsigned_tx(nonce, default_gas_val(), gas_price);
let tx2 = new_unsigned_tx(nonce + nonce_increment, default_gas_val(), gas_price + gas_price_increment); let tx2 = new_unsigned_tx(nonce + nonce_increment, default_gas_val(), gas_price + gas_price_increment);
let keypair = Random.generate().unwrap(); let keypair = Random.generate().unwrap();
let secret = &keypair.secret(); let secret = &keypair.secret();
(tx1.sign(secret), tx2.sign(secret)) (tx1.sign(secret, None), tx2.sign(secret, None))
} }
/// Returns two consecutive transactions, both with increased gas price /// Returns two consecutive transactions, both with increased gas price
@ -1120,7 +1144,7 @@ mod test {
let keypair = Random.generate().unwrap(); let keypair = Random.generate().unwrap();
let secret = &keypair.secret(); let secret = &keypair.secret();
(tx1.sign(secret), tx2.sign(secret)) (tx1.sign(secret, None), tx2.sign(secret, None))
} }
fn new_tx_pair_default(nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) { fn new_tx_pair_default(nonce_increment: U256, gas_price_increment: U256) -> (SignedTransaction, SignedTransaction) {
@ -1154,14 +1178,14 @@ mod test {
let (tx1, tx2) = new_tx_pair(123.into(), 1.into(), 1.into(), 0.into()); let (tx1, tx2) = new_tx_pair(123.into(), 1.into(), 1.into(), 0.into());
let sender = tx1.sender().unwrap(); let sender = tx1.sender().unwrap();
let nonce = tx1.nonce; let nonce = tx1.nonce;
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
assert_eq!(txq.last_nonce(&sender), Some(nonce + 1.into())); assert_eq!(txq.last_nonce(&sender), Some(nonce + 1.into()));
// when // when
let tx = new_tx(123.into(), 1.into()); let tx = new_tx(123.into(), 1.into());
let res = txq.add(tx.clone(), &default_account_details, TransactionOrigin::External); let res = txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
// No longer the case as we don't even consider a transaction that isn't above a full // No longer the case as we don't even consider a transaction that isn't above a full
@ -1317,12 +1341,12 @@ mod test {
!U256::zero() }; !U256::zero() };
// First insert one transaction to future // First insert one transaction to future
let res = txq.add(tx, &prev_nonce, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &prev_nonce, &gas_estimator);
assert_eq!(res.unwrap(), TransactionImportResult::Future); assert_eq!(res.unwrap(), TransactionImportResult::Future);
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
// now import second transaction to current // now import second transaction to current
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External); let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// and then there should be only one transaction in current (the one with higher gas_price) // and then there should be only one transaction in current (the one with higher gas_price)
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1342,12 +1366,12 @@ mod test {
!U256::zero() }; !U256::zero() };
// First insert one transaction to future // First insert one transaction to future
let res = txq.add(tx.clone(), &prev_nonce, TransactionOrigin::External); let res = txq.add(tx.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator);
assert_eq!(res.unwrap(), TransactionImportResult::Future); assert_eq!(res.unwrap(), TransactionImportResult::Future);
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
// now import second transaction to current // now import second transaction to current
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External); let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1366,7 +1390,7 @@ mod test {
let tx = new_tx_default(); let tx = new_tx_default();
// when // when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1385,10 +1409,10 @@ mod test {
txq.set_minimal_gas_price(15.into()); txq.set_minimal_gas_price(15.into());
// when // when
let res1 = txq.add(tx1, &default_account_details, TransactionOrigin::External); let res1 = txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, &default_account_details, TransactionOrigin::External); let res2 = txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res3 = txq.add(tx3, &default_account_details, TransactionOrigin::External); let res3 = txq.add(tx3, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res4 = txq.add(tx4, &default_account_details, TransactionOrigin::External); let res4 = txq.add(tx4, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res1.unwrap(), TransactionImportResult::Current); assert_eq!(res1.unwrap(), TransactionImportResult::Current);
@ -1419,10 +1443,10 @@ mod test {
txq.set_minimal_gas_price(15.into()); txq.set_minimal_gas_price(15.into());
// when // when
let res1 = txq.add(tx1, &default_account_details, TransactionOrigin::External); let res1 = txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, &default_account_details, TransactionOrigin::External); let res2 = txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res3 = txq.add(tx3, &default_account_details, TransactionOrigin::External); let res3 = txq.add(tx3, TransactionOrigin::External, &default_account_details, &gas_estimator);
let res4 = txq.add(tx4, &default_account_details, TransactionOrigin::External); let res4 = txq.add(tx4, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res1.unwrap(), TransactionImportResult::Current); assert_eq!(res1.unwrap(), TransactionImportResult::Current);
@ -1465,7 +1489,7 @@ mod test {
txq.set_gas_limit(limit); txq.set_gas_limit(limit);
// when // when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::GasLimitExceeded { assert_eq!(unwrap_tx_err(res), TransactionError::GasLimitExceeded {
@ -1489,7 +1513,7 @@ mod test {
}; };
// when // when
let res = txq.add(tx, &account, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &account, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientBalance { assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientBalance {
@ -1509,7 +1533,7 @@ mod test {
txq.set_minimal_gas_price(tx.gas_price + U256::one()); txq.set_minimal_gas_price(tx.gas_price + U256::one());
// when // when
let res = txq.add(tx, &default_account_details, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice { assert_eq!(unwrap_tx_err(res), TransactionError::InsufficientGasPrice {
@ -1529,7 +1553,7 @@ mod test {
txq.set_minimal_gas_price(tx.gas_price + U256::one()); txq.set_minimal_gas_price(tx.gas_price + U256::one());
// when // when
let res = txq.add(tx, &default_account_details, TransactionOrigin::Local); let res = txq.add(tx, TransactionOrigin::Local, &default_account_details, &gas_estimator);
// then // then
assert_eq!(res.unwrap(), TransactionImportResult::Current); assert_eq!(res.unwrap(), TransactionImportResult::Current);
@ -1559,7 +1583,7 @@ mod test {
rlp::decode(s.as_raw()) rlp::decode(s.as_raw())
}; };
// when // when
let res = txq.add(stx, &default_account_details, TransactionOrigin::External); let res = txq.add(stx, TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
assert!(res.is_err()); assert!(res.is_err());
@ -1573,8 +1597,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1593,9 +1617,9 @@ mod test {
// when // when
// first insert the one with higher gas price // first insert the one with higher gas price
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then the one with lower gas price, but local // then the one with lower gas price, but local
txq.add(tx.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1614,9 +1638,9 @@ mod test {
// when // when
// first insert local one with higher gas price // first insert local one with higher gas price
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then the one with lower gas price, but from retracted block // then the one with lower gas price, but from retracted block
txq.add(tx.clone(), &default_account_details, TransactionOrigin::RetractedBlock).unwrap(); txq.add(tx.clone(), TransactionOrigin::RetractedBlock, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1632,8 +1656,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.top_transactions(); let top = txq.top_transactions();
@ -1652,10 +1676,10 @@ mod test {
let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into()); let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into());
// insert everything // insert everything
txq.add(txa.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(txa.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(txb.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(txb.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx1.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx2.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 4); assert_eq!(txq.status().future, 4);
@ -1681,10 +1705,10 @@ mod test {
let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into()); let (tx1, tx2) = new_tx_pair_with_gas_price_increment(3.into());
// insert everything // insert everything
txq.add(txa.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(txa.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(txb.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(txb.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let top = txq.top_transactions(); let top = txq.top_transactions();
assert_eq!(top[0], tx1); assert_eq!(top[0], tx1);
@ -1713,8 +1737,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// when // when
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let top = txq.pending_hashes(); let top = txq.pending_hashes();
@ -1731,8 +1755,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(2.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(2.into(), 0.into());
// when // when
let res1 = txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); let res1 = txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let res2 = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); let res2 = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
assert_eq!(res1, TransactionImportResult::Current); assert_eq!(res1, TransactionImportResult::Current);
@ -1755,8 +1779,8 @@ mod test {
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
txq.add(tx2.clone(), &prev_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 2); assert_eq!(txq.status().future, 2);
// when // when
@ -1774,17 +1798,17 @@ mod test {
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let kp = Random.generate().unwrap(); let kp = Random.generate().unwrap();
let secret = kp.secret(); let secret = kp.secret();
let tx = new_unsigned_tx(123.into(), default_gas_val(), 1.into()).sign(secret); let tx = new_unsigned_tx(123.into(), default_gas_val(), 1.into()).sign(secret, None);
let tx1 = new_unsigned_tx(124.into(), default_gas_val(), 1.into()).sign(secret); let tx1 = new_unsigned_tx(124.into(), default_gas_val(), 1.into()).sign(secret, None);
let tx2 = new_unsigned_tx(125.into(), default_gas_val(), 1.into()).sign(secret); let tx2 = new_unsigned_tx(125.into(), default_gas_val(), 1.into()).sign(secret, None);
txq.add(tx, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 1); assert_eq!(txq.status().pending, 1);
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
// when // when
txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1800,8 +1824,8 @@ mod test {
// given // given
let mut txq2 = TransactionQueue::default(); let mut txq2 = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(3.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(3.into(), 0.into());
txq2.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq2.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq2.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq2.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq2.status().pending, 1); assert_eq!(txq2.status().pending, 1);
assert_eq!(txq2.status().future, 1); assert_eq!(txq2.status().future, 1);
@ -1822,10 +1846,10 @@ mod test {
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx3 = new_tx_default(); let tx3 = new_tx_default();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 3); assert_eq!(txq.status().pending, 3);
// when // when
@ -1844,8 +1868,8 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
// add // add
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
let stats = txq.status(); let stats = txq.status();
assert_eq!(stats.pending, 2); assert_eq!(stats.pending, 2);
@ -1864,11 +1888,11 @@ mod test {
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let sender = tx.sender().unwrap(); let sender = tx.sender().unwrap();
let nonce = tx.nonce; let nonce = tx.nonce;
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 1); assert_eq!(txq.status().pending, 1);
// when // when
let res = txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External); let res = txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator);
// then // then
let t = txq.top_transactions(); let t = txq.top_transactions();
@ -1885,14 +1909,14 @@ mod test {
txq.current.set_limit(10); txq.current.set_limit(10);
let (tx1, tx2) = new_tx_pair_default(4.into(), 1.into()); let (tx1, tx2) = new_tx_pair_default(4.into(), 1.into());
let (tx3, tx4) = new_tx_pair_default(4.into(), 2.into()); let (tx3, tx4) = new_tx_pair_default(4.into(), 2.into());
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
// when // when
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx4.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
@ -1903,11 +1927,11 @@ mod test {
let mut txq = TransactionQueue::with_limits(PrioritizationStrategy::GasPriceOnly, 100, default_gas_val() * U256::from(2), !U256::zero()); let mut txq = TransactionQueue::with_limits(PrioritizationStrategy::GasPriceOnly, 100, default_gas_val() * U256::from(2), !U256::zero());
let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1)); let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1));
let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2)); let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2));
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// limited by gas // limited by gas
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::External).unwrap_err(); txq.add(tx4.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap_err();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
} }
@ -1917,13 +1941,13 @@ mod test {
let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1)); let (tx1, tx2) = new_tx_pair_default(U256::from(1), U256::from(1));
let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2)); let (tx3, tx4) = new_tx_pair_default(U256::from(1), U256::from(2));
let (tx5, tx6) = new_tx_pair_default(U256::from(1), U256::from(2)); let (tx5, tx6) = new_tx_pair_default(U256::from(1), U256::from(2));
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx1.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx2.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx5.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx5.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// Not accepted because of limit // Not accepted because of limit
txq.add(tx6.clone(), &default_account_details, TransactionOrigin::External).unwrap_err(); txq.add(tx6.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap_err();
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx3.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx4.clone(), &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx4.clone(), TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 4); assert_eq!(txq.status().pending, 4);
} }
@ -1935,7 +1959,7 @@ mod test {
let fetch_last_nonce = |_a: &Address| AccountDetails { nonce: last_nonce, balance: !U256::zero() }; let fetch_last_nonce = |_a: &Address| AccountDetails { nonce: last_nonce, balance: !U256::zero() };
// when // when
let res = txq.add(tx, &fetch_last_nonce, TransactionOrigin::External); let res = txq.add(tx, TransactionOrigin::External, &fetch_last_nonce, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::Old); assert_eq!(unwrap_tx_err(res), TransactionError::Old);
@ -1951,12 +1975,12 @@ mod test {
balance: !U256::zero() }; balance: !U256::zero() };
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (_tx1, tx2) = new_tx_pair_default(1.into(), 0.into()); let (_tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
assert_eq!(txq.status().pending, 0); assert_eq!(txq.status().pending, 0);
// when // when
let res = txq.add(tx2.clone(), &nonce, TransactionOrigin::External); let res = txq.add(tx2.clone(), TransactionOrigin::External, &nonce, &gas_estimator);
// then // then
assert_eq!(unwrap_tx_err(res), TransactionError::AlreadyImported); assert_eq!(unwrap_tx_err(res), TransactionError::AlreadyImported);
@ -1970,15 +1994,15 @@ mod test {
// given // given
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 2); assert_eq!(txq.status().pending, 2);
// when // when
txq.remove_invalid(&tx1.hash(), &default_account_details); txq.remove_invalid(&tx1.hash(), &default_account_details);
assert_eq!(txq.status().pending, 0); assert_eq!(txq.status().pending, 0);
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx1.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -1992,10 +2016,10 @@ mod test {
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx, tx2) = new_tx_pair_default(1.into(), 0.into());
let tx3 = new_tx_default(); let tx3 = new_tx_default();
txq.add(tx2.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx3.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx3.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx.clone(), &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx.clone(), TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().pending, 3); assert_eq!(txq.status().pending, 3);
// when // when
@ -2014,16 +2038,16 @@ mod test {
// given // given
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let keypair = Random.generate().unwrap(); let keypair = Random.generate().unwrap();
let tx = new_unsigned_tx(123.into(), default_gas_val(), 1.into()).sign(keypair.secret()); let tx = new_unsigned_tx(123.into(), default_gas_val(), 1.into()).sign(keypair.secret(), None);
let tx2 = { let tx2 = {
let mut tx2 = (*tx).clone(); let mut tx2 = (*tx).clone();
tx2.gas_price = U256::from(200); tx2.gas_price = U256::from(200);
tx2.sign(keypair.secret()) tx2.sign(keypair.secret(), None)
}; };
// when // when
txq.add(tx, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -2037,23 +2061,23 @@ mod test {
// given // given
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let keypair = Random.generate().unwrap(); let keypair = Random.generate().unwrap();
let tx0 = new_unsigned_tx(123.into(), default_gas_val(), 1.into()).sign(keypair.secret()); let tx0 = new_unsigned_tx(123.into(), default_gas_val(), 1.into()).sign(keypair.secret(), None);
let tx1 = { let tx1 = {
let mut tx1 = (*tx0).clone(); let mut tx1 = (*tx0).clone();
tx1.nonce = U256::from(124); tx1.nonce = U256::from(124);
tx1.sign(keypair.secret()) tx1.sign(keypair.secret(), None)
}; };
let tx2 = { let tx2 = {
let mut tx2 = (*tx1).clone(); let mut tx2 = (*tx1).clone();
tx2.gas_price = U256::from(200); tx2.gas_price = U256::from(200);
tx2.sign(keypair.secret()) tx2.sign(keypair.secret(), None)
}; };
// when // when
txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 1); assert_eq!(txq.status().future, 1);
txq.add(tx0, &default_account_details, TransactionOrigin::External).unwrap(); txq.add(tx0, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
// then // then
let stats = txq.status(); let stats = txq.status();
@ -2071,8 +2095,8 @@ mod test {
!U256::zero() }; !U256::zero() };
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
txq.add(tx1.clone(), &previous_nonce, TransactionOrigin::External).unwrap(); txq.add(tx1.clone(), TransactionOrigin::External, &previous_nonce, &gas_estimator).unwrap();
txq.add(tx2, &previous_nonce, TransactionOrigin::External).unwrap(); txq.add(tx2, TransactionOrigin::External, &previous_nonce, &gas_estimator).unwrap();
assert_eq!(txq.status().future, 2); assert_eq!(txq.status().future, 2);
// when // when
@ -2103,7 +2127,7 @@ mod test {
let details = |_a: &Address| AccountDetails { nonce: nonce, balance: !U256::zero() }; let details = |_a: &Address| AccountDetails { nonce: nonce, balance: !U256::zero() };
// when // when
txq.add(tx, &details, TransactionOrigin::External).unwrap(); txq.add(tx, TransactionOrigin::External, &details, &gas_estimator).unwrap();
// then // then
assert_eq!(txq.last_nonce(&from), Some(nonce)); assert_eq!(txq.last_nonce(&from), Some(nonce));
@ -2118,7 +2142,7 @@ mod test {
let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() }; let details1 = |_a: &Address| AccountDetails { nonce: nonce1, balance: !U256::zero() };
// Insert first transaction // Insert first transaction
txq.add(tx1, &details1, TransactionOrigin::External).unwrap(); txq.add(tx1, TransactionOrigin::External, &details1, &gas_estimator).unwrap();
// when // when
txq.remove_all(tx2.sender().unwrap(), nonce2 + U256::one()); txq.remove_all(tx2.sender().unwrap(), nonce2 + U256::one());
@ -2138,9 +2162,9 @@ mod test {
// when // when
// Insert first transaction // Insert first transaction
assert_eq!(txq.add(tx1, &details1, TransactionOrigin::External).unwrap(), TransactionImportResult::Current); assert_eq!(txq.add(tx1, TransactionOrigin::External, &details1, &gas_estimator).unwrap(), TransactionImportResult::Current);
// Second should go to future // Second should go to future
assert_eq!(txq.add(tx2, &details1, TransactionOrigin::External).unwrap(), TransactionImportResult::Future); assert_eq!(txq.add(tx2, TransactionOrigin::External, &details1, &gas_estimator).unwrap(), TransactionImportResult::Future);
// Now block is imported // Now block is imported
txq.remove_all(sender, nonce2 - U256::from(1)); txq.remove_all(sender, nonce2 - U256::from(1));
// tx2 should not be promoted to current // tx2 should not be promoted to current
@ -2159,9 +2183,9 @@ mod test {
assert_eq!(txq.has_local_pending_transactions(), false); assert_eq!(txq.has_local_pending_transactions(), false);
// when // when
assert_eq!(txq.add(tx1, &default_account_details, TransactionOrigin::External).unwrap(), TransactionImportResult::Current); assert_eq!(txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap(), TransactionImportResult::Current);
assert_eq!(txq.has_local_pending_transactions(), false); assert_eq!(txq.has_local_pending_transactions(), false);
assert_eq!(txq.add(tx2, &default_account_details, TransactionOrigin::Local).unwrap(), TransactionImportResult::Current); assert_eq!(txq.add(tx2, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap(), TransactionImportResult::Current);
// then // then
assert_eq!(txq.has_local_pending_transactions(), true); assert_eq!(txq.has_local_pending_transactions(), true);
@ -2176,8 +2200,8 @@ mod test {
default_account_details(a).balance }; default_account_details(a).balance };
// when // when
assert_eq!(txq.add(tx2, &prev_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Future); assert_eq!(txq.add(tx2, TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap(), TransactionImportResult::Future);
assert_eq!(txq.add(tx1.clone(), &prev_nonce, TransactionOrigin::External).unwrap(), TransactionImportResult::Future); assert_eq!(txq.add(tx1.clone(), TransactionOrigin::External, &prev_nonce, &gas_estimator).unwrap(), TransactionImportResult::Future);
// then // then
assert_eq!(txq.future.by_priority.len(), 1); assert_eq!(txq.future.by_priority.len(), 1);
@ -2199,17 +2223,17 @@ mod test {
let tx3 = new_unsigned_tx(nonce + 2.into(), gas, 1.into()); let tx3 = new_unsigned_tx(nonce + 2.into(), gas, 1.into());
(tx.sign(secret), tx2.sign(secret), tx2_2.sign(secret), tx3.sign(secret)) (tx.sign(secret, None), tx2.sign(secret, None), tx2_2.sign(secret, None), tx3.sign(secret, None))
}; };
let sender = tx1.sender().unwrap(); let sender = tx1.sender().unwrap();
txq.add(tx1, &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx1, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx2, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3, &default_account_details, TransactionOrigin::Local).unwrap(); txq.add(tx3, TransactionOrigin::Local, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.future.by_priority.len(), 0); assert_eq!(txq.future.by_priority.len(), 0);
assert_eq!(txq.current.by_priority.len(), 3); assert_eq!(txq.current.by_priority.len(), 3);
// when // when
let res = txq.add(tx2_2, &default_account_details, TransactionOrigin::Local); let res = txq.add(tx2_2, TransactionOrigin::Local, &default_account_details, &gas_estimator);
// then // then
assert_eq!(txq.last_nonce(&sender).unwrap(), 125.into()); assert_eq!(txq.last_nonce(&sender).unwrap(), 125.into());
@ -2217,4 +2241,24 @@ mod test {
assert_eq!(txq.current.by_priority.len(), 3); assert_eq!(txq.current.by_priority.len(), 3);
} }
#[test]
fn should_reject_transactions_below_base_gas() {
// given
let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
let high_gas = |_: &SignedTransaction| 100_001.into();
// when
let res1 = txq.add(tx1, TransactionOrigin::Local, &default_account_details, &gas_estimator);
let res2 = txq.add(tx2, TransactionOrigin::Local, &default_account_details, &high_gas);
// then
assert_eq!(res1.unwrap(), TransactionImportResult::Current);
assert_eq!(unwrap_tx_err(res2), TransactionError::InsufficientGas {
minimal: 100_001.into(),
got: 100_000.into(),
});
}
} }

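A minimal, self-contained sketch (hypothetical names, not the queue's real types) of the rule the new should_reject_transactions_below_base_gas test exercises: a per-transaction gas estimate is compared against the transaction's gas limit, and the import is rejected with InsufficientGas when the limit falls short.

#[derive(Debug, PartialEq)]
enum TransactionError {
	InsufficientGas { minimal: u64, got: u64 },
}

fn check_gas<F: Fn(u64) -> u64>(tx_gas: u64, estimator: F) -> Result<(), TransactionError> {
	// the estimator returns the minimal gas the transaction is expected to need
	let minimal = estimator(tx_gas);
	if tx_gas < minimal {
		return Err(TransactionError::InsufficientGas { minimal: minimal, got: tx_gas });
	}
	Ok(())
}

fn main() {
	// mirrors the numbers in the test: a 100_000 limit against a 100_001 estimate is rejected.
	assert!(check_gas(100_000, |_| 100_000).is_ok());
	assert_eq!(
		check_gas(100_000, |_| 100_001),
		Err(TransactionError::InsufficientGas { minimal: 100_001, got: 100_000 })
	);
}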

@ -89,7 +89,7 @@ impl From<ethjson::blockchain::Account> for PodAccount {
let key: U256 = key.into(); let key: U256 = key.into();
let value: U256 = value.into(); let value: U256 = value.into();
(H256::from(key), H256::from(value)) (H256::from(key), H256::from(value))
}).collect() }).collect(),
} }
} }
} }
@ -99,8 +99,12 @@ impl From<ethjson::spec::Account> for PodAccount {
PodAccount { PodAccount {
balance: a.balance.map_or_else(U256::zero, Into::into), balance: a.balance.map_or_else(U256::zero, Into::into),
nonce: a.nonce.map_or_else(U256::zero, Into::into), nonce: a.nonce.map_or_else(U256::zero, Into::into),
code: a.code.map(Into::into).or_else(|| Some(Vec::new())), code: Some(a.code.map_or_else(Vec::new, Into::into)),
storage: BTreeMap::new() storage: a.storage.map_or_else(BTreeMap::new, |s| s.into_iter().map(|(key, value)| {
let key: U256 = key.into();
let value: U256 = value.into();
(H256::from(key), H256::from(value))
}).collect()),
} }
} }
} }
@ -112,7 +116,7 @@ impl fmt::Display for PodAccount {
self.nonce, self.nonce,
self.code.as_ref().map_or(0, |c| c.len()), self.code.as_ref().map_or(0, |c| c.len()),
self.code.as_ref().map_or_else(H256::new, |c| c.sha3()), self.code.as_ref().map_or_else(H256::new, |c| c.sha3()),
self.storage.len() self.storage.len(),
) )
} }
} }


@ -87,7 +87,7 @@ impl ClientService {
db_config.set_cache(::db::COL_STATE, size); db_config.set_cache(::db::COL_STATE, size);
} }
db_config.compaction = config.db_compaction.compaction_profile(&client_path); db_config.compaction = config.db_compaction.compaction_profile(client_path);
db_config.wal = config.db_wal; db_config.wal = config.db_wal;
let pruning = config.pruning; let pruning = config.pruning;
@ -188,6 +188,8 @@ impl IoHandler<ClientIoMessage> for ClientIoHandler {
#[cfg_attr(feature="dev", allow(single_match))] #[cfg_attr(feature="dev", allow(single_match))]
fn message(&self, _io: &IoContext<ClientIoMessage>, net_message: &ClientIoMessage) { fn message(&self, _io: &IoContext<ClientIoMessage>, net_message: &ClientIoMessage) {
use std::thread;
match *net_message { match *net_message {
ClientIoMessage::BlockVerified => { self.client.import_verified_blocks(); } ClientIoMessage::BlockVerified => { self.client.import_verified_blocks(); }
ClientIoMessage::NewTransactions(ref transactions) => { self.client.import_queued_transactions(transactions); } ClientIoMessage::NewTransactions(ref transactions) => { self.client.import_queued_transactions(transactions); }
@ -199,9 +201,19 @@ impl IoHandler<ClientIoMessage> for ClientIoHandler {
ClientIoMessage::FeedStateChunk(ref hash, ref chunk) => self.snapshot.feed_state_chunk(*hash, chunk), ClientIoMessage::FeedStateChunk(ref hash, ref chunk) => self.snapshot.feed_state_chunk(*hash, chunk),
ClientIoMessage::FeedBlockChunk(ref hash, ref chunk) => self.snapshot.feed_block_chunk(*hash, chunk), ClientIoMessage::FeedBlockChunk(ref hash, ref chunk) => self.snapshot.feed_block_chunk(*hash, chunk),
ClientIoMessage::TakeSnapshot(num) => { ClientIoMessage::TakeSnapshot(num) => {
if let Err(e) = self.snapshot.take_snapshot(&*self.client, num) { let client = self.client.clone();
warn!("Failed to take snapshot at block #{}: {}", num, e); let snapshot = self.snapshot.clone();
let res = thread::Builder::new().name("Periodic Snapshot".into()).spawn(move || {
if let Err(e) = snapshot.take_snapshot(&*client, num) {
warn!("Failed to take snapshot at block #{}: {}", num, e);
}
});
if let Err(e) = res {
debug!(target: "snapshot", "Failed to initialize periodic snapshot thread: {:?}", e);
} }
} }
_ => {} // ignore other messages _ => {} // ignore other messages
} }


@ -19,11 +19,11 @@
use account_db::{AccountDB, AccountDBMut}; use account_db::{AccountDB, AccountDBMut};
use snapshot::Error; use snapshot::Error;
use util::{U256, FixedHash, H256, Bytes, HashDB, DBValue, SHA3_EMPTY, SHA3_NULL_RLP}; use util::{U256, FixedHash, H256, Bytes, HashDB, SHA3_EMPTY, SHA3_NULL_RLP};
use util::trie::{TrieDB, Trie}; use util::trie::{TrieDB, Trie};
use rlp::{Rlp, RlpStream, Stream, UntrustedRlp, View}; use rlp::{Rlp, RlpStream, Stream, UntrustedRlp, View};
use std::collections::{HashMap, HashSet}; use std::collections::HashSet;
// An empty account -- these are replaced with RLP null data for a space optimization. // An empty account -- these are replaced with RLP null data for a space optimization.
const ACC_EMPTY: Account = Account { const ACC_EMPTY: Account = Account {
@ -150,7 +150,6 @@ impl Account {
pub fn from_fat_rlp( pub fn from_fat_rlp(
acct_db: &mut AccountDBMut, acct_db: &mut AccountDBMut,
rlp: UntrustedRlp, rlp: UntrustedRlp,
code_map: &HashMap<H256, Bytes>,
) -> Result<(Self, Option<Bytes>), Error> { ) -> Result<(Self, Option<Bytes>), Error> {
use util::{TrieDBMut, TrieMut}; use util::{TrieDBMut, TrieMut};
@ -177,9 +176,6 @@ impl Account {
} }
CodeState::Hash => { CodeState::Hash => {
let code_hash = try!(rlp.val_at(3)); let code_hash = try!(rlp.val_at(3));
if let Some(code) = code_map.get(&code_hash) {
acct_db.emplace(code_hash.clone(), DBValue::from_slice(&code));
}
(code_hash, None) (code_hash, None)
} }
@ -229,7 +225,7 @@ mod tests {
use util::{Address, FixedHash, H256, HashDB, DBValue}; use util::{Address, FixedHash, H256, HashDB, DBValue};
use rlp::{UntrustedRlp, View}; use rlp::{UntrustedRlp, View};
use std::collections::{HashSet, HashMap}; use std::collections::HashSet;
use super::{ACC_EMPTY, Account}; use super::{ACC_EMPTY, Account};
@ -250,7 +246,7 @@ mod tests {
let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap(); let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap();
let fat_rlp = UntrustedRlp::new(&fat_rlp); let fat_rlp = UntrustedRlp::new(&fat_rlp);
assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, &Default::default()).unwrap().0, account); assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap().0, account);
} }
#[test] #[test]
@ -275,7 +271,7 @@ mod tests {
let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap(); let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap();
let fat_rlp = UntrustedRlp::new(&fat_rlp); let fat_rlp = UntrustedRlp::new(&fat_rlp);
assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, &Default::default()).unwrap().0, account); assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap().0, account);
} }
#[test] #[test]
@ -318,12 +314,11 @@ mod tests {
let fat_rlp1 = UntrustedRlp::new(&fat_rlp1); let fat_rlp1 = UntrustedRlp::new(&fat_rlp1);
let fat_rlp2 = UntrustedRlp::new(&fat_rlp2); let fat_rlp2 = UntrustedRlp::new(&fat_rlp2);
let code_map = HashMap::new(); let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2).unwrap();
let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2, &code_map).unwrap();
assert!(maybe_code.is_none()); assert!(maybe_code.is_none());
assert_eq!(acc, account2); assert_eq!(acc, account2);
let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1, &code_map).unwrap(); let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1).unwrap();
assert_eq!(maybe_code, Some(b"this is definitely code".to_vec())); assert_eq!(maybe_code, Some(b"this is definitely code".to_vec()));
assert_eq!(acc, account1); assert_eq!(acc, account1);
} }
@ -332,9 +327,8 @@ mod tests {
fn encoding_empty_acc() { fn encoding_empty_acc() {
let mut db = get_temp_state_db(); let mut db = get_temp_state_db();
let mut used_code = HashSet::new(); let mut used_code = HashSet::new();
let code_map = HashMap::new();
assert_eq!(ACC_EMPTY.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &Address::default()), &mut used_code).unwrap(), ::rlp::NULL_RLP.to_vec()); assert_eq!(ACC_EMPTY.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &Address::default()), &mut used_code).unwrap(), ::rlp::NULL_RLP.to_vec());
assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &Address::default()), UntrustedRlp::new(&::rlp::NULL_RLP), &code_map).unwrap(), (ACC_EMPTY, None)); assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &Address::default()), UntrustedRlp::new(&::rlp::NULL_RLP)).unwrap(), (ACC_EMPTY, None));
} }
} }


@ -33,12 +33,20 @@ pub enum Error {
BlockNotFound(H256), BlockNotFound(H256),
/// Incomplete chain. /// Incomplete chain.
IncompleteChain, IncompleteChain,
/// Best block has wrong state root.
WrongStateRoot(H256, H256),
/// Wrong block hash.
WrongBlockHash(u64, H256, H256),
/// Too many blocks contained within the snapshot.
TooManyBlocks(u64, u64),
/// Old starting block in a pruned database. /// Old starting block in a pruned database.
OldBlockPrunedDB, OldBlockPrunedDB,
/// Missing code. /// Missing code.
MissingCode(Vec<H256>), MissingCode(Vec<H256>),
/// Unrecognized code encoding. /// Unrecognized code encoding.
UnrecognizedCodeState(u8), UnrecognizedCodeState(u8),
/// Restoration aborted.
RestorationAborted,
/// Trie error. /// Trie error.
Trie(TrieError), Trie(TrieError),
/// Decoder error. /// Decoder error.
@ -52,11 +60,16 @@ impl fmt::Display for Error {
match *self { match *self {
Error::InvalidStartingBlock(ref id) => write!(f, "Invalid starting block: {:?}", id), Error::InvalidStartingBlock(ref id) => write!(f, "Invalid starting block: {:?}", id),
Error::BlockNotFound(ref hash) => write!(f, "Block not found in chain: {}", hash), Error::BlockNotFound(ref hash) => write!(f, "Block not found in chain: {}", hash),
Error::IncompleteChain => write!(f, "Cannot create snapshot due to incomplete chain."), Error::IncompleteChain => write!(f, "Incomplete blockchain."),
Error::WrongStateRoot(ref expected, ref found) => write!(f, "Final block has wrong state root. Expected {:?}, got {:?}", expected, found),
Error::WrongBlockHash(ref num, ref expected, ref found) =>
write!(f, "Block {} had wrong hash. expected {:?}, got {:?}", num, expected, found),
Error::TooManyBlocks(ref expected, ref found) => write!(f, "Snapshot contained too many blocks. Expected {}, got {}", expected, found),
Error::OldBlockPrunedDB => write!(f, "Attempted to create a snapshot at an old block while using \ Error::OldBlockPrunedDB => write!(f, "Attempted to create a snapshot at an old block while using \
a pruned database. Please re-run with the --pruning archive flag."), a pruned database. Please re-run with the --pruning archive flag."),
Error::MissingCode(ref missing) => write!(f, "Incomplete snapshot: {} contract codes not found.", missing.len()), Error::MissingCode(ref missing) => write!(f, "Incomplete snapshot: {} contract codes not found.", missing.len()),
Error::UnrecognizedCodeState(state) => write!(f, "Unrecognized code encoding ({})", state), Error::UnrecognizedCodeState(state) => write!(f, "Unrecognized code encoding ({})", state),
Error::RestorationAborted => write!(f, "Snapshot restoration aborted."),
Error::Io(ref err) => err.fmt(f), Error::Io(ref err) => err.fmt(f),
Error::Decoder(ref err) => err.fmt(f), Error::Decoder(ref err) => err.fmt(f),
Error::Trie(ref err) => err.fmt(f), Error::Trie(ref err) => err.fmt(f),

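A self-contained sketch (toy types, not ethcore's) of the cooperative-abort pattern behind the new RestorationAborted variant: long-running restoration loops poll a shared AtomicBool and return the error as soon as the flag is cleared.

use std::sync::atomic::{AtomicBool, Ordering};

#[derive(Debug, PartialEq)]
enum Error {
	RestorationAborted,
}

fn feed(items: &[u64], abort_flag: &AtomicBool) -> Result<u64, Error> {
	let mut total = 0;
	for item in items {
		// poll the flag on every iteration so an abort is observed promptly
		if !abort_flag.load(Ordering::SeqCst) {
			return Err(Error::RestorationAborted);
		}
		total += *item;
	}
	Ok(total)
}

fn main() {
	let flag = AtomicBool::new(true);
	assert_eq!(feed(&[1, 2, 3], &flag), Ok(6));

	flag.store(false, Ordering::SeqCst);
	assert_eq!(feed(&[1, 2, 3], &flag), Err(Error::RestorationAborted));
}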

@ -26,6 +26,7 @@ use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};
use account_db::{AccountDB, AccountDBMut}; use account_db::{AccountDB, AccountDBMut};
use blockchain::{BlockChain, BlockProvider}; use blockchain::{BlockChain, BlockProvider};
use engines::Engine; use engines::Engine;
use header::Header;
use ids::BlockID; use ids::BlockID;
use views::BlockView; use views::BlockView;
@ -202,7 +203,7 @@ impl<'a> BlockChunker<'a> {
// cut off the chunk if too large. // cut off the chunk if too large.
if new_loaded_size > PREFERRED_CHUNK_SIZE && self.rlps.len() > 0 { if new_loaded_size > PREFERRED_CHUNK_SIZE && !self.rlps.is_empty() {
try!(self.write_chunk(last)); try!(self.write_chunk(last));
loaded_size = pair.len(); loaded_size = pair.len();
} else { } else {
@ -388,7 +389,7 @@ pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter +
pub struct StateRebuilder { pub struct StateRebuilder {
db: Box<JournalDB>, db: Box<JournalDB>,
state_root: H256, state_root: H256,
code_map: HashMap<H256, Bytes>, // maps code hashes to code itself. known_code: HashMap<H256, H256>, // code hashes mapped to first account with this code.
missing_code: HashMap<H256, Vec<H256>>, // maps code hashes to lists of accounts missing that code. missing_code: HashMap<H256, Vec<H256>>, // maps code hashes to lists of accounts missing that code.
bloom: Bloom, bloom: Bloom,
} }
@ -399,43 +400,41 @@ impl StateRebuilder {
StateRebuilder { StateRebuilder {
db: journaldb::new(db.clone(), pruning, ::db::COL_STATE), db: journaldb::new(db.clone(), pruning, ::db::COL_STATE),
state_root: SHA3_NULL_RLP, state_root: SHA3_NULL_RLP,
code_map: HashMap::new(), known_code: HashMap::new(),
missing_code: HashMap::new(), missing_code: HashMap::new(),
bloom: StateDB::load_bloom(&*db), bloom: StateDB::load_bloom(&*db),
} }
} }
/// Feed an uncompressed state chunk into the rebuilder. /// Feed an uncompressed state chunk into the rebuilder.
pub fn feed(&mut self, chunk: &[u8]) -> Result<(), ::error::Error> { pub fn feed(&mut self, chunk: &[u8], flag: &AtomicBool) -> Result<(), ::error::Error> {
let rlp = UntrustedRlp::new(chunk); let rlp = UntrustedRlp::new(chunk);
let empty_rlp = StateAccount::new_basic(U256::zero(), U256::zero()).rlp(); let empty_rlp = StateAccount::new_basic(U256::zero(), U256::zero()).rlp();
let account_fat_rlps: Vec<_> = rlp.iter().map(|r| r.as_raw()).collect();
let mut pairs = Vec::with_capacity(rlp.item_count()); let mut pairs = Vec::with_capacity(rlp.item_count());
// initialize the pairs vector with empty values so we have slots to write into. // initialize the pairs vector with empty values so we have slots to write into.
pairs.resize(rlp.item_count(), (H256::new(), Vec::new())); pairs.resize(rlp.item_count(), (H256::new(), Vec::new()));
let chunk_size = account_fat_rlps.len() / ::num_cpus::get() + 1; let status = try!(rebuild_accounts(
self.db.as_hashdb_mut(),
rlp,
&mut pairs,
&self.known_code,
flag
));
// new code contained within this chunk. for (addr_hash, code_hash) in status.missing_code {
let mut chunk_code = HashMap::new(); self.missing_code.entry(code_hash).or_insert_with(Vec::new).push(addr_hash);
for (account_chunk, out_pairs_chunk) in account_fat_rlps.chunks(chunk_size).zip(pairs.chunks_mut(chunk_size)) {
let code_map = &self.code_map;
let status = try!(rebuild_accounts(self.db.as_hashdb_mut(), account_chunk, out_pairs_chunk, code_map));
chunk_code.extend(status.new_code);
for (addr_hash, code_hash) in status.missing_code {
self.missing_code.entry(code_hash).or_insert_with(Vec::new).push(addr_hash);
}
} }
// patch up all missing code. must be done after collecting all new missing code entries. // patch up all missing code. must be done after collecting all new missing code entries.
for (code_hash, code) in chunk_code { for (code_hash, code, first_with) in status.new_code {
for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) { for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) {
let mut db = AccountDBMut::from_hash(self.db.as_hashdb_mut(), addr_hash); let mut db = AccountDBMut::from_hash(self.db.as_hashdb_mut(), addr_hash);
db.emplace(code_hash, DBValue::from_slice(&code)); db.emplace(code_hash, DBValue::from_slice(&code));
} }
self.code_map.insert(code_hash, code); self.known_code.insert(code_hash, first_with);
} }
let backing = self.db.backing().clone(); let backing = self.db.backing().clone();
@ -449,6 +448,8 @@ impl StateRebuilder {
}; };
for (hash, thin_rlp) in pairs { for (hash, thin_rlp) in pairs {
if !flag.load(Ordering::SeqCst) { return Err(Error::RestorationAborted.into()) }
if &thin_rlp[..] != &empty_rlp[..] { if &thin_rlp[..] != &empty_rlp[..] {
self.bloom.set(&*hash); self.bloom.set(&*hash);
} }
@ -481,38 +482,55 @@ impl StateRebuilder {
#[derive(Default)] #[derive(Default)]
struct RebuiltStatus { struct RebuiltStatus {
new_code: Vec<(H256, Bytes)>, // new code that's become available. // new code that's become available. (code_hash, code, addr_hash)
new_code: Vec<(H256, Bytes, H256)>,
missing_code: Vec<(H256, H256)>, // accounts that are missing code. missing_code: Vec<(H256, H256)>, // accounts that are missing code.
} }
// rebuild a set of accounts and their storage. // rebuild a set of accounts and their storage.
// returns // returns a status detailing newly-loaded code and accounts missing code.
fn rebuild_accounts( fn rebuild_accounts(
db: &mut HashDB, db: &mut HashDB,
account_chunk: &[&[u8]], account_fat_rlps: UntrustedRlp,
out_chunk: &mut [(H256, Bytes)], out_chunk: &mut [(H256, Bytes)],
code_map: &HashMap<H256, Bytes> known_code: &HashMap<H256, H256>,
) -> Result<RebuiltStatus, ::error::Error> abort_flag: &AtomicBool,
{ ) -> Result<RebuiltStatus, ::error::Error> {
let mut status = RebuiltStatus::default(); let mut status = RebuiltStatus::default();
for (account_pair, out) in account_chunk.into_iter().zip(out_chunk) { for (account_rlp, out) in account_fat_rlps.into_iter().zip(out_chunk) {
let account_rlp = UntrustedRlp::new(account_pair); if !abort_flag.load(Ordering::SeqCst) { return Err(Error::RestorationAborted.into()) }
let hash: H256 = try!(account_rlp.val_at(0)); let hash: H256 = try!(account_rlp.val_at(0));
let fat_rlp = try!(account_rlp.at(1)); let fat_rlp = try!(account_rlp.at(1));
let thin_rlp = { let thin_rlp = {
let mut acct_db = AccountDBMut::from_hash(db, hash);
// fill out the storage trie and code while decoding. // fill out the storage trie and code while decoding.
let (acc, maybe_code) = try!(Account::from_fat_rlp(&mut acct_db, fat_rlp, code_map)); let (acc, maybe_code) = {
let mut acct_db = AccountDBMut::from_hash(db, hash);
try!(Account::from_fat_rlp(&mut acct_db, fat_rlp))
};
let code_hash = acc.code_hash().clone(); let code_hash = acc.code_hash().clone();
match maybe_code { match maybe_code {
Some(code) => status.new_code.push((code_hash, code)), // new inline code
Some(code) => status.new_code.push((code_hash, code, hash)),
None => { None => {
if code_hash != ::util::SHA3_EMPTY && !code_map.contains_key(&code_hash) { if code_hash != ::util::SHA3_EMPTY {
status.missing_code.push((hash, code_hash)); // see if this code has already been included inline
match known_code.get(&code_hash) {
Some(&first_with) => {
// if so, load it from the database.
let code = try!(AccountDB::from_hash(db, first_with)
.get(&code_hash)
.ok_or_else(|| Error::MissingCode(vec![first_with])));
// and write it again under a different mangled key
AccountDBMut::from_hash(db, hash).emplace(code_hash, code);
}
// if not, queue it up to be filled later
None => status.missing_code.push((hash, code_hash)),
}
} }
} }
} }
@ -528,6 +546,20 @@ fn rebuild_accounts(
/// Proportion of blocks which we will verify `PoW` for. /// Proportion of blocks which we will verify `PoW` for.
const POW_VERIFY_RATE: f32 = 0.02; const POW_VERIFY_RATE: f32 = 0.02;
/// Verify an old block with the given header, engine, blockchain, body. If `always` is set, it will perform
/// the fullest verification possible. If not, it will take a random sample to determine whether it will
/// do heavy or light verification.
pub fn verify_old_block(rng: &mut OsRng, header: &Header, engine: &Engine, chain: &BlockChain, body: Option<&[u8]>, always: bool) -> Result<(), ::error::Error> {
if always || rng.gen::<f32>() <= POW_VERIFY_RATE {
match chain.block_header(header.parent_hash()) {
Some(parent) => engine.verify_block_family(&header, &parent, body),
None => engine.verify_block_seal(&header),
}
} else {
engine.verify_block_basic(&header, body)
}
}
/// Rebuilds the blockchain from chunks. /// Rebuilds the blockchain from chunks.
/// ///
/// Does basic verification for all blocks, but `PoW` verification for some. /// Does basic verification for all blocks, but `PoW` verification for some.
@ -543,38 +575,51 @@ pub struct BlockRebuilder {
rng: OsRng, rng: OsRng,
disconnected: Vec<(u64, H256)>, disconnected: Vec<(u64, H256)>,
best_number: u64, best_number: u64,
best_hash: H256,
best_root: H256,
fed_blocks: u64,
} }
impl BlockRebuilder { impl BlockRebuilder {
/// Create a new BlockRebuilder. /// Create a new BlockRebuilder.
pub fn new(chain: BlockChain, db: Arc<Database>, best_number: u64) -> Result<Self, ::error::Error> { pub fn new(chain: BlockChain, db: Arc<Database>, manifest: &ManifestData) -> Result<Self, ::error::Error> {
Ok(BlockRebuilder { Ok(BlockRebuilder {
chain: chain, chain: chain,
db: db, db: db,
rng: try!(OsRng::new()), rng: try!(OsRng::new()),
disconnected: Vec::new(), disconnected: Vec::new(),
best_number: best_number, best_number: manifest.block_number,
best_hash: manifest.block_hash,
best_root: manifest.state_root,
fed_blocks: 0,
}) })
} }
/// Feed the rebuilder an uncompressed block chunk. /// Feed the rebuilder an uncompressed block chunk.
/// Returns the number of blocks fed or any errors. /// Returns the number of blocks fed or any errors.
pub fn feed(&mut self, chunk: &[u8], engine: &Engine) -> Result<u64, ::error::Error> { pub fn feed(&mut self, chunk: &[u8], engine: &Engine, abort_flag: &AtomicBool) -> Result<u64, ::error::Error> {
use basic_types::Seal::With; use basic_types::Seal::With;
use util::U256; use util::U256;
use util::triehash::ordered_trie_root; use util::triehash::ordered_trie_root;
let rlp = UntrustedRlp::new(chunk); let rlp = UntrustedRlp::new(chunk);
let item_count = rlp.item_count(); let item_count = rlp.item_count();
let num_blocks = (item_count - 3) as u64;
trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3); trace!(target: "snapshot", "restoring block chunk with {} blocks.", item_count - 3);
if self.fed_blocks + num_blocks > SNAPSHOT_BLOCKS {
return Err(Error::TooManyBlocks(SNAPSHOT_BLOCKS, self.fed_blocks).into())
}
// todo: assert here that these values are consistent with chunks being in order. // todo: assert here that these values are consistent with chunks being in order.
let mut cur_number = try!(rlp.val_at::<u64>(0)) + 1; let mut cur_number = try!(rlp.val_at::<u64>(0)) + 1;
let mut parent_hash = try!(rlp.val_at::<H256>(1)); let mut parent_hash = try!(rlp.val_at::<H256>(1));
let parent_total_difficulty = try!(rlp.val_at::<U256>(2)); let parent_total_difficulty = try!(rlp.val_at::<U256>(2));
for idx in 3..item_count { for idx in 3..item_count {
if !abort_flag.load(Ordering::SeqCst) { return Err(Error::RestorationAborted.into()) }
let pair = try!(rlp.at(idx)); let pair = try!(rlp.at(idx));
let abridged_rlp = try!(pair.at(0)).as_raw().to_owned(); let abridged_rlp = try!(pair.at(0)).as_raw().to_owned();
let abridged_block = AbridgedBlock::from_raw(abridged_rlp); let abridged_block = AbridgedBlock::from_raw(abridged_rlp);
@ -585,14 +630,27 @@ impl BlockRebuilder {
let block = try!(abridged_block.to_block(parent_hash, cur_number, receipts_root)); let block = try!(abridged_block.to_block(parent_hash, cur_number, receipts_root));
let block_bytes = block.rlp_bytes(With); let block_bytes = block.rlp_bytes(With);
let is_best = cur_number == self.best_number;
if self.rng.gen::<f32>() <= POW_VERIFY_RATE { if is_best {
try!(engine.verify_block_seal(&block.header)) if block.header.hash() != self.best_hash {
} else { return Err(Error::WrongBlockHash(cur_number, self.best_hash, block.header.hash()).into())
try!(engine.verify_block_basic(&block.header, Some(&block_bytes))); }
if block.header.state_root() != &self.best_root {
return Err(Error::WrongStateRoot(self.best_root, *block.header.state_root()).into())
}
} }
let is_best = cur_number == self.best_number; try!(verify_old_block(
&mut self.rng,
&block.header,
engine,
&self.chain,
Some(&block_bytes),
is_best
));
let mut batch = self.db.transaction(); let mut batch = self.db.transaction();
// special-case the first block in each chunk. // special-case the first block in each chunk.
@ -610,11 +668,15 @@ impl BlockRebuilder {
cur_number += 1; cur_number += 1;
} }
Ok(item_count as u64 - 3) self.fed_blocks += num_blocks;
Ok(num_blocks)
} }
/// Glue together any disconnected chunks. To be called at the end. /// Glue together any disconnected chunks and check that the chain is complete.
pub fn glue_chunks(self) { pub fn finalize(self, canonical: HashMap<u64, H256>) -> Result<(), Error> {
let mut batch = self.db.transaction();
for (first_num, first_hash) in self.disconnected { for (first_num, first_hash) in self.disconnected {
let parent_num = first_num - 1; let parent_num = first_num - 1;
@ -623,8 +685,23 @@ impl BlockRebuilder {
// the first block of the first chunks has nothing to connect to. // the first block of the first chunks has nothing to connect to.
if let Some(parent_hash) = self.chain.block_hash(parent_num) { if let Some(parent_hash) = self.chain.block_hash(parent_num) {
// if so, add the child to it. // if so, add the child to it.
self.chain.add_child(parent_hash, first_hash); self.chain.add_child(&mut batch, parent_hash, first_hash);
} }
} }
self.db.write_buffered(batch);
let best_number = self.best_number;
for num in (0..self.fed_blocks).map(|x| best_number - x) {
let hash = try!(self.chain.block_hash(num).ok_or(Error::IncompleteChain));
if let Some(canon_hash) = canonical.get(&num).cloned() {
if canon_hash != hash {
return Err(Error::WrongBlockHash(num, canon_hash, hash));
}
}
}
Ok(())
} }
} }

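An illustrative, std-only sketch of the sampling rule described in verify_old_block's doc comment: a fixed fraction of old blocks receives the full check, the rest only the cheap one, and `always` forces the full check. The random draw is passed in as a plain f32 here to keep the example dependency-free.

const POW_VERIFY_RATE: f32 = 0.02;

fn pick_verification(sample: f32, always: bool) -> &'static str {
	// `sample` stands in for a uniformly distributed value in [0, 1).
	if always || sample <= POW_VERIFY_RATE {
		"full (seal / family) verification"
	} else {
		"light (basic) verification"
	}
}

fn main() {
	assert_eq!(pick_verification(0.01, false), "full (seal / family) verification");
	assert_eq!(pick_verification(0.50, false), "light (basic) verification");
	assert_eq!(pick_verification(0.50, true), "full (seal / family) verification");
}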

@ -16,7 +16,7 @@
//! Snapshot network service implementation. //! Snapshot network service implementation.
use std::collections::HashSet; use std::collections::{HashMap, HashSet};
use std::io::ErrorKind; use std::io::ErrorKind;
use std::fs; use std::fs;
use std::path::PathBuf; use std::path::PathBuf;
@ -74,6 +74,7 @@ struct Restoration {
snappy_buffer: Bytes, snappy_buffer: Bytes,
final_state_root: H256, final_state_root: H256,
guard: Guard, guard: Guard,
canonical_hashes: HashMap<u64, H256>,
db: Arc<Database>, db: Arc<Database>,
} }
@ -99,7 +100,7 @@ impl Restoration {
.map_err(UtilError::SimpleString))); .map_err(UtilError::SimpleString)));
let chain = BlockChain::new(Default::default(), params.genesis, raw_db.clone()); let chain = BlockChain::new(Default::default(), params.genesis, raw_db.clone());
let blocks = try!(BlockRebuilder::new(chain, raw_db.clone(), manifest.block_number)); let blocks = try!(BlockRebuilder::new(chain, raw_db.clone(), &manifest));
let root = manifest.state_root.clone(); let root = manifest.state_root.clone();
Ok(Restoration { Ok(Restoration {
@ -112,16 +113,17 @@ impl Restoration {
snappy_buffer: Vec::new(), snappy_buffer: Vec::new(),
final_state_root: root, final_state_root: root,
guard: params.guard, guard: params.guard,
canonical_hashes: HashMap::new(),
db: raw_db, db: raw_db,
}) })
} }
// feeds a state chunk // feeds a state chunk, aborts early if `flag` becomes false.
fn feed_state(&mut self, hash: H256, chunk: &[u8]) -> Result<(), Error> { fn feed_state(&mut self, hash: H256, chunk: &[u8], flag: &AtomicBool) -> Result<(), Error> {
if self.state_chunks_left.remove(&hash) { if self.state_chunks_left.remove(&hash) {
let len = try!(snappy::decompress_into(chunk, &mut self.snappy_buffer)); let len = try!(snappy::decompress_into(chunk, &mut self.snappy_buffer));
try!(self.state.feed(&self.snappy_buffer[..len])); try!(self.state.feed(&self.snappy_buffer[..len], flag));
if let Some(ref mut writer) = self.writer.as_mut() { if let Some(ref mut writer) = self.writer.as_mut() {
try!(writer.write_state_chunk(hash, chunk)); try!(writer.write_state_chunk(hash, chunk));
@ -132,19 +134,24 @@ impl Restoration {
} }
// feeds a block chunk // feeds a block chunk
fn feed_blocks(&mut self, hash: H256, chunk: &[u8], engine: &Engine) -> Result<(), Error> { fn feed_blocks(&mut self, hash: H256, chunk: &[u8], engine: &Engine, flag: &AtomicBool) -> Result<(), Error> {
if self.block_chunks_left.remove(&hash) { if self.block_chunks_left.remove(&hash) {
let len = try!(snappy::decompress_into(chunk, &mut self.snappy_buffer)); let len = try!(snappy::decompress_into(chunk, &mut self.snappy_buffer));
try!(self.blocks.feed(&self.snappy_buffer[..len], engine)); try!(self.blocks.feed(&self.snappy_buffer[..len], engine, flag));
if let Some(ref mut writer) = self.writer.as_mut() { if let Some(ref mut writer) = self.writer.as_mut() {
try!(writer.write_block_chunk(hash, chunk)); try!(writer.write_block_chunk(hash, chunk));
} }
} }
Ok(()) Ok(())
} }
// note canonical hashes.
fn note_canonical(&mut self, hashes: &[(u64, H256)]) {
self.canonical_hashes.extend(hashes.iter().cloned());
}
// finish up restoration. // finish up restoration.
fn finalize(self) -> Result<(), Error> { fn finalize(self) -> Result<(), Error> {
use util::trie::TrieError; use util::trie::TrieError;
@ -161,8 +168,8 @@ impl Restoration {
// check for missing code. // check for missing code.
try!(self.state.check_missing()); try!(self.state.check_missing());
// connect out-of-order chunks. // connect out-of-order chunks and verify chain integrity.
self.blocks.glue_chunks(); try!(self.blocks.finalize(self.canonical_hashes));
if let Some(writer) = self.writer { if let Some(writer) = self.writer {
try!(writer.finish(self.manifest)); try!(writer.finish(self.manifest));
@ -206,7 +213,7 @@ pub struct Service {
restoration: Mutex<Option<Restoration>>, restoration: Mutex<Option<Restoration>>,
snapshot_root: PathBuf, snapshot_root: PathBuf,
db_config: DatabaseConfig, db_config: DatabaseConfig,
io_channel: Channel, io_channel: Mutex<Channel>,
pruning: Algorithm, pruning: Algorithm,
status: Mutex<RestorationStatus>, status: Mutex<RestorationStatus>,
reader: RwLock<Option<LooseReader>>, reader: RwLock<Option<LooseReader>>,
@ -217,6 +224,7 @@ pub struct Service {
db_restore: Arc<DatabaseRestore>, db_restore: Arc<DatabaseRestore>,
progress: super::Progress, progress: super::Progress,
taking_snapshot: AtomicBool, taking_snapshot: AtomicBool,
restoring_snapshot: AtomicBool,
} }
impl Service { impl Service {
@ -226,7 +234,7 @@ impl Service {
restoration: Mutex::new(None), restoration: Mutex::new(None),
snapshot_root: params.snapshot_root, snapshot_root: params.snapshot_root,
db_config: params.db_config, db_config: params.db_config,
io_channel: params.channel, io_channel: Mutex::new(params.channel),
pruning: params.pruning, pruning: params.pruning,
status: Mutex::new(RestorationStatus::Inactive), status: Mutex::new(RestorationStatus::Inactive),
reader: RwLock::new(None), reader: RwLock::new(None),
@ -237,6 +245,7 @@ impl Service {
db_restore: params.db_restore, db_restore: params.db_restore,
progress: Default::default(), progress: Default::default(),
taking_snapshot: AtomicBool::new(false), taking_snapshot: AtomicBool::new(false),
restoring_snapshot: AtomicBool::new(false),
}; };
// create the root snapshot dir if it doesn't exist. // create the root snapshot dir if it doesn't exist.
@ -352,7 +361,8 @@ impl Service {
// "Cancelled" is mincing words a bit -- what really happened // "Cancelled" is mincing words a bit -- what really happened
// is that the state we were snapshotting got pruned out // is that the state we were snapshotting got pruned out
// before we could finish. // before we could finish.
info!("Cancelled prematurely-started periodic snapshot."); info!("Periodic snapshot failed: block state pruned.\
Run with a longer `--pruning-history` or with `--no-periodic-snapshot`");
return Ok(()) return Ok(())
} else { } else {
return Err(e); return Err(e);
@ -428,6 +438,8 @@ impl Service {
state_chunks_done: self.state_chunks.load(Ordering::SeqCst) as u32, state_chunks_done: self.state_chunks.load(Ordering::SeqCst) as u32,
block_chunks_done: self.block_chunks.load(Ordering::SeqCst) as u32, block_chunks_done: self.block_chunks.load(Ordering::SeqCst) as u32,
}; };
self.restoring_snapshot.store(true, Ordering::SeqCst);
Ok(()) Ok(())
} }
@ -482,8 +494,8 @@ impl Service {
}; };
(match is_state { (match is_state {
true => rest.feed_state(hash, chunk), true => rest.feed_state(hash, chunk, &self.restoring_snapshot),
false => rest.feed_blocks(hash, chunk, &*self.engine), false => rest.feed_blocks(hash, chunk, &*self.engine, &self.restoring_snapshot),
}.map(|_| rest.is_done()), rest.db.clone()) }.map(|_| rest.is_done()), rest.db.clone())
}; };
@ -559,27 +571,36 @@ impl SnapshotService for Service {
} }
fn begin_restore(&self, manifest: ManifestData) { fn begin_restore(&self, manifest: ManifestData) {
if let Err(e) = self.io_channel.send(ClientIoMessage::BeginRestoration(manifest)) { if let Err(e) = self.io_channel.lock().send(ClientIoMessage::BeginRestoration(manifest)) {
trace!("Error sending snapshot service message: {:?}", e); trace!("Error sending snapshot service message: {:?}", e);
} }
} }
fn abort_restore(&self) { fn abort_restore(&self) {
self.restoring_snapshot.store(false, Ordering::SeqCst);
*self.restoration.lock() = None; *self.restoration.lock() = None;
*self.status.lock() = RestorationStatus::Inactive; *self.status.lock() = RestorationStatus::Inactive;
} }
fn restore_state_chunk(&self, hash: H256, chunk: Bytes) { fn restore_state_chunk(&self, hash: H256, chunk: Bytes) {
if let Err(e) = self.io_channel.send(ClientIoMessage::FeedStateChunk(hash, chunk)) { if let Err(e) = self.io_channel.lock().send(ClientIoMessage::FeedStateChunk(hash, chunk)) {
trace!("Error sending snapshot service message: {:?}", e); trace!("Error sending snapshot service message: {:?}", e);
} }
} }
fn restore_block_chunk(&self, hash: H256, chunk: Bytes) { fn restore_block_chunk(&self, hash: H256, chunk: Bytes) {
if let Err(e) = self.io_channel.send(ClientIoMessage::FeedBlockChunk(hash, chunk)) { if let Err(e) = self.io_channel.lock().send(ClientIoMessage::FeedBlockChunk(hash, chunk)) {
trace!("Error sending snapshot service message: {:?}", e); trace!("Error sending snapshot service message: {:?}", e);
} }
} }
fn provide_canon_hashes(&self, canonical: &[(u64, H256)]) {
let mut rest = self.restoration.lock();
if let Some(ref mut rest) = rest.as_mut() {
rest.note_canonical(canonical);
}
}
} }
impl Drop for Service { impl Drop for Service {


@ -48,6 +48,10 @@ pub trait SnapshotService : Sync + Send {
/// Feed a raw block chunk to the service to be processed asynchronously. /// Feed a raw block chunk to the service to be processed asynchronously.
/// no-op if currently restoring. /// no-op if currently restoring.
fn restore_block_chunk(&self, hash: H256, chunk: Bytes); fn restore_block_chunk(&self, hash: H256, chunk: Bytes);
/// Give the restoration in-progress some canonical block hashes for
/// extra verification (performed at the end)
fn provide_canon_hashes(&self, canonical: &[(u64, H256)]);
} }
impl IpcConfig for SnapshotService { } impl IpcConfig for SnapshotService { }

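A hedged, self-contained sketch (toy string hashes, not the real block types) of the extra verification that provide_canon_hashes enables: at the end of restoration, the hashes of restored blocks are compared against the externally supplied canonical hashes, and any mismatch fails the restoration.

use std::collections::HashMap;

fn verify_canonical(restored: &[(u64, &str)], canonical: &HashMap<u64, &str>) -> Result<(), String> {
	for &(num, hash) in restored {
		// only block numbers we were handed a canonical hash for are checked
		if let Some(&expected) = canonical.get(&num) {
			if expected != hash {
				return Err(format!("block {}: expected {}, got {}", num, expected, hash));
			}
		}
	}
	Ok(())
}

fn main() {
	let canonical: HashMap<u64, &str> = vec![(1, "0xaa"), (2, "0xbb")].into_iter().collect();
	assert!(verify_canonical(&[(1, "0xaa"), (2, "0xbb")], &canonical).is_ok());
	assert!(verify_canonical(&[(2, "0xcc")], &canonical).is_err());
}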

@ -17,16 +17,19 @@
//! Block chunker and rebuilder tests. //! Block chunker and rebuilder tests.
use devtools::RandomTempPath; use devtools::RandomTempPath;
use error::Error;
use blockchain::generator::{ChainGenerator, ChainIterator, BlockFinalizer}; use blockchain::generator::{ChainGenerator, ChainIterator, BlockFinalizer};
use blockchain::BlockChain; use blockchain::BlockChain;
use snapshot::{chunk_blocks, BlockRebuilder, Progress}; use snapshot::{chunk_blocks, BlockRebuilder, Error as SnapshotError, Progress};
use snapshot::io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter}; use snapshot::io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter};
use util::{Mutex, snappy}; use util::{Mutex, snappy};
use util::kvdb::{Database, DatabaseConfig}; use util::kvdb::{Database, DatabaseConfig};
use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::AtomicBool;
fn chunk_and_restore(amount: u64) { fn chunk_and_restore(amount: u64) {
let mut canon_chain = ChainGenerator::default(); let mut canon_chain = ChainGenerator::default();
@ -58,27 +61,30 @@ fn chunk_and_restore(amount: u64) {
// snapshot it. // snapshot it.
let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap()); let writer = Mutex::new(PackedWriter::new(&snapshot_path).unwrap());
let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap(); let block_hashes = chunk_blocks(&bc, best_hash, &writer, &Progress::default()).unwrap();
writer.into_inner().finish(::snapshot::ManifestData { let manifest = ::snapshot::ManifestData {
state_hashes: Vec::new(), state_hashes: Vec::new(),
block_hashes: block_hashes, block_hashes: block_hashes,
state_root: Default::default(), state_root: ::util::sha3::SHA3_NULL_RLP,
block_number: amount, block_number: amount,
block_hash: best_hash, block_hash: best_hash,
}).unwrap(); };
writer.into_inner().finish(manifest.clone()).unwrap();
// restore it. // restore it.
let new_db = Arc::new(Database::open(&db_cfg, new_path.as_str()).unwrap()); let new_db = Arc::new(Database::open(&db_cfg, new_path.as_str()).unwrap());
let new_chain = BlockChain::new(Default::default(), &genesis, new_db.clone()); let new_chain = BlockChain::new(Default::default(), &genesis, new_db.clone());
let mut rebuilder = BlockRebuilder::new(new_chain, new_db.clone(), amount).unwrap(); let mut rebuilder = BlockRebuilder::new(new_chain, new_db.clone(), &manifest).unwrap();
let reader = PackedReader::new(&snapshot_path).unwrap().unwrap(); let reader = PackedReader::new(&snapshot_path).unwrap().unwrap();
let engine = ::engines::NullEngine::new(Default::default(), Default::default()); let engine = ::engines::NullEngine::new(Default::default(), Default::default());
let flag = AtomicBool::new(true);
for chunk_hash in &reader.manifest().block_hashes { for chunk_hash in &reader.manifest().block_hashes {
let compressed = reader.chunk(*chunk_hash).unwrap(); let compressed = reader.chunk(*chunk_hash).unwrap();
let chunk = snappy::decompress(&compressed).unwrap(); let chunk = snappy::decompress(&compressed).unwrap();
rebuilder.feed(&chunk, &engine).unwrap(); rebuilder.feed(&chunk, &engine, &flag).unwrap();
} }
rebuilder.glue_chunks(); rebuilder.finalize(HashMap::new()).unwrap();
// and test it. // and test it.
let new_chain = BlockChain::new(Default::default(), &genesis, new_db); let new_chain = BlockChain::new(Default::default(), &genesis, new_db);
@ -90,3 +96,46 @@ fn chunk_and_restore_500() { chunk_and_restore(500) }
#[test] #[test]
fn chunk_and_restore_40k() { chunk_and_restore(40000) } fn chunk_and_restore_40k() { chunk_and_restore(40000) }
#[test]
fn checks_flag() {
use ::rlp::{RlpStream, Stream};
use util::H256;
let mut stream = RlpStream::new_list(5);
stream.append(&100u64)
.append(&H256::default())
.append(&(!0u64));
stream.append_empty_data().append_empty_data();
let genesis = {
let mut canon_chain = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
canon_chain.generate(&mut finalizer).unwrap()
};
let chunk = stream.out();
let path = RandomTempPath::create_dir();
let db_cfg = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
let db = Arc::new(Database::open(&db_cfg, path.as_str()).unwrap());
let chain = BlockChain::new(Default::default(), &genesis, db.clone());
let engine = ::engines::NullEngine::new(Default::default(), Default::default());
let manifest = ::snapshot::ManifestData {
state_hashes: Vec::new(),
block_hashes: Vec::new(),
state_root: ::util::sha3::SHA3_NULL_RLP,
block_number: 102,
block_hash: H256::default(),
};
let mut rebuilder = BlockRebuilder::new(chain, db.clone(), &manifest).unwrap();
match rebuilder.feed(&chunk, &engine, &AtomicBool::new(false)) {
Err(Error::Snapshot(SnapshotError::RestorationAborted)) => {}
_ => panic!("Wrong result on abort flag set")
}
}


@ -16,10 +16,13 @@
//! State snapshotting tests. //! State snapshotting tests.
use snapshot::{chunk_state, Progress, StateRebuilder}; use snapshot::{chunk_state, Error as SnapshotError, Progress, StateRebuilder};
use snapshot::account::Account;
use snapshot::io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter}; use snapshot::io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter};
use super::helpers::{compare_dbs, StateProducer}; use super::helpers::{compare_dbs, StateProducer};
use error::Error;
use rand::{XorShiftRng, SeedableRng}; use rand::{XorShiftRng, SeedableRng};
use util::hash::H256; use util::hash::H256;
use util::journaldb::{self, Algorithm}; use util::journaldb::{self, Algorithm};
@ -28,7 +31,10 @@ use util::memorydb::MemoryDB;
use util::Mutex; use util::Mutex;
use devtools::RandomTempPath; use devtools::RandomTempPath;
use util::sha3::SHA3_NULL_RLP;
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::AtomicBool;
#[test] #[test]
fn snap_and_restore() { fn snap_and_restore() {
@ -65,11 +71,13 @@ fn snap_and_restore() {
let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::Archive); let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::Archive);
let reader = PackedReader::new(&snap_file).unwrap().unwrap(); let reader = PackedReader::new(&snap_file).unwrap().unwrap();
let flag = AtomicBool::new(true);
for chunk_hash in &reader.manifest().state_hashes { for chunk_hash in &reader.manifest().state_hashes {
let raw = reader.chunk(*chunk_hash).unwrap(); let raw = reader.chunk(*chunk_hash).unwrap();
let chunk = ::util::snappy::decompress(&raw).unwrap(); let chunk = ::util::snappy::decompress(&raw).unwrap();
rebuilder.feed(&chunk).unwrap(); rebuilder.feed(&chunk, &flag).unwrap();
} }
assert_eq!(rebuilder.state_root(), state_root); assert_eq!(rebuilder.state_root(), state_root);
@ -82,3 +90,104 @@ fn snap_and_restore() {
compare_dbs(&old_db, new_db.as_hashdb()); compare_dbs(&old_db, new_db.as_hashdb());
} }
#[test]
fn get_code_from_prev_chunk() {
use std::collections::HashSet;
use rlp::{RlpStream, Stream};
use util::{HashDB, H256, FixedHash, U256, Hashable};
use account_db::{AccountDBMut, AccountDB};
let code = b"this is definitely code";
let mut used_code = HashSet::new();
let mut acc_stream = RlpStream::new_list(4);
acc_stream.append(&U256::default())
.append(&U256::default())
.append(&SHA3_NULL_RLP)
.append(&code.sha3());
let (h1, h2) = (H256::random(), H256::random());
// two accounts with the same code, one per chunk.
// first one will have code inlined,
// second will just have its hash.
let thin_rlp = acc_stream.out();
let acc1 = Account::from_thin_rlp(&thin_rlp);
let acc2 = Account::from_thin_rlp(&thin_rlp);
let mut make_chunk = |acc: Account, hash| {
let mut db = MemoryDB::new();
AccountDBMut::from_hash(&mut db, hash).insert(&code[..]);
let fat_rlp = acc.to_fat_rlp(&AccountDB::from_hash(&db, hash), &mut used_code).unwrap();
let mut stream = RlpStream::new_list(1);
stream.begin_list(2).append(&hash).append_raw(&fat_rlp, 1);
stream.out()
};
let chunk1 = make_chunk(acc1, h1);
let chunk2 = make_chunk(acc2, h2);
let db_path = RandomTempPath::create_dir();
let db_cfg = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());
let mut rebuilder = StateRebuilder::new(new_db, Algorithm::Archive);
let flag = AtomicBool::new(true);
rebuilder.feed(&chunk1, &flag).unwrap();
rebuilder.feed(&chunk2, &flag).unwrap();
rebuilder.check_missing().unwrap();
}
#[test]
fn checks_flag() {
let mut producer = StateProducer::new();
let mut rng = XorShiftRng::from_seed([5, 6, 7, 8]);
let mut old_db = MemoryDB::new();
let db_cfg = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
for _ in 0..10 {
producer.tick(&mut rng, &mut old_db);
}
let snap_dir = RandomTempPath::create_dir();
let mut snap_file = snap_dir.as_path().to_owned();
snap_file.push("SNAP");
let state_root = producer.state_root();
let writer = Mutex::new(PackedWriter::new(&snap_file).unwrap());
let state_hashes = chunk_state(&old_db, &state_root, &writer, &Progress::default()).unwrap();
writer.into_inner().finish(::snapshot::ManifestData {
state_hashes: state_hashes,
block_hashes: Vec::new(),
state_root: state_root,
block_number: 0,
block_hash: H256::default(),
}).unwrap();
let mut db_path = snap_dir.as_path().to_owned();
db_path.push("db");
{
let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());
let mut rebuilder = StateRebuilder::new(new_db.clone(), Algorithm::Archive);
let reader = PackedReader::new(&snap_file).unwrap().unwrap();
let flag = AtomicBool::new(false);
for chunk_hash in &reader.manifest().state_hashes {
let raw = reader.chunk(*chunk_hash).unwrap();
let chunk = ::util::snappy::decompress(&raw).unwrap();
match rebuilder.feed(&chunk, &flag) {
Err(Error::Snapshot(SnapshotError::RestorationAborted)) => {},
_ => panic!("unexpected result when feeding with flag off"),
}
}
}
}

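A toy, self-contained sketch (string keys standing in for hashes) of the deduplication that get_code_from_prev_chunk exercises: contract code is stored once under the first account that carried it inline, and a later account referencing the same code hash copies it from that first account instead of failing with missing code.

use std::collections::HashMap;

fn main() {
	// code_hash -> first account that provided this code inline
	let mut known_code: HashMap<&str, &str> = HashMap::new();
	// (account, code_hash) -> code bytes, standing in for the per-account DB
	let mut db: HashMap<(&str, &str), Vec<u8>> = HashMap::new();

	// chunk 1: account "a1" carries its code inline.
	db.insert(("a1", "h"), b"this is definitely code".to_vec());
	known_code.insert("h", "a1");

	// chunk 2: account "a2" references hash "h" only; fetch the code from the
	// first account known to hold it and store it again for "a2".
	if let Some(first_with) = known_code.get("h").cloned() {
		let code = db[&(first_with, "h")].clone();
		db.insert(("a2", "h"), code);
	}

	assert_eq!(db[&("a2", "h")], b"this is definitely code".to_vec());
}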

@ -16,6 +16,7 @@
//! Watcher for snapshot-related chain events. //! Watcher for snapshot-related chain events.
use util::Mutex;
use client::{BlockChainClient, Client, ChainNotify}; use client::{BlockChainClient, Client, ChainNotify};
use ids::BlockID; use ids::BlockID;
use service::ClientIoMessage; use service::ClientIoMessage;
@ -55,7 +56,7 @@ trait Broadcast: Send + Sync {
fn take_at(&self, num: Option<u64>); fn take_at(&self, num: Option<u64>);
} }
impl Broadcast for IoChannel<ClientIoMessage> { impl Broadcast for Mutex<IoChannel<ClientIoMessage>> {
fn take_at(&self, num: Option<u64>) { fn take_at(&self, num: Option<u64>) {
let num = match num { let num = match num {
Some(n) => n, Some(n) => n,
@ -64,7 +65,7 @@ impl Broadcast for IoChannel<ClientIoMessage> {
trace!(target: "snapshot_watcher", "broadcast: {}", num); trace!(target: "snapshot_watcher", "broadcast: {}", num);
if let Err(e) = self.send(ClientIoMessage::TakeSnapshot(num)) { if let Err(e) = self.lock().send(ClientIoMessage::TakeSnapshot(num)) {
warn!("Snapshot watcher disconnected from IoService: {}", e); warn!("Snapshot watcher disconnected from IoService: {}", e);
} }
} }
@ -91,7 +92,7 @@ impl Watcher {
client: client, client: client,
sync_status: sync_status, sync_status: sync_status,
}), }),
broadcast: Box::new(channel), broadcast: Box::new(Mutex::new(channel)),
period: period, period: period,
history: history, history: history,
} }

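A minimal sketch using std mpsc (the crate's IoChannel is assumed, not shown) of the wrapper change above: the channel handle is placed behind a Mutex so the Broadcast-style trait, which only takes &self, can still lock and send on it from a shared value.

use std::sync::Mutex;
use std::sync::mpsc::{channel, Sender};

trait Broadcast {
	fn take_at(&self, num: u64);
}

impl Broadcast for Mutex<Sender<u64>> {
	fn take_at(&self, num: u64) {
		// lock for exclusive access, then send; a disconnected receiver is only logged here.
		if let Err(e) = self.lock().unwrap().send(num) {
			println!("receiver disconnected: {}", e);
		}
	}
}

fn main() {
	let (tx, rx) = channel();
	let broadcaster = Mutex::new(tx);
	broadcaster.take_at(42);
	assert_eq!(rx.recv().unwrap(), 42);
}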

@@ -38,7 +38,7 @@ pub struct CommonParams
 	/// Maximum size of extra data.
 	pub maximum_extra_data_size: usize,
 	/// Network id.
-	pub network_id: U256,
+	pub network_id: usize,
 	/// Main subprotocol name.
 	pub subprotocol_name: String,
 	/// Minimum gas limit.
@@ -135,6 +135,12 @@ impl From<ethjson::spec::Spec> for Spec {
 	}
 }

+macro_rules! load_bundled {
+	($e:expr) => {
+		Spec::load(include_bytes!(concat!("../../res/", $e, ".json")) as &[u8]).expect(concat!("Chain spec ", $e, " is invalid."))
+	};
+}
+
 impl Spec {
 	/// Convert engine spec into a arc'd Engine of the right underlying type.
 	/// TODO avoid this hard-coded nastiness - use dynamic-linked plugin framework instead.
@@ -160,9 +166,9 @@ impl Spec {
 	pub fn nodes(&self) -> &[String] { &self.nodes }

 	/// Get the configured Network ID.
-	pub fn network_id(&self) -> U256 { self.params.network_id }
+	pub fn network_id(&self) -> usize { self.params.network_id }

-	/// Get the configured Network ID.
+	/// Get the configured subprotocol name.
 	pub fn subprotocol_name(&self) -> String { self.params.subprotocol_name.clone() }

 	/// Get the configured network fork block.
@@ -250,7 +256,7 @@ impl Spec {
 		}
 		trace!(target: "spec", "ensure_db_good: Populated sec trie; root is {}", root);
 		for (address, account) in self.genesis_state.get().iter() {
-			db.note_account_bloom(address);
+			db.note_non_null_account(address);
 			account.insert_additional(&mut AccountDBMut::new(db.as_hashdb_mut(), address));
 		}
 		assert!(db.as_hashdb().contains(&self.state_root()));
@@ -267,19 +273,13 @@ impl Spec {
 	}

 	/// Create a new Spec which conforms to the Frontier-era Morden chain except that it's a NullEngine consensus.
-	pub fn new_test() -> Self {
-		Spec::load(include_bytes!("../../res/null_morden.json") as &[u8]).expect("null_morden.json is invalid")
-	}
+	pub fn new_test() -> Spec { load_bundled!("null_morden") }

 	/// Create a new Spec which is a NullEngine consensus with a premine of address whose secret is sha3('').
-	pub fn new_null() -> Self {
-		Spec::load(include_bytes!("../../res/null.json") as &[u8]).expect("null.json is invalid")
-	}
+	pub fn new_null() -> Spec { load_bundled!("null") }

 	/// Create a new Spec with InstantSeal consensus which does internal sealing (not requiring work).
-	pub fn new_test_instant() -> Self {
-		Spec::load(include_bytes!("../../res/instant_seal.json") as &[u8]).expect("instant_seal.json is invalid")
-	}
+	pub fn new_instant() -> Spec { load_bundled!("instant_seal") }
 }

 #[cfg(test)]
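The load_bundled! macro added above only deduplicates the include_bytes!/Spec::load/expect chain that each constructor previously spelled out by hand; load_bundled!("null") expands to essentially the old body of new_null(), with the error message now derived from the spec name. Below is a stand-alone sketch of the same trick. Config, its load function, and the res/ path are stand-ins for illustration, and the bundled file has to exist at compile time for include_bytes! to succeed.

use std::str;

// Stand-in for Spec: something parsed out of an embedded byte blob.
struct Config {
    text: String,
}

impl Config {
    // Stand-in for Spec::load; the real one deserialises a JSON chain spec.
    fn load(bytes: &[u8]) -> Result<Config, String> {
        str::from_utf8(bytes)
            .map(|s| Config { text: s.to_owned() })
            .map_err(|e| e.to_string())
    }
}

// Same shape as the diff's macro: build the embedded path and the error message
// from the name at compile time. concat! only accepts literals, so callers must
// pass a string literal ("null_morden", "instant_seal", ...), which is all the
// call sites in the diff ever do.
macro_rules! load_bundled {
    ($e:expr) => {
        Config::load(include_bytes!(concat!("res/", $e, ".txt")) as &[u8])
            .expect(concat!("Bundled config ", $e, " is invalid."))
    };
}

fn main() {
    // Expands to:
    //   Config::load(include_bytes!("res/example.txt") as &[u8])
    //       .expect("Bundled config example is invalid.")
    // and therefore requires res/example.txt to exist next to this source file.
    let cfg = load_bundled!("example");
    println!("loaded {} bytes of config", cfg.text.len());
}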


@@ -247,23 +247,34 @@ impl Account {
 	}

 	/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
-	pub fn cache_code(&mut self, db: &HashDB) -> bool {
+	pub fn cache_code(&mut self, db: &HashDB) -> Option<Arc<Bytes>> {
 		// TODO: fill out self.code_cache;
 		trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());

-		self.is_cached() ||
+		if self.is_cached() { return Some(self.code_cache.clone()) }
+
 		match db.get(&self.code_hash) {
 			Some(x) => {
 				self.code_size = Some(x.len());
 				self.code_cache = Arc::new(x.to_vec());
-				true
+				Some(self.code_cache.clone())
 			},
 			_ => {
 				warn!("Failed reverse get of {}", self.code_hash);
-				false
+				None
 			},
 		}
 	}

+	/// Provide code to cache. For correctness, should be the correct code for the
+	/// account.
+	pub fn cache_given_code(&mut self, code: Arc<Bytes>) {
+		trace!("Account::cache_given_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
+
+		self.code_size = Some(code.len());
+		self.code_cache = code;
+	}
+
 	/// Provide a database to get `code_size`. Should not be called if it is a contract without code.
 	pub fn cache_code_size(&mut self, db: &HashDB) -> bool {
 		// TODO: fill out self.code_cache;
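Returning Option<Arc<Bytes>> from cache_code instead of a bool gives the caller a cheap, shareable handle to the code on success, and cache_given_code lets a caller that already holds such a handle (for instance from another cached copy of the same account) install it without a second database lookup. Below is a cut-down, self-contained model of that interplay; the Account and db types are simplified stand-ins, not the real ones from this file.

use std::collections::HashMap;
use std::sync::Arc;

type Bytes = Vec<u8>;
type CodeHash = u64; // toy stand-in for the real H256 code hash

// Only the fields the code cache touches.
struct Account {
    code_hash: CodeHash,
    code_size: Option<usize>,
    code_cache: Arc<Bytes>,
}

impl Account {
    fn is_cached(&self) -> bool { !self.code_cache.is_empty() }

    // Same shape as the new cache_code: Some(code) on success, None when the
    // backing store has no entry for code_hash.
    fn cache_code(&mut self, db: &HashMap<CodeHash, Bytes>) -> Option<Arc<Bytes>> {
        if self.is_cached() { return Some(self.code_cache.clone()) }
        match db.get(&self.code_hash) {
            Some(x) => {
                self.code_size = Some(x.len());
                self.code_cache = Arc::new(x.clone());
                Some(self.code_cache.clone())
            }
            None => None,
        }
    }

    // Same shape as the new cache_given_code: trust the caller's copy.
    fn cache_given_code(&mut self, code: Arc<Bytes>) {
        self.code_size = Some(code.len());
        self.code_cache = code;
    }
}

fn main() {
    let mut db = HashMap::new();
    db.insert(1u64, vec![0x60, 0x00]);

    // The first account pays for the database lookup...
    let mut a = Account { code_hash: 1, code_size: None, code_cache: Arc::new(Vec::new()) };
    let code = a.cache_code(&db).expect("code present in db");

    // ...and a second copy of the same account reuses the Arc without touching
    // the database at all, which a bare bool return could not have enabled.
    let mut b = Account { code_hash: 1, code_size: None, code_cache: Arc::new(Vec::new()) };
    b.cache_given_code(code.clone());
    assert_eq!(Arc::strong_count(&code), 3); // shared by a, b and `code`
}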
@@ -289,11 +300,17 @@ impl Account {
 	pub fn storage_is_clean(&self) -> bool { self.storage_changes.is_empty() }

 	/// Check if account has zero nonce, balance, no code and no storage.
+	///
+	/// NOTE: Will panic if `!self.storage_is_clean()`
 	pub fn is_empty(&self) -> bool {
-		self.storage_changes.is_empty() &&
+		assert!(self.storage_is_clean(), "Account::is_empty() may only legally be called when storage is clean.");
+		self.is_null() && self.storage_root == SHA3_NULL_RLP
+	}
+
+	/// Check if account has zero nonce, balance, no code.
+	pub fn is_null(&self) -> bool {
 		self.balance.is_zero() &&
 		self.nonce.is_zero() &&
-		self.storage_root == SHA3_NULL_RLP &&
 		self.code_hash == SHA3_EMPTY
 	}
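The is_empty/is_null split above also tightens the contract: is_null is the cheap field check (zero balance, zero nonce, no code) and is always safe to call, while is_empty additionally consults the storage root and now asserts that no storage changes are pending, presumably because the cached root is stale until those changes are committed. A small illustration of that precondition with toy types (not the real Account or its hash constants):

use std::collections::HashMap;

const EMPTY_STORAGE_ROOT: u64 = 0; // stands in for SHA3_NULL_RLP
const EMPTY_CODE_HASH: u64 = 0;    // stands in for SHA3_EMPTY

struct Account {
    balance: u64,
    nonce: u64,
    storage_root: u64,
    code_hash: u64,
    storage_changes: HashMap<u64, u64>,
}

impl Account {
    fn storage_is_clean(&self) -> bool { self.storage_changes.is_empty() }

    // Cheap field check only; storage is deliberately not consulted.
    fn is_null(&self) -> bool {
        self.balance == 0 && self.nonce == 0 && self.code_hash == EMPTY_CODE_HASH
    }

    // Null *and* empty storage; only meaningful once pending changes have been
    // committed, hence the assert mirroring the diff's precondition.
    fn is_empty(&self) -> bool {
        assert!(self.storage_is_clean(), "is_empty() requires clean storage");
        self.is_null() && self.storage_root == EMPTY_STORAGE_ROOT
    }
}

fn main() {
    let mut acc = Account {
        balance: 0,
        nonce: 0,
        storage_root: EMPTY_STORAGE_ROOT,
        code_hash: EMPTY_CODE_HASH,
        storage_changes: HashMap::new(),
    };
    assert!(acc.is_empty());

    // With a pending storage write the question is ill-posed: commit first,
    // or ask is_null() if storage does not matter. is_empty() would panic here.
    acc.storage_changes.insert(1, 7);
    assert!(acc.is_null());
}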
@@ -413,7 +430,7 @@ impl Account {
 		self.code_size = other.code_size;
 		self.address_hash = other.address_hash;
 		let mut cache = self.storage_cache.borrow_mut();
-		for (k, v) in other.storage_cache.into_inner().into_iter() {
+		for (k, v) in other.storage_cache.into_inner() {
 			cache.insert(k.clone() , v.clone()); //TODO: cloning should not be required here
 		}
 		self.storage_changes = other.storage_changes;
@@ -476,7 +493,7 @@ mod tests {
 		};

 		let mut a = Account::from_rlp(&rlp);
-		assert!(a.cache_code(&db.immutable()));
+		assert!(a.cache_code(&db.immutable()).is_some());

 		let mut a = Account::from_rlp(&rlp);
 		assert_eq!(a.note_code(vec![0x55, 0x44, 0xffu8]), Ok(()));

Some files were not shown because too many files have changed in this diff.