Merge remote-tracking branch 'parity/master'

keorn 2016-10-26 16:53:30 +01:00
commit 4ee669b77e
1350 changed files with 73751 additions and 5862 deletions

.gitlab-ci.yml

@@ -5,6 +5,7 @@ variables:
 GIT_DEPTH: "3"
 SIMPLECOV: "true"
 RUST_BACKTRACE: "1"
+RUSTFLAGS: "-D warnings"
 cache:
 key: "$CI_BUILD_NAME/$CI_BUILD_REF_NAME"
 untracked: true
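The new RUSTFLAGS value makes rustc treat every warning as a hard error, so a build that previously only printed warnings now fails the job. A minimal sketch of reproducing the same behaviour locally (any cargo invocation that compiles code picks the flag up):

  # -D warnings promotes all warnings to errors for every crate built in this shell
  export RUSTFLAGS="-D warnings"
  cargo build --release --verbose   # exits non-zero if any warning is emitted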
@@ -19,9 +20,18 @@ linux-stable:
 script:
 - cargo build --release --verbose
 - strip target/release/parity
+- md5sum target/release/parity >> parity.md5
+- sh scripts/deb-build.sh amd64
+- cp target/release/parity deb/usr/bin/parity
+- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
+- md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
 - aws configure set aws_access_key_id $s3_key
 - aws configure set aws_secret_access_key $s3_secret
 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity --body target/release/parity
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
 tags:
 - rust
 - rust-stable
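The release steps added above follow one pattern that repeats, with only the architecture string and the S3 key prefix changed, in the other Linux build jobs below (the CentOS and ARMv6 jobs upload only the binary checksum). A rough standalone sketch of that flow, assuming scripts/deb-build.sh prepares a Debian control tree under ./deb for the given architecture:

  strip target/release/parity
  md5sum target/release/parity >> parity.md5                 # checksum of the bare binary
  sh scripts/deb-build.sh amd64                              # lay out the deb/ packaging skeleton
  cp target/release/parity deb/usr/bin/parity
  # crate version taken from Cargo.toml, e.g. 1.4.0
  VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
  dpkg-deb -b deb "parity_${VER}_amd64.deb"                  # build the .deb
  md5sum "parity_${VER}_amd64.deb" >> "parity_${VER}_amd64.deb.md5"
  # push binary, package and checksums to the per-branch prefix in the builds-parity bucket
  aws s3api put-object --bucket builds-parity \
    --key "$CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity_${VER}_amd64.deb" \
    --body "parity_${VER}_amd64.deb"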
@@ -40,9 +50,18 @@ linux-stable-14.04:
 script:
 - cargo build --release --verbose
 - strip target/release/parity
+- md5sum target/release/parity >> parity.md5
+- sh scripts/deb-build.sh amd64
+- cp target/release/parity deb/usr/bin/parity
+- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
+- md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
 - aws configure set aws_access_key_id $s3_key
 - aws configure set aws_secret_access_key $s3_secret
 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity --body target/release/parity
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity.md5 --body parity.md5
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
 tags:
 - rust
 - rust-14.04
@@ -101,9 +120,11 @@ linux-centos:
 - export CC="gcc"
 - cargo build --release --verbose
 - strip target/release/parity
+- md5sum target/release/parity >> parity.md5
 - aws configure set aws_access_key_id $s3_key
 - aws configure set aws_secret_access_key $s3_secret
 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity.md5 --body parity.md5
 tags:
 - rust
 - rust-centos
@@ -115,7 +136,6 @@ linux-armv7:
 stage: build
 image: ethcore/rust-armv7:latest
 only:
-- master
 - beta
 - tags
 - stable
@@ -127,9 +147,18 @@ linux-armv7:
 - cat .cargo/config
 - cargo build --target armv7-unknown-linux-gnueabihf --release --verbose
 - arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity
+- md5sum target/armv7-unknown-linux-gnueabihf/release/parity >> parity.md5
+- sh scripts/deb-build.sh armhf
+- cp target/armv7-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity
+- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
+- md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5"
 - aws configure set aws_access_key_id $s3_key
 - aws configure set aws_secret_access_key $s3_secret
 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity --body target/armv7-unknown-linux-gnueabihf/release/parity
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity.md5 --body parity.md5
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
 tags:
 - rust
 - rust-arm
@@ -142,7 +171,6 @@ linux-arm:
 stage: build
 image: ethcore/rust-arm:latest
 only:
-- master
 - beta
 - tags
 - stable
@@ -154,9 +182,18 @@ linux-arm:
 - cat .cargo/config
 - cargo build --target arm-unknown-linux-gnueabihf --release --verbose
 - arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity
+- md5sum target/arm-unknown-linux-gnueabihf/release/parity >> parity.md5
+- sh scripts/deb-build.sh armhf
+- cp target/arm-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity
+- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
+- md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5"
 - aws configure set aws_access_key_id $s3_key
 - aws configure set aws_secret_access_key $s3_secret
 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity --body target/arm-unknown-linux-gnueabihf/release/parity
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity.md5 --body parity.md5
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
 tags:
 - rust
 - rust-arm
@@ -169,7 +206,6 @@ linux-armv6:
 stage: build
 image: ethcore/rust-armv6:latest
 only:
-- master
 - beta
 - tags
 - stable
@@ -181,9 +217,11 @@ linux-armv6:
 - cat .cargo/config
 - cargo build --target arm-unknown-linux-gnueabi --release --verbose
 - arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity
+- md5sum target/arm-unknown-linux-gnueabi/release/parity >> parity.md5
 - aws configure set aws_access_key_id $s3_key
 - aws configure set aws_secret_access_key $s3_secret
 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity --body target/arm-unknown-linux-gnueabi/release/parity
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity.md5 --body parity.md5
 tags:
 - rust
 - rust-arm
@@ -196,7 +234,6 @@ linux-aarch64:
 stage: build
 image: ethcore/rust-aarch64:latest
 only:
-- master
 - beta
 - tags
 - stable
@@ -208,9 +245,18 @@ linux-aarch64:
 - cat .cargo/config
 - cargo build --target aarch64-unknown-linux-gnu --release --verbose
 - aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity
+- md5sum target/aarch64-unknown-linux-gnu/release/parity >> parity.md5
+- sh scripts/deb-build.sh arm64
+- cp target/aarch64-unknown-linux-gnu/release/parity deb/usr/bin/parity
+- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+- dpkg-deb -b deb "parity_"$VER"_arm64.deb"
+- md5sum "parity_"$VER"_arm64.deb" >> "parity_"$VER"_arm64.deb.md5"
 - aws configure set aws_access_key_id $s3_key
 - aws configure set aws_secret_access_key $s3_secret
 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity --body target/aarch64-unknown-linux-gnu/release/parity
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity.md5 --body parity.md5
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb"
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5"
 tags:
 - rust
 - rust-arm
@@ -228,9 +274,11 @@ darwin:
 - stable
 script:
 - cargo build --release --verbose
+- md5sum target/release/parity >> parity.md5
 - aws configure set aws_access_key_id $s3_key
 - aws configure set aws_secret_access_key $s3_secret
 - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-apple-darwin/parity --body target/release/parity
+- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-apple-darwin/parity.md5 --body parity.md5
 tags:
 - osx
 artifacts:
@@ -248,26 +296,93 @@ windows:
 - set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt
 - set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
 - set RUST_BACKTRACE=1
+- set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off -D warnings
 - rustup default stable-x86_64-pc-windows-msvc
 - cargo build --release --verbose
+- curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
+- curl -sL --url "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe
+- signtool sign /f %keyfile% /p %certpass% target\release\parity.exe
+- cd nsis
+- makensis.exe installer.nsi
+- copy installer.exe InstallParity.exe
+- signtool sign /f %keyfile% /p %certpass% InstallParity.exe
+- md5sums InstallParity.exe > InstallParity.exe.md5
+- zip win-installer.zip InstallParity.exe InstallParity.exe.md5
+- md5sums win-installer.zip > win-installer.zip.md5
+- cd ..\target\release\
+- md5sums parity.exe parity.pdb > parity.md5
+- md5sums parity.exe > parity.exe.md5
+- zip parity.zip parity.exe parity.pdb parity.md5
+- md5sums parity.zip > parity.zip.md5
+- cd ..\..
 - aws configure set aws_access_key_id %s3_key%
 - aws configure set aws_secret_access_key %s3_secret%
-- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity --body target/release/parity.exe
-- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity --body target/release/parity.pdb
+- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.exe --body target\release\parity.exe
+- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.exe.md5 --body target\release\parity.exe.md5
+- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.zip --body target\release\parity.zip
+- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/parity.zip.md5 --body target\release\parity.zip.md5
+- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe --body nsis\InstallParity.exe
+- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/InstallParity.exe.md5 --body nsis\InstallParity.exe.md5
+- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip --body nsis\win-installer.zip
+- aws s3api put-object --bucket builds-parity --key %CI_BUILD_REF_NAME%/x86_64-pc-windows-msvc/win-installer.zip.md5 --body nsis\win-installer.zip.md5
 tags:
 - rust-windows
 artifacts:
 paths:
 - target/release/parity.exe
 - target/release/parity.pdb
+- nsis/InstallParity.exe
 name: "x86_64-pc-windows-msvc_parity"
 test-linux:
 stage: test
 before_script:
 - git submodule update --init --recursive
 script:
+- export RUST_BACKTRACE=1
 - ./test.sh --verbose
 tags:
 - rust-test
 dependencies:
 - linux-stable
+js-release:
+stage: build
+image: ethcore/javascript:latest
+only:
+- master
+- beta
+- tags
+- stable
+before_script:
+- ./js/scripts/install-deps.sh
+script:
+- ./js/scripts/build.sh
+- ./js/scripts/release.sh
+tags:
+- javascript
+js-lint:
+stage: test
+image: ethcore/javascript:latest
+before_script:
+- ./js/scripts/install-deps.sh
+script:
+- ./js/scripts/lint.sh
+tags:
+- javascript-test
+js-test:
+stage: test
+image: ethcore/javascript:latest
+before_script:
+- ./js/scripts/install-deps.sh
+script:
+- ./js/scripts/test.sh
+tags:
+- javascript-test
+js-pack:
+stage: test
+image: ethcore/javascript:latest
+before_script:
+- ./js/scripts/install-deps.sh
+script:
+- ./js/scripts/build.sh
+tags:
+- javascript-test

.travis.yml

@@ -31,6 +31,7 @@ env:
 - RUN_COVERAGE="false"
 - RUN_DOCS="false"
 - TEST_OPTIONS=""
+- RUSTFLAGS="-D warnings"
 # GH_TOKEN for documentation
 - secure: bumJASbZSU8bxJ0EyPUJmu16AiV9EXOpyOj86Jlq/Ty9CfwGqsSXt96uDyE+OUJf34RUFQMsw0nk37/zC4lcn6kqk2wpuH3N/o85Zo/cVZY/NusBWLQqtT5VbYWsV+u2Ua4Tmmsw8yVYQhYwU2ZOejNpflL+Cs9XGgORp1L+/gMRMC2y5Se6ZhwnKPQlRJ8LGsG1dzjQULxzADIt3/zuspNBS8a2urJwlHfGMkvHDoUWCviP/GXoSqw3TZR7FmKyxE19I8n9+iSvm9+oZZquvcgfUxMHn8Gq/b44UbPvjtFOg2yam4xdWXF/RyWCHdc/R9EHorSABeCbefIsm+zcUF3/YQxwpSxM4IZEeH2rTiC7dcrsKw3XsO16xFQz5YI5Bay+CT/wTdMmJd7DdYz7Dyf+pOvcM9WOf/zorxYWSBOMYy0uzbusU2iyIghQ82s7E/Ahg+WARtPgkuTLSB5aL1oCTBKHqQscMr7lo5Ti6RpWLxEdTQMBznc+bMr+6dEtkEcG9zqc6cE9XX+ox3wTU6+HVMfQ1ltCntJ4UKcw3A6INEbw9wgocQa812CIASQ2fE+SCAbz6JxBjIAlFUnD1lUB7S8PdMPwn9plfQgKQ2A5YZqg6FnBdf0rQXIJYxQWKHXj/rBHSUCT0tHACDlzTA+EwWggvkP5AGIxRxm8jhw=
 - KCOV_CMD="./kcov-master/tmp/usr/local/bin/kcov"

Cargo.lock (generated)

@ -120,6 +120,11 @@ name = "bloomchain"
version = "0.1.0" version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "byteorder"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "bytes" name = "bytes"
version = "0.3.0" version = "0.3.0"
@ -217,8 +222,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "elastic-array" name = "elastic-array"
version = "0.5.0" version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/ethcore/elastic-array#70e4012e691b732c7c4cb04e9232799e6aa268bc"
dependencies = [
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "env_logger" name = "env_logger"
@ -235,7 +243,7 @@ version = "0.5.4"
source = "git+https://github.com/ethcore/rust-secp256k1#a9a0b1be1f39560ca86e8fc8e55e205a753ff25c" source = "git+https://github.com/ethcore/rust-secp256k1#a9a0b1be1f39560ca86e8fc8e55e205a753ff25c"
dependencies = [ dependencies = [
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -259,7 +267,7 @@ name = "ethash"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"sha3 0.1.0", "sha3 0.1.0",
] ]
@ -270,10 +278,12 @@ version = "1.4.0"
dependencies = [ dependencies = [
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.4.0", "ethash 1.4.0",
"ethcore-bloom-journal 0.1.0",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-io 1.4.0", "ethcore-io 1.4.0",
"ethcore-ipc 1.4.0", "ethcore-ipc 1.4.0",
@ -288,6 +298,7 @@ dependencies = [
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)", "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lru-cache 0.0.7 (git+https://github.com/contain-rs/lru-cache)",
"num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -300,7 +311,7 @@ dependencies = [
[[package]] [[package]]
name = "ethcore-bigint" name = "ethcore-bigint"
version = "0.1.0" version = "0.1.1"
dependencies = [ dependencies = [
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
@ -308,26 +319,33 @@ dependencies = [
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "ethcore-bloom-journal"
version = "0.1.0"
dependencies = [
"siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "ethcore-dapps" name = "ethcore-dapps"
version = "1.4.0" version = "1.4.0"
dependencies = [ dependencies = [
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"ethcore-rpc 1.4.0", "ethcore-rpc 1.4.0",
"ethcore-util 1.4.0", "ethcore-util 1.4.0",
"https-fetch 0.1.0", "fetch 0.1.0",
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)", "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-http-server 6.1.0 (git+https://github.com/ethcore/jsonrpc-http-server.git)", "jsonrpc-http-server 6.1.1 (git+https://github.com/ethcore/jsonrpc-http-server.git)",
"linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"mime_guess 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-dapps-home 1.4.0 (git+https://github.com/ethcore/parity-ui.git)", "parity-ui 1.4.0",
"parity-dapps-status 1.4.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps-wallet 1.4.0 (git+https://github.com/ethcore/parity-ui.git)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -352,7 +370,7 @@ dependencies = [
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)", "mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -387,6 +405,7 @@ dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)", "nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -440,7 +459,7 @@ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)", "mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0", "rlp 0.1.0",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
@ -466,9 +485,10 @@ dependencies = [
"ethkey 0.2.0", "ethkey 0.2.0",
"ethstore 0.1.0", "ethstore 0.1.0",
"ethsync 1.4.0", "ethsync 1.4.0",
"fetch 0.1.0",
"json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)", "json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-http-server 6.1.0 (git+https://github.com/ethcore/jsonrpc-http-server.git)", "jsonrpc-http-server 6.1.1 (git+https://github.com/ethcore/jsonrpc-http-server.git)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0", "rlp 0.1.0",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -491,7 +511,8 @@ dependencies = [
"ethcore-util 1.4.0", "ethcore-util 1.4.0",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-dapps-signer 1.4.0 (git+https://github.com/ethcore/parity-ui.git)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-ui 1.4.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)", "ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
@ -522,18 +543,20 @@ dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)", "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)", "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
"ethcore-bigint 0.1.0", "ethcore-bigint 0.1.1",
"ethcore-bloom-journal 0.1.0",
"ethcore-devtools 1.4.0", "ethcore-devtools 1.4.0",
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0", "rlp 0.1.0",
"rocksdb 0.4.5 (git+https://github.com/ethcore/rust-rocksdb)", "rocksdb 0.4.5 (git+https://github.com/ethcore/rust-rocksdb)",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
@ -553,7 +576,7 @@ name = "ethcrypto"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)", "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
"ethcore-bigint 0.1.0", "ethcore-bigint 0.1.1",
"ethkey 0.2.0", "ethkey 0.2.0",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -576,7 +599,7 @@ version = "0.2.0"
dependencies = [ dependencies = [
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)", "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
"ethcore-bigint 0.1.0", "ethcore-bigint 0.1.1",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -593,6 +616,7 @@ dependencies = [
"itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -618,7 +642,7 @@ dependencies = [
"ethcore-util 1.4.0", "ethcore-util 1.4.0",
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0", "rlp 0.1.0",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -639,6 +663,16 @@ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "fetch"
version = "0.1.0"
dependencies = [
"https-fetch 0.1.0",
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "flate2" name = "flate2"
version = "0.2.14" version = "0.2.14"
@ -650,8 +684,11 @@ dependencies = [
[[package]] [[package]]
name = "gcc" name = "gcc"
version = "0.3.28" version = "0.3.35"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "glob" name = "glob"
@ -690,7 +727,7 @@ version = "0.1.0"
dependencies = [ dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)", "mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"rustls 0.1.1 (git+https://github.com/ctz/rustls)", "rustls 0.1.2 (git+https://github.com/ctz/rustls)",
] ]
[[package]] [[package]]
@ -779,7 +816,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "json-ipc-server" name = "json-ipc-server"
version = "0.2.4" version = "0.2.4"
source = "git+https://github.com/ethcore/json-ipc-server.git#5fbd0253750d3097b9a8fb27effa84c18d630bbb" source = "git+https://github.com/ethcore/json-ipc-server.git#4642cd03ec1d23db89df80d22d5a88e7364ab885"
dependencies = [ dependencies = [
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -811,7 +848,7 @@ version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_codegen 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde_codegen 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -819,8 +856,8 @@ dependencies = [
[[package]] [[package]]
name = "jsonrpc-http-server" name = "jsonrpc-http-server"
version = "6.1.0" version = "6.1.1"
source = "git+https://github.com/ethcore/jsonrpc-http-server.git#2766c6708f66f6f4e667211461d220b49c0d9fdf" source = "git+https://github.com/ethcore/jsonrpc-http-server.git#cd6d4cb37d672cc3057aecd0692876f9e85f3ba5"
dependencies = [ dependencies = [
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)", "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -852,6 +889,11 @@ name = "libc"
version = "0.2.15" version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "linked-hash-map"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "linked-hash-map" name = "linked-hash-map"
version = "0.3.0" version = "0.3.0"
@ -862,6 +904,14 @@ name = "log"
version = "0.3.6" version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lru-cache"
version = "0.0.7"
source = "git+https://github.com/contain-rs/lru-cache#13255e33c45ceb69a4b143f235a4322df5fb580e"
dependencies = [
"linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "matches" name = "matches"
version = "0.1.2" version = "0.1.2"
@ -899,7 +949,7 @@ name = "miniz-sys"
version = "0.1.7" version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -985,7 +1035,7 @@ name = "nanomsg-sys"
version = "0.5.0" version = "0.5.0"
source = "git+https://github.com/ethcore/nanomsg.rs.git#c40fe442c9afaea5b38009a3d992ca044dcceb00" source = "git+https://github.com/ethcore/nanomsg.rs.git#c40fe442c9afaea5b38009a3d992ca044dcceb00"
dependencies = [ dependencies = [
"gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1124,9 +1174,14 @@ version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "parity-dapps" name = "owning_ref"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "parity-dapps-glue"
version = "1.4.0" version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"aster 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)", "aster 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1138,40 +1193,32 @@ dependencies = [
] ]
[[package]] [[package]]
name = "parity-dapps-home" name = "parity-ui"
version = "1.4.0" version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc"
dependencies = [ dependencies = [
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)", "parity-ui-dev 1.4.0",
"parity-ui-precompiled 1.4.0 (git+https://github.com/ethcore/js-precompiled.git)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "parity-dapps-signer" name = "parity-ui-dev"
version = "1.4.0" version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc"
dependencies = [ dependencies = [
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "parity-dapps-status" name = "parity-ui-precompiled"
version = "1.4.0" version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc" source = "git+https://github.com/ethcore/js-precompiled.git#9f8baa9d0e54056c41a842b351597d0565beda98"
dependencies = [ dependencies = [
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parity-dapps-wallet"
version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc"
dependencies = [
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)",
] ]
[[package]] [[package]]
name = "parking_lot" name = "parking_lot"
version = "0.2.6" version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1180,6 +1227,27 @@ dependencies = [
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "parking_lot"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"owning_ref 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot_core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parking_lot_core"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "phf" name = "phf"
version = "0.7.14" version = "0.7.14"
@ -1345,7 +1413,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "ring" name = "ring"
version = "0.3.1" version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1356,8 +1424,8 @@ dependencies = [
name = "rlp" name = "rlp"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
"ethcore-bigint 0.1.0", "ethcore-bigint 0.1.1",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1365,7 +1433,7 @@ dependencies = [
[[package]] [[package]]
name = "rocksdb" name = "rocksdb"
version = "0.4.5" version = "0.4.5"
source = "git+https://github.com/ethcore/rust-rocksdb#485dd747a2c9a9f910fc8ac696fc9edf5fa22aa3" source = "git+https://github.com/ethcore/rust-rocksdb#64c63ccbe1f62c2e2b39262486f9ba813793af58"
dependencies = [ dependencies = [
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rocksdb-sys 0.3.0 (git+https://github.com/ethcore/rust-rocksdb)", "rocksdb-sys 0.3.0 (git+https://github.com/ethcore/rust-rocksdb)",
@ -1374,9 +1442,9 @@ dependencies = [
[[package]] [[package]]
name = "rocksdb-sys" name = "rocksdb-sys"
version = "0.3.0" version = "0.3.0"
source = "git+https://github.com/ethcore/rust-rocksdb#485dd747a2c9a9f910fc8ac696fc9edf5fa22aa3" source = "git+https://github.com/ethcore/rust-rocksdb#64c63ccbe1f62c2e2b39262486f9ba813793af58"
dependencies = [ dependencies = [
"gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1407,7 +1475,7 @@ name = "rust-crypto"
version = "0.2.36" version = "0.2.36"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1429,15 +1497,15 @@ dependencies = [
[[package]] [[package]]
name = "rustls" name = "rustls"
version = "0.1.1" version = "0.1.2"
source = "git+https://github.com/ctz/rustls#a9c5a79f49337e22ac05bb1ea114240bdbe0fdd2" source = "git+https://github.com/ctz/rustls#3d2db624997004b7b18ba4463d6081f37598b2f5"
dependencies = [ dependencies = [
"base64 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "base64 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"ring 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "ring 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"untrusted 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "untrusted 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"webpki 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "webpki 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -1500,9 +1568,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
name = "sha3" name = "sha3"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "siphasher"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "slab" name = "slab"
version = "0.1.3" version = "0.1.3"
@ -1788,10 +1861,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "webpki" name = "webpki"
version = "0.2.2" version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"ring 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "ring 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
"untrusted 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "untrusted 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1810,7 +1883,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "ws" name = "ws"
version = "0.5.2" version = "0.5.2"
source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#afbff59776ce16ccec5ee9e218b8891830ee6fdf" source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#00bd2134b07b4bc8ea47b7f6c7afce16bbe34c8f"
dependencies = [ dependencies = [
"bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)", "bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)",
"httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1871,6 +1944,7 @@ dependencies = [
"checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23" "checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23"
"checksum blastfig 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "09640e0509d97d5cdff03a9f5daf087a8e04c735c3b113a75139634a19cfc7b2" "checksum blastfig 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "09640e0509d97d5cdff03a9f5daf087a8e04c735c3b113a75139634a19cfc7b2"
"checksum bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f421095d2a76fc24cd3fb3f912b90df06be7689912b1bdb423caefae59c258d" "checksum bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f421095d2a76fc24cd3fb3f912b90df06be7689912b1bdb423caefae59c258d"
"checksum byteorder 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0fc10e8cc6b2580fda3f36eb6dc5316657f812a3df879a44a66fc9f0fdbc4855"
"checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27" "checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27"
"checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>" "checksum bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)" = "<none>"
"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c" "checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
@ -1883,12 +1957,12 @@ dependencies = [
"checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf" "checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
"checksum docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc0acb4ce0828c6a5a11d47baa432fe885881c27428c3a4e473e454ffe57a76" "checksum docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc0acb4ce0828c6a5a11d47baa432fe885881c27428c3a4e473e454ffe57a76"
"checksum dtoa 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0dd841b58510c9618291ffa448da2e4e0f699d984d436122372f446dae62263d" "checksum dtoa 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0dd841b58510c9618291ffa448da2e4e0f699d984d436122372f446dae62263d"
"checksum elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4bc9250a632e7c001b741eb0ec6cee93c9a5b6d5f1879696a4b94d62b012210a" "checksum elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)" = "<none>"
"checksum env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "aba65b63ffcc17ffacd6cf5aa843da7c5a25e3bd4bbe0b7def8b214e411250e5" "checksum env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "aba65b63ffcc17ffacd6cf5aa843da7c5a25e3bd4bbe0b7def8b214e411250e5"
"checksum eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)" = "<none>" "checksum eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)" = "<none>"
"checksum ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0c53453517f620847be51943db329276ae52f2e210cfc659e81182864be2f" "checksum ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0c53453517f620847be51943db329276ae52f2e210cfc659e81182864be2f"
"checksum flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "3eeb481e957304178d2e782f2da1257f1434dfecbae883bafb61ada2a9fea3bb" "checksum flate2 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "3eeb481e957304178d2e782f2da1257f1434dfecbae883bafb61ada2a9fea3bb"
"checksum gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "3da3a2cbaeb01363c8e3704fd9fd0eb2ceb17c6f27abd4c1ef040fb57d20dc79" "checksum gcc 0.3.35 (registry+https://github.com/rust-lang/crates.io-index)" = "91ecd03771effb0c968fd6950b37e89476a578aaf1c70297d8e92b6516ec3312"
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
"checksum hamming 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65043da274378d68241eb9a8f8f8aa54e349136f7b8e12f63e3ef44043cc30e1" "checksum hamming 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65043da274378d68241eb9a8f8f8aa54e349136f7b8e12f63e3ef44043cc30e1"
"checksum heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "abb306abb8d398e053cfb1b3e7b72c2f580be048b85745c52652954f8ad1439c" "checksum heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "abb306abb8d398e053cfb1b3e7b72c2f580be048b85745c52652954f8ad1439c"
@ -1903,14 +1977,16 @@ dependencies = [
"checksum itoa 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ae3088ea4baeceb0284ee9eea42f591226e6beaecf65373e41b38d95a1b8e7a1" "checksum itoa 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ae3088ea4baeceb0284ee9eea42f591226e6beaecf65373e41b38d95a1b8e7a1"
"checksum json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)" = "<none>" "checksum json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)" = "<none>"
"checksum json-tcp-server 0.1.0 (git+https://github.com/ethcore/json-tcp-server)" = "<none>" "checksum json-tcp-server 0.1.0 (git+https://github.com/ethcore/json-tcp-server)" = "<none>"
"checksum jsonrpc-core 3.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e913b3c809aab9378889da8b990b4a46b98bd4794c8117946a1cf63c5f87bcde" "checksum jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3c5094610b07f28f3edaf3947b732dadb31dbba4941d4d0c1c7a8350208f4414"
"checksum jsonrpc-http-server 6.1.0 (git+https://github.com/ethcore/jsonrpc-http-server.git)" = "<none>" "checksum jsonrpc-http-server 6.1.1 (git+https://github.com/ethcore/jsonrpc-http-server.git)" = "<none>"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" "checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a"
"checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f" "checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
"checksum libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)" = "23e3757828fa702a20072c37ff47938e9dd331b92fac6e223d26d4b7a55f7ee2" "checksum libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)" = "23e3757828fa702a20072c37ff47938e9dd331b92fac6e223d26d4b7a55f7ee2"
"checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48"
"checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd" "checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd"
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054" "checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
"checksum lru-cache 0.0.7 (git+https://github.com/contain-rs/lru-cache)" = "<none>"
"checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e" "checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e"
"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" "checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
"checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2" "checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2"
@ -1938,12 +2014,12 @@ dependencies = [
"checksum num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "51fedae97a05f7353612fe017ab705a37e6db8f4d67c5c6fe739a9e70d6eed09" "checksum num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "51fedae97a05f7353612fe017ab705a37e6db8f4d67c5c6fe739a9e70d6eed09"
"checksum number_prefix 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "084d05f4bf60621a9ac9bde941a410df548f4de9545f06e5ee9d3aef4b97cd77" "checksum number_prefix 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "084d05f4bf60621a9ac9bde941a410df548f4de9545f06e5ee9d3aef4b97cd77"
"checksum odds 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "b28c06e81b0f789122d415d6394b5fe849bde8067469f4c2980d3cdc10c78ec1" "checksum odds 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "b28c06e81b0f789122d415d6394b5fe849bde8067469f4c2980d3cdc10c78ec1"
"checksum parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>" "checksum owning_ref 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "8d91377085359426407a287ab16884a0111ba473aa6844ff01d4ec20ce3d75e7"
"checksum parity-dapps-home 1.4.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>" "checksum parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "98378dec0a185da2b7180308752f0bad73aaa949c3e0a3b0528d0e067945f7ab"
"checksum parity-dapps-signer 1.4.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>" "checksum parity-ui-precompiled 1.4.0 (git+https://github.com/ethcore/js-precompiled.git)" = "<none>"
"checksum parity-dapps-status 1.4.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>" "checksum parking_lot 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "968f685642555d2f7e202c48b8b11de80569e9bfea817f7f12d7c61aac62d4e6"
"checksum parity-dapps-wallet 1.4.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>" "checksum parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "dbc5847584161f273e69edc63c1a86254a22f570a0b5dd87aa6f9773f6f7d125"
"checksum parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e0fd1be2c3cf5fef20a6d18fec252c4f3c87c14fc3039002eb7d4ed91e436826" "checksum parking_lot_core 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fb1b97670a2ffadce7c397fb80a3d687c4f3060140b885621ef1653d0e5d5068"
"checksum phf 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "447d9d45f2e0b4a9b532e808365abf18fc211be6ca217202fcd45236ef12f026" "checksum phf 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "447d9d45f2e0b4a9b532e808365abf18fc211be6ca217202fcd45236ef12f026"
"checksum phf_codegen 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "8af7ae7c3f75a502292b491e5cc0a1f69e3407744abe6e57e2a3b712bb82f01d" "checksum phf_codegen 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "8af7ae7c3f75a502292b491e5cc0a1f69e3407744abe6e57e2a3b712bb82f01d"
"checksum phf_generator 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "db005608fd99800c8c74106a7c894cf582055b689aa14a79462cefdcb7dc1cc3" "checksum phf_generator 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "db005608fd99800c8c74106a7c894cf582055b689aa14a79462cefdcb7dc1cc3"
@ -1964,7 +2040,7 @@ dependencies = [
"checksum rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "655df67c314c30fa3055a365eae276eb88aa4f3413a352a1ab32c1320eda41ea" "checksum rayon 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "655df67c314c30fa3055a365eae276eb88aa4f3413a352a1ab32c1320eda41ea"
"checksum regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)" = "b4329b8928a284580a1c63ec9d846b12f6d3472317243ff7077aff11f23f2b29" "checksum regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)" = "b4329b8928a284580a1c63ec9d846b12f6d3472317243ff7077aff11f23f2b29"
"checksum regex-syntax 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "841591b1e05609a643e3b4d0045fce04f701daba7151ddcd3ad47b080693d5a9" "checksum regex-syntax 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "841591b1e05609a643e3b4d0045fce04f701daba7151ddcd3ad47b080693d5a9"
"checksum ring 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d059a6a96d3be79042e3f70eb97945912839265f9d8ab45b921abaf266c70dbb" "checksum ring 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0d2f6547bf9640f1d3cc4e771f82374ec8fd237c17eeb3ff5cd5ccbe22377a09"
"checksum rocksdb 0.4.5 (git+https://github.com/ethcore/rust-rocksdb)" = "<none>" "checksum rocksdb 0.4.5 (git+https://github.com/ethcore/rust-rocksdb)" = "<none>"
"checksum rocksdb-sys 0.3.0 (git+https://github.com/ethcore/rust-rocksdb)" = "<none>" "checksum rocksdb-sys 0.3.0 (git+https://github.com/ethcore/rust-rocksdb)" = "<none>"
"checksum rotor 0.6.3 (git+https://github.com/ethcore/rotor)" = "<none>" "checksum rotor 0.6.3 (git+https://github.com/ethcore/rotor)" = "<none>"
@ -1972,7 +2048,7 @@ dependencies = [
"checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a" "checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a"
"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b" "checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b"
"checksum rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "c5f5376ea5e30ce23c03eb77cbe4962b988deead10910c372b226388b594c084" "checksum rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "c5f5376ea5e30ce23c03eb77cbe4962b988deead10910c372b226388b594c084"
"checksum rustls 0.1.1 (git+https://github.com/ctz/rustls)" = "<none>" "checksum rustls 0.1.2 (git+https://github.com/ctz/rustls)" = "<none>"
"checksum semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)" = "d4f410fedcf71af0345d7607d246e7ad15faaadd49d240ee3b24e5dc21a820ac" "checksum semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)" = "d4f410fedcf71af0345d7607d246e7ad15faaadd49d240ee3b24e5dc21a820ac"
"checksum semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2d5b7638a1f03815d94e88cb3b3c08e87f0db4d683ef499d1836aaf70a45623f" "checksum semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2d5b7638a1f03815d94e88cb3b3c08e87f0db4d683ef499d1836aaf70a45623f"
"checksum serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b1dfda9ebb31d29fa8b94d7eb3031a86a8dcec065f0fe268a30f98867bf45775" "checksum serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)" = "b1dfda9ebb31d29fa8b94d7eb3031a86a8dcec065f0fe268a30f98867bf45775"
@ -1980,6 +2056,7 @@ dependencies = [
"checksum serde_codegen_internals 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f877e2781ed0a323295d1c9f0e26556117b5a11489fc47b1848dfb98b3173d21" "checksum serde_codegen_internals 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f877e2781ed0a323295d1c9f0e26556117b5a11489fc47b1848dfb98b3173d21"
"checksum serde_json 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0e10f8a9d94b06cf5d3bef66475f04c8ff90950f1be7004c357ff9472ccbaebc" "checksum serde_json 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0e10f8a9d94b06cf5d3bef66475f04c8ff90950f1be7004c357ff9472ccbaebc"
"checksum sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cc30b1e1e8c40c121ca33b86c23308a090d19974ef001b4bf6e61fd1a0fb095c" "checksum sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cc30b1e1e8c40c121ca33b86c23308a090d19974ef001b4bf6e61fd1a0fb095c"
"checksum siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c44e42fa187b5a8782489cf7740cc27c3125806be2bf33563cf5e02e9533fcd"
"checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e" "checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
"checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>" "checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>"
"checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4" "checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
@ -2017,7 +2094,7 @@ dependencies = [
"checksum vecio 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0795a11576d29ae80525a3fda315bf7b534f8feb9d34101e5fe63fb95bb2fd24" "checksum vecio 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0795a11576d29ae80525a3fda315bf7b534f8feb9d34101e5fe63fb95bb2fd24"
"checksum vergen 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "56b639f935488eb40f06d17c3e3bcc3054f6f75d264e187b1107c8d1cba8d31c" "checksum vergen 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "56b639f935488eb40f06d17c3e3bcc3054f6f75d264e187b1107c8d1cba8d31c"
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" "checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
"checksum webpki 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5dc10a815fabbb0c3145c1153240528f3a8703a47e26e8dbb4a5d4f6386200ad" "checksum webpki 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "813503a5985585e0812d430cd1328ee322f47f66629c8ed4ecab939cf9e92f91"
"checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4" "checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
"checksum ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>" "checksum ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
@ -26,7 +26,11 @@ lazy_static = "0.2"
regex = "0.1"
isatty = "0.1"
toml = "0.2"
+serde = "0.8.0"
+serde_json = "0.8.0"
+hyper = { version = "0.9", default-features = false }
ctrlc = { git = "https://github.com/ethcore/rust-ctrlc.git" }
+json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
fdlimit = { path = "util/fdlimit" }
ethcore = { path = "ethcore" }
ethcore-util = { path = "util" }
@ -40,12 +44,9 @@ ethcore-ipc = { path = "ipc/rpc" }
ethcore-ipc-hypervisor = { path = "ipc/hypervisor" }
ethcore-logger = { path = "logger" }
rlp = { path = "util/rlp" }
-json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
+ethcore-stratum = { path = "stratum" }
ethcore-dapps = { path = "dapps", optional = true }
clippy = { version = "0.0.90", optional = true}
-ethcore-stratum = { path = "stratum" }
-serde = "0.8.0"
-serde_json = "0.8.0"
[target.'cfg(windows)'.dependencies]
winapi = "0.2"
@ -53,29 +54,38 @@ winapi = "0.2"
[target.'cfg(not(windows))'.dependencies]
daemonize = "0.2"
-[dependencies.hyper]
-version = "0.9"
-default-features = false
[features]
-default = ["ui", "use-precompiled-js", "ipc"]
+default = ["ui-precompiled"]
-ui = ["dapps", "ethcore-signer/ui"]
-use-precompiled-js = ["ethcore-dapps/use-precompiled-js", "ethcore-signer/use-precompiled-js"]
+ui = [
+"dapps",
+"ethcore-dapps/ui",
+"ethcore-signer/ui",
+]
+ui-precompiled = [
+"dapps",
+"ethcore-signer/ui-precompiled",
+"ethcore-dapps/ui-precompiled",
+]
dapps = ["ethcore-dapps"]
-ipc = ["ethcore/ipc"]
+ipc = ["ethcore/ipc", "ethsync/ipc"]
jit = ["ethcore/jit"]
dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev", "ethcore-rpc/dev", "ethcore-dapps/dev", "ethcore-signer/dev"]
json-tests = ["ethcore/json-tests"]
+test-heavy = ["ethcore/test-heavy"]
stratum = ["ipc"]
ethkey-cli = ["ethcore/ethkey-cli"]
ethstore-cli = ["ethcore/ethstore-cli"]
evm-debug = ["ethcore/evm-debug"]
+evm-debug-tests = ["ethcore/evm-debug-tests"]
+slow-blocks = ["ethcore/slow-blocks"]
[[bin]]
path = "parity/main.rs"
name = "parity"
[profile.release]
-debug = true
+debug = false
lto = false
@ -5,6 +5,7 @@
[Internal Documentation][doc-url]
Be sure to check out [our wiki][wiki-url] for more information.
[travis-image]: https://travis-ci.org/ethcore/parity.svg?branch=master
@ -18,8 +19,11 @@ Be sure to check out [our wiki][wiki-url] for more information.
[doc-url]: https://ethcore.github.io/parity/ethcore/index.html
[wiki-url]: https://github.com/ethcore/parity/wiki
+**Requires Rust version 1.12.0 to build**
----
## About Parity
Parity's goal is to be the fastest, lightest, and most secure Ethereum client. We are developing Parity using the sophisticated and
@ -96,9 +100,9 @@ and Parity will begin syncing the Ethereum blockchain.
### Using systemd service file
To start Parity as a regular user using systemd init:
-1. Copy ```parity/scripts/parity.service``` to your
+1. Copy `parity/scripts/parity.service` to your
-systemd user directory (usually ```~/.config/systemd/user```).
+systemd user directory (usually `~/.config/systemd/user`).
-2. To pass any argument to Parity, write a ```~/.parity/parity.conf``` file this way:
+2. To pass any argument to Parity, write a `~/.parity/parity.conf` file this way:
-```ARGS="ARG1 ARG2 ARG3"```.
+`ARGS="ARG1 ARG2 ARG3"`.
-Example: ```ARGS="ui --geth --identity MyMachine"```.
+Example: `ARGS="ui --geth --identity MyMachine"`.
@ -6,6 +6,7 @@ environment:
certpass:
secure: 0BgXJqxq9Ei34/hZ7121FQ==
keyfile: C:\users\appveyor\Certificates.p12
+RUSTFLAGS: -Zorbit=off -D warnings
branches:
only:
@ -18,10 +19,10 @@ branches:
install:
- git submodule update --init --recursive
- ps: Install-Product node 6
-- ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-1.10.0-x86_64-pc-windows-msvc.exe"
+- ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-1.12.0-x86_64-pc-windows-msvc.exe"
- ps: Start-FileDownload "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -FileName nsis\SimpleFC.dll
- ps: Start-FileDownload "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -FileName nsis\vc_redist.x64.exe
-- rust-1.10.0-x86_64-pc-windows-msvc.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
+- rust-1.12.0-x86_64-pc-windows-msvc.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
- SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin;C:\Program Files (x86)\NSIS;C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Bin
- rustc -V
- cargo -V
@ -11,6 +11,7 @@ build = "build.rs"
[dependencies] [dependencies]
rand = "0.3.14" rand = "0.3.14"
log = "0.3" log = "0.3"
env_logger = "0.3"
jsonrpc-core = "3.0" jsonrpc-core = "3.0"
jsonrpc-http-server = { git = "https://github.com/ethcore/jsonrpc-http-server.git" } jsonrpc-http-server = { git = "https://github.com/ethcore/jsonrpc-http-server.git" }
hyper = { default-features = false, git = "https://github.com/ethcore/hyper" } hyper = { default-features = false, git = "https://github.com/ethcore/hyper" }
@ -19,19 +20,18 @@ url = "1.0"
rustc-serialize = "0.3" rustc-serialize = "0.3"
serde = "0.8" serde = "0.8"
serde_json = "0.8" serde_json = "0.8"
serde_macros = { version = "0.8", optional = true }
zip = { version = "0.1", default-features = false }
ethabi = "0.2.2" ethabi = "0.2.2"
linked-hash-map = "0.3" linked-hash-map = "0.3"
parity-dapps-glue = "1.4"
mime = "0.2"
serde_macros = { version = "0.8", optional = true }
zip = { version = "0.1", default-features = false }
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }
ethcore-rpc = { path = "../rpc" } ethcore-rpc = { path = "../rpc" }
ethcore-util = { path = "../util" } ethcore-util = { path = "../util" }
https-fetch = { path = "../util/https-fetch" } fetch = { path = "../util/fetch" }
parity-dapps = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4" } parity-ui = { path = "./ui" }
# List of apps
parity-dapps-status = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4" }
parity-dapps-home = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4" }
parity-dapps-wallet = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4", optional = true }
mime_guess = { version = "1.6.1" } mime_guess = { version = "1.6.1" }
clippy = { version = "0.0.90", optional = true} clippy = { version = "0.0.90", optional = true}
@ -39,13 +39,9 @@ clippy = { version = "0.0.90", optional = true}
serde_codegen = { version = "0.8", optional = true } serde_codegen = { version = "0.8", optional = true }
[features] [features]
default = ["serde_codegen", "extra-dapps"] default = ["serde_codegen"]
extra-dapps = ["parity-dapps-wallet"]
nightly = ["serde_macros"] nightly = ["serde_macros"]
dev = ["clippy", "ethcore-rpc/dev", "ethcore-util/dev"] dev = ["clippy", "ethcore-rpc/dev", "ethcore-util/dev"]
use-precompiled-js = [ ui = ["parity-ui/no-precompiled-js"]
"parity-dapps-status/use-precompiled-js", ui-precompiled = ["parity-ui/use-precompiled-js"]
"parity-dapps-home/use-precompiled-js",
"parity-dapps-wallet/use-precompiled-js"
]
dapps/js-glue/Cargo.toml (new file)
@ -0,0 +1,31 @@
[package]
description = "Base Package for all Parity built-in dapps"
name = "parity-dapps-glue"
version = "1.4.0"
license = "GPL-3.0"
authors = ["Ethcore <admin@ethcore.io"]
build = "build.rs"
[build-dependencies]
quasi_codegen = { version = "0.11", optional = true }
syntex = { version = "0.33", optional = true }
[dependencies]
glob = { version = "0.2.11" }
mime_guess = { version = "1.6.1" }
aster = { version = "0.17", default-features = false }
quasi = { version = "0.11", default-features = false }
quasi_macros = { version = "0.11", optional = true }
syntex = { version = "0.33", optional = true }
syntex_syntax = { version = "0.33", optional = true }
clippy = { version = "0.0.90", optional = true }
[features]
dev = ["clippy"]
default = ["with-syntex"]
nightly = ["quasi_macros"]
nightly-testing = ["clippy"]
with-syntex = ["quasi/with-syntex", "quasi_codegen", "quasi_codegen/with-syntex", "syntex", "syntex_syntax"]
use-precompiled-js = []
dapps/js-glue/README.md (new file)
@ -0,0 +1,65 @@
# Parity Dapps (JS-glue)
Code generator to simplify creating a built-in Parity Dapp
# How to create a new builtin Dapp
1. Clone this repository.
```bash
$ git clone https://github.com/ethcore/parity.git
```
1. Create a new directory for your Dapp. (`./myapp`)
```bash
$ mkdir -p ./parity/dapps/myapp/src/web
```
1. Copy your bundled frontend files to `./parity/dapps/myapp/src/web`
```bash
$ cp -r ./myapp-src/* ./parity/dapps/myapp/src/web
```
1. Instead of creating `web3` in your app, load the following (as the first script tag in `head`):
```html
<script src="/parity-utils/inject.js"></script>
```
The `inject.js` script will create global `web3` instance with proper provider that should be used by your dapp.
1. Create `./parity/dapps/myapp/Cargo.toml` with your app's details. See example here: [parity-status Cargo.toml](https://github.com/ethcore/parity-ui/blob/master/status/Cargo.toml). A rough sketch of the resulting `lib.rs.in` follows this list.
```bash
$ git clone https://github.com/ethcore/parity-ui.git
$ cd ./parity-ui/
$ cp ./home/Cargo.toml ../parity/dapps/myapp/Cargo.toml
$ cp ./home/build.rs ../parity/dapps/myapp/build.rs
$ cp ./home/src/lib.rs ../parity/dapps/myapp/src/lib.rs
$ cp ./home/src/lib.rs.in ../parity/dapps/myapp/src/lib.rs.in
# And edit the details of your app
$ vim ../parity/dapps/myapp/Cargo.toml # Edit the details
$ vim ./parity/dapps/myapp/src/lib.rs.in # Edit the details
```
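
What the copied `lib.rs.in` roughly contains: the sketch below is hypothetical and only based on the `File`/`Info`/`WebApp` types and the `derive_WebAppFiles` codegen added elsewhere in this commit. The crate name, the `App` struct and the metadata strings are placeholders; the copied `home` sources remain the authoritative template.
```rust
// Hypothetical `src/lib.rs.in` for the new dapp crate. It is expanded by the
// syntex build step; `extern crate parity_dapps_glue;` lives in `lib.rs`.
use std::collections::HashMap;
use parity_dapps_glue::{File, Info, WebApp};

#[derive(WebAppFiles)]
#[webapp(path = "web")] // files under `src/web` are embedded at build time
pub struct App {
	pages: HashMap<&'static str, File>,
}

impl Default for App {
	fn default() -> Self {
		// `files()` is generated by derive_WebAppFiles from the web directory.
		App { pages: Self::files() }
	}
}

impl WebApp for App {
	fn file(&self, path: &str) -> Option<&File> {
		self.pages.get(path)
	}

	fn info(&self) -> Info {
		Info {
			name: "My App",
			version: env!("CARGO_PKG_VERSION"),
			author: "Ethcore",
			description: "My first built-in Parity dapp",
			icon_url: "icon.png",
		}
	}
}
```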
# How to include your Dapp into `Parity`?
1. Edit `dapps/Cargo.toml` and add a dependency on your application (it can be optional)
```toml
# Use git repo and version
parity-dapps-myapp = { path="./myapp" }
```
1. Edit `dapps/src/apps.rs` and add your application to `all_pages` (if it's optional you need to specify two functions - see the `parity-dapps-wallet` example); a rough registration sketch follows this list
1. Compile parity.
```bash
$ cargo build --release # While inside `parity`
```
1. Commit the results.
```bash
$ git add myapp && git commit -am "My first Parity Dapp"
```
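
For the registration step, a hypothetical fragment is sketched below. It mirrors the `pages.insert(...)`/feature-gated helper pattern visible elsewhere in this commit; `myapp_page`, the `parity-dapps-myapp` crate and feature names are placeholders, and the exact types and signatures in `apps.rs` may differ.
```rust
// Hypothetical fragment for dapps/src/apps.rs; `Endpoints` and `PageEndpoint`
// come from the surrounding module.
#[cfg(feature = "parity-dapps-myapp")]
extern crate parity_dapps_myapp;

#[cfg(feature = "parity-dapps-myapp")]
fn myapp_page(pages: &mut Endpoints) {
	pages.insert(
		"myapp".into(),
		Box::new(PageEndpoint::new(parity_dapps_myapp::App::default())),
	);
}

// No-op fallback so the call site compiles when the feature is disabled.
#[cfg(not(feature = "parity-dapps-myapp"))]
fn myapp_page(_pages: &mut Endpoints) {}

// Then, from the function that assembles all pages:
// myapp_page(&mut pages);
```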
@ -14,21 +14,32 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
-//! Block queue info types
-/// Block queue status
+#[cfg(feature = "with-syntex")]
-#[derive(Debug, Binary)]
+mod inner {
-pub struct BlockQueueInfo {
+extern crate syntex;
-/// Number of queued blocks pending verification
+extern crate quasi_codegen;
-pub unverified_queue_size: usize,
-/// Number of verified queued blocks pending import
+use std::env;
-pub verified_queue_size: usize,
+use std::path::Path;
-/// Number of blocks being verified
-pub verifying_queue_size: usize,
+pub fn main() {
-/// Configured maximum number of blocks in the queue
+let out_dir = env::var_os("OUT_DIR").unwrap();
-pub max_queue_size: usize,
+let mut registry = syntex::Registry::new();
-/// Configured maximum number of bytes to use
+quasi_codegen::register(&mut registry);
-pub max_mem_use: usize,
-/// Heap memory used in bytes
+let src = Path::new("src/lib.rs.in");
-pub mem_used: usize,
+let dst = Path::new(&out_dir).join("lib.rs");
+registry.expand("", &src, &dst).unwrap();
+}
+}
+#[cfg(not(feature = "with-syntex"))]
+mod inner {
+pub fn main() {}
+}
+fn main() {
+inner::main();
-}
+}
@ -0,0 +1,66 @@
#[cfg(feature = "with-syntex")]
pub mod inner {
use syntex;
use codegen;
use syntax::{ast, fold};
use std::env;
use std::path::Path;
fn strip_attributes(krate: ast::Crate) -> ast::Crate {
/// Helper folder that strips the serde attributes after the extensions have been expanded.
struct StripAttributeFolder;
impl fold::Folder for StripAttributeFolder {
fn fold_attribute(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {
match attr.node.value.node {
ast::MetaItemKind::List(ref n, _) if n == &"webapp" => { return None; }
_ => {}
}
Some(attr)
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self)
}
}
fold::Folder::fold_crate(&mut StripAttributeFolder, krate)
}
pub fn register(reg: &mut syntex::Registry) {
reg.add_attr("feature(custom_derive)");
reg.add_attr("feature(custom_attribute)");
reg.add_decorator("derive_WebAppFiles", codegen::expand_webapp_implementation);
reg.add_post_expansion_pass(strip_attributes);
}
pub fn generate() {
let out_dir = env::var_os("OUT_DIR").unwrap();
let mut registry = syntex::Registry::new();
register(&mut registry);
let src = Path::new("src/lib.rs.in");
let dst = Path::new(&out_dir).join("lib.rs");
registry.expand("", &src, &dst).unwrap();
}
}
#[cfg(not(feature = "with-syntex"))]
pub mod inner {
use codegen;
pub fn register(reg: &mut rustc_plugin::Registry) {
reg.register_syntax_extension(
syntax::parse::token::intern("derive_WebAppFiles"),
syntax::ext::base::MultiDecorator(
Box::new(codegen::expand_webapp_implementation)));
reg.register_attribute("webapp".to_owned(), AttributeType::Normal);
}
pub fn generate() {}
}
@ -0,0 +1,189 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
extern crate aster;
extern crate glob;
extern crate mime_guess;
use self::mime_guess::guess_mime_type;
use std::path::{self, Path, PathBuf};
use std::ops::Deref;
use syntax::ast::{MetaItem, Item};
use syntax::ast;
use syntax::attr;
use syntax::codemap::Span;
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ptr::P;
use syntax::print::pprust::{lit_to_string};
use syntax::parse::token::{InternedString};
pub fn expand_webapp_implementation(
cx: &mut ExtCtxt,
span: Span,
meta_item: &MetaItem,
annotatable: &Annotatable,
push: &mut FnMut(Annotatable)
) {
let item = match *annotatable {
Annotatable::Item(ref item) => item,
_ => {
cx.span_err(meta_item.span, "`#[derive(WebAppFiles)]` may only be applied to struct implementations");
return;
},
};
let builder = aster::AstBuilder::new().span(span);
implement_webapp(cx, &builder, &item, push);
}
fn implement_webapp(cx: &ExtCtxt, builder: &aster::AstBuilder, item: &Item, push: &mut FnMut(Annotatable)) {
let static_files_dir = extract_path(cx, item);
let src = Path::new("src");
let static_files = {
let mut buf = src.to_path_buf();
buf.push(static_files_dir.deref());
buf
};
let search_location = {
let mut buf = static_files.to_path_buf();
buf.push("**");
buf.push("*");
buf
};
let files = glob::glob(search_location.to_str().expect("Valid UTF8 path"))
.expect("The sources directory is missing.")
.collect::<Result<Vec<PathBuf>, glob::GlobError>>()
.expect("There should be no error when reading a list of files.");
let statements = files
.iter()
.filter(|path_buf| path_buf.is_file())
.map(|path_buf| {
let path = path_buf.as_path();
let filename = path.file_name().and_then(|s| s.to_str()).expect("Only UTF8 paths.");
let mime_type = guess_mime_type(filename).to_string();
let file_path = as_uri(path.strip_prefix(&static_files).ok().expect("Prefix is always there, cause it's absolute path;qed"));
let file_path_in_source = path.to_str().expect("Only UTF8 paths.");
let path_lit = builder.expr().str(file_path.as_str());
let mime_lit = builder.expr().str(mime_type.as_str());
let web_path_lit = builder.expr().str(file_path_in_source);
let separator_lit = builder.expr().str(path::MAIN_SEPARATOR.to_string().as_str());
let concat_id = builder.id("concat!");
let env_id = builder.id("env!");
let macro_id = builder.id("include_bytes!");
let content = quote_expr!(
cx,
$macro_id($concat_id($env_id("CARGO_MANIFEST_DIR"), $separator_lit, $web_path_lit))
);
quote_stmt!(
cx,
files.insert($path_lit, File { path: $path_lit, content_type: $mime_lit, content: $content });
).expect("The statement is always ok, because it just uses literals.")
}).collect::<Vec<ast::Stmt>>();
let type_name = item.ident;
let files_impl = quote_item!(cx,
impl $type_name {
fn files() -> ::std::collections::HashMap<&'static str, File> {
let mut files = ::std::collections::HashMap::new();
$statements
files
}
}
).unwrap();
push(Annotatable::Item(files_impl));
}
fn extract_path(cx: &ExtCtxt, item: &Item) -> String {
for meta_items in item.attrs().iter().filter_map(webapp_meta_items) {
for meta_item in meta_items {
match meta_item.node {
ast::MetaItemKind::NameValue(ref name, ref lit) if name == &"path" => {
if let Some(s) = get_str_from_lit(cx, name, lit) {
return s.deref().to_owned();
}
},
_ => {},
}
}
}
// default
"web".to_owned()
}
fn get_str_from_lit(cx: &ExtCtxt, name: &str, lit: &ast::Lit) -> Option<InternedString> {
match lit.node {
ast::LitKind::Str(ref s, _) => Some(s.clone()),
_ => {
cx.span_err(
lit.span,
&format!("webapp annotation `{}` must be a string, not `{}`",
name,
lit_to_string(lit)
)
);
return None;
}
}
}
fn webapp_meta_items(attr: &ast::Attribute) -> Option<&[P<ast::MetaItem>]> {
match attr.node.value.node {
ast::MetaItemKind::List(ref name, ref items) if name == &"webapp" => {
attr::mark_used(&attr);
Some(items)
}
_ => None
}
}
fn as_uri(path: &Path) -> String {
let mut s = String::new();
for component in path.iter() {
s.push_str(component.to_str().expect("Only UTF-8 filenames are supported."));
s.push('/');
}
s[0..s.len()-1].into()
}
#[test]
fn should_convert_path_separators_on_all_platforms() {
// given
let p = {
let mut p = PathBuf::new();
p.push("web");
p.push("src");
p.push("index.html");
p
};
// when
let path = as_uri(&p);
// then
assert_eq!(path, "web/src/index.html".to_owned());
}
dapps/js-glue/src/js.rs (new file)
@ -0,0 +1,89 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
#![cfg_attr(feature="use-precompiled-js", allow(dead_code))]
#![cfg_attr(feature="use-precompiled-js", allow(unused_imports))]
use std::fmt;
use std::process::Command;
#[cfg(not(windows))]
mod platform {
use std::process::Command;
pub static NPM_CMD: &'static str = "npm";
pub fn handle_fd(cmd: &mut Command) -> &mut Command {
cmd
}
}
#[cfg(windows)]
mod platform {
use std::process::{Command, Stdio};
pub static NPM_CMD: &'static str = "npm.cmd";
// NOTE [ToDr] For some reason on windows
// We cannot have any file descriptors open when running a child process
// during build phase.
pub fn handle_fd(cmd: &mut Command) -> &mut Command {
cmd.stdin(Stdio::null())
.stdout(Stdio::null())
.stderr(Stdio::null())
}
}
fn die<T : fmt::Debug>(s: &'static str, e: T) -> ! {
panic!("Error: {}: {:?}", s, e);
}
#[cfg(feature = "use-precompiled-js")]
pub fn test(_path: &str) {
}
#[cfg(feature = "use-precompiled-js")]
pub fn build(_path: &str, _dest: &str) {
}
#[cfg(not(feature = "use-precompiled-js"))]
pub fn build(path: &str, dest: &str) {
let child = platform::handle_fd(&mut Command::new(platform::NPM_CMD))
.arg("install")
.arg("--no-progress")
.current_dir(path)
.status()
.unwrap_or_else(|e| die("Installing node.js dependencies with npm", e));
assert!(child.success(), "There was an error installing dependencies.");
let child = platform::handle_fd(&mut Command::new(platform::NPM_CMD))
.arg("run")
.arg("build")
.env("NODE_ENV", "production")
.env("BUILD_DEST", dest)
.current_dir(path)
.status()
.unwrap_or_else(|e| die("Building JS code", e));
assert!(child.success(), "There was an error build JS code.");
}
#[cfg(not(feature = "use-precompiled-js"))]
pub fn test(path: &str) {
let child = Command::new(platform::NPM_CMD)
.arg("run")
.arg("test")
.current_dir(path)
.status()
.unwrap_or_else(|e| die("Running test command", e));
assert!(child.success(), "There was an error while running JS tests.");
}
dapps/js-glue/src/lib.rs (new file)
@ -0,0 +1,39 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
#![cfg_attr(not(feature = "with-syntex"), feature(rustc_private, plugin))]
#![cfg_attr(not(feature = "with-syntex"), plugin(quasi_macros))]
#[cfg(feature = "with-syntex")]
extern crate syntex;
#[cfg(feature = "with-syntex")]
#[macro_use]
extern crate syntex_syntax as syntax;
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
#[cfg(not(feature = "with-syntex"))]
#[macro_use]
extern crate syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate rustc_plugin;
#[cfg(not(feature = "with-syntex"))]
include!("lib.rs.in");
@ -0,0 +1,46 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
extern crate quasi;
mod codegen;
mod build;
pub mod js;
pub use build::inner::generate;
use std::default::Default;
#[derive(Clone)]
pub struct File {
pub path: &'static str,
pub content: &'static [u8],
// TODO: use strongly-typed MIME.
pub content_type: &'static str,
}
#[derive(Clone, Debug)]
pub struct Info {
pub name: &'static str,
pub version: &'static str,
pub author: &'static str,
pub description: &'static str,
pub icon_url: &'static str,
}
pub trait WebApp : Default + Send + Sync {
fn file(&self, path: &str) -> Option<&File>;
fn info(&self) -> Info;
}
@ -15,24 +15,31 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
+use unicase::UniCase;
use hyper::{server, net, Decoder, Encoder, Next, Control};
+use hyper::header;
+use hyper::method::Method;
+use hyper::header::AccessControlAllowOrigin;
use api::types::{App, ApiError};
-use api::response::{as_json, as_json_error, ping_response};
+use api::response;
+use apps::fetcher::ContentFetcher;
use handlers::extract_url;
use endpoint::{Endpoint, Endpoints, Handler, EndpointPath};
-use apps::fetcher::ContentFetcher;
+use jsonrpc_http_server::cors;
#[derive(Clone)]
pub struct RestApi {
-local_domain: String,
+cors_domains: Option<Vec<AccessControlAllowOrigin>>,
endpoints: Arc<Endpoints>,
fetcher: Arc<ContentFetcher>,
}
impl RestApi {
-pub fn new(local_domain: String, endpoints: Arc<Endpoints>, fetcher: Arc<ContentFetcher>) -> Box<Endpoint> {
+pub fn new(cors_domains: Vec<String>, endpoints: Arc<Endpoints>, fetcher: Arc<ContentFetcher>) -> Box<Endpoint> {
Box::new(RestApi {
-local_domain: local_domain,
+cors_domains: Some(cors_domains.into_iter().map(AccessControlAllowOrigin::Value).collect()),
endpoints: endpoints,
fetcher: fetcher,
})
@ -53,6 +60,7 @@ impl Endpoint for RestApi {
struct RestApiRouter {
api: RestApi,
+origin: Option<String>,
path: Option<EndpointPath>,
control: Option<Control>,
handler: Box<Handler>,
@ -62,9 +70,10 @@ impl RestApiRouter {
fn new(api: RestApi, path: EndpointPath, control: Control) -> Self {
RestApiRouter {
path: Some(path),
+origin: None,
control: Some(control),
api: api,
-handler: as_json_error(&ApiError {
+handler: response::as_json_error(&ApiError {
code: "404".into(),
title: "Not Found".into(),
detail: "Resource you requested has not been found.".into(),
@ -80,11 +89,40 @@ impl RestApiRouter {
_ => None
}
}
+/// Returns basic headers for a response (it may be overwritten by the handler)
+fn response_headers(&self) -> header::Headers {
+let mut headers = header::Headers::new();
+headers.set(header::AccessControlAllowCredentials);
+headers.set(header::AccessControlAllowMethods(vec![
+Method::Options,
+Method::Post,
+Method::Get,
+]));
+headers.set(header::AccessControlAllowHeaders(vec![
+UniCase("origin".to_owned()),
+UniCase("content-type".to_owned()),
+UniCase("accept".to_owned()),
+]));
+if let Some(cors_header) = cors::get_cors_header(&self.api.cors_domains, &self.origin) {
+headers.set(cors_header);
+}
+headers
+}
}
impl server::Handler<net::HttpStream> for RestApiRouter {
fn on_request(&mut self, request: server::Request<net::HttpStream>) -> Next {
+self.origin = cors::read_origin(&request);
+if let Method::Options = *request.method() {
+self.handler = response::empty();
+return Next::write();
+}
let url = extract_url(&request);
if url.is_none() {
// Just return 404 if we can't parse URL
@ -92,15 +130,18 @@ impl server::Handler<net::HttpStream> for RestApiRouter {
}
let url = url.expect("Check for None early-exists above; qed");
-let path = self.path.take().expect("on_request called only once, and path is always defined in new; qed");
+let mut path = self.path.take().expect("on_request called only once, and path is always defined in new; qed");
let control = self.control.take().expect("on_request called only once, and control is always defined in new; qed");
let endpoint = url.path.get(1).map(|v| v.as_str());
let hash = url.path.get(2).map(|v| v.as_str());
+// at this point path.app_id contains 'api', adjust it to the hash properly, otherwise
+// we will try and retrieve 'api' as the hash when doing the /api/content route
+if let Some(ref hash) = hash { path.app_id = hash.clone().to_owned() }
let handler = endpoint.and_then(|v| match v {
-"apps" => Some(as_json(&self.api.list_apps())),
+"apps" => Some(response::as_json(&self.api.list_apps())),
-"ping" => Some(ping_response(&self.api.local_domain)),
+"ping" => Some(response::ping()),
"content" => self.resolve_content(hash, path, control),
_ => None
});
@ -118,6 +159,7 @@ impl server::Handler<net::HttpStream> for RestApiRouter {
}
fn on_response(&mut self, res: &mut server::Response) -> Next {
+*res.headers_mut() = self.response_headers();
self.handler.on_response(res)
}
dapps/src/api/cors.rs (new, empty file)
@ -19,18 +19,22 @@ use serde_json;
use endpoint::Handler;
use handlers::{ContentHandler, EchoHandler};
-pub fn as_json<T : Serialize>(val: &T) -> Box<Handler> {
+pub fn empty() -> Box<Handler> {
-Box::new(ContentHandler::ok(serde_json::to_string(val).unwrap(), "application/json".to_owned()))
+Box::new(ContentHandler::ok("".into(), mime!(Text/Plain)))
}
-pub fn as_json_error<T : Serialize>(val: &T) -> Box<Handler> {
+pub fn as_json<T: Serialize>(val: &T) -> Box<Handler> {
-Box::new(ContentHandler::not_found(serde_json::to_string(val).unwrap(), "application/json".to_owned()))
+let json = serde_json::to_string(val)
+.expect("serialization to string is infallible; qed");
+Box::new(ContentHandler::ok(json, mime!(Application/Json)))
}
-pub fn ping_response(local_domain: &str) -> Box<Handler> {
+pub fn as_json_error<T: Serialize>(val: &T) -> Box<Handler> {
-Box::new(EchoHandler::cors(vec![
+let json = serde_json::to_string(val)
-format!("http://{}", local_domain),
+.expect("serialization to string is infallible; qed");
-// Allow CORS calls also for localhost
+Box::new(ContentHandler::not_found(json, mime!(Application/Json)))
-format!("http://{}", local_domain.replace("127.0.0.1", "localhost")),
+}
-]))
+pub fn ping() -> Box<Handler> {
+Box::new(EchoHandler::default())
}
@ -47,15 +47,17 @@ impl ContentCache {
}
pub fn clear_garbage(&mut self, expected_size: usize) -> Vec<(String, ContentStatus)> {
-let mut len = self.cache.len();
+let len = self.cache.len();
if len <= expected_size {
return Vec::new();
}
let mut removed = Vec::with_capacity(len - expected_size);
-while len > expected_size {
-let entry = self.cache.pop_front().unwrap();
+while self.cache.len() > expected_size {
+let entry = self.cache.pop_front().expect("expected_size bounded at 0, len is greater; qed");
match entry.1 {
ContentStatus::Fetching(ref fetch) => {
trace!(target: "dapps", "Aborting {} because of limit.", entry.0);
@ -64,16 +66,15 @@
},
ContentStatus::Ready(ref endpoint) => {
trace!(target: "dapps", "Removing {} because of limit.", entry.0);
-// Remove path
+// Remove path (dir or file)
-let res = fs::remove_dir_all(&endpoint.path());
+let res = fs::remove_dir_all(&endpoint.path()).or_else(|_| fs::remove_file(&endpoint.path()));
if let Err(e) = res {
-warn!(target: "dapps", "Unable to remove dapp: {:?}", e);
+warn!(target: "dapps", "Unable to remove dapp/content from cache: {:?}", e);
}
}
}
removed.push(entry);
-len -= 1;
}
removed
}
@ -46,6 +46,7 @@ pub struct ContentFetcher<R: URLHint = URLHintContract> {
resolver: R,
cache: Arc<Mutex<ContentCache>>,
sync: Arc<SyncStatus>,
+embeddable_at: Option<u16>,
}
impl<R: URLHint> Drop for ContentFetcher<R> {
@ -57,7 +58,7 @@ impl<R: URLHint> Drop for ContentFetcher<R> {
impl<R: URLHint> ContentFetcher<R> {
-pub fn new(resolver: R, sync_status: Arc<SyncStatus>) -> Self {
+pub fn new(resolver: R, sync_status: Arc<SyncStatus>, embeddable_at: Option<u16>) -> Self {
let mut dapps_path = env::temp_dir();
dapps_path.push(random_filename());
@ -66,9 +67,19 @@ impl<R: URLHint> ContentFetcher<R> {
resolver: resolver,
sync: sync_status,
cache: Arc::new(Mutex::new(ContentCache::default())),
+embeddable_at: embeddable_at,
}
}
+fn still_syncing() -> Box<Handler> {
+Box::new(ContentHandler::error(
+StatusCode::ServiceUnavailable,
+"Sync In Progress",
+"Your node is still syncing. We cannot resolve any content before it's fully synced.",
+Some("<a href=\"javascript:window.location.reload()\">Refresh</a>")
+))
+}
#[cfg(test)]
fn set_status(&self, content_id: &str, status: ContentStatus) {
self.cache.lock().insert(content_id.to_owned(), status);
@ -84,12 +95,10 @@ impl<R: URLHint> ContentFetcher<R> {
}
// fallback to resolver
if let Ok(content_id) = content_id.from_hex() {
-// if app_id is valid, but we are syncing always return true.
-if self.sync.is_major_syncing() {
-return true;
-}
// else try to resolve the app_id
-self.resolver.resolve(content_id).is_some()
+let has_content = self.resolver.resolve(content_id).is_some();
+// if there is content or we are syncing return true
+has_content || self.sync.is_major_importing()
} else {
false
}
@ -99,30 +108,21 @@
let mut cache = self.cache.lock();
let content_id = path.app_id.clone();
-if self.sync.is_major_syncing() {
-return Box::new(ContentHandler::error(
-StatusCode::ServiceUnavailable,
-"Sync In Progress",
-"Your node is still syncing. We cannot resolve any content before it's fully synced.",
-Some("<a href=\"javascript:window.location.reload()\">Refresh</a>")
-));
-}
let (new_status, handler) = {
let status = cache.get(&content_id);
match status {
-// Just server dapp
+// Just serve the content
Some(&mut ContentStatus::Ready(ref endpoint)) => {
(None, endpoint.to_async_handler(path, control))
},
-// App is already being fetched
+// Content is already being fetched
Some(&mut ContentStatus::Fetching(ref fetch_control)) => {
trace!(target: "dapps", "Content fetching in progress. Waiting...");
(None, fetch_control.to_handler(control))
},
-// We need to start fetching app
+// We need to start fetching the content
None => {
-trace!(target: "dapps", "Content unavailable. Fetching...");
+trace!(target: "dapps", "Content unavailable. Fetching... {:?}", content_id);
let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
let content = self.resolver.resolve(content_hex);
@ -141,6 +141,10 @@ impl<R: URLHint> ContentFetcher<R> {
};
match content {
+// Don't serve dapps if we are still syncing (but serve content)
+Some(URLHintResult::Dapp(_)) if self.sync.is_major_importing() => {
+(None, Self::still_syncing())
+},
Some(URLHintResult::Dapp(dapp)) => {
let (handler, fetch_control) = ContentFetcherHandler::new(
dapp.url(),
@ -150,6 +154,7 @@ impl<R: URLHint> ContentFetcher<R> {
id: content_id.clone(),
dapps_path: self.dapps_path.clone(),
on_done: Box::new(on_done),
+embeddable_at: self.embeddable_at,
}
);
@ -170,6 +175,9 @@ impl<R: URLHint> ContentFetcher<R> {
(Some(ContentStatus::Fetching(fetch_control)), Box::new(handler) as Box<Handler>)
},
+None if self.sync.is_major_importing() => {
+(None, Self::still_syncing())
+},
None => {
// This may happen when sync status changes in between
// `contains` and `to_handler`
@ -271,6 +279,7 @@ struct DappInstaller {
id: String,
dapps_path: PathBuf,
on_done: Box<Fn(String, Option<LocalPageEndpoint>) + Send>,
+embeddable_at: Option<u16>,
}
impl DappInstaller {
@ -363,7 +372,7 @@ impl ContentValidator for DappInstaller {
try!(manifest_file.write_all(manifest_str.as_bytes()));
// Create endpoint
-let app = LocalPageEndpoint::new(target, manifest.clone().into());
+let app = LocalPageEndpoint::new(target, manifest.clone().into(), self.embeddable_at);
// Return modified app manifest
Ok((manifest.id.clone(), app))
@ -396,14 +405,14 @@ mod tests {
fn should_true_if_contains_the_app() {
// given
let path = env::temp_dir();
-let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false));
+let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false), None);
let handler = LocalPageEndpoint::new(path, EndpointInfo {
name: "fake".into(),
description: "".into(),
version: "".into(),
author: "".into(),
icon_url: "".into(),
-});
+}, None);
// when
fetcher.set_status("test", ContentStatus::Ready(handler));
@ -415,4 +424,3 @@
assert_eq!(fetcher.contains("test3"), false);
}
}


@ -97,12 +97,12 @@ fn read_manifest(name: &str, mut path: PathBuf) -> EndpointInfo {
})
}
pub fn local_endpoints(dapps_path: String, signer_port: Option<u16>) -> Endpoints {
let mut pages = Endpoints::new();
for dapp in local_dapps(dapps_path) {
pages.insert(
dapp.id,
Box::new(LocalPageEndpoint::new(dapp.path, dapp.info, signer_port))
);
}
pages


@ -25,17 +25,14 @@ pub mod urlhint;
pub mod fetcher;
pub mod manifest;
extern crate parity_ui;
extern crate parity_dapps_home;
pub const HOME_PAGE: &'static str = "home";
pub const DAPPS_DOMAIN : &'static str = ".parity";
pub const RPC_PATH : &'static str = "rpc";
pub const API_PATH : &'static str = "api";
pub const UTILS_PATH : &'static str = "parity-utils";
pub fn main_page() -> &'static str {
"home"
}
pub fn redirection_address(using_dapps_domains: bool, app_id: &str) -> String {
if using_dapps_domains {
format!("http://{}{}/", app_id, DAPPS_DOMAIN)
@ -45,36 +42,29 @@ pub fn redirection_address(using_dapps_domains: bool, app_id: &str) -> String {
}
pub fn utils() -> Box<Endpoint> {
Box::new(PageEndpoint::with_prefix(parity_ui::App::default(), UTILS_PATH.to_owned()))
}
pub fn all_endpoints(dapps_path: String, signer_port: Option<u16>) -> Endpoints {
// fetch fs dapps at first to avoid overwriting builtins
let mut pages = fs::local_endpoints(dapps_path, signer_port);
// Home page needs to be safe embed
// because we use Cross-Origin LocalStorage.
// TODO [ToDr] Account naming should be moved to parity.
pages.insert("home".into(), Box::new(
PageEndpoint::new_safe_to_embed(parity_dapps_home::App::default())
));
pages.insert("proxy".into(), ProxyPac::boxed());
insert::<parity_dapps_status::App>(&mut pages, "parity");
insert::<parity_dapps_status::App>(&mut pages, "status");
// NOTE [ToDr] Dapps will be currently embeded on 8180
insert::<parity_ui::App>(&mut pages, "ui", Embeddable::Yes(signer_port));
pages.insert("proxy".into(), ProxyPac::boxed(signer_port));
pages
}
fn insert<T : WebApp + Default + 'static>(pages: &mut Endpoints, id: &str, embed_at: Embeddable) {
pages.insert(id.to_owned(), Box::new(match embed_at {
Embeddable::Yes(port) => PageEndpoint::new_safe_to_embed(T::default(), port),
Embeddable::No => PageEndpoint::new(T::default()),
}));
}
#[cfg(not(feature = "parity-dapps-wallet"))]
fn wallet_page(_pages: &mut Endpoints) {}
enum Embeddable {
Yes(Option<u16>),
#[allow(dead_code)]
No,
}


@ -156,9 +156,9 @@ impl URLHintContract {
}
let mut it = vec.into_iter();
let account_slash_repo = it.next().expect("element 0 of 3-len vector known to exist; qed");
let commit = it.next().expect("element 1 of 3-len vector known to exist; qed");
let owner = it.next().expect("element 2 of 3-len vector known to exist qed");
match (account_slash_repo, commit, owner) {
(Token::String(account_slash_repo), Token::FixedBytes(commit), Token::Address(owner)) => {


@ -1,113 +0,0 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Hyper Client Handlers
pub mod fetch_file;
use std::env;
use std::sync::{mpsc, Arc};
use std::sync::atomic::AtomicBool;
use std::path::PathBuf;
use hyper;
use https_fetch as https;
use random_filename;
use self::fetch_file::{Fetch, Error as HttpFetchError};
pub type FetchResult = Result<PathBuf, FetchError>;
#[derive(Debug)]
pub enum FetchError {
InvalidUrl,
Http(HttpFetchError),
Https(https::FetchError),
Other(String),
}
impl From<HttpFetchError> for FetchError {
fn from(e: HttpFetchError) -> Self {
FetchError::Http(e)
}
}
pub struct Client {
http_client: hyper::Client<Fetch>,
https_client: https::Client,
}
impl Client {
pub fn new() -> Self {
Client {
http_client: hyper::Client::new().expect("Unable to initialize http client."),
https_client: https::Client::new().expect("Unable to initialize https client."),
}
}
pub fn close(self) {
self.http_client.close();
self.https_client.close();
}
pub fn request(&mut self, url: &str, abort: Arc<AtomicBool>, on_done: Box<Fn() + Send>) -> Result<mpsc::Receiver<FetchResult>, FetchError> {
let is_https = url.starts_with("https://");
let url = try!(url.parse().map_err(|_| FetchError::InvalidUrl));
trace!(target: "dapps", "Fetching from: {:?}", url);
if is_https {
let url = try!(Self::convert_url(url));
let (tx, rx) = mpsc::channel();
let temp_path = Self::temp_path();
let res = self.https_client.fetch_to_file(url, temp_path.clone(), abort, move |result| {
let res = tx.send(
result.map(|_| temp_path).map_err(FetchError::Https)
);
if let Err(_) = res {
warn!("Fetch finished, but no one was listening");
}
on_done();
});
match res {
Ok(_) => Ok(rx),
Err(e) => Err(FetchError::Other(format!("{:?}", e))),
}
} else {
let (tx, rx) = mpsc::channel();
let res = self.http_client.request(url, Fetch::new(tx, abort, on_done));
match res {
Ok(_) => Ok(rx),
Err(e) => Err(FetchError::Other(format!("{:?}", e))),
}
}
}
fn convert_url(url: hyper::Url) -> Result<https::Url, FetchError> {
let host = format!("{}", try!(url.host().ok_or(FetchError::InvalidUrl)));
let port = try!(url.port_or_known_default().ok_or(FetchError::InvalidUrl));
https::Url::new(&host, port, url.path()).map_err(|_| FetchError::InvalidUrl)
}
fn temp_path() -> PathBuf {
let mut dir = env::temp_dir();
dir.push(random_filename());
dir
}
}


@ -19,57 +19,60 @@
use std::io::Write;
use hyper::{header, server, Decoder, Encoder, Next};
use hyper::net::HttpStream;
use hyper::mime::Mime;
use hyper::status::StatusCode;
use util::version;
use handlers::add_security_headers;
#[derive(Clone)]
pub struct ContentHandler {
code: StatusCode,
content: String,
mimetype: Mime,
write_pos: usize,
safe_to_embed_at_port: Option<u16>,
}
impl ContentHandler {
pub fn ok(content: String, mimetype: Mime) -> Self {
Self::new(StatusCode::Ok, content, mimetype)
code: StatusCode::Ok,
content: content,
mimetype: mimetype,
write_pos: 0
}
}
pub fn not_found(content: String, mimetype: Mime) -> Self {
Self::new(StatusCode::NotFound, content, mimetype)
code: StatusCode::NotFound,
content: content,
mimetype: mimetype,
write_pos: 0
}
}
pub fn html(code: StatusCode, content: String, embeddable_at: Option<u16>) -> Self {
Self::new_embeddable(code, content, mime!(Text/Html), embeddable_at)
}
pub fn error(code: StatusCode, title: &str, message: &str, details: Option<&str>) -> Self {
Self::error_embeddable(code, title, message, details, None)
}
pub fn error_embeddable(code: StatusCode, title: &str, message: &str, details: Option<&str>, embeddable_at: Option<u16>) -> Self {
Self::html(code, format!(
include_str!("../error_tpl.html"),
title=title,
message=message,
details=details.unwrap_or_else(|| ""),
version=version(),
), embeddable_at)
}
pub fn new(code: StatusCode, content: String, mimetype: Mime) -> Self {
Self::new_embeddable(code, content, mimetype, None)
}
pub fn new_embeddable(code: StatusCode, content: String, mimetype: Mime, embeddable_at: Option<u16>) -> Self {
ContentHandler {
code: code,
content: content,
mimetype: mimetype,
write_pos: 0,
safe_to_embed_at_port: embeddable_at,
}
}
}
@ -85,7 +88,8 @@ impl server::Handler<HttpStream> for ContentHandler {
fn on_response(&mut self, res: &mut server::Response) -> Next {
res.set_status(self.code);
res.headers_mut().set(header::ContentType(self.mimetype.clone()));
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_at_port.clone());
Next::write()
}


@ -17,82 +17,25 @@
//! Echo Handler
use std::io::Read;
use hyper::{server, Decoder, Encoder, Next};
use hyper::method::Method;
use hyper::net::HttpStream;
use unicase::UniCase;
use super::ContentHandler;
#[derive(Default)]
/// Type of Cross-Origin request
enum Cors {
/// Not a Cross-Origin request - no headers needed
No,
/// Cross-Origin request with valid Origin
Allowed(String),
/// Cross-Origin request with invalid Origin
Forbidden,
}
pub struct EchoHandler {
safe_origins: Vec<String>,
content: String,
cors: Cors,
handler: Option<ContentHandler>,
}
impl EchoHandler {
pub fn cors(safe_origins: Vec<String>) -> Self {
EchoHandler {
safe_origins: safe_origins,
content: String::new(),
cors: Cors::Forbidden,
handler: None,
}
}
fn cors_header(&self, origin: Option<String>) -> Cors {
fn origin_is_allowed(origin: &str, safe_origins: &[String]) -> bool {
for safe in safe_origins {
if origin.starts_with(safe) {
return true;
}
}
false
}
match origin {
Some(ref origin) if origin_is_allowed(origin, &self.safe_origins) => {
Cors::Allowed(origin.clone())
},
None => Cors::No,
_ => Cors::Forbidden,
}
}
}
impl server::Handler<HttpStream> for EchoHandler {
fn on_request(&mut self, _: server::Request<HttpStream>) -> Next {
Next::read()
.and_then(|list| list.get(0))
.and_then(|origin| String::from_utf8(origin.clone()).ok());
self.cors = self.cors_header(origin);
// Don't even read the payload if origin is forbidden!
if let Cors::Forbidden = self.cors {
self.handler = Some(ContentHandler::ok(String::new(), "text/plain".into()));
Next::write()
} else {
Next::read()
}
} }
fn on_request_readable(&mut self, decoder: &mut Decoder<HttpStream>) -> Next {
match decoder.read_to_string(&mut self.content) {
Ok(0) => {
self.handler = Some(ContentHandler::ok(self.content.clone(), mime!(Application/Json)));
Next::write()
},
Ok(_) => Next::read(),
@ -104,45 +47,14 @@ impl server::Handler<HttpStream> for EchoHandler {
}
fn on_response(&mut self, res: &mut server::Response) -> Next {
self.handler.as_mut()
.expect("handler always set in on_request, which is before now; qed")
.on_response(res)
headers.set(header::AccessControlAllowHeaders(vec![
UniCase("origin".to_owned()),
UniCase("content-type".to_owned()),
UniCase("accept".to_owned()),
]));
headers.set(header::AccessControlAllowOrigin::Value(domain.clone()));
}
self.handler.as_mut().unwrap().on_response(res)
}
fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
self.handler.as_mut()
.expect("handler always set in on_request, which is before now; qed")
.on_response_writable(encoder)
}
}
#[test]
fn should_return_correct_cors_value() {
// given
let safe_origins = vec!["chrome-extension://".to_owned(), "http://localhost:8080".to_owned()];
let cut = EchoHandler {
safe_origins: safe_origins,
content: String::new(),
cors: Cors::No,
handler: None,
};
// when
let res1 = cut.cors_header(Some("http://ethcore.io".into()));
let res2 = cut.cors_header(Some("http://localhost:8080".into()));
let res3 = cut.cors_header(Some("chrome-extension://deadbeefcafe".into()));
let res4 = cut.cors_header(None);
// then
assert_eq!(res1, Cors::Forbidden);
assert_eq!(res2, Cors::Allowed("http://localhost:8080".into()));
assert_eq!(res3, Cors::Allowed("chrome-extension://deadbeefcafe".into()));
assert_eq!(res4, Cors::No);
}


@ -22,13 +22,13 @@ use std::sync::{mpsc, Arc};
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::{Instant, Duration};
use util::Mutex;
use fetch::{Client, Fetch, FetchResult};
use hyper::{server, Decoder, Encoder, Next, Method, Control};
use hyper::net::HttpStream;
use hyper::status::StatusCode;
use handlers::{ContentHandler, Redirection};
use handlers::client::{Client, FetchResult};
use apps::redirection_address;
use page::LocalPageEndpoint;
@ -159,7 +159,7 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
handler: H) -> (Self, Arc<FetchControl>) {
let fetch_control = Arc::new(FetchControl::default());
let client = Client::default();
let handler = ContentFetcherHandler {
fetch_control: fetch_control.clone(),
control: Some(control),


@ -21,7 +21,6 @@ mod echo;
mod content;
mod redirect;
mod fetch;
pub mod client;
pub use self::auth::AuthRequiredHandler;
pub use self::echo::EchoHandler;
@ -31,7 +30,26 @@ pub use self::fetch::{ContentFetcherHandler, ContentValidator, FetchControl};
use url::Url;
use hyper::{server, header, net, uri};
use signer_address;
/// Adds security-related headers to the Response.
pub fn add_security_headers(headers: &mut header::Headers, embeddable_at: Option<u16>) {
headers.set_raw("X-XSS-Protection", vec![b"1; mode=block".to_vec()]);
headers.set_raw("X-Content-Type-Options", vec![b"nosniff".to_vec()]);
// Embedding header:
if let Some(port) = embeddable_at {
headers.set_raw(
"X-Frame-Options",
vec![format!("ALLOW-FROM http://{}", signer_address(port)).into_bytes()]
);
} else {
// TODO [ToDr] Should we be more strict here (DENY?)?
headers.set_raw("X-Frame-Options", vec![b"SAMEORIGIN".to_vec()]);
}
}
/// Extracts URL part from the Request.
pub fn extract_url(req: &server::Request<net::HttpStream>) -> Option<Url> {
match *req.uri() {
uri::RequestUri::AbsoluteUri(ref url) => {


@ -43,10 +43,8 @@
#![warn(missing_docs)]
#![cfg_attr(feature="nightly", plugin(clippy))]
#[macro_use]
extern crate log;
extern crate url as url_lib;
extern crate hyper;
extern crate url as url_lib;
extern crate unicase;
extern crate serde;
extern crate serde_json;
@ -57,13 +55,21 @@ extern crate jsonrpc_core;
extern crate jsonrpc_http_server;
extern crate mime_guess;
extern crate rustc_serialize;
extern crate parity_dapps;
extern crate https_fetch;
extern crate ethcore_rpc;
extern crate ethcore_util as util;
extern crate linked_hash_map;
extern crate fetch;
extern crate parity_dapps_glue as parity_dapps;
#[macro_use]
extern crate log;
#[macro_use]
extern crate mime;
#[cfg(test)]
extern crate ethcore_devtools as devtools;
#[cfg(test)]
extern crate env_logger;
mod endpoint;
mod apps;
@ -87,16 +93,16 @@ use jsonrpc_core::{IoHandler, IoDelegate};
use router::auth::{Authorization, NoAuth, HttpBasicAuth};
use ethcore_rpc::Extendable;
use self::apps::{HOME_PAGE, DAPPS_DOMAIN};
/// Indicates sync status
pub trait SyncStatus: Send + Sync {
/// Returns true if there is a major sync happening.
fn is_major_importing(&self) -> bool;
}
impl<F> SyncStatus for F where F: Fn() -> bool + Send + Sync {
fn is_major_importing(&self) -> bool { self() }
}
/// Webapps HTTP+RPC server build.
@ -105,6 +111,7 @@ pub struct ServerBuilder {
handler: Arc<IoHandler>,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
signer_port: Option<u16>,
}
impl Extendable for ServerBuilder {
@ -121,6 +128,7 @@ impl ServerBuilder {
handler: Arc::new(IoHandler::new()),
registrar: registrar,
sync_status: Arc::new(|| false),
signer_port: None,
}
}
@ -129,6 +137,11 @@ impl ServerBuilder {
self.sync_status = status;
}
/// Change default signer port.
pub fn with_signer_port(&mut self, signer_port: Option<u16>) {
self.signer_port = signer_port;
}
/// Asynchronously start server with no authentication,
/// returns result with `Server` handle on success or an error.
pub fn start_unsecured_http(&self, addr: &SocketAddr, hosts: Option<Vec<String>>) -> Result<Server, ServerError> {
@ -138,6 +151,7 @@ impl ServerBuilder {
NoAuth,
self.handler.clone(),
self.dapps_path.clone(),
self.signer_port.clone(),
self.registrar.clone(),
self.sync_status.clone(),
)
@ -152,6 +166,7 @@ impl ServerBuilder {
HttpBasicAuth::single_user(username, password),
self.handler.clone(),
self.dapps_path.clone(),
self.signer_port.clone(),
self.registrar.clone(),
self.sync_status.clone(),
)
@ -180,26 +195,40 @@ impl Server {
Some(allowed)
}
/// Returns a list of CORS domains for API endpoint.
fn cors_domains(signer_port: Option<u16>) -> Vec<String> {
match signer_port {
Some(port) => vec![
format!("http://{}{}", HOME_PAGE, DAPPS_DOMAIN),
format!("http://{}", signer_address(port)),
],
None => vec![],
}
}
fn start_http<A: Authorization + 'static>(
addr: &SocketAddr,
hosts: Option<Vec<String>>,
authorization: A,
handler: Arc<IoHandler>,
dapps_path: String,
signer_port: Option<u16>,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
) -> Result<Server, ServerError> {
let panic_handler = Arc::new(Mutex::new(None));
let authorization = Arc::new(authorization);
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status, signer_port));
let endpoints = Arc::new(apps::all_endpoints(dapps_path, signer_port.clone()));
let cors_domains = Self::cors_domains(signer_port);
let special = Arc::new({
let mut special = HashMap::new();
special.insert(router::SpecialEndpoint::Rpc, rpc::rpc(handler, panic_handler.clone()));
special.insert(router::SpecialEndpoint::Utils, apps::utils());
special.insert(
router::SpecialEndpoint::Api,
api::RestApi::new(cors_domains, endpoints.clone(), content_fetcher.clone())
);
special
});
@ -208,7 +237,7 @@ impl Server {
try!(hyper::Server::http(addr))
.handle(move |ctrl| router::Router::new(
ctrl,
signer_port.clone(),
content_fetcher.clone(),
endpoints.clone(),
special.clone(),
@ -272,6 +301,10 @@ pub fn random_filename() -> String {
rng.gen_ascii_chars().take(12).collect()
}
fn signer_address(port: u16) -> String {
format!("127.0.0.1:{}", port)
}
#[cfg(test)]
mod util_tests {
use super::Server;
@ -291,4 +324,17 @@ mod util_tests {
assert_eq!(address, Some(vec!["localhost".into(), "127.0.0.1".into()]));
assert_eq!(some, Some(vec!["ethcore.io".into(), "localhost".into(), "127.0.0.1".into()]));
}
#[test]
fn should_return_cors_domains() {
// given
// when
let none = Server::cors_domains(None);
let some = Server::cors_domains(Some(18180));
// then
assert_eq!(none, Vec::<String>::new());
assert_eq!(some, vec!["http://home.parity".to_owned(), "http://127.0.0.1:18180".into()]);
}
}


@ -25,7 +25,7 @@ pub struct PageEndpoint<T : WebApp + 'static> {
/// Prefix to strip from the path (when `None` deducted from `app_id`)
pub prefix: Option<String>,
/// Safe to be loaded in frame by other origin. (use wisely!)
safe_to_embed_at_port: Option<u16>,
info: EndpointInfo,
}
@ -36,7 +36,7 @@ impl<T: WebApp + 'static> PageEndpoint<T> {
PageEndpoint {
app: Arc::new(app),
prefix: None,
safe_to_embed_at_port: None,
info: EndpointInfo::from(info),
}
}
@ -49,7 +49,7 @@ impl<T: WebApp + 'static> PageEndpoint<T> {
PageEndpoint {
app: Arc::new(app),
prefix: Some(prefix),
safe_to_embed_at_port: None,
info: EndpointInfo::from(info),
}
}
@ -57,12 +57,12 @@ impl<T: WebApp + 'static> PageEndpoint<T> {
/// Creates new `PageEndpoint` which can be safely used in iframe
/// even from different origin. It might be dangerous (clickjacking).
/// Use wisely!
pub fn new_safe_to_embed(app: T, port: Option<u16>) -> Self {
let info = app.info();
PageEndpoint {
app: Arc::new(app),
prefix: None,
safe_to_embed_at_port: port,
info: EndpointInfo::from(info),
}
}
@ -79,8 +79,8 @@ impl<T: WebApp> Endpoint for PageEndpoint<T> {
app: BuiltinDapp::new(self.app.clone()),
prefix: self.prefix.clone(),
path: path,
file: handler::ServedFile::new(self.safe_to_embed_at_port.clone()),
safe_to_embed_at_port: self.safe_to_embed_at_port.clone(),
})
}
}


@ -22,7 +22,7 @@ use hyper::net::HttpStream;
use hyper::status::StatusCode;
use hyper::{Decoder, Encoder, Next};
use endpoint::EndpointPath;
use handlers::{ContentHandler, add_security_headers};
/// Represents a file that can be sent to client.
/// Implementation should keep track of bytes already sent internally.
@ -57,13 +57,14 @@ pub enum ServedFile<T: Dapp> {
Error(ContentHandler),
}
impl<T: Dapp> ServedFile<T> {
pub fn new(embeddable_at: Option<u16>) -> Self {
ServedFile::Error(ContentHandler::error_embeddable(
StatusCode::NotFound,
"404 Not Found",
"Requested dapp resource was not found.",
None,
embeddable_at,
))
}
}
@ -81,7 +82,7 @@ pub struct PageHandler<T: Dapp> {
/// Requested path.
pub path: EndpointPath,
/// Flag indicating if the file can be safely embeded (put in iframe).
pub safe_to_embed_at_port: Option<u16>,
}
impl<T: Dapp> PageHandler<T> {
@ -115,7 +116,7 @@ impl<T: Dapp> server::Handler<HttpStream> for PageHandler<T> {
self.app.file(&self.extract_path(url.path()))
},
_ => None,
}.map_or_else(|| ServedFile::new(self.safe_to_embed_at_port.clone()), |f| ServedFile::File(f));
Next::write()
}
@ -127,10 +128,14 @@ impl<T: Dapp> server::Handler<HttpStream> for PageHandler<T> {
match self.file {
ServedFile::File(ref f) => {
res.set_status(StatusCode::Ok);
res.headers_mut().set(header::ContentType(f.content_type().parse().unwrap()));
match f.content_type().parse() {
Ok(mime) => res.headers_mut().set(header::ContentType(mime)),
Err(()) => debug!(target: "page_handler", "invalid MIME type: {}", f.content_type()),
}
// Security headers:
add_security_headers(&mut res.headers_mut(), self.safe_to_embed_at_port);
Next::write()
},
ServedFile::Error(ref mut handler) => {
@ -212,8 +217,8 @@ fn should_extract_path_with_appid() {
port: 8080,
using_dapps_domains: true,
},
file: ServedFile::new(None),
safe_to_embed_at_port: None,
};
// when


@ -26,14 +26,16 @@ pub struct LocalPageEndpoint {
path: PathBuf,
mime: Option<String>,
info: Option<EndpointInfo>,
embeddable_at: Option<u16>,
}
impl LocalPageEndpoint {
pub fn new(path: PathBuf, info: EndpointInfo, embeddable_at: Option<u16>) -> Self {
LocalPageEndpoint {
path: path,
mime: None,
info: Some(info),
embeddable_at: embeddable_at,
}
}
@ -42,6 +44,7 @@ impl LocalPageEndpoint {
path: path,
mime: Some(mime),
info: None,
embeddable_at: None,
}
}
@ -61,16 +64,16 @@ impl Endpoint for LocalPageEndpoint {
app: LocalSingleFile { path: self.path.clone(), mime: mime.clone() },
prefix: None,
path: path,
file: handler::ServedFile::new(None),
safe_to_embed_at_port: self.embeddable_at,
})
} else {
Box::new(handler::PageHandler {
app: LocalDapp { path: self.path.clone() },
prefix: None,
path: path,
file: handler::ServedFile::new(None),
safe_to_embed_at_port: self.embeddable_at,
})
}
}


@ -18,31 +18,46 @@
use endpoint::{Endpoint, Handler, EndpointPath};
use handlers::ContentHandler;
use apps::{HOME_PAGE, DAPPS_DOMAIN};
use signer_address;
pub struct ProxyPac {
signer_port: Option<u16>,
}
impl ProxyPac {
pub fn boxed(signer_port: Option<u16>) -> Box<Endpoint> {
Box::new(ProxyPac {
signer_port: signer_port
})
}
}
impl Endpoint for ProxyPac {
fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
let signer = self.signer_port
.map(signer_address)
.unwrap_or_else(|| format!("{}:{}", path.host, path.port));
let content = format!(
r#"
function FindProxyForURL(url, host) {{
if (shExpMatch(host, "{0}{1}"))
{{
return "PROXY {4}";
}}
if (shExpMatch(host, "*{1}"))
{{
return "PROXY {2}:{3}";
}}
return "DIRECT";
}}
"#,
HOME_PAGE, DAPPS_DOMAIN, path.host, path.port, signer);
Box::new(ContentHandler::ok(content, "application/javascript".to_owned()))
Box::new(ContentHandler::ok(content, mime!(Application/Javascript)))
}
}


@ -15,7 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use apps::DAPPS_DOMAIN;
use hyper::{server, header, StatusCode};
use hyper::net::HttpStream;


@ -20,13 +20,13 @@
pub mod auth;
mod host_validation;
use signer_address;
use std::sync::Arc;
use std::collections::HashMap;
use url::{Url, Host};
use hyper::{self, server, Next, Encoder, Decoder, Control, StatusCode};
use hyper::net::HttpStream;
use apps::{self, DAPPS_DOMAIN};
use apps::fetcher::ContentFetcher;
use endpoint::{Endpoint, Endpoints, EndpointPath};
use handlers::{Redirection, extract_url, ContentHandler};
@ -43,7 +43,7 @@ pub enum SpecialEndpoint {
pub struct Router<A: Authorization + 'static> {
control: Option<Control>,
signer_port: Option<u16>,
endpoints: Arc<Endpoints>,
fetch: Arc<ContentFetcher>,
special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
@ -61,54 +61,81 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
let endpoint = extract_endpoint(&url);
let is_utils = endpoint.1 == SpecialEndpoint::Utils;
trace!(target: "dapps", "Routing request to {:?}. Details: {:?}", url, req);
// Validate Host header
if let Some(ref hosts) = self.allowed_hosts {
trace!(target: "dapps", "Validating host headers against: {:?}", hosts);
let is_valid = is_utils || host_validation::is_valid(&req, hosts, self.endpoints.keys().cloned().collect());
if !is_valid {
debug!(target: "dapps", "Rejecting invalid host header.");
self.handler = host_validation::host_invalid_response();
return self.handler.on_request(req);
}
}
trace!(target: "dapps", "Checking authorization.");
// Check authorization
let auth = self.authorization.is_authorized(&req);
if let Authorized::No(handler) = auth {
debug!(target: "dapps", "Authorization denied.");
self.handler = handler;
return self.handler.on_request(req);
}
let control = self.control.take().expect("on_request is called only once; control is always defined at start; qed");
debug!(target: "dapps", "Handling endpoint request: {:?}", endpoint);
self.handler = match endpoint {
// First check special endpoints
(ref path, ref endpoint) if self.special.contains_key(endpoint) => {
trace!(target: "dapps", "Resolving to special endpoint.");
self.special.get(endpoint)
.expect("special known to contain key; qed")
.to_async_handler(path.clone().unwrap_or_default(), control)
},
// Then delegate to dapp
(Some(ref path), _) if self.endpoints.contains_key(&path.app_id) => {
trace!(target: "dapps", "Resolving to local/builtin dapp.");
self.endpoints.get(&path.app_id)
.expect("special known to contain key; qed")
.to_async_handler(path.clone(), control)
},
// Try to resolve and fetch the dapp
(Some(ref path), _) if self.fetch.contains(&path.app_id) => {
trace!(target: "dapps", "Resolving to fetchable content.");
self.fetch.to_async_handler(path.clone(), control)
},
// 404 for non-existent content
(Some(_), _) if *req.method() == hyper::method::Method::Get => {
trace!(target: "dapps", "Resolving to 404.");
Box::new(ContentHandler::error(
StatusCode::NotFound,
"404 Not Found",
"Requested content was not found.",
None,
))
},
// Redirect any other GET request to signer.
_ if *req.method() == hyper::method::Method::Get => {
if let Some(port) = self.signer_port {
trace!(target: "dapps", "Redirecting to signer interface.");
Redirection::boxed(&format!("http://{}", signer_address(port)))
} else {
trace!(target: "dapps", "Signer disabled, returning 404.");
Box::new(ContentHandler::error(
StatusCode::NotFound,
"404 Not Found",
"Your homepage is not available when Trusted Signer is disabled.",
Some("You can still access dapps by writing a correct address, though. Re-enabled Signer to get your homepage back."),
))
}
},
// RPC by default
_ => {
trace!(target: "dapps", "Resolving to RPC call.");
self.special.get(&SpecialEndpoint::Rpc)
.expect("RPC endpoint always stored; qed")
.to_async_handler(EndpointPath::default(), control)
}
};
@ -135,7 +162,7 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
impl<A: Authorization> Router<A> {
pub fn new(
control: Control,
signer_port: Option<u16>,
content_fetcher: Arc<ContentFetcher>,
endpoints: Arc<Endpoints>,
special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
@ -143,10 +170,12 @@ impl<A: Authorization> Router<A> {
allowed_hosts: Option<Vec<String>>,
) -> Self {
let handler = special.get(&SpecialEndpoint::Utils)
.expect("Utils endpoint always stored; qed")
.to_handler(EndpointPath::default());
Router {
control: Some(control),
signer_port: signer_port,
endpoints: endpoints,
fetch: content_fetcher,
special: special,


@ -24,7 +24,7 @@ pub fn rpc(handler: Arc<IoHandler>, panic_handler: Arc<Mutex<Option<Box<Fn() ->
Box::new(RpcEndpoint {
handler: handler,
panic_handler: panic_handler,
cors_domain: None,
// NOTE [ToDr] We don't need to do any hosts validation here. It's already done in router.
allowed_hosts: None,
})


@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use tests::helpers::{serve, serve_with_registrar, request, assert_security_headers};
#[test]
fn should_return_error() {
@ -34,8 +34,9 @@ fn should_return_error() {
// then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_eq!(response.headers.get(3).unwrap(), "Content-Type: application/json");
assert_eq!(response.body, format!("58\n{}\n0\n\n", r#"{"code":"404","title":"Not Found","detail":"Resource you requested has not been found."}"#));
assert_security_headers(&response.headers);
}
#[test]
@ -56,8 +57,9 @@ fn should_serve_apps() {
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert_eq!(response.headers.get(3).unwrap(), "Content-Type: application/json");
assert!(response.body.contains("Parity UI"), response.body);
assert_security_headers(&response.headers);
}
#[test]
@ -78,8 +80,9 @@ fn should_handle_ping() {
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert_eq!(response.headers.get(3).unwrap(), "Content-Type: application/json");
assert_eq!(response.body, "0\n\n".to_owned());
assert_security_headers(&response.headers);
}
@ -101,5 +104,57 @@ fn should_try_to_resolve_dapp() {
// then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_eq!(registrar.calls.lock().len(), 2);
assert_security_headers(&response.headers);
}
#[test]
fn should_return_signer_port_cors_headers() {
// given
let server = serve();
// when
let response = request(server,
"\
POST /api/ping HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Origin: http://127.0.0.1:18180\r\n\
Connection: close\r\n\
\r\n\
{}
"
);
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert!(
response.headers_raw.contains("Access-Control-Allow-Origin: http://127.0.0.1:18180"),
"CORS header for signer missing: {:?}",
response.headers
);
}
#[test]
fn should_return_signer_port_cors_headers_for_home_parity() {
// given
let server = serve();
// when
let response = request(server,
"\
POST /api/ping HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Origin: http://home.parity\r\n\
Connection: close\r\n\
\r\n\
{}
"
);
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert!(
response.headers_raw.contains("Access-Control-Allow-Origin: http://home.parity"),
"CORS header for home.parity missing: {:?}",
response.headers
);
}


@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use tests::helpers::{serve_with_auth, request, assert_security_headers_for_embed};
#[test]
fn should_require_authorization() {
@ -66,7 +66,7 @@ fn should_allow_on_valid_auth() {
// when
let response = request(server,
"\
GET /ui/ HTTP/1.1\r\n\
Host: 127.0.0.1:8080\r\n\
Connection: close\r\n\
Authorization: Basic QWxhZGRpbjpPcGVuU2VzYW1l\r\n
@ -76,4 +76,5 @@ fn should_allow_on_valid_auth() {
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert_security_headers_for_embed(&response.headers);
}


@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use tests::helpers::{serve_with_registrar, serve_with_registrar_and_sync, request, assert_security_headers};
#[test]
fn should_resolve_dapp() {
@ -34,5 +34,34 @@ fn should_resolve_dapp() {
// then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_eq!(registrar.calls.lock().len(), 2);
assert_security_headers(&response.headers);
}
#[test]
fn should_return_503_when_syncing_but_should_make_the_calls() {
// given
let (server, registrar) = serve_with_registrar_and_sync();
{
let mut responses = registrar.responses.lock();
let res1 = responses.get(0).unwrap().clone();
let res2 = responses.get(1).unwrap().clone();
// Registrar will be called twice - fill up the responses.
responses.push(res1);
responses.push(res2);
}
// when
let response = request(server,
"\
GET / HTTP/1.1\r\n\
Host: 1472a9e190620cdf6b31f383373e45efcfe869a820c91f9ccd7eb9fb45e4985d.parity\r\n\
Connection: close\r\n\
\r\n\
"
);
// then
assert_eq!(response.status, "HTTP/1.1 503 Service Unavailable".to_owned());
assert_eq!(registrar.calls.lock().len(), 4);
assert_security_headers(&response.headers);
}


@ -18,6 +18,7 @@ use std::env;
use std::str;
use std::sync::Arc;
use rustc_serialize::hex::FromHex;
use env_logger::LogBuilder;
use ServerBuilder;
use Server;
@ -27,6 +28,7 @@ use devtools::http_client;
const REGISTRAR: &'static str = "8e4e9b13d4b45cb0befc93c3061b1408f67316b2";
const URLHINT: &'static str = "deadbeefcafe0000000000000000000000000000";
const SIGNER_PORT: u16 = 18180;
pub struct FakeRegistrar {
pub calls: Arc<Mutex<Vec<(String, String)>>>,
@ -58,11 +60,23 @@ impl ContractClient for FakeRegistrar {
}
}
fn init_logger() {
// Initialize logger
if let Ok(log) = env::var("RUST_LOG") {
let mut builder = LogBuilder::new();
builder.parse(&log);
builder.init().expect("Logger is initialized only once.");
}
}
pub fn init_server(hosts: Option<Vec<String>>, is_syncing: bool) -> (Server, Arc<FakeRegistrar>) {
init_logger();
let registrar = Arc::new(FakeRegistrar::new());
let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let mut builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar.clone());
builder.with_sync_status(Arc::new(move || is_syncing));
builder.with_signer_port(Some(SIGNER_PORT));
(
builder.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), hosts).unwrap(),
registrar,
@ -70,25 +84,38 @@ pub fn init_server(hosts: Option<Vec<String>>) -> (Server, Arc<FakeRegistrar>) {
}
pub fn serve_with_auth(user: &str, pass: &str) -> Server {
init_logger();
let registrar = Arc::new(FakeRegistrar::new());
let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let mut builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar);
builder.with_signer_port(Some(SIGNER_PORT));
builder.start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), None, user, pass).unwrap()
}
pub fn serve_hosts(hosts: Option<Vec<String>>) -> Server {
init_server(hosts, false).0
}
pub fn serve_with_registrar() -> (Server, Arc<FakeRegistrar>) {
init_server(None, false)
}
pub fn serve_with_registrar_and_sync() -> (Server, Arc<FakeRegistrar>) {
init_server(None, true)
}
pub fn serve() -> Server {
init_server(None, false).0
}
pub fn request(server: Server, request: &str) -> http_client::Response {
http_client::request(server.addr(), request)
}
pub fn assert_security_headers(headers: &[String]) {
http_client::assert_security_headers_present(headers, None)
}
pub fn assert_security_headers_for_embed(headers: &[String]) {
http_client::assert_security_headers_present(headers, Some(SIGNER_PORT))
}


@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use tests::helpers::{serve, request, assert_security_headers};
#[test]
fn should_redirect_to_home() {
@ -33,7 +33,7 @@ fn should_redirect_to_home() {
// then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned());
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
}
#[test]
@ -53,7 +53,7 @@ fn should_redirect_to_home_when_trailing_slash_is_missing() {
// then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned());
assert_eq!(response.headers.get(0).unwrap(), "Location: http://127.0.0.1:18180");
}
#[test]
@ -73,7 +73,7 @@ fn should_display_404_on_invalid_dapp() {
// then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_security_headers(&response.headers);
}
#[test]
@ -93,7 +93,7 @@ fn should_display_404_on_invalid_dapp_with_domain() {
// then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_security_headers(&response.headers);
}
#[test]
@ -159,7 +159,8 @@ fn should_serve_proxy_pac() {
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert_eq!(response.body, "D5\n\nfunction FindProxyForURL(url, host) {\n\tif (shExpMatch(host, \"home.parity\"))\n\t{\n\t\treturn \"PROXY 127.0.0.1:18180\";\n\t}\n\n\tif (shExpMatch(host, \"*.parity\"))\n\t{\n\t\treturn \"PROXY 127.0.0.1:8080\";\n\t}\n\n\treturn \"DIRECT\";\n}\n\n0\n\n".to_owned());
assert_security_headers(&response.headers);
}
#[test]
@ -181,5 +182,6 @@ fn should_serve_utils() {
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert_eq!(response.body.contains("function(){"), true);
assert_security_headers(&response.headers);
}


@ -45,7 +45,7 @@ fn should_allow_valid_host() {
// when
let response = request(server,
"\
GET /ui/ HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Connection: close\r\n\
\r\n\
@ -66,7 +66,7 @@ fn should_serve_dapps_domains() {
let response = request(server,
"\
GET / HTTP/1.1\r\n\
Host: ui.parity\r\n\
Connection: close\r\n\
\r\n\
{}
@ -98,3 +98,30 @@ fn should_allow_parity_utils_even_on_invalid_domain() {
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
}
#[test]
fn should_not_return_cors_headers_for_rpc() {
// given
let server = serve_hosts(Some(vec!["localhost:8080".into()]));
// when
let response = request(server,
"\
POST /rpc HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Origin: null\r\n\
Content-Type: application/json\r\n\
Connection: close\r\n\
\r\n\
{}
"
);
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert!(
!response.headers_raw.contains("Access-Control-Allow-Origin"),
"CORS headers were not expected: {:?}",
response.headers
);
}

dapps/ui/Cargo.toml (new file, 18 lines)
View File

@ -0,0 +1,18 @@
[package]
description = "Ethcore Parity UI"
homepage = "http://ethcore.io"
license = "GPL-3.0"
name = "parity-ui"
version = "1.4.0"
authors = ["Ethcore <admin@ethcore.io>"]
[build-dependencies]
rustc_version = "0.1"
[dependencies]
parity-ui-dev = { path = "../../js", optional = true }
parity-ui-precompiled = { git = "https://github.com/ethcore/js-precompiled.git", optional = true }
[features]
no-precompiled-js = ["parity-ui-dev"]
use-precompiled-js = ["parity-ui-precompiled"]

dapps/ui/src/lib.rs (new file, 33 lines)
View File

@ -0,0 +1,33 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
#[cfg(feature = "parity-ui-dev")]
mod inner {
extern crate parity_ui_dev;
pub use self::parity_ui_dev::*;
}
#[cfg(feature = "parity-ui-precompiled")]
mod inner {
extern crate parity_ui_precompiled;
pub use self::parity_ui_precompiled::*;
}
pub use self::inner::*;
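For orientation, this new crate is nothing but a feature-gated facade: exactly one inner module is compiled in, and its items are re-exported at the crate root. A stripped-down sketch of the same pattern, with hypothetical feature and function names rather than the real parity-ui backends:
// Sketch of the cfg-based facade pattern used by dapps/ui/src/lib.rs.
// The feature name "embedded-backend" and the functions are hypothetical; the real
// crate switches between parity_ui_dev and parity_ui_precompiled as shown above.
#[cfg(feature = "embedded-backend")]
mod inner {
    pub fn backend_name() -> &'static str { "embedded" }
}

#[cfg(not(feature = "embedded-backend"))]
mod inner {
    pub fn backend_name() -> &'static str { "external" }
}

// Downstream code depends only on the re-exported API, never on which backend was built.
pub use self::inner::backend_name;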

View File

@ -157,7 +157,7 @@ impl Drop for Database {
} }
} }
#[derive(Ipc)] #[ipc]
impl DatabaseService for Database { impl DatabaseService for Database {
fn open(&self, config: DatabaseConfig, path: String) -> Result<(), Error> { fn open(&self, config: DatabaseConfig, path: String) -> Result<(), Error> {
let mut db = self.db.write(); let mut db = self.db.write();

View File

@ -64,3 +64,25 @@ pub fn request(address: &SocketAddr, request: &str) -> Response {
} }
} }
/// Check if all required security headers are present
pub fn assert_security_headers_present(headers: &[String], port: Option<u16>) {
if let Some(port) = port {
assert!(
headers.iter().find(|header| header.as_str() == &format!("X-Frame-Options: ALLOW-FROM http://127.0.0.1:{}", port)).is_some(),
"X-Frame-Options: ALLOW-FROM missing: {:?}", headers
);
} else {
assert!(
headers.iter().find(|header| header.as_str() == "X-Frame-Options: SAMEORIGIN").is_some(),
"X-Frame-Options: SAMEORIGIN missing: {:?}", headers
);
}
assert!(
headers.iter().find(|header| header.as_str() == "X-XSS-Protection: 1; mode=block").is_some(),
"X-XSS-Protection missing: {:?}", headers
);
assert!(
headers.iter().find(|header| header.as_str() == "X-Content-Type-Options: nosniff").is_some(),
"X-Content-Type-Options missing: {:?}", headers
);
}
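A minimal usage sketch for the helper above, assuming the header list comes from the Response returned by the request helper in this file (the dapps tests reach it through a thin assert_security_headers wrapper):
// Illustrative only: a hand-built header list exercising the SAMEORIGIN branch.
fn example_usage() {
    let headers = vec![
        "X-Frame-Options: SAMEORIGIN".to_owned(),
        "X-XSS-Protection: 1; mode=block".to_owned(),
        "X-Content-Type-Options: nosniff".to_owned(),
    ];
    // No embedding port given, so SAMEORIGIN is the expected frame policy.
    assert_security_headers_present(&headers, None);
    // With Some(port) the helper instead requires
    // "X-Frame-Options: ALLOW-FROM http://127.0.0.1:<port>".
}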

View File

@ -0,0 +1,39 @@
FROM ubuntu:14.04
WORKDIR /build
# install tools and dependencies
RUN apt-get update && \
apt-get install -y \
g++ \
curl \
git \
file \
binutils
# install rustup
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y
# rustup directory
ENV PATH /root/.cargo/bin:$PATH
# show backtraces
ENV RUST_BACKTRACE 1
# show tools
RUN rustc -vV && \
cargo -V && \
gcc -v &&\
g++ -v
# build parity
RUN git clone https://github.com/ethcore/parity && \
cd parity && \
git checkout stable && \
git pull && \
cargo build --release --verbose && \
ls /build/parity/target/release/parity && \
strip /build/parity/target/release/parity
RUN file /build/parity/target/release/parity
EXPOSE 8080 8545 8180
ENTRYPOINT ["/build/parity/target/release/parity"]

View File

@ -9,4 +9,4 @@ authors = ["arkpar <arkadiy@ethcore.io"]
log = "0.3" log = "0.3"
sha3 = { path = "../util/sha3" } sha3 = { path = "../util/sha3" }
primal = "0.2.3" primal = "0.2.3"
parking_lot = "0.2.6" parking_lot = "0.3"

View File

@ -202,6 +202,9 @@ impl SeedHashCompute {
} }
} }
pub fn slow_get_seedhash(block_number: u64) -> H256 {
SeedHashCompute::resume_compute_seedhash([0u8; 32], 0, block_number / ETHASH_EPOCH_LENGTH)
}
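A short usage sketch for the new helper (it is re-exported from the ethash crate root further down in this diff); the assertion relies only on the seed hash being a function of the epoch, i.e. block_number / ETHASH_EPOCH_LENGTH:
// Sketch: any two block numbers in the same epoch share a seed hash.
extern crate ethash;

use ethash::{slow_get_seedhash, ETHASH_EPOCH_LENGTH};

fn main() {
    let block_number = 4_000_000u64;
    let epoch_start = block_number - block_number % ETHASH_EPOCH_LENGTH;
    assert_eq!(slow_get_seedhash(block_number), slow_get_seedhash(epoch_start));
}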
#[inline] #[inline]
fn fnv_hash(x: u32, y: u32) -> u32 { fn fnv_hash(x: u32, y: u32) -> u32 {

View File

@ -26,7 +26,7 @@ mod compute;
use std::mem; use std::mem;
use compute::Light; use compute::Light;
pub use compute::{ETHASH_EPOCH_LENGTH, H256, ProofOfWork, SeedHashCompute, quick_get_difficulty}; pub use compute::{ETHASH_EPOCH_LENGTH, H256, ProofOfWork, SeedHashCompute, quick_get_difficulty, slow_get_seedhash};
use std::sync::Arc; use std::sync::Arc;
use parking_lot::Mutex; use parking_lot::Mutex;

View File

@ -37,6 +37,9 @@ ethkey = { path = "../ethkey" }
ethcore-ipc-nano = { path = "../ipc/nano" } ethcore-ipc-nano = { path = "../ipc/nano" }
rlp = { path = "../util/rlp" } rlp = { path = "../util/rlp" }
rand = "0.3" rand = "0.3"
lru-cache = { git = "https://github.com/contain-rs/lru-cache" }
ethcore-bloom-journal = { path = "../util/bloom" }
byteorder = "0.5"
[dependencies.hyper] [dependencies.hyper]
git = "https://github.com/ethcore/hyper" git = "https://github.com/ethcore/hyper"
@ -44,7 +47,9 @@ default-features = false
[features] [features]
jit = ["evmjit"] jit = ["evmjit"]
evm-debug = [] evm-debug = ["slow-blocks"]
evm-debug-tests = ["evm-debug"]
slow-blocks = [] # Use SLOW_TX_DURATION="50" (compile time!) to track transactions over 50ms
json-tests = [] json-tests = []
test-heavy = [] test-heavy = []
dev = ["clippy"] dev = ["clippy"]
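The SLOW_TX_DURATION value mentioned in the slow-blocks comment is read at compile time via option_env!, exactly as the block-enactment code later in this diff does; a self-contained sketch of that mechanism (the 100ms default matches that code):
// Sketch: compile-time threshold lookup behind the slow-blocks feature.
// option_env! is expanded when ethcore is built, so SLOW_TX_DURATION must be set in
// the build environment, not when the binary is run.
fn slow_tx_threshold_ms() -> u64 {
    option_env!("SLOW_TX_DURATION")
        .and_then(|v| v.parse().ok())
        .unwrap_or(100)
}

fn main() {
    println!("transactions slower than {}ms will be logged", slow_tx_threshold_ms());
}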

View File

@ -18,7 +18,7 @@ extern crate ethcore_ipc_codegen;
fn main() { fn main() {
ethcore_ipc_codegen::derive_binary("src/types/mod.rs.in").unwrap(); ethcore_ipc_codegen::derive_binary("src/types/mod.rs.in").unwrap();
ethcore_ipc_codegen::derive_ipc("src/client/traits.rs").unwrap(); ethcore_ipc_codegen::derive_ipc_cond("src/client/traits.rs", cfg!(feature="ipc")).unwrap();
ethcore_ipc_codegen::derive_ipc("src/snapshot/snapshot_service_trait.rs").unwrap(); ethcore_ipc_codegen::derive_ipc_cond("src/snapshot/snapshot_service_trait.rs", cfg!(feature="ipc")).unwrap();
ethcore_ipc_codegen::derive_ipc("src/client/chain_notify.rs").unwrap(); ethcore_ipc_codegen::derive_ipc_cond("src/client/chain_notify.rs", cfg!(feature="ipc")).unwrap();
} }

View File

@ -10,7 +10,8 @@
"durationLimit": "0x0d", "durationLimit": "0x0d",
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"frontierCompatibilityModeLimit": "0x118c30" "homesteadTransition": "0x118c30",
"eip150Transition": "0x2625a0"
} }
} }
}, },

View File

@ -0,0 +1,43 @@
{
"name": "Homestead (Test)",
"engine": {
"Ethash": {
"params": {
"gasLimitBoundDivisor": "0x0400",
"minimumDifficulty": "0x020000",
"difficultyBoundDivisor": "0x0800",
"durationLimit": "0x0d",
"blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"homesteadTransition": "0x0",
"eip150Transition": "0x0"
}
}
},
"params": {
"accountStartNonce": "0x00",
"maximumExtraDataSize": "0x20",
"minGasLimit": "0x1388",
"networkID" : "0x1"
},
"genesis": {
"seal": {
"ethereum": {
"nonce": "0x0000000000000042",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
}
},
"difficulty": "0x400000000",
"author": "0x0000000000000000000000000000000000000000",
"timestamp": "0x00",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"extraData": "0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa",
"gasLimit": "0x1388"
},
"accounts": {
"0000000000000000000000000000000000000001": { "balance": "1", "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
"0000000000000000000000000000000000000002": { "balance": "1", "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } },
"0000000000000000000000000000000000000003": { "balance": "1", "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
"0000000000000000000000000000000000000004": { "balance": "1", "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } }
}
}

View File

@ -0,0 +1,70 @@
{
"name": "Expanse",
"forkName": "expanse",
"engine": {
"Ethash": {
"params": {
"gasLimitBoundDivisor": "0x0400",
"minimumDifficulty": "0x020000",
"difficultyBoundDivisor": "0x0800",
"difficultyIncrementDivisor": "60",
"durationLimit": "0x3C",
"blockReward": "0x6f05b59d3b200000",
"registrar" : "0x6c221ca53705f3497ec90ca7b84c59ae7382fc21",
"homesteadTransition": "0x30d40",
"difficultyHardforkTransition": "0x59d9",
"difficultyHardforkBoundDivisor": "0x0200",
"bombDefuseTransition": "0x30d40",
"eip150Transition": "0x7fffffffffffffff"
}
}
},
"params": {
"accountStartNonce": "0x00",
"maximumExtraDataSize": "0x20",
"minGasLimit": "0x1388",
"networkID": "0x1",
"subprotocolName": "exp"
},
"genesis": {
"seal": {
"ethereum": {
"nonce": "0x214652414e4b4f21",
"mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
}
},
"difficulty": "0x40000000",
"author": "0x93decab0cd745598860f782ac1e8f046cb99e898",
"timestamp": "0x00",
"parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000",
"extraData": "0x4672616e6b6f497346726565646f6d",
"gasLimit": "0x1388"
},
"nodes": [
"enode://7f335a047654f3e70d6f91312a7cf89c39704011f1a584e2698250db3d63817e74b88e26b7854111e16b2c9d0c7173c05419aeee2d0321850227b126d8b1be3f@46.101.156.249:42786",
"enode://df872f81e25f72356152b44cab662caf1f2e57c3a156ecd20e9ac9246272af68a2031b4239a0bc831f2c6ab34733a041464d46b3ea36dce88d6c11714446e06b@178.62.208.109:42786",
"enode://96d3919b903e7f5ad59ac2f73c43be9172d9d27e2771355db03fd194732b795829a31fe2ea6de109d0804786c39a807e155f065b4b94c6fce167becd0ac02383@45.55.22.34:42786",
"enode://5f6c625bf287e3c08aad568de42d868781e961cbda805c8397cfb7be97e229419bef9a5a25a75f97632787106bba8a7caf9060fab3887ad2cfbeb182ab0f433f@46.101.182.53:42786",
"enode://d33a8d4c2c38a08971ed975b750f21d54c927c0bf7415931e214465a8d01651ecffe4401e1db913f398383381413c78105656d665d83f385244ab302d6138414@128.199.183.48:42786",
"enode://df872f81e25f72356152b44cab662caf1f2e57c3a156ecd20e9ac9246272af68a2031b4239a0bc831f2c6ab34733a041464d46b3ea36dce88d6c11714446e06b@178.62.208.109:42786",
"enode://f6f0d6b9b7d02ec9e8e4a16e38675f3621ea5e69860c739a65c1597ca28aefb3cec7a6d84e471ac927d42a1b64c1cbdefad75e7ce8872d57548ddcece20afdd1@159.203.64.95:42786"
],
"accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
"0000000000000000000000000000000000000002": { "builtin": { "name": "sha256", "pricing": { "linear": { "base": 60, "word": 12 } } } },
"0000000000000000000000000000000000000003": { "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
"0000000000000000000000000000000000000004": { "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } },
"bb94f0ceb32257275b2a7a9c094c13e469b4563e": {
"balance": "10000000000000000000000000"
},
"15656715068ab0dbdf0ab00748a8a19e40f28192": {
"balance": "1000000000000000000000000"
},
"c075fa11f85bda3aaba67106226aaf086ac16f4e": {
"balance": "100000000000000000000000"
},
"93decab0cd745598860f782ac1e8f046cb99e898": {
"balance": "10000000000000000000000"
}
}
}

View File

@ -8,11 +8,11 @@
"difficultyBoundDivisor": "0x0800", "difficultyBoundDivisor": "0x0800",
"durationLimit": "0x0d", "durationLimit": "0x0d",
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0x3bb2bb5c6c9c9b7f4ef430b47dc7e026310042ea",
"frontierCompatibilityModeLimit": "0x118c30", "homesteadTransition": "0x118c30",
"daoHardforkTransition": "0x1d4c00", "daoHardforkTransition": "0x1d4c00",
"daoHardforkBeneficiary": "0xbf4ed7b27f1d666546e30d74d50d173d20bca754", "daoHardforkBeneficiary": "0xbf4ed7b27f1d666546e30d74d50d173d20bca754",
"daoHardforkAccounts": [ "daoHardforkAccounts": [
"0xd4fe7bc31cedb7bfb8a345f31e668033056b2728", "0xd4fe7bc31cedb7bfb8a345f31e668033056b2728",
"0xb3fb0e5aba0e20e5c49d252dfd30e102b171a425", "0xb3fb0e5aba0e20e5c49d252dfd30e102b171a425",
"0x2c19c7f9ae8b751e37aeb2d93a699722395ae18f", "0x2c19c7f9ae8b751e37aeb2d93a699722395ae18f",
@ -129,7 +129,8 @@
"0x7602b46df5390e432ef1c307d4f2c9ff6d65cc97", "0x7602b46df5390e432ef1c307d4f2c9ff6d65cc97",
"0xbb9bc244d798123fde783fcc1c72d3bb8c189413", "0xbb9bc244d798123fde783fcc1c72d3bb8c189413",
"0x807640a13483f8ac783c557fcdf27be11ea4ac7a" "0x807640a13483f8ac783c557fcdf27be11ea4ac7a"
] ],
"eip150Transition": "0x259518"
} }
} }
}, },
@ -157,10 +158,13 @@
"stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544" "stateRoot": "0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
}, },
"nodes": [ "nodes": [
"enode://cd6611461840543d5b9c56fbf088736154c699c43973b3a1a32390cf27106f87e58a818a606ccb05f3866de95a4fe860786fea71bf891ea95f234480d3022aa3@136.243.154.245:30303",
"enode://bcc7240543fe2cf86f5e9093d05753dd83343f8fda7bf0e833f65985c73afccf8f981301e13ef49c4804491eab043647374df1c4adf85766af88a624ecc3330e@136.243.154.244:30303",
"enode://ed4227681ca8c70beb2277b9e870353a9693f12e7c548c35df6bca6a956934d6f659999c2decb31f75ce217822eefca149ace914f1cbe461ed5a2ebaf9501455@88.212.206.70:30303",
"enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303", "enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303",
"enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303", "enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303",
"enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303", "enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303",
"enode://248f12bc8b18d5289358085520ac78cd8076485211e6d96ab0bc93d6cd25442db0ce3a937dc404f64f207b0b9aed50e25e98ce32af5ac7cb321ff285b97de485@zero.parity.io:30303" "enode://4cd540b2c3292e17cff39922e864094bf8b0741fcc8c5dcea14957e389d7944c70278d872902e3d0345927f621547efa659013c400865485ab4bfa0c6596936f@138.201.144.135:30303"
], ],
"accounts": { "accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } }, "0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },

View File

@ -9,7 +9,7 @@
"durationLimit": "0x0d", "durationLimit": "0x0d",
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"frontierCompatibilityModeLimit": "0x118c30", "homesteadTransition": "0x118c30",
"daoHardforkTransition": "0x1d4c00", "daoHardforkTransition": "0x1d4c00",
"daoHardforkBeneficiary": "0xbf4ed7b27f1d666546e30d74d50d173d20bca754", "daoHardforkBeneficiary": "0xbf4ed7b27f1d666546e30d74d50d173d20bca754",
"daoHardforkAccounts": [ "daoHardforkAccounts": [
@ -129,7 +129,8 @@
"0x7602b46df5390e432ef1c307d4f2c9ff6d65cc97", "0x7602b46df5390e432ef1c307d4f2c9ff6d65cc97",
"0xbb9bc244d798123fde783fcc1c72d3bb8c189413", "0xbb9bc244d798123fde783fcc1c72d3bb8c189413",
"0x807640a13483f8ac783c557fcdf27be11ea4ac7a" "0x807640a13483f8ac783c557fcdf27be11ea4ac7a"
] ],
"eip150Transition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -9,7 +9,8 @@
"durationLimit": "0x0d", "durationLimit": "0x0d",
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"frontierCompatibilityModeLimit": "0xffffffffffffffff" "homesteadTransition": "0x7fffffffffffffff",
"eip150Transition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -9,7 +9,8 @@
"durationLimit": "0x0d", "durationLimit": "0x0d",
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"frontierCompatibilityModeLimit": "0x0" "homesteadTransition": "0x0",
"eip150Transition": "0x7fffffffffffffff"
} }
} }
}, },

View File

@ -8,8 +8,9 @@
"difficultyBoundDivisor": "0x0800", "difficultyBoundDivisor": "0x0800",
"durationLimit": "0x0d", "durationLimit": "0x0d",
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar": "0x8e4e9b13d4b45cb0befc93c3061b1408f67316b2", "registrar": "0x52dff57a8a1532e6afb3dc07e2af58bb9eb05b3d",
"frontierCompatibilityModeLimit": "0x789b0" "homesteadTransition": "0x789b0",
"eip150Transition": "0x1b34d8"
} }
} }
}, },
@ -17,7 +18,9 @@
"accountStartNonce": "0x0100000", "accountStartNonce": "0x0100000",
"maximumExtraDataSize": "0x20", "maximumExtraDataSize": "0x20",
"minGasLimit": "0x1388", "minGasLimit": "0x1388",
"networkID" : "0x2" "networkID" : "0x2",
"forkBlock": "0x1b34d8",
"forkCanonHash": "0xf376243aeff1f256d970714c3de9fd78fa4e63cf63e32a51fe1169e375d98145"
}, },
"genesis": { "genesis": {
"seal": { "seal": {

View File

@ -8,7 +8,9 @@
"difficultyBoundDivisor": "0x0800", "difficultyBoundDivisor": "0x0800",
"durationLimit": "0x08", "durationLimit": "0x08",
"blockReward": "0x14D1120D7B160000", "blockReward": "0x14D1120D7B160000",
"registrar": "5e70c0bbcd5636e0f9f9316e9f8633feb64d4050" "registrar": "5e70c0bbcd5636e0f9f9316e9f8633feb64d4050",
"homesteadTransition": "0x7fffffffffffffff",
"eip150Transition": "0x7fffffffffffffff"
} }
} }
}, },

@ -1 +1 @@
Subproject commit ac5475d676536cb945f98e9ff98384c01abd0599 Subproject commit 97066e40ccd061f727deb5cd860e4d9135aa2551

View File

@ -1,5 +1,5 @@
{ {
"name": "DAO hard-fork consensus test", "name": "EIP150.1b hard-fork consensus test",
"engine": { "engine": {
"Ethash": { "Ethash": {
"params": { "params": {
@ -9,7 +9,7 @@
"durationLimit": "0x0d", "durationLimit": "0x0d",
"blockReward": "0x4563918244F40000", "blockReward": "0x4563918244F40000",
"registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b", "registrar" : "0xc6d9d2cd449a754c494264e1809c50e34d64562b",
"frontierCompatibilityModeLimit": "0x5", "homesteadTransition": "0x5",
"daoHardforkTransition": "0x8", "daoHardforkTransition": "0x8",
"daoHardforkBeneficiary": "0xbf4ed7b27f1d666546e30d74d50d173d20bca754", "daoHardforkBeneficiary": "0xbf4ed7b27f1d666546e30d74d50d173d20bca754",
"daoHardforkAccounts": [ "daoHardforkAccounts": [
@ -129,7 +129,8 @@
"0x7602b46df5390e432ef1c307d4f2c9ff6d65cc97", "0x7602b46df5390e432ef1c307d4f2c9ff6d65cc97",
"0xbb9bc244d798123fde783fcc1c72d3bb8c189413", "0xbb9bc244d798123fde783fcc1c72d3bb8c189413",
"0x807640a13483f8ac783c557fcdf27be11ea4ac7a" "0x807640a13483f8ac783c557fcdf27be11ea4ac7a"
] ],
"eip150Transition": "0xa"
} }
} }
}, },

View File

@ -96,9 +96,9 @@ impl<'db> HashDB for AccountDB<'db>{
unimplemented!() unimplemented!()
} }
fn get(&self, key: &H256) -> Option<&[u8]> { fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP { if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC); return Some(DBValue::from_slice(&NULL_RLP_STATIC));
} }
self.db.get(&combine_key(&self.address_hash, key)) self.db.get(&combine_key(&self.address_hash, key))
} }
@ -114,13 +114,17 @@ impl<'db> HashDB for AccountDB<'db>{
unimplemented!() unimplemented!()
} }
fn emplace(&mut self, _key: H256, _value: Bytes) { fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!() unimplemented!()
} }
fn remove(&mut self, _key: &H256) { fn remove(&mut self, _key: &H256) {
unimplemented!() unimplemented!()
} }
fn get_aux(&self, hash: &[u8]) -> Option<DBValue> {
self.db.get_aux(hash)
}
} }
/// DB backend wrapper for Account trie /// DB backend wrapper for Account trie
@ -154,9 +158,9 @@ impl<'db> HashDB for AccountDBMut<'db>{
unimplemented!() unimplemented!()
} }
fn get(&self, key: &H256) -> Option<&[u8]> { fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP { if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC); return Some(DBValue::from_slice(&NULL_RLP_STATIC));
} }
self.db.get(&combine_key(&self.address_hash, key)) self.db.get(&combine_key(&self.address_hash, key))
} }
@ -174,16 +178,16 @@ impl<'db> HashDB for AccountDBMut<'db>{
} }
let k = value.sha3(); let k = value.sha3();
let ak = combine_key(&self.address_hash, &k); let ak = combine_key(&self.address_hash, &k);
self.db.emplace(ak, value.to_vec()); self.db.emplace(ak, DBValue::from_slice(value));
k k
} }
fn emplace(&mut self, key: H256, value: Bytes) { fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP { if key == SHA3_NULL_RLP {
return; return;
} }
let key = combine_key(&self.address_hash, &key); let key = combine_key(&self.address_hash, &key);
self.db.emplace(key, value.to_vec()) self.db.emplace(key, value)
} }
fn remove(&mut self, key: &H256) { fn remove(&mut self, key: &H256) {
@ -193,6 +197,18 @@ impl<'db> HashDB for AccountDBMut<'db>{
let key = combine_key(&self.address_hash, key); let key = combine_key(&self.address_hash, key);
self.db.remove(&key) self.db.remove(&key)
} }
fn insert_aux(&mut self, hash: Vec<u8>, value: Vec<u8>) {
self.db.insert_aux(hash, value);
}
fn get_aux(&self, hash: &[u8]) -> Option<DBValue> {
self.db.get_aux(hash)
}
fn remove_aux(&mut self, hash: &[u8]) {
self.db.remove_aux(hash);
}
} }
struct Wrapping<'db>(&'db HashDB); struct Wrapping<'db>(&'db HashDB);
@ -202,9 +218,9 @@ impl<'db> HashDB for Wrapping<'db> {
unimplemented!() unimplemented!()
} }
fn get(&self, key: &H256) -> Option<&[u8]> { fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP { if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC); return Some(DBValue::from_slice(&NULL_RLP_STATIC));
} }
self.0.get(key) self.0.get(key)
} }
@ -220,7 +236,7 @@ impl<'db> HashDB for Wrapping<'db> {
unimplemented!() unimplemented!()
} }
fn emplace(&mut self, _key: H256, _value: Bytes) { fn emplace(&mut self, _key: H256, _value: DBValue) {
unimplemented!() unimplemented!()
} }
@ -236,9 +252,9 @@ impl<'db> HashDB for WrappingMut<'db>{
unimplemented!() unimplemented!()
} }
fn get(&self, key: &H256) -> Option<&[u8]> { fn get(&self, key: &H256) -> Option<DBValue> {
if key == &SHA3_NULL_RLP { if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC); return Some(DBValue::from_slice(&NULL_RLP_STATIC));
} }
self.0.get(key) self.0.get(key)
} }
@ -257,7 +273,7 @@ impl<'db> HashDB for WrappingMut<'db>{
self.0.insert(value) self.0.insert(value)
} }
fn emplace(&mut self, key: H256, value: Bytes) { fn emplace(&mut self, key: H256, value: DBValue) {
if key == SHA3_NULL_RLP { if key == SHA3_NULL_RLP {
return; return;
} }
@ -270,4 +286,4 @@ impl<'db> HashDB for WrappingMut<'db>{
} }
self.0.remove(key) self.0.remove(key)
} }
} }

View File

@ -23,7 +23,7 @@ use std::time::{Instant, Duration};
use util::{Mutex, RwLock}; use util::{Mutex, RwLock};
use ethstore::{SecretStore, Error as SSError, SafeAccount, EthStore}; use ethstore::{SecretStore, Error as SSError, SafeAccount, EthStore};
use ethstore::dir::{KeyDirectory}; use ethstore::dir::{KeyDirectory};
use ethstore::ethkey::{Address, Message, Secret, Random, Generator}; use ethstore::ethkey::{Address, Message, Public, Secret, Random, Generator};
use ethjson::misc::AccountMeta; use ethjson::misc::AccountMeta;
pub use ethstore::ethkey::Signature; pub use ethstore::ethkey::Signature;
@ -176,15 +176,23 @@ impl AccountProvider {
AccountProvider { AccountProvider {
unlocked: Mutex::new(HashMap::new()), unlocked: Mutex::new(HashMap::new()),
address_book: Mutex::new(AddressBook::new(Default::default())), address_book: Mutex::new(AddressBook::new(Default::default())),
sstore: Box::new(EthStore::open(Box::new(NullDir::default())).unwrap()) sstore: Box::new(EthStore::open(Box::new(NullDir::default()))
.expect("NullDir load always succeeds; qed"))
} }
} }
/// Creates new random account. /// Creates new random account.
pub fn new_account(&self, password: &str) -> Result<Address, Error> { pub fn new_account(&self, password: &str) -> Result<Address, Error> {
let secret = Random.generate().unwrap().secret().clone(); self.new_account_and_public(password).map(|d| d.0)
}
/// Creates new random account and returns address and public key
pub fn new_account_and_public(&self, password: &str) -> Result<(Address, Public), Error> {
let acc = Random.generate().expect("secp context has generation capabilities; qed");
let public = acc.public().clone();
let secret = acc.secret().clone();
let address = try!(self.sstore.insert_account(secret, password)); let address = try!(self.sstore.insert_account(secret, password));
Ok(address) Ok((address, public))
} }
/// Inserts new account into underlying store. /// Inserts new account into underlying store.
@ -257,6 +265,20 @@ impl AccountProvider {
Ok(()) Ok(())
} }
/// Returns `true` if the password for `account` is `password`. `false` if not.
pub fn test_password(&self, account: &Address, password: String) -> Result<bool, Error> {
match self.sstore.sign(&account, &password, &Default::default()) {
Ok(_) => Ok(true),
Err(SSError::InvalidPassword) => Ok(false),
Err(e) => Err(Error::SStore(e)),
}
}
/// Changes the password of `account` from `password` to `new_password`. Fails if incorrect `password` given.
pub fn change_password(&self, account: &Address, password: String, new_password: String) -> Result<(), Error> {
self.sstore.change_password(&account, &password, &new_password).map_err(Error::SStore)
}
/// Helper method used for unlocking accounts. /// Helper method used for unlocking accounts.
fn unlock_account(&self, account: Address, password: String, unlock: Unlock) -> Result<(), Error> { fn unlock_account(&self, account: Address, password: String, unlock: Unlock) -> Result<(), Error> {
// verify password by signing dump message // verify password by signing dump message
@ -280,6 +302,21 @@ impl AccountProvider {
Ok(()) Ok(())
} }
fn password(&self, account: &Address) -> Result<String, Error> {
let mut unlocked = self.unlocked.lock();
let data = try!(unlocked.get(account).ok_or(Error::NotUnlocked)).clone();
if let Unlock::Temp = data.unlock {
unlocked.remove(account).expect("data exists: so key must exist: qed");
}
if let Unlock::Timed((ref start, ref duration)) = data.unlock {
if start.elapsed() > Duration::from_millis(*duration as u64) {
unlocked.remove(account).expect("data exists: so key must exist: qed");
return Err(Error::NotUnlocked);
}
}
Ok(data.password.clone())
}
/// Unlocks account permanently. /// Unlocks account permanently.
pub fn unlock_account_permanently(&self, account: Address, password: String) -> Result<(), Error> { pub fn unlock_account_permanently(&self, account: Address, password: String) -> Result<(), Error> {
self.unlock_account(account, password, Unlock::Perm) self.unlock_account(account, password, Unlock::Perm)
@ -301,51 +338,16 @@ impl AccountProvider {
unlocked.get(&account).is_some() unlocked.get(&account).is_some()
} }
/// Signs the message. Account must be unlocked. /// Signs the message. If password is not provided the account must be unlocked.
pub fn sign(&self, account: Address, message: Message) -> Result<Signature, Error> { pub fn sign(&self, account: Address, password: Option<String>, message: Message) -> Result<Signature, Error> {
let data = { let password = try!(password.map(Ok).unwrap_or_else(|| self.password(&account)));
let mut unlocked = self.unlocked.lock(); Ok(try!(self.sstore.sign(&account, &password, &message)))
let data = try!(unlocked.get(&account).ok_or(Error::NotUnlocked)).clone();
if let Unlock::Temp = data.unlock {
unlocked.remove(&account).expect("data exists: so key must exist: qed");
}
if let Unlock::Timed((ref start, ref duration)) = data.unlock {
if start.elapsed() > Duration::from_millis(*duration as u64) {
unlocked.remove(&account).expect("data exists: so key must exist: qed");
return Err(Error::NotUnlocked);
}
}
data
};
let signature = try!(self.sstore.sign(&account, &data.password, &message));
Ok(signature)
} }
/// Decrypts a message. Account must be unlocked. /// Decrypts a message. If password is not provided the account must be unlocked.
pub fn decrypt(&self, account: Address, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error> { pub fn decrypt(&self, account: Address, password: Option<String>, shared_mac: &[u8], message: &[u8]) -> Result<Vec<u8>, Error> {
let data = { let password = try!(password.map(Ok).unwrap_or_else(|| self.password(&account)));
let mut unlocked = self.unlocked.lock(); Ok(try!(self.sstore.decrypt(&account, &password, shared_mac, message)))
let data = try!(unlocked.get(&account).ok_or(Error::NotUnlocked)).clone();
if let Unlock::Temp = data.unlock {
unlocked.remove(&account).expect("data exists: so key must exist: qed");
}
if let Unlock::Timed((ref start, ref duration)) = data.unlock {
if start.elapsed() > Duration::from_millis(*duration as u64) {
unlocked.remove(&account).expect("data exists: so key must exist: qed");
return Err(Error::NotUnlocked);
}
}
data
};
Ok(try!(self.sstore.decrypt(&account, &data.password, shared_mac, message)))
}
/// Unlocks an account, signs the message, and locks it again.
pub fn sign_with_password(&self, account: Address, password: String, message: Message) -> Result<Signature, Error> {
let signature = try!(self.sstore.sign(&account, &password, &message));
Ok(signature)
} }
/// Returns the underlying `SecretStore` reference if one exists. /// Returns the underlying `SecretStore` reference if one exists.
@ -386,8 +388,8 @@ mod tests {
assert!(ap.insert_account(kp.secret().clone(), "test").is_ok()); assert!(ap.insert_account(kp.secret().clone(), "test").is_ok());
assert!(ap.unlock_account_temporarily(kp.address(), "test1".into()).is_err()); assert!(ap.unlock_account_temporarily(kp.address(), "test1".into()).is_err());
assert!(ap.unlock_account_temporarily(kp.address(), "test".into()).is_ok()); assert!(ap.unlock_account_temporarily(kp.address(), "test".into()).is_ok());
assert!(ap.sign(kp.address(), Default::default()).is_ok()); assert!(ap.sign(kp.address(), None, Default::default()).is_ok());
assert!(ap.sign(kp.address(), Default::default()).is_err()); assert!(ap.sign(kp.address(), None, Default::default()).is_err());
} }
#[test] #[test]
@ -397,11 +399,11 @@ mod tests {
assert!(ap.insert_account(kp.secret().clone(), "test").is_ok()); assert!(ap.insert_account(kp.secret().clone(), "test").is_ok());
assert!(ap.unlock_account_permanently(kp.address(), "test1".into()).is_err()); assert!(ap.unlock_account_permanently(kp.address(), "test1".into()).is_err());
assert!(ap.unlock_account_permanently(kp.address(), "test".into()).is_ok()); assert!(ap.unlock_account_permanently(kp.address(), "test".into()).is_ok());
assert!(ap.sign(kp.address(), Default::default()).is_ok()); assert!(ap.sign(kp.address(), None, Default::default()).is_ok());
assert!(ap.sign(kp.address(), Default::default()).is_ok()); assert!(ap.sign(kp.address(), None, Default::default()).is_ok());
assert!(ap.unlock_account_temporarily(kp.address(), "test".into()).is_ok()); assert!(ap.unlock_account_temporarily(kp.address(), "test".into()).is_ok());
assert!(ap.sign(kp.address(), Default::default()).is_ok()); assert!(ap.sign(kp.address(), None, Default::default()).is_ok());
assert!(ap.sign(kp.address(), Default::default()).is_ok()); assert!(ap.sign(kp.address(), None, Default::default()).is_ok());
} }
#[test] #[test]
@ -411,8 +413,8 @@ mod tests {
assert!(ap.insert_account(kp.secret().clone(), "test").is_ok()); assert!(ap.insert_account(kp.secret().clone(), "test").is_ok());
assert!(ap.unlock_account_timed(kp.address(), "test1".into(), 2000).is_err()); assert!(ap.unlock_account_timed(kp.address(), "test1".into(), 2000).is_err());
assert!(ap.unlock_account_timed(kp.address(), "test".into(), 2000).is_ok()); assert!(ap.unlock_account_timed(kp.address(), "test".into(), 2000).is_ok());
assert!(ap.sign(kp.address(), Default::default()).is_ok()); assert!(ap.sign(kp.address(), None, Default::default()).is_ok());
::std::thread::sleep(Duration::from_millis(2000)); ::std::thread::sleep(Duration::from_millis(2000));
assert!(ap.sign(kp.address(), Default::default()).is_err()); assert!(ap.sign(kp.address(), None, Default::default()).is_err());
} }
} }
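Taken together, the AccountProvider changes above make the password an explicit, optional argument of sign/decrypt and add password-management helpers. A hedged end-to-end sketch; the transient_provider constructor and the ethcore::account_provider module path are assumed from surrounding ethcore code and are not part of this diff:
// Sketch of the updated API: sign accepts Option<String> and falls back to the
// unlocked-account cache when None is given.
extern crate ethcore;

use ethcore::account_provider::AccountProvider;

fn demo() {
    let ap = AccountProvider::transient_provider(); // assumed in-memory constructor
    let (address, _public) = ap.new_account_and_public("pass").unwrap();

    // Password check and rotation.
    assert!(ap.test_password(&address, "pass".into()).unwrap());
    ap.change_password(&address, "pass".into(), "new-pass".into()).unwrap();

    // Sign with an explicit password; no unlock needed...
    ap.sign(address, Some("new-pass".into()), Default::default()).unwrap();

    // ...or unlock first and pass None. A temporary unlock is consumed by the first sign.
    ap.unlock_account_temporarily(address, "new-pass".into()).unwrap();
    ap.sign(address, None, Default::default()).unwrap();
}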

View File

@ -15,10 +15,14 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Evm input params. //! Evm input params.
use common::*; use util::{Address, Bytes, Uint, U256};
use util::hash::{H256, FixedHash};
use util::sha3::{Hashable, SHA3_EMPTY};
use ethjson; use ethjson;
use types::executed::CallType; use types::executed::CallType;
use std::sync::Arc;
/// Transaction value /// Transaction value
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub enum ActionValue { pub enum ActionValue {
@ -43,6 +47,8 @@ impl ActionValue {
pub struct ActionParams { pub struct ActionParams {
/// Address of currently executed code. /// Address of currently executed code.
pub code_address: Address, pub code_address: Address,
/// Hash of currently executed code.
pub code_hash: H256,
/// Receive address. Usually equal to code_address, /// Receive address. Usually equal to code_address,
/// except when called using CALLCODE. /// except when called using CALLCODE.
pub address: Address, pub address: Address,
@ -57,7 +63,7 @@ pub struct ActionParams {
/// Transaction value. /// Transaction value.
pub value: ActionValue, pub value: ActionValue,
/// Code being executed. /// Code being executed.
pub code: Option<Bytes>, pub code: Option<Arc<Bytes>>,
/// Input data. /// Input data.
pub data: Option<Bytes>, pub data: Option<Bytes>,
/// Type of call /// Type of call
@ -70,6 +76,7 @@ impl Default for ActionParams {
fn default() -> ActionParams { fn default() -> ActionParams {
ActionParams { ActionParams {
code_address: Address::new(), code_address: Address::new(),
code_hash: SHA3_EMPTY,
address: Address::new(), address: Address::new(),
sender: Address::new(), sender: Address::new(),
origin: Address::new(), origin: Address::new(),
@ -88,10 +95,11 @@ impl From<ethjson::vm::Transaction> for ActionParams {
let address: Address = t.address.into(); let address: Address = t.address.into();
ActionParams { ActionParams {
code_address: Address::new(), code_address: Address::new(),
code_hash: (&*t.code).sha3(),
address: address, address: address,
sender: t.sender.into(), sender: t.sender.into(),
origin: t.origin.into(), origin: t.origin.into(),
code: Some(t.code.into()), code: Some(Arc::new(t.code.into())),
data: Some(t.data.into()), data: Some(t.data.into()),
gas: t.gas.into(), gas: t.gas.into(),
gas_price: t.gas_price.into(), gas_price: t.gas_price.into(),

View File

@ -16,7 +16,7 @@
//! Ethcore basic typenames. //! Ethcore basic typenames.
use util::*; use util::hash::H2048;
/// Type for a 2048-bit log-bloom, as used by our blocks. /// Type for a 2048-bit log-bloom, as used by our blocks.
pub type LogBloom = H2048; pub type LogBloom = H2048;

View File

@ -16,13 +16,26 @@
//! Blockchain block. //! Blockchain block.
use common::*; use std::sync::Arc;
use std::collections::HashSet;
use rlp::{UntrustedRlp, RlpStream, Encodable, Decodable, Decoder, DecoderError, View, Stream};
use util::{Bytes, Address, Uint, FixedHash, Hashable, U256, H256, ordered_trie_root, SHA3_NULL_RLP};
use util::error::{Mismatch, OutOfBounds};
use basic_types::{LogBloom, Seal};
use env_info::{EnvInfo, LastHashes};
use engines::Engine; use engines::Engine;
use state::*; use error::{Error, BlockError, TransactionError};
use verification::PreverifiedBlock;
use trace::FlatTrace;
use factory::Factories; use factory::Factories;
use rlp::*; use header::Header;
use receipt::Receipt;
use state::State;
use state_db::StateDB;
use trace::FlatTrace;
use transaction::SignedTransaction;
use verification::PreverifiedBlock;
use views::BlockView;
/// A block, encoded as it is on the block chain. /// A block, encoded as it is on the block chain.
#[derive(Default, Debug, Clone, PartialEq)] #[derive(Default, Debug, Clone, PartialEq)]
@ -69,7 +82,7 @@ impl Decodable for Block {
} }
} }
/// Internal type for a block's common elements. /// An internal type for a block's common elements.
#[derive(Clone)] #[derive(Clone)]
pub struct ExecutedBlock { pub struct ExecutedBlock {
base: Block, base: Block,
@ -179,7 +192,7 @@ pub trait IsBlock {
/// Trait for an object that has a state database. /// Trait for an object that has a state database.
pub trait Drain { pub trait Drain {
/// Drop this object and return the underlying database. /// Drop this object and return the underlying database.
fn drain(self) -> Box<JournalDB>; fn drain(self) -> StateDB;
} }
impl IsBlock for ExecutedBlock { impl IsBlock for ExecutedBlock {
@ -205,6 +218,7 @@ pub struct ClosedBlock {
block: ExecutedBlock, block: ExecutedBlock,
uncle_bytes: Bytes, uncle_bytes: Bytes,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
unclosed_state: State,
} }
/// Just like `ClosedBlock` except that we can't reopen it and it's faster. /// Just like `ClosedBlock` except that we can't reopen it and it's faster.
@ -231,7 +245,7 @@ impl<'x> OpenBlock<'x> {
engine: &'x Engine, engine: &'x Engine,
factories: Factories, factories: Factories,
tracing: bool, tracing: bool,
db: Box<JournalDB>, db: StateDB,
parent: &Header, parent: &Header,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
author: Address, author: Address,
@ -336,7 +350,7 @@ impl<'x> OpenBlock<'x> {
let t = outcome.trace; let t = outcome.trace;
self.block.traces.as_mut().map(|traces| traces.push(t)); self.block.traces.as_mut().map(|traces| traces.push(t));
self.block.receipts.push(outcome.receipt); self.block.receipts.push(outcome.receipt);
Ok(self.block.receipts.last().unwrap()) Ok(self.block.receipts.last().expect("receipt just pushed; qed"))
} }
Err(x) => Err(From::from(x)) Err(x) => Err(From::from(x))
} }
@ -346,8 +360,7 @@ impl<'x> OpenBlock<'x> {
pub fn close(self) -> ClosedBlock { pub fn close(self) -> ClosedBlock {
let mut s = self; let mut s = self;
// take a snapshot so the engine's changes can be rolled back. let unclosed_state = s.block.state.clone();
s.block.state.snapshot();
s.engine.on_close_block(&mut s.block); s.engine.on_close_block(&mut s.block);
s.block.base.header.set_transactions_root(ordered_trie_root(s.block.base.transactions.iter().map(|e| e.rlp_bytes().to_vec()))); s.block.base.header.set_transactions_root(ordered_trie_root(s.block.base.transactions.iter().map(|e| e.rlp_bytes().to_vec())));
@ -362,6 +375,7 @@ impl<'x> OpenBlock<'x> {
block: s.block, block: s.block,
uncle_bytes: uncle_bytes, uncle_bytes: uncle_bytes,
last_hashes: s.last_hashes, last_hashes: s.last_hashes,
unclosed_state: unclosed_state,
} }
} }
@ -369,9 +383,6 @@ impl<'x> OpenBlock<'x> {
pub fn close_and_lock(self) -> LockedBlock { pub fn close_and_lock(self) -> LockedBlock {
let mut s = self; let mut s = self;
// take a snapshot so the engine's changes can be rolled back.
s.block.state.snapshot();
s.engine.on_close_block(&mut s.block); s.engine.on_close_block(&mut s.block);
if s.block.base.header.transactions_root().is_zero() || s.block.base.header.transactions_root() == &SHA3_NULL_RLP { if s.block.base.header.transactions_root().is_zero() || s.block.base.header.transactions_root() == &SHA3_NULL_RLP {
s.block.base.header.set_transactions_root(ordered_trie_root(s.block.base.transactions.iter().map(|e| e.rlp_bytes().to_vec()))); s.block.base.header.set_transactions_root(ordered_trie_root(s.block.base.transactions.iter().map(|e| e.rlp_bytes().to_vec())));
@ -388,12 +399,15 @@ impl<'x> OpenBlock<'x> {
s.block.base.header.set_log_bloom(s.block.receipts.iter().fold(LogBloom::zero(), |mut b, r| {b = &b | &r.log_bloom; b})); //TODO: use |= operator s.block.base.header.set_log_bloom(s.block.receipts.iter().fold(LogBloom::zero(), |mut b, r| {b = &b | &r.log_bloom; b})); //TODO: use |= operator
s.block.base.header.set_gas_used(s.block.receipts.last().map_or(U256::zero(), |r| r.gas_used)); s.block.base.header.set_gas_used(s.block.receipts.last().map_or(U256::zero(), |r| r.gas_used));
ClosedBlock { LockedBlock {
block: s.block, block: s.block,
uncle_bytes: uncle_bytes, uncle_bytes: uncle_bytes,
last_hashes: s.last_hashes, }
}.lock()
} }
#[cfg(test)]
/// Return mutable block reference. To be used in tests only.
pub fn block_mut (&mut self) -> &mut ExecutedBlock { &mut self.block }
} }
impl<'x> IsBlock for OpenBlock<'x> { impl<'x> IsBlock for OpenBlock<'x> {
@ -413,17 +427,7 @@ impl ClosedBlock {
pub fn hash(&self) -> H256 { self.header().rlp_sha3(Seal::Without) } pub fn hash(&self) -> H256 { self.header().rlp_sha3(Seal::Without) }
/// Turn this into a `LockedBlock`, unable to be reopened again. /// Turn this into a `LockedBlock`, unable to be reopened again.
pub fn lock(mut self) -> LockedBlock { pub fn lock(self) -> LockedBlock {
// finalize the changes made by the engine.
self.block.state.clear_snapshot();
if let Err(e) = self.block.state.commit() {
warn!("Error committing closed block's state: {:?}", e);
}
// set the state root here, after commit recalculates with the block
// rewards.
self.block.base.header.set_state_root(self.block.state.root().clone());
LockedBlock { LockedBlock {
block: self.block, block: self.block,
uncle_bytes: self.uncle_bytes, uncle_bytes: self.uncle_bytes,
@ -431,12 +435,12 @@ impl ClosedBlock {
} }
/// Given an engine reference, reopen the `ClosedBlock` into an `OpenBlock`. /// Given an engine reference, reopen the `ClosedBlock` into an `OpenBlock`.
pub fn reopen(mut self, engine: &Engine) -> OpenBlock { pub fn reopen(self, engine: &Engine) -> OpenBlock {
// revert rewards (i.e. set state back at last transaction's state). // revert rewards (i.e. set state back at last transaction's state).
self.block.state.revert_snapshot(); let mut block = self.block;
block.state = self.unclosed_state;
OpenBlock { OpenBlock {
block: self.block, block: block,
engine: engine, engine: engine,
last_hashes: self.last_hashes, last_hashes: self.last_hashes,
} }
@ -462,11 +466,11 @@ impl LockedBlock {
/// Provide a valid seal in order to turn this into a `SealedBlock`. /// Provide a valid seal in order to turn this into a `SealedBlock`.
/// This does check the validity of `seal` with the engine. /// This does check the validity of `seal` with the engine.
/// Returns the `ClosedBlock` back again if the seal is no good. /// Returns the `ClosedBlock` back again if the seal is no good.
pub fn try_seal(self, engine: &Engine, seal: Vec<Bytes>) -> Result<SealedBlock, LockedBlock> { pub fn try_seal(self, engine: &Engine, seal: Vec<Bytes>) -> Result<SealedBlock, (Error, LockedBlock)> {
let mut s = self; let mut s = self;
s.block.base.header.set_seal(seal); s.block.base.header.set_seal(seal);
match engine.verify_block_seal(&s.block.base.header) { match engine.verify_block_seal(&s.block.base.header) {
Err(_) => Err(s), Err(e) => Err((e, s)),
_ => Ok(SealedBlock { block: s.block, uncle_bytes: s.uncle_bytes }), _ => Ok(SealedBlock { block: s.block, uncle_bytes: s.uncle_bytes }),
} }
} }
@ -474,7 +478,9 @@ impl LockedBlock {
impl Drain for LockedBlock { impl Drain for LockedBlock {
/// Drop this object and return the underlying database. /// Drop this object and return the underlying database.
fn drain(self) -> Box<JournalDB> { self.block.state.drop().1 } fn drain(self) -> StateDB {
self.block.state.drop().1
}
} }
impl SealedBlock { impl SealedBlock {
@ -490,7 +496,9 @@ impl SealedBlock {
impl Drain for SealedBlock { impl Drain for SealedBlock {
/// Drop this object and return the underlying database. /// Drop this object and return the underlying database.
fn drain(self) -> Box<JournalDB> { self.block.state.drop().1 } fn drain(self) -> StateDB {
self.block.state.drop().1
}
} }
impl IsBlock for SealedBlock { impl IsBlock for SealedBlock {
@ -505,7 +513,7 @@ pub fn enact(
uncles: &[Header], uncles: &[Header],
engine: &Engine, engine: &Engine,
tracing: bool, tracing: bool,
db: Box<JournalDB>, db: StateDB,
parent: &Header, parent: &Header,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
factories: Factories, factories: Factories,
@ -526,25 +534,38 @@ pub fn enact(
b.set_uncles_hash(header.uncles_hash().clone()); b.set_uncles_hash(header.uncles_hash().clone());
b.set_transactions_root(header.transactions_root().clone()); b.set_transactions_root(header.transactions_root().clone());
b.set_receipts_root(header.receipts_root().clone()); b.set_receipts_root(header.receipts_root().clone());
for t in transactions { try!(b.push_transaction(t.clone(), None)); }
for u in uncles { try!(b.push_uncle(u.clone())); } try!(push_transactions(&mut b, transactions));
for u in uncles {
try!(b.push_uncle(u.clone()));
}
Ok(b.close_and_lock()) Ok(b.close_and_lock())
} }
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header #[inline(always)]
#[cfg_attr(feature="dev", allow(too_many_arguments))] #[cfg(not(feature = "slow-blocks"))]
pub fn enact_bytes( fn push_transactions(block: &mut OpenBlock, transactions: &[SignedTransaction]) -> Result<(), Error> {
block_bytes: &[u8], for t in transactions {
engine: &Engine, try!(block.push_transaction(t.clone(), None));
tracing: bool, }
db: Box<JournalDB>, Ok(())
parent: &Header, }
last_hashes: Arc<LastHashes>,
factories: Factories, #[cfg(feature = "slow-blocks")]
) -> Result<LockedBlock, Error> { fn push_transactions(block: &mut OpenBlock, transactions: &[SignedTransaction]) -> Result<(), Error> {
let block = BlockView::new(block_bytes); use std::time;
let header = block.header();
enact(&header, &block.transactions(), &block.uncles(), engine, tracing, db, parent, last_hashes, factories) let slow_tx = option_env!("SLOW_TX_DURATION").and_then(|v| v.parse().ok()).unwrap_or(100);
for t in transactions {
let hash = t.hash();
let start = time::Instant::now();
try!(block.push_transaction(t.clone(), None));
let took = start.elapsed();
if took > time::Duration::from_millis(slow_tx) {
warn!("Heavy transaction in block {:?}: {:?}", block.header().number(), hash);
}
}
Ok(())
} }
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
@ -553,7 +574,7 @@ pub fn enact_verified(
block: &PreverifiedBlock, block: &PreverifiedBlock,
engine: &Engine, engine: &Engine,
tracing: bool, tracing: bool,
db: Box<JournalDB>, db: StateDB,
parent: &Header, parent: &Header,
last_hashes: Arc<LastHashes>, last_hashes: Arc<LastHashes>,
factories: Factories, factories: Factories,
@ -562,35 +583,61 @@ pub fn enact_verified(
enact(&block.header, &block.transactions, &view.uncles(), engine, tracing, db, parent, last_hashes, factories) enact(&block.header, &block.transactions, &view.uncles(), engine, tracing, db, parent, last_hashes, factories)
} }
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header. Seal the block afterwards
#[cfg_attr(feature="dev", allow(too_many_arguments))]
pub fn enact_and_seal(
block_bytes: &[u8],
engine: &Engine,
tracing: bool,
db: Box<JournalDB>,
parent: &Header,
last_hashes: Arc<LastHashes>,
factories: Factories,
) -> Result<SealedBlock, Error> {
let header = BlockView::new(block_bytes).header_view();
Ok(try!(try!(enact_bytes(block_bytes, engine, tracing, db, parent, last_hashes, factories)).seal(engine, header.seal())))
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use tests::helpers::*; use tests::helpers::*;
use super::*; use super::*;
use common::*; use engines::Engine;
use env_info::LastHashes;
use error::Error;
use header::Header;
use factory::Factories;
use state_db::StateDB;
use views::BlockView;
use util::Address;
use util::hash::FixedHash;
use std::sync::Arc;
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
#[cfg_attr(feature="dev", allow(too_many_arguments))]
fn enact_bytes(
block_bytes: &[u8],
engine: &Engine,
tracing: bool,
db: StateDB,
parent: &Header,
last_hashes: Arc<LastHashes>,
factories: Factories,
) -> Result<LockedBlock, Error> {
let block = BlockView::new(block_bytes);
let header = block.header();
enact(&header, &block.transactions(), &block.uncles(), engine, tracing, db, parent, last_hashes, factories)
}
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header. Seal the block afterwards
#[cfg_attr(feature="dev", allow(too_many_arguments))]
fn enact_and_seal(
block_bytes: &[u8],
engine: &Engine,
tracing: bool,
db: StateDB,
parent: &Header,
last_hashes: Arc<LastHashes>,
factories: Factories,
) -> Result<SealedBlock, Error> {
let header = BlockView::new(block_bytes).header_view();
Ok(try!(try!(enact_bytes(block_bytes, engine, tracing, db, parent, last_hashes, factories)).seal(engine, header.seal())))
}
#[test] #[test]
fn open_block() { fn open_block() {
use spec::*; use spec::*;
let spec = Spec::new_test(); let spec = Spec::new_test();
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let b = OpenBlock::new(&*spec.engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let b = OpenBlock::new(&*spec.engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = b.close_and_lock(); let b = b.close_and_lock();
@ -604,25 +651,25 @@ mod tests {
let engine = &*spec.engine; let engine = &*spec.engine;
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap() let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap()
.close_and_lock().seal(engine, vec![]).unwrap(); .close_and_lock().seal(engine, vec![]).unwrap();
let orig_bytes = b.rlp_bytes(); let orig_bytes = b.rlp_bytes();
let orig_db = b.drain(); let orig_db = b.drain();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap(); let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap();
assert_eq!(e.rlp_bytes(), orig_bytes); assert_eq!(e.rlp_bytes(), orig_bytes);
let db = e.drain(); let db = e.drain();
assert_eq!(orig_db.keys(), db.keys()); assert_eq!(orig_db.journal_db().keys(), db.journal_db().keys());
assert!(orig_db.keys().iter().filter(|k| orig_db.get(k.0) != db.get(k.0)).next() == None); assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0) != db.journal_db().get(k.0)).next() == None);
} }
#[test] #[test]
@ -632,9 +679,9 @@ mod tests {
let engine = &*spec.engine; let engine = &*spec.engine;
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let mut open_block = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let mut open_block = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let mut uncle1_header = Header::new(); let mut uncle1_header = Header::new();
@ -648,9 +695,9 @@ mod tests {
let orig_bytes = b.rlp_bytes(); let orig_bytes = b.rlp_bytes();
let orig_db = b.drain(); let orig_db = b.drain();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap(); let e = enact_and_seal(&orig_bytes, engine, false, db, &genesis_header, last_hashes, Default::default()).unwrap();
let bytes = e.rlp_bytes(); let bytes = e.rlp_bytes();
@ -659,7 +706,7 @@ mod tests {
assert_eq!(uncles[1].extra_data(), b"uncle2"); assert_eq!(uncles[1].extra_data(), b"uncle2");
let db = e.drain(); let db = e.drain();
assert_eq!(orig_db.keys(), db.keys()); assert_eq!(orig_db.journal_db().keys(), db.journal_db().keys());
assert!(orig_db.keys().iter().filter(|k| orig_db.get(k.0) != db.get(k.0)).next() == None); assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0) != db.journal_db().get(k.0)).next() == None);
} }
} }

View File

@ -29,3 +29,12 @@ pub struct BestBlock {
/// Best block uncompressed bytes /// Best block uncompressed bytes
pub block: Bytes, pub block: Bytes,
} }
/// Best ancient block info. If the blockchain has a gap this keeps track of where it starts.
#[derive(Default)]
pub struct BestAncientBlock {
/// Best block hash.
pub hash: H256,
/// Best block number.
pub number: BlockNumber,
}

View File

@ -27,7 +27,8 @@ use log_entry::{LogEntry, LocalizedLogEntry};
use receipt::Receipt; use receipt::Receipt;
use blooms::{Bloom, BloomGroup}; use blooms::{Bloom, BloomGroup};
use blockchain::block_info::{BlockInfo, BlockLocation, BranchBecomingCanonChainData}; use blockchain::block_info::{BlockInfo, BlockLocation, BranchBecomingCanonChainData};
use blockchain::best_block::BestBlock; use blockchain::best_block::{BestBlock, BestAncientBlock};
use types::blockchain_info::BlockChainInfo;
use types::tree_route::TreeRoute; use types::tree_route::TreeRoute;
use blockchain::update::ExtrasUpdate; use blockchain::update::ExtrasUpdate;
use blockchain::{CacheSize, ImportRoute, Config}; use blockchain::{CacheSize, ImportRoute, Config};
@ -43,16 +44,24 @@ pub trait BlockProvider {
/// (though not necessarily a part of the canon chain). /// (though not necessarily a part of the canon chain).
fn is_known(&self, hash: &H256) -> bool; fn is_known(&self, hash: &H256) -> bool;
/// Get the first block which this chain holds. /// Get the first block of the best part of the chain.
/// Return `None` if there is no gap and the first block is the genesis.
/// Any queries of blocks which precede this one are not guaranteed to /// Any queries of blocks which precede this one are not guaranteed to
/// succeed. /// succeed.
fn first_block(&self) -> H256; fn first_block(&self) -> Option<H256>;
/// Get the number of the first block. /// Get the number of the first block.
fn first_block_number(&self) -> BlockNumber { fn first_block_number(&self) -> Option<BlockNumber> {
self.block_number(&self.first_block()).expect("First block always stored; qed") self.first_block().map(|b| self.block_number(&b).expect("First block is always set to an existing block or `None`. Existing block always has a number; qed"))
} }
/// Get the best block of the first block sequence if there is a gap.
fn best_ancient_block(&self) -> Option<H256>;
/// Get the number of the best ancient block.
fn best_ancient_number(&self) -> Option<BlockNumber> {
self.best_ancient_block().map(|h| self.block_number(&h).expect("Ancient block is always set to an existing block or `None`. Existing block always has a number; qed"))
}
/// Get raw block data /// Get raw block data
fn block(&self, hash: &H256) -> Option<Bytes>; fn block(&self, hash: &H256) -> Option<Bytes>;
@ -123,7 +132,8 @@ pub trait BlockProvider {
/// Returns the header of the genesis block. /// Returns the header of the genesis block.
fn genesis_header(&self) -> Header { fn genesis_header(&self) -> Header {
self.block_header(&self.genesis_hash()).unwrap() self.block_header(&self.genesis_hash())
.expect("Genesis header always stored; qed")
} }
/// Returns numbers of blocks containing given bloom. /// Returns numbers of blocks containing given bloom.
@ -160,9 +170,14 @@ impl bc::group::BloomGroupDatabase for BlockChain {
pub struct BlockChain { pub struct BlockChain {
// All locks must be captured in the order declared here. // All locks must be captured in the order declared here.
blooms_config: bc::Config, blooms_config: bc::Config,
first_block: H256,
best_block: RwLock<BestBlock>, best_block: RwLock<BestBlock>,
// Stores best block of the first uninterrupted sequence of blocks. `None` if there are no gaps.
// Only updated with `insert_unordered_block`.
best_ancient_block: RwLock<Option<BestAncientBlock>>,
// Stores the last block of the last sequence of blocks. `None` if there are no gaps.
// This is calculated on start and does not get updated.
first_block: Option<H256>,
// block cache // block cache
block_headers: RwLock<HashMap<H256, Bytes>>, block_headers: RwLock<HashMap<H256, Bytes>>,
@ -181,7 +196,7 @@ pub struct BlockChain {
pending_best_block: RwLock<Option<BestBlock>>, pending_best_block: RwLock<Option<BestBlock>>,
pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>, pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>,
pending_transaction_addresses: RwLock<HashMap<H256, TransactionAddress>>, pending_transaction_addresses: RwLock<HashMap<H256, Option<TransactionAddress>>>,
} }
impl BlockProvider for BlockChain { impl BlockProvider for BlockChain {
@ -191,8 +206,16 @@ impl BlockProvider for BlockChain {
self.db.exists_with_cache(db::COL_EXTRA, &self.block_details, hash) self.db.exists_with_cache(db::COL_EXTRA, &self.block_details, hash)
} }
fn first_block(&self) -> H256 { fn first_block(&self) -> Option<H256> {
self.first_block self.first_block.clone()
}
fn best_ancient_block(&self) -> Option<H256> {
self.best_ancient_block.read().as_ref().map(|b| b.hash.clone())
}
fn best_ancient_number(&self) -> Option<BlockNumber> {
self.best_ancient_block.read().as_ref().map(|b| b.number)
} }
/// Get raw block data /// Get raw block data
@ -331,11 +354,15 @@ impl BlockProvider for BlockChain {
.filter_map(|number| self.block_hash(number).map(|hash| (number, hash))) .filter_map(|number| self.block_hash(number).map(|hash| (number, hash)))
.filter_map(|(number, hash)| self.block_receipts(&hash).map(|r| (number, hash, r.receipts))) .filter_map(|(number, hash)| self.block_receipts(&hash).map(|r| (number, hash, r.receipts)))
.filter_map(|(number, hash, receipts)| self.block_body(&hash).map(|ref b| (number, hash, receipts, BodyView::new(b).transaction_hashes()))) .filter_map(|(number, hash, receipts)| self.block_body(&hash).map(|ref b| (number, hash, receipts, BodyView::new(b).transaction_hashes())))
.flat_map(|(number, hash, mut receipts, hashes)| { .flat_map(|(number, hash, mut receipts, mut hashes)| {
assert_eq!(receipts.len(), hashes.len()); if receipts.len() != hashes.len() {
warn!("Block {} ({}) has different number of receipts ({}) to transactions ({}). Database corrupt?", number, hash, receipts.len(), hashes.len());
assert!(false);
}
log_index = receipts.iter().fold(0, |sum, receipt| sum + receipt.logs.len()); log_index = receipts.iter().fold(0, |sum, receipt| sum + receipt.logs.len());
let receipts_len = receipts.len(); let receipts_len = receipts.len();
hashes.reverse();
receipts.reverse(); receipts.reverse();
receipts.into_iter() receipts.into_iter()
.map(|receipt| receipt.logs) .map(|receipt| receipt.logs)
@ -367,6 +394,8 @@ impl BlockProvider for BlockChain {
} }
} }
/// An iterator which walks the blockchain towards the genesis.
#[derive(Clone)]
pub struct AncestryIter<'a> { pub struct AncestryIter<'a> {
current: H256, current: H256,
chain: &'a BlockChain, chain: &'a BlockChain,
@ -376,11 +405,10 @@ impl<'a> Iterator for AncestryIter<'a> {
type Item = H256; type Item = H256;
fn next(&mut self) -> Option<H256> { fn next(&mut self) -> Option<H256> {
if self.current.is_zero() { if self.current.is_zero() {
Option::None None
} else { } else {
let mut n = self.chain.block_details(&self.current).unwrap().parent; self.chain.block_details(&self.current)
mem::swap(&mut self.current, &mut n); .map(|details| mem::replace(&mut self.current, details.parent))
Some(n)
} }
} }
} }
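As a standalone illustration of the iterator shape above (not part of the diff), here is a minimal ancestry iterator over u64 hashes; a HashMap-backed parent index is a hypothetical stand-in for the real chain lookup.

// Sketch of an ancestry iterator that walks parent links back towards genesis,
// stopping when a lookup fails or the zero hash is reached.
use std::collections::HashMap;
use std::mem;

#[derive(Clone)]
struct AncestryIter<'a> {
    current: u64,
    parents: &'a HashMap<u64, u64>, // child hash -> parent hash
}

impl<'a> Iterator for AncestryIter<'a> {
    type Item = u64;
    fn next(&mut self) -> Option<u64> {
        if self.current == 0 {
            None
        } else {
            // Yield the current hash and advance to its parent, if known.
            self.parents
                .get(&self.current)
                .copied()
                .map(|parent| mem::replace(&mut self.current, parent))
        }
    }
}

fn main() {
    let parents: HashMap<u64, u64> = vec![(3, 2), (2, 1), (1, 0)].into_iter().collect();
    let iter = AncestryIter { current: 3, parents: &parents };
    assert_eq!(iter.collect::<Vec<_>>(), vec![3, 2, 1]);
}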
@ -396,8 +424,9 @@ impl BlockChain {
levels: LOG_BLOOMS_LEVELS, levels: LOG_BLOOMS_LEVELS,
elements_per_index: LOG_BLOOMS_ELEMENTS_PER_INDEX, elements_per_index: LOG_BLOOMS_ELEMENTS_PER_INDEX,
}, },
first_block: H256::zero(), first_block: None,
best_block: RwLock::new(BestBlock::default()), best_block: RwLock::new(BestBlock::default()),
best_ancient_block: RwLock::new(None),
block_headers: RwLock::new(HashMap::new()), block_headers: RwLock::new(HashMap::new()),
block_bodies: RwLock::new(HashMap::new()), block_bodies: RwLock::new(HashMap::new()),
block_details: RwLock::new(HashMap::new()), block_details: RwLock::new(HashMap::new()),
@ -439,7 +468,6 @@ impl BlockChain {
batch.write(db::COL_EXTRA, &header.number(), &hash); batch.write(db::COL_EXTRA, &header.number(), &hash);
batch.put(db::COL_EXTRA, b"best", &hash); batch.put(db::COL_EXTRA, b"best", &hash);
batch.put(db::COL_EXTRA, b"first", &hash);
bc.db.write(batch).expect("Low level database error. Some issue with disk?"); bc.db.write(batch).expect("Low level database error. Some issue with disk?");
hash hash
} }
@ -451,32 +479,45 @@ impl BlockChain {
let best_block_total_difficulty = bc.block_details(&best_block_hash).unwrap().total_difficulty; let best_block_total_difficulty = bc.block_details(&best_block_hash).unwrap().total_difficulty;
let best_block_rlp = bc.block(&best_block_hash).unwrap(); let best_block_rlp = bc.block(&best_block_hash).unwrap();
let raw_first = bc.db.get(db::COL_EXTRA, b"first").unwrap().map_or(Vec::new(), |v| v.to_vec()); let raw_first = bc.db.get(db::COL_EXTRA, b"first").unwrap().map(|v| v.to_vec());
let mut best_ancient = bc.db.get(db::COL_EXTRA, b"ancient").unwrap().map(|h| H256::from_slice(&h));
let best_ancient_number;
if best_ancient.is_none() && best_block_number > 1 && bc.block_hash(1).is_none() {
best_ancient = Some(bc.genesis_hash());
best_ancient_number = Some(0);
} else {
best_ancient_number = best_ancient.as_ref().and_then(|h| bc.block_number(h));
}
// binary search for the first block. // binary search for the first block.
if raw_first.is_empty() { match raw_first {
let (mut f, mut hash) = (best_block_number, best_block_hash); None => {
let mut l = 0; let (mut f, mut hash) = (best_block_number, best_block_hash);
let mut l = best_ancient_number.unwrap_or(0);
loop { loop {
if l >= f { break; } if l >= f { break; }
let step = (f - l) >> 1; let step = (f - l) >> 1;
let m = l + step; let m = l + step;
match bc.block_hash(m) { match bc.block_hash(m) {
Some(h) => { f = m; hash = h }, Some(h) => { f = m; hash = h },
None => { l = m + 1 }, None => { l = m + 1 },
}
} }
}
let mut batch = db.transaction(); if hash != bc.genesis_hash() {
batch.put(db::COL_EXTRA, b"first", &hash); trace!("First block calculated: {:?}", hash);
db.write(batch).expect("Low level database error."); let mut batch = db.transaction();
batch.put(db::COL_EXTRA, b"first", &hash);
bc.first_block = hash; db.write(batch).expect("Low level database error.");
} else { bc.first_block = Some(hash);
bc.first_block = H256::from_slice(&raw_first); }
},
Some(raw_first) => {
bc.first_block = Some(H256::from_slice(&raw_first));
},
} }
// and write them // and write them
@ -487,6 +528,14 @@ impl BlockChain {
hash: best_block_hash, hash: best_block_hash,
block: best_block_rlp, block: best_block_rlp,
}; };
if let (Some(hash), Some(number)) = (best_ancient, best_ancient_number) {
let mut best_ancient_block = bc.best_ancient_block.write();
*best_ancient_block = Some(BestAncientBlock {
hash: hash,
number: number,
});
}
} }
bc bc
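The binary search above finds the earliest stored block between a lower bound and the best block. A minimal standalone sketch of that search, assuming a simple closure in place of the real database lookup and u64 hashes:

// Blocks below some boundary are missing (pruned or not yet backfilled);
// everything from the boundary up to the best block is present.
fn first_stored(block_hash: impl Fn(u64) -> Option<u64>, low: u64, best: u64) -> u64 {
    let (mut l, mut f) = (low, best);
    let mut hash = block_hash(best).expect("best block is always stored");
    loop {
        if l >= f {
            break;
        }
        let step = (f - l) >> 1;
        let m = l + step;
        match block_hash(m) {
            Some(h) => { f = m; hash = h; } // m is stored: search the lower half
            None => { l = m + 1; }          // m is missing: search the upper half
        }
    }
    hash
}

fn main() {
    // Blocks 0..=4 are missing, 5..=10 are stored (hash == number here).
    let lookup = |n: u64| if n >= 5 && n <= 10 { Some(n) } else { None };
    assert_eq!(first_stored(lookup, 0, 10), 5);
}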
@ -640,11 +689,12 @@ impl BlockChain {
/// Inserts a verified, known block from the canonical chain. /// Inserts a verified, known block from the canonical chain.
/// ///
/// Can be performed out-of-order, but care must be taken that the final chain is in a correct state. /// Can be performed out-of-order, but care must be taken that the final chain is in a correct state.
/// This is used by snapshot restoration. /// This is used by snapshot restoration and when downloading missing blocks for the chain gap.
/// /// `is_best` forces the best block to be updated to this block.
/// `is_ancient` forces the best block of the first block sequence to be updated to this block.
/// Supply a dummy parent total difficulty when the parent block may not be in the chain. /// Supply a dummy parent total difficulty when the parent block may not be in the chain.
/// Returns true if the block is disconnected. /// Returns true if the block is disconnected.
pub fn insert_snapshot_block(&self, bytes: &[u8], receipts: Vec<Receipt>, parent_td: Option<U256>, is_best: bool) -> bool { pub fn insert_unordered_block(&self, batch: &mut DBTransaction, bytes: &[u8], receipts: Vec<Receipt>, parent_td: Option<U256>, is_best: bool, is_ancient: bool) -> bool {
let block = BlockView::new(bytes); let block = BlockView::new(bytes);
let header = block.header_view(); let header = block.header_view();
let hash = header.sha3(); let hash = header.sha3();
@ -655,8 +705,6 @@ impl BlockChain {
assert!(self.pending_best_block.read().is_none()); assert!(self.pending_best_block.read().is_none());
let mut batch = self.db.transaction();
let block_rlp = UntrustedRlp::new(bytes); let block_rlp = UntrustedRlp::new(bytes);
let compressed_header = block_rlp.at(0).unwrap().compress(RlpType::Blocks); let compressed_header = block_rlp.at(0).unwrap().compress(RlpType::Blocks);
let compressed_body = UntrustedRlp::new(&Self::block_to_body(bytes)).compress(RlpType::Blocks); let compressed_body = UntrustedRlp::new(&Self::block_to_body(bytes)).compress(RlpType::Blocks);
@ -670,22 +718,36 @@ impl BlockChain {
if let Some(parent_details) = maybe_parent { if let Some(parent_details) = maybe_parent {
// parent known to be in chain. // parent known to be in chain.
let info = BlockInfo { let info = BlockInfo {
hash: hash, hash: hash.clone(),
number: header.number(), number: header.number(),
total_difficulty: parent_details.total_difficulty + header.difficulty(), total_difficulty: parent_details.total_difficulty + header.difficulty(),
location: BlockLocation::CanonChain, location: BlockLocation::CanonChain,
}; };
self.prepare_update(&mut batch, ExtrasUpdate { self.prepare_update(batch, ExtrasUpdate {
block_hashes: self.prepare_block_hashes_update(bytes, &info), block_hashes: self.prepare_block_hashes_update(bytes, &info),
block_details: self.prepare_block_details_update(bytes, &info), block_details: self.prepare_block_details_update(bytes, &info),
block_receipts: self.prepare_block_receipts_update(receipts, &info), block_receipts: self.prepare_block_receipts_update(receipts, &info),
transactions_addresses: self.prepare_transaction_addresses_update(bytes, &info),
blocks_blooms: self.prepare_block_blooms_update(bytes, &info), blocks_blooms: self.prepare_block_blooms_update(bytes, &info),
transactions_addresses: self.prepare_transaction_addresses_update(bytes, &info),
info: info, info: info,
block: bytes block: bytes
}, is_best); }, is_best);
self.db.write(batch).unwrap();
if is_ancient {
let mut best_ancient_block = self.best_ancient_block.write();
let ancient_number = best_ancient_block.as_ref().map_or(0, |b| b.number);
if self.block_hash(header.number() + 1).is_some() {
batch.delete(db::COL_EXTRA, b"ancient");
*best_ancient_block = None;
} else if header.number() > ancient_number {
batch.put(db::COL_EXTRA, b"ancient", &hash);
*best_ancient_block = Some(BestAncientBlock {
hash: hash,
number: header.number(),
});
}
}
false false
} else { } else {
@ -710,17 +772,15 @@ impl BlockChain {
let mut update = HashMap::new(); let mut update = HashMap::new();
update.insert(hash, block_details); update.insert(hash, block_details);
self.prepare_update(&mut batch, ExtrasUpdate { self.prepare_update(batch, ExtrasUpdate {
block_hashes: self.prepare_block_hashes_update(bytes, &info), block_hashes: self.prepare_block_hashes_update(bytes, &info),
block_details: update, block_details: update,
block_receipts: self.prepare_block_receipts_update(receipts, &info), block_receipts: self.prepare_block_receipts_update(receipts, &info),
transactions_addresses: self.prepare_transaction_addresses_update(bytes, &info),
blocks_blooms: self.prepare_block_blooms_update(bytes, &info), blocks_blooms: self.prepare_block_blooms_update(bytes, &info),
transactions_addresses: self.prepare_transaction_addresses_update(bytes, &info),
info: info, info: info,
block: bytes, block: bytes,
}, is_best); }, is_best);
self.db.write(batch).unwrap();
true true
} }
} }
@ -783,8 +843,8 @@ impl BlockChain {
block_hashes: self.prepare_block_hashes_update(bytes, &info), block_hashes: self.prepare_block_hashes_update(bytes, &info),
block_details: self.prepare_block_details_update(bytes, &info), block_details: self.prepare_block_details_update(bytes, &info),
block_receipts: self.prepare_block_receipts_update(receipts, &info), block_receipts: self.prepare_block_receipts_update(receipts, &info),
transactions_addresses: self.prepare_transaction_addresses_update(bytes, &info),
blocks_blooms: self.prepare_block_blooms_update(bytes, &info), blocks_blooms: self.prepare_block_blooms_update(bytes, &info),
transactions_addresses: self.prepare_transaction_addresses_update(bytes, &info),
info: info.clone(), info: info.clone(),
block: bytes, block: bytes,
}, true); }, true);
@ -877,7 +937,7 @@ impl BlockChain {
let mut write_txs = self.pending_transaction_addresses.write(); let mut write_txs = self.pending_transaction_addresses.write();
batch.extend_with_cache(db::COL_EXTRA, &mut *write_hashes, update.block_hashes, CacheUpdatePolicy::Overwrite); batch.extend_with_cache(db::COL_EXTRA, &mut *write_hashes, update.block_hashes, CacheUpdatePolicy::Overwrite);
batch.extend_with_cache(db::COL_EXTRA, &mut *write_txs, update.transactions_addresses, CacheUpdatePolicy::Overwrite); batch.extend_with_option_cache(db::COL_EXTRA, &mut *write_txs, update.transactions_addresses, CacheUpdatePolicy::Overwrite);
} }
} }
@ -895,18 +955,25 @@ impl BlockChain {
*best_block = block; *best_block = block;
} }
let pending_txs = mem::replace(&mut *pending_write_txs, HashMap::new());
let (retracted_txs, enacted_txs) = pending_txs.into_iter().partition::<HashMap<_, _>, _>(|&(_, ref value)| value.is_none());
let pending_hashes_keys: Vec<_> = pending_write_hashes.keys().cloned().collect(); let pending_hashes_keys: Vec<_> = pending_write_hashes.keys().cloned().collect();
let pending_txs_keys: Vec<_> = pending_write_txs.keys().cloned().collect(); let enacted_txs_keys: Vec<_> = enacted_txs.keys().cloned().collect();
write_hashes.extend(mem::replace(&mut *pending_write_hashes, HashMap::new())); write_hashes.extend(mem::replace(&mut *pending_write_hashes, HashMap::new()));
write_txs.extend(mem::replace(&mut *pending_write_txs, HashMap::new())); write_txs.extend(enacted_txs.into_iter().map(|(k, v)| (k, v.expect("Transactions were partitioned; qed"))));
for hash in retracted_txs.keys() {
write_txs.remove(hash);
}
let mut cache_man = self.cache_man.lock(); let mut cache_man = self.cache_man.lock();
for n in pending_hashes_keys { for n in pending_hashes_keys {
cache_man.note_used(CacheID::BlockHashes(n)); cache_man.note_used(CacheID::BlockHashes(n));
} }
for hash in pending_txs_keys { for hash in enacted_txs_keys {
cache_man.note_used(CacheID::TransactionAddresses(hash)); cache_man.note_used(CacheID::TransactionAddresses(hash));
} }
} }
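The commit path above partitions pending transaction-address updates by whether the value is `None` (retracted) or `Some` (enacted). A small standalone sketch of that partition over simplified types (u64 hashes, u32 addresses; the helper name is hypothetical):

use std::collections::HashMap;
use std::mem;

// Apply staged updates: keep enacted entries in the cache, drop retracted ones.
fn apply_pending(
    cache: &mut HashMap<u64, u32>,           // tx hash -> address (simplified)
    pending: &mut HashMap<u64, Option<u32>>, // staged updates
) {
    let pending = mem::replace(pending, HashMap::new());
    let (retracted, enacted): (HashMap<_, _>, HashMap<_, _>) =
        pending.into_iter().partition(|&(_, ref v)| v.is_none());

    cache.extend(enacted.into_iter().map(|(k, v)| (k, v.expect("partitioned; qed"))));
    for hash in retracted.keys() {
        cache.remove(hash);
    }
}

fn main() {
    let mut cache = HashMap::new();
    cache.insert(1, 10);
    let mut pending = HashMap::new();
    pending.insert(1, None);     // retracted
    pending.insert(2, Some(20)); // enacted
    apply_pending(&mut cache, &mut pending);
    assert_eq!(cache.get(&1), None);
    assert_eq!(cache.get(&2), Some(&20));
}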
@ -933,17 +1000,29 @@ impl BlockChain {
if !self.is_known(parent) { return None; } if !self.is_known(parent) { return None; }
let mut excluded = HashSet::new(); let mut excluded = HashSet::new();
for a in self.ancestry_iter(parent.clone()).unwrap().take(uncle_generations) { let ancestry = match self.ancestry_iter(parent.clone()) {
excluded.extend(self.uncle_hashes(&a).unwrap().into_iter()); Some(iter) => iter,
excluded.insert(a); None => return None,
};
for a in ancestry.clone().take(uncle_generations) {
if let Some(uncles) = self.uncle_hashes(&a) {
excluded.extend(uncles);
excluded.insert(a);
} else {
break
}
} }
let mut ret = Vec::new(); let mut ret = Vec::new();
for a in self.ancestry_iter(parent.clone()).unwrap().skip(1).take(uncle_generations) { for a in ancestry.skip(1).take(uncle_generations) {
ret.extend(self.block_details(&a).unwrap().children.iter() if let Some(details) = self.block_details(&a) {
.filter(|h| !excluded.contains(h)) ret.extend(details.children.iter().filter(|h| !excluded.contains(h)))
); } else {
break
}
} }
Some(ret) Some(ret)
} }
@ -1008,7 +1087,7 @@ impl BlockChain {
} }
/// This function returns modified transaction addresses. /// This function returns modified transaction addresses.
fn prepare_transaction_addresses_update(&self, block_bytes: &[u8], info: &BlockInfo) -> HashMap<H256, TransactionAddress> { fn prepare_transaction_addresses_update(&self, block_bytes: &[u8], info: &BlockInfo) -> HashMap<H256, Option<TransactionAddress>> {
let block = BlockView::new(block_bytes); let block = BlockView::new(block_bytes);
let transaction_hashes = block.transaction_hashes(); let transaction_hashes = block.transaction_hashes();
@ -1017,10 +1096,10 @@ impl BlockChain {
transaction_hashes.into_iter() transaction_hashes.into_iter()
.enumerate() .enumerate()
.map(|(i ,tx_hash)| { .map(|(i ,tx_hash)| {
(tx_hash, TransactionAddress { (tx_hash, Some(TransactionAddress {
block_hash: info.hash.clone(), block_hash: info.hash.clone(),
index: i index: i
}) }))
}) })
.collect() .collect()
}, },
@ -1031,23 +1110,30 @@ impl BlockChain {
let hashes = BodyView::new(&bytes).transaction_hashes(); let hashes = BodyView::new(&bytes).transaction_hashes();
hashes.into_iter() hashes.into_iter()
.enumerate() .enumerate()
.map(|(i, tx_hash)| (tx_hash, TransactionAddress { .map(|(i, tx_hash)| (tx_hash, Some(TransactionAddress {
block_hash: hash.clone(), block_hash: hash.clone(),
index: i, index: i,
})) })))
.collect::<HashMap<H256, TransactionAddress>>() .collect::<HashMap<H256, Option<TransactionAddress>>>()
}); });
let current_addresses = transaction_hashes.into_iter() let current_addresses = transaction_hashes.into_iter()
.enumerate() .enumerate()
.map(|(i ,tx_hash)| { .map(|(i ,tx_hash)| {
(tx_hash, TransactionAddress { (tx_hash, Some(TransactionAddress {
block_hash: info.hash.clone(), block_hash: info.hash.clone(),
index: i index: i
}) }))
}); });
addresses.chain(current_addresses).collect() let retracted = data.retracted.iter().flat_map(|hash| {
let bytes = self.block_body(hash).expect("Retracted block must be in database.");
let hashes = BodyView::new(&bytes).transaction_hashes();
hashes.into_iter().map(|hash| (hash, None)).collect::<HashMap<H256, Option<TransactionAddress>>>()
});
// The order here is important! Don't remove transaction if it was part of enacted blocks as well.
retracted.chain(addresses).chain(current_addresses).collect()
}, },
BlockLocation::Branch => HashMap::new(), BlockLocation::Branch => HashMap::new(),
} }
@ -1192,6 +1278,24 @@ impl BlockChain {
body.append_raw(block_rlp.at(2).as_raw(), 1); body.append_raw(block_rlp.at(2).as_raw(), 1);
body.out() body.out()
} }
/// Returns general blockchain information
pub fn chain_info(&self) -> BlockChainInfo {
// ensure data consistency by locking everything first
let best_block = self.best_block.read();
let best_ancient_block = self.best_ancient_block.read();
BlockChainInfo {
total_difficulty: best_block.total_difficulty.clone(),
pending_total_difficulty: best_block.total_difficulty.clone(),
genesis_hash: self.genesis_hash(),
best_block_hash: best_block.hash.clone(),
best_block_number: best_block.number,
first_block_hash: self.first_block(),
first_block_number: From::from(self.first_block_number()),
ancient_block_hash: best_ancient_block.as_ref().map(|b| b.hash.clone()),
ancient_block_number: best_ancient_block.as_ref().map(|b| b.number),
}
}
} }
#[cfg(test)] #[cfg(test)]
@ -1345,6 +1449,70 @@ mod tests {
// TODO: insert block that already includes one of them as an uncle to check it's not allowed. // TODO: insert block that already includes one of them as an uncle to check it's not allowed.
} }
#[test]
fn test_fork_transaction_addresses() {
let mut canon_chain = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let genesis = canon_chain.generate(&mut finalizer).unwrap();
let mut fork_chain = canon_chain.fork(1);
let mut fork_finalizer = finalizer.fork();
let t1 = Transaction {
nonce: 0.into(),
gas_price: 0.into(),
gas: 100_000.into(),
action: Action::Create,
value: 100.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3());
let b1a = canon_chain
.with_transaction(t1.clone())
.generate(&mut finalizer).unwrap();
// Empty block
let b1b = fork_chain
.generate(&mut fork_finalizer).unwrap();
let b2 = fork_chain
.generate(&mut fork_finalizer).unwrap();
let b1a_hash = BlockView::new(&b1a).header_view().sha3();
let b2_hash = BlockView::new(&b2).header_view().sha3();
let t1_hash = t1.hash();
let temp = RandomTempPath::new();
let db = new_db(temp.as_str());
let bc = BlockChain::new(Config::default(), &genesis, db.clone());
let mut batch = db.transaction();
let _ = bc.insert_block(&mut batch, &b1a, vec![]);
bc.commit();
let _ = bc.insert_block(&mut batch, &b1b, vec![]);
bc.commit();
db.write(batch).unwrap();
assert_eq!(bc.best_block_hash(), b1a_hash);
assert_eq!(bc.transaction_address(&t1_hash), Some(TransactionAddress {
block_hash: b1a_hash.clone(),
index: 0,
}));
// now let's make forked chain the canon chain
let mut batch = db.transaction();
let _ = bc.insert_block(&mut batch, &b2, vec![]);
bc.commit();
db.write(batch).unwrap();
// Transaction should be retracted
assert_eq!(bc.best_block_hash(), b2_hash);
assert_eq!(bc.transaction_address(&t1_hash), None);
}
#[test] #[test]
fn test_overwriting_transaction_addresses() { fn test_overwriting_transaction_addresses() {
let mut canon_chain = ChainGenerator::default(); let mut canon_chain = ChainGenerator::default();
@ -1415,14 +1583,14 @@ mod tests {
db.write(batch).unwrap(); db.write(batch).unwrap();
assert_eq!(bc.best_block_hash(), b1a_hash); assert_eq!(bc.best_block_hash(), b1a_hash);
assert_eq!(bc.transaction_address(&t1_hash).unwrap(), TransactionAddress { assert_eq!(bc.transaction_address(&t1_hash), Some(TransactionAddress {
block_hash: b1a_hash.clone(), block_hash: b1a_hash.clone(),
index: 0, index: 0,
}); }));
assert_eq!(bc.transaction_address(&t2_hash).unwrap(), TransactionAddress { assert_eq!(bc.transaction_address(&t2_hash), Some(TransactionAddress {
block_hash: b1a_hash.clone(), block_hash: b1a_hash.clone(),
index: 1, index: 1,
}); }));
// now let's make forked chain the canon chain // now let's make forked chain the canon chain
let mut batch = db.transaction(); let mut batch = db.transaction();
@ -1431,18 +1599,18 @@ mod tests {
db.write(batch).unwrap(); db.write(batch).unwrap();
assert_eq!(bc.best_block_hash(), b2_hash); assert_eq!(bc.best_block_hash(), b2_hash);
assert_eq!(bc.transaction_address(&t1_hash).unwrap(), TransactionAddress { assert_eq!(bc.transaction_address(&t1_hash), Some(TransactionAddress {
block_hash: b1b_hash.clone(), block_hash: b1b_hash.clone(),
index: 1, index: 1,
}); }));
assert_eq!(bc.transaction_address(&t2_hash).unwrap(), TransactionAddress { assert_eq!(bc.transaction_address(&t2_hash), Some(TransactionAddress {
block_hash: b1b_hash.clone(), block_hash: b1b_hash.clone(),
index: 0, index: 0,
}); }));
assert_eq!(bc.transaction_address(&t3_hash).unwrap(), TransactionAddress { assert_eq!(bc.transaction_address(&t3_hash), Some(TransactionAddress {
block_hash: b2_hash.clone(), block_hash: b2_hash.clone(),
index: 0, index: 0,
}); }));
} }
#[test] #[test]
@ -1682,7 +1850,7 @@ mod tests {
gas_price: 0.into(), gas_price: 0.into(),
gas: 100_000.into(), gas: 100_000.into(),
action: Action::Create, action: Action::Create,
value: 100.into(), value: 101.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3());
let t2 = Transaction { let t2 = Transaction {
@ -1690,7 +1858,7 @@ mod tests {
gas_price: 0.into(), gas_price: 0.into(),
gas: 100_000.into(), gas: 100_000.into(),
action: Action::Create, action: Action::Create,
value: 100.into(), value: 102.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3());
let t3 = Transaction { let t3 = Transaction {
@ -1698,7 +1866,7 @@ mod tests {
gas_price: 0.into(), gas_price: 0.into(),
gas: 100_000.into(), gas: 100_000.into(),
action: Action::Create, action: Action::Create,
value: 100.into(), value: 103.into(),
data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(), data: "601080600c6000396000f3006000355415600957005b60203560003555".from_hex().unwrap(),
}.sign(&"".sha3()); }.sign(&"".sha3());
let tx_hash1 = t1.hash(); let tx_hash1 = t1.hash();


@ -17,8 +17,8 @@ pub struct ExtrasUpdate<'a> {
pub block_details: HashMap<H256, BlockDetails>, pub block_details: HashMap<H256, BlockDetails>,
/// Modified block receipts. /// Modified block receipts.
pub block_receipts: HashMap<H256, BlockReceipts>, pub block_receipts: HashMap<H256, BlockReceipts>,
/// Modified transaction addresses.
pub transactions_addresses: HashMap<H256, TransactionAddress>,
/// Modified blocks blooms. /// Modified blocks blooms.
pub blocks_blooms: HashMap<LogGroupPosition, BloomGroup>, pub blocks_blooms: HashMap<LogGroupPosition, BloomGroup>,
/// Modified transaction addresses (None signifies removed transactions).
pub transactions_addresses: HashMap<H256, Option<TransactionAddress>>,
} }


@ -55,18 +55,23 @@ impl<T> CacheManager<T> where T: Eq + Hash {
} }
for _ in 0..COLLECTION_QUEUE_SIZE { for _ in 0..COLLECTION_QUEUE_SIZE {
let current_size = notify_unused(self.cache_usage.pop_back().unwrap()); if let Some(back) = self.cache_usage.pop_back() {
self.cache_usage.push_front(Default::default()); let current_size = notify_unused(back);
if current_size < self.max_cache_size { self.cache_usage.push_front(Default::default());
break; if current_size < self.max_cache_size {
break
}
} }
} }
} }
fn rotate_cache_if_needed(&mut self) { fn rotate_cache_if_needed(&mut self) {
if self.cache_usage.len() == 0 { return }
if self.cache_usage[0].len() * self.bytes_per_cache_entry > self.pref_cache_size / COLLECTION_QUEUE_SIZE { if self.cache_usage[0].len() * self.bytes_per_cache_entry > self.pref_cache_size / COLLECTION_QUEUE_SIZE {
let cache = self.cache_usage.pop_back().unwrap(); if let Some(cache) = self.cache_usage.pop_back() {
self.cache_usage.push_front(cache); self.cache_usage.push_front(cache);
}
} }
} }
} }
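The cache-manager change above removes the `unwrap` on `pop_back` so an empty usage queue can no longer panic. A minimal standalone sketch of that defensive rotation (simplified element type, hypothetical helper name):

use std::collections::VecDeque;

// Rotate the usage queue only if it has a back element to rotate.
fn rotate_if_needed(usage: &mut VecDeque<Vec<u64>>, front_limit: usize) {
    if usage.is_empty() {
        return;
    }
    if usage[0].len() > front_limit {
        if let Some(back) = usage.pop_back() {
            usage.push_front(back);
        }
    }
}

fn main() {
    let mut usage: VecDeque<Vec<u64>> = VecDeque::new();
    rotate_if_needed(&mut usage, 2); // no panic on an empty queue

    usage.push_back(vec![1, 2, 3]);
    usage.push_back(vec![]);
    rotate_if_needed(&mut usage, 2);
    assert!(usage[0].is_empty()); // the old back bucket is now the front
}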


@ -18,7 +18,7 @@ use ipc::IpcConfig;
use util::H256; use util::H256;
/// Represents what has to be handled by actor listening to chain events /// Represents what has to be handled by actor listening to chain events
#[derive(Ipc)] #[ipc]
pub trait ChainNotify : Send + Sync { pub trait ChainNotify : Send + Sync {
/// fires when chain has new blocks. /// fires when chain has new blocks.
fn new_blocks(&self, fn new_blocks(&self,


@ -23,15 +23,14 @@ use time::precise_time_ns;
// util // util
use util::{Bytes, PerfTimer, Itertools, Mutex, RwLock}; use util::{Bytes, PerfTimer, Itertools, Mutex, RwLock};
use util::journaldb::{self, JournalDB}; use util::{journaldb, TrieFactory, Trie};
use util::{U256, H256, Address, H2048, Uint}; use util::trie::TrieSpec;
use util::sha3::*; use util::{U256, H256, Address, H2048, Uint, FixedHash};
use util::TrieFactory;
use util::kvdb::*; use util::kvdb::*;
// other // other
use io::*; use io::*;
use views::{BlockView, HeaderView, BodyView}; use views::{HeaderView, BodyView, BlockView};
use error::{ImportError, ExecutionError, CallError, BlockError, ImportResult, Error as EthcoreError}; use error::{ImportError, ExecutionError, CallError, BlockError, ImportResult, Error as EthcoreError};
use header::BlockNumber; use header::BlockNumber;
use state::State; use state::State;
@ -47,12 +46,12 @@ use transaction::{LocalizedTransaction, SignedTransaction, Action};
use blockchain::extras::TransactionAddress; use blockchain::extras::TransactionAddress;
use types::filter::Filter; use types::filter::Filter;
use log_entry::LocalizedLogEntry; use log_entry::LocalizedLogEntry;
use block_queue::{BlockQueue, BlockQueueInfo}; use verification::queue::BlockQueue;
use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute}; use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute};
use client::{ use client::{
BlockID, TransactionID, UncleID, TraceId, ClientConfig, BlockChainClient, BlockID, TransactionID, UncleID, TraceId, ClientConfig, BlockChainClient,
MiningBlockChainClient, TraceFilter, CallAnalytics, BlockImportError, Mode, MiningBlockChainClient, TraceFilter, CallAnalytics, BlockImportError, Mode,
ChainNotify ChainNotify,
}; };
use client::Error as ClientError; use client::Error as ClientError;
use env_info::EnvInfo; use env_info::EnvInfo;
@ -66,15 +65,17 @@ use miner::{Miner, MinerService};
use snapshot::{self, io as snapshot_io}; use snapshot::{self, io as snapshot_io};
use factory::Factories; use factory::Factories;
use rlp::{View, UntrustedRlp}; use rlp::{View, UntrustedRlp};
use state_db::StateDB;
// re-export // re-export
pub use types::blockchain_info::BlockChainInfo; pub use types::blockchain_info::BlockChainInfo;
pub use types::block_status::BlockStatus; pub use types::block_status::BlockStatus;
pub use blockchain::CacheSize as BlockChainCacheSize; pub use blockchain::CacheSize as BlockChainCacheSize;
pub use verification::queue::QueueInfo as BlockQueueInfo;
const MAX_TX_QUEUE_SIZE: usize = 4096; const MAX_TX_QUEUE_SIZE: usize = 4096;
const MAX_QUEUE_SIZE_TO_SLEEP_ON: usize = 2; const MAX_QUEUE_SIZE_TO_SLEEP_ON: usize = 2;
const MIN_HISTORY_SIZE: u64 = 8;
impl fmt::Display for BlockChainInfo { impl fmt::Display for BlockChainInfo {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
@ -126,9 +127,9 @@ pub struct Client {
tracedb: RwLock<TraceDB<BlockChain>>, tracedb: RwLock<TraceDB<BlockChain>>,
engine: Arc<Engine>, engine: Arc<Engine>,
config: ClientConfig, config: ClientConfig,
db: RwLock<Arc<Database>>,
pruning: journaldb::Algorithm, pruning: journaldb::Algorithm,
state_db: RwLock<Box<JournalDB>>, db: RwLock<Arc<Database>>,
state_db: Mutex<StateDB>,
block_queue: BlockQueue, block_queue: BlockQueue,
report: RwLock<ClientReport>, report: RwLock<ClientReport>,
import_lock: Mutex<()>, import_lock: Mutex<()>,
@ -142,17 +143,7 @@ pub struct Client {
queue_transactions: AtomicUsize, queue_transactions: AtomicUsize,
last_hashes: RwLock<VecDeque<H256>>, last_hashes: RwLock<VecDeque<H256>>,
factories: Factories, factories: Factories,
} history: u64,
/// The pruning constant -- how old blocks must be before we
/// assume finality of a given candidate.
pub const HISTORY: u64 = 1200;
/// Append a path element to the given path and return the string.
pub fn append_path<P>(path: P, item: &str) -> String where P: AsRef<Path> {
let mut p = path.as_ref().to_path_buf();
p.push(item);
p.to_str().unwrap().to_owned()
} }
impl Client { impl Client {
@ -168,32 +159,60 @@ impl Client {
let path = path.to_path_buf(); let path = path.to_path_buf();
let gb = spec.genesis_block(); let gb = spec.genesis_block();
let db = Arc::new(try!(Database::open(&db_config, &path.to_str().unwrap()).map_err(ClientError::Database))); let db = Arc::new(try!(Database::open(&db_config, &path.to_str().expect("DB path could not be converted to string.")).map_err(ClientError::Database)));
let chain = Arc::new(BlockChain::new(config.blockchain.clone(), &gb, db.clone())); let chain = Arc::new(BlockChain::new(config.blockchain.clone(), &gb, db.clone()));
let tracedb = RwLock::new(TraceDB::new(config.tracing.clone(), db.clone(), chain.clone())); let tracedb = RwLock::new(TraceDB::new(config.tracing.clone(), db.clone(), chain.clone()));
let mut state_db = journaldb::new(db.clone(), config.pruning, ::db::COL_STATE); let trie_spec = match config.fat_db {
if state_db.is_empty() && try!(spec.ensure_db_good(state_db.as_hashdb_mut())) { true => TrieSpec::Fat,
false => TrieSpec::Secure,
};
let journal_db = journaldb::new(db.clone(), config.pruning, ::db::COL_STATE);
let mut state_db = StateDB::new(journal_db, config.state_cache_size);
if state_db.journal_db().is_empty() && try!(spec.ensure_db_good(&mut state_db)) {
let mut batch = DBTransaction::new(&db); let mut batch = DBTransaction::new(&db);
try!(state_db.commit(&mut batch, 0, &spec.genesis_header().hash(), None)); try!(state_db.journal_under(&mut batch, 0, &spec.genesis_header().hash()));
try!(db.write(batch).map_err(ClientError::Database)); try!(db.write(batch).map_err(ClientError::Database));
} }
if !chain.block_header(&chain.best_block_hash()).map_or(true, |h| state_db.contains(h.state_root())) { trace!("Cleanup journal: DB Earliest = {:?}, Latest = {:?}", state_db.journal_db().earliest_era(), state_db.journal_db().latest_era());
let history = if config.history < MIN_HISTORY_SIZE {
info!(target: "client", "Ignoring pruning history parameter of {}\
, falling back to minimum of {}",
config.history, MIN_HISTORY_SIZE);
MIN_HISTORY_SIZE
} else {
config.history
};
if let (Some(earliest), Some(latest)) = (state_db.journal_db().earliest_era(), state_db.journal_db().latest_era()) {
if latest > earliest && latest - earliest > history {
for era in earliest..(latest - history + 1) {
trace!("Removing era {}", era);
let mut batch = DBTransaction::new(&db);
try!(state_db.mark_canonical(&mut batch, era, &chain.block_hash(era).expect("Old block not found in the database")));
try!(db.write(batch).map_err(ClientError::Database));
}
}
}
if !chain.block_header(&chain.best_block_hash()).map_or(true, |h| state_db.journal_db().contains(h.state_root())) {
warn!("State root not found for block #{} ({})", chain.best_block_number(), chain.best_block_hash().hex()); warn!("State root not found for block #{} ({})", chain.best_block_number(), chain.best_block_hash().hex());
} }
let engine = spec.engine.clone(); let engine = spec.engine.clone();
let block_queue = BlockQueue::new(config.queue.clone(), engine.clone(), message_channel.clone()); let block_queue = BlockQueue::new(config.queue.clone(), engine.clone(), message_channel.clone(), config.verifier_type.verifying_seal());
let panic_handler = PanicHandler::new_in_arc(); let panic_handler = PanicHandler::new_in_arc();
panic_handler.forward_from(&block_queue); panic_handler.forward_from(&block_queue);
let awake = match config.mode { Mode::Dark(..) => false, _ => true }; let awake = match config.mode { Mode::Dark(..) => false, _ => true };
let factories = Factories { let factories = Factories {
vm: EvmFactory::new(config.vm_type.clone()), vm: EvmFactory::new(config.vm_type.clone(), config.jump_table_size),
trie: TrieFactory::new(config.trie_spec.clone()), trie: TrieFactory::new(trie_spec),
accountdb: Default::default(), accountdb: Default::default(),
}; };
@ -208,7 +227,7 @@ impl Client {
verifier: verification::new(config.verifier_type.clone()), verifier: verification::new(config.verifier_type.clone()),
config: config, config: config,
db: RwLock::new(db), db: RwLock::new(db),
state_db: RwLock::new(state_db), state_db: Mutex::new(state_db),
block_queue: block_queue, block_queue: block_queue,
report: RwLock::new(Default::default()), report: RwLock::new(Default::default()),
import_lock: Mutex::new(()), import_lock: Mutex::new(()),
@ -219,6 +238,7 @@ impl Client {
queue_transactions: AtomicUsize::new(0), queue_transactions: AtomicUsize::new(0),
last_hashes: RwLock::new(VecDeque::new()), last_hashes: RwLock::new(VecDeque::new()),
factories: factories, factories: factories,
history: history,
}; };
Ok(Arc::new(client)) Ok(Arc::new(client))
} }
@ -277,7 +297,7 @@ impl Client {
let chain = self.chain.read(); let chain = self.chain.read();
// Check the block isn't so old we won't be able to enact it. // Check the block isn't so old we won't be able to enact it.
let best_block_number = chain.best_block_number(); let best_block_number = chain.best_block_number();
if best_block_number >= HISTORY && header.number() <= best_block_number - HISTORY { if best_block_number >= self.history && header.number() <= best_block_number - self.history {
warn!(target: "client", "Block import failed for #{} ({})\nBlock is ancient (current best block: #{}).", header.number(), header.hash(), best_block_number); warn!(target: "client", "Block import failed for #{} ({})\nBlock is ancient (current best block: #{}).", header.number(), header.hash(), best_block_number);
return Err(()); return Err(());
} }
@ -291,30 +311,27 @@ impl Client {
// Check if Parent is in chain // Check if Parent is in chain
let chain_has_parent = chain.block_header(header.parent_hash()); let chain_has_parent = chain.block_header(header.parent_hash());
if let None = chain_has_parent { if let Some(parent) = chain_has_parent {
// Enact Verified Block
let last_hashes = self.build_last_hashes(header.parent_hash().clone());
let db = self.state_db.lock().boxed_clone_canon(&header.parent_hash());
let enact_result = enact_verified(block, engine, self.tracedb.read().tracing_enabled(), db, &parent, last_hashes, self.factories.clone());
let locked_block = try!(enact_result.map_err(|e| {
warn!(target: "client", "Block import failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
}));
// Final Verification
if let Err(e) = self.verifier.verify_block_final(header, locked_block.block().header()) {
warn!(target: "client", "Stage 4 block verification failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
return Err(());
}
Ok(locked_block)
} else {
warn!(target: "client", "Block import failed for #{} ({}): Parent not found ({}) ", header.number(), header.hash(), header.parent_hash()); warn!(target: "client", "Block import failed for #{} ({}): Parent not found ({}) ", header.number(), header.hash(), header.parent_hash());
return Err(()); Err(())
};
// Enact Verified Block
let parent = chain_has_parent.unwrap();
let last_hashes = self.build_last_hashes(header.parent_hash().clone());
let db = self.state_db.read().boxed_clone();
let enact_result = enact_verified(block, engine, self.tracedb.read().tracing_enabled(), db, &parent, last_hashes, self.factories.clone());
if let Err(e) = enact_result {
warn!(target: "client", "Block import failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
return Err(());
};
// Final Verification
let locked_block = enact_result.unwrap();
if let Err(e) = self.verifier.verify_block_final(header, locked_block.block().header()) {
warn!(target: "client", "Stage 4 block verification failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
return Err(());
} }
Ok(locked_block)
} }
fn calculate_enacted_retracted(&self, import_results: &[ImportRoute]) -> (Vec<H256>, Vec<H256>) { fn calculate_enacted_retracted(&self, import_results: &[ImportRoute]) -> (Vec<H256>, Vec<H256>) {
@ -345,58 +362,55 @@ impl Client {
/// This is triggered by a message coming from a block queue when the block is ready for insertion /// This is triggered by a message coming from a block queue when the block is ready for insertion
pub fn import_verified_blocks(&self) -> usize { pub fn import_verified_blocks(&self) -> usize {
let max_blocks_to_import = 64; let max_blocks_to_import = 4;
let (imported_blocks, import_results, invalid_blocks, imported, duration) = { let (imported_blocks, import_results, invalid_blocks, imported, duration, is_empty) = {
let mut imported_blocks = Vec::with_capacity(max_blocks_to_import); let mut imported_blocks = Vec::with_capacity(max_blocks_to_import);
let mut invalid_blocks = HashSet::new(); let mut invalid_blocks = HashSet::new();
let mut import_results = Vec::with_capacity(max_blocks_to_import); let mut import_results = Vec::with_capacity(max_blocks_to_import);
let _import_lock = self.import_lock.lock(); let _import_lock = self.import_lock.lock();
let blocks = self.block_queue.drain(max_blocks_to_import);
if blocks.is_empty() {
return 0;
}
let _timer = PerfTimer::new("import_verified_blocks"); let _timer = PerfTimer::new("import_verified_blocks");
let start = precise_time_ns(); let start = precise_time_ns();
let blocks = self.block_queue.drain(max_blocks_to_import);
for block in blocks { for block in blocks {
let header = &block.header; let header = &block.header;
if invalid_blocks.contains(header.parent_hash()) { let is_invalid = invalid_blocks.contains(header.parent_hash());
if is_invalid {
invalid_blocks.insert(header.hash()); invalid_blocks.insert(header.hash());
continue; continue;
} }
let closed_block = self.check_and_close_block(&block); if let Ok(closed_block) = self.check_and_close_block(&block) {
if let Err(_) = closed_block { imported_blocks.push(header.hash());
let route = self.commit_block(closed_block, &header.hash(), &block.bytes);
import_results.push(route);
self.report.write().accrue_block(&block);
} else {
invalid_blocks.insert(header.hash()); invalid_blocks.insert(header.hash());
continue;
} }
let closed_block = closed_block.unwrap();
imported_blocks.push(header.hash());
let route = self.commit_block(closed_block, &header.hash(), &block.bytes);
import_results.push(route);
self.report.write().accrue_block(&block);
} }
let imported = imported_blocks.len(); let imported = imported_blocks.len();
let invalid_blocks = invalid_blocks.into_iter().collect::<Vec<H256>>(); let invalid_blocks = invalid_blocks.into_iter().collect::<Vec<H256>>();
{ if !invalid_blocks.is_empty() {
if !invalid_blocks.is_empty() { self.block_queue.mark_as_bad(&invalid_blocks);
self.block_queue.mark_as_bad(&invalid_blocks);
}
if !imported_blocks.is_empty() {
self.block_queue.mark_as_good(&imported_blocks);
}
} }
let is_empty = self.block_queue.mark_as_good(&imported_blocks);
let duration_ns = precise_time_ns() - start; let duration_ns = precise_time_ns() - start;
(imported_blocks, import_results, invalid_blocks, imported, duration_ns) (imported_blocks, import_results, invalid_blocks, imported, duration_ns, is_empty)
}; };
{ {
if !imported_blocks.is_empty() && self.block_queue.queue_info().is_empty() { if !imported_blocks.is_empty() && is_empty {
let (enacted, retracted) = self.calculate_enacted_retracted(&import_results); let (enacted, retracted) = self.calculate_enacted_retracted(&import_results);
if self.queue_info().is_empty() { if is_empty {
self.miner.chain_new_blocks(self, &imported_blocks, &invalid_blocks, &enacted, &retracted); self.miner.chain_new_blocks(self, &imported_blocks, &invalid_blocks, &enacted, &retracted);
} }
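The refactored import loop above marks a block invalid when its parent is already invalid and skips it. A standalone sketch of that propagation rule, with simplified u64 hashes and a `valid` flag in place of the real verification step:

use std::collections::HashSet;

struct Block { hash: u64, parent: u64, valid: bool }

// A bad block poisons its whole descendant chain in the queue.
fn import(blocks: &[Block]) -> (Vec<u64>, HashSet<u64>) {
    let mut imported = Vec::new();
    let mut invalid = HashSet::new();
    for b in blocks {
        if invalid.contains(&b.parent) {
            invalid.insert(b.hash);
            continue;
        }
        if b.valid {
            imported.push(b.hash);
        } else {
            invalid.insert(b.hash);
        }
    }
    (imported, invalid)
}

fn main() {
    let blocks = [
        Block { hash: 1, parent: 0, valid: true },
        Block { hash: 2, parent: 1, valid: false },
        Block { hash: 3, parent: 2, valid: true }, // skipped: parent invalid
    ];
    let (imported, invalid) = import(&blocks);
    assert_eq!(imported, vec![1]);
    assert!(invalid.contains(&2) && invalid.contains(&3));
}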
@ -417,17 +431,33 @@ impl Client {
imported imported
} }
/// Import a block with transaction receipts.
/// The block is guaranteed to be the next best block in the first block sequence.
/// Does no sealing or transaction validation.
fn import_old_block(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> H256 {
let block = BlockView::new(&block_bytes);
let hash = block.header().hash();
let _import_lock = self.import_lock.lock();
{
let _timer = PerfTimer::new("import_old_block");
let chain = self.chain.read();
// Commit results
let receipts = ::rlp::decode(&receipts_bytes);
let mut batch = DBTransaction::new(&self.db.read());
chain.insert_unordered_block(&mut batch, &block_bytes, receipts, None, false, true);
// Final commit to the DB
self.db.read().write_buffered(batch);
chain.commit();
}
self.db.read().flush().expect("DB flush failed.");
hash
}
fn commit_block<B>(&self, block: B, hash: &H256, block_data: &[u8]) -> ImportRoute where B: IsBlock + Drain { fn commit_block<B>(&self, block: B, hash: &H256, block_data: &[u8]) -> ImportRoute where B: IsBlock + Drain {
let number = block.header().number(); let number = block.header().number();
let parent = block.header().parent_hash().clone(); let parent = block.header().parent_hash().clone();
let chain = self.chain.read(); let chain = self.chain.read();
// Are we committing an era?
let ancient = if number >= HISTORY {
let n = number - HISTORY;
Some((n, chain.block_hash(n).unwrap()))
} else {
None
};
// Commit results // Commit results
let receipts = block.receipts().to_owned(); let receipts = block.receipts().to_owned();
@ -442,7 +472,18 @@ impl Client {
// CHECK! I *think* this is fine, even if the state_root is equal to another // CHECK! I *think* this is fine, even if the state_root is equal to another
// already-imported block of the same number. // already-imported block of the same number.
// TODO: Prove it with a test. // TODO: Prove it with a test.
block.drain().commit(&mut batch, number, hash, ancient).expect("DB commit failed."); let mut state = block.drain();
state.journal_under(&mut batch, number, hash).expect("DB commit failed");
if number >= self.history {
let n = number - self.history;
if let Some(ancient_hash) = chain.block_hash(n) {
state.mark_canonical(&mut batch, n, &ancient_hash).expect("DB commit failed");
} else {
debug!(target: "client", "Missing expected hash for block {}", n);
}
}
let route = chain.insert_block(&mut batch, block_data, receipts); let route = chain.insert_block(&mut batch, block_data, receipts);
self.tracedb.read().import(&mut batch, TraceImportRequest { self.tracedb.read().import(&mut batch, TraceImportRequest {
@ -452,10 +493,12 @@ impl Client {
enacted: route.enacted.clone(), enacted: route.enacted.clone(),
retracted: route.retracted.len() retracted: route.retracted.len()
}); });
let is_canon = route.enacted.last().map_or(false, |h| h == hash);
state.sync_cache(&route.enacted, &route.retracted, is_canon);
// Final commit to the DB // Final commit to the DB
self.db.read().write_buffered(batch); self.db.read().write_buffered(batch);
chain.commit(); chain.commit();
self.update_last_hashes(&parent, hash); self.update_last_hashes(&parent, hash);
route route
} }
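The commit path above replaces the fixed HISTORY constant with the configured `self.history`: whenever block `number` lands, the era `number - history` (if it exists) is marked canonical. A tiny standalone sketch of that rule (the helper name is hypothetical):

// Which era, if any, becomes final when block `number` is committed.
fn era_to_finalize(number: u64, history: u64) -> Option<u64> {
    if number >= history {
        Some(number - history)
    } else {
        None
    }
}

fn main() {
    assert_eq!(era_to_finalize(10, 64), None);         // chain too short to prune
    assert_eq!(era_to_finalize(1200, 64), Some(1136)); // era 1136 becomes final
}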
@ -497,10 +540,10 @@ impl Client {
}; };
self.block_header(id).and_then(|header| { self.block_header(id).and_then(|header| {
let db = self.state_db.read().boxed_clone(); let db = self.state_db.lock().boxed_clone();
// early exit for pruned blocks // early exit for pruned blocks
if db.is_pruned() && self.chain.read().best_block_number() >= block_number + HISTORY { if db.is_pruned() && self.chain.read().best_block_number() >= block_number + self.history {
return None; return None;
} }
@ -527,9 +570,11 @@ impl Client {
/// Get a copy of the best block's state. /// Get a copy of the best block's state.
pub fn state(&self) -> State { pub fn state(&self) -> State {
let header = self.best_block_header();
let header = HeaderView::new(&header);
State::from_existing( State::from_existing(
self.state_db.read().boxed_clone(), self.state_db.lock().boxed_clone_canon(&header.hash()),
HeaderView::new(&self.best_block_header()).state_root(), header.state_root(),
self.engine.account_start_nonce(), self.engine.account_start_nonce(),
self.factories.clone()) self.factories.clone())
.expect("State root of best block header always valid.") .expect("State root of best block header always valid.")
@ -543,7 +588,7 @@ impl Client {
/// Get the report. /// Get the report.
pub fn report(&self) -> ClientReport { pub fn report(&self) -> ClientReport {
let mut report = self.report.read().clone(); let mut report = self.report.read().clone();
report.state_db_mem = self.state_db.read().mem_used(); report.state_db_mem = self.state_db.lock().mem_used();
report report
} }
@ -599,24 +644,27 @@ impl Client {
/// Take a snapshot at the given block. /// Take a snapshot at the given block.
/// If the ID given is "latest", this will default to 1000 blocks behind. /// If the ID given is "latest", this will default to 1000 blocks behind.
pub fn take_snapshot<W: snapshot_io::SnapshotWriter + Send>(&self, writer: W, at: BlockID, p: &snapshot::Progress) -> Result<(), EthcoreError> { pub fn take_snapshot<W: snapshot_io::SnapshotWriter + Send>(&self, writer: W, at: BlockID, p: &snapshot::Progress) -> Result<(), EthcoreError> {
let db = self.state_db.read().boxed_clone(); let db = self.state_db.lock().journal_db().boxed_clone();
let best_block_number = self.chain_info().best_block_number; let best_block_number = self.chain_info().best_block_number;
let block_number = try!(self.block_number(at).ok_or(snapshot::Error::InvalidStartingBlock(at))); let block_number = try!(self.block_number(at).ok_or(snapshot::Error::InvalidStartingBlock(at)));
if best_block_number > HISTORY + block_number && db.is_pruned() { if best_block_number > self.history + block_number && db.is_pruned() {
return Err(snapshot::Error::OldBlockPrunedDB.into()); return Err(snapshot::Error::OldBlockPrunedDB.into());
} }
let history = ::std::cmp::min(self.history, 1000);
let start_hash = match at { let start_hash = match at {
BlockID::Latest => { BlockID::Latest => {
let start_num = if best_block_number > 1000 { let start_num = match db.earliest_era() {
best_block_number - 1000 Some(era) => ::std::cmp::max(era, best_block_number - history),
} else { None => best_block_number - history,
0
}; };
self.block_hash(BlockID::Number(start_num)) match self.block_hash(BlockID::Number(start_num)) {
.expect("blocks within HISTORY are always stored.") Some(h) => h,
None => return Err(snapshot::Error::InvalidStartingBlock(at).into()),
}
} }
_ => match self.block_hash(at) { _ => match self.block_hash(at) {
Some(hash) => hash, Some(hash) => hash,
@ -629,6 +677,11 @@ impl Client {
Ok(()) Ok(())
} }
/// Ask the client what the history parameter is.
pub fn pruning_history(&self) -> u64 {
self.history
}
fn block_hash(chain: &BlockChain, id: BlockID) -> Option<H256> { fn block_hash(chain: &BlockChain, id: BlockID) -> Option<H256> {
match id { match id {
BlockID::Hash(hash) => Some(hash), BlockID::Hash(hash) => Some(hash),
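The snapshot code above picks a start block `history` blocks behind the best block (capped at 1000) but never earlier than the earliest era still in the journal. A standalone sketch of that choice; note the real code subtracts directly, while this sketch uses `saturating_sub` to stay total for small chains:

// Choose the starting block number for a "latest" snapshot.
fn snapshot_start(best: u64, history: u64, earliest_era: Option<u64>) -> u64 {
    let history = std::cmp::min(history, 1000);
    match earliest_era {
        Some(era) => std::cmp::max(era, best.saturating_sub(history)),
        None => best.saturating_sub(history),
    }
}

fn main() {
    assert_eq!(snapshot_start(5000, 1200, None), 4000);     // history capped at 1000
    assert_eq!(snapshot_start(5000, 64, Some(4990)), 4990); // limited by the journal
    assert_eq!(snapshot_start(500, 64, None), 436);
}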
@ -678,14 +731,15 @@ impl snapshot::DatabaseRestore for Client {
trace!(target: "snapshot", "Replacing client database with {:?}", new_db); trace!(target: "snapshot", "Replacing client database with {:?}", new_db);
let _import_lock = self.import_lock.lock(); let _import_lock = self.import_lock.lock();
let mut state_db = self.state_db.write(); let mut state_db = self.state_db.lock();
let mut chain = self.chain.write(); let mut chain = self.chain.write();
let mut tracedb = self.tracedb.write(); let mut tracedb = self.tracedb.write();
self.miner.clear(); self.miner.clear();
let db = self.db.write(); let db = self.db.write();
try!(db.restore(new_db)); try!(db.restore(new_db));
*state_db = journaldb::new(db.clone(), self.pruning, ::db::COL_STATE); let cache_size = state_db.cache_size();
*state_db = StateDB::new(journaldb::new(db.clone(), self.pruning, ::db::COL_STATE), cache_size);
*chain = Arc::new(BlockChain::new(self.config.blockchain.clone(), &[], db.clone())); *chain = Arc::new(BlockChain::new(self.config.blockchain.clone(), &[], db.clone()));
*tracedb = TraceDB::new(self.config.tracing.clone(), db.clone(), chain.clone()); *tracedb = TraceDB::new(self.config.tracing.clone(), db.clone(), chain.clone());
Ok(()) Ok(())
@ -805,7 +859,7 @@ impl BlockChainClient for Client {
let chain = self.chain.read(); let chain = self.chain.read();
match Self::block_hash(&chain, id) { match Self::block_hash(&chain, id) {
Some(ref hash) if chain.is_known(hash) => BlockStatus::InChain, Some(ref hash) if chain.is_known(hash) => BlockStatus::InChain,
Some(hash) => self.block_queue.block_status(&hash), Some(hash) => self.block_queue.status(&hash).into(),
None => BlockStatus::Unknown None => BlockStatus::Unknown
} }
} }
@ -830,7 +884,7 @@ impl BlockChainClient for Client {
} }
fn code(&self, address: &Address, id: BlockID) -> Option<Option<Bytes>> { fn code(&self, address: &Address, id: BlockID) -> Option<Option<Bytes>> {
self.state_at(id).map(|s| s.code(address)) self.state_at(id).map(|s| s.code(address).map(|c| (*c).clone()))
} }
fn balance(&self, address: &Address, id: BlockID) -> Option<U256> { fn balance(&self, address: &Address, id: BlockID) -> Option<U256> {
@ -841,6 +895,38 @@ impl BlockChainClient for Client {
self.state_at(id).map(|s| s.storage_at(address, position)) self.state_at(id).map(|s| s.storage_at(address, position))
} }
fn list_accounts(&self, id: BlockID) -> Option<Vec<Address>> {
if !self.factories.trie.is_fat() {
trace!(target: "fatdb", "list_accounts: Not a fat DB");
return None;
}
let state = match self.state_at(id) {
Some(state) => state,
_ => return None,
};
let (root, db) = state.drop();
let trie = match self.factories.trie.readonly(db.as_hashdb(), &root) {
Ok(trie) => trie,
_ => {
trace!(target: "fatdb", "list_accounts: Couldn't open the DB");
return None;
}
};
let iter = match trie.iter() {
Ok(iter) => iter,
_ => return None,
};
let accounts = iter.filter_map(|item| {
item.ok().map(|(addr, _)| Address::from_slice(&addr))
}).collect();
Some(accounts)
}
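The `list_accounts` implementation above only works against a fat trie, which keeps key preimages, and skips entries that fail to decode. A standalone sketch of that shape, with a Vec of Results standing in for the real fallible trie iterator:

type Address = [u8; 20];

// A non-fat (secure) trie stores only hashed keys, so addresses cannot be listed.
fn list_accounts(is_fat: bool, trie_iter: Vec<Result<(Address, Vec<u8>), String>>) -> Option<Vec<Address>> {
    if !is_fat {
        return None;
    }
    Some(
        trie_iter
            .into_iter()
            .filter_map(|item| item.ok().map(|(addr, _)| addr))
            .collect(),
    )
}

fn main() {
    let items = vec![Ok(([1u8; 20], vec![])), Err("decode error".to_string())];
    assert_eq!(list_accounts(true, items.clone()), Some(vec![[1u8; 20]]));
    assert_eq!(list_accounts(false, items), None);
}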
fn transaction(&self, id: TransactionID) -> Option<LocalizedTransaction> { fn transaction(&self, id: TransactionID) -> Option<LocalizedTransaction> {
self.transaction_address(id).and_then(|address| self.chain.read().transaction(&address)) self.transaction_address(id).and_then(|address| self.chain.read().transaction(&address))
} }
@ -852,12 +938,17 @@ impl BlockChainClient for Client {
fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt> { fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt> {
let chain = self.chain.read(); let chain = self.chain.read();
self.transaction_address(id).and_then(|address| chain.block_number(&address.block_hash).and_then(|block_number| { self.transaction_address(id)
.and_then(|address| chain.block_number(&address.block_hash).and_then(|block_number| {
let t = chain.block_body(&address.block_hash) let t = chain.block_body(&address.block_hash)
.and_then(|block| BodyView::new(&block).localized_transaction_at(&address.block_hash, block_number, address.index)); .and_then(|block| {
BodyView::new(&block).localized_transaction_at(&address.block_hash, block_number, address.index)
});
match (t, chain.transaction_receipt(&address)) { let tx_and_sender = t.and_then(|tx| tx.sender().ok().map(|sender| (tx, sender)));
(Some(tx), Some(receipt)) => {
match (tx_and_sender, chain.transaction_receipt(&address)) {
(Some((tx, sender)), Some(receipt)) => {
let block_hash = tx.block_hash.clone(); let block_hash = tx.block_hash.clone();
let block_number = tx.block_number.clone(); let block_number = tx.block_number.clone();
let transaction_hash = tx.hash(); let transaction_hash = tx.hash();
@ -879,7 +970,7 @@ impl BlockChainClient for Client {
gas_used: receipt.gas_used - prior_gas_used, gas_used: receipt.gas_used - prior_gas_used,
contract_address: match tx.action { contract_address: match tx.action {
Action::Call(_) => None, Action::Call(_) => None,
Action::Create => Some(contract_address(&tx.sender().unwrap(), &tx.nonce)) Action::Create => Some(contract_address(&sender, &tx.nonce))
}, },
logs: receipt.logs.into_iter().enumerate().map(|(i, log)| LocalizedLogEntry { logs: receipt.logs.into_iter().enumerate().map(|(i, log)| LocalizedLogEntry {
entry: log, entry: log,
@ -909,7 +1000,7 @@ impl BlockChainClient for Client {
} }
fn state_data(&self, hash: &H256) -> Option<Bytes> { fn state_data(&self, hash: &H256) -> Option<Bytes> {
self.state_db.read().state(hash) self.state_db.lock().journal_db().state(hash)
} }
fn block_receipts(&self, hash: &H256) -> Option<Bytes> { fn block_receipts(&self, hash: &H256) -> Option<Bytes> {
@ -917,16 +1008,35 @@ impl BlockChainClient for Client {
} }
fn import_block(&self, bytes: Bytes) -> Result<H256, BlockImportError> { fn import_block(&self, bytes: Bytes) -> Result<H256, BlockImportError> {
use verification::queue::kind::HasHash;
use verification::queue::kind::blocks::Unverified;
// create unverified block here so the `sha3` calculation can be cached.
let unverified = Unverified::new(bytes);
{ {
let header = BlockView::new(&bytes).header_view(); if self.chain.read().is_known(&unverified.hash()) {
if self.chain.read().is_known(&header.sha3()) { return Err(BlockImportError::Import(ImportError::AlreadyInChain));
}
if self.block_status(BlockID::Hash(unverified.parent_hash())) == BlockStatus::Unknown {
return Err(BlockImportError::Block(BlockError::UnknownParent(unverified.parent_hash())));
}
}
Ok(try!(self.block_queue.import(unverified)))
}
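The reworked import path runs two cheap checks, already-known hash and missing parent, before the pre-hashed block goes to the verification queue. A rough std-only sketch of that guard-then-enqueue shape; `Chain`, `Queue` and the u64 hashes are illustrative stand-ins, not the ethcore types:

use std::collections::HashSet;

#[derive(Debug)]
enum ImportError { AlreadyInChain, UnknownParent }

struct Chain { known: HashSet<u64> }      // toy stand-in for the blockchain
struct Queue { pending: Vec<(u64, u64)> } // (hash, parent) pairs awaiting verification

fn import_block(chain: &Chain, queue: &mut Queue, hash: u64, parent: u64) -> Result<u64, ImportError> {
    if chain.known.contains(&hash) {
        return Err(ImportError::AlreadyInChain);
    }
    if !chain.known.contains(&parent) {
        return Err(ImportError::UnknownParent);
    }
    queue.pending.push((hash, parent));
    Ok(hash)
}

fn main() {
    let mut known = HashSet::new();
    known.insert(1u64);
    let chain = Chain { known: known };
    let mut queue = Queue { pending: Vec::new() };
    println!("{:?}", import_block(&chain, &mut queue, 2, 1)); // Ok(2)
    println!("{:?}", import_block(&chain, &mut queue, 2, 9)); // Err(UnknownParent)
}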
fn import_block_with_receipts(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> Result<H256, BlockImportError> {
{
// check block order
let header = BlockView::new(&block_bytes).header_view();
if self.chain.read().is_known(&header.hash()) {
return Err(BlockImportError::Import(ImportError::AlreadyInChain)); return Err(BlockImportError::Import(ImportError::AlreadyInChain));
} }
if self.block_status(BlockID::Hash(header.parent_hash())) == BlockStatus::Unknown { if self.block_status(BlockID::Hash(header.parent_hash())) == BlockStatus::Unknown {
return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash()))); return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash())));
} }
} }
Ok(try!(self.block_queue.import_block(bytes))) Ok(self.import_old_block(block_bytes, receipts_bytes))
} }
fn queue_info(&self) -> BlockQueueInfo { fn queue_info(&self) -> BlockQueueInfo {
@ -938,14 +1048,7 @@ impl BlockChainClient for Client {
} }
fn chain_info(&self) -> BlockChainInfo { fn chain_info(&self) -> BlockChainInfo {
let chain = self.chain.read(); self.chain.read().chain_info()
BlockChainInfo {
total_difficulty: chain.best_block_total_difficulty(),
pending_total_difficulty: chain.best_block_total_difficulty(),
genesis_hash: chain.genesis_hash(),
best_block_hash: chain.best_block_hash(),
best_block_number: From::from(chain.best_block_number())
}
} }
fn additional_params(&self) -> BTreeMap<String, String> { fn additional_params(&self) -> BTreeMap<String, String> {
@ -975,17 +1078,18 @@ impl BlockChainClient for Client {
let start = self.block_number(filter.range.start); let start = self.block_number(filter.range.start);
let end = self.block_number(filter.range.end); let end = self.block_number(filter.range.end);
if start.is_some() && end.is_some() { match (start, end) {
let filter = trace::Filter { (Some(s), Some(e)) => {
range: start.unwrap() as usize..end.unwrap() as usize, let filter = trace::Filter {
from_address: From::from(filter.from_address), range: s as usize..e as usize,
to_address: From::from(filter.to_address), from_address: From::from(filter.from_address),
}; to_address: From::from(filter.to_address),
};
let traces = self.tracedb.read().filter(&filter); let traces = self.tracedb.read().filter(&filter);
Some(traces) Some(traces)
} else { },
None _ => None,
} }
} }
@ -1032,7 +1136,7 @@ impl BlockChainClient for Client {
} }
fn pending_transactions(&self) -> Vec<SignedTransaction> { fn pending_transactions(&self) -> Vec<SignedTransaction> {
self.miner.pending_transactions() self.miner.pending_transactions(self.chain.read().best_block_number())
} }
} }
@ -1046,7 +1150,7 @@ impl MiningBlockChainClient for Client {
engine, engine,
self.factories.clone(), self.factories.clone(),
false, // TODO: this will need to be parameterised once we want to do immediate mining insertion. false, // TODO: this will need to be parameterised once we want to do immediate mining insertion.
self.state_db.read().boxed_clone(), self.state_db.lock().boxed_clone_canon(&h),
&chain.block_header(&h).expect("h is best block hash: so its header must exist: qed"), &chain.block_header(&h).expect("h is best block hash: so its header must exist: qed"),
self.build_last_hashes(h.clone()), self.build_last_hashes(h.clone()),
author, author,
@ -1057,11 +1161,15 @@ impl MiningBlockChainClient for Client {
// Add uncles // Add uncles
chain chain
.find_uncle_headers(&h, engine.maximum_uncle_age()) .find_uncle_headers(&h, engine.maximum_uncle_age())
.unwrap() .unwrap_or_else(Vec::new)
.into_iter() .into_iter()
.take(engine.maximum_uncle_count()) .take(engine.maximum_uncle_count())
.foreach(|h| { .foreach(|h| {
open_block.push_uncle(h).unwrap(); open_block.push_uncle(h).expect("pushing maximum_uncle_count;
open_block was just created;
push_uncle is not ok only if more than maximum_uncle_count is pushed;
so all push_uncle are Ok;
qed");
}); });
open_block open_block
@ -1072,20 +1180,22 @@ impl MiningBlockChainClient for Client {
} }
fn import_sealed_block(&self, block: SealedBlock) -> ImportResult { fn import_sealed_block(&self, block: SealedBlock) -> ImportResult {
let _import_lock = self.import_lock.lock();
let _timer = PerfTimer::new("import_sealed_block");
let start = precise_time_ns();
let h = block.header().hash(); let h = block.header().hash();
let number = block.header().number(); let start = precise_time_ns();
let route = {
let block_data = block.rlp_bytes(); // scope for self.import_lock
let route = self.commit_block(block, &h, &block_data); let _import_lock = self.import_lock.lock();
trace!(target: "client", "Imported sealed block #{} ({})", number, h); let _timer = PerfTimer::new("import_sealed_block");
let number = block.header().number();
let block_data = block.rlp_bytes();
let route = self.commit_block(block, &h, &block_data);
trace!(target: "client", "Imported sealed block #{} ({})", number, h);
self.state_db.lock().sync_cache(&route.enacted, &route.retracted, false);
route
};
let (enacted, retracted) = self.calculate_enacted_retracted(&[route]); let (enacted, retracted) = self.calculate_enacted_retracted(&[route]);
self.miner.chain_new_blocks(self, &[h.clone()], &[], &enacted, &retracted); self.miner.chain_new_blocks(self, &[h.clone()], &[], &enacted, &retracted);
self.notify(|notify| { self.notify(|notify| {
notify.new_blocks( notify.new_blocks(
vec![h.clone()], vec![h.clone()],



@ -15,35 +15,38 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::str::FromStr; use std::str::FromStr;
use std::path::Path;
pub use std::time::Duration; pub use std::time::Duration;
pub use block_queue::BlockQueueConfig;
pub use blockchain::Config as BlockChainConfig; pub use blockchain::Config as BlockChainConfig;
pub use trace::Config as TraceConfig; pub use trace::Config as TraceConfig;
pub use evm::VMType; pub use evm::VMType;
pub use verification::VerifierType;
use verification::{VerifierType, QueueConfig};
use util::{journaldb, CompactionProfile}; use util::{journaldb, CompactionProfile};
use util::trie::TrieSpec;
/// Client state db compaction profile /// Client state db compaction profile
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub enum DatabaseCompactionProfile { pub enum DatabaseCompactionProfile {
/// Default compaction profile /// Try to determine compaction profile automatically
Default, Auto,
/// SSD compaction profile
SSD,
/// HDD or other slow storage io compaction profile /// HDD or other slow storage io compaction profile
HDD, HDD,
} }
impl Default for DatabaseCompactionProfile { impl Default for DatabaseCompactionProfile {
fn default() -> Self { fn default() -> Self {
DatabaseCompactionProfile::Default DatabaseCompactionProfile::Auto
} }
} }
impl DatabaseCompactionProfile { impl DatabaseCompactionProfile {
/// Returns corresponding compaction profile. /// Returns corresponding compaction profile.
pub fn compaction_profile(&self) -> CompactionProfile { pub fn compaction_profile(&self, db_path: &Path) -> CompactionProfile {
match *self { match *self {
DatabaseCompactionProfile::Default => Default::default(), DatabaseCompactionProfile::Auto => CompactionProfile::auto(db_path),
DatabaseCompactionProfile::SSD => CompactionProfile::ssd(),
DatabaseCompactionProfile::HDD => CompactionProfile::hdd(), DatabaseCompactionProfile::HDD => CompactionProfile::hdd(),
} }
} }
@ -54,9 +57,10 @@ impl FromStr for DatabaseCompactionProfile {
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
match s { match s {
"ssd" | "default" => Ok(DatabaseCompactionProfile::Default), "auto" => Ok(DatabaseCompactionProfile::Auto),
"ssd" => Ok(DatabaseCompactionProfile::SSD),
"hdd" => Ok(DatabaseCompactionProfile::HDD), "hdd" => Ok(DatabaseCompactionProfile::HDD),
_ => Err("Invalid compaction profile given. Expected hdd/ssd (default).".into()), _ => Err("Invalid compaction profile given. Expected default/hdd/ssd.".into()),
} }
} }
} }
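Since the compaction profile arrives as a CLI string, the `FromStr` impl above is the whole parsing surface: "auto", "ssd" and "hdd" are accepted, anything else is an error. A hedged standalone sketch of the same shape with a toy enum (not the ethcore type):

use std::str::FromStr;

#[derive(Debug, PartialEq)]
enum CompactionProfile { Auto, Ssd, Hdd } // stand-in for DatabaseCompactionProfile

impl FromStr for CompactionProfile {
    type Err = String;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "auto" => Ok(CompactionProfile::Auto),
            "ssd" => Ok(CompactionProfile::Ssd),
            "hdd" => Ok(CompactionProfile::Hdd),
            other => Err(format!("Invalid compaction profile: {}", other)),
        }
    }
}

fn main() {
    assert_eq!("auto".parse::<CompactionProfile>().unwrap(), CompactionProfile::Auto);
    assert!("floppy".parse::<CompactionProfile>().is_err());
    println!("parsed ok");
}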
@ -84,20 +88,20 @@ impl Default for Mode {
#[derive(Debug, PartialEq, Default)] #[derive(Debug, PartialEq, Default)]
pub struct ClientConfig { pub struct ClientConfig {
/// Block queue configuration. /// Block queue configuration.
pub queue: BlockQueueConfig, pub queue: QueueConfig,
/// Blockchain configuration. /// Blockchain configuration.
pub blockchain: BlockChainConfig, pub blockchain: BlockChainConfig,
/// Trace configuration. /// Trace configuration.
pub tracing: TraceConfig, pub tracing: TraceConfig,
/// VM type. /// VM type.
pub vm_type: VMType, pub vm_type: VMType,
/// Trie type. /// Fat DB enabled?
pub trie_spec: TrieSpec, pub fat_db: bool,
/// The JournalDB ("pruning") algorithm to use. /// The JournalDB ("pruning") algorithm to use.
pub pruning: journaldb::Algorithm, pub pruning: journaldb::Algorithm,
/// The name of the client instance. /// The name of the client instance.
pub name: String, pub name: String,
/// State db cache-size if not default /// RocksDB state column cache-size if not default
pub db_cache_size: Option<usize>, pub db_cache_size: Option<usize>,
/// State db compaction profile /// State db compaction profile
pub db_compaction: DatabaseCompactionProfile, pub db_compaction: DatabaseCompactionProfile,
@ -107,6 +111,14 @@ pub struct ClientConfig {
pub mode: Mode, pub mode: Mode,
/// Type of block verifier used by client. /// Type of block verifier used by client.
pub verifier_type: VerifierType, pub verifier_type: VerifierType,
/// State db cache-size.
pub state_cache_size: usize,
/// EVM jump-tables cache size.
pub jump_table_size: usize,
/// State pruning history size.
pub history: u64,
/// Check seal validity on block import
pub check_seal: bool,
} }
#[cfg(test)] #[cfg(test)]
@ -115,13 +127,13 @@ mod test {
#[test] #[test]
fn test_default_compaction_profile() { fn test_default_compaction_profile() {
assert_eq!(DatabaseCompactionProfile::default(), DatabaseCompactionProfile::Default); assert_eq!(DatabaseCompactionProfile::default(), DatabaseCompactionProfile::Auto);
} }
#[test] #[test]
fn test_parsing_compaction_profile() { fn test_parsing_compaction_profile() {
assert_eq!(DatabaseCompactionProfile::Default, "ssd".parse().unwrap()); assert_eq!(DatabaseCompactionProfile::Auto, "auto".parse().unwrap());
assert_eq!(DatabaseCompactionProfile::Default, "default".parse().unwrap()); assert_eq!(DatabaseCompactionProfile::SSD, "ssd".parse().unwrap());
assert_eq!(DatabaseCompactionProfile::HDD, "hdd".parse().unwrap()); assert_eq!(DatabaseCompactionProfile::HDD, "hdd".parse().unwrap());
} }


@ -23,20 +23,28 @@ mod trace;
mod client; mod client;
pub use self::client::*; pub use self::client::*;
pub use self::config::{Mode, ClientConfig, DatabaseCompactionProfile, BlockQueueConfig, BlockChainConfig, VMType}; pub use self::config::{Mode, ClientConfig, DatabaseCompactionProfile, BlockChainConfig, VMType};
pub use self::error::Error; pub use self::error::Error;
pub use types::ids::*; pub use types::ids::*;
pub use self::test_client::{TestBlockChainClient, EachBlockWith}; pub use self::test_client::{TestBlockChainClient, EachBlockWith};
pub use types::trace_filter::Filter as TraceFilter; pub use types::trace_filter::Filter as TraceFilter;
pub use executive::{Executed, Executive, TransactOptions}; pub use executive::{Executed, Executive, TransactOptions};
pub use env_info::{LastHashes, EnvInfo}; pub use env_info::{LastHashes, EnvInfo};
pub use self::chain_notify::{ChainNotify, ChainNotifyClient}; pub use self::chain_notify::ChainNotify;
pub use types::call_analytics::CallAnalytics; pub use types::call_analytics::CallAnalytics;
pub use block_import_error::BlockImportError; pub use block_import_error::BlockImportError;
pub use transaction_import::TransactionImportResult; pub use transaction_import::TransactionImportResult;
pub use transaction_import::TransactionImportError; pub use transaction_import::TransactionImportError;
pub use self::traits::{BlockChainClient, MiningBlockChainClient, RemoteClient}; pub use self::traits::{BlockChainClient, MiningBlockChainClient};
pub use verification::VerifierType;
/// IPC interfaces
#[cfg(feature="ipc")]
pub mod remote {
pub use super::traits::RemoteClient;
pub use super::chain_notify::ChainNotifyClient;
}
mod traits { mod traits {
#![allow(dead_code, unused_assignments, unused_variables, missing_docs)] // codegen issues #![allow(dead_code, unused_assignments, unused_variables, missing_docs)] // codegen issues


@ -25,8 +25,9 @@ use transaction::{Transaction, LocalizedTransaction, SignedTransaction, Action};
use blockchain::TreeRoute; use blockchain::TreeRoute;
use client::{ use client::{
BlockChainClient, MiningBlockChainClient, BlockChainInfo, BlockStatus, BlockID, BlockChainClient, MiningBlockChainClient, BlockChainInfo, BlockStatus, BlockID,
TransactionID, UncleID, TraceId, TraceFilter, LastHashes, CallAnalytics, BlockImportError TransactionID, UncleID, TraceId, TraceFilter, LastHashes, CallAnalytics, BlockImportError,
}; };
use db::{NUM_COLUMNS, COL_STATE};
use header::{Header as BlockHeader, BlockNumber}; use header::{Header as BlockHeader, BlockNumber};
use filter::Filter; use filter::Filter;
use log_entry::LocalizedLogEntry; use log_entry::LocalizedLogEntry;
@ -37,11 +38,12 @@ use evm::{Factory as EvmFactory, VMType};
use miner::{Miner, MinerService, TransactionImportResult}; use miner::{Miner, MinerService, TransactionImportResult};
use spec::Spec; use spec::Spec;
use block_queue::BlockQueueInfo; use verification::queue::QueueInfo;
use block::{OpenBlock, SealedBlock}; use block::{OpenBlock, SealedBlock};
use executive::Executed; use executive::Executed;
use error::CallError; use error::CallError;
use trace::LocalizedTrace; use trace::LocalizedTrace;
use state_db::StateDB;
/// Test client. /// Test client.
pub struct TestBlockChainClient { pub struct TestBlockChainClient {
@ -53,6 +55,8 @@ pub struct TestBlockChainClient {
pub genesis_hash: H256, pub genesis_hash: H256,
/// Last block hash. /// Last block hash.
pub last_hash: RwLock<H256>, pub last_hash: RwLock<H256>,
/// Extra data to set for each block
pub extra_data: Bytes,
/// Difficulty. /// Difficulty.
pub difficulty: RwLock<U256>, pub difficulty: RwLock<U256>,
/// Balances. /// Balances.
@ -103,11 +107,17 @@ impl Default for TestBlockChainClient {
impl TestBlockChainClient { impl TestBlockChainClient {
/// Creates new test client. /// Creates new test client.
pub fn new() -> Self { pub fn new() -> Self {
Self::new_with_extra_data(Bytes::new())
}
/// Creates new test client with specified extra data for each block
pub fn new_with_extra_data(extra_data: Bytes) -> Self {
let spec = Spec::new_test(); let spec = Spec::new_test();
let mut client = TestBlockChainClient { let mut client = TestBlockChainClient {
blocks: RwLock::new(HashMap::new()), blocks: RwLock::new(HashMap::new()),
numbers: RwLock::new(HashMap::new()), numbers: RwLock::new(HashMap::new()),
genesis_hash: H256::new(), genesis_hash: H256::new(),
extra_data: extra_data,
last_hash: RwLock::new(H256::new()), last_hash: RwLock::new(H256::new()),
difficulty: RwLock::new(From::from(0)), difficulty: RwLock::new(From::from(0)),
balances: RwLock::new(HashMap::new()), balances: RwLock::new(HashMap::new()),
@ -120,14 +130,14 @@ impl TestBlockChainClient {
queue_size: AtomicUsize::new(0), queue_size: AtomicUsize::new(0),
miner: Arc::new(Miner::with_spec(&spec)), miner: Arc::new(Miner::with_spec(&spec)),
spec: spec, spec: spec,
vm_factory: EvmFactory::new(VMType::Interpreter), vm_factory: EvmFactory::new(VMType::Interpreter, 1024 * 1024),
latest_block_timestamp: RwLock::new(10_000_000), latest_block_timestamp: RwLock::new(10_000_000),
}; };
client.add_blocks(1, EachBlockWith::Nothing); // add genesis block client.add_blocks(1, EachBlockWith::Nothing); // add genesis block
client.genesis_hash = client.last_hash.read().clone(); client.genesis_hash = client.last_hash.read().clone();
client client
} }
/// Set the transaction receipt result /// Set the transaction receipt result
pub fn set_transaction_receipt(&self, id: TransactionID, receipt: LocalizedReceipt) { pub fn set_transaction_receipt(&self, id: TransactionID, receipt: LocalizedReceipt) {
self.receipts.write().insert(id, receipt); self.receipts.write().insert(id, receipt);
@ -182,6 +192,7 @@ impl TestBlockChainClient {
header.set_parent_hash(self.last_hash.read().clone()); header.set_parent_hash(self.last_hash.read().clone());
header.set_number(n as BlockNumber); header.set_number(n as BlockNumber);
header.set_gas_limit(U256::from(1_000_000)); header.set_gas_limit(U256::from(1_000_000));
header.set_extra_data(self.extra_data.clone());
let uncles = match with { let uncles = match with {
EachBlockWith::Uncle | EachBlockWith::UncleAndTransaction => { EachBlockWith::Uncle | EachBlockWith::UncleAndTransaction => {
let mut uncles = RlpStream::new_list(1); let mut uncles = RlpStream::new_list(1);
@ -283,13 +294,14 @@ impl TestBlockChainClient {
} }
} }
pub fn get_temp_journal_db() -> GuardedTempResult<Box<JournalDB>> { pub fn get_temp_state_db() -> GuardedTempResult<StateDB> {
let temp = RandomTempPath::new(); let temp = RandomTempPath::new();
let db = Database::open_default(temp.as_str()).unwrap(); let db = Database::open(&DatabaseConfig::with_columns(NUM_COLUMNS), temp.as_str()).unwrap();
let journal_db = journaldb::new(Arc::new(db), journaldb::Algorithm::EarlyMerge, None); let journal_db = journaldb::new(Arc::new(db), journaldb::Algorithm::EarlyMerge, COL_STATE);
let state_db = StateDB::new(journal_db, 1024 * 1024);
GuardedTempResult { GuardedTempResult {
_temp: temp, _temp: temp,
result: Some(journal_db) result: Some(state_db)
} }
} }
@ -297,9 +309,9 @@ impl MiningBlockChainClient for TestBlockChainClient {
fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock { fn prepare_open_block(&self, author: Address, gas_range_target: (U256, U256), extra_data: Bytes) -> OpenBlock {
let engine = &*self.spec.engine; let engine = &*self.spec.engine;
let genesis_header = self.spec.genesis_header(); let genesis_header = self.spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
self.spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); self.spec.ensure_db_good(&mut db).unwrap();
let last_hashes = vec![genesis_header.hash()]; let last_hashes = vec![genesis_header.hash()];
let mut open_block = OpenBlock::new( let mut open_block = OpenBlock::new(
@ -382,12 +394,16 @@ impl BlockChainClient for TestBlockChainClient {
} }
} }
fn list_accounts(&self, _id: BlockID) -> Option<Vec<Address>> {
None
}
fn transaction(&self, _id: TransactionID) -> Option<LocalizedTransaction> { fn transaction(&self, _id: TransactionID) -> Option<LocalizedTransaction> {
unimplemented!(); None // Simple default.
} }
fn uncle(&self, _id: UncleID) -> Option<Bytes> { fn uncle(&self, _id: UncleID) -> Option<Bytes> {
unimplemented!(); None // Simple default.
} }
fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt> { fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt> {
@ -544,8 +560,12 @@ impl BlockChainClient for TestBlockChainClient {
Ok(h) Ok(h)
} }
fn queue_info(&self) -> BlockQueueInfo { fn import_block_with_receipts(&self, b: Bytes, _r: Bytes) -> Result<H256, BlockImportError> {
BlockQueueInfo { self.import_block(b)
}
fn queue_info(&self) -> QueueInfo {
QueueInfo {
verified_queue_size: self.queue_size.load(AtomicOrder::Relaxed), verified_queue_size: self.queue_size.load(AtomicOrder::Relaxed),
unverified_queue_size: 0, unverified_queue_size: 0,
verifying_queue_size: 0, verifying_queue_size: 0,
@ -569,6 +589,10 @@ impl BlockChainClient for TestBlockChainClient {
genesis_hash: self.genesis_hash.clone(), genesis_hash: self.genesis_hash.clone(),
best_block_hash: self.last_hash.read().clone(), best_block_hash: self.last_hash.read().clone(),
best_block_number: self.blocks.read().len() as BlockNumber - 1, best_block_number: self.blocks.read().len() as BlockNumber - 1,
first_block_hash: None,
first_block_number: None,
ancient_block_hash: None,
ancient_block_number: None,
} }
} }
@ -595,6 +619,6 @@ impl BlockChainClient for TestBlockChainClient {
} }
fn pending_transactions(&self) -> Vec<SignedTransaction> { fn pending_transactions(&self) -> Vec<SignedTransaction> {
self.miner.pending_transactions() self.miner.pending_transactions(self.chain_info().best_block_number)
} }
} }


@ -17,7 +17,7 @@
use std::collections::BTreeMap; use std::collections::BTreeMap;
use util::{U256, Address, H256, H2048, Bytes, Itertools}; use util::{U256, Address, H256, H2048, Bytes, Itertools};
use blockchain::TreeRoute; use blockchain::TreeRoute;
use block_queue::BlockQueueInfo; use verification::queue::QueueInfo as BlockQueueInfo;
use block::{OpenBlock, SealedBlock}; use block::{OpenBlock, SealedBlock};
use header::{BlockNumber}; use header::{BlockNumber};
use transaction::{LocalizedTransaction, SignedTransaction}; use transaction::{LocalizedTransaction, SignedTransaction};
@ -38,7 +38,6 @@ use ipc::IpcConfig;
use types::blockchain_info::BlockChainInfo; use types::blockchain_info::BlockChainInfo;
use types::block_status::BlockStatus; use types::block_status::BlockStatus;
#[derive(Ipc)]
#[ipc(client_ident="RemoteClient")] #[ipc(client_ident="RemoteClient")]
/// Blockchain database client. Owns and manages a blockchain and a block queue. /// Blockchain database client. Owns and manages a blockchain and a block queue.
pub trait BlockChainClient : Sync + Send { pub trait BlockChainClient : Sync + Send {
@ -112,6 +111,9 @@ pub trait BlockChainClient : Sync + Send {
Therefore storage_at has returned Some; qed") Therefore storage_at has returned Some; qed")
} }
/// Get a list of all accounts in the block `id`, if fat DB is in operation, otherwise `None`.
fn list_accounts(&self, id: BlockID) -> Option<Vec<Address>>;
/// Get transaction with given hash. /// Get transaction with given hash.
fn transaction(&self, id: TransactionID) -> Option<LocalizedTransaction>; fn transaction(&self, id: TransactionID) -> Option<LocalizedTransaction>;
@ -137,6 +139,9 @@ pub trait BlockChainClient : Sync + Send {
/// Import a block into the blockchain. /// Import a block into the blockchain.
fn import_block(&self, bytes: Bytes) -> Result<H256, BlockImportError>; fn import_block(&self, bytes: Bytes) -> Result<H256, BlockImportError>;
/// Import a block with transaction receipts. Does no sealing and transaction validation.
fn import_block_with_receipts(&self, block_bytes: Bytes, receipts_bytes: Bytes) -> Result<H256, BlockImportError>;
/// Get block queue information. /// Get block queue information.
fn queue_info(&self) -> BlockQueueInfo; fn queue_info(&self) -> BlockQueueInfo;
@ -189,15 +194,20 @@ pub trait BlockChainClient : Sync + Send {
fn gas_price_statistics(&self, sample_size: usize, distribution_size: usize) -> Result<Vec<U256>, ()> { fn gas_price_statistics(&self, sample_size: usize, distribution_size: usize) -> Result<Vec<U256>, ()> {
let mut h = self.chain_info().best_block_hash; let mut h = self.chain_info().best_block_hash;
let mut corpus = Vec::new(); let mut corpus = Vec::new();
for _ in 0..sample_size { while corpus.is_empty() {
let block_bytes = self.block(BlockID::Hash(h)).expect("h is either the best_block_hash or an ancestor; qed"); for _ in 0..sample_size {
let block = BlockView::new(&block_bytes); let block_bytes = self.block(BlockID::Hash(h)).expect("h is either the best_block_hash or an ancestor; qed");
let header = block.header_view(); let block = BlockView::new(&block_bytes);
if header.number() == 0 { let header = block.header_view();
break; if header.number() == 0 {
if corpus.is_empty() {
corpus.push(20_000_000_000u64.into()); // we have literally no information - it's as good a number as any.
}
break;
}
block.transaction_views().iter().foreach(|t| corpus.push(t.gas_price()));
h = header.parent_hash().clone();
} }
block.transaction_views().iter().foreach(|t| corpus.push(t.gas_price()));
h = header.parent_hash().clone();
} }
corpus.sort(); corpus.sort();
let n = corpus.len(); let n = corpus.len();
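The sampling loop above walks back from the best block, pushes every transaction's gas price into `corpus`, retries until the corpus is non-empty and, at the genesis boundary with no data at all, falls back to 20 gwei; the sorted corpus is then read off by index. The rest of the function is truncated in this diff, so the exact bucketing below is an assumption: a small self-contained sketch that takes evenly spaced samples from the sorted corpus.

// Illustrative bucketing over a sorted corpus; not the exact ethcore indexing.
fn gas_price_buckets(mut corpus: Vec<u64>, buckets: usize) -> Vec<u64> {
    if corpus.is_empty() {
        // Mirror the fallback above: with no information, 20 gwei is as good a number as any.
        corpus.push(20_000_000_000);
    }
    corpus.sort();
    let n = corpus.len();
    let step = buckets.saturating_sub(1).max(1);
    (0..buckets).map(|i| corpus[i * (n - 1) / step]).collect()
}

fn main() {
    let prices: Vec<u64> = vec![21, 20, 25, 22, 30, 20, 50];
    println!("{:?}", gas_price_buckets(prices, 3)); // [20, 22, 50]
}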


@ -34,8 +34,10 @@ pub const COL_BODIES: Option<u32> = Some(2);
pub const COL_EXTRA: Option<u32> = Some(3); pub const COL_EXTRA: Option<u32> = Some(3);
/// Column for Traces /// Column for Traces
pub const COL_TRACE: Option<u32> = Some(4); pub const COL_TRACE: Option<u32> = Some(4);
/// Column for the accounts bloom filter
pub const COL_ACCOUNT_BLOOM: Option<u32> = Some(5);
/// Number of columns in DB /// Number of columns in DB
pub const NUM_COLUMNS: Option<u32> = Some(5); pub const NUM_COLUMNS: Option<u32> = Some(6);
/// Modes for updating caches. /// Modes for updating caches.
#[derive(Clone, Copy)] #[derive(Clone, Copy)]
@ -86,6 +88,9 @@ pub trait Writable {
/// Writes the value into the database. /// Writes the value into the database.
fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: rlp::Encodable, R: Deref<Target = [u8]>; fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: rlp::Encodable, R: Deref<Target = [u8]>;
/// Deletes key from the database.
fn delete<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>) where T: rlp::Encodable, R: Deref<Target = [u8]>;
/// Writes the value into the database and updates the cache. /// Writes the value into the database and updates the cache.
fn write_with_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, T>, key: K, value: T, policy: CacheUpdatePolicy) where fn write_with_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, T>, key: K, value: T, policy: CacheUpdatePolicy) where
K: Key<T, Target = R> + Hash + Eq, K: Key<T, Target = R> + Hash + Eq,
@ -122,6 +127,34 @@ pub trait Writable {
}, },
} }
} }
/// Writes and removes the values into the database and updates the cache.
fn extend_with_option_cache<K, T, R>(&mut self, col: Option<u32>, cache: &mut Cache<K, Option<T>>, values: HashMap<K, Option<T>>, policy: CacheUpdatePolicy) where
K: Key<T, Target = R> + Hash + Eq,
T: rlp::Encodable,
R: Deref<Target = [u8]> {
match policy {
CacheUpdatePolicy::Overwrite => {
for (key, value) in values.into_iter() {
match value {
Some(ref v) => self.write(col, &key, v),
None => self.delete(col, &key),
}
cache.insert(key, value);
}
},
CacheUpdatePolicy::Remove => {
for (key, value) in values.into_iter() {
match value {
Some(v) => self.write(col, &key, &v),
None => self.delete(col, &key),
}
cache.remove(&key);
}
},
}
}
} }
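`extend_with_option_cache` threads one decision through both stores: `Some(v)` writes the value, `None` deletes the key, and the in-memory cache is either overwritten or evicted depending on the policy. A compact std-only sketch of that shape, with HashMaps standing in for the DB transaction and the cache (hypothetical names, not the ethcore trait):

use std::collections::HashMap;

#[derive(Clone, Copy)]
enum CacheUpdatePolicy { Overwrite, Remove }

fn extend_with_option_cache(
    db: &mut HashMap<String, String>,            // stand-in for the DB transaction
    cache: &mut HashMap<String, Option<String>>, // stand-in for the in-memory cache
    values: HashMap<String, Option<String>>,
    policy: CacheUpdatePolicy,
) {
    for (key, value) in values {
        // Some(v) means write, None means delete; the same rule under every policy.
        match &value {
            Some(v) => { db.insert(key.clone(), v.clone()); }
            None => { db.remove(&key); }
        }
        match policy {
            CacheUpdatePolicy::Overwrite => { cache.insert(key, value); }
            CacheUpdatePolicy::Remove => { cache.remove(&key); }
        }
    }
}

fn main() {
    let (mut db, mut cache) = (HashMap::new(), HashMap::new());
    let mut batch = HashMap::new();
    batch.insert("a".to_string(), Some("1".to_string()));
    batch.insert("b".to_string(), None);
    extend_with_option_cache(&mut db, &mut cache, batch, CacheUpdatePolicy::Overwrite);
    println!("db={:?} cache={:?}", db, cache);

    let mut batch2 = HashMap::new();
    batch2.insert("a".to_string(), None);
    extend_with_option_cache(&mut db, &mut cache, batch2, CacheUpdatePolicy::Remove);
    println!("after remove: db={:?} cache={:?}", db, cache);
}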
/// Should be used to read values from database. /// Should be used to read values from database.
@ -173,6 +206,10 @@ impl Writable for DBTransaction {
fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: rlp::Encodable, R: Deref<Target = [u8]> { fn write<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>, value: &T) where T: rlp::Encodable, R: Deref<Target = [u8]> {
self.put(col, &key.key(), &rlp::encode(value)); self.put(col, &key.key(), &rlp::encode(value));
} }
fn delete<T, R>(&mut self, col: Option<u32>, key: &Key<T, Target = R>) where T: rlp::Encodable, R: Deref<Target = [u8]> {
self.delete(col, &key.key());
}
} }
impl Readable for Database { impl Readable for Database {


@ -16,14 +16,20 @@
//! A blockchain engine that supports a basic, non-BFT proof-of-authority. //! A blockchain engine that supports a basic, non-BFT proof-of-authority.
use common::*;
use ethkey::{recover, public_to_address}; use ethkey::{recover, public_to_address};
use account_provider::AccountProvider; use account_provider::AccountProvider;
use block::*; use block::*;
use builtin::Builtin;
use spec::CommonParams; use spec::CommonParams;
use engines::Engine; use engines::Engine;
use env_info::EnvInfo;
use error::{BlockError, Error};
use evm::Schedule; use evm::Schedule;
use ethjson; use ethjson;
use header::Header;
use transaction::SignedTransaction;
use util::*;
/// `BasicAuthority` params. /// `BasicAuthority` params.
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
@ -112,7 +118,7 @@ impl Engine for BasicAuthority {
let header = block.header(); let header = block.header();
let message = header.bare_hash(); let message = header.bare_hash();
// account should be permanently unlocked, otherwise sealing will fail // account should be permanently unlocked, otherwise sealing will fail
if let Ok(signature) = ap.sign(*block.header().author(), message) { if let Ok(signature) = ap.sign(*block.header().author(), None, message) {
return Some(vec![::rlp::encode(&(&*signature as &[u8])).to_vec()]); return Some(vec![::rlp::encode(&(&*signature as &[u8])).to_vec()]);
} else { } else {
trace!(target: "basicauthority", "generate_seal: FAIL: accounts secret key unavailable"); trace!(target: "basicauthority", "generate_seal: FAIL: accounts secret key unavailable");
@ -184,10 +190,13 @@ impl Header {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use common::*; use util::*;
use block::*; use block::*;
use env_info::EnvInfo;
use error::{BlockError, Error};
use tests::helpers::*; use tests::helpers::*;
use account_provider::AccountProvider; use account_provider::AccountProvider;
use header::Header;
use spec::Spec; use spec::Spec;
/// Create a new test chain spec with `BasicAuthority` consensus engine. /// Create a new test chain spec with `BasicAuthority` consensus engine.
@ -252,9 +261,9 @@ mod tests {
let spec = new_test_authority(); let spec = new_test_authority();
let engine = &*spec.engine; let engine = &*spec.engine;
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap(); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = b.close_and_lock(); let b = b.close_and_lock();


@ -18,11 +18,11 @@ use std::collections::BTreeMap;
use util::Address; use util::Address;
use builtin::Builtin; use builtin::Builtin;
use engines::Engine; use engines::Engine;
use env_info::EnvInfo;
use spec::CommonParams; use spec::CommonParams;
use evm::Schedule; use evm::Schedule;
use env_info::EnvInfo;
use block::ExecutedBlock; use block::ExecutedBlock;
use common::Bytes; use util::Bytes;
use account_provider::AccountProvider; use account_provider::AccountProvider;
/// An engine which does not provide any consensus mechanism, just seals blocks internally. /// An engine which does not provide any consensus mechanism, just seals blocks internally.
@ -67,10 +67,11 @@ impl Engine for InstantSeal {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use common::*; use util::*;
use tests::helpers::*; use tests::helpers::*;
use account_provider::AccountProvider; use account_provider::AccountProvider;
use spec::Spec; use spec::Spec;
use header::Header;
use block::*; use block::*;
#[test] #[test]
@ -81,9 +82,9 @@ mod tests {
let spec = Spec::new_test_instant(); let spec = Spec::new_test_instant();
let engine = &*spec.engine; let engine = &*spec.engine;
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap(); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = b.close_and_lock(); let b = b.close_and_lock();


@ -24,11 +24,16 @@ pub use self::null_engine::NullEngine;
pub use self::instant_seal::InstantSeal; pub use self::instant_seal::InstantSeal;
pub use self::basic_authority::BasicAuthority; pub use self::basic_authority::BasicAuthority;
use common::*; use util::*;
use account_provider::AccountProvider; use account_provider::AccountProvider;
use block::ExecutedBlock; use block::ExecutedBlock;
use builtin::Builtin;
use env_info::EnvInfo;
use error::Error;
use spec::CommonParams; use spec::CommonParams;
use evm::Schedule; use evm::Schedule;
use header::Header;
use transaction::SignedTransaction;
/// A consensus mechanism for the chain. Generally either proof-of-work or proof-of-stake-based. /// A consensus mechanism for the chain. Generally either proof-of-work or proof-of-stake-based.
/// Provides hooks into each of the major parts of block import. /// Provides hooks into each of the major parts of block import.
@ -123,10 +128,14 @@ pub trait Engine : Sync + Send {
fn is_builtin(&self, a: &Address) -> bool { self.builtins().contains_key(a) } fn is_builtin(&self, a: &Address) -> bool { self.builtins().contains_key(a) }
/// Determine the code execution cost of the builtin contract with address `a`. /// Determine the code execution cost of the builtin contract with address `a`.
/// Panics if `is_builtin(a)` is not true. /// Panics if `is_builtin(a)` is not true.
fn cost_of_builtin(&self, a: &Address, input: &[u8]) -> U256 { self.builtins().get(a).unwrap().cost(input.len()) } fn cost_of_builtin(&self, a: &Address, input: &[u8]) -> U256 {
self.builtins().get(a).expect("queried cost of nonexistent builtin").cost(input.len())
}
/// Execute the builtin contract `a` on `input` and return `output`. /// Execute the builtin contract `a` on `input` and return `output`.
/// Panics if `is_builtin(a)` is not true. /// Panics if `is_builtin(a)` is not true.
fn execute_builtin(&self, a: &Address, input: &[u8], output: &mut BytesRef) { self.builtins().get(a).unwrap().execute(input, output); } fn execute_builtin(&self, a: &Address, input: &[u8], output: &mut BytesRef) {
self.builtins().get(a).expect("attempted to execute nonexistent builtin").execute(input, output);
}
// TODO: sealing stuff - though might want to leave this for later. // TODO: sealing stuff - though might want to leave this for later.
} }


@ -14,10 +14,15 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use ethash::{quick_get_difficulty, EthashManager, H256 as EH256}; use ethash::{quick_get_difficulty, slow_get_seedhash, EthashManager, H256 as EH256};
use common::*; use util::*;
use block::*; use block::*;
use builtin::Builtin;
use env_info::EnvInfo;
use error::{BlockError, Error};
use header::Header;
use spec::CommonParams; use spec::CommonParams;
use transaction::SignedTransaction;
use engines::Engine; use engines::Engine;
use evm::Schedule; use evm::Schedule;
use ethjson; use ethjson;
@ -32,6 +37,8 @@ pub struct EthashParams {
pub minimum_difficulty: U256, pub minimum_difficulty: U256,
/// Difficulty bound divisor. /// Difficulty bound divisor.
pub difficulty_bound_divisor: U256, pub difficulty_bound_divisor: U256,
/// Difficulty increment divisor.
pub difficulty_increment_divisor: u64,
/// Block duration. /// Block duration.
pub duration_limit: u64, pub duration_limit: u64,
/// Block reward. /// Block reward.
@ -39,13 +46,21 @@ pub struct EthashParams {
/// Namereg contract address. /// Namereg contract address.
pub registrar: Address, pub registrar: Address,
/// Homestead transition block number. /// Homestead transition block number.
pub frontier_compatibility_mode_limit: u64, pub homestead_transition: u64,
/// DAO hard-fork transition block (X). /// DAO hard-fork transition block (X).
pub dao_hardfork_transition: u64, pub dao_hardfork_transition: u64,
/// DAO hard-fork refund contract address (C). /// DAO hard-fork refund contract address (C).
pub dao_hardfork_beneficiary: Address, pub dao_hardfork_beneficiary: Address,
/// DAO hard-fork DAO accounts list (L) /// DAO hard-fork DAO accounts list (L)
pub dao_hardfork_accounts: Vec<Address>, pub dao_hardfork_accounts: Vec<Address>,
/// Transition block for a change of difficulty params (currently just bound_divisor).
pub difficulty_hardfork_transition: u64,
/// Difficulty param after the difficulty transition.
pub difficulty_hardfork_bound_divisor: U256,
/// Block on which there is no additional difficulty from the exponential bomb.
pub bomb_defuse_transition: u64,
/// EIP-150 (gas cost changes) transition block number.
pub eip150_transition: u64,
} }
impl From<ethjson::spec::EthashParams> for EthashParams { impl From<ethjson::spec::EthashParams> for EthashParams {
@ -54,13 +69,18 @@ impl From<ethjson::spec::EthashParams> for EthashParams {
gas_limit_bound_divisor: p.gas_limit_bound_divisor.into(), gas_limit_bound_divisor: p.gas_limit_bound_divisor.into(),
minimum_difficulty: p.minimum_difficulty.into(), minimum_difficulty: p.minimum_difficulty.into(),
difficulty_bound_divisor: p.difficulty_bound_divisor.into(), difficulty_bound_divisor: p.difficulty_bound_divisor.into(),
difficulty_increment_divisor: p.difficulty_increment_divisor.map_or(10, Into::into),
duration_limit: p.duration_limit.into(), duration_limit: p.duration_limit.into(),
block_reward: p.block_reward.into(), block_reward: p.block_reward.into(),
registrar: p.registrar.map_or_else(Address::new, Into::into), registrar: p.registrar.map_or_else(Address::new, Into::into),
frontier_compatibility_mode_limit: p.frontier_compatibility_mode_limit.map_or(0, Into::into), homestead_transition: p.homestead_transition.map_or(0, Into::into),
dao_hardfork_transition: p.dao_hardfork_transition.map_or(0x7fffffffffffffff, Into::into), dao_hardfork_transition: p.dao_hardfork_transition.map_or(0x7fffffffffffffff, Into::into),
dao_hardfork_beneficiary: p.dao_hardfork_beneficiary.map_or_else(Address::new, Into::into), dao_hardfork_beneficiary: p.dao_hardfork_beneficiary.map_or_else(Address::new, Into::into),
dao_hardfork_accounts: p.dao_hardfork_accounts.unwrap_or_else(Vec::new).into_iter().map(Into::into).collect(), dao_hardfork_accounts: p.dao_hardfork_accounts.unwrap_or_else(Vec::new).into_iter().map(Into::into).collect(),
difficulty_hardfork_transition: p.difficulty_hardfork_transition.map_or(0x7fffffffffffffff, Into::into),
difficulty_hardfork_bound_divisor: p.difficulty_hardfork_bound_divisor.map_or(p.difficulty_bound_divisor.into(), Into::into),
bomb_defuse_transition: p.bomb_defuse_transition.map_or(0x7fffffffffffffff, Into::into),
eip150_transition: p.eip150_transition.map_or(0, Into::into),
} }
} }
} }
@ -105,12 +125,14 @@ impl Engine for Ethash {
} }
fn schedule(&self, env_info: &EnvInfo) -> Schedule { fn schedule(&self, env_info: &EnvInfo) -> Schedule {
trace!(target: "client", "Creating schedule. fCML={}", self.ethash_params.frontier_compatibility_mode_limit); trace!(target: "client", "Creating schedule. fCML={}, bGCML={}", self.ethash_params.homestead_transition, self.ethash_params.eip150_transition);
if env_info.number < self.ethash_params.frontier_compatibility_mode_limit { if env_info.number < self.ethash_params.homestead_transition {
Schedule::new_frontier() Schedule::new_frontier()
} else { } else if env_info.number < self.ethash_params.eip150_transition {
Schedule::new_homestead() Schedule::new_homestead()
} else {
Schedule::new_homestead_gas_fix()
} }
} }
@ -168,6 +190,10 @@ impl Engine for Ethash {
fields.state.add_balance(u.author(), &(reward * U256::from(8 + u.number() - current_number) / U256::from(8))); fields.state.add_balance(u.author(), &(reward * U256::from(8 + u.number() - current_number) / U256::from(8)));
} }
// Commit state so that we can actually figure out the state root.
if let Err(e) = fields.state.commit() {
warn!("Encountered error on state commit: {}", e);
}
} }
fn verify_block_basic(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> { fn verify_block_basic(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
@ -217,6 +243,7 @@ impl Engine for Ethash {
let result = self.pow.compute_light(header.number() as u64, &Ethash::to_ethash(header.bare_hash()), header.nonce().low_u64()); let result = self.pow.compute_light(header.number() as u64, &Ethash::to_ethash(header.bare_hash()), header.nonce().low_u64());
let mix = Ethash::from_ethash(result.mix_hash); let mix = Ethash::from_ethash(result.mix_hash);
let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(result.value)); let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(result.value));
trace!(target: "miner", "num: {}, seed: {}, h: {}, non: {}, mix: {}, res: {}" , header.number() as u64, Ethash::from_ethash(slow_get_seedhash(header.number() as u64)), header.bare_hash(), header.nonce().low_u64(), Ethash::from_ethash(result.mix_hash), Ethash::from_ethash(result.value));
if mix != header.mix_hash() { if mix != header.mix_hash() {
return Err(From::from(BlockError::MismatchedH256SealElement(Mismatch { expected: mix, found: header.mix_hash() }))); return Err(From::from(BlockError::MismatchedH256SealElement(Mismatch { expected: mix, found: header.mix_hash() })));
} }
@ -247,7 +274,7 @@ impl Engine for Ethash {
} }
fn verify_transaction_basic(&self, t: &SignedTransaction, header: &Header) -> result::Result<(), Error> { fn verify_transaction_basic(&self, t: &SignedTransaction, header: &Header) -> result::Result<(), Error> {
if header.number() >= self.ethash_params.frontier_compatibility_mode_limit { if header.number() >= self.ethash_params.homestead_transition {
try!(t.check_low_s()); try!(t.check_low_s());
} }
Ok(()) Ok(())
@ -267,9 +294,13 @@ impl Ethash {
} }
let min_difficulty = self.ethash_params.minimum_difficulty; let min_difficulty = self.ethash_params.minimum_difficulty;
let difficulty_bound_divisor = self.ethash_params.difficulty_bound_divisor; let difficulty_hardfork = header.number() >= self.ethash_params.difficulty_hardfork_transition;
let difficulty_bound_divisor = match difficulty_hardfork {
true => self.ethash_params.difficulty_hardfork_bound_divisor,
false => self.ethash_params.difficulty_bound_divisor,
};
let duration_limit = self.ethash_params.duration_limit; let duration_limit = self.ethash_params.duration_limit;
let frontier_limit = self.ethash_params.frontier_compatibility_mode_limit; let frontier_limit = self.ethash_params.homestead_transition;
let mut target = if header.number() < frontier_limit { let mut target = if header.number() < frontier_limit {
if header.timestamp() >= parent.timestamp() + duration_limit { if header.timestamp() >= parent.timestamp() + duration_limit {
@ -281,17 +312,19 @@ impl Ethash {
else { else {
trace!(target: "ethash", "Calculating difficulty parent.difficulty={}, header.timestamp={}, parent.timestamp={}", parent.difficulty(), header.timestamp(), parent.timestamp()); trace!(target: "ethash", "Calculating difficulty parent.difficulty={}, header.timestamp={}, parent.timestamp={}", parent.difficulty(), header.timestamp(), parent.timestamp());
//block_diff = parent_diff + parent_diff // 2048 * max(1 - (block_timestamp - parent_timestamp) // 10, -99) //block_diff = parent_diff + parent_diff // 2048 * max(1 - (block_timestamp - parent_timestamp) // 10, -99)
let diff_inc = (header.timestamp() - parent.timestamp()) / 10; let diff_inc = (header.timestamp() - parent.timestamp()) / self.ethash_params.difficulty_increment_divisor;
if diff_inc <= 1 { if diff_inc <= 1 {
parent.difficulty().clone() + parent.difficulty().clone() / From::from(2048) * From::from(1 - diff_inc) parent.difficulty().clone() + parent.difficulty().clone() / From::from(difficulty_bound_divisor) * From::from(1 - diff_inc)
} else { } else {
parent.difficulty().clone() - parent.difficulty().clone() / From::from(2048) * From::from(min(diff_inc - 1, 99)) parent.difficulty().clone() - parent.difficulty().clone() / From::from(difficulty_bound_divisor) * From::from(min(diff_inc - 1, 99))
} }
}; };
target = max(min_difficulty, target); target = max(min_difficulty, target);
let period = ((parent.number() + 1) / EXP_DIFF_PERIOD) as usize; if header.number() < self.ethash_params.bomb_defuse_transition {
if period > 1 { let period = ((parent.number() + 1) / EXP_DIFF_PERIOD) as usize;
target = max(min_difficulty, target + (U256::from(1) << (period - 2))); if period > 1 {
target = max(min_difficulty, target + (U256::from(1) << (period - 2)));
}
} }
target target
} }
@ -343,9 +376,12 @@ impl Header {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use common::*; use util::*;
use block::*; use block::*;
use tests::helpers::*; use tests::helpers::*;
use env_info::EnvInfo;
use error::{BlockError, Error};
use header::Header;
use super::super::new_morden; use super::super::new_morden;
use super::Ethash; use super::Ethash;
use rlp; use rlp;
@ -355,9 +391,9 @@ mod tests {
let spec = new_morden(); let spec = new_morden();
let engine = &*spec.engine; let engine = &*spec.engine;
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let b = b.close(); let b = b.close();
@ -369,9 +405,9 @@ mod tests {
let spec = new_morden(); let spec = new_morden();
let engine = &*spec.engine; let engine = &*spec.engine;
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let last_hashes = Arc::new(vec![genesis_header.hash()]); let last_hashes = Arc::new(vec![genesis_header.hash()]);
let mut b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap(); let mut b = OpenBlock::new(engine, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
let mut uncle = Header::new(); let mut uncle = Header::new();


@ -42,14 +42,20 @@ pub fn new_frontier() -> Spec { load(include_bytes!("../../res/ethereum/frontier
/// Create a new Frontier mainnet chain spec without the DAO hardfork. /// Create a new Frontier mainnet chain spec without the DAO hardfork.
pub fn new_classic() -> Spec { load(include_bytes!("../../res/ethereum/classic.json")) } pub fn new_classic() -> Spec { load(include_bytes!("../../res/ethereum/classic.json")) }
/// Create a new Expanse mainnet chain spec.
pub fn new_expanse() -> Spec { load(include_bytes!("../../res/ethereum/expanse.json")) }
/// Create a new Frontier chain spec as though it never changes to Homestead. /// Create a new Frontier chain spec as though it never changes to Homestead.
pub fn new_frontier_test() -> Spec { load(include_bytes!("../../res/ethereum/frontier_test.json")) } pub fn new_frontier_test() -> Spec { load(include_bytes!("../../res/ethereum/frontier_test.json")) }
/// Create a new Homestead chain spec as though it never changed from Frontier. /// Create a new Homestead chain spec as though it never changed from Frontier.
pub fn new_homestead_test() -> Spec { load(include_bytes!("../../res/ethereum/homestead_test.json")) } pub fn new_homestead_test() -> Spec { load(include_bytes!("../../res/ethereum/homestead_test.json")) }
/// Create a new Homestead-EIP150 chain spec as though it never changed from Homestead/Frontier.
pub fn new_eip150_test() -> Spec { load(include_bytes!("../../res/ethereum/eip150_test.json")) }
/// Create a new Frontier/Homestead/DAO chain spec with transition points at #5 and #8. /// Create a new Frontier/Homestead/DAO chain spec with transition points at #5 and #8.
pub fn new_daohardfork_test() -> Spec { load(include_bytes!("../../res/ethereum/daohardfork_test.json")) } pub fn new_transition_test() -> Spec { load(include_bytes!("../../res/ethereum/transition_test.json")) }
/// Create a new Frontier main net chain spec without genesis accounts. /// Create a new Frontier main net chain spec without genesis accounts.
pub fn new_mainnet_like() -> Spec { load(include_bytes!("../../res/ethereum/frontier_like_test.json")) } pub fn new_mainnet_like() -> Spec { load(include_bytes!("../../res/ethereum/frontier_like_test.json")) }
@ -59,19 +65,20 @@ pub fn new_morden() -> Spec { load(include_bytes!("../../res/ethereum/morden.jso
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use common::*; use util::*;
use state::*; use state::*;
use super::*; use super::*;
use tests::helpers::*; use tests::helpers::*;
use views::BlockView;
#[test] #[test]
fn ensure_db_good() { fn ensure_db_good() {
let spec = new_morden(); let spec = new_morden();
let engine = &spec.engine; let engine = &spec.engine;
let genesis_header = spec.genesis_header(); let genesis_header = spec.genesis_header();
let mut db_result = get_temp_journal_db(); let mut db_result = get_temp_state_db();
let mut db = db_result.take(); let mut db = db_result.take();
spec.ensure_db_good(db.as_hashdb_mut()).unwrap(); spec.ensure_db_good(&mut db).unwrap();
let s = State::from_existing(db, genesis_header.state_root().clone(), engine.account_start_nonce(), Default::default()).unwrap(); let s = State::from_existing(db, genesis_header.state_root().clone(), engine.account_start_nonce(), Default::default()).unwrap();
assert_eq!(s.balance(&"0000000000000000000000000000000000000001".into()), 1u64.into()); assert_eq!(s.balance(&"0000000000000000000000000000000000000001".into()), 1u64.into());
assert_eq!(s.balance(&"0000000000000000000000000000000000000002".into()), 1u64.into()); assert_eq!(s.balance(&"0000000000000000000000000000000000000002".into()), 1u64.into());


@ -24,7 +24,8 @@ extern crate test;
use self::test::{Bencher, black_box}; use self::test::{Bencher, black_box};
use common::*; use util::*;
use action_params::ActionParams;
use evm::{self, Factory, VMType}; use evm::{self, Factory, VMType};
use evm::tests::FakeExt; use evm::tests::FakeExt;


@ -81,7 +81,7 @@ pub trait Ext {
) -> MessageCallResult; ) -> MessageCallResult;
/// Returns code at given address /// Returns code at given address
fn extcode(&self, address: &Address) -> Bytes; fn extcode(&self, address: &Address) -> Arc<Bytes>;
/// Returns code size at given address /// Returns code size at given address
fn extcodesize(&self, address: &Address) -> usize; fn extcodesize(&self, address: &Address) -> usize;


@ -18,8 +18,10 @@
//! //!
//! TODO: consider splitting it into two separate files. //! TODO: consider splitting it into two separate files.
use std::fmt; use std::fmt;
use std::sync::Arc;
use evm::Evm; use evm::Evm;
use util::{U256, Uint}; use util::{U256, Uint};
use super::interpreter::SharedCache;
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone)]
/// Type of EVM to use. /// Type of EVM to use.
@ -82,7 +84,8 @@ impl VMType {
/// Evm factory. Creates appropriate Evm. /// Evm factory. Creates appropriate Evm.
#[derive(Clone)] #[derive(Clone)]
pub struct Factory { pub struct Factory {
evm: VMType evm: VMType,
evm_cache: Arc<SharedCache>,
} }
impl Factory { impl Factory {
@ -95,9 +98,9 @@ impl Factory {
Box::new(super::jit::JitEvm::default()) Box::new(super::jit::JitEvm::default())
}, },
VMType::Interpreter => if Self::can_fit_in_usize(gas) { VMType::Interpreter => if Self::can_fit_in_usize(gas) {
Box::new(super::interpreter::Interpreter::<usize>::default()) Box::new(super::interpreter::Interpreter::<usize>::new(self.evm_cache.clone()))
} else { } else {
Box::new(super::interpreter::Interpreter::<U256>::default()) Box::new(super::interpreter::Interpreter::<U256>::new(self.evm_cache.clone()))
} }
} }
} }
@ -108,17 +111,19 @@ impl Factory {
pub fn create(&self, gas: U256) -> Box<Evm> { pub fn create(&self, gas: U256) -> Box<Evm> {
match self.evm { match self.evm {
VMType::Interpreter => if Self::can_fit_in_usize(gas) { VMType::Interpreter => if Self::can_fit_in_usize(gas) {
Box::new(super::interpreter::Interpreter::<usize>::default()) Box::new(super::interpreter::Interpreter::<usize>::new(self.evm_cache.clone()))
} else { } else {
Box::new(super::interpreter::Interpreter::<U256>::default()) Box::new(super::interpreter::Interpreter::<U256>::new(self.evm_cache.clone()))
} }
} }
} }
/// Create new instance of specific `VMType` factory /// Create new instance of specific `VMType` factory, with a size in bytes
pub fn new(evm: VMType) -> Self { /// for caching jump destinations.
pub fn new(evm: VMType, cache_size: usize) -> Self {
Factory { Factory {
evm: evm evm: evm,
evm_cache: Arc::new(SharedCache::new(cache_size)),
} }
} }
@ -132,7 +137,8 @@ impl Default for Factory {
#[cfg(all(feature = "jit", not(test)))] #[cfg(all(feature = "jit", not(test)))]
fn default() -> Factory { fn default() -> Factory {
Factory { Factory {
evm: VMType::Jit evm: VMType::Jit,
evm_cache: Arc::new(SharedCache::default()),
} }
} }
@ -140,7 +146,8 @@ impl Default for Factory {
#[cfg(any(not(feature = "jit"), test))] #[cfg(any(not(feature = "jit"), test))]
fn default() -> Factory { fn default() -> Factory {
Factory { Factory {
evm: VMType::Interpreter evm: VMType::Interpreter,
evm_cache: Arc::new(SharedCache::default()),
} }
} }
} }
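The factory change above threads a single Arc<SharedCache> into every interpreter it builds, so all EVM instances created by one Factory share one jump-destination cache of the configured size. A minimal std-only sketch of that ownership shape (toy Cache and Interpreter types, not the real evm module):

use std::sync::Arc;

struct SharedCache { capacity: usize }         // stand-in for the jump-dest cache

struct Interpreter { cache: Arc<SharedCache> } // each instance holds a handle, not a copy

struct Factory { cache: Arc<SharedCache> }

impl Factory {
    fn new(cache_size: usize) -> Self {
        Factory { cache: Arc::new(SharedCache { capacity: cache_size }) }
    }
    fn create(&self) -> Interpreter {
        // Cloning the Arc bumps a refcount; the cache itself is shared, not copied.
        Interpreter { cache: self.cache.clone() }
    }
}

fn main() {
    let factory = Factory::new(1024 * 32);
    let (a, b) = (factory.create(), factory.create());
    assert!(Arc::ptr_eq(&a.cache, &b.cache));
    println!("both interpreters share one {}-byte cache", b.cache.capacity);
}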
@ -158,22 +165,22 @@ macro_rules! evm_test(
#[ignore] #[ignore]
#[cfg(feature = "jit")] #[cfg(feature = "jit")]
fn $name_jit() { fn $name_jit() {
$name_test(Factory::new(VMType::Jit)); $name_test(Factory::new(VMType::Jit, 1024 * 32));
} }
#[test] #[test]
fn $name_int() { fn $name_int() {
$name_test(Factory::new(VMType::Interpreter)); $name_test(Factory::new(VMType::Interpreter, 1024 * 32));
} }
}; };
($name_test: ident: $name_jit: ident, $name_int: ident) => { ($name_test: ident: $name_jit: ident, $name_int: ident) => {
#[test] #[test]
#[cfg(feature = "jit")] #[cfg(feature = "jit")]
fn $name_jit() { fn $name_jit() {
$name_test(Factory::new(VMType::Jit)); $name_test(Factory::new(VMType::Jit, 1024 * 32));
} }
#[test] #[test]
fn $name_int() { fn $name_int() {
$name_test(Factory::new(VMType::Interpreter)); $name_test(Factory::new(VMType::Interpreter, 1024 * 32));
} }
} }
); );
@ -187,13 +194,13 @@ macro_rules! evm_test_ignore(
#[cfg(feature = "jit")] #[cfg(feature = "jit")]
#[cfg(feature = "ignored-tests")] #[cfg(feature = "ignored-tests")]
fn $name_jit() { fn $name_jit() {
$name_test(Factory::new(VMType::Jit)); $name_test(Factory::new(VMType::Jit, 1024 * 32));
} }
#[test] #[test]
#[ignore] #[ignore]
#[cfg(feature = "ignored-tests")] #[cfg(feature = "ignored-tests")]
fn $name_int() { fn $name_int() {
$name_test(Factory::new(VMType::Interpreter)); $name_test(Factory::new(VMType::Interpreter, 1024 * 32));
} }
} }
); );


@ -14,11 +14,12 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use common::*; use util::*;
use super::u256_to_address; use super::u256_to_address;
use evm::{self, CostType}; use evm::{self, CostType};
use evm::instructions::{self, Instruction, InstructionInfo}; use evm::instructions::{self, Instruction, InstructionInfo};
use evm::interpreter::stack::Stack; use evm::interpreter::stack::Stack;
use evm::schedule::Schedule;
macro_rules! overflowing { macro_rules! overflowing {
($x: expr) => {{ ($x: expr) => {{
@ -31,7 +32,7 @@ macro_rules! overflowing {
#[cfg_attr(feature="dev", allow(enum_variant_names))] #[cfg_attr(feature="dev", allow(enum_variant_names))]
enum InstructionCost<Cost: CostType> { enum InstructionCost<Cost: CostType> {
Gas(Cost), Gas(Cost),
GasMem(Cost, Cost), GasMem(Cost, Cost, Option<Cost>),
GasMemCopy(Cost, Cost, Cost) GasMemCopy(Cost, Cost, Cost)
} }
@ -56,7 +57,37 @@ impl<Gas: CostType> Gasometer<Gas> {
} }
} }
/// How much gas is provided to a CALL/CREATE, given that we need to deduct `needed` for this operation
/// and that we `requested` some.
pub fn gas_provided(&self, schedule: &Schedule, needed: Gas, requested: Option<evm::Result<Gas>>) -> evm::Result<Gas> {
match schedule.sub_gas_cap_divisor {
Some(cap_divisor) if self.current_gas >= needed => {
let gas_remaining = self.current_gas - needed;
let max_gas_provided = gas_remaining - gas_remaining / Gas::from(cap_divisor);
if let Some(Ok(r)) = requested {
Ok(min(r, max_gas_provided))
} else {
Ok(max_gas_provided)
}
},
_ => {
if let Some(r) = requested {
r
} else if self.current_gas >= needed {
Ok(self.current_gas - needed)
} else {
Ok(0.into())
}
}
}
}
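Editor's note: the `Some(cap_divisor)` branch is the EIP-150 style "all but one 1/64th" cap. A standalone plain-integer sketch of the same rule, with hypothetical numbers and no crate types:
fn provided_gas(current: u64, needed: u64, requested: Option<u64>, cap_divisor: Option<u64>) -> u64 {
    match cap_divisor {
        Some(d) if current >= needed => {
            let remaining = current - needed;
            let max_provided = remaining - remaining / d; // the caller keeps 1/d of what is left
            requested.map_or(max_provided, |r| r.min(max_provided))
        }
        // without a divisor, forward whatever was requested, or everything that remains
        _ => requested.unwrap_or_else(|| current.saturating_sub(needed)),
    }
}

fn main() {
    // 100_000 gas on hand, 5_000 consumed by the CALL itself:
    // at most 95_000 - 95_000 / 64 = 93_516 can be forwarded.
    assert_eq!(provided_gas(100_000, 5_000, Some(200_000), Some(64)), 93_516);
    // a smaller request passes through unclamped
    assert_eq!(provided_gas(100_000, 5_000, Some(10_000), Some(64)), 10_000);
}
The real method takes `Option<evm::Result<Gas>>`, so failed conversions are handled slightly differently, but the arithmetic is the same.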
#[cfg_attr(feature="dev", allow(cyclomatic_complexity))] #[cfg_attr(feature="dev", allow(cyclomatic_complexity))]
/// Determine how much gas is used by the given instruction, given the machine's state.
///
/// We guarantee that the final element of the returned tuple (`provided`) will be `Some`
/// iff the `instruction` is `CREATE` or one of the `CALL` variants. In this case,
/// it will be the amount of gas that the current context provides to the child context.
pub fn get_gas_cost_mem( pub fn get_gas_cost_mem(
&mut self, &mut self,
ext: &evm::Ext, ext: &evm::Ext,
@ -64,7 +95,7 @@ impl<Gas: CostType> Gasometer<Gas> {
info: &InstructionInfo, info: &InstructionInfo,
stack: &Stack<U256>, stack: &Stack<U256>,
current_mem_size: usize, current_mem_size: usize,
) -> evm::Result<(Gas, Gas, usize)> { ) -> evm::Result<(Gas, Gas, usize, Option<Gas>)> {
let schedule = ext.schedule(); let schedule = ext.schedule();
let tier = instructions::get_tier_idx(info.tier); let tier = instructions::get_tier_idx(info.tier);
let default_gas = Gas::from(schedule.tier_step_gas[tier]); let default_gas = Gas::from(schedule.tier_step_gas[tier]);
@ -90,26 +121,42 @@ impl<Gas: CostType> Gasometer<Gas> {
instructions::SLOAD => { instructions::SLOAD => {
InstructionCost::Gas(Gas::from(schedule.sload_gas)) InstructionCost::Gas(Gas::from(schedule.sload_gas))
}, },
instructions::BALANCE => {
InstructionCost::Gas(Gas::from(schedule.balance_gas))
},
instructions::EXTCODESIZE => {
InstructionCost::Gas(Gas::from(schedule.extcodesize_gas))
},
instructions::SUICIDE => {
let mut gas = Gas::from(schedule.suicide_gas);
let address = u256_to_address(stack.peek(0));
if !ext.exists(&address) {
gas = overflowing!(gas.overflow_add(schedule.suicide_to_new_account_cost.into()));
}
InstructionCost::Gas(gas)
},
instructions::MSTORE | instructions::MLOAD => { instructions::MSTORE | instructions::MLOAD => {
InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 32))) InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 32)), None)
}, },
instructions::MSTORE8 => { instructions::MSTORE8 => {
InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 1))) InstructionCost::GasMem(default_gas, try!(mem_needed_const(stack.peek(0), 1)), None)
}, },
instructions::RETURN => { instructions::RETURN => {
InstructionCost::GasMem(default_gas, try!(mem_needed(stack.peek(0), stack.peek(1)))) InstructionCost::GasMem(default_gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None)
}, },
instructions::SHA3 => { instructions::SHA3 => {
let w = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(1))), 31)); let w = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(1))), 31));
let words = w >> 5; let words = w >> 5;
let gas = Gas::from(schedule.sha3_gas) + (Gas::from(schedule.sha3_word_gas) * words); let gas = Gas::from(schedule.sha3_gas) + (Gas::from(schedule.sha3_word_gas) * words);
InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1)))) InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None)
}, },
instructions::CALLDATACOPY | instructions::CODECOPY => { instructions::CALLDATACOPY | instructions::CODECOPY => {
InstructionCost::GasMemCopy(default_gas, try!(mem_needed(stack.peek(0), stack.peek(2))), try!(Gas::from_u256(*stack.peek(2)))) InstructionCost::GasMemCopy(default_gas, try!(mem_needed(stack.peek(0), stack.peek(2))), try!(Gas::from_u256(*stack.peek(2))))
}, },
instructions::EXTCODECOPY => { instructions::EXTCODECOPY => {
InstructionCost::GasMemCopy(default_gas, try!(mem_needed(stack.peek(1), stack.peek(3))), try!(Gas::from_u256(*stack.peek(3)))) InstructionCost::GasMemCopy(schedule.extcodecopy_base_gas.into(), try!(mem_needed(stack.peek(1), stack.peek(3))), try!(Gas::from_u256(*stack.peek(3))))
}, },
instructions::LOG0...instructions::LOG4 => { instructions::LOG0...instructions::LOG4 => {
let no_of_topics = instructions::get_log_topics(instruction); let no_of_topics = instructions::get_log_topics(instruction);
@ -117,10 +164,10 @@ impl<Gas: CostType> Gasometer<Gas> {
let data_gas = overflowing!(try!(Gas::from_u256(*stack.peek(1))).overflow_mul(Gas::from(schedule.log_data_gas))); let data_gas = overflowing!(try!(Gas::from_u256(*stack.peek(1))).overflow_mul(Gas::from(schedule.log_data_gas)));
let gas = overflowing!(data_gas.overflow_add(Gas::from(log_gas))); let gas = overflowing!(data_gas.overflow_add(Gas::from(log_gas)));
InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1)))) InstructionCost::GasMem(gas, try!(mem_needed(stack.peek(0), stack.peek(1))), None)
}, },
instructions::CALL | instructions::CALLCODE => { instructions::CALL | instructions::CALLCODE => {
let mut gas = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(0))), schedule.call_gas)); let mut gas = Gas::from(schedule.call_gas);
let mem = cmp::max( let mem = cmp::max(
try!(mem_needed(stack.peek(5), stack.peek(6))), try!(mem_needed(stack.peek(5), stack.peek(6))),
try!(mem_needed(stack.peek(3), stack.peek(4))) try!(mem_needed(stack.peek(3), stack.peek(4)))
@ -129,27 +176,49 @@ impl<Gas: CostType> Gasometer<Gas> {
let address = u256_to_address(stack.peek(1)); let address = u256_to_address(stack.peek(1));
if instruction == instructions::CALL && !ext.exists(&address) { if instruction == instructions::CALL && !ext.exists(&address) {
gas = overflowing!(gas.overflow_add(Gas::from(schedule.call_new_account_gas))); gas = overflowing!(gas.overflow_add(schedule.call_new_account_gas.into()));
}; };
if !stack.peek(2).is_zero() { if !stack.peek(2).is_zero() {
gas = overflowing!(gas.overflow_add(Gas::from(schedule.call_value_transfer_gas))); gas = overflowing!(gas.overflow_add(schedule.call_value_transfer_gas.into()));
}; };
InstructionCost::GasMem(gas,mem) // TODO: refactor to avoid duplicate calculation here and later on.
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let requested = Gas::from_u256(*stack.peek(0));
let provided = try!(self.gas_provided(schedule, cost_so_far, Some(requested)));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
}, },
instructions::DELEGATECALL => { instructions::DELEGATECALL => {
let gas = overflowing!(add_gas_usize(try!(Gas::from_u256(*stack.peek(0))), schedule.call_gas)); let mut gas = Gas::from(schedule.call_gas);
let mem = cmp::max( let mem = cmp::max(
try!(mem_needed(stack.peek(4), stack.peek(5))), try!(mem_needed(stack.peek(4), stack.peek(5))),
try!(mem_needed(stack.peek(2), stack.peek(3))) try!(mem_needed(stack.peek(2), stack.peek(3)))
); );
InstructionCost::GasMem(gas, mem)
// TODO: refactor to avoid duplicate calculation here and later on.
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let requested = Gas::from_u256(*stack.peek(0));
let provided = try!(self.gas_provided(schedule, cost_so_far, Some(requested)));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
}, },
instructions::CREATE => { instructions::CREATE => {
let gas = Gas::from(schedule.create_gas); let mut gas = Gas::from(schedule.create_gas);
let mem = try!(mem_needed(stack.peek(1), stack.peek(2))); let mem = try!(mem_needed(stack.peek(1), stack.peek(2)));
InstructionCost::GasMem(gas, mem)
// TODO: refactor to avoid duplicate calculation here and later on.
let (mem_gas_cost, _, _) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem));
let cost_so_far = overflowing!(gas.overflow_add(mem_gas_cost.into()));
let provided = try!(self.gas_provided(schedule, cost_so_far, None));
gas = overflowing!(gas.overflow_add(provided));
InstructionCost::GasMem(gas, mem, Some(provided))
}, },
instructions::EXP => { instructions::EXP => {
let expon = stack.peek(1); let expon = stack.peek(1);
@ -157,17 +226,17 @@ impl<Gas: CostType> Gasometer<Gas> {
let gas = Gas::from(schedule.exp_gas + schedule.exp_byte_gas * bytes); let gas = Gas::from(schedule.exp_gas + schedule.exp_byte_gas * bytes);
InstructionCost::Gas(gas) InstructionCost::Gas(gas)
}, },
_ => InstructionCost::Gas(default_gas) _ => InstructionCost::Gas(default_gas),
}; };
match cost { match cost {
InstructionCost::Gas(gas) => { InstructionCost::Gas(gas) => {
Ok((gas, self.current_mem_gas, 0)) Ok((gas, self.current_mem_gas, 0, None))
}, },
InstructionCost::GasMem(gas, mem_size) => { InstructionCost::GasMem(gas, mem_size, provided) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size)); let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
let gas = overflowing!(gas.overflow_add(mem_gas_cost)); let gas = overflowing!(gas.overflow_add(mem_gas_cost));
Ok((gas, new_mem_gas, new_mem_size)) Ok((gas, new_mem_gas, new_mem_size, provided))
}, },
InstructionCost::GasMemCopy(gas, mem_size, copy) => { InstructionCost::GasMemCopy(gas, mem_size, copy) => {
let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size)); let (mem_gas_cost, new_mem_gas, new_mem_size) = try!(self.mem_gas_cost(schedule, current_mem_size, &mem_size));
@ -175,7 +244,7 @@ impl<Gas: CostType> Gasometer<Gas> {
let copy_gas = Gas::from(schedule.copy_gas) * copy; let copy_gas = Gas::from(schedule.copy_gas) * copy;
let gas = overflowing!(gas.overflow_add(copy_gas)); let gas = overflowing!(gas.overflow_add(copy_gas));
let gas = overflowing!(gas.overflow_add(mem_gas_cost)); let gas = overflowing!(gas.overflow_add(mem_gas_cost));
Ok((gas, new_mem_gas, new_mem_size)) Ok((gas, new_mem_gas, new_mem_size, None))
} }
} }
} }
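Editor's note: tying the CALL pricing above together, a rough standalone calculation of what `get_gas_cost_mem` charges for a value-bearing CALL to a not-yet-existing account, using the gas-fix prices defined later in this diff (700 / 9_000 / 25_000) and assuming the requested gas exceeds the cap and memory expansion is free:
fn main() {
    let current_gas = 200_000u64;
    let base = 700;             // schedule.call_gas
    let value_transfer = 9_000; // schedule.call_value_transfer_gas
    let new_account = 25_000;   // schedule.call_new_account_gas

    let needed = base + value_transfer + new_account; // 34_700 charged to the caller up front
    let remaining = current_gas - needed;             // 165_300
    let provided = remaining - remaining / 64;        // 162_718 forwarded to the callee
    let charged = needed + provided;                  // reported as the instruction's `gas`
    assert_eq!(charged, 197_418);
}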

View File

@ -0,0 +1,164 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
pub use self::inner::*;
#[macro_use]
#[cfg(not(feature = "evm-debug"))]
mod inner {
macro_rules! evm_debug {
($x: expr) => {}
}
pub struct EvmInformant;
impl EvmInformant {
pub fn new(_depth: usize) -> Self {
EvmInformant {}
}
pub fn done(&mut self) {}
}
}
#[macro_use]
#[cfg(feature = "evm-debug")]
mod inner {
use std::iter;
use std::collections::HashMap;
use std::time::{Instant, Duration};
use evm::interpreter::stack::Stack;
use evm::instructions::{Instruction, InstructionInfo, INSTRUCTIONS};
use evm::{CostType};
use util::U256;
macro_rules! evm_debug {
($x: expr) => {
$x
}
}
fn print(data: String) {
if cfg!(feature = "evm-debug-tests") {
println!("{}", data);
} else {
debug!(target: "evm", "{}", data);
}
}
pub struct EvmInformant {
spacing: String,
last_instruction: Instant,
stats: HashMap<Instruction, Stats>,
}
impl EvmInformant {
fn color(instruction: Instruction, name: &str) -> String {
let c = instruction as usize % 6;
let colors = [31, 34, 33, 32, 35, 36];
format!("\x1B[1;{}m{}\x1B[0m", colors[c], name)
}
fn as_micro(duration: &Duration) -> u64 {
let mut sec = duration.as_secs();
let subsec = duration.subsec_nanos() as u64;
sec = sec.saturating_mul(1_000_000u64);
sec += subsec / 1_000;
sec
}
pub fn new(depth: usize) -> Self {
EvmInformant {
spacing: iter::repeat(".").take(depth).collect(),
last_instruction: Instant::now(),
stats: HashMap::new(),
}
}
pub fn before_instruction<Cost: CostType>(&mut self, pc: usize, instruction: Instruction, info: &InstructionInfo, current_gas: &Cost, stack: &Stack<U256>) {
let time = self.last_instruction.elapsed();
self.last_instruction = Instant::now();
print(format!("{}[0x{:<3x}][{:>19}(0x{:<2x}) Gas Left: {:6?} (Previous took: {:10}μs)",
&self.spacing,
pc,
Self::color(instruction, info.name),
instruction,
current_gas,
Self::as_micro(&time),
));
if info.args > 0 {
for (idx, item) in stack.peek_top(info.args).iter().enumerate() {
print(format!("{} |{:2}: {:?}", self.spacing, idx, item));
}
}
}
pub fn after_instruction(&mut self, instruction: Instruction) {
let mut stats = self.stats.entry(instruction).or_insert_with(|| Stats::default());
let took = self.last_instruction.elapsed();
stats.note(took);
}
pub fn done(&mut self) {
// Print out stats
let infos = &*INSTRUCTIONS;
let mut stats: Vec<(_,_)> = self.stats.drain().collect();
stats.sort_by(|ref a, ref b| b.1.avg().cmp(&a.1.avg()));
print(format!("\n{}-------OPCODE STATS:", self.spacing));
for (instruction, stats) in stats.into_iter() {
let info = infos[instruction as usize];
print(format!("{}-------{:>19}(0x{:<2x}) count: {:4}, avg: {:10}μs",
self.spacing,
Self::color(instruction, info.name),
instruction,
stats.count,
stats.avg(),
));
}
}
}
struct Stats {
count: u64,
total_duration: Duration,
}
impl Default for Stats {
fn default() -> Self {
Stats {
count: 0,
total_duration: Duration::from_secs(0),
}
}
}
impl Stats {
fn note(&mut self, took: Duration) {
self.count += 1;
self.total_duration += took;
}
fn avg(&self) -> u64 {
EvmInformant::as_micro(&self.total_duration) / self.count
}
}
}

View File

@ -16,39 +16,26 @@
//! Rust VM implementation //! Rust VM implementation
#[cfg(not(feature = "evm-debug"))] #[macro_use]
macro_rules! evm_debug { mod informant;
($x: expr) => {}
}
#[cfg(feature = "evm-debug")]
macro_rules! evm_debug {
($x: expr) => {
$x
}
}
mod gasometer; mod gasometer;
mod stack; mod stack;
mod memory; mod memory;
mod shared_cache;
use self::gasometer::Gasometer; use self::gasometer::Gasometer;
use self::stack::{Stack, VecStack}; use self::stack::{Stack, VecStack};
use self::memory::Memory; use self::memory::Memory;
pub use self::shared_cache::SharedCache;
use std::marker::PhantomData; use std::marker::PhantomData;
use common::*; use action_params::{ActionParams, ActionValue};
use types::executed::CallType; use types::executed::CallType;
use super::instructions::{self, Instruction, InstructionInfo}; use evm::instructions::{self, Instruction, InstructionInfo};
use evm::{self, MessageCallResult, ContractCreateResult, GasLeft, CostType}; use evm::{self, MessageCallResult, ContractCreateResult, GasLeft, CostType};
use bit_set::BitSet; use bit_set::BitSet;
#[cfg(feature = "evm-debug")] use util::*;
fn color(instruction: Instruction, name: &'static str) -> String {
let c = instruction as usize % 6;
let colors = [31, 34, 33, 32, 35, 36];
format!("\x1B[1;{}m{}\x1B[0m", colors[c], name)
}
type CodePosition = usize; type CodePosition = usize;
type ProgramCounter = usize; type ProgramCounter = usize;
@ -72,6 +59,15 @@ struct CodeReader<'a> {
#[cfg_attr(feature="dev", allow(len_without_is_empty))] #[cfg_attr(feature="dev", allow(len_without_is_empty))]
impl<'a> CodeReader<'a> { impl<'a> CodeReader<'a> {
/// Create new code reader - starting at position 0.
fn new(code: &'a Bytes) -> Self {
CodeReader {
position: 0,
code: code,
}
}
/// Get `no_of_bytes` from code and convert to U256. Move PC /// Get `no_of_bytes` from code and convert to U256. Move PC
fn read(&mut self, no_of_bytes: usize) -> U256 { fn read(&mut self, no_of_bytes: usize) -> U256 {
let pos = self.position; let pos = self.position;
@ -87,8 +83,6 @@ impl<'a> CodeReader<'a> {
enum InstructionResult<Gas> { enum InstructionResult<Gas> {
Ok, Ok,
UseAllGas,
GasLeft(Gas),
UnusedGas(Gas), UnusedGas(Gas),
JumpToPosition(U256), JumpToPosition(U256),
// gas left, init_off, init_size // gas left, init_off, init_size
@ -98,9 +92,9 @@ enum InstructionResult<Gas> {
/// Interpreter EVM implementation /// Interpreter EVM implementation
#[derive(Default)]
pub struct Interpreter<Cost: CostType> { pub struct Interpreter<Cost: CostType> {
mem: Vec<u8>, mem: Vec<u8>,
cache: Arc<SharedCache>,
_type: PhantomData<Cost>, _type: PhantomData<Cost>,
} }
@ -108,26 +102,25 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
fn exec(&mut self, params: ActionParams, ext: &mut evm::Ext) -> evm::Result<GasLeft> { fn exec(&mut self, params: ActionParams, ext: &mut evm::Ext) -> evm::Result<GasLeft> {
self.mem.clear(); self.mem.clear();
let code = &params.code.as_ref().unwrap(); let mut informant = informant::EvmInformant::new(ext.depth());
let valid_jump_destinations = self.find_jump_destinations(code);
let code = &params.code.as_ref().expect("exec always called with code; qed");
let valid_jump_destinations = self.cache.jump_destinations(&params.code_hash, code);
let mut gasometer = Gasometer::<Cost>::new(try!(Cost::from_u256(params.gas))); let mut gasometer = Gasometer::<Cost>::new(try!(Cost::from_u256(params.gas)));
let mut stack = VecStack::with_capacity(ext.schedule().stack_limit, U256::zero()); let mut stack = VecStack::with_capacity(ext.schedule().stack_limit, U256::zero());
let mut reader = CodeReader { let mut reader = CodeReader::new(code);
position: 0,
code: code
};
let infos = &*instructions::INSTRUCTIONS; let infos = &*instructions::INSTRUCTIONS;
while reader.position < code.len() { while reader.position < code.len() {
let instruction = code[reader.position]; let instruction = code[reader.position];
reader.position += 1; reader.position += 1;
let info = infos[instruction as usize]; let info = &infos[instruction as usize];
try!(self.verify_instruction(ext, instruction, &info, &stack)); try!(self.verify_instruction(ext, instruction, info, &stack));
// Calculate gas cost // Calculate gas cost
let (gas_cost, mem_gas, mem_size) = try!(gasometer.get_gas_cost_mem(ext, instruction, &info, &stack, self.mem.size())); let (gas_cost, mem_gas, mem_size, provided) = try!(gasometer.get_gas_cost_mem(ext, instruction, info, &stack, self.mem.size()));
// TODO: make compile-time removable if too much of a performance hit. // TODO: make compile-time removable if too much of a performance hit.
let trace_executed = ext.trace_prepare_execute(reader.position - 1, instruction, &gas_cost.as_u256()); let trace_executed = ext.trace_prepare_execute(reader.position - 1, instruction, &gas_cost.as_u256());
@ -136,15 +129,7 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
gasometer.current_mem_gas = mem_gas; gasometer.current_mem_gas = mem_gas;
gasometer.current_gas = gasometer.current_gas - gas_cost; gasometer.current_gas = gasometer.current_gas - gas_cost;
evm_debug!({ evm_debug!({ informant.before_instruction(reader.position, instruction, info, &gasometer.current_gas, &stack) });
println!("[0x{:x}][{}(0x{:x}) Gas: {:?}\n Gas Before: {:?}",
reader.position,
color(instruction, info.name),
instruction,
gas_cost,
gasometer.current_gas + gas_cost
);
});
let (mem_written, store_written) = match trace_executed { let (mem_written, store_written) = match trace_executed {
true => (Self::mem_written(instruction, &stack), Self::store_written(instruction, &stack)), true => (Self::mem_written(instruction, &stack), Self::store_written(instruction, &stack)),
@ -153,41 +138,47 @@ impl<Cost: CostType> evm::Evm for Interpreter<Cost> {
// Execute instruction // Execute instruction
let result = try!(self.exec_instruction( let result = try!(self.exec_instruction(
gasometer.current_gas, &params, ext, instruction, &mut reader, &mut stack gasometer.current_gas, &params, ext, instruction, &mut reader, &mut stack, provided
)); ));
evm_debug!({ informant.after_instruction(instruction) });
if let InstructionResult::UnusedGas(ref gas) = result {
gasometer.current_gas = gasometer.current_gas + *gas;
}
if trace_executed { if trace_executed {
ext.trace_executed(gasometer.current_gas.as_u256(), stack.peek_top(info.ret), mem_written.map(|(o, s)| (o, &(self.mem[o..(o + s)]))), store_written); ext.trace_executed(gasometer.current_gas.as_u256(), stack.peek_top(info.ret), mem_written.map(|(o, s)| (o, &(self.mem[o..(o + s)]))), store_written);
} }
// Advance // Advance
match result { match result {
InstructionResult::Ok => {},
InstructionResult::UnusedGas(gas) => {
gasometer.current_gas = gasometer.current_gas + gas;
},
InstructionResult::UseAllGas => {
gasometer.current_gas = Cost::from(0);
},
InstructionResult::GasLeft(gas_left) => {
gasometer.current_gas = gas_left;
},
InstructionResult::JumpToPosition(position) => { InstructionResult::JumpToPosition(position) => {
let pos = try!(self.verify_jump(position, &valid_jump_destinations)); let pos = try!(self.verify_jump(position, &valid_jump_destinations));
reader.position = pos; reader.position = pos;
}, },
InstructionResult::StopExecutionNeedsReturn(gas, off, size) => { InstructionResult::StopExecutionNeedsReturn(gas, off, size) => {
informant.done();
return Ok(GasLeft::NeedsReturn(gas.as_u256(), self.mem.read_slice(off, size))); return Ok(GasLeft::NeedsReturn(gas.as_u256(), self.mem.read_slice(off, size)));
}, },
InstructionResult::StopExecution => break, InstructionResult::StopExecution => break,
_ => {},
} }
} }
informant.done();
Ok(GasLeft::Known(gasometer.current_gas.as_u256())) Ok(GasLeft::Known(gasometer.current_gas.as_u256()))
} }
} }
impl<Cost: CostType> Interpreter<Cost> { impl<Cost: CostType> Interpreter<Cost> {
/// Create a new `Interpreter` instance with shared cache.
pub fn new(cache: Arc<SharedCache>) -> Interpreter<Cost> {
Interpreter {
mem: Vec::new(),
cache: cache,
_type: PhantomData::default(),
}
}
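Editor's note: a sketch of direct construction, mirroring what the unit tests later in this diff do (crate-internal types assumed in scope):
// Sketch: one SharedCache handle (4 MiB budget by default) backing many interpreters.
fn build_vm(cache: &Arc<SharedCache>) -> Box<evm::Evm> {
    // cloning the Arc is cheap; all interpreters built from it share the same jump-destination LRU
    Box::new(Interpreter::<usize>::new(cache.clone()))
}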
fn verify_instruction(&self, ext: &evm::Ext, instruction: Instruction, info: &InstructionInfo, stack: &Stack<U256>) -> evm::Result<()> { fn verify_instruction(&self, ext: &evm::Ext, instruction: Instruction, info: &InstructionInfo, stack: &Stack<U256>) -> evm::Result<()> {
let schedule = ext.schedule(); let schedule = ext.schedule();
@ -254,7 +245,8 @@ impl<Cost: CostType> Interpreter<Cost> {
ext: &mut evm::Ext, ext: &mut evm::Ext,
instruction: Instruction, instruction: Instruction,
code: &mut CodeReader, code: &mut CodeReader,
stack: &mut Stack<U256> stack: &mut Stack<U256>,
provided: Option<Cost>
) -> evm::Result<InstructionResult<Cost>> { ) -> evm::Result<InstructionResult<Cost>> {
match instruction { match instruction {
instructions::JUMP => { instructions::JUMP => {
@ -279,31 +271,32 @@ impl<Cost: CostType> Interpreter<Cost> {
let endowment = stack.pop_back(); let endowment = stack.pop_back();
let init_off = stack.pop_back(); let init_off = stack.pop_back();
let init_size = stack.pop_back(); let init_size = stack.pop_back();
let create_gas = provided.expect("`provided` comes through Self::exec from `Gasometer::get_gas_cost_mem`; `get_gas_cost_mem` guarantees `Some` when instruction is `CALL`/`CALLCODE`/`DELEGATECALL`/`CREATE`; this is `CREATE`; qed");
let contract_code = self.mem.read_slice(init_off, init_size); let contract_code = self.mem.read_slice(init_off, init_size);
let can_create = ext.balance(&params.address) >= endowment && ext.depth() < ext.schedule().max_depth; let can_create = ext.balance(&params.address) >= endowment && ext.depth() < ext.schedule().max_depth;
if !can_create { if !can_create {
stack.push(U256::zero()); stack.push(U256::zero());
return Ok(InstructionResult::Ok); return Ok(InstructionResult::UnusedGas(create_gas));
} }
let create_result = ext.create(&gas.as_u256(), &endowment, contract_code); let create_result = ext.create(&create_gas.as_u256(), &endowment, contract_code);
return match create_result { return match create_result {
ContractCreateResult::Created(address, gas_left) => { ContractCreateResult::Created(address, gas_left) => {
stack.push(address_to_u256(address)); stack.push(address_to_u256(address));
Ok(InstructionResult::GasLeft(Cost::from_u256(gas_left).expect("Gas left cannot be greater."))) Ok(InstructionResult::UnusedGas(Cost::from_u256(gas_left).expect("Gas left cannot be greater.")))
}, },
ContractCreateResult::Failed => { ContractCreateResult::Failed => {
stack.push(U256::zero()); stack.push(U256::zero());
// TODO [todr] Should we just StopExecution here? Ok(InstructionResult::Ok)
Ok(InstructionResult::UseAllGas)
} }
}; };
}, },
instructions::CALL | instructions::CALLCODE | instructions::DELEGATECALL => { instructions::CALL | instructions::CALLCODE | instructions::DELEGATECALL => {
assert!(ext.schedule().call_value_transfer_gas > ext.schedule().call_stipend, "overflow possible"); assert!(ext.schedule().call_value_transfer_gas > ext.schedule().call_stipend, "overflow possible");
let call_gas = Cost::from_u256(stack.pop_back()).expect("Gas is already validated."); stack.pop_back();
let call_gas = provided.expect("`provided` comes through Self::exec from `Gasometer::get_gas_cost_mem`; `get_gas_cost_mem` guarantees `Some` when instruction is `CALL`/`CALLCODE`/`DELEGATECALL`/`CREATE`; this is one of `CALL`/`CALLCODE`/`DELEGATECALL`; qed");
let code_address = stack.pop_back(); let code_address = stack.pop_back();
let code_address = u256_to_address(&code_address); let code_address = u256_to_address(&code_address);
@ -321,17 +314,17 @@ impl<Cost: CostType> Interpreter<Cost> {
// Add stipend (only CALL|CALLCODE when value > 0) // Add stipend (only CALL|CALLCODE when value > 0)
let call_gas = call_gas + value.map_or_else(|| Cost::from(0), |val| match val.is_zero() { let call_gas = call_gas + value.map_or_else(|| Cost::from(0), |val| match val.is_zero() {
false => Cost::from(ext.schedule().call_stipend), false => Cost::from(ext.schedule().call_stipend),
true => Cost::from(0) true => Cost::from(0),
}); });
// Get sender & receive addresses, check if we have balance // Get sender & receive addresses, check if we have balance
let (sender_address, receive_address, has_balance, call_type) = match instruction { let (sender_address, receive_address, has_balance, call_type) = match instruction {
instructions::CALL => { instructions::CALL => {
let has_balance = ext.balance(&params.address) >= value.unwrap(); let has_balance = ext.balance(&params.address) >= value.expect("value set for all but delegate call; qed");
(&params.address, &code_address, has_balance, CallType::Call) (&params.address, &code_address, has_balance, CallType::Call)
}, },
instructions::CALLCODE => { instructions::CALLCODE => {
let has_balance = ext.balance(&params.address) >= value.unwrap(); let has_balance = ext.balance(&params.address) >= value.expect("value set for all but delegate call; qed");
(&params.address, &params.address, has_balance, CallType::CallCode) (&params.address, &params.address, has_balance, CallType::CallCode)
}, },
instructions::DELEGATECALL => (&params.sender, &params.address, true, CallType::DelegateCall), instructions::DELEGATECALL => (&params.sender, &params.address, true, CallType::DelegateCall),
@ -486,10 +479,10 @@ impl<Cost: CostType> Interpreter<Cost> {
stack.push(U256::from(len)); stack.push(U256::from(len));
}, },
instructions::CALLDATACOPY => { instructions::CALLDATACOPY => {
self.copy_data_to_memory(stack, &params.data.clone().unwrap_or_else(|| vec![])); self.copy_data_to_memory(stack, params.data.as_ref().map_or_else(|| &[] as &[u8], |d| &*d as &[u8]));
}, },
instructions::CODECOPY => { instructions::CODECOPY => {
self.copy_data_to_memory(stack, &params.code.clone().unwrap_or_else(|| vec![])); self.copy_data_to_memory(stack, params.code.as_ref().map_or_else(|| &[] as &[u8], |c| &**c as &[u8]));
}, },
instructions::EXTCODECOPY => { instructions::EXTCODECOPY => {
let address = u256_to_address(&stack.pop_back()); let address = u256_to_address(&stack.pop_back());
@ -790,23 +783,6 @@ impl<Cost: CostType> Interpreter<Cost> {
Ok(()) Ok(())
} }
fn find_jump_destinations(&self, code: &[u8]) -> BitSet {
let mut jump_dests = BitSet::with_capacity(code.len());
let mut position = 0;
while position < code.len() {
let instruction = code[position];
if instruction == instructions::JUMPDEST {
jump_dests.insert(position);
} else if instructions::is_push(instruction) {
position += instructions::get_push_bytes(instruction);
}
position += 1;
}
jump_dests
}
} }
fn get_and_reset_sign(value: U256) -> (U256, bool) { fn get_and_reset_sign(value: U256) -> (U256, bool) {
@ -833,15 +809,3 @@ fn address_to_u256(value: Address) -> U256 {
U256::from(&*H256::from(value)) U256::from(&*H256::from(value))
} }
#[test]
fn test_find_jump_destinations() {
// given
let interpreter = Interpreter::<U256>::default();
let code = "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff5b01600055".from_hex().unwrap();
// when
let valid_jump_destinations = interpreter.find_jump_destinations(&code);
// then
assert!(valid_jump_destinations.contains(66));
}

View File

@ -0,0 +1,125 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use lru_cache::LruCache;
use util::{H256, Mutex};
use util::sha3::*;
use bit_set::BitSet;
use super::super::instructions;
const INITIAL_CAPACITY: usize = 32;
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
/// Global cache for EVM interpreter
pub struct SharedCache {
jump_destinations: Mutex<LruCache<H256, Arc<BitSet>>>,
max_size: usize,
cur_size: Mutex<usize>,
}
impl SharedCache {
/// Create a jump destinations cache with a maximum size in bytes
/// of cached bitmaps.
pub fn new(max_size: usize) -> Self {
SharedCache {
jump_destinations: Mutex::new(LruCache::new(INITIAL_CAPACITY)),
max_size: max_size * 8, // dealing with bits here.
cur_size: Mutex::new(0),
}
}
/// Get jump destinations bitmap for a contract.
pub fn jump_destinations(&self, code_hash: &H256, code: &[u8]) -> Arc<BitSet> {
if code_hash == &SHA3_EMPTY {
return Self::find_jump_destinations(code);
}
if let Some(d) = self.jump_destinations.lock().get_mut(code_hash) {
return d.clone();
}
let d = Self::find_jump_destinations(code);
{
let mut cur_size = self.cur_size.lock();
*cur_size += d.capacity();
let mut jump_dests = self.jump_destinations.lock();
let cap = jump_dests.capacity();
// grow the cache as necessary; it operates on the number of items
// but we're working based on memory usage.
if jump_dests.len() == cap && *cur_size < self.max_size {
jump_dests.set_capacity(cap * 2);
}
// account for any element displaced from the cache.
if let Some(lru) = jump_dests.insert(code_hash.clone(), d.clone()) {
*cur_size -= lru.capacity();
}
// remove elements until we are below the memory target.
while *cur_size > self.max_size {
match jump_dests.remove_lru() {
Some((_, v)) => *cur_size -= v.capacity(),
_ => break,
}
}
}
d
}
fn find_jump_destinations(code: &[u8]) -> Arc<BitSet> {
let mut jump_dests = BitSet::with_capacity(code.len());
let mut position = 0;
while position < code.len() {
let instruction = code[position];
if instruction == instructions::JUMPDEST {
jump_dests.insert(position);
} else if instructions::is_push(instruction) {
position += instructions::get_push_bytes(instruction);
}
position += 1;
}
jump_dests.shrink_to_fit();
Arc::new(jump_dests)
}
}
impl Default for SharedCache {
fn default() -> Self {
SharedCache::new(DEFAULT_CACHE_SIZE)
}
}
#[test]
fn test_find_jump_destinations() {
use util::FromHex;
// given
let code = "7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff5b01600055".from_hex().unwrap();
// when
let valid_jump_destinations = SharedCache::find_jump_destinations(&code);
// then
assert!(valid_jump_destinations.contains(66));
}
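Editor's note: a crate-internal usage sketch of the cache itself. The hashing call mirrors `t.data.sha3()` used elsewhere in this diff, and `use util::sha3::*;` is already imported at the top of this file; a repeated lookup with the same hash is answered from the LRU without re-scanning the code.
#[test]
fn shared_cache_usage() {
    let cache = SharedCache::new(1024 * 32);          // budget of 32 KiB worth of bitmaps
    let code = vec![0x60, 0x01, 0x5b, 0x00];          // PUSH1 0x01, JUMPDEST, STOP
    let hash = code.sha3();                           // the code_hash carried in ActionParams
    let dests = cache.jump_destinations(&hash, &code);
    assert!(dests.contains(2));                       // the JUMPDEST sits at offset 2
    // second call with the same hash is a cache hit
    assert!(cache.jump_destinations(&hash, &code).contains(2));
}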

View File

@ -34,7 +34,7 @@ pub trait Stack<T> {
/// Get number of elements on Stack /// Get number of elements on Stack
fn size(&self) -> usize; fn size(&self) -> usize;
/// Returns all data on stack. /// Returns all data on stack.
fn peek_top(&mut self, no_of_elems: usize) -> &[T]; fn peek_top(&self, no_of_elems: usize) -> &[T];
} }
pub struct VecStack<S> { pub struct VecStack<S> {
@ -68,12 +68,7 @@ impl<S : fmt::Display> Stack<S> for VecStack<S> {
fn pop_back(&mut self) -> S { fn pop_back(&mut self) -> S {
let val = self.stack.pop(); let val = self.stack.pop();
match val { match val {
Some(x) => { Some(x) => x,
evm_debug!({
println!(" POP: {}", x)
});
x
},
None => panic!("Tried to pop from empty stack.") None => panic!("Tried to pop from empty stack.")
} }
} }
@ -88,9 +83,6 @@ impl<S : fmt::Display> Stack<S> for VecStack<S> {
} }
fn push(&mut self, elem: S) { fn push(&mut self, elem: S) {
evm_debug!({
println!(" PUSH: {}", elem)
});
self.stack.push(elem); self.stack.push(elem);
} }
@ -98,7 +90,7 @@ impl<S : fmt::Display> Stack<S> for VecStack<S> {
self.stack.len() self.stack.len()
} }
fn peek_top(&mut self, no_from_top: usize) -> &[S] { fn peek_top(&self, no_from_top: usize) -> &[S] {
assert!(self.stack.len() >= no_from_top, "peek_top asked for more items than exist."); assert!(self.stack.len() >= no_from_top, "peek_top asked for more items than exist.");
&self.stack[self.stack.len() - no_from_top .. self.stack.len()] &self.stack[self.stack.len() - no_from_top .. self.stack.len()]
} }

View File

@ -15,7 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Just in time compiler execution environment. //! Just in time compiler execution environment.
use common::*; use util::*;
use evmjit; use evmjit;
use evm::{self, GasLeft}; use evm::{self, GasLeft};
use types::executed::CallType; use types::executed::CallType;
@ -196,6 +196,7 @@ impl<'a> evmjit::Ext for ExtAdapter<'a> {
receive_address: *const evmjit::H256, receive_address: *const evmjit::H256,
code_address: *const evmjit::H256, code_address: *const evmjit::H256,
transfer_value: *const evmjit::I256, transfer_value: *const evmjit::I256,
// We are ignoring apparent value - it's handled in externalities.
_apparent_value: *const evmjit::I256, _apparent_value: *const evmjit::I256,
in_beg: *const u8, in_beg: *const u8,
in_size: u64, in_size: u64,
@ -208,12 +209,13 @@ impl<'a> evmjit::Ext for ExtAdapter<'a> {
let sender_address = unsafe { Address::from_jit(&*sender_address) }; let sender_address = unsafe { Address::from_jit(&*sender_address) };
let receive_address = unsafe { Address::from_jit(&*receive_address) }; let receive_address = unsafe { Address::from_jit(&*receive_address) };
let code_address = unsafe { Address::from_jit(&*code_address) }; let code_address = unsafe { Address::from_jit(&*code_address) };
// TODO Is it always safe in case of DELEGATE_CALL?
let transfer_value = unsafe { U256::from_jit(&*transfer_value) }; let transfer_value = unsafe { U256::from_jit(&*transfer_value) };
let value = Some(transfer_value);
// receive address and code address are the same in normal calls // receive address and code address are the same in normal calls
let is_callcode = receive_address != code_address; let is_callcode = receive_address != code_address;
let is_delegatecall = is_callcode && sender_address != receive_address;
let value = if is_delegatecall { None } else { Some(transfer_value) };
if !is_callcode && !self.ext.exists(&code_address) { if !is_callcode && !self.ext.exists(&code_address) {
gas_cost = gas_cost + U256::from(self.ext.schedule().call_new_account_gas); gas_cost = gas_cost + U256::from(self.ext.schedule().call_new_account_gas);
@ -242,10 +244,10 @@ impl<'a> evmjit::Ext for ExtAdapter<'a> {
} }
} }
// TODO [ToDr] Any way to detect DelegateCall? let call_type = match (is_callcode, is_delegatecall) {
let call_type = match is_callcode { (_, true) => CallType::DelegateCall,
true => CallType::CallCode, (true, false) => CallType::CallCode,
false => CallType::Call, (false, false) => CallType::Call,
}; };
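Editor's note: the DELEGATECALL detection above relies only on the three addresses the JIT callback receives. A minimal standalone sketch of the same classification, with plain integers standing in for addresses:
#[derive(Debug, PartialEq)]
enum Kind { Call, CallCode, DelegateCall }

fn classify(sender: u64, receive: u64, code: u64) -> Kind {
    let is_callcode = receive != code;                        // executed code differs from the receiving account
    let is_delegatecall = is_callcode && sender != receive;   // and the sender is not the receiver either
    match (is_callcode, is_delegatecall) {
        (_, true) => Kind::DelegateCall,
        (true, false) => Kind::CallCode,
        (false, false) => Kind::Call,
    }
}

fn main() {
    assert_eq!(classify(1, 2, 2), Kind::Call);         // receive == code
    assert_eq!(classify(1, 1, 3), Kind::CallCode);     // sender == receive, foreign code
    assert_eq!(classify(1, 2, 3), Kind::DelegateCall); // all three differ
}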
match self.ext.call( match self.ext.call(
@ -357,7 +359,7 @@ impl evm::Evm for JitEvm {
data.timestamp = ext.env_info().timestamp as i64; data.timestamp = ext.env_info().timestamp as i64;
self.context = Some(unsafe { evmjit::ContextHandle::new(data, schedule, &mut ext_handle) }); self.context = Some(unsafe { evmjit::ContextHandle::new(data, schedule, &mut ext_handle) });
let mut context = self.context.as_mut().unwrap(); let mut context = self.context.as_mut().expect("context handle set on the prior line; qed");
let res = context.exec(); let res = context.exec();
match res { match res {

View File

@ -80,6 +80,19 @@ pub struct Schedule {
pub tx_data_non_zero_gas: usize, pub tx_data_non_zero_gas: usize,
/// Gas price for copying memory /// Gas price for copying memory
pub copy_gas: usize, pub copy_gas: usize,
/// Price of EXTCODESIZE
pub extcodesize_gas: usize,
/// Base price of EXTCODECOPY
pub extcodecopy_base_gas: usize,
/// Price of BALANCE
pub balance_gas: usize,
/// Price of SUICIDE
pub suicide_gas: usize,
/// Amount of additional gas to pay when SUICIDE credits a non-existent account
pub suicide_to_new_account_cost: usize,
/// If Some(x): let limit = GAS * (x - 1) / x; let CALL's gas = min(requested, limit). let CREATE's gas = limit.
/// If None: let CALL's gas = (requested > GAS ? [OOG] : GAS). let CREATE's gas = GAS
pub sub_gas_cap_divisor: Option<usize>,
} }
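Editor's note, a worked example of the `sub_gas_cap_divisor` field documented above: with `Some(64)` and 128_000 gas remaining, the limit is 128_000 * 63 / 64 = 126_000, so a CALL requesting 200_000 is clamped to 126_000 while a CREATE always forwards exactly 126_000; with `None`, a CALL requesting more than the remaining gas simply runs out of gas.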
impl Schedule { impl Schedule {
@ -93,6 +106,49 @@ impl Schedule {
Self::new(true, true, 53000) Self::new(true, true, 53000)
} }
/// Schedule for the Homestead era of the Ethereum main net, with the EIP-150 gas-cost changes applied.
pub fn new_homestead_gas_fix() -> Schedule {
Schedule{
exceptional_failed_code_deposit: true,
have_delegate_call: true,
stack_limit: 1024,
max_depth: 1024,
tier_step_gas: [0, 2, 3, 5, 8, 10, 20, 0],
exp_gas: 10,
exp_byte_gas: 10,
sha3_gas: 30,
sha3_word_gas: 6,
sload_gas: 200,
sstore_set_gas: 20000,
sstore_reset_gas: 5000,
sstore_refund_gas: 15000,
jumpdest_gas: 1,
log_gas: 375,
log_data_gas: 8,
log_topic_gas: 375,
create_gas: 32000,
call_gas: 700,
call_stipend: 2300,
call_value_transfer_gas: 9000,
call_new_account_gas: 25000,
suicide_refund_gas: 24000,
memory_gas: 3,
quad_coeff_div: 512,
create_data_gas: 200,
tx_gas: 21000,
tx_create_gas: 53000,
tx_data_zero_gas: 4,
tx_data_non_zero_gas: 68,
copy_gas: 3,
extcodesize_gas: 700,
extcodecopy_base_gas: 700,
balance_gas: 400,
suicide_gas: 5000,
suicide_to_new_account_cost: 25000,
sub_gas_cap_divisor: Some(64),
}
}
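Editor's note: a tiny crate-internal sketch reading the new fields off the gas-fix schedule added above; the values are the ones listed in this diff.
#[test]
fn gas_fix_schedule_fields() {
    let s = Schedule::new_homestead_gas_fix();
    assert_eq!(s.extcodesize_gas, 700);          // repriced from 20
    assert_eq!(s.balance_gas, 400);              // repriced from 20
    assert_eq!(s.sub_gas_cap_divisor, Some(64)); // enables the "all but 1/64th" cap
}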
fn new(efcd: bool, hdc: bool, tcg: usize) -> Schedule { fn new(efcd: bool, hdc: bool, tcg: usize) -> Schedule {
Schedule{ Schedule{
exceptional_failed_code_deposit: efcd, exceptional_failed_code_deposit: efcd,
@ -126,6 +182,12 @@ impl Schedule {
tx_data_zero_gas: 4, tx_data_zero_gas: 4,
tx_data_non_zero_gas: 68, tx_data_non_zero_gas: 68,
copy_gas: 3, copy_gas: 3,
extcodesize_gas: 20,
extcodecopy_base_gas: 20,
balance_gas: 20,
suicide_gas: 0,
suicide_to_new_account_cost: 0,
sub_gas_cap_divisor: None,
} }
} }
} }

View File

@ -14,7 +14,9 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use common::*; use util::*;
use action_params::{ActionParams, ActionValue};
use env_info::EnvInfo;
use types::executed::CallType; use types::executed::CallType;
use evm::{self, Ext, Schedule, Factory, GasLeft, VMType, ContractCreateResult, MessageCallResult}; use evm::{self, Ext, Schedule, Factory, GasLeft, VMType, ContractCreateResult, MessageCallResult};
use std::fmt::Debug; use std::fmt::Debug;
@ -49,7 +51,7 @@ pub struct FakeExt {
depth: usize, depth: usize,
store: HashMap<H256, H256>, store: HashMap<H256, H256>,
blockhashes: HashMap<U256, H256>, blockhashes: HashMap<U256, H256>,
codes: HashMap<Address, Bytes>, codes: HashMap<Address, Arc<Bytes>>,
logs: Vec<FakeLogEntry>, logs: Vec<FakeLogEntry>,
_suicides: HashSet<Address>, _suicides: HashSet<Address>,
info: EnvInfo, info: EnvInfo,
@ -136,12 +138,12 @@ impl Ext for FakeExt {
MessageCallResult::Success(*gas) MessageCallResult::Success(*gas)
} }
fn extcode(&self, address: &Address) -> Bytes { fn extcode(&self, address: &Address) -> Arc<Bytes> {
self.codes.get(address).unwrap_or(&Bytes::new()).clone() self.codes.get(address).unwrap_or(&Arc::new(Bytes::new())).clone()
} }
fn extcodesize(&self, address: &Address) -> usize { fn extcodesize(&self, address: &Address) -> usize {
self.codes.get(address).map(|v| v.len()).unwrap_or(0) self.codes.get(address).map_or(0, |c| c.len())
} }
fn log(&mut self, topics: Vec<H256>, data: &[u8]) { fn log(&mut self, topics: Vec<H256>, data: &[u8]) {
@ -184,11 +186,11 @@ fn test_stack_underflow() {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.address = address.clone(); params.address = address.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let err = { let err = {
let mut vm : Box<evm::Evm> = Box::new(super::interpreter::Interpreter::<usize>::default()); let mut vm : Box<evm::Evm> = Box::new(super::interpreter::Interpreter::<usize>::new(Arc::new(super::interpreter::SharedCache::default())));
test_finalize(vm.exec(params, &mut ext)).unwrap_err() test_finalize(vm.exec(params, &mut ext)).unwrap_err()
}; };
@ -211,7 +213,7 @@ fn test_add(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.address = address.clone(); params.address = address.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -231,7 +233,7 @@ fn test_sha3(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.address = address.clone(); params.address = address.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -251,7 +253,7 @@ fn test_address(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.address = address.clone(); params.address = address.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -273,7 +275,7 @@ fn test_origin(factory: super::Factory) {
params.address = address.clone(); params.address = address.clone();
params.origin = origin.clone(); params.origin = origin.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -295,7 +297,7 @@ fn test_sender(factory: super::Factory) {
params.address = address.clone(); params.address = address.clone();
params.sender = sender.clone(); params.sender = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -329,9 +331,9 @@ fn test_extcodecopy(factory: super::Factory) {
params.address = address.clone(); params.address = address.clone();
params.sender = sender.clone(); params.sender = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
ext.codes.insert(sender, sender_code); ext.codes.insert(sender, Arc::new(sender_code));
let gas_left = { let gas_left = {
let mut vm = factory.create(params.gas); let mut vm = factory.create(params.gas);
@ -350,7 +352,7 @@ fn test_log_empty(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.address = address.clone(); params.address = address.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -382,7 +384,7 @@ fn test_log_sender(factory: super::Factory) {
params.address = address.clone(); params.address = address.clone();
params.sender = sender.clone(); params.sender = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -406,7 +408,7 @@ fn test_blockhash(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.address = address.clone(); params.address = address.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
ext.blockhashes.insert(U256::zero(), blockhash.clone()); ext.blockhashes.insert(U256::zero(), blockhash.clone());
@ -428,7 +430,7 @@ fn test_calldataload(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.address = address.clone(); params.address = address.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
params.data = Some(data); params.data = Some(data);
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
@ -449,7 +451,7 @@ fn test_author(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
ext.info.author = author; ext.info.author = author;
@ -469,7 +471,7 @@ fn test_timestamp(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
ext.info.timestamp = timestamp; ext.info.timestamp = timestamp;
@ -489,7 +491,7 @@ fn test_number(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
ext.info.number = number; ext.info.number = number;
@ -509,7 +511,7 @@ fn test_difficulty(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
ext.info.difficulty = difficulty; ext.info.difficulty = difficulty;
@ -529,7 +531,7 @@ fn test_gas_limit(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
ext.info.gas_limit = gas_limit; ext.info.gas_limit = gas_limit;
@ -548,7 +550,7 @@ fn test_mul(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -566,7 +568,7 @@ fn test_sub(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -584,7 +586,7 @@ fn test_div(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -602,7 +604,7 @@ fn test_div_zero(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -620,7 +622,7 @@ fn test_mod(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -639,7 +641,7 @@ fn test_smod(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -658,7 +660,7 @@ fn test_sdiv(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -677,7 +679,7 @@ fn test_exp(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -697,7 +699,7 @@ fn test_comparison(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -718,7 +720,7 @@ fn test_signed_comparison(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -739,7 +741,7 @@ fn test_bitops(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(150_000); params.gas = U256::from(150_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -762,7 +764,7 @@ fn test_addmod_mulmod(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -783,7 +785,7 @@ fn test_byte(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -802,7 +804,7 @@ fn test_signextend(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -817,12 +819,12 @@ fn test_signextend(factory: super::Factory) {
#[test] // JIT just returns out of gas #[test] // JIT just returns out of gas
fn test_badinstruction_int() { fn test_badinstruction_int() {
let factory = super::Factory::new(VMType::Interpreter); let factory = super::Factory::new(VMType::Interpreter, 1024 * 32);
let code = "af".from_hex().unwrap(); let code = "af".from_hex().unwrap();
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let err = { let err = {
@ -842,7 +844,7 @@ fn test_pop(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -862,7 +864,7 @@ fn test_extops(factory: super::Factory) {
params.gas = U256::from(150_000); params.gas = U256::from(150_000);
params.gas_price = U256::from(0x32); params.gas_price = U256::from(0x32);
params.value = ActionValue::Transfer(U256::from(0x99)); params.value = ActionValue::Transfer(U256::from(0x99));
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -885,7 +887,7 @@ fn test_jumps(factory: super::Factory) {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(150_000); params.gas = U256::from(150_000);
params.code = Some(code); params.code = Some(Arc::new(code));
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
let gas_left = { let gas_left = {
@ -908,7 +910,7 @@ fn test_calls(factory: super::Factory) {
let code_address = Address::from(0x998); let code_address = Address::from(0x998);
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.gas = U256::from(150_000); params.gas = U256::from(150_000);
params.code = Some(code); params.code = Some(Arc::new(code));
params.address = address.clone(); params.address = address.clone();
let mut ext = FakeExt::new(); let mut ext = FakeExt::new();
ext.balances = { ext.balances = {

View File

@ -15,20 +15,24 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Transaction Execution environment. //! Transaction Execution environment.
use common::*; use util::*;
use action_params::{ActionParams, ActionValue};
use state::{State, Substate}; use state::{State, Substate};
use engines::Engine; use engines::Engine;
use types::executed::CallType; use types::executed::CallType;
use env_info::EnvInfo;
use error::ExecutionError;
use evm::{self, Ext, Factory, Finalize}; use evm::{self, Ext, Factory, Finalize};
use externalities::*; use externalities::*;
use trace::{FlatTrace, Tracer, NoopTracer, ExecutiveTracer, VMTrace, VMTracer, ExecutiveVMTracer, NoopVMTracer}; use trace::{FlatTrace, Tracer, NoopTracer, ExecutiveTracer, VMTrace, VMTracer, ExecutiveVMTracer, NoopVMTracer};
use transaction::{Action, SignedTransaction};
use crossbeam; use crossbeam;
pub use types::executed::{Executed, ExecutionResult}; pub use types::executed::{Executed, ExecutionResult};
/// Max depth to avoid stack overflow (when it's reached we start a new thread with VM) /// Roughly estimate what stack size each level of evm depth will use
/// TODO [todr] We probably need some more sophisticated calculations here (limit on my machine 132) /// TODO [todr] We probably need some more sophisticated calculations here (limit on my machine 132)
/// Maybe something like here: `https://github.com/ethereum/libethereum/blob/4db169b8504f2b87f7d5a481819cfb959fc65f6c/libethereum/ExtVM.cpp` /// Maybe something like here: `https://github.com/ethereum/libethereum/blob/4db169b8504f2b87f7d5a481819cfb959fc65f6c/libethereum/ExtVM.cpp`
const MAX_VM_DEPTH_FOR_THREAD: usize = 64; const STACK_SIZE_PER_DEPTH: usize = 24*1024;
/// Returns new address created from address and given nonce. /// Returns new address created from address and given nonce.
pub fn contract_address(address: &Address, nonce: &U256) -> Address { pub fn contract_address(address: &Address, nonce: &U256) -> Address {
@ -149,12 +153,13 @@ impl<'a> Executive<'a> {
// TODO: we might need bigints here, or at least check overflows. // TODO: we might need bigints here, or at least check overflows.
let balance = self.state.balance(&sender); let balance = self.state.balance(&sender);
let gas_cost = U512::from(t.gas) * U512::from(t.gas_price); let gas_cost = t.gas.full_mul(t.gas_price);
let total_cost = U512::from(t.value) + gas_cost; let total_cost = U512::from(t.value) + gas_cost;
// avoid unaffordable transactions // avoid unaffordable transactions
if U512::from(balance) < total_cost { let balance512 = U512::from(balance);
return Err(From::from(ExecutionError::NotEnoughCash { required: total_cost, got: U512::from(balance) })); if balance512 < total_cost {
return Err(From::from(ExecutionError::NotEnoughCash { required: total_cost, got: balance512 }));
} }
// NOTE: there can be no invalid transactions from this point. // NOTE: there can be no invalid transactions from this point.
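A note on the gas-cost hunk above: the old code widened both operands to U512 before multiplying, the new code uses t.gas.full_mul(t.gas_price), which performs the widening as part of the multiplication. The same overflow-avoidance idea sketched with primitive integers (U256/U512 come from the util crate and are not pulled in here; u64/u128 stand in for them):

// u128 stands in for U512, u64 for U256: multiply in the wider type so
// gas * gas_price cannot overflow before the affordability check.
fn affordable(balance: u64, value: u64, gas: u64, gas_price: u64) -> bool {
    let gas_cost = (gas as u128) * (gas_price as u128); // like t.gas.full_mul(t.gas_price)
    let total_cost = value as u128 + gas_cost;
    (balance as u128) >= total_cost
}

fn main() {
    assert!(affordable(u64::MAX, 0, u64::MAX, 1));  // exactly affordable
    assert!(!affordable(u64::MAX, 0, u64::MAX, 2)); // cost overflows u64, but not u128
}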
@ -168,13 +173,14 @@ impl<'a> Executive<'a> {
let new_address = contract_address(&sender, &nonce); let new_address = contract_address(&sender, &nonce);
let params = ActionParams { let params = ActionParams {
code_address: new_address.clone(), code_address: new_address.clone(),
code_hash: t.data.sha3(),
address: new_address, address: new_address,
sender: sender.clone(), sender: sender.clone(),
origin: sender.clone(), origin: sender.clone(),
gas: init_gas, gas: init_gas,
gas_price: t.gas_price, gas_price: t.gas_price,
value: ActionValue::Transfer(t.value), value: ActionValue::Transfer(t.value),
code: Some(t.data.clone()), code: Some(Arc::new(t.data.clone())),
data: None, data: None,
call_type: CallType::None, call_type: CallType::None,
}; };
@ -190,6 +196,7 @@ impl<'a> Executive<'a> {
gas_price: t.gas_price, gas_price: t.gas_price,
value: ActionValue::Transfer(t.value), value: ActionValue::Transfer(t.value),
code: self.state.code(address), code: self.state.code(address),
code_hash: self.state.code_hash(address),
data: Some(t.data.clone()), data: Some(t.data.clone()),
call_type: CallType::Call, call_type: CallType::Call,
}; };
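ActionParams now carries a code_hash next to the code itself (t.data.sha3() for create, self.state.code_hash(address) for call). One plausible use, an assumption here rather than something shown in this diff, is letting later stages key caches or comparisons by the hash instead of re-hashing or comparing whole byte vectors. A toy content-addressed cache, with std's DefaultHasher standing in for sha3:

use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// stand-in for sha3: any stable content hash works for the illustration
fn code_hash(code: &[u8]) -> u64 {
    let mut h = DefaultHasher::new();
    code.hash(&mut h);
    h.finish()
}

fn main() {
    let mut analysed: HashMap<u64, usize> = HashMap::new(); // hash -> e.g. jump-table size

    let code = vec![0x60, 0x10, 0x60, 0x0c];
    let hash = code_hash(&code);

    // expensive per-code work happens at most once per distinct piece of code
    let entry = analysed.entry(hash).or_insert_with(|| code.len());
    assert_eq!(*entry, 4);
}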
@ -210,8 +217,11 @@ impl<'a> Executive<'a> {
tracer: &mut T, tracer: &mut T,
vm_tracer: &mut V vm_tracer: &mut V
) -> evm::Result<U256> where T: Tracer, V: VMTracer { ) -> evm::Result<U256> where T: Tracer, V: VMTracer {
let depth_threshold = ::io::LOCAL_STACK_SIZE.with(|sz| sz.get() / STACK_SIZE_PER_DEPTH);
// Ordinary execution - keep VM in same thread // Ordinary execution - keep VM in same thread
if (self.depth + 1) % MAX_VM_DEPTH_FOR_THREAD != 0 { if (self.depth + 1) % depth_threshold != 0 {
let vm_factory = self.vm_factory; let vm_factory = self.vm_factory;
let mut ext = self.as_externalities(OriginInfo::from(&params), unconfirmed_substate, output_policy, tracer, vm_tracer); let mut ext = self.as_externalities(OriginInfo::from(&params), unconfirmed_substate, output_policy, tracer, vm_tracer);
trace!(target: "executive", "ext.schedule.have_delegate_call: {}", ext.schedule().have_delegate_call); trace!(target: "executive", "ext.schedule.have_delegate_call: {}", ext.schedule().have_delegate_call);
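The two hunks above replace the fixed MAX_VM_DEPTH_FOR_THREAD constant with a threshold derived from the running thread's stack size, keeping the old behaviour of handing the VM to a fresh thread once that depth is reached. A minimal sketch of the arithmetic, assuming a known stack size in bytes (the LOCAL_STACK_SIZE thread-local and the Executive plumbing are not reproduced here):

const STACK_SIZE_PER_DEPTH: usize = 24 * 1024;

// How many more EVM call frames the current thread can afford before the
// executor should hand the VM off to a thread with its own stack.
fn depth_threshold(local_stack_size: usize) -> usize {
    local_stack_size / STACK_SIZE_PER_DEPTH
}

fn main() {
    // e.g. an 8 MiB stack allows roughly 341 levels of nesting
    assert_eq!(depth_threshold(8 * 1024 * 1024), 341);
}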
@ -244,7 +254,7 @@ impl<'a> Executive<'a> {
vm_tracer: &mut V vm_tracer: &mut V
) -> evm::Result<U256> where T: Tracer, V: VMTracer { ) -> evm::Result<U256> where T: Tracer, V: VMTracer {
// backup used in case of running out of gas // backup used in case of running out of gas
self.state.snapshot(); self.state.checkpoint();
// at first, transfer value to destination // at first, transfer value to destination
if let ActionValue::Transfer(val) = params.value { if let ActionValue::Transfer(val) = params.value {
@ -263,7 +273,7 @@ impl<'a> Executive<'a> {
let cost = self.engine.cost_of_builtin(&params.code_address, data); let cost = self.engine.cost_of_builtin(&params.code_address, data);
if cost <= params.gas { if cost <= params.gas {
self.engine.execute_builtin(&params.code_address, data, &mut output); self.engine.execute_builtin(&params.code_address, data, &mut output);
self.state.clear_snapshot(); self.state.discard_checkpoint();
// trace only top level calls to builtins to avoid DDoS attacks // trace only top level calls to builtins to avoid DDoS attacks
if self.depth == 0 { if self.depth == 0 {
@ -283,7 +293,7 @@ impl<'a> Executive<'a> {
Ok(params.gas - cost) Ok(params.gas - cost)
} else { } else {
// just drain the whole gas // just drain the whole gas
self.state.revert_snapshot(); self.state.revert_to_checkpoint();
tracer.trace_failed_call(trace_info, vec![], evm::Error::OutOfGas.into()); tracer.trace_failed_call(trace_info, vec![], evm::Error::OutOfGas.into());
@ -329,7 +339,7 @@ impl<'a> Executive<'a> {
res res
} else { } else {
// otherwise it's just a basic transaction, only do tracing, if necessary. // otherwise it's just a basic transaction, only do tracing, if necessary.
self.state.clear_snapshot(); self.state.discard_checkpoint();
tracer.trace_call(trace_info, U256::zero(), trace_output, vec![]); tracer.trace_call(trace_info, U256::zero(), trace_output, vec![]);
Ok(params.gas) Ok(params.gas)
@ -348,7 +358,7 @@ impl<'a> Executive<'a> {
vm_tracer: &mut V vm_tracer: &mut V
) -> evm::Result<U256> where T: Tracer, V: VMTracer { ) -> evm::Result<U256> where T: Tracer, V: VMTracer {
// backup used in case of running out of gas // backup used in case of running out of gas
self.state.snapshot(); self.state.checkpoint();
// part of substate that may be reverted // part of substate that may be reverted
let mut unconfirmed_substate = Substate::new(); let mut unconfirmed_substate = Substate::new();
@ -411,7 +421,7 @@ impl<'a> Executive<'a> {
// real amount to refund // real amount to refund
let gas_left_prerefund = match result { Ok(x) => x, _ => 0.into() }; let gas_left_prerefund = match result { Ok(x) => x, _ => 0.into() };
let refunded = cmp::min(refunds_bound, (t.gas - gas_left_prerefund) / U256::from(2)); let refunded = cmp::min(refunds_bound, (t.gas - gas_left_prerefund) >> 1);
let gas_left = gas_left_prerefund + refunded; let gas_left = gas_left_prerefund + refunded;
let gas_used = t.gas - gas_left; let gas_used = t.gas - gas_left;
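In the refund hunk above, (t.gas - gas_left_prerefund) >> 1 is the old division by two written as a shift, so the refund stays capped at half of the gas actually consumed. A small worked example with u64 standing in for U256:

use std::cmp;

fn refunded_gas(gas_limit: u64, gas_left_prerefund: u64, refunds_bound: u64) -> u64 {
    // at most half of the consumed gas can come back, however large the
    // accrued SSTORE/suicide refunds are
    cmp::min(refunds_bound, (gas_limit - gas_left_prerefund) >> 1)
}

fn main() {
    // 100_000 gas supplied, 40_000 left before refunds, 50_000 refunds accrued:
    // 60_000 was consumed, so only 30_000 is actually refundable.
    assert_eq!(refunded_gas(100_000, 40_000, 50_000), 30_000);
}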
@ -421,8 +431,16 @@ impl<'a> Executive<'a> {
trace!("exec::finalize: t.gas={}, sstore_refunds={}, suicide_refunds={}, refunds_bound={}, gas_left_prerefund={}, refunded={}, gas_left={}, gas_used={}, refund_value={}, fees_value={}\n", trace!("exec::finalize: t.gas={}, sstore_refunds={}, suicide_refunds={}, refunds_bound={}, gas_left_prerefund={}, refunded={}, gas_left={}, gas_used={}, refund_value={}, fees_value={}\n",
t.gas, sstore_refunds, suicide_refunds, refunds_bound, gas_left_prerefund, refunded, gas_left, gas_used, refund_value, fees_value); t.gas, sstore_refunds, suicide_refunds, refunds_bound, gas_left_prerefund, refunded, gas_left, gas_used, refund_value, fees_value);
trace!("exec::finalize: Refunding refund_value={}, sender={}\n", refund_value, t.sender().unwrap()); let sender = match t.sender() {
self.state.add_balance(&t.sender().unwrap(), &refund_value); Ok(sender) => sender,
Err(e) => {
debug!(target: "executive", "attempted to finalize transaction without sender: {}", e);
return Err(ExecutionError::Internal);
}
};
trace!("exec::finalize: Refunding refund_value={}, sender={}\n", refund_value, sender);
self.state.add_balance(&sender, &refund_value);
trace!("exec::finalize: Compensating author: fees_value={}, author={}\n", fees_value, &self.info.author); trace!("exec::finalize: Compensating author: fees_value={}, author={}\n", fees_value, &self.info.author);
self.state.add_balance(&self.info.author, &fees_value); self.state.add_balance(&self.info.author, &fees_value);
@ -471,10 +489,10 @@ impl<'a> Executive<'a> {
| Err(evm::Error::BadInstruction {.. }) | Err(evm::Error::BadInstruction {.. })
| Err(evm::Error::StackUnderflow {..}) | Err(evm::Error::StackUnderflow {..})
| Err(evm::Error::OutOfStack {..}) => { | Err(evm::Error::OutOfStack {..}) => {
self.state.revert_snapshot(); self.state.revert_to_checkpoint();
}, },
Ok(_) | Err(evm::Error::Internal) => { Ok(_) | Err(evm::Error::Internal) => {
self.state.clear_snapshot(); self.state.discard_checkpoint();
substate.accrue(un_substate); substate.accrue(un_substate);
} }
} }
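The snapshot to checkpoint renames in the hunks above (checkpoint, discard_checkpoint, revert_to_checkpoint) all follow the same journalling pattern: open a checkpoint before running the VM, fold the writes in on success or an internal error, throw them away on a recoverable VM error such as OutOfGas. A toy single-level illustration, with a Vec of key/value writes standing in for the real State and Substate types:

struct Journal {
    committed: Vec<(&'static str, u64)>,
    pending: Vec<(&'static str, u64)>,
}

impl Journal {
    fn checkpoint(&mut self) { self.pending.clear(); }
    fn set(&mut self, key: &'static str, val: u64) { self.pending.push((key, val)); }
    fn discard_checkpoint(&mut self) { self.committed.append(&mut self.pending); }
    fn revert_to_checkpoint(&mut self) { self.pending.clear(); }
}

fn main() {
    let mut state = Journal { committed: vec![], pending: vec![] };
    state.checkpoint();
    state.set("0x00", 7);
    let out_of_gas = true; // pretend the VM returned an out-of-gas error
    if out_of_gas {
        state.revert_to_checkpoint(); // drop everything written since checkpoint()
    } else {
        state.discard_checkpoint();   // keep the writes and fold them in
    }
    assert!(state.committed.is_empty());
}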
@ -486,13 +504,18 @@ impl<'a> Executive<'a> {
mod tests { mod tests {
use ethkey::{Generator, Random}; use ethkey::{Generator, Random};
use super::*; use super::*;
use common::*; use util::*;
use action_params::{ActionParams, ActionValue};
use env_info::EnvInfo;
use evm::{Factory, VMType}; use evm::{Factory, VMType};
use error::ExecutionError;
use state::Substate; use state::Substate;
use tests::helpers::*; use tests::helpers::*;
use trace::trace; use trace::trace;
use trace::{FlatTrace, Tracer, NoopTracer, ExecutiveTracer}; use trace::{FlatTrace, Tracer, NoopTracer, ExecutiveTracer};
use trace::{VMTrace, VMOperation, VMExecutedOperation, MemoryDiff, StorageDiff, VMTracer, NoopVMTracer, ExecutiveVMTracer}; use trace::{VMTrace, VMOperation, VMExecutedOperation, MemoryDiff, StorageDiff, VMTracer, NoopVMTracer, ExecutiveVMTracer};
use transaction::{Action, Transaction};
use types::executed::CallType; use types::executed::CallType;
#[test] #[test]
@ -511,7 +534,7 @@ mod tests {
params.address = address.clone(); params.address = address.clone();
params.sender = sender.clone(); params.sender = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some("3331600055".from_hex().unwrap()); params.code = Some(Arc::new("3331600055".from_hex().unwrap()));
params.value = ActionValue::Transfer(U256::from(0x7)); params.value = ActionValue::Transfer(U256::from(0x7));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
@ -570,7 +593,7 @@ mod tests {
params.sender = sender.clone(); params.sender = sender.clone();
params.origin = sender.clone(); params.origin = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code.clone()); params.code = Some(Arc::new(code));
params.value = ActionValue::Transfer(U256::from(100)); params.value = ActionValue::Transfer(U256::from(100));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
@ -589,8 +612,11 @@ mod tests {
assert_eq!(substate.contracts_created.len(), 0); assert_eq!(substate.contracts_created.len(), 0);
} }
evm_test!{test_call_to_create: test_call_to_create_jit, test_call_to_create_int} #[test]
fn test_call_to_create(factory: Factory) { // Tracing is not supported in JIT
fn test_call_to_create() {
let factory = Factory::new(VMType::Interpreter, 1024 * 32);
// code: // code:
// //
// 7c 601080600c6000396000f3006000355415600957005b60203560003555 - push 29 bytes? // 7c 601080600c6000396000f3006000355415600957005b60203560003555 - push 29 bytes?
@ -625,7 +651,7 @@ mod tests {
params.sender = sender.clone(); params.sender = sender.clone();
params.origin = sender.clone(); params.origin = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code.clone()); params.code = Some(Arc::new(code));
params.value = ActionValue::Transfer(U256::from(100)); params.value = ActionValue::Transfer(U256::from(100));
params.call_type = CallType::Call; params.call_type = CallType::Call;
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
@ -688,7 +714,7 @@ mod tests {
VMOperation { pc: 33, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99985.into(), stack_push: vec_into![29], mem_diff: None, store_diff: None }) }, VMOperation { pc: 33, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99985.into(), stack_push: vec_into![29], mem_diff: None, store_diff: None }) },
VMOperation { pc: 35, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99982.into(), stack_push: vec_into![3], mem_diff: None, store_diff: None }) }, VMOperation { pc: 35, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99982.into(), stack_push: vec_into![3], mem_diff: None, store_diff: None }) },
VMOperation { pc: 37, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99979.into(), stack_push: vec_into![23], mem_diff: None, store_diff: None }) }, VMOperation { pc: 37, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 99979.into(), stack_push: vec_into![23], mem_diff: None, store_diff: None }) },
VMOperation { pc: 39, instruction: 240, gas_cost: 32000.into(), executed: Some(VMExecutedOperation { gas_used: 67979.into(), stack_push: vec_into![U256::from_dec_str("1135198453258042933984631383966629874710669425204").unwrap()], mem_diff: None, store_diff: None }) }, VMOperation { pc: 39, instruction: 240, gas_cost: 99979.into(), executed: Some(VMExecutedOperation { gas_used: 64755.into(), stack_push: vec_into![U256::from_dec_str("1135198453258042933984631383966629874710669425204").unwrap()], mem_diff: None, store_diff: None }) },
VMOperation { pc: 40, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 64752.into(), stack_push: vec_into![0], mem_diff: None, store_diff: None }) }, VMOperation { pc: 40, instruction: 96, gas_cost: 3.into(), executed: Some(VMExecutedOperation { gas_used: 64752.into(), stack_push: vec_into![0], mem_diff: None, store_diff: None }) },
VMOperation { pc: 42, instruction: 85, gas_cost: 20000.into(), executed: Some(VMExecutedOperation { gas_used: 44752.into(), stack_push: vec_into![], mem_diff: None, store_diff: Some(StorageDiff { location: 0.into(), value: U256::from_dec_str("1135198453258042933984631383966629874710669425204").unwrap() }) }) } VMOperation { pc: 42, instruction: 85, gas_cost: 20000.into(), executed: Some(VMExecutedOperation { gas_used: 44752.into(), stack_push: vec_into![], mem_diff: None, store_diff: Some(StorageDiff { location: 0.into(), value: U256::from_dec_str("1135198453258042933984631383966629874710669425204").unwrap() }) }) }
], ],
@ -712,8 +738,10 @@ mod tests {
assert_eq!(vm_tracer.drain().unwrap(), expected_vm_trace); assert_eq!(vm_tracer.drain().unwrap(), expected_vm_trace);
} }
evm_test!{test_create_contract: test_create_contract_jit, test_create_contract_int} #[test]
fn test_create_contract(factory: Factory) { fn test_create_contract() {
// Tracing is not supported in JIT
let factory = Factory::new(VMType::Interpreter, 1024 * 32);
// code: // code:
// //
// 60 10 - push 16 // 60 10 - push 16
@ -735,7 +763,7 @@ mod tests {
params.sender = sender.clone(); params.sender = sender.clone();
params.origin = sender.clone(); params.origin = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code.clone()); params.code = Some(Arc::new(code));
params.value = ActionValue::Transfer(100.into()); params.value = ActionValue::Transfer(100.into());
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
@ -823,7 +851,7 @@ mod tests {
params.sender = sender.clone(); params.sender = sender.clone();
params.origin = sender.clone(); params.origin = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code.clone()); params.code = Some(Arc::new(code));
params.value = ActionValue::Transfer(U256::from(100)); params.value = ActionValue::Transfer(U256::from(100));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
@ -875,7 +903,7 @@ mod tests {
params.sender = sender.clone(); params.sender = sender.clone();
params.origin = sender.clone(); params.origin = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code.clone()); params.code = Some(Arc::new(code));
params.value = ActionValue::Transfer(U256::from(100)); params.value = ActionValue::Transfer(U256::from(100));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
@ -932,7 +960,7 @@ mod tests {
params.address = address_a.clone(); params.address = address_a.clone();
params.sender = sender.clone(); params.sender = sender.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code_a.clone()); params.code = Some(Arc::new(code_a.clone()));
params.value = ActionValue::Transfer(U256::from(100_000)); params.value = ActionValue::Transfer(U256::from(100_000));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
@ -982,10 +1010,10 @@ mod tests {
let mut params = ActionParams::default(); let mut params = ActionParams::default();
params.address = address.clone(); params.address = address.clone();
params.gas = U256::from(100_000); params.gas = U256::from(100_000);
params.code = Some(code.clone()); params.code = Some(Arc::new(code.clone()));
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();
state.init_code(&address, code.clone()); state.init_code(&address, code);
let info = EnvInfo::default(); let info = EnvInfo::default();
let engine = TestEngine::new(0); let engine = TestEngine::new(0);
let mut substate = Substate::new(); let mut substate = Substate::new();
@ -1183,7 +1211,7 @@ mod tests {
params.sender = sender.clone(); params.sender = sender.clone();
params.origin = sender.clone(); params.origin = sender.clone();
params.gas = U256::from(0x0186a0); params.gas = U256::from(0x0186a0);
params.code = Some(code.clone()); params.code = Some(Arc::new(code));
params.value = ActionValue::Transfer(U256::from_str("0de0b6b3a7640000").unwrap()); params.value = ActionValue::Transfer(U256::from_str("0de0b6b3a7640000").unwrap());
let mut state_result = get_temp_state(); let mut state_result = get_temp_state();
let mut state = state_result.reference_mut(); let mut state = state_result.reference_mut();

View File

@ -15,9 +15,11 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Transaction Execution environment. //! Transaction Execution environment.
use common::*; use util::*;
use action_params::{ActionParams, ActionValue};
use state::{State, Substate}; use state::{State, Substate};
use engines::Engine; use engines::Engine;
use env_info::EnvInfo;
use executive::*; use executive::*;
use evm::{self, Schedule, Ext, ContractCreateResult, MessageCallResult, Factory}; use evm::{self, Schedule, Ext, ContractCreateResult, MessageCallResult, Factory};
use types::executed::CallType; use types::executed::CallType;
@ -146,7 +148,8 @@ impl<'a, T, V> Ext for Externalities<'a, T, V> where T: 'a + Tracer, V: 'a + VMT
gas: *gas, gas: *gas,
gas_price: self.origin_info.gas_price, gas_price: self.origin_info.gas_price,
value: ActionValue::Transfer(*value), value: ActionValue::Transfer(*value),
code: Some(code.to_vec()), code: Some(Arc::new(code.to_vec())),
code_hash: code.sha3(),
data: None, data: None,
call_type: CallType::None, call_type: CallType::None,
}; };
@ -185,6 +188,7 @@ impl<'a, T, V> Ext for Externalities<'a, T, V> where T: 'a + Tracer, V: 'a + VMT
gas: *gas, gas: *gas,
gas_price: self.origin_info.gas_price, gas_price: self.origin_info.gas_price,
code: self.state.code(code_address), code: self.state.code(code_address),
code_hash: self.state.code_hash(code_address),
data: Some(data.to_vec()), data: Some(data.to_vec()),
call_type: call_type, call_type: call_type,
}; };
@ -201,15 +205,14 @@ impl<'a, T, V> Ext for Externalities<'a, T, V> where T: 'a + Tracer, V: 'a + VMT
} }
} }
fn extcode(&self, address: &Address) -> Bytes { fn extcode(&self, address: &Address) -> Arc<Bytes> {
self.state.code(address).unwrap_or_else(|| vec![]) self.state.code(address).unwrap_or_else(|| Arc::new(vec![]))
} }
fn extcodesize(&self, address: &Address) -> usize { fn extcodesize(&self, address: &Address) -> usize {
self.state.code_size(address).unwrap_or(0) self.state.code_size(address).unwrap_or(0)
} }
#[cfg_attr(feature="dev", allow(match_ref_pats))] #[cfg_attr(feature="dev", allow(match_ref_pats))]
fn ret(mut self, gas: &U256, data: &[u8]) -> evm::Result<U256> fn ret(mut self, gas: &U256, data: &[u8]) -> evm::Result<U256>
where Self: Sized { where Self: Sized {
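Across these files the contract code moves from Bytes to Arc<Bytes> (params.code = Some(Arc::new(code)), extcode returning Arc<Bytes>). Since cloning an Arc only bumps a reference count, the byte vector itself is shared rather than copied each time code is handed around. A minimal illustration:

use std::sync::Arc;

fn main() {
    // e.g. the body of a deployed contract
    let code: Arc<Vec<u8>> = Arc::new(vec![0x60, 0x10, 0x60, 0x0c, 0x60, 0x00]);

    // handing the code to an ActionParams-like consumer is now a cheap clone
    let for_params = Arc::clone(&code);
    assert_eq!(Arc::strong_count(&code), 2);

    // both handles see the same bytes; nothing was copied
    assert_eq!(for_params.as_slice(), code.as_slice());
}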
@ -252,6 +255,8 @@ impl<'a, T, V> Ext for Externalities<'a, T, V> where T: 'a + Tracer, V: 'a + VMT
} }
fn log(&mut self, topics: Vec<H256>, data: &[u8]) { fn log(&mut self, topics: Vec<H256>, data: &[u8]) {
use log_entry::LogEntry;
let address = self.origin_info.address.clone(); let address = self.origin_info.address.clone();
self.substate.logs.push(LogEntry { self.substate.logs.push(LogEntry {
address: address, address: address,
@ -302,8 +307,9 @@ impl<'a, T, V> Ext for Externalities<'a, T, V> where T: 'a + Tracer, V: 'a + VMT
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use common::*; use util::*;
use engines::Engine; use engines::Engine;
use env_info::EnvInfo;
use evm::Ext; use evm::Ext;
use state::{State, Substate}; use state::{State, Substate};
use tests::helpers::*; use tests::helpers::*;

View File

@ -17,7 +17,7 @@
//! Block header. //! Block header.
use util::*; use util::*;
use basic_types::*; use basic_types::{LogBloom, Seal, ZERO_LOGBLOOM};
use time::get_time; use time::get_time;
use rlp::*; use rlp::*;
@ -199,8 +199,9 @@ impl Header {
match &mut *hash { match &mut *hash {
&mut Some(ref h) => h.clone(), &mut Some(ref h) => h.clone(),
hash @ &mut None => { hash @ &mut None => {
*hash = Some(self.rlp_sha3(Seal::With)); let h = self.rlp_sha3(Seal::With);
hash.as_ref().unwrap().clone() *hash = Some(h.clone());
h
} }
} }
} }
@ -211,8 +212,9 @@ impl Header {
match &mut *hash { match &mut *hash {
&mut Some(ref h) => h.clone(), &mut Some(ref h) => h.clone(),
hash @ &mut None => { hash @ &mut None => {
*hash = Some(self.rlp_sha3(Seal::Without)); let h = self.rlp_sha3(Seal::Without);
hash.as_ref().unwrap().clone() *hash = Some(h.clone());
h
} }
} }
} }
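The two Header hunks above change the lazy hash caching so the freshly computed value is returned directly instead of being re-read from the Option with unwrap(). The same pattern in isolation, with String standing in for H256 and a closure standing in for rlp_sha3:

fn cached_hash(cache: &mut Option<String>, compute: impl Fn() -> String) -> String {
    if let Some(h) = cache {
        return h.clone();          // already computed: hand out a copy
    }
    let h = compute();             // compute once...
    *cache = Some(h.clone());      // ...remember it...
    h                              // ...and return it without touching the Option again
}

fn main() {
    let mut cache = None;
    assert_eq!(cached_hash(&mut cache, || "deadbeef".into()), "deadbeef");
    assert_eq!(cache.as_deref(), Some("deadbeef"));
}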
@ -295,6 +297,12 @@ impl Encodable for Header {
} }
} }
impl HeapSizeOf for Header {
fn heap_size_of_children(&self) -> usize {
self.extra_data.heap_size_of_children() + self.seal.heap_size_of_children()
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use rustc_serialize::hex::FromHex; use rustc_serialize::hex::FromHex;

View File

@ -48,7 +48,8 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
let mut spec = match era { let mut spec = match era {
ChainEra::Frontier => ethereum::new_frontier_test(), ChainEra::Frontier => ethereum::new_frontier_test(),
ChainEra::Homestead => ethereum::new_homestead_test(), ChainEra::Homestead => ethereum::new_homestead_test(),
ChainEra::DaoHardfork => ethereum::new_daohardfork_test(), ChainEra::Eip150 => ethereum::new_eip150_test(),
ChainEra::TransitionTest => ethereum::new_transition_test(),
}; };
spec.set_genesis_state(state); spec.set_genesis_state(state);
spec.overwrite_genesis_params(genesis); spec.overwrite_genesis_params(genesis);
@ -116,14 +117,38 @@ mod frontier_era_tests {
declare_test!{BlockchainTests_RandomTests_bl201507071825GO, "BlockchainTests/RandomTests/bl201507071825GO"} declare_test!{BlockchainTests_RandomTests_bl201507071825GO, "BlockchainTests/RandomTests/bl201507071825GO"}
} }
mod daohardfork_tests { mod transition_tests {
use tests::helpers::*; use tests::helpers::*;
use super::json_chain_test; use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> { fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::DaoHardfork) json_chain_test(json_data, ChainEra::TransitionTest)
} }
declare_test!{BlockchainTests_TestNetwork_bcSimpleTransitionTest, "BlockchainTests/TestNetwork/bcSimpleTransitionTest"} declare_test!{BlockchainTests_TestNetwork_bcSimpleTransitionTest, "BlockchainTests/TestNetwork/bcSimpleTransitionTest"}
declare_test!{BlockchainTests_TestNetwork_bcTheDaoTest, "BlockchainTests/TestNetwork/bcTheDaoTest"} declare_test!{BlockchainTests_TestNetwork_bcTheDaoTest, "BlockchainTests/TestNetwork/bcTheDaoTest"}
declare_test!{BlockchainTests_TestNetwork_bcEIP150Test, "BlockchainTests/TestNetwork/bcEIP150Test"}
}
mod eip150_blockchain_tests {
use tests::helpers::*;
use super::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Eip150)
}
declare_test!{BlockchainTests_EIP150_bcBlockGasLimitTest, "BlockchainTests/EIP150/bcBlockGasLimitTest"}
declare_test!{BlockchainTests_EIP150_bcForkStressTest, "BlockchainTests/EIP150/bcForkStressTest"}
declare_test!{BlockchainTests_EIP150_bcGasPricerTest, "BlockchainTests/EIP150/bcGasPricerTest"}
declare_test!{BlockchainTests_EIP150_bcInvalidHeaderTest, "BlockchainTests/EIP150/bcInvalidHeaderTest"}
declare_test!{BlockchainTests_EIP150_bcInvalidRLPTest, "BlockchainTests/EIP150/bcInvalidRLPTest"}
declare_test!{BlockchainTests_EIP150_bcMultiChainTest, "BlockchainTests/EIP150/bcMultiChainTest"}
declare_test!{BlockchainTests_EIP150_bcRPC_API_Test, "BlockchainTests/EIP150/bcRPC_API_Test"}
declare_test!{BlockchainTests_EIP150_bcStateTest, "BlockchainTests/EIP150/bcStateTest"}
declare_test!{BlockchainTests_EIP150_bcTotalDifficultyTest, "BlockchainTests/EIP150/bcTotalDifficultyTest"}
declare_test!{BlockchainTests_EIP150_bcUncleHeaderValiditiy, "BlockchainTests/EIP150/bcUncleHeaderValiditiy"}
declare_test!{BlockchainTests_EIP150_bcUncleTest, "BlockchainTests/EIP150/bcUncleTest"}
declare_test!{BlockchainTests_EIP150_bcValidBlockTest, "BlockchainTests/EIP150/bcValidBlockTest"}
declare_test!{BlockchainTests_EIP150_bcWalletTest, "BlockchainTests/EIP150/bcWalletTest"}
} }

View File

@ -0,0 +1,43 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use super::test_common::*;
use tests::helpers::*;
use super::state::json_chain_test;
fn do_json_test(json_data: &[u8]) -> Vec<String> {
json_chain_test(json_data, ChainEra::Eip150)
}
declare_test!{StateTests_EIP150_stEIPSpecificTest, "StateTests/EIP150/stEIPSpecificTest"}
declare_test!{StateTests_EIP150_stEIPsingleCodeGasPrices, "StateTests/EIP150/stEIPsingleCodeGasPrices"}
declare_test!{StateTests_EIP150_stMemExpandingEIPCalls, "StateTests/EIP150/stMemExpandingEIPCalls"}
declare_test!{StateTests_EIP150_stCallCodes, "StateTests/EIP150/Homestead/stCallCodes"}
declare_test!{StateTests_EIP150_stCallCreateCallCodeTest, "StateTests/EIP150/Homestead/stCallCreateCallCodeTest"}
declare_test!{StateTests_EIP150_stDelegatecallTest, "StateTests/EIP150/Homestead/stDelegatecallTest"}
declare_test!{StateTests_EIP150_stInitCodeTest, "StateTests/EIP150/Homestead/stInitCodeTest"}
declare_test!{StateTests_EIP150_stLogTests, "StateTests/EIP150/Homestead/stLogTests"}
declare_test!{heavy => StateTests_EIP150_stMemoryStressTest, "StateTests/EIP150/Homestead/stMemoryStressTest"}
declare_test!{heavy => StateTests_EIP150_stMemoryTest, "StateTests/EIP150/Homestead/stMemoryTest"}
declare_test!{StateTests_EIP150_stPreCompiledContracts, "StateTests/EIP150/Homestead/stPreCompiledContracts"}
declare_test!{heavy => StateTests_EIP150_stQuadraticComplexityTest, "StateTests/EIP150/Homestead/stQuadraticComplexityTest"}
declare_test!{StateTests_EIP150_stRecursiveCreate, "StateTests/EIP150/Homestead/stRecursiveCreate"}
declare_test!{StateTests_EIP150_stRefundTest, "StateTests/EIP150/Homestead/stRefundTest"}
declare_test!{StateTests_EIP150_stSpecialTest, "StateTests/EIP150/Homestead/stSpecialTest"}
declare_test!{StateTests_EIP150_stSystemOperationsTest, "StateTests/EIP150/Homestead/stSystemOperationsTest"}
declare_test!{StateTests_EIP150_stTransactionTest, "StateTests/EIP150/Homestead/stTransactionTest"}
declare_test!{StateTests_EIP150_stWalletTest, "StateTests/EIP150/Homestead/stWalletTest"}

Some files were not shown because too many files have changed in this diff.