Merge branch 'master' into ng-accounts-backup

commit 6b2cffd312
@@ -21,10 +21,17 @@ linux-stable:
     - cargo build --release --verbose
     - strip target/release/parity
     - md5sum target/release/parity >> parity.md5
+    - sh scripts/deb-build.sh amd64
+    - cp target/release/parity deb/usr/bin/parity
+    - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+    - dpkg-deb -b deb "parity_"$VER"_amd64.deb"
+    - md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
     - aws configure set aws_access_key_id $s3_key
     - aws configure set aws_secret_access_key $s3_secret
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity --body target/release/parity
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
   tags:
     - rust
     - rust-stable
@@ -44,10 +51,17 @@ linux-stable-14.04:
     - cargo build --release --verbose
     - strip target/release/parity
     - md5sum target/release/parity >> parity.md5
+    - sh scripts/deb-build.sh amd64
+    - cp target/release/parity deb/usr/bin/parity
+    - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+    - dpkg-deb -b deb "parity_"$VER"_amd64.deb"
+    - md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
     - aws configure set aws_access_key_id $s3_key
     - aws configure set aws_secret_access_key $s3_secret
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity --body target/release/parity
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity.md5 --body parity.md5
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/"parity_"$VER"_amd64.deb" --body "parity_"$VER"_amd64.deb"
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/"parity_"$VER"_amd64.deb.md5" --body "parity_"$VER"_amd64.deb.md5"
   tags:
     - rust
     - rust-14.04
@@ -134,10 +148,17 @@ linux-armv7:
     - cargo build --target armv7-unknown-linux-gnueabihf --release --verbose
     - arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity
     - md5sum target/armv7-unknown-linux-gnueabihf/release/parity >> parity.md5
+    - sh scripts/deb-build.sh armhf
+    - cp target/armv7-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity
+    - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+    - dpkg-deb -b deb "parity_"$VER"_armhf.deb"
+    - md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5"
     - aws configure set aws_access_key_id $s3_key
     - aws configure set aws_secret_access_key $s3_secret
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity --body target/armv7-unknown-linux-gnueabihf/release/parity
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity.md5 --body parity.md5
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
   tags:
     - rust
     - rust-arm
@@ -162,10 +183,17 @@ linux-arm:
     - cargo build --target arm-unknown-linux-gnueabihf --release --verbose
     - arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity
     - md5sum target/arm-unknown-linux-gnueabihf/release/parity >> parity.md5
+    - sh scripts/deb-build.sh armhf
+    - cp target/arm-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity
+    - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+    - dpkg-deb -b deb "parity_"$VER"_armhf.deb"
+    - md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5"
     - aws configure set aws_access_key_id $s3_key
     - aws configure set aws_secret_access_key $s3_secret
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity --body target/arm-unknown-linux-gnueabihf/release/parity
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity.md5 --body parity.md5
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb"
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5"
   tags:
     - rust
     - rust-arm
@@ -218,10 +246,17 @@ linux-aarch64:
     - cargo build --target aarch64-unknown-linux-gnu --release --verbose
     - aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity
     - md5sum target/aarch64-unknown-linux-gnu/release/parity >> parity.md5
+    - sh scripts/deb-build.sh arm64
+    - cp target/aarch64-unknown-linux-gnu/release/parity deb/usr/bin/parity
+    - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
+    - dpkg-deb -b deb "parity_"$VER"_arm64.deb"
+    - md5sum "parity_"$VER"_arm64.deb" >> "parity_"$VER"_arm64.deb.md5"
     - aws configure set aws_access_key_id $s3_key
     - aws configure set aws_secret_access_key $s3_secret
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity --body target/aarch64-unknown-linux-gnu/release/parity
     - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity.md5 --body parity.md5
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb"
+    - aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5"
   tags:
     - rust
     - rust-arm
@@ -261,10 +296,8 @@ windows:
     - set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt
     - set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
     - set RUST_BACKTRACE=1
-    - set RUSTFLAGS=-Zorbit=off
-    - set RUSTFLAGS=-D warnings
+    - set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off -D warnings
     - rustup default stable-x86_64-pc-windows-msvc
-    - git submodule update --init
     - cargo build --release --verbose
     - curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
     - curl -sL --url "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -o nsis\vc_redist.x64.exe
@@ -300,7 +333,6 @@ windows:
       - target/release/parity.pdb
      - nsis/InstallParity.exe
    name: "x86_64-pc-windows-msvc_parity"
-  allow_failure: true
 test-linux:
   stage: test
   before_script:
Cargo.lock (generated, 18 lines changed)
@@ -222,8 +222,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

 [[package]]
 name = "elastic-array"
-version = "0.5.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
+version = "0.6.0"
+source = "git+https://github.com/ethcore/elastic-array#70e4012e691b732c7c4cb04e9232799e6aa268bc"
+dependencies = [
+ "heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]

 [[package]]
 name = "env_logger"
@@ -540,7 +543,7 @@ dependencies = [
  "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "clippy 0.0.90 (registry+https://github.com/rust-lang/crates.io-index)",
- "elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
  "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
  "ethcore-bigint 0.1.1",
@@ -613,6 +616,7 @@ dependencies = [
  "itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
  "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
  "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -812,7 +816,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "json-ipc-server"
 version = "0.2.4"
-source = "git+https://github.com/ethcore/json-ipc-server.git#5fbd0253750d3097b9a8fb27effa84c18d630bbb"
+source = "git+https://github.com/ethcore/json-ipc-server.git#4642cd03ec1d23db89df80d22d5a88e7364ab885"
 dependencies = [
  "bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1207,7 +1211,7 @@ dependencies = [
 [[package]]
 name = "parity-ui-precompiled"
 version = "1.4.0"
-source = "git+https://github.com/ethcore/js-precompiled.git#18cc1f1aba75b9a7556f0461fb9379dbd0a34f02"
+source = "git+https://github.com/ethcore/js-precompiled.git#9f8baa9d0e54056c41a842b351597d0565beda98"
 dependencies = [
  "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1420,7 +1424,7 @@ dependencies = [
 name = "rlp"
 version = "0.1.0"
 dependencies = [
- "elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)",
  "ethcore-bigint 0.1.1",
  "lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1953,7 +1957,7 @@ dependencies = [
 "checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
 "checksum docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc0acb4ce0828c6a5a11d47baa432fe885881c27428c3a4e473e454ffe57a76"
 "checksum dtoa 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "0dd841b58510c9618291ffa448da2e4e0f699d984d436122372f446dae62263d"
-"checksum elastic-array 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4bc9250a632e7c001b741eb0ec6cee93c9a5b6d5f1879696a4b94d62b012210a"
+"checksum elastic-array 0.6.0 (git+https://github.com/ethcore/elastic-array)" = "<none>"
 "checksum env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "aba65b63ffcc17ffacd6cf5aa843da7c5a25e3bd4bbe0b7def8b214e411250e5"
 "checksum eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)" = "<none>"
 "checksum ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f7b0c53453517f620847be51943db329276ae52f2e210cfc659e81182864be2f"
@@ -73,6 +73,7 @@ ipc = ["ethcore/ipc", "ethsync/ipc"]
 jit = ["ethcore/jit"]
 dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev", "ethcore-rpc/dev", "ethcore-dapps/dev", "ethcore-signer/dev"]
 json-tests = ["ethcore/json-tests"]
+test-heavy = ["ethcore/test-heavy"]
 stratum = ["ipc"]
 ethkey-cli = ["ethcore/ethkey-cli"]
 ethstore-cli = ["ethcore/ethstore-cli"]
@@ -164,8 +164,7 @@
 		"enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303",
 		"enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303",
 		"enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303",
-		"enode://4cd540b2c3292e17cff39922e864094bf8b0741fcc8c5dcea14957e389d7944c70278d872902e3d0345927f621547efa659013c400865485ab4bfa0c6596936f@zero.parity.io:30303",
-		"enode://cc92c4c40d612a10c877ca023ef0496c843fbc92b6c6c0d55ce0b863d51d821c4bd70daebb54324a6086374e6dc05708fed39862b275f169cb678e655da9d07d@136.243.154.246:30303"
+		"enode://4cd540b2c3292e17cff39922e864094bf8b0741fcc8c5dcea14957e389d7944c70278d872902e3d0345927f621547efa659013c400865485ab4bfa0c6596936f@138.201.144.135:30303"
 	],
 	"accounts": {
 		"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },
@@ -96,9 +96,9 @@ impl<'db> HashDB for AccountDB<'db>{
 		unimplemented!()
 	}

-	fn get(&self, key: &H256) -> Option<&[u8]> {
+	fn get(&self, key: &H256) -> Option<DBValue> {
 		if key == &SHA3_NULL_RLP {
-			return Some(&NULL_RLP_STATIC);
+			return Some(DBValue::from_slice(&NULL_RLP_STATIC));
 		}
 		self.db.get(&combine_key(&self.address_hash, key))
 	}
@@ -114,7 +114,7 @@ impl<'db> HashDB for AccountDB<'db>{
 		unimplemented!()
 	}

-	fn emplace(&mut self, _key: H256, _value: Bytes) {
+	fn emplace(&mut self, _key: H256, _value: DBValue) {
 		unimplemented!()
 	}

@@ -122,7 +122,7 @@ impl<'db> HashDB for AccountDB<'db>{
 		unimplemented!()
 	}

-	fn get_aux(&self, hash: &[u8]) -> Option<Vec<u8>> {
+	fn get_aux(&self, hash: &[u8]) -> Option<DBValue> {
 		self.db.get_aux(hash)
 	}
 }
@@ -158,9 +158,9 @@ impl<'db> HashDB for AccountDBMut<'db>{
 		unimplemented!()
 	}

-	fn get(&self, key: &H256) -> Option<&[u8]> {
+	fn get(&self, key: &H256) -> Option<DBValue> {
 		if key == &SHA3_NULL_RLP {
-			return Some(&NULL_RLP_STATIC);
+			return Some(DBValue::from_slice(&NULL_RLP_STATIC));
 		}
 		self.db.get(&combine_key(&self.address_hash, key))
 	}
@@ -178,16 +178,16 @@ impl<'db> HashDB for AccountDBMut<'db>{
 		}
 		let k = value.sha3();
 		let ak = combine_key(&self.address_hash, &k);
-		self.db.emplace(ak, value.to_vec());
+		self.db.emplace(ak, DBValue::from_slice(value));
 		k
 	}

-	fn emplace(&mut self, key: H256, value: Bytes) {
+	fn emplace(&mut self, key: H256, value: DBValue) {
 		if key == SHA3_NULL_RLP {
 			return;
 		}
 		let key = combine_key(&self.address_hash, &key);
-		self.db.emplace(key, value.to_vec())
+		self.db.emplace(key, value)
 	}

 	fn remove(&mut self, key: &H256) {
@@ -202,7 +202,7 @@ impl<'db> HashDB for AccountDBMut<'db>{
 		self.db.insert_aux(hash, value);
 	}

-	fn get_aux(&self, hash: &[u8]) -> Option<Vec<u8>> {
+	fn get_aux(&self, hash: &[u8]) -> Option<DBValue> {
 		self.db.get_aux(hash)
 	}

@@ -218,9 +218,9 @@ impl<'db> HashDB for Wrapping<'db> {
 		unimplemented!()
 	}

-	fn get(&self, key: &H256) -> Option<&[u8]> {
+	fn get(&self, key: &H256) -> Option<DBValue> {
 		if key == &SHA3_NULL_RLP {
-			return Some(&NULL_RLP_STATIC);
+			return Some(DBValue::from_slice(&NULL_RLP_STATIC));
 		}
 		self.0.get(key)
 	}
@@ -236,7 +236,7 @@ impl<'db> HashDB for Wrapping<'db> {
 		unimplemented!()
 	}

-	fn emplace(&mut self, _key: H256, _value: Bytes) {
+	fn emplace(&mut self, _key: H256, _value: DBValue) {
 		unimplemented!()
 	}

@@ -252,9 +252,9 @@ impl<'db> HashDB for WrappingMut<'db>{
 		unimplemented!()
 	}

-	fn get(&self, key: &H256) -> Option<&[u8]> {
+	fn get(&self, key: &H256) -> Option<DBValue> {
 		if key == &SHA3_NULL_RLP {
-			return Some(&NULL_RLP_STATIC);
+			return Some(DBValue::from_slice(&NULL_RLP_STATIC));
 		}
 		self.0.get(key)
 	}
@@ -273,7 +273,7 @@ impl<'db> HashDB for WrappingMut<'db>{
 		self.0.insert(value)
 	}

-	fn emplace(&mut self, key: H256, value: Bytes) {
+	fn emplace(&mut self, key: H256, value: DBValue) {
 		if key == SHA3_NULL_RLP {
 			return;
 		}
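The recurring change in this file is the `HashDB` migration from borrowed `&[u8]` values to owned `DBValue`s. A minimal sketch of the resulting trait shape, with stand-in types (the real `DBValue` is an elastic array in parity's `util` crate, not a `Vec`):

```rust
// Stand-ins so the sketch is self-contained; real types live in `util`.
pub struct H256(pub [u8; 32]);
pub type DBValue = Vec<u8>; // assumption: any owned byte container works here

pub trait HashDB {
	/// Returns an owned copy rather than `Option<&[u8]>`: the value no longer
	/// borrows from `&self`, so backends that materialize data on the fly
	/// (e.g. `DBValue::from_slice(&NULL_RLP_STATIC)` above) can comply.
	fn get(&self, key: &H256) -> Option<DBValue>;

	/// Takes the owned value directly, avoiding the `value.to_vec()` copies
	/// that the old `Bytes`-based signature forced on implementations.
	fn emplace(&mut self, key: H256, value: DBValue);
}
```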
@@ -37,6 +37,5 @@ declare_test!{BlockchainTests_Homestead_bcUncleTest, "BlockchainTests/Homestead/
 declare_test!{BlockchainTests_Homestead_bcValidBlockTest, "BlockchainTests/Homestead/bcValidBlockTest"}
 declare_test!{BlockchainTests_Homestead_bcWalletTest, "BlockchainTests/Homestead/bcWalletTest"}
 declare_test!{BlockchainTests_Homestead_bcShanghaiLove, "BlockchainTests/Homestead/bcShanghaiLove"}
-// TODO [ToDr] uncomment as soon as eip150 tests are merged to develop branch of ethereum/tests
-// declare_test!{BlockchainTests_Homestead_bcSuicideIssue, "BlockchainTests/Homestead/bcSuicideIssue"}
+declare_test!{BlockchainTests_Homestead_bcSuicideIssue, "BlockchainTests/Homestead/bcSuicideIssue"}
 declare_test!{BlockchainTests_Homestead_bcExploitTest, "BlockchainTests/Homestead/bcExploitTest"}
@@ -154,7 +154,7 @@ impl OverlayRecentV7 {
 	// and commit the altered entries.
 	fn migrate_journal(&self, source: Arc<Database>, mut batch: Batch, dest: &mut Database) -> Result<(), Error> {
 		if let Some(val) = try!(source.get(None, V7_LATEST_ERA_KEY).map_err(Error::Custom)) {
-			try!(batch.insert(V7_LATEST_ERA_KEY.into(), val.to_owned(), dest));
+			try!(batch.insert(V7_LATEST_ERA_KEY.into(), val.clone().to_vec(), dest));

 			let mut era = decode::<u64>(&val);
 			loop {
@@ -19,7 +19,7 @@
 use account_db::{AccountDB, AccountDBMut};
 use snapshot::Error;

-use util::{U256, FixedHash, H256, Bytes, HashDB, SHA3_EMPTY, SHA3_NULL_RLP};
+use util::{U256, FixedHash, H256, Bytes, HashDB, DBValue, SHA3_EMPTY, SHA3_NULL_RLP};
 use util::trie::{TrieDB, Trie};
 use rlp::{Rlp, RlpStream, Stream, UntrustedRlp, View};

@@ -112,7 +112,7 @@ impl Account {
 		let mut stream = RlpStream::new_list(pairs.len());

 		for (k, v) in pairs {
-			stream.begin_list(2).append(&k).append(&v);
+			stream.begin_list(2).append(&k).append(&&*v);
 		}

 		let pairs_rlp = stream.out();
@@ -130,7 +130,7 @@ impl Account {
 		match acct_db.get(&self.code_hash) {
 			Some(c) => {
 				used_code.insert(self.code_hash.clone());
-				account_stream.append(&CodeState::Inline.raw()).append(&c);
+				account_stream.append(&CodeState::Inline.raw()).append(&&*c);
 			}
 			None => {
 				warn!("code lookup failed during snapshot");
@@ -178,7 +178,7 @@ impl Account {
 			CodeState::Hash => {
 				let code_hash = try!(rlp.val_at(3));
 				if let Some(code) = code_map.get(&code_hash) {
-					acct_db.emplace(code_hash.clone(), code.clone());
+					acct_db.emplace(code_hash.clone(), DBValue::from_slice(&code));
 				}

 				(code_hash, None)
@@ -226,7 +226,7 @@ mod tests {
 	use snapshot::tests::helpers::fill_storage;

 	use util::sha3::{SHA3_EMPTY, SHA3_NULL_RLP};
-	use util::{Address, FixedHash, H256, HashDB};
+	use util::{Address, FixedHash, H256, HashDB, DBValue};
 	use rlp::{UntrustedRlp, View};

 	use std::collections::{HashSet, HashMap};
@@ -292,7 +292,7 @@ mod tests {

 	{
 		let mut acct_db = AccountDBMut::new(db.as_hashdb_mut(), &addr2);
-		acct_db.emplace(code_hash.clone(), b"this is definitely code".to_vec());
+		acct_db.emplace(code_hash.clone(), DBValue::from_slice(b"this is definitely code"));
 	}

 	let account1 = Account {
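The `append(&&*v)` call sites above look odd at first; the extra layer exists because `RlpStream::append` takes `&E` and the encodable impl in play is for `&[u8]`, so with `v` now an owned `DBValue` the argument has to be a reference to a slice reference. A self-contained illustration with stand-in types (not the real rlp crate):

```rust
// Stand-in trait mirroring the shape of the rlp API for illustration only.
trait Encodable { fn bytes(&self) -> usize; }
impl<'a> Encodable for &'a [u8] { fn bytes(&self) -> usize { self.len() } }

fn append<E: Encodable>(e: &E) -> usize { e.bytes() }

fn main() {
	let v: Vec<u8> = vec![1, 2, 3]; // stand-in for an owned DBValue
	// &*v derefs the container to &[u8]; the outer & matches append's &E
	// with E = &[u8] -- the same shape as stream.append(&&*v) in the diff.
	assert_eq!(append(&&*v), 3);
}
```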
@@ -29,8 +29,7 @@ use engines::Engine;
 use ids::BlockID;
 use views::BlockView;

-use util::{Bytes, Hashable, HashDB, snappy, U256, Uint};
-use util::memorydb::MemoryDB;
+use util::{Bytes, Hashable, HashDB, DBValue, snappy, U256, Uint};
 use util::Mutex;
 use util::hash::{FixedHash, H256};
 use util::journaldb::{self, Algorithm, JournalDB};
@@ -38,6 +37,7 @@ use util::kvdb::Database;
 use util::trie::{TrieDB, TrieDBMut, Trie, TrieMut};
 use util::sha3::SHA3_NULL_RLP;
 use rlp::{RlpStream, Stream, UntrustedRlp, View};
+use bloom_journal::Bloom;

 use self::account::Account;
 use self::block::AbridgedBlock;
@@ -46,7 +46,7 @@ use self::io::SnapshotWriter;
 use super::state_db::StateDB;
 use super::state::Account as StateAccount;

-use crossbeam::{scope, ScopedJoinHandle};
+use crossbeam::scope;
 use rand::{Rng, OsRng};

 pub use self::error::Error;
@@ -368,7 +368,7 @@ pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter +
 	// account_key here is the address' hash.
 	for item in try!(account_trie.iter()) {
 		let (account_key, account_data) = try!(item);
-		let account = Account::from_thin_rlp(account_data);
+		let account = Account::from_thin_rlp(&*account_data);
 		let account_key_hash = H256::from_slice(&account_key);

 		let account_db = AccountDB::from_hash(db, account_key_hash);
@@ -390,6 +390,7 @@ pub struct StateRebuilder {
 	state_root: H256,
 	code_map: HashMap<H256, Bytes>, // maps code hashes to code itself.
 	missing_code: HashMap<H256, Vec<H256>>, // maps code hashes to lists of accounts missing that code.
+	bloom: Bloom,
 }

 impl StateRebuilder {
@@ -400,6 +401,7 @@ impl StateRebuilder {
 			state_root: SHA3_NULL_RLP,
 			code_map: HashMap::new(),
 			missing_code: HashMap::new(),
+			bloom: StateDB::load_bloom(&*db),
 		}
 	}

@@ -418,43 +420,19 @@ impl StateRebuilder {
 		// new code contained within this chunk.
 		let mut chunk_code = HashMap::new();

-		// build account tries in parallel.
-		// Todo [rob] keep a thread pool around so we don't do this per-chunk.
-		try!(scope(|scope| {
-			let mut handles = Vec::new();
 		for (account_chunk, out_pairs_chunk) in account_fat_rlps.chunks(chunk_size).zip(pairs.chunks_mut(chunk_size)) {
 			let code_map = &self.code_map;
-			let handle: ScopedJoinHandle<Result<_, ::error::Error>> = scope.spawn(move || {
-				let mut db = MemoryDB::new();
-				let status = try!(rebuild_accounts(&mut db, account_chunk, out_pairs_chunk, code_map));
-
-				trace!(target: "snapshot", "thread rebuilt {} account tries", account_chunk.len());
-				Ok((db, status))
-			});
-
-			handles.push(handle);
-		}
-
-		// consolidate all edits into the main overlay.
-		for handle in handles {
-			let (thread_db, status): (MemoryDB, _) = try!(handle.join());
-			self.db.consolidate(thread_db);
+			let status = try!(rebuild_accounts(self.db.as_hashdb_mut(), account_chunk, out_pairs_chunk, code_map));

 			chunk_code.extend(status.new_code);

 			for (addr_hash, code_hash) in status.missing_code {
 				self.missing_code.entry(code_hash).or_insert_with(Vec::new).push(addr_hash);
 			}
 		}

-		Ok::<_, ::error::Error>(())
-		}));
-
 		// patch up all missing code. must be done after collecting all new missing code entries.
 		for (code_hash, code) in chunk_code {
 			for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) {
 				let mut db = AccountDBMut::from_hash(self.db.as_hashdb_mut(), addr_hash);
-				db.emplace(code_hash, code.clone());
+				db.emplace(code_hash, DBValue::from_slice(&code));
 			}

 			self.code_map.insert(code_hash, code);
@@ -462,9 +440,6 @@ impl StateRebuilder {

 		let backing = self.db.backing().clone();

-		// bloom has to be updated
-		let mut bloom = StateDB::load_bloom(&backing);
-
 		// batch trie writes
 		{
 			let mut account_trie = if self.state_root != SHA3_NULL_RLP {
@@ -475,17 +450,17 @@ impl StateRebuilder {

 			for (hash, thin_rlp) in pairs {
 				if &thin_rlp[..] != &empty_rlp[..] {
-					bloom.set(&*hash);
+					self.bloom.set(&*hash);
 				}
 				try!(account_trie.insert(&hash, &thin_rlp));
 			}
 		}

-		let bloom_journal = bloom.drain_journal();
+		let bloom_journal = self.bloom.drain_journal();
 		let mut batch = backing.transaction();
 		try!(StateDB::commit_bloom(&mut batch, bloom_journal));
 		try!(self.db.inject(&mut batch));
-		try!(backing.write(batch).map_err(::util::UtilError::SimpleString));
+		backing.write_buffered(batch);
 		trace!(target: "snapshot", "current state root: {:?}", self.state_root);
 		Ok(())
 	}
@@ -628,7 +603,7 @@ impl BlockRebuilder {
 		} else {
 			self.chain.insert_unordered_block(&mut batch, &block_bytes, receipts, None, is_best, false);
 		}
-		self.db.write(batch).expect("Error writing to the DB");
+		self.db.write_buffered(batch);
 		self.chain.commit();

 		parent_hash = BlockView::new(&block_bytes).hash();
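The largest hunk above drops the crossbeam scoped-thread rebuild (per-thread `MemoryDB`s merged via `consolidate`) in favor of a single pass writing straight into the rebuilder's database, and hoists the account bloom into `StateRebuilder` so it persists across chunks instead of being reloaded per feed. A toy sketch of the new shape; every type here is a stand-in for the real `JournalDB`, `Bloom`, and `rebuild_accounts`:

```rust
use std::collections::HashMap;

// Stand-in bloom: remembers which account hashes were touched.
struct Bloom(HashMap<Vec<u8>, bool>);
impl Bloom {
	fn set(&mut self, key: &[u8]) { self.0.insert(key.to_vec(), true); }
}

struct StateRebuilder {
	db: HashMap<Vec<u8>, Vec<u8>>, // stand-in for the real overlay DB
	bloom: Bloom,                  // now a field, loaded once at construction
}

impl StateRebuilder {
	fn feed(&mut self, accounts: &[(Vec<u8>, Vec<u8>)]) {
		// One serial pass writing directly into self.db: no crossbeam scope,
		// no per-thread MemoryDBs, no consolidate() round-trip.
		for &(ref hash, ref thin_rlp) in accounts {
			self.bloom.set(hash);
			self.db.insert(hash.clone(), thin_rlp.clone());
		}
	}
}

fn main() {
	let mut r = StateRebuilder { db: HashMap::new(), bloom: Bloom(HashMap::new()) };
	r.feed(&[(b"acct".to_vec(), b"rlp".to_vec())]);
	assert_eq!(r.db.len(), 1);
}
```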
@@ -74,6 +74,7 @@ struct Restoration {
 	snappy_buffer: Bytes,
 	final_state_root: H256,
 	guard: Guard,
+	db: Arc<Database>,
 }

 struct RestorationParams<'a> {
@@ -105,12 +106,13 @@ impl Restoration {
 			manifest: manifest,
 			state_chunks_left: state_chunks,
 			block_chunks_left: block_chunks,
-			state: StateRebuilder::new(raw_db, params.pruning),
+			state: StateRebuilder::new(raw_db.clone(), params.pruning),
 			blocks: blocks,
 			writer: params.writer,
 			snappy_buffer: Vec::new(),
 			final_state_root: root,
 			guard: params.guard,
+			db: raw_db,
 		})
 	}

@@ -467,24 +469,25 @@ impl Service {
 	/// Feed a chunk of either kind. no-op if no restoration or status is wrong.
 	fn feed_chunk(&self, hash: H256, chunk: &[u8], is_state: bool) -> Result<(), Error> {
 		// TODO: be able to process block chunks and state chunks at same time?
+		let (result, db) = {
 		let mut restoration = self.restoration.lock();

 		match self.status() {
-			RestorationStatus::Inactive | RestorationStatus::Failed => Ok(()),
+			RestorationStatus::Inactive | RestorationStatus::Failed => return Ok(()),
 			RestorationStatus::Ongoing { .. } => {
-				let res = {
+				let (res, db) = {
 					let rest = match *restoration {
 						Some(ref mut r) => r,
 						None => return Ok(()),
 					};

-					match is_state {
+					(match is_state {
 						true => rest.feed_state(hash, chunk),
 						false => rest.feed_blocks(hash, chunk, &*self.engine),
-					}.map(|_| rest.is_done())
+					}.map(|_| rest.is_done()), rest.db.clone())
 				};

-				match res {
+				let res = match res {
 					Ok(is_done) => {
 						match is_state {
 							true => self.state_chunks.fetch_add(1, Ordering::SeqCst),
@@ -492,14 +495,21 @@ impl Service {
 						};

 						match is_done {
-							true => self.finalize_restoration(&mut *restoration),
+							true => {
+								try!(db.flush().map_err(::util::UtilError::SimpleString));
+								drop(db);
+								return self.finalize_restoration(&mut *restoration);
+							},
 							false => Ok(())
 						}
 					}
 					other => other.map(drop),
+				};
+				(res, db)
 			}
 		}
-	}
+		};
+		result.and_then(|_| db.flush().map_err(|e| ::util::UtilError::SimpleString(e).into()))
 	}

 	/// Feed a state chunk to be processed synchronously.
@@ -549,8 +559,9 @@ impl SnapshotService for Service {
 	}

 	fn begin_restore(&self, manifest: ManifestData) {
-		self.io_channel.send(ClientIoMessage::BeginRestoration(manifest))
-			.expect("snapshot service and io service are kept alive by client service; qed");
+		if let Err(e) = self.io_channel.send(ClientIoMessage::BeginRestoration(manifest)) {
+			trace!("Error sending snapshot service message: {:?}", e);
+		}
 	}

 	fn abort_restore(&self) {
@@ -559,13 +570,15 @@ impl SnapshotService for Service {
 	}

 	fn restore_state_chunk(&self, hash: H256, chunk: Bytes) {
-		self.io_channel.send(ClientIoMessage::FeedStateChunk(hash, chunk))
-			.expect("snapshot service and io service are kept alive by client service; qed");
+		if let Err(e) = self.io_channel.send(ClientIoMessage::FeedStateChunk(hash, chunk)) {
+			trace!("Error sending snapshot service message: {:?}", e);
+		}
 	}

 	fn restore_block_chunk(&self, hash: H256, chunk: Bytes) {
-		self.io_channel.send(ClientIoMessage::FeedBlockChunk(hash, chunk))
-			.expect("snapshot service and io service are kept alive by client service; qed");
+		if let Err(e) = self.io_channel.send(ClientIoMessage::FeedBlockChunk(hash, chunk)) {
+			trace!("Error sending snapshot service message: {:?}", e);
+		}
 	}
 }
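The `SnapshotService` methods stop panicking when the IO channel is gone and log instead. A sketch of the pattern using `std::sync::mpsc` in place of the real `IoChannel` (assumed only that its `send` returns a `Result`, and with `eprintln!` standing in for `trace!`):

```rust
use std::sync::mpsc::Sender;

fn begin_restore(channel: &Sender<String>, manifest: String) {
	// Before: .expect("snapshot service and io service are kept alive by
	// client service; qed") -- a panic if the receiver has been dropped.
	// After: degrade to a log line and carry on.
	if let Err(e) = channel.send(manifest) {
		eprintln!("Error sending snapshot service message: {:?}", e);
	}
}

fn main() {
	let (tx, rx) = std::sync::mpsc::channel();
	begin_restore(&tx, "manifest".to_owned()); // delivered
	drop(rx); // receiver gone: the next send logs instead of panicking
	begin_restore(&tx, "manifest".to_owned());
}
```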
@@ -21,6 +21,7 @@ use account_db::AccountDBMut;
 use rand::Rng;
 use snapshot::account::Account;

+use util::DBValue;
 use util::hash::{FixedHash, H256};
 use util::hashdb::HashDB;
 use util::trie::{Alphabet, StandardMap, SecTrieDBMut, TrieMut, ValueMode};
@@ -66,7 +67,7 @@ impl StateProducer {
 			let mut account = Account::from_thin_rlp(&*account_data);
 			let acct_db = AccountDBMut::from_hash(db, *address_hash);
 			fill_storage(acct_db, account.storage_root_mut(), &mut self.storage_seed);
-			*account_data = account.to_thin_rlp();
+			*account_data = DBValue::from_vec(account.to_thin_rlp());
 		}

 		// sweep again to alter account trie.
@@ -172,7 +172,7 @@ impl Account {
 			using it will not fail.");

 		let item: U256 = match db.get(key){
-			Ok(x) => x.map_or_else(U256::zero, decode),
+			Ok(x) => x.map_or_else(U256::zero, |v| decode(&*v)),
 			Err(e) => panic!("Encountered potential DB corruption: {}", e),
 		};
 		let value: H256 = item.into();
@@ -253,8 +253,8 @@ impl Account {
 		self.is_cached() ||
 			match db.get(&self.code_hash) {
 				Some(x) => {
-					self.code_cache = Arc::new(x.to_vec());
 					self.code_size = Some(x.len());
+					self.code_cache = Arc::new(x.to_vec());
 					true
 				},
 				_ => {
@@ -351,7 +351,7 @@ impl Account {
 				self.code_filth = Filth::Clean;
 			},
 			(true, false) => {
-				db.emplace(self.code_hash.clone(), (*self.code_cache).clone());
+				db.emplace(self.code_hash.clone(), DBValue::from_slice(&*self.code_cache));
 				self.code_size = Some(self.code_cache.len());
 				self.code_filth = Filth::Clean;
 			},
@@ -408,7 +408,7 @@ impl State {
 		// account is not found in the global cache, get from the DB and insert into local
 		let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
 		let maybe_acc = match db.get(address) {
-			Ok(acc) => acc.map(Account::from_rlp),
+			Ok(acc) => acc.map(|v| Account::from_rlp(&v)),
 			Err(e) => panic!("Potential DB corruption encountered: {}", e),
 		};
 		let r = maybe_acc.as_ref().map_or(H256::new(), |a| {
@@ -648,7 +648,7 @@ impl State {
 			// not found in the global cache, get from the DB and insert into local
 			let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
 			let mut maybe_acc = match db.get(a) {
-				Ok(acc) => acc.map(Account::from_rlp),
+				Ok(acc) => acc.map(|v| Account::from_rlp(&v)),
 				Err(e) => panic!("Potential DB corruption encountered: {}", e),
 			};
 			if let Some(ref mut account) = maybe_acc.as_mut() {
@@ -680,7 +680,7 @@ impl State {
 		let maybe_acc = if self.db.check_account_bloom(a) {
 			let db = self.factories.trie.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
 			let maybe_acc = match db.get(a) {
-				Ok(Some(acc)) => AccountEntry::new_clean(Some(Account::from_rlp(acc))),
+				Ok(Some(acc)) => AccountEntry::new_clean(Some(Account::from_rlp(&acc))),
 				Ok(None) => AccountEntry::new_clean(None),
 				Err(e) => panic!("Potential DB corruption encountered: {}", e),
 			};
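These hunks are mechanical fallout of the `DBValue` migration: `db.get` now yields an owned value, so the point-free `map(Account::from_rlp)` no longer type-checks and a borrowing closure is needed. A stand-in illustration:

```rust
// Stand-in for Account::from_rlp(&[u8]) -> Account; just counts bytes here.
fn from_rlp(bytes: &[u8]) -> usize { bytes.len() }

fn demo(maybe: Option<Vec<u8>>) -> Option<usize> {
	// Before, with Option<&[u8]>: maybe.map(from_rlp) worked point-free.
	// Now the owned value must be borrowed inside a closure.
	maybe.map(|v| from_rlp(&v))
}

fn main() {
	assert_eq!(demo(Some(vec![1, 2, 3])), Some(3));
}
```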
@@ -166,7 +166,9 @@ pub mod ecies {

 	/// Encrypt a message with a public key
 	pub fn encrypt(public: &Public, shared_mac: &[u8], plain: &[u8]) -> Result<Vec<u8>, Error> {
-		let r = Random.generate().unwrap();
+		let r = Random.generate()
+			.expect("context known to have key-generation capabilities; qed");
+
 		let z = try!(ecdh::agree(r.secret(), public));
 		let mut key = [0u8; 32];
 		let mut mkey = [0u8; 32];
@@ -201,7 +203,9 @@ pub mod ecies {

 	/// Encrypt a message with a public key
 	pub fn encrypt_single_message(public: &Public, plain: &[u8]) -> Result<Vec<u8>, Error> {
-		let r = Random.generate().unwrap();
+		let r = Random.generate()
+			.expect("context known to have key-generation capabilities");
+
 		let z = try!(ecdh::agree(r.secret(), public));
 		let mut key = [0u8; 32];
 		let mut mkey = [0u8; 32];
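The change swaps a bare `unwrap()` for `expect` with a message ending in `qed`, Parity's convention for stating why the failure is unreachable so the panic message doubles as a proof obligation for reviewers. A generic example of the convention (not Parity code):

```rust
fn first_byte(buf: &[u8]) -> u8 {
	// State the invariant, then cite it in the expect message.
	assert!(!buf.is_empty());
	*buf.first().expect("asserted non-empty just above; qed")
}

fn main() {
	assert_eq!(first_byte(&[7, 8]), 7);
}
```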
@@ -18,6 +18,7 @@ docopt = { version = "0.6", optional = true }
 time = "0.1.34"
 lazy_static = "0.2"
 itertools = "0.4"
+parking_lot = "0.3"
 ethcrypto = { path = "../ethcrypto" }

 [build-dependencies]
@@ -28,7 +28,9 @@ const IGNORED_FILES: &'static [&'static str] = &["thumbs.db", "address_book.json
 fn restrict_permissions_to_owner(file_path: &Path) -> Result<(), i32> {
 	use std::ffi;
 	use libc;
-	let cstr = ffi::CString::new(file_path.to_str().unwrap()).unwrap();
+
+	let cstr = try!(ffi::CString::new(&*file_path.to_string_lossy())
+		.map_err(|_| -1));
 	match unsafe { libc::chmod(cstr.as_ptr(), libc::S_IWUSR | libc::S_IRUSR) } {
 		0 => Ok(()),
 		x => Err(x),
@@ -63,15 +65,15 @@ impl DiskDirectory {
 		let paths = try!(fs::read_dir(&self.path))
 			.flat_map(Result::ok)
 			.filter(|entry| {
-				let metadata = entry.metadata();
+				let metadata = entry.metadata().ok();
 				let file_name = entry.file_name();
-				let name = file_name.to_str().unwrap();
+				let name = file_name.to_string_lossy();
 				// filter directories
-				metadata.is_ok() && !metadata.unwrap().is_dir() &&
+				metadata.map_or(false, |m| !m.is_dir()) &&
 				// hidden files
 				!name.starts_with(".") &&
 				// other ignored files
-				!IGNORED_FILES.contains(&name)
+				!IGNORED_FILES.contains(&&*name)
 			})
 			.map(|entry| entry.path())
 			.collect::<Vec<PathBuf>>();
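The listing filter now degrades gracefully instead of unwrapping on I/O errors or non-UTF-8 file names: `to_string_lossy` never fails, and `map_or(false, ...)` treats unreadable metadata as "skip". A self-contained sketch using the same `std` APIs (the `key_files` name is illustrative; `try!` kept to match the 2016-era code):

```rust
use std::fs;
use std::path::PathBuf;

const IGNORED_FILES: &'static [&'static str] = &["thumbs.db", "address_book.json"];

fn key_files(dir: &str) -> std::io::Result<Vec<PathBuf>> {
	let paths = try!(fs::read_dir(dir))
		.flat_map(Result::ok)
		.filter(|entry| {
			let metadata = entry.metadata().ok();   // no unwrap on I/O errors
			let file_name = entry.file_name();
			let name = file_name.to_string_lossy(); // no unwrap on non-UTF-8
			metadata.map_or(false, |m| !m.is_dir()) &&
				!name.starts_with(".") &&
				!IGNORED_FILES.contains(&&*name)
		})
		.map(|entry| entry.path())
		.collect::<Vec<PathBuf>>();
	Ok(paths)
}

fn main() {
	let _ = key_files(".");
}
```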
@ -15,7 +15,6 @@
|
|||||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
use std::collections::BTreeMap;
|
use std::collections::BTreeMap;
|
||||||
use std::sync::RwLock;
|
|
||||||
use std::mem;
|
use std::mem;
|
||||||
use ethkey::KeyPair;
|
use ethkey::KeyPair;
|
||||||
use crypto::KEY_ITERATIONS;
|
use crypto::KEY_ITERATIONS;
|
||||||
@ -26,6 +25,7 @@ use account::SafeAccount;
|
|||||||
use {Error, SecretStore};
|
use {Error, SecretStore};
|
||||||
use json;
|
use json;
|
||||||
use json::UUID;
|
use json::UUID;
|
||||||
|
use parking_lot::RwLock;
|
||||||
use presale::PresaleWallet;
|
use presale::PresaleWallet;
|
||||||
use import;
|
use import;
|
||||||
|
|
||||||
@ -56,13 +56,13 @@ impl EthStore {
|
|||||||
let account = try!(self.dir.insert(account.clone()));
|
let account = try!(self.dir.insert(account.clone()));
|
||||||
|
|
||||||
// update cache
|
// update cache
|
||||||
let mut cache = self.cache.write().unwrap();
|
let mut cache = self.cache.write();
|
||||||
cache.insert(account.address.clone(), account);
|
cache.insert(account.address.clone(), account);
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn reload_accounts(&self) -> Result<(), Error> {
|
fn reload_accounts(&self) -> Result<(), Error> {
|
||||||
let mut cache = self.cache.write().unwrap();
|
let mut cache = self.cache.write();
|
||||||
let accounts = try!(self.dir.load());
|
let accounts = try!(self.dir.load());
|
||||||
let new_accounts: BTreeMap<_, _> = accounts.into_iter().map(|account| (account.address.clone(), account)).collect();
|
let new_accounts: BTreeMap<_, _> = accounts.into_iter().map(|account| (account.address.clone(), account)).collect();
|
||||||
mem::replace(&mut *cache, new_accounts);
|
mem::replace(&mut *cache, new_accounts);
|
||||||
@ -71,13 +71,13 @@ impl EthStore {
|
|||||||
|
|
||||||
fn get(&self, address: &Address) -> Result<SafeAccount, Error> {
|
fn get(&self, address: &Address) -> Result<SafeAccount, Error> {
|
||||||
{
|
{
|
||||||
let cache = self.cache.read().unwrap();
|
let cache = self.cache.read();
|
||||||
if let Some(account) = cache.get(address) {
|
if let Some(account) = cache.get(address) {
|
||||||
return Ok(account.clone())
|
return Ok(account.clone())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
try!(self.reload_accounts());
|
try!(self.reload_accounts());
|
||||||
let cache = self.cache.read().unwrap();
|
let cache = self.cache.read();
|
||||||
cache.get(address).cloned().ok_or(Error::InvalidAccount)
|
cache.get(address).cloned().ok_or(Error::InvalidAccount)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -111,7 +111,7 @@ impl SecretStore for EthStore {

 	fn accounts(&self) -> Result<Vec<Address>, Error> {
 		try!(self.reload_accounts());
-		Ok(self.cache.read().unwrap().keys().cloned().collect())
+		Ok(self.cache.read().keys().cloned().collect())
 	}

 	fn change_password(&self, address: &Address, old_password: &str, new_password: &str) -> Result<(), Error> {
@@ -131,7 +131,7 @@ impl SecretStore for EthStore {

 		if can_remove {
 			try!(self.dir.remove(address));
-			let mut cache = self.cache.write().unwrap();
+			let mut cache = self.cache.write();
 			cache.remove(address);
 			Ok(())
 		} else {
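
Every `.unwrap()` on the cache lock disappears because `parking_lot::RwLock` has no lock poisoning: its `read()`/`write()` return the guard directly rather than a `Result`. A minimal sketch of the difference, assuming the `parking_lot` crate is on the dependency list:

    extern crate parking_lot;

    use std::collections::BTreeMap;
    use parking_lot::RwLock;

    fn main() {
        let cache: RwLock<BTreeMap<String, u32>> = RwLock::new(BTreeMap::new());

        // std::sync::RwLock would require `cache.write().unwrap()` here,
        // since a panic while the lock is held poisons it; parking_lot
        // drops poisoning entirely and hands back the guard.
        cache.write().insert("address".to_owned(), 1);

        let guard = cache.read();
        assert_eq!(guard.get("address"), Some(&1));
    }
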
@@ -26,12 +26,15 @@ extern crate serde_json;
 extern crate rustc_serialize;
 extern crate crypto as rcrypto;
 extern crate tiny_keccak;
-#[macro_use]
-extern crate lazy_static;
+extern crate parking_lot;

 // reexport it nicely
 extern crate ethkey as _ethkey;
 extern crate ethcrypto as crypto;

+#[macro_use]
+extern crate lazy_static;
+
 pub mod dir;
 pub mod ethkey;
@@ -33,7 +33,8 @@ impl From<json::PresaleWallet> for PresaleWallet {
 impl PresaleWallet {
 	pub fn open<P>(path: P) -> Result<Self, Error> where P: AsRef<Path> {
 		let file = try!(fs::File::open(path));
-		let presale = json::PresaleWallet::load(file).unwrap();
+		let presale = try!(json::PresaleWallet::load(file)
+			.map_err(|e| Error::InvalidKeyFile(format!("{}", e))));
 		Ok(PresaleWallet::from(presale))
 	}
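
Replacing `unwrap()` with `try!(... .map_err(...))` turns a malformed wallet file into a recoverable error for the caller instead of a process abort. The shape of that conversion, sketched with stand-in types (the error variant and parser below are illustrative, not the ethstore definitions):

    #[derive(Debug)]
    enum Error {
        // Stand-in for an InvalidKeyFile-style variant.
        InvalidKeyFile(String),
    }

    // Stand-in parser: anything that fails with a displayable error.
    fn parse_wallet(json: &str) -> Result<u64, std::num::ParseIntError> {
        json.trim().parse()
    }

    fn open(json: &str) -> Result<u64, Error> {
        // try! + map_err: propagate the failure as a domain error rather
        // than unwrapping and panicking on bad input.
        let presale = try!(parse_wallet(json)
            .map_err(|e| Error::InvalidKeyFile(format!("{}", e))));
        Ok(presale)
    }

    fn main() {
        assert_eq!(open("42").unwrap(), 42);
        assert!(open("not-a-wallet").is_err());
    }
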
js/.gitignore (vendored)
@@ -3,4 +3,5 @@ npm-debug.log
 build
 .build
 .coverage
+.dist
 .happypack
@@ -24,17 +24,38 @@ var express = require('express');
 var proxy = require('http-proxy-middleware');

 var app = express();
+var wsProxy = proxy('ws://127.0.0.1:8180', { changeOrigin: true });

-app.use(express.static('build'));
+app.use(express.static('.build'));

 app.use('/api/*', proxy({
   target: 'http://127.0.0.1:8080',
   changeOrigin: true
 }));

+app.use('/app/*', proxy({
+  target: 'http://127.0.0.1:8080',
+  changeOrigin: true,
+  pathRewrite: {
+    '^/app': ''
+  }
+}));
+
+app.use('/parity-utils/*', proxy({
+  target: 'http://127.0.0.1:3000',
+  changeOrigin: true,
+  pathRewrite: {
+    '^/parity-utils': ''
+  }
+}));
+
 app.use('/rpc/*', proxy({
   target: 'http://127.0.0.1:8080',
   changeOrigin: true
 }));

-app.listen(3000);
+app.use(wsProxy);
+
+var server = app.listen(3000);
+
+server.on('upgrade', wsProxy.upgrade);
@@ -23,17 +23,22 @@
   "Promise"
 ],
 "scripts": {
-  "build": "npm run build:dll && npm run build:app",
+  "build": "npm run build:dll && npm run build:app && npm run build:lib",
   "build:app": "webpack --progress",
-  "build:dll": "webpack --config webpack.vendor.js --progress",
-  "ci:build": "npm run ci:build:dll && npm run ci:build:app",
+  "build:lib": "webpack --config webpack.libraries --progress",
+  "build:dll": "webpack --config webpack.vendor --progress",
+  "ci:build": "npm run ci:build:dll && npm run ci:build:app && npm run ci:build:lib",
   "ci:build:app": "NODE_ENV=production webpack",
-  "ci:build:dll": "NODE_ENV=production webpack --config webpack.vendor.js",
+  "ci:build:lib": "NODE_ENV=production webpack --config webpack.libraries",
+  "ci:build:dll": "NODE_ENV=production webpack --config webpack.vendor",
+  "start": "npm install && npm run build:dll && npm run start:app",
+  "start:app": "webpack-dev-server -d --history-api-fallback --open --hot --inline --progress --colors --port 3000",
   "clean": "rm -rf ./build ./coverage",
   "coveralls": "npm run testCoverage && coveralls < coverage/lcov.info",
   "lint": "eslint --ignore-path .gitignore ./src/",
-  "start": "npm install && npm run build:dll && npm run start:app",
-  "start:app": "webpack-dev-server -d --history-api-fallback --open --hot --inline --progress --colors --port 3000",
   "test": "mocha 'src/**/*.spec.js'",
   "test:coverage": "istanbul cover _mocha -- 'src/**/*.spec.js'",
   "test:e2e": "mocha 'src/**/*.e2e.js'"
@@ -6,8 +6,15 @@ cd ..

 # run build (production) and store the exit code
 EXITCODE=0
-rm -rf .build
-npm run ci:build || EXITCODE=1
+BUILDDIR=./.dist
+rm -rf $BUILDDIR
+mkdir -p $BUILDDIR/src
+BUILD_DEST=$BUILDDIR/build npm run ci:build || EXITCODE=1
+
+# Copy rust files
+cp Cargo.precompiled.toml $BUILDDIR/Cargo.toml
+cp build.rs $BUILDDIR
+cp src/lib.rs* $BUILDDIR/src

 # back to root
 popd
@@ -13,21 +13,11 @@ function setup_git_user {
 BASEDIR=`dirname $0`
 GITLOG=./.git/gitcommand.log
 pushd $BASEDIR
-cd ../.build
+cd ../.dist

 # variables
 UTCDATE=`date -u "+%Y%m%d-%H%M%S"`

-# Create proper directory structure
-mkdir -p build
-mv *.* build
-mkdir -p src
-
-# Copy rust files
-cp ../Cargo.precompiled.toml Cargo.toml
-cp ../build.rs .
-cp ../src/lib.rs* ./src/
-
 # init git
 rm -rf ./.git
 git init
@@ -40,21 +30,23 @@ git checkout -b $CI_BUILD_REF_NAME
 git add .
 git commit -m "$UTCDATE [compiled]"
 git merge origin/$CI_BUILD_REF_NAME -X ours --commit -m "$UTCDATE [release]"
-git push origin $CI_BUILD_REF_NAME 2>$GITLOG
+git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG

 # back to root
 popd

-# bump js-precompiled
-cargo update -p parity-ui-precompiled
-
-# add to git and push
+# init git with right origin
 setup_git_user
 git remote set-url origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/parity.git
-git fetch origin 2>$GITLOG
+
+# at this point we have a detached head on GitLab, reset
+git reset --hard origin/$CI_BUILD_REF_NAME 2>$GITLOG
+
+# bump js-precompiled, add, commit & push
+cargo update -p parity-ui-precompiled
 git add . || true
-git commit -m "[ci skip] js-precompiled $UTCDATE" || true
-git push origin $CI_BUILD_REF_NAME 2>$GITLOG || true
+git commit -m "[ci skip] js-precompiled $UTCDATE"
+git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG

 # exit with exit code
 exit 0
@@ -173,12 +173,12 @@ describe('api/contract/Contract', () => {
 expect(log.event).to.equal('Message');
 expect(log.address).to.equal('0x22bff18ec62281850546a664bb63a5c06ac5f76c');
 expect(log.params).to.deep.equal({
-  at: new BigNumber('1457965151'),
-  message: 'post(message)',
-  messageId: new BigNumber('281474976731085'),
-  parentId: new BigNumber(0),
-  postId: new BigNumber('281474976731104'),
-  sender: '0x63Cf90D3f0410092FC0fca41846f596223979195'
+  at: { type: 'uint', value: new BigNumber('1457965151') },
+  message: { type: 'string', value: 'post(message)' },
+  messageId: { type: 'uint', value: new BigNumber('281474976731085') },
+  parentId: { type: 'uint', value: new BigNumber(0) },
+  postId: { type: 'uint', value: new BigNumber('281474976731104') },
+  sender: { type: 'address', value: '0x63Cf90D3f0410092FC0fca41846f596223979195' }
 });
 });
 });
@@ -464,12 +464,12 @@ describe('api/contract/Contract', () => {
 event: 'Message',
 logIndex: new BigNumber(0),
 params: {
-  at: new BigNumber(1457965151),
-  message: 'post(message)',
-  messageId: new BigNumber(281474976731085),
-  parentId: new BigNumber(0),
-  postId: new BigNumber(281474976731104),
-  sender: '0x63Cf90D3f0410092FC0fca41846f596223979195'
+  at: { type: 'uint', value: new BigNumber(1457965151) },
+  message: { type: 'string', value: 'post(message)' },
+  messageId: { type: 'uint', value: new BigNumber(281474976731085) },
+  parentId: { type: 'uint', value: new BigNumber(0) },
+  postId: { type: 'uint', value: new BigNumber(281474976731104) },
+  sender: { type: 'address', value: '0x63Cf90D3f0410092FC0fca41846f596223979195' }
 },
 topics: [
   '0x954ba6c157daf8a26539574ffa64203c044691aa57251af95f4b48d85ec00dd5', '0x0000000000000000000000000000000000000000000000000001000000004fe0'
@@ -93,6 +93,12 @@ export default class Ws extends JsonRpcBase {
 }

 _onMessage = (event) => {
+  // Event sent by Signer Broadcaster
+  if (event.data === 'new_message') {
+    return false;
+  }
+
+  try {
   const result = JSON.parse(event.data);
   const { method, params, json, resolve, reject } = this._messages[result.id];

@@ -108,6 +114,9 @@ export default class Ws extends JsonRpcBase {

   resolve(result.result);
   delete this._messages[result.id];
+  } catch (e) {
+    console.error('ws::_onMessage', event.data, e);
+  }
 }

 _send = (id) => {
@@ -10,7 +10,7 @@
 <div id="container"></div>
 <script src="vendor.js"></script>
 <script src="commons.js"></script>
-<script src="parity.js"></script>
+<script src="/parity-utils/parity.js"></script>
 <script src="basiccoin.js"></script>
 </body>
 </html>
@@ -10,7 +10,7 @@
 <div id="container"></div>
 <script src="vendor.js"></script>
 <script src="commons.js"></script>
-<script src="parity.js"></script>
+<script src="/parity-utils/parity.js"></script>
 <script src="gavcoin.js"></script>
 </body>
 </html>
@@ -24,7 +24,7 @@ export default class Loading extends Component {
 render () {
   return (
     <div className={ styles.loading }>
-      <CircularProgress size={ 2 } />
+      <CircularProgress size={ 120 } thickness={ 7 } />
     </div>
   );
 }
@@ -10,7 +10,7 @@
 <div id="container"></div>
 <script src="vendor.js"></script>
 <script src="commons.js"></script>
-<script src="parity.js"></script>
+<script src="/parity-utils/parity.js"></script>
 <script src="githubhint.js"></script>
 </body>
 </html>
@@ -10,7 +10,7 @@
 <div id="container"></div>
 <script src="vendor.js"></script>
 <script src="commons.js"></script>
-<script src="parity.js"></script>
+<script src="/parity-utils/parity.js"></script>
 <script src="registry.js"></script>
 </body>
 </html>
@@ -79,7 +79,7 @@ export default class Application extends Component {
   </p>
   </div>
 ) : (
-  <CircularProgress size={ 1 } />
+  <CircularProgress size={ 60 } />
 ) }
 </div>
 );
@@ -10,7 +10,7 @@
 <div id="container"></div>
 <script src="vendor.js"></script>
 <script src="commons.js"></script>
-<script src="parity.js"></script>
+<script src="/parity-utils/parity.js"></script>
 <script src="signaturereg.js"></script>
 </body>
 </html>
@@ -10,7 +10,7 @@
 <div id="container"></div>
 <script src="vendor.js"></script>
 <script src="commons.js"></script>
-<script src="parity.js"></script>
+<script src="/parity-utils/parity.js"></script>
 <script src="tokenreg.js"></script>
 </body>
 </html>
@@ -25,9 +25,11 @@ export default class Loading extends Component {
 };

 render () {
+  const size = (this.props.size || 2) * 60;
+
   return (
     <div className={ styles.loading }>
-      <CircularProgress size={ this.props.size || 2 } />
+      <CircularProgress size={ size } />
     </div>
   );
 }
@@ -7,6 +7,6 @@
 <title>dev::Parity.js</title>
 </head>
 <body>
-<script src="parity.js"></script>
+<script src="/parity-utils/parity.js"></script>
 </body>
 </html>
@@ -7,6 +7,6 @@
 <title>dev::Web3</title>
 </head>
 <body>
-<script src="inject.js"></script>
+<script src="/parity-utils/web3.js"></script>
 </body>
 </html>
@@ -19,7 +19,13 @@

 import './tests';

-const parityNode = process.env.NODE_ENV === 'production' ? 'http://127.0.0.1:8080' : '';
+const parityNode = (
+  process.env.PARITY_URL && `http://${process.env.PARITY_URL}`
+) || (
+  process.env.NODE_ENV === 'production'
+    ? 'http://127.0.0.1:8080'
+    : ''
+);

 export {
   parityNode
@@ -137,7 +137,7 @@ export default class EditMeta extends Component {

 onTagsInputChange = (value) => {
   const { meta } = this.state;
-  const { tags } = meta || [];
+  const { tags = [] } = meta;

   const tokens = value.split(/[\s,;]+/);
@@ -15,15 +15,17 @@
 /* along with Parity. If not, see <http://www.gnu.org/licenses/>.
 */
 .container {
+  flex: 1;
   padding: 0em;
+  background: rgba(0, 0, 0, 0.8) !important;
 }

 .compact,
 .padded {
-  background: rgba(0, 0, 0, 0.8) !important;
   border-radius: 0 !important;
   position: relative;
   overflow: auto;
+  background-color: transparent !important;
 }

 .compact {
@@ -39,8 +39,8 @@ export default class Summary extends Component {
 }

 const url = `/app/${app.builtin ? 'global' : 'local'}/${app.url || app.id}`;
-const image = app.image
-  ? <img src={ app.image } className={ styles.image } />
+const image = app.image || app.iconUrl
+  ? <img src={ app.image || `http://127.0.0.1:8080/${app.id}/${app.iconUrl}` } className={ styles.image } />
   : <div className={ styles.image }> </div>;

 return (
@@ -43,7 +43,7 @@
 .expanded {
   right: 16px;
   width: 964px;
-  height: 288px;
+  height: 300px;
   border-radius: 4px 4px 0 0;
   overflow-y: auto;
   display: flex;
@@ -53,6 +53,7 @@
 .expanded .content {
   flex: 1;
   overflow: auto;
+  display: flex;
 }

 .corner {
@@ -80,7 +80,7 @@ export default class TransactionFinished extends Component {
 if (!chain || !fromBalance || !toBalance) {
   return (
     <div className={ `${styles.container} ${className}` }>
-      <CircularProgress size={ 1 } />
+      <CircularProgress size={ 60 } />
     </div>
   );
 }
@@ -81,7 +81,7 @@ export default class TransactionPending extends Component {
 if (!this.state.chain) {
   return (
     <div className={ `${styles.container} ${className}` }>
-      <CircularProgress size={ 1 } />
+      <CircularProgress size={ 60 } />
     </div>
   );
 }
@@ -41,7 +41,8 @@ module.exports = {
 'signaturereg': ['./dapps/signaturereg.js'],
 'tokenreg': ['./dapps/tokenreg.js'],
 // library
-'inject': ['./inject.js'],
+'inject': ['./web3.js'],
+'web3': ['./web3.js'],
 'parity': ['./parity.js'],
 // app
 'index': ['./index.js']
@@ -136,10 +137,6 @@ module.exports = {
     'babel?cacheDirectory=true'
   ]
 }),
-new webpack.DllReferencePlugin({
-  context: '.',
-  manifest: require(`./${DEST}/vendor-manifest.json`)
-}),
 new CopyWebpackPlugin([{ from: './error_pages.css', to: 'styles.css' }], {}),
 new WebpackErrorNotificationPlugin(),
 new webpack.DefinePlugin({
@@ -149,6 +146,11 @@ module.exports = {
     PARITY_URL: JSON.stringify(process.env.PARITY_URL),
     LOGGING: JSON.stringify(!isProd)
   }
+}),
+
+new webpack.DllReferencePlugin({
+  context: '.',
+  manifest: require(`./${DEST}/vendor-manifest.json`)
 })
 ];
@@ -164,16 +166,10 @@ module.exports = {
 if (isProd) {
   plugins.push(
     new webpack.optimize.CommonsChunkPlugin({
-      chunks: [ 'index' ],
+      chunks: ['index'],
       name: 'commons'
     })
   );
-  plugins.push(
-    new webpack.optimize.CommonsChunkPlugin({
-      chunks: [ 'parity' ],
-      name: 'parity'
-    })
-  );

   plugins.push(new webpack.optimize.OccurrenceOrderPlugin(false));
   plugins.push(new webpack.optimize.DedupePlugin());
@@ -208,8 +204,11 @@ module.exports = {
   }
 },
 '/parity-utils/*': {
-  target: 'http://127.0.0.1:8080',
-  changeOrigin: true
+  target: 'http://127.0.0.1:3000',
+  changeOrigin: true,
+  pathRewrite: {
+    '^/parity-utils': ''
+  }
 },
 '/rpc/*': {
   target: 'http://localhost:8080',
js/webpack.libraries.js (new file)
@@ -0,0 +1,89 @@
+// Copyright 2015, 2016 Ethcore (UK) Ltd.
+// This file is part of Parity.
+
+// Parity is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// Parity is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with Parity. If not, see <http://www.gnu.org/licenses/>.
+
+// Run with `webpack --config webpack.libraries.js --progress`
+
+const HappyPack = require('happypack');
+const path = require('path');
+const webpack = require('webpack');
+
+const ENV = process.env.NODE_ENV || 'development';
+const isProd = ENV === 'production';
+const DEST = process.env.BUILD_DEST || '.build';
+
+module.exports = {
+  context: path.join(__dirname, './src'),
+  entry: {
+    // library
+    'inject': ['./web3.js'],
+    'web3': ['./web3.js'],
+    'parity': ['./parity.js']
+  },
+  output: {
+    path: path.join(__dirname, DEST),
+    filename: '[name].js'
+  },
+  module: {
+    loaders: [
+      {
+        test: /\.js$/,
+        exclude: /node_modules/,
+        loader: 'happypack/loader?id=js'
+      },
+      {
+        test: /\.json$/,
+        loaders: ['json']
+      },
+      {
+        test: /\.html$/,
+        loader: 'file?name=[name].[ext]'
+      }
+    ]
+  },
+  plugins: (function () {
+    const plugins = [
+      new HappyPack({
+        id: 'js',
+        threads: 4,
+        loaders: [ 'babel' ]
+      }),
+      new webpack.DefinePlugin({
+        'process.env': {
+          NODE_ENV: JSON.stringify(ENV),
+          RPC_ADDRESS: JSON.stringify(process.env.RPC_ADDRESS),
+          PARITY_URL: JSON.stringify(process.env.PARITY_URL),
+          LOGGING: JSON.stringify(!isProd)
+        }
+      })
+    ];
+
+    if (isProd) {
+      plugins.push(new webpack.optimize.OccurrenceOrderPlugin(false));
+      plugins.push(new webpack.optimize.DedupePlugin());
+      plugins.push(new webpack.optimize.UglifyJsPlugin({
+        screwIe8: true,
+        compress: {
+          warnings: false
+        },
+        output: {
+          comments: false
+        }
+      }));
+    }
+
+    return plugins;
+  }())
+};
@@ -43,6 +43,7 @@ pub struct Informant {
 	net: Option<Arc<ManageNetwork>>,
 	last_import: Mutex<Instant>,
 	skipped: AtomicUsize,
+	skipped_txs: AtomicUsize,
 }

 /// Format byte counts to standard denominations.
@@ -80,6 +81,7 @@ impl Informant {
 	net: net,
 	last_import: Mutex::new(Instant::now()),
 	skipped: AtomicUsize::new(0),
+	skipped_txs: AtomicUsize::new(0),
 	}
 }

@@ -178,13 +180,21 @@ impl ChainNotify for Informant {
 	let mut last_import = self.last_import.lock();
 	let sync_state = self.sync.as_ref().map(|s| s.status().state);
 	let importing = is_major_importing(sync_state, self.client.queue_info());
-	if Instant::now() > *last_import + Duration::from_secs(1) && !importing {
+
+	let ripe = Instant::now() > *last_import + Duration::from_secs(1) && !importing;
+	let txs_imported = imported.iter()
+		.take(imported.len() - if ripe {1} else {0})
+		.filter_map(|h| self.client.block(BlockID::Hash(h.clone())))
+		.map(|b| BlockView::new(&b).transactions_count())
+		.sum();
+
+	if ripe {
 		if let Some(block) = imported.last().and_then(|h| self.client.block(BlockID::Hash(*h))) {
 			let view = BlockView::new(&block);
 			let header = view.header();
 			let tx_count = view.transactions_count();
 			let size = block.len();
-			let skipped = self.skipped.load(AtomicOrdering::Relaxed);
+			let (skipped, skipped_txs) = (self.skipped.load(AtomicOrdering::Relaxed) + imported.len() - 1, self.skipped_txs.load(AtomicOrdering::Relaxed) + txs_imported);
 			info!(target: "import", "Imported {} {} ({} txs, {} Mgas, {} ms, {} KiB){}",
 				Colour::White.bold().paint(format!("#{}", header.number())),
 				Colour::White.bold().paint(format!("{}", header.hash())),
@@ -192,13 +202,22 @@ impl ChainNotify for Informant {
 				Colour::Yellow.bold().paint(format!("{:.2}", header.gas_used().low_u64() as f32 / 1000000f32)),
 				Colour::Purple.bold().paint(format!("{:.2}", duration as f32 / 1000000f32)),
 				Colour::Blue.bold().paint(format!("{:.2}", size as f32 / 1024f32)),
-				if skipped > 0 { format!(" + another {} block(s)", Colour::Red.bold().paint(format!("{}", skipped))) } else { String::new() }
+				if skipped > 0 {
+					format!(" + another {} block(s) containing {} tx(s)",
+						Colour::Red.bold().paint(format!("{}", skipped)),
+						Colour::Red.bold().paint(format!("{}", skipped_txs))
+					)
+				} else {
+					String::new()
+				}
 			);
+			self.skipped.store(0, AtomicOrdering::Relaxed);
+			self.skipped_txs.store(0, AtomicOrdering::Relaxed);
 			*last_import = Instant::now();
 		}
-		self.skipped.store(0, AtomicOrdering::Relaxed);
 	} else {
 		self.skipped.fetch_add(imported.len(), AtomicOrdering::Relaxed);
+		self.skipped_txs.fetch_add(txs_imported, AtomicOrdering::Relaxed);
 	}
 }
 }
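
The informant now tracks skipped transactions alongside skipped blocks, so the throttled log line can report both. The counter discipline in isolation (field names follow the diff; everything else is stubbed):

    use std::sync::atomic::{AtomicUsize, Ordering};

    struct Informant {
        skipped: AtomicUsize,
        skipped_txs: AtomicUsize,
    }

    impl Informant {
        // Import notifications that arrive within the 1s throttle window
        // are deferred: both counters accumulate.
        fn defer(&self, blocks: usize, txs: usize) {
            self.skipped.fetch_add(blocks, Ordering::Relaxed);
            self.skipped_txs.fetch_add(txs, Ordering::Relaxed);
        }

        // When a line is finally printed, both counters are read and reset.
        // The load/store pair is not one atomic step, which is acceptable
        // for log output.
        fn drain(&self) -> (usize, usize) {
            let snapshot = (
                self.skipped.load(Ordering::Relaxed),
                self.skipped_txs.load(Ordering::Relaxed),
            );
            self.skipped.store(0, Ordering::Relaxed);
            self.skipped_txs.store(0, Ordering::Relaxed);
            snapshot
        }
    }

    fn main() {
        let informant = Informant {
            skipped: AtomicUsize::new(0),
            skipped_txs: AtomicUsize::new(0),
        };
        informant.defer(3, 17);
        assert_eq!(informant.drain(), (3, 17));
    }
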
scripts/deb-build.sh (new file)
@@ -0,0 +1,31 @@
+#!/bin/bash
+
+set -e # fail on any error
+set -u # treat unset variables as error
+rm -rf deb
+#create DEBIAN files
+mkdir -p deb/usr/bin/
+mkdir -p deb/DEBIAN
+#create copyright, docs, compat
+cp LICENSE deb/DEBIAN/copyright
+echo "https://github.com/ethcore/parity/wiki" >> deb/DEBIAN/docs
+echo "8" >> deb/DEBIAN/compat
+#create control file
+control=deb/DEBIAN/control
+echo "Package: parity" >> $control
+version=`grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n"`
+echo "Version: $version" >> $control
+echo "Source: parity" >> $control
+echo "Section: science" >> $control
+echo "Priority: extra" >> $control
+echo "Maintainer: Ethcore <devops@ethcore.io>" >> $control
+echo "Build-Depends: debhelper (>=9)" >> $control
+echo "Standards-Version: 3.9.5" >> $control
+echo "Homepage: https://ethcore.io" >> $control
+echo "Vcs-Git: git://github.com/ethcore/parity.git" >> $control
+echo "Vcs-Browser: https://github.com/ethcore/parity" >> $control
+echo "Architecture: $1" >> $control
+echo "Description: Ethereum network client by Ethcore" >> $control
+#build .deb package
+
+exit
@@ -1253,7 +1253,12 @@ impl ChainSync {
 	}
 	peer.asking = asking;
 	peer.ask_time = time::precise_time_s();
-	if let Err(e) = sync.send(peer_id, packet_id, packet) {
+	let result = if packet_id >= ETH_PACKET_COUNT {
+		sync.send_protocol(WARP_SYNC_PROTOCOL_ID, peer_id, packet_id, packet)
+	} else {
+		sync.send(peer_id, packet_id, packet)
+	};
+	if let Err(e) = result {
 		debug!(target:"sync", "Error sending request: {:?}", e);
 		sync.disable_peer(peer_id);
 	}
@@ -1270,8 +1275,9 @@ impl ChainSync {

 	/// Called when peer sends us new transactions
 	fn on_peer_transactions(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
-		// accepting transactions once only fully synced
-		if !io.is_chain_queue_empty() {
+		// Accept transactions only when fully synced
+		if !io.is_chain_queue_empty() || (self.state != SyncState::Idle && self.state != SyncState::NewBlocks) {
+			trace!(target: "sync", "{} Ignoring transactions while syncing", peer_id);
 			return Ok(());
 		}
 		if !self.peers.get(&peer_id).map_or(false, |p| p.can_sync()) {
@@ -1570,7 +1576,7 @@ impl ChainSync {
 		SNAPSHOT_MANIFEST_PACKET => self.on_snapshot_manifest(io, peer, &rlp),
 		SNAPSHOT_DATA_PACKET => self.on_snapshot_data(io, peer, &rlp),
 		_ => {
-			debug!(target: "sync", "Unknown packet {}", packet_id);
+			debug!(target: "sync", "{}: Unknown packet {}", peer, packet_id);
 			Ok(())
 		}
 	};
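
The request path now chooses a wire protocol from the packet id: ids beyond the eth subprotocol's range are routed over the warp-sync protocol. The dispatch rule in isolation (the constant values here are illustrative; the real ones live in the sync module):

    // Illustrative constants; the sync module defines the real values.
    const ETH_PACKET_COUNT: u8 = 0x11;
    const ETH_PROTOCOL: &'static str = "eth";
    const WARP_SYNC_PROTOCOL_ID: &'static str = "par";

    fn protocol_for(packet_id: u8) -> &'static str {
        // Packet ids past the eth range belong to the warp subprotocol.
        if packet_id >= ETH_PACKET_COUNT {
            WARP_SYNC_PROTOCOL_ID
        } else {
            ETH_PROTOCOL
        }
    }

    fn main() {
        assert_eq!(protocol_for(0x03), "eth");
        assert_eq!(protocol_for(0x11), "par");
    }
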
@@ -34,6 +34,8 @@ pub trait SyncIo {
 	fn respond(&mut self, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError>;
 	/// Send a packet to a peer.
 	fn send(&mut self, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError>;
+	/// Send a packet to a peer using specified protocol.
+	fn send_protocol(&mut self, protocol: ProtocolId, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError>;
 	/// Get the blockchain
 	fn chain(&self) -> &BlockChainClient;
 	/// Get the snapshot service.
@@ -98,6 +100,10 @@ impl<'s, 'h> SyncIo for NetSyncIo<'s, 'h> {
 		self.network.send(peer_id, packet_id, data)
 	}

+	fn send_protocol(&mut self, protocol: ProtocolId, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError> {
+		self.network.send_protocol(protocol, peer_id, packet_id, data)
+	}
+
 	fn chain(&self) -> &BlockChainClient {
 		self.chain
 	}
@@ -78,6 +78,10 @@ impl<'p> SyncIo for TestIo<'p> {
 		Ok(())
 	}

+	fn send_protocol(&mut self, _protocol: ProtocolId, peer_id: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError> {
+		self.send(peer_id, packet_id, data)
+	}
+
 	fn chain(&self) -> &BlockChainClient {
 		self.chain
 	}
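
Implementors of the widened trait don't have to care about multiple protocols: the test harness funnels `send_protocol` straight back into `send`, as the `TestIo` impl above does. A stripped-down sketch of that shape (types simplified to keep it self-contained):

    type PeerId = usize;
    type ProtocolId = &'static str;

    trait SyncIo {
        fn send(&mut self, peer: PeerId, data: Vec<u8>) -> Result<(), String>;
        fn send_protocol(&mut self, protocol: ProtocolId, peer: PeerId, data: Vec<u8>) -> Result<(), String>;
    }

    struct TestIo {
        sent: Vec<(PeerId, Vec<u8>)>,
    }

    impl SyncIo for TestIo {
        fn send(&mut self, peer: PeerId, data: Vec<u8>) -> Result<(), String> {
            self.sent.push((peer, data));
            Ok(())
        }

        // The test double ignores the protocol and reuses the plain path,
        // mirroring the TestIo impl in the diff.
        fn send_protocol(&mut self, _protocol: ProtocolId, peer: PeerId, data: Vec<u8>) -> Result<(), String> {
            self.send(peer, data)
        }
    }

    fn main() {
        let mut io = TestIo { sent: Vec::new() };
        io.send_protocol("par", 7, vec![1, 2, 3]).unwrap();
        assert_eq!(io.sent.len(), 1);
    }
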
@@ -18,7 +18,7 @@ rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }
 lazy_static = "0.2"
 eth-secp256k1 = { git = "https://github.com/ethcore/rust-secp256k1" }
 rust-crypto = "0.2.34"
-elastic-array = "0.5"
+elastic-array = { git = "https://github.com/ethcore/elastic-array" }
 rlp = { path = "rlp" }
 heapsize = { version = "0.3", features = ["unstable"] }
 itertools = "0.4"
@@ -241,9 +241,14 @@ impl<'s> NetworkContext<'s> {

 	/// Send a packet over the network to another peer.
 	pub fn send(&self, peer: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError> {
+		self.send_protocol(self.protocol, peer, packet_id, data)
+	}
+
+	/// Send a packet over the network to another peer using specified protocol.
+	pub fn send_protocol(&self, protocol: ProtocolId, peer: PeerId, packet_id: PacketId, data: Vec<u8>) -> Result<(), NetworkError> {
 		let session = self.resolve_session(peer);
 		if let Some(session) = session {
-			try!(session.lock().send_packet(self.io, self.protocol, packet_id as u8, &data));
+			try!(session.lock().send_packet(self.io, protocol, packet_id as u8, &data));
 		} else {
 			trace!(target: "network", "Send: Peer no longer exists")
 		}
@@ -911,7 +916,7 @@ impl Host {
 		}
 	}

-	fn update_nodes(&self, io: &IoContext<NetworkIoMessage>, node_changes: TableUpdates) {
+	fn update_nodes(&self, _io: &IoContext<NetworkIoMessage>, node_changes: TableUpdates) {
 		let mut to_remove: Vec<PeerId> = Vec::new();
 		{
 			let sessions = self.sessions.write();
@@ -926,7 +931,6 @@ impl Host {
 		}
 		for i in to_remove {
 			trace!(target: "network", "Removed from node table: {}", i);
-			self.kill_connection(i, io, false);
 		}
 		self.nodes.write().update(node_changes, &*self.reserved_nodes.read());
 	}
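
On the host side, `send` becomes a thin wrapper over `send_protocol` with the context's own protocol filled in, leaving a single code path for packet delivery. The layering in miniature (types simplified; not the real network API):

    type PeerId = usize;
    type ProtocolId = &'static str;

    struct NetworkContext {
        protocol: ProtocolId,
    }

    impl NetworkContext {
        // General entry point: the caller names the protocol explicitly.
        fn send_protocol(&self, protocol: ProtocolId, peer: PeerId, data: &[u8]) {
            println!("-> peer {} over {:?}: {} bytes", peer, protocol, data.len());
        }

        // Convenience wrapper: defaults to this context's own protocol.
        fn send(&self, peer: PeerId, data: &[u8]) {
            self.send_protocol(self.protocol, peer, data)
        }
    }

    fn main() {
        let ctx = NetworkContext { protocol: "eth" };
        ctx.send(1, b"ping");
        ctx.send_protocol("par", 1, b"warp-ping");
    }
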
@@ -395,7 +395,7 @@ impl Session {
 	PACKET_PEERS => Ok(SessionData::None),
 	PACKET_USER ... PACKET_LAST => {
 		let mut i = 0usize;
-		while packet_id < self.info.capabilities[i].id_offset {
+		while packet_id > self.info.capabilities[i].id_offset + self.info.capabilities[i].packet_count {
 			i += 1;
 			if i == self.info.capabilities.len() {
 				debug!(target: "network", "Unknown packet: {:?}", packet_id);
@@ -469,7 +469,7 @@ impl Session {
 		offset += caps[i].packet_count;
 		i += 1;
 	}
-	trace!(target: "network", "Hello: {} v{} {} {:?}", client_version, protocol, id, caps);
+	debug!(target: "network", "Hello: {} v{} {} {:?}", client_version, protocol, id, caps);
 	self.info.protocol_version = protocol;
 	self.info.client_version = client_version;
 	self.info.capabilities = caps;
@@ -6,7 +6,7 @@ version = "0.1.0"
 authors = ["Ethcore <admin@ethcore.io>"]

 [dependencies]
-elastic-array = "0.5"
+elastic-array = { git = "https://github.com/ethcore/elastic-array" }
 ethcore-bigint = { path = "../bigint" }
 lazy_static = "0.2"
 rustc-serialize = "0.3"
@@ -95,8 +95,8 @@ macro_rules! flushln {

 #[doc(hidden)]
 pub fn flush(s: String) {
-	::std::io::stdout().write(s.as_bytes()).unwrap();
-	::std::io::stdout().flush().unwrap();
+	let _ = ::std::io::stdout().write(s.as_bytes());
+	let _ = ::std::io::stdout().flush();
 }

 #[test]
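
Binding the results to `let _ =` deliberately discards write failures: a closed stdout pipe should not bring the client down just because it tried to log. The pattern on its own:

    use std::io::Write;

    fn flush(s: String) {
        // A failed write to stdout (closed pipe, full disk) is not worth a
        // panic; `let _ =` acknowledges and discards the Result.
        let _ = ::std::io::stdout().write(s.as_bytes());
        let _ = ::std::io::stdout().flush();
    }

    fn main() {
        flush("still alive\n".to_owned());
    }
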
@@ -16,8 +16,11 @@

 //! Database of byte-slices keyed to their Keccak hash.
 use hash::*;
-use bytes::*;
 use std::collections::HashMap;
+use elastic_array::ElasticArray256;
+
+/// `HashDB` value type.
+pub type DBValue = ElasticArray256<u8>;

 /// Trait modelling datastore keyed by a 32-byte Keccak hash.
 pub trait HashDB: AsHashDB + Send + Sync {
@@ -39,7 +42,7 @@ pub trait HashDB: AsHashDB + Send + Sync {
 	/// assert_eq!(m.get(&hash).unwrap(), hello_bytes);
 	/// }
 	/// ```
-	fn get(&self, key: &H256) -> Option<&[u8]>;
+	fn get(&self, key: &H256) -> Option<DBValue>;

 	/// Check for the existence of a hash-key.
 	///
@@ -80,7 +83,7 @@ pub trait HashDB: AsHashDB + Send + Sync {
 	fn insert(&mut self, value: &[u8]) -> H256;

 	/// Like `insert()`, except you provide the key and the data is all moved.
-	fn emplace(&mut self, key: H256, value: Bytes);
+	fn emplace(&mut self, key: H256, value: DBValue);

 	/// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may
 	/// happen without the data eventually being inserted into the DB.
@@ -111,7 +114,7 @@ pub trait HashDB: AsHashDB + Send + Sync {
 	}

 	/// Get auxiliary data from hashdb.
-	fn get_aux(&self, _hash: &[u8]) -> Option<Vec<u8>> {
+	fn get_aux(&self, _hash: &[u8]) -> Option<DBValue> {
 		unimplemented!();
 	}
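
Changing `get()` to return an owned `DBValue` rather than `&[u8]` means no borrow of the store escapes the call, which is what allows the fallback to the backing database and the removal of the `denote` bookkeeping in the impls below. A sketch of the owned-return shape, with `Vec<u8>` standing in for `ElasticArray256<u8>` (the real alias keeps small values inline instead of on the heap):

    use std::collections::HashMap;

    // Stand-in for `pub type DBValue = ElasticArray256<u8>;`.
    type DBValue = Vec<u8>;

    struct MemoryStore {
        data: HashMap<u64, DBValue>,
    }

    impl MemoryStore {
        // Owned return value: no borrow of `self` escapes, so callers can
        // keep using the store while holding the result.
        fn get(&self, key: &u64) -> Option<DBValue> {
            self.data.get(key).cloned()
        }

        fn emplace(&mut self, key: u64, value: DBValue) {
            self.data.insert(key, value);
        }
    }

    fn main() {
        let mut db = MemoryStore { data: HashMap::new() };
        db.emplace(1, b"dog".to_vec());
        assert_eq!(db.get(&1), Some(b"dog".to_vec()));
    }
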
@@ -65,8 +65,8 @@ impl ArchiveDB {
 		Self::new(backing, None)
 	}

-	fn payload(&self, key: &H256) -> Option<Bytes> {
-		self.backing.get(self.column, key).expect("Low-level database error. Some issue with your hard disk?").map(|v| v.to_vec())
+	fn payload(&self, key: &H256) -> Option<DBValue> {
+		self.backing.get(self.column, key).expect("Low-level database error. Some issue with your hard disk?")
 	}
 }

@@ -85,19 +85,12 @@ impl HashDB for ArchiveDB {
 		ret
 	}

-	fn get(&self, key: &H256) -> Option<&[u8]> {
+	fn get(&self, key: &H256) -> Option<DBValue> {
 		let k = self.overlay.raw(key);
-		match k {
-			Some((d, rc)) if rc > 0 => Some(d),
-			_ => {
-				if let Some(x) = self.payload(key) {
-					Some(self.overlay.denote(key, x).0)
-				}
-				else {
-					None
-				}
-			}
+		if let Some((d, rc)) = k {
+			if rc > 0 { return Some(d); }
 		}
+		self.payload(key)
 	}

 	fn contains(&self, key: &H256) -> bool {
@@ -108,7 +101,7 @@ impl HashDB for ArchiveDB {
 		self.overlay.insert(value)
 	}

-	fn emplace(&mut self, key: H256, value: Bytes) {
+	fn emplace(&mut self, key: H256, value: DBValue) {
 		self.overlay.emplace(key, value);
 	}

@@ -120,7 +113,7 @@ impl HashDB for ArchiveDB {
 		self.overlay.insert_aux(hash, value);
 	}

-	fn get_aux(&self, hash: &[u8]) -> Option<Vec<u8>> {
+	fn get_aux(&self, hash: &[u8]) -> Option<DBValue> {
 		if let Some(res) = self.overlay.get_aux(hash) {
 			return Some(res)
 		}
@@ -130,7 +123,6 @@ impl HashDB for ArchiveDB {

 		self.backing.get(self.column, &db_hash)
 			.expect("Low-level database error. Some issue with your hard disk?")
-			.map(|v| v.to_vec())
 	}

 	fn remove_aux(&mut self, hash: &[u8]) {
@@ -396,7 +388,7 @@ mod tests {
 		let mut jdb = new_db(&dir);
 		// history is 1
 		let foo = jdb.insert(b"foo");
-		jdb.emplace(bar.clone(), b"bar".to_vec());
+		jdb.emplace(bar.clone(), DBValue::from_slice(b"bar"));
 		jdb.commit_batch(0, &b"0".sha3(), None).unwrap();
 		foo
 	};
@@ -497,7 +489,7 @@ mod tests {
 		let key = jdb.insert(b"dog");
 		jdb.inject_batch().unwrap();

-		assert_eq!(jdb.get(&key).unwrap(), b"dog");
+		assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog"));
 		jdb.remove(&key);
 		jdb.inject_batch().unwrap();
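
After the rewrite, `get()` is a two-step lookup: a positive-refcount entry in the in-memory overlay wins, and anything else falls through to the backing database via `payload()`. The skeleton of that control flow (storage stubbed with HashMaps; not the real journaldb types):

    use std::collections::HashMap;

    type DBValue = Vec<u8>;

    struct ArchiveLike {
        // (value, reference count) pairs, as returned by the overlay's raw().
        overlay: HashMap<u64, (DBValue, i32)>,
        backing: HashMap<u64, DBValue>,
    }

    impl ArchiveLike {
        fn payload(&self, key: &u64) -> Option<DBValue> {
            self.backing.get(key).cloned()
        }

        fn get(&self, key: &u64) -> Option<DBValue> {
            // Step 1: a live overlay entry wins.
            if let Some(&(ref d, rc)) = self.overlay.get(key) {
                if rc > 0 { return Some(d.clone()); }
            }
            // Step 2: otherwise ask the backing database.
            self.payload(key)
        }
    }

    fn main() {
        let mut db = ArchiveLike { overlay: HashMap::new(), backing: HashMap::new() };
        db.backing.insert(9, b"persisted".to_vec());
        db.overlay.insert(9, (b"fresh".to_vec(), 1));
        assert_eq!(db.get(&9), Some(b"fresh".to_vec()));
    }
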
@@ -150,7 +150,7 @@ impl EarlyMergeDB {
 		backing.get(col, &Self::morph_key(key, 0)).expect("Low-level database error. Some issue with your hard disk?").is_some()
 	}

-	fn insert_keys(inserts: &[(H256, Bytes)], backing: &Database, col: Option<u32>, refs: &mut HashMap<H256, RefInfo>, batch: &mut DBTransaction, trace: bool) {
+	fn insert_keys(inserts: &[(H256, DBValue)], backing: &Database, col: Option<u32>, refs: &mut HashMap<H256, RefInfo>, batch: &mut DBTransaction, trace: bool) {
 		for &(ref h, ref d) in inserts {
 			if let Some(c) = refs.get_mut(h) {
 				// already counting. increment.
@@ -268,8 +268,8 @@ impl EarlyMergeDB {
 		}
 	}

-	fn payload(&self, key: &H256) -> Option<Bytes> {
-		self.backing.get(self.column, key).expect("Low-level database error. Some issue with your hard disk?").map(|v| v.to_vec())
+	fn payload(&self, key: &H256) -> Option<DBValue> {
+		self.backing.get(self.column, key).expect("Low-level database error. Some issue with your hard disk?")
 	}

 	fn read_refs(db: &Database, col: Option<u32>) -> (Option<u64>, HashMap<H256, RefInfo>) {
@@ -317,19 +317,12 @@ impl HashDB for EarlyMergeDB {
 		ret
 	}

-	fn get(&self, key: &H256) -> Option<&[u8]> {
+	fn get(&self, key: &H256) -> Option<DBValue> {
 		let k = self.overlay.raw(key);
-		match k {
-			Some((d, rc)) if rc > 0 => Some(d),
-			_ => {
-				if let Some(x) = self.payload(key) {
-					Some(self.overlay.denote(key, x).0)
-				}
-				else {
-					None
-				}
-			}
+		if let Some((d, rc)) = k {
+			if rc > 0 { return Some(d) }
 		}
+		self.payload(key)
 	}

 	fn contains(&self, key: &H256) -> bool {
@@ -339,7 +332,7 @@ impl HashDB for EarlyMergeDB {
 	fn insert(&mut self, value: &[u8]) -> H256 {
 		self.overlay.insert(value)
 	}
-	fn emplace(&mut self, key: H256, value: Bytes) {
+	fn emplace(&mut self, key: H256, value: DBValue) {
 		self.overlay.emplace(key, value);
 	}
 	fn remove(&mut self, key: &H256) {
@@ -383,7 +376,10 @@ impl JournalDB for EarlyMergeDB {
 		let trace = false;

 		// record new commit's details.
-		let mut refs = self.refs.as_ref().unwrap().write();
+		let mut refs = match self.refs.as_ref() {
+			Some(refs) => refs.write(),
+			None => return Ok(0),
+		};

 		{
 			let mut index = 0usize;
@@ -410,7 +406,7 @@ impl JournalDB for EarlyMergeDB {
 			.iter()
 			.filter_map(|(k, &(_, c))| if c < 0 {Some(k.clone())} else {None})
 			.collect();
-		let inserts: Vec<(H256, Bytes)> = drained
+		let inserts: Vec<(H256, _)> = drained
 			.into_iter()
 			.filter_map(|(k, (v, r))| if r > 0 { assert!(r == 1); Some((k, v)) } else { assert!(r >= -1); None })
 			.collect();
@@ -829,7 +825,7 @@ mod tests {
 		let mut jdb = new_db(&dir);
 		// history is 1
 		let foo = jdb.insert(b"foo");
-		jdb.emplace(bar.clone(), b"bar".to_vec());
+		jdb.emplace(bar.clone(), DBValue::from_slice(b"bar"));
 		jdb.commit_batch(0, &b"0".sha3(), None).unwrap();
 		assert!(jdb.can_reconstruct_refs());
 		foo
@@ -1085,7 +1081,7 @@ mod tests {
 		let key = jdb.insert(b"dog");
 		jdb.inject_batch().unwrap();

-		assert_eq!(jdb.get(&key).unwrap(), b"dog");
+		assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog"));
 		jdb.remove(&key);
 		jdb.inject_batch().unwrap();
@@ -67,7 +67,7 @@ pub struct OverlayRecentDB {
 #[derive(PartialEq)]
 struct JournalOverlay {
 	backing_overlay: MemoryDB, // Nodes added in the history period
-	pending_overlay: H256FastMap<Bytes>, // Nodes being transferred from backing_overlay to backing db
+	pending_overlay: H256FastMap<DBValue>, // Nodes being transferred from backing_overlay to backing db
 	journal: HashMap<u64, Vec<JournalEntry>>,
 	latest_era: Option<u64>,
 	earliest_era: Option<u64>,
@@ -130,7 +130,7 @@ impl OverlayRecentDB {
 	journal_overlay.latest_era == reconstructed.latest_era
 }

-fn payload(&self, key: &H256) -> Option<Bytes> {
+fn payload(&self, key: &H256) -> Option<DBValue> {
 	self.backing.get(self.column, key).expect("Low-level database error. Some issue with your hard disk?")
 }

@@ -160,8 +160,8 @@ impl OverlayRecentDB {
 	let mut inserted_keys = Vec::new();
 	for r in insertions.iter() {
 		let k: H256 = r.val_at(0);
-		let v: Bytes = r.val_at(1);
-		overlay.emplace(to_short_key(&k), v);
+		let v = r.at(1).data();
+		overlay.emplace(to_short_key(&k), DBValue::from_slice(v));
 		inserted_keys.push(k);
 		count += 1;
 	}
@@ -229,7 +229,7 @@ impl JournalDB for OverlayRecentDB {
 	let journal_overlay = self.journal_overlay.read();
 	let key = to_short_key(key);
 	journal_overlay.backing_overlay.get(&key).map(|v| v.to_vec())
-		.or_else(|| journal_overlay.pending_overlay.get(&key).cloned())
+		.or_else(|| journal_overlay.pending_overlay.get(&key).map(|d| d.clone().to_vec()))
		.or_else(|| self.backing.get_by_prefix(self.column, &key[0..DB_PREFIX_LEN]).map(|b| b.to_vec()))
 }

@@ -255,7 +255,7 @@ impl JournalDB for OverlayRecentDB {
 	for (k, v) in insertions {
 		r.begin_list(2);
 		r.append(&k);
-		r.append(&v);
+		r.append(&&*v);
 		journal_overlay.backing_overlay.emplace(to_short_key(&k), v);
 	}
 	r.append(&removed_keys);
@@ -284,7 +284,7 @@ impl JournalDB for OverlayRecentDB {
 	let mut ops = 0;
 	// apply old commits' details
 	if let Some(ref mut records) = journal_overlay.journal.get_mut(&end_era) {
-		let mut canon_insertions: Vec<(H256, Bytes)> = Vec::new();
+		let mut canon_insertions: Vec<(H256, DBValue)> = Vec::new();
 		let mut canon_deletions: Vec<H256> = Vec::new();
 		let mut overlay_deletions: Vec<H256> = Vec::new();
 		let mut index = 0usize;
@@ -301,7 +301,7 @@ impl JournalDB for OverlayRecentDB {
 		for h in &journal.insertions {
 			if let Some((d, rc)) = journal_overlay.backing_overlay.raw(&to_short_key(h)) {
 				if rc > 0 {
-					canon_insertions.push((h.clone(), d.to_owned())); //TODO: optimize this to avoid data copy
+					canon_insertions.push((h.clone(), d)); //TODO: optimize this to avoid data copy
 				}
 			}
 		}
@@ -348,13 +348,13 @@ impl JournalDB for OverlayRecentDB {
 		match rc {
 			0 => {}
 			1 => {
-				if try!(self.backing.get(self.column, &key)).is_some() {
+				if cfg!(debug_assertions) && try!(self.backing.get(self.column, &key)).is_some() {
 					return Err(BaseDataError::AlreadyExists(key).into());
 				}
 				batch.put(self.column, &key, &value)
 			}
 			-1 => {
-				if try!(self.backing.get(self.column, &key)).is_none() {
+				if cfg!(debug_assertions) && try!(self.backing.get(self.column, &key)).is_none() {
 					return Err(BaseDataError::NegativelyReferencedHash(key).into());
 				}
 				batch.delete(self.column, &key)
@@ -386,32 +386,18 @@ impl HashDB for OverlayRecentDB {
 	ret
 }

-fn get(&self, key: &H256) -> Option<&[u8]> {
+fn get(&self, key: &H256) -> Option<DBValue> {
 	let k = self.transaction_overlay.raw(key);
-	match k {
-		Some((d, rc)) if rc > 0 => Some(d),
-		_ => {
+	if let Some((d, rc)) = k {
+		if rc > 0 { return Some(d) }
+	}
 	let v = {
 		let journal_overlay = self.journal_overlay.read();
 		let key = to_short_key(key);
-		journal_overlay.backing_overlay.get(&key).map(|v| v.to_vec())
+		journal_overlay.backing_overlay.get(&key)
 			.or_else(|| journal_overlay.pending_overlay.get(&key).cloned())
 	};
-	match v {
-		Some(x) => {
-			Some(self.transaction_overlay.denote(key, x).0)
-		}
-		_ => {
-			if let Some(x) = self.payload(key) {
-				Some(self.transaction_overlay.denote(key, x).0)
-			}
-			else {
-				None
-			}
-		}
-	}
-	}
-	}
+	v.or_else(|| self.payload(key))
 }

 fn contains(&self, key: &H256) -> bool {
@@ -421,7 +407,7 @@ impl HashDB for OverlayRecentDB {
 fn insert(&mut self, value: &[u8]) -> H256 {
 	self.transaction_overlay.insert(value)
 }
-fn emplace(&mut self, key: H256, value: Bytes) {
+fn emplace(&mut self, key: H256, value: DBValue) {
 	self.transaction_overlay.emplace(key, value);
 }
 fn remove(&mut self, key: &H256) {
@@ -692,7 +678,7 @@ mod tests {
 	let mut jdb = new_db(&dir);
 	// history is 1
 	let foo = jdb.insert(b"foo");
-	jdb.emplace(bar.clone(), b"bar".to_vec());
+	jdb.emplace(bar.clone(), DBValue::from_slice(b"bar"));
|
||||||
jdb.commit_batch(0, &b"0".sha3(), None).unwrap();
|
jdb.commit_batch(0, &b"0".sha3(), None).unwrap();
|
||||||
assert!(jdb.can_reconstruct_refs());
|
assert!(jdb.can_reconstruct_refs());
|
||||||
foo
|
foo
|
||||||
@ -965,7 +951,7 @@ mod tests {
|
|||||||
let key = jdb.insert(b"dog");
|
let key = jdb.insert(b"dog");
|
||||||
jdb.inject_batch().unwrap();
|
jdb.inject_batch().unwrap();
|
||||||
|
|
||||||
assert_eq!(jdb.get(&key).unwrap(), b"dog");
|
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog"));
|
||||||
jdb.remove(&key);
|
jdb.remove(&key);
|
||||||
jdb.inject_batch().unwrap();
|
jdb.inject_batch().unwrap();
|
||||||
|
|
||||||
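
A minimal sketch of the pattern behind the `get` rewrite above (stand-in types, not the Parity sources): once the value may have to be read out of a lock-guarded overlay, `get` can no longer return a `&[u8]` borrowed from `self`, so it returns an owned `DBValue` instead.

    use std::collections::HashMap;
    use std::sync::RwLock;

    type DBValue = Vec<u8>; // assumption: stand-in for the ElasticArray-backed DBValue

    struct Db { overlay: RwLock<HashMap<u64, DBValue>> }

    impl Db {
        // The guard returned by read() dies at the end of this function, so a
        // &[u8] into it could never be returned; an owned clone can.
        fn get(&self, key: &u64) -> Option<DBValue> {
            self.overlay.read().unwrap().get(key).cloned()
        }
    }
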
@@ -83,10 +83,10 @@ impl RefCountedDB {
 
 impl HashDB for RefCountedDB {
 fn keys(&self) -> HashMap<H256, i32> { self.forward.keys() }
-fn get(&self, key: &H256) -> Option<&[u8]> { self.forward.get(key) }
+fn get(&self, key: &H256) -> Option<DBValue> { self.forward.get(key) }
 fn contains(&self, key: &H256) -> bool { self.forward.contains(key) }
 fn insert(&mut self, value: &[u8]) -> H256 { let r = self.forward.insert(value); self.inserts.push(r.clone()); r }
-fn emplace(&mut self, key: H256, value: Bytes) { self.inserts.push(key.clone()); self.forward.emplace(key, value); }
+fn emplace(&mut self, key: H256, value: DBValue) { self.inserts.push(key.clone()); self.forward.emplace(key, value); }
 fn remove(&mut self, key: &H256) { self.removes.push(key.clone()); }
 }
 
@@ -326,7 +326,7 @@ mod tests {
 let key = jdb.insert(b"dog");
 jdb.inject_batch().unwrap();
 
-assert_eq!(jdb.get(&key).unwrap(), b"dog");
+assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog"));
 jdb.remove(&key);
 jdb.inject_batch().unwrap();
 
@@ -21,6 +21,7 @@ use common::*;
 use elastic_array::*;
 use std::default::Default;
 use std::path::PathBuf;
+use hashdb::DBValue;
 use rlp::{UntrustedRlp, RlpType, View, Compressible};
 use rocksdb::{DB, Writable, WriteBatch, WriteOptions, IteratorMode, DBIterator,
 Options, DBCompactionStyle, BlockBasedOptions, Direction, Cache, Column, ReadOptions};
@@ -43,12 +44,12 @@ enum DBOp {
 Insert {
 col: Option<u32>,
 key: ElasticArray32<u8>,
-value: Bytes,
+value: DBValue,
 },
 InsertCompressed {
 col: Option<u32>,
 key: ElasticArray32<u8>,
-value: Bytes,
+value: DBValue,
 },
 Delete {
 col: Option<u32>,
@@ -71,7 +72,7 @@ impl DBTransaction {
 self.ops.push(DBOp::Insert {
 col: col,
 key: ekey,
-value: value.to_vec(),
+value: DBValue::from_slice(value),
 });
 }
 
@@ -82,7 +83,7 @@ impl DBTransaction {
 self.ops.push(DBOp::Insert {
 col: col,
 key: ekey,
-value: value,
+value: DBValue::from_vec(value),
 });
 }
 
@@ -94,7 +95,7 @@ impl DBTransaction {
 self.ops.push(DBOp::InsertCompressed {
 col: col,
 key: ekey,
-value: value,
+value: DBValue::from_vec(value),
 });
 }
 
@@ -110,8 +111,8 @@ impl DBTransaction {
 }
 
 enum KeyState {
-Insert(Bytes),
+Insert(DBValue),
-InsertCompressed(Bytes),
+InsertCompressed(DBValue),
 Delete,
 }
 
@@ -345,7 +346,8 @@ impl Database {
 let cfnames: Vec<&str> = cfnames.iter().map(|n| n as &str).collect();
 match DB::open_cf(&opts, path, &cfnames, &cf_options) {
 Ok(db) => {
-cfs = cfnames.iter().map(|n| db.cf_handle(n).unwrap()).collect();
+cfs = cfnames.iter().map(|n| db.cf_handle(n)
+.expect("rocksdb opens a cf_handle for each cfname; qed")).collect();
 assert!(cfs.len() == columns as usize);
 Ok(db)
 }
@@ -353,7 +355,7 @@ impl Database {
 // retry and create CFs
 match DB::open_cf(&opts, path, &[], &[]) {
 Ok(mut db) => {
-cfs = cfnames.iter().enumerate().map(|(i, n)| db.create_cf(n, &cf_options[i]).unwrap()).collect();
+cfs = try!(cfnames.iter().enumerate().map(|(i, n)| db.create_cf(n, &cf_options[i])).collect());
 Ok(db)
 },
 err @ Err(_) => err,
@@ -506,7 +508,7 @@ impl Database {
 }
 
 /// Get value by key.
-pub fn get(&self, col: Option<u32>, key: &[u8]) -> Result<Option<Bytes>, String> {
+pub fn get(&self, col: Option<u32>, key: &[u8]) -> Result<Option<DBValue>, String> {
 match *self.db.read() {
 Some(DBAndColumns { ref db, ref cfs }) => {
 let overlay = &self.overlay.read()[Self::to_overlay_column(col)];
@@ -520,8 +522,8 @@ impl Database {
 Some(&KeyState::Delete) => Ok(None),
 None => {
 col.map_or_else(
-|| db.get_opt(key, &self.read_opts).map(|r| r.map(|v| v.to_vec())),
+|| db.get_opt(key, &self.read_opts).map(|r| r.map(|v| DBValue::from_slice(&v))),
-|c| db.get_cf_opt(cfs[c as usize], key, &self.read_opts).map(|r| r.map(|v| v.to_vec())))
+|c| db.get_cf_opt(cfs[c as usize], key, &self.read_opts).map(|r| r.map(|v| DBValue::from_slice(&v))))
 },
 }
 },
@@ -537,7 +539,8 @@ impl Database {
 match *self.db.read() {
 Some(DBAndColumns { ref db, ref cfs }) => {
 let mut iter = col.map_or_else(|| db.iterator_opt(IteratorMode::From(prefix, Direction::Forward), &self.read_opts),
-|c| db.iterator_cf_opt(cfs[c as usize], IteratorMode::From(prefix, Direction::Forward), &self.read_opts).unwrap());
+|c| db.iterator_cf_opt(cfs[c as usize], IteratorMode::From(prefix, Direction::Forward), &self.read_opts)
+.expect("iterator params are valid; qed"));
 match iter.next() {
 // TODO: use prefix_same_as_start read option (not availabele in C API currently)
 Some((k, v)) => if k[0 .. prefix.len()] == prefix[..] { Some(v) } else { None },
@@ -554,7 +557,8 @@ impl Database {
 match *self.db.read() {
 Some(DBAndColumns { ref db, ref cfs }) => {
 col.map_or_else(|| DatabaseIterator { iter: db.iterator_opt(IteratorMode::Start, &self.read_opts) },
-|c| DatabaseIterator { iter: db.iterator_cf_opt(cfs[c as usize], IteratorMode::Start, &self.read_opts).unwrap() })
+|c| DatabaseIterator { iter: db.iterator_cf_opt(cfs[c as usize], IteratorMode::Start, &self.read_opts)
+.expect("iterator params are valid; qed") })
 },
 None => panic!("Not supported yet") //TODO: return an empty iterator or change return type
 }
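
The new `KeyState` variants above carry `DBValue` through kvdb's write overlay; a sketch of the read-through order that overlay implies (assumed names, simplified types, not the kvdb sources):

    use std::collections::HashMap;

    type DBValue = Vec<u8>; // assumption: stand-in type

    enum KeyState { Insert(DBValue), Delete }

    struct Db { overlay: HashMap<Vec<u8>, KeyState>, backing: HashMap<Vec<u8>, DBValue> }

    impl Db {
        fn get(&self, key: &[u8]) -> Option<DBValue> {
            match self.overlay.get(key) {
                Some(&KeyState::Insert(ref v)) => Some(v.clone()), // pending write wins
                Some(&KeyState::Delete) => None,                   // pending delete masks backing
                None => self.backing.get(key).cloned(),            // fall through to the store
            }
        }
    }
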
@@ -148,7 +148,7 @@ mod timer;
 pub use common::*;
 pub use misc::*;
 pub use hashdb::*;
-pub use memorydb::*;
+pub use memorydb::MemoryDB;
 pub use overlaydb::*;
 pub use journaldb::JournalDB;
 pub use triehash::*;
@@ -24,8 +24,6 @@ use hashdb::*;
 use heapsize::*;
 use std::mem;
 use std::collections::HashMap;
-
-const STATIC_NULL_RLP: (&'static [u8], i32) = (&[0x80; 1], 1);
 use std::collections::hash_map::Entry;
 
 /// Reference-counted memory-based `HashDB` implementation.
@@ -73,8 +71,8 @@ use std::collections::hash_map::Entry;
 /// ```
 #[derive(Default, Clone, PartialEq)]
 pub struct MemoryDB {
-data: H256FastMap<(Bytes, i32)>,
+data: H256FastMap<(DBValue, i32)>,
-aux: HashMap<Bytes, Bytes>,
+aux: HashMap<Bytes, DBValue>,
 }
 
 impl MemoryDB {
@@ -116,12 +114,12 @@ impl MemoryDB {
 }
 
 /// Return the internal map of hashes to data, clearing the current state.
-pub fn drain(&mut self) -> H256FastMap<(Bytes, i32)> {
+pub fn drain(&mut self) -> H256FastMap<(DBValue, i32)> {
 mem::replace(&mut self.data, H256FastMap::default())
 }
 
 /// Return the internal map of auxiliary data, clearing the current state.
-pub fn drain_aux(&mut self) -> HashMap<Bytes, Bytes> {
+pub fn drain_aux(&mut self) -> HashMap<Bytes, DBValue> {
 mem::replace(&mut self.aux, HashMap::new())
 }
 
@@ -130,25 +128,11 @@ impl MemoryDB {
 ///
 /// Even when Some is returned, the data is only guaranteed to be useful
 /// when the refs > 0.
-pub fn raw(&self, key: &H256) -> Option<(&[u8], i32)> {
+pub fn raw(&self, key: &H256) -> Option<(DBValue, i32)> {
 if key == &SHA3_NULL_RLP {
-return Some(STATIC_NULL_RLP.clone());
+return Some((DBValue::from_slice(&NULL_RLP_STATIC), 1));
 }
-self.data.get(key).map(|&(ref val, rc)| (&val[..], rc))
+self.data.get(key).cloned()
-}
-
-/// Denote than an existing value has the given key. Used when a key gets removed without
-/// a prior insert and thus has a negative reference with no value.
-///
-/// May safely be called even if the key's value is known, in which case it will be a no-op.
-pub fn denote(&self, key: &H256, value: Bytes) -> (&[u8], i32) {
-if self.raw(key) == None {
-unsafe {
-let p = &self.data as *const H256FastMap<(Bytes, i32)> as *mut H256FastMap<(Bytes, i32)>;
-(*p).insert(key.clone(), (value, 0));
-}
-}
-self.raw(key).unwrap()
 }
 
 /// Returns the size of allocated heap memory
@@ -170,7 +154,7 @@ impl MemoryDB {
 entry.get_mut().1 -= 1;
 },
 Entry::Vacant(entry) => {
-entry.insert((Bytes::new(), -1));
+entry.insert((DBValue::new(), -1));
 }
 }
 }
@@ -197,13 +181,13 @@ impl MemoryDB {
 static NULL_RLP_STATIC: [u8; 1] = [0x80; 1];
 
 impl HashDB for MemoryDB {
-fn get(&self, key: &H256) -> Option<&[u8]> {
+fn get(&self, key: &H256) -> Option<DBValue> {
 if key == &SHA3_NULL_RLP {
-return Some(&NULL_RLP_STATIC);
+return Some(DBValue::from_slice(&NULL_RLP_STATIC));
 }
 
 match self.data.get(key) {
-Some(&(ref d, rc)) if rc > 0 => Some(d),
+Some(&(ref d, rc)) if rc > 0 => Some(d.clone()),
 _ => None
 }
 }
@@ -230,20 +214,20 @@ impl HashDB for MemoryDB {
 let key = value.sha3();
 if match self.data.get_mut(&key) {
 Some(&mut (ref mut old_value, ref mut rc @ -0x80000000i32 ... 0)) => {
-*old_value = value.into();
+*old_value = DBValue::from_slice(value);
 *rc += 1;
 false
 },
 Some(&mut (_, ref mut x)) => { *x += 1; false } ,
 None => true,
 }{ // ... None falls through into...
-self.data.insert(key.clone(), (value.into(), 1));
+self.data.insert(key.clone(), (DBValue::from_slice(value), 1));
 }
 key
 }
 
-fn emplace(&mut self, key: H256, value: Bytes) {
+fn emplace(&mut self, key: H256, value: DBValue) {
-if value == &NULL_RLP {
+if &*value == &NULL_RLP {
 return;
 }
 
@@ -269,15 +253,15 @@ impl HashDB for MemoryDB {
 Some(&mut (_, ref mut x)) => { *x -= 1; false }
 None => true
 }{ // ... None falls through into...
-self.data.insert(key.clone(), (Bytes::new(), -1));
+self.data.insert(key.clone(), (DBValue::new(), -1));
 }
 }
 
 fn insert_aux(&mut self, hash: Vec<u8>, value: Vec<u8>) {
-self.aux.insert(hash, value);
+self.aux.insert(hash, DBValue::from_vec(value));
 }
 
-fn get_aux(&self, hash: &[u8]) -> Option<Vec<u8>> {
+fn get_aux(&self, hash: &[u8]) -> Option<DBValue> {
 self.aux.get(hash).cloned()
 }
 
@@ -286,24 +270,6 @@ impl HashDB for MemoryDB {
 }
 }
 
-#[test]
-fn memorydb_denote() {
-let mut m = MemoryDB::new();
-let hello_bytes = b"Hello world!";
-let hash = m.insert(hello_bytes);
-assert_eq!(m.get(&hash).unwrap(), b"Hello world!");
-
-for _ in 0..1000 {
-let r = H256::random();
-let k = r.sha3();
-let (v, rc) = m.denote(&k, r.to_vec());
-assert_eq!(v, &*r);
-assert_eq!(rc, 0);
-}
-
-assert_eq!(m.get(&hash).unwrap(), b"Hello world!");
-}
-
 #[test]
 fn memorydb_remove_and_purge() {
 let hello_bytes = b"Hello world!";
@@ -337,12 +303,12 @@ fn consolidate() {
 main.remove(&remove_key);
 
 let insert_key = other.insert(b"arf");
-main.emplace(insert_key, b"arf".to_vec());
+main.emplace(insert_key, DBValue::from_slice(b"arf"));
 
 main.consolidate(other);
 
 let overlay = main.drain();
 
-assert_eq!(overlay.get(&remove_key).unwrap(), &(b"doggo".to_vec(), 0));
+assert_eq!(overlay.get(&remove_key).unwrap(), &(DBValue::from_slice(b"doggo"), 0));
-assert_eq!(overlay.get(&insert_key).unwrap(), &(b"arf".to_vec(), 2));
+assert_eq!(overlay.get(&insert_key).unwrap(), &(DBValue::from_slice(b"arf"), 2));
 }
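
MemoryDB's reference-counting contract is unchanged by the type swap; a simplified sketch of that contract (`Vec<u8>` standing in for `DBValue`, `u64` for `H256`):

    use std::collections::HashMap;

    // Each entry is (value, refcount); a remove() before any insert()
    // leaves a negative count with an empty placeholder value.
    struct MemoryDB { data: HashMap<u64, (Vec<u8>, i32)> }

    impl MemoryDB {
        fn insert(&mut self, key: u64, value: &[u8]) {
            let entry = self.data.entry(key).or_insert((Vec::new(), 0));
            if entry.1 <= 0 { entry.0 = value.to_vec(); } // revive a purged or negative entry
            entry.1 += 1;
        }
        fn remove(&mut self, key: u64) {
            self.data.entry(key).or_insert((Vec::new(), 0)).1 -= 1;
        }
        fn get(&self, key: &u64) -> Option<Vec<u8>> {
            match self.data.get(key) {
                Some(&(ref d, rc)) if rc > 0 => Some(d.clone()), // only positive refs are live
                _ => None,
            }
        }
    }
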
@@ -17,7 +17,7 @@
 //! Nibble-orientated view onto byte-slice, allowing nibble-precision offsets.
 use std::cmp::*;
 use std::fmt;
-use bytes::*;
+use elastic_array::ElasticArray36;
 
 /// Nibble-orientated view onto byte-slice, allowing nibble-precision offsets.
 ///
@@ -149,9 +149,9 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
 }
 
 /// Encode while nibble slice in prefixed hex notation, noting whether it `is_leaf`.
-pub fn encoded(&self, is_leaf: bool) -> Bytes {
+pub fn encoded(&self, is_leaf: bool) -> ElasticArray36<u8> {
 let l = self.len();
-let mut r = Bytes::with_capacity(l / 2 + 1);
+let mut r = ElasticArray36::new();
 let mut i = l % 2;
 r.push(if i == 1 {0x10 + self.at(0)} else {0} + if is_leaf {0x20} else {0});
 while i < l {
@@ -163,9 +163,9 @@ impl<'a, 'view> NibbleSlice<'a> where 'a: 'view {
 
 /// Encode only the leftmost `n` bytes of the nibble slice in prefixed hex notation,
 /// noting whether it `is_leaf`.
-pub fn encoded_leftmost(&self, n: usize, is_leaf: bool) -> Bytes {
+pub fn encoded_leftmost(&self, n: usize, is_leaf: bool) -> ElasticArray36<u8> {
 let l = min(self.len(), n);
-let mut r = Bytes::with_capacity(l / 2 + 1);
+let mut r = ElasticArray36::new();
 let mut i = l % 2;
 r.push(if i == 1 {0x10 + self.at(0)} else {0} + if is_leaf {0x20} else {0});
 while i < l {
@@ -212,6 +212,7 @@ impl<'a> fmt::Debug for NibbleSlice<'a> {
 #[cfg(test)]
 mod tests {
 use super::NibbleSlice;
+use elastic_array::ElasticArray36;
 static D: &'static [u8;3] = &[0x01u8, 0x23, 0x45];
 
 #[test]
@@ -254,10 +255,10 @@ mod tests {
 #[test]
 fn encoded() {
 let n = NibbleSlice::new(D);
-assert_eq!(n.encoded(false), &[0x00, 0x01, 0x23, 0x45]);
+assert_eq!(n.encoded(false), ElasticArray36::from_slice(&[0x00, 0x01, 0x23, 0x45]));
-assert_eq!(n.encoded(true), &[0x20, 0x01, 0x23, 0x45]);
+assert_eq!(n.encoded(true), ElasticArray36::from_slice(&[0x20, 0x01, 0x23, 0x45]));
-assert_eq!(n.mid(1).encoded(false), &[0x11, 0x23, 0x45]);
+assert_eq!(n.mid(1).encoded(false), ElasticArray36::from_slice(&[0x11, 0x23, 0x45]));
-assert_eq!(n.mid(1).encoded(true), &[0x31, 0x23, 0x45]);
+assert_eq!(n.mid(1).encoded(true), ElasticArray36::from_slice(&[0x31, 0x23, 0x45]));
 }
 
 #[test]
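
For reference, the prefixed hex notation that `encoded` produces (and that the assertions above exercise): the first byte packs a leaf flag (0x20) and, for odd-length slices, an odd-length flag (0x10) plus the first nibble; the remaining nibbles are packed two per byte. A standalone sketch of the same encoding:

    // Hex-prefix encode a nibble sequence; mirrors the loop in `encoded`.
    fn hex_prefix_encode(nibbles: &[u8], is_leaf: bool) -> Vec<u8> {
        let odd = nibbles.len() % 2;
        let mut out = Vec::with_capacity(nibbles.len() / 2 + 1);
        let flag = if is_leaf { 0x20 } else { 0x00 };
        out.push(if odd == 1 { flag + 0x10 + nibbles[0] } else { flag });
        let mut i = odd;
        while i < nibbles.len() {
            out.push((nibbles[i] << 4) + nibbles[i + 1]);
            i += 2;
        }
        out
    }

    // hex_prefix_encode(&[0x0, 0x1, 0x2, 0x3, 0x4, 0x5], false) == [0x00, 0x01, 0x23, 0x45]
    // hex_prefix_encode(&[0x1, 0x2, 0x3, 0x4, 0x5], true)      == [0x31, 0x23, 0x45]
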
@@ -18,7 +18,6 @@
 
 use error::*;
 use hash::*;
-use bytes::*;
 use rlp::*;
 use hashdb::*;
 use memorydb::*;
@@ -101,21 +100,21 @@ impl OverlayDB {
 pub fn commit_refs(&self, key: &H256) -> i32 { self.overlay.raw(key).map_or(0, |(_, refs)| refs) }
 
 /// Get the refs and value of the given key.
-fn payload(&self, key: &H256) -> Option<(Bytes, u32)> {
+fn payload(&self, key: &H256) -> Option<(DBValue, u32)> {
 self.backing.get(self.column, key)
 .expect("Low-level database error. Some issue with your hard disk?")
 .map(|d| {
 let r = Rlp::new(&d);
-(r.at(1).as_val(), r.at(0).as_val())
+(DBValue::from_slice(r.at(1).data()), r.at(0).as_val())
 })
 }
 
 /// Put the refs and value of the given key, possibly deleting it from the db.
-fn put_payload_in_batch(&self, batch: &mut DBTransaction, key: &H256, payload: (Bytes, u32)) -> bool {
+fn put_payload_in_batch(&self, batch: &mut DBTransaction, key: &H256, payload: (DBValue, u32)) -> bool {
 if payload.1 > 0 {
 let mut s = RlpStream::new_list(2);
 s.append(&payload.1);
-s.append(&payload.0);
+s.append(&&*payload.0);
 batch.put(self.column, key, s.as_raw());
 false
 } else {
@@ -140,19 +139,23 @@ impl HashDB for OverlayDB {
 }
 ret
 }
-fn get(&self, key: &H256) -> Option<&[u8]> {
+fn get(&self, key: &H256) -> Option<DBValue> {
 // return ok if positive; if negative, check backing - might be enough references there to make
 // it positive again.
 let k = self.overlay.raw(key);
-match k {
+let memrc = {
-Some((d, rc)) if rc > 0 => Some(d),
+if let Some((d, rc)) = k {
-_ => {
+if rc > 0 { return Some(d); }
-let memrc = k.map_or(0, |(_, rc)| rc);
+rc
+} else {
+0
+}
+};
 match self.payload(key) {
 Some(x) => {
 let (d, rc) = x;
 if rc as i32 + memrc > 0 {
-Some(self.overlay.denote(key, d).0)
+Some(d)
 }
 else {
 None
@@ -163,8 +166,6 @@ impl HashDB for OverlayDB {
 _ => None,
 }
 }
-}
-}
 fn contains(&self, key: &H256) -> bool {
 // return ok if positive; if negative, check backing - might be enough references there to make
 // it positive again.
@@ -186,7 +187,7 @@ impl HashDB for OverlayDB {
 }
 }
 fn insert(&mut self, value: &[u8]) -> H256 { self.overlay.insert(value) }
-fn emplace(&mut self, key: H256, value: Bytes) { self.overlay.emplace(key, value); }
+fn emplace(&mut self, key: H256, value: DBValue) { self.overlay.emplace(key, value); }
 fn remove(&mut self, key: &H256) { self.overlay.remove(key); }
 }
 
@@ -211,7 +212,7 @@ fn overlaydb_revert() {
 fn overlaydb_overlay_insert_and_remove() {
 let mut trie = OverlayDB::new_temp();
 let h = trie.insert(b"hello world");
-assert_eq!(trie.get(&h).unwrap(), b"hello world");
+assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
 trie.remove(&h);
 assert_eq!(trie.get(&h), None);
 }
@@ -220,11 +221,11 @@ fn overlaydb_overlay_insert_and_remove() {
 fn overlaydb_backing_insert_revert() {
 let mut trie = OverlayDB::new_temp();
 let h = trie.insert(b"hello world");
-assert_eq!(trie.get(&h).unwrap(), b"hello world");
+assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
 trie.commit().unwrap();
-assert_eq!(trie.get(&h).unwrap(), b"hello world");
+assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
 trie.revert();
-assert_eq!(trie.get(&h).unwrap(), b"hello world");
+assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
 }
 
 #[test]
@@ -248,7 +249,7 @@ fn overlaydb_backing_remove_revert() {
 trie.remove(&h);
 assert_eq!(trie.get(&h), None);
 trie.revert();
-assert_eq!(trie.get(&h).unwrap(), b"hello world");
+assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
 }
 
 #[test]
@@ -266,29 +267,29 @@ fn overlaydb_negative() {
 fn overlaydb_complex() {
 let mut trie = OverlayDB::new_temp();
 let hfoo = trie.insert(b"foo");
-assert_eq!(trie.get(&hfoo).unwrap(), b"foo");
+assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
 let hbar = trie.insert(b"bar");
-assert_eq!(trie.get(&hbar).unwrap(), b"bar");
+assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar"));
 trie.commit().unwrap();
-assert_eq!(trie.get(&hfoo).unwrap(), b"foo");
+assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
-assert_eq!(trie.get(&hbar).unwrap(), b"bar");
+assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar"));
 trie.insert(b"foo"); // two refs
-assert_eq!(trie.get(&hfoo).unwrap(), b"foo");
+assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
 trie.commit().unwrap();
-assert_eq!(trie.get(&hfoo).unwrap(), b"foo");
+assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
-assert_eq!(trie.get(&hbar).unwrap(), b"bar");
+assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar"));
 trie.remove(&hbar); // zero refs - delete
 assert_eq!(trie.get(&hbar), None);
 trie.remove(&hfoo); // one ref - keep
-assert_eq!(trie.get(&hfoo).unwrap(), b"foo");
+assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
 trie.commit().unwrap();
-assert_eq!(trie.get(&hfoo).unwrap(), b"foo");
+assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
 trie.remove(&hfoo); // zero ref - would delete, but...
 assert_eq!(trie.get(&hfoo), None);
 trie.insert(b"foo"); // one ref - keep after all.
-assert_eq!(trie.get(&hfoo).unwrap(), b"foo");
+assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
 trie.commit().unwrap();
-assert_eq!(trie.get(&hfoo).unwrap(), b"foo");
+assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
 trie.remove(&hfoo); // zero ref - delete
 assert_eq!(trie.get(&hfoo), None);
 trie.commit().unwrap(); //
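
The rewritten `OverlayDB::get` adds the in-memory reference count (possibly negative) to the backing reference count before deciding whether a value is live; the decision rule in isolation (stand-in types):

    type DBValue = Vec<u8>; // assumption: stand-in type

    // memrc: net refs from the uncommitted overlay (may be negative),
    // payload: (value, refs) as stored in the backing database.
    fn resolve(memrc: i32, payload: Option<(DBValue, u32)>) -> Option<DBValue> {
        match payload {
            Some((d, rc)) if rc as i32 + memrc > 0 => Some(d),
            _ => None, // net refcount <= 0: the key counts as absent
        }
    }
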
@@ -16,7 +16,7 @@
 
 use hash::H256;
 use sha3::Hashable;
-use hashdb::HashDB;
+use hashdb::{HashDB, DBValue};
 use super::{TrieDB, Trie, TrieDBIterator, TrieItem, Recorder};
 
 /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
@@ -58,7 +58,7 @@ impl<'db> Trie for FatDB<'db> {
 self.raw.contains(&key.sha3())
 }
 
-fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<&'a [u8]>>
+fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<DBValue>>
 where 'a: 'b, R: Recorder
 {
 self.raw.get_recorded(&key.sha3(), rec)
@@ -88,7 +88,7 @@ impl<'db> Iterator for FatDBIterator<'db> {
 self.trie_iterator.next()
 .map(|res|
 res.map(|(hash, value)| {
-(self.trie.db().get_aux(&hash).expect("Missing fatdb hash"), value)
+(self.trie.db().get_aux(&hash).expect("Missing fatdb hash").to_vec(), value)
 })
 )
 }
@@ -106,6 +106,6 @@ fn fatdb_to_trie() {
 t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
 }
 let t = FatDB::new(&memdb, &root).unwrap();
-assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
-assert_eq!(t.iter().unwrap().map(Result::unwrap).collect::<Vec<_>>(), vec![(vec![0x01u8, 0x23], &[0x01u8, 0x23] as &[u8])]);
+assert_eq!(t.iter().unwrap().map(Result::unwrap).collect::<Vec<_>>(), vec![(vec![0x01u8, 0x23], DBValue::from_slice(&[0x01u8, 0x23] as &[u8]))]);
 }
@@ -16,7 +16,7 @@
 
 use hash::H256;
 use sha3::Hashable;
-use hashdb::HashDB;
+use hashdb::{HashDB, DBValue};
 use super::{TrieDBMut, TrieMut};
 
 /// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
@@ -66,7 +66,7 @@ impl<'db> TrieMut for FatDBMut<'db> {
 self.raw.contains(&key.sha3())
 }
 
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<&'a [u8]>>
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<DBValue>>
 where 'a: 'key
 {
 self.raw.get(&key.sha3())
@@ -98,5 +98,5 @@ fn fatdb_to_trie() {
 t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
 }
 let t = TrieDB::new(&memdb, &root).unwrap();
-assert_eq!(t.get(&(&[0x01u8, 0x23]).sha3()).unwrap().unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&(&[0x01u8, 0x23]).sha3()).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
 }
@@ -24,7 +24,7 @@ use hashdb::*;
 /// Type of operation for the backing database - either a new node or a node deletion.
 #[derive(Debug)]
 enum Operation {
-New(H256, Bytes),
+New(H256, DBValue),
 Delete(H256),
 }
 
@@ -52,16 +52,16 @@ impl Journal {
 
 /// Given the RLP that encodes a node, append a reference to that node `out` and leave `journal`
 /// such that the reference is valid, once applied.
-pub fn new_node(&mut self, rlp: Bytes, out: &mut RlpStream) {
+pub fn new_node(&mut self, rlp: DBValue, out: &mut RlpStream) {
 if rlp.len() >= 32 {
 let rlp_sha3 = rlp.sha3();
 
-trace!("new_node: reference node {:?} => {:?}", rlp_sha3, rlp.pretty());
+trace!("new_node: reference node {:?} => {:?}", rlp_sha3, &*rlp);
 out.append(&rlp_sha3);
 self.0.push(Operation::New(rlp_sha3, rlp));
 }
 else {
-trace!("new_node: inline node {:?}", rlp.pretty());
+trace!("new_node: inline node {:?}", &*rlp);
 out.append_raw(&rlp, 1);
 }
 }
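
`new_node` preserves the trie's inline-node rule: an encoded node of 32 bytes or more is journalled and referenced by its SHA3 hash, while anything shorter is embedded directly in the parent, since embedding is cheaper than a 32-byte hash reference. The size check in isolation (hash function passed in, names assumed):

    enum NodeRef {
        Hash([u8; 32]),  // >= 32 bytes: store under hash, reference by hash
        Inline(Vec<u8>), // < 32 bytes: cheaper to embed than to hash
    }

    fn node_ref(rlp: Vec<u8>, sha3: impl Fn(&[u8]) -> [u8; 32]) -> NodeRef {
        if rlp.len() >= 32 { NodeRef::Hash(sha3(&rlp)) } else { NodeRef::Inline(rlp) }
    }
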
@@ -18,7 +18,7 @@
 
 use std::fmt;
 use hash::H256;
-use hashdb::HashDB;
+use hashdb::{HashDB, DBValue};
 
 /// Export the standardmap module.
 pub mod standardmap;
@@ -76,7 +76,7 @@ impl fmt::Display for TrieError {
 pub type Result<T> = ::std::result::Result<T, Box<TrieError>>;
 
 /// Trie-Item type.
-pub type TrieItem<'a> = Result<(Vec<u8>, &'a [u8])>;
+pub type TrieItem<'a> = Result<(Vec<u8>, DBValue)>;
 
 /// A key-value datastore implemented as a database-backed modified Merkle tree.
 pub trait Trie {
@@ -92,13 +92,13 @@ pub trait Trie {
 }
 
 /// What is the value of the given key in this trie?
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<&'a [u8]>> where 'a: 'key {
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>> where 'a: 'key {
 self.get_recorded(key, &mut recorder::NoOp)
 }
 
 /// Query the value of the given key in this trie while recording visited nodes
 /// to the given recorder. If the query fails, the nodes passed to the recorder are unspecified.
-fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> Result<Option<&'a [u8]>>
+fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> Result<Option<DBValue>>
 where 'a: 'b, R: Recorder;
 
 /// Returns an iterator over elements of trie.
@@ -119,7 +119,7 @@ pub trait TrieMut {
 }
 
 /// What is the value of the given key in this trie?
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<&'a [u8]>> where 'a: 'key;
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>> where 'a: 'key;
 
 /// Insert a `key`/`value` pair into the trie. An `empty` value is equivalent to removing
 /// `key` from the trie.
@@ -188,7 +188,7 @@ impl<'db> Trie for TrieKinds<'db> {
 wrapper!(self, contains, key)
 }
 
-fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], r: &'b mut R) -> Result<Option<&'a [u8]>>
+fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], r: &'b mut R) -> Result<Option<DBValue>>
 where 'a: 'b, R: Recorder {
 wrapper!(self, get_recorded, key, r)
 }
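
`Trie::get` remains a provided method after the signature change: it forwards to `get_recorded` with a recorder that does nothing, so implementors write only the recording path. The shape of that trait pattern in miniature:

    trait Recorder { fn record(&mut self, node: &[u8]); }

    struct NoOp;
    impl Recorder for NoOp { fn record(&mut self, _node: &[u8]) {} }

    trait Trie {
        // One required method carries the logic...
        fn get_recorded<R: Recorder>(&self, key: &[u8], rec: &mut R) -> Option<Vec<u8>>;
        // ...and the common case is a default that plugs in the no-op recorder.
        fn get(&self, key: &[u8]) -> Option<Vec<u8>> {
            self.get_recorded(key, &mut NoOp)
        }
    }
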
@@ -14,27 +14,51 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
+use elastic_array::ElasticArray36;
 use nibbleslice::*;
 use bytes::*;
 use rlp::*;
 use super::journal::*;
+use hashdb::DBValue;
+
+/// Partial node key type.
+pub type NodeKey = ElasticArray36<u8>;
 
 /// Type of node in the trie and essential information thereof.
-#[derive(Clone, Eq, PartialEq, Debug)]
+#[derive(Eq, PartialEq, Debug)]
-pub enum Node<'a> {
+pub enum Node {
 /// Null trie node; could be an empty root or an empty branch entry.
 Empty,
 /// Leaf node; has key slice and value. Value may not be empty.
-Leaf(NibbleSlice<'a>, &'a [u8]),
+Leaf(NodeKey, DBValue),
 /// Extension node; has key slice and node data. Data may not be null.
-Extension(NibbleSlice<'a>, &'a [u8]),
+Extension(NodeKey, DBValue),
 /// Branch node; has array of 16 child nodes (each possibly null) and an optional immediate node data.
-Branch([&'a [u8]; 16], Option<&'a [u8]>)
+Branch([NodeKey; 16], Option<DBValue>)
 }
 
-impl<'a> Node<'a> {
+impl Clone for Node {
+fn clone(&self) -> Node {
+match *self {
+Node::Empty => Node::Empty,
+Node::Leaf(ref k, ref v) => Node::Leaf(k.clone(), v.clone()),
+Node::Extension(ref k, ref v) => Node::Extension(k.clone(), v.clone()),
+Node::Branch(ref k, ref v) => {
+let mut branch = [NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
+NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
+NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new()];
+for i in 0 .. 16 {
+branch[i] = k[i].clone();
+}
+Node::Branch(branch, v.clone())
+}
+}
+}
+}
+
+impl Node {
 /// Decode the `node_rlp` and return the Node.
-pub fn decoded(node_rlp: &'a [u8]) -> Node<'a> {
+pub fn decoded(node_rlp: &[u8]) -> Node {
 let r = Rlp::new(node_rlp);
 match r.prototype() {
 // either leaf or extension - decode first item with NibbleSlice::???
@@ -43,16 +67,18 @@ impl<'a> Node<'a> {
 // if extension, second item is a node (either SHA3 to be looked up and
 // fed back into this function or inline RLP which can be fed back into this function).
 Prototype::List(2) => match NibbleSlice::from_encoded(r.at(0).data()) {
-(slice, true) => Node::Leaf(slice, r.at(1).data()),
+(slice, true) => Node::Leaf(slice.encoded(true), DBValue::from_slice(r.at(1).data())),
-(slice, false) => Node::Extension(slice, r.at(1).as_raw()),
+(slice, false) => Node::Extension(slice.encoded(false), DBValue::from_slice(r.at(1).as_raw())),
 },
 // branch - first 16 are nodes, 17th is a value (or empty).
 Prototype::List(17) => {
-let mut nodes: [&'a [u8]; 16] = [&[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[]];
+let mut nodes: [NodeKey; 16] = [NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
+NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(),
+NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new(), NodeKey::new()];
 for i in 0..16 {
-nodes[i] = r.at(i).as_raw();
+nodes[i] = NodeKey::from_slice(r.at(i).as_raw());
 }
-Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(r.at(16).data()) })
+Node::Branch(nodes, if r.at(16).is_empty() { None } else { Some(DBValue::from_slice(r.at(16).data())) })
 },
 // an empty branch index.
 Prototype::Data(0) => Node::Empty,
@@ -69,23 +95,23 @@ impl<'a> Node<'a> {
 match *self {
 Node::Leaf(ref slice, ref value) => {
 let mut stream = RlpStream::new_list(2);
-stream.append(&slice.encoded(true));
+stream.append(&&**slice);
-stream.append(value);
+stream.append(&&**value);
 stream.out()
 },
-Node::Extension(ref slice, raw_rlp) => {
+Node::Extension(ref slice, ref raw_rlp) => {
 let mut stream = RlpStream::new_list(2);
-stream.append(&slice.encoded(false));
+stream.append(&&**slice);
-stream.append_raw(raw_rlp, 1);
+stream.append_raw(&&*raw_rlp, 1);
 stream.out()
 },
 Node::Branch(ref nodes, ref value) => {
 let mut stream = RlpStream::new_list(17);
 for i in 0..16 {
-stream.append_raw(nodes[i], 1);
+stream.append_raw(&*nodes[i], 1);
 }
 match *value {
-Some(n) => { stream.append(&n); },
+Some(ref n) => { stream.append(&&**n); },
 None => { stream.append_empty_data(); },
 }
 stream.out()
@@ -100,26 +126,26 @@ impl<'a> Node<'a> {
 
 /// Encode the node, adding it to `journal` if necessary and return the RLP valid for
 /// insertion into a parent node.
-pub fn encoded_and_added(&self, journal: &mut Journal) -> Bytes {
+pub fn encoded_and_added(&self, journal: &mut Journal) -> DBValue {
 let mut stream = RlpStream::new();
 match *self {
 Node::Leaf(ref slice, ref value) => {
 stream.begin_list(2);
-stream.append(&slice.encoded(true));
+stream.append(&&**slice);
-stream.append(value);
+stream.append(&&**value);
 },
-Node::Extension(ref slice, raw_rlp) => {
+Node::Extension(ref slice, ref raw_rlp) => {
 stream.begin_list(2);
-stream.append(&slice.encoded(false));
+stream.append(&&**slice);
-stream.append_raw(raw_rlp, 1);
+stream.append_raw(&&**raw_rlp, 1);
 },
 Node::Branch(ref nodes, ref value) => {
 stream.begin_list(17);
 for i in 0..16 {
-stream.append_raw(nodes[i], 1);
+stream.append_raw(&*nodes[i], 1);
 }
 match *value {
-Some(n) => { stream.append(&n); },
+Some(ref n) => { stream.append(&&**n); },
 None => { stream.append_empty_data(); },
 }
 },
@@ -127,13 +153,13 @@ impl<'a> Node<'a> {
 stream.append_empty_data();
 }
 }
-let node = stream.out();
+let node = DBValue::from_slice(stream.as_raw());
 match node.len() {
 0 ... 31 => node,
 _ => {
 let mut stream = RlpStream::new();
 journal.new_node(node, &mut stream);
-stream.out()
+DBValue::from_slice(stream.as_raw())
 }
 }
 }
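
The hand-written `Clone` above is needed because, in the Rust of this period, `[T; 16]` implemented `Clone` only when `T: Copy`, and the new owned `NodeKey` is not `Copy`, so `#[derive(Clone)]` no longer works once `Branch` holds owned keys. The same element-wise workaround in miniature (hypothetical `Key` type):

    #[derive(Debug)]
    struct Key(Vec<u8>); // stand-in for the non-Copy NodeKey

    impl Clone for Key {
        fn clone(&self) -> Key { Key(self.0.clone()) }
    }

    fn clone_children(children: &[Key; 4]) -> [Key; 4] {
        // Build placeholders, then clone element-wise: the shape of Node::clone.
        let mut out = [Key(Vec::new()), Key(Vec::new()), Key(Vec::new()), Key(Vec::new())];
        for i in 0..4 {
            out[i] = children[i].clone();
        }
        out
    }
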
@@ -16,7 +16,7 @@
 
 use hash::H256;
 use sha3::Hashable;
-use hashdb::HashDB;
+use hashdb::{HashDB, DBValue};
 use super::triedb::TrieDB;
 use super::{Trie, TrieItem, Recorder};
 
@@ -59,7 +59,7 @@ impl<'db> Trie for SecTrieDB<'db> {
 self.raw.contains(&key.sha3())
 }
 
-fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<&'a [u8]>>
+fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<DBValue>>
 where 'a: 'b, R: Recorder
 {
 self.raw.get_recorded(&key.sha3(), rec)
@@ -79,5 +79,5 @@ fn trie_to_sectrie() {
 t.insert(&(&[0x01u8, 0x23]).sha3(), &[0x01u8, 0x23]).unwrap();
 }
 let t = SecTrieDB::new(&memdb, &root).unwrap();
-assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
 }
@@ -16,7 +16,7 @@
 
 use hash::H256;
 use sha3::Hashable;
-use hashdb::HashDB;
+use hashdb::{HashDB, DBValue};
 use super::triedbmut::TrieDBMut;
 use super::TrieMut;
 
@@ -62,7 +62,7 @@ impl<'db> TrieMut for SecTrieDBMut<'db> {
 self.raw.contains(&key.sha3())
 }
 
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<&'a [u8]>>
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<DBValue>>
 where 'a: 'key
 {
 self.raw.get(&key.sha3())
@@ -90,5 +90,5 @@ fn sectrie_to_trie() {
 t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
 }
 let t = TrieDB::new(&memdb, &root).unwrap();
-assert_eq!(t.get(&(&[0x01u8, 0x23]).sha3()).unwrap().unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&(&[0x01u8, 0x23]).sha3()).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
 }
|
@ -44,7 +44,7 @@ use super::{Trie, TrieItem, TrieError};
 /// TrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar").unwrap();
 /// let t = TrieDB::new(&memdb, &root).unwrap();
 /// assert!(t.contains(b"foo").unwrap());
-/// assert_eq!(t.get(b"foo").unwrap().unwrap(), b"bar");
+/// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar"));
 /// assert!(t.db_items_remaining().unwrap().is_empty());
 /// }
 /// ```
@ -119,8 +119,8 @@ impl<'db> TrieDB<'db> {
         };
 
         match node {
-            Node::Extension(_, payload) => try!(handle_payload(payload)),
-            Node::Branch(payloads, _) => for payload in &payloads { try!(handle_payload(payload)) },
+            Node::Extension(_, ref payload) => try!(handle_payload(payload)),
+            Node::Branch(ref payloads, _) => for payload in payloads { try!(handle_payload(payload)) },
             _ => {},
         }
 
@ -129,18 +129,18 @@ impl<'db> TrieDB<'db> {
     /// Get the root node's RLP.
     fn root_node<R: Recorder>(&self, r: &mut R) -> super::Result<Node> {
-        self.root_data(r).map(Node::decoded)
+        self.root_data(r).map(|d| Node::decoded(&d))
     }
 
     /// Get the data of the root node.
-    fn root_data<'a, R: 'a + Recorder>(&self, r: &'a mut R) -> super::Result<&[u8]> {
+    fn root_data<'a, R: 'a + Recorder>(&self, r: &'a mut R) -> super::Result<DBValue> {
         self.db.get(self.root).ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root)))
-            .map(|node| { r.record(self.root, node, 0); node })
+            .map(|node| { r.record(self.root, &*node, 0); node })
     }
 
     /// Get the root node as a `Node`.
     fn get_node<'a, R: 'a + Recorder>(&'db self, node: &'db [u8], r: &'a mut R, depth: u32) -> super::Result<Node> {
-        self.get_raw_or_lookup(node, r, depth).map(Node::decoded)
+        self.get_raw_or_lookup(node, r, depth).map(|n| Node::decoded(&n))
     }
 
     /// Indentation helper for `formal_all`.
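A small but recurring consequence of the owned return type: point-free adapters like `.map(Node::decoded)` no longer type-check, because `Node::decoded` wants a borrow while `map` now yields an owned `DBValue`, hence the `|d| Node::decoded(&d)` closures above. A minimal stand-in:

    // Stand-in for Node::decoded: takes a borrowed byte slice.
    fn decoded(data: &[u8]) -> usize { data.len() }

    fn main() {
        let owned: Result<Vec<u8>, ()> = Ok(b"rlp".to_vec());
        // `owned.map(decoded)` would fail: `map` passes `Vec<u8>` by value,
        // but `decoded` expects `&[u8]`. A closure inserts the borrow:
        assert_eq!(owned.map(|d| decoded(&d)), Ok(3));
    }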
@ -155,20 +155,20 @@ impl<'db> TrieDB<'db> {
     fn fmt_all(&self, node: Node, f: &mut fmt::Formatter, deepness: usize) -> fmt::Result {
         match node {
             Node::Leaf(slice, value) => try!(writeln!(f, "'{:?}: {:?}.", slice, value.pretty())),
-            Node::Extension(ref slice, item) => {
+            Node::Extension(ref slice, ref item) => {
                 try!(write!(f, "'{:?} ", slice));
-                if let Ok(node) = self.get_node(item, &mut NoOp, 0) {
+                if let Ok(node) = self.get_node(&*item, &mut NoOp, 0) {
                     try!(self.fmt_all(node, f, deepness));
                 }
             },
             Node::Branch(ref nodes, ref value) => {
                 try!(writeln!(f, ""));
-                if let Some(v) = *value {
+                if let Some(ref v) = *value {
                     try!(self.fmt_indent(f, deepness + 1));
                     try!(writeln!(f, "=: {:?}", v.pretty()))
                 }
                 for i in 0..16 {
-                    match self.get_node(nodes[i], &mut NoOp, 0) {
+                    match self.get_node(&*nodes[i], &mut NoOp, 0) {
                         Ok(Node::Empty) => {},
                         Ok(n) => {
                             try!(self.fmt_indent(f, deepness + 1));
@ -190,11 +190,11 @@ impl<'db> TrieDB<'db> {
     }
 
     /// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists.
-    fn do_lookup<'key, R: 'key>(&'db self, key: &NibbleSlice<'key>, r: &'key mut R) -> super::Result<Option<&'db [u8]>>
+    fn do_lookup<'key, R: 'key>(&'db self, key: &NibbleSlice<'key>, r: &'key mut R) -> super::Result<Option<DBValue>>
         where 'db: 'key, R: Recorder
     {
         let root_rlp = try!(self.root_data(r));
-        self.get_from_node(root_rlp, key, r, 1)
+        self.get_from_node(&root_rlp, key, r, 1)
     }
 
     /// Recursible function to retrieve the value given a `node` and a partial `key`. `None` if no
@ -207,18 +207,23 @@ impl<'db> TrieDB<'db> {
         key: &NibbleSlice<'key>,
         r: &'key mut R,
         d: u32
-    ) -> super::Result<Option<&'db [u8]>> where 'db: 'key, R: Recorder {
+    ) -> super::Result<Option<DBValue>> where 'db: 'key, R: Recorder {
         match Node::decoded(node) {
-            Node::Leaf(ref slice, value) if key == slice => Ok(Some(value)),
-            Node::Extension(ref slice, item) if key.starts_with(slice) => {
-                let data = try!(self.get_raw_or_lookup(item, r, d));
-                self.get_from_node(data, &key.mid(slice.len()), r, d + 1)
+            Node::Leaf(ref slice, ref value) if NibbleSlice::from_encoded(slice).0 == *key => Ok(Some(value.clone())),
+            Node::Extension(ref slice, ref item) => {
+                let slice = &NibbleSlice::from_encoded(slice).0;
+                if key.starts_with(slice) {
+                    let data = try!(self.get_raw_or_lookup(&*item, r, d));
+                    self.get_from_node(&data, &key.mid(slice.len()), r, d + 1)
+                } else {
+                    Ok(None)
+                }
             },
-            Node::Branch(ref nodes, value) => match key.is_empty() {
-                true => Ok(value),
+            Node::Branch(ref nodes, ref value) => match key.is_empty() {
+                true => Ok(value.clone()),
                 false => {
-                    let node = try!(self.get_raw_or_lookup(nodes[key.at(0) as usize], r, d));
-                    self.get_from_node(node, &key.mid(1), r, d + 1)
+                    let node = try!(self.get_raw_or_lookup(&*nodes[key.at(0) as usize], r, d));
+                    self.get_from_node(&node, &key.mid(1), r, d + 1)
                 }
             },
             _ => Ok(None)
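The reshaped `Extension` arm is worth a note: the node's key is now stored in its encoded (hex-prefix) form, so it must be run through `NibbleSlice::from_encoded` before the prefix comparison, and that decode cannot live in a `match` guard. The guard therefore becomes an explicit `if`/`else` inside the arm, with the non-matching case returning `Ok(None)` just as the fallen-through guard used to. A toy stand-in of the shape change (the one-byte "decode" here is invented for illustration):

    // Invented toy decode: strip a one-byte flag prefix.
    fn from_encoded(encoded: &[u8]) -> Vec<u8> { encoded[1..].to_vec() }

    fn lookup(encoded_slice: &[u8], key: &[u8]) -> Option<&'static str> {
        // Previously: a match guard `if key.starts_with(slice)`.
        // Now the decode must happen first, inside the arm:
        let slice = from_encoded(encoded_slice);
        if key.starts_with(&slice) {
            Some("descend into the child node")
        } else {
            None // the case the guard used to handle by falling through
        }
    }

    fn main() {
        assert_eq!(lookup(&[0x00, b'a'], b"ab"), Some("descend into the child node"));
        assert_eq!(lookup(&[0x00, b'x'], b"ab"), None);
    }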
@ -228,16 +233,16 @@ impl<'db> TrieDB<'db> {
     /// Given some node-describing data `node`, return the actual node RLP.
     /// This could be a simple identity operation in the case that the node is sufficiently small, but
     /// may require a database lookup.
-    fn get_raw_or_lookup<R: Recorder>(&'db self, node: &'db [u8], rec: &mut R, d: u32) -> super::Result<&'db [u8]> {
+    fn get_raw_or_lookup<R: Recorder>(&'db self, node: &'db [u8], rec: &mut R, d: u32) -> super::Result<DBValue> {
         // check if its sha3 + len
         let r = Rlp::new(node);
         match r.is_data() && r.size() == 32 {
             true => {
                 let key = r.as_val::<H256>();
                 self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key)))
-                    .map(|raw| { rec.record(&key, raw, d); raw })
+                    .map(|raw| { rec.record(&key, &raw, d); raw })
             }
-            false => Ok(node)
+            false => Ok(DBValue::from_slice(node))
         }
     }
 }
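`get_raw_or_lookup` encodes the merkle-trie rule that a child reference is either the child's RLP inlined directly (when the encoded node is short) or the 32-byte sha3 hash of it, which must be resolved through the database. With `DBValue` as the return type, the inline branch now copies the bytes (`DBValue::from_slice(node)`) instead of handing back the borrowed sub-slice. A sketch of the dispatch rule only (stand-in predicate, not the real `Rlp` API):

    // Stand-in: a child reference is a hash iff it is a single RLP data
    // item whose payload is exactly 32 bytes (an H256).
    fn is_hash_reference(is_data: bool, payload_len: usize) -> bool {
        is_data && payload_len == 32
    }

    fn main() {
        assert!(is_hash_reference(true, 32));  // H256 key: go to the database
        assert!(!is_hash_reference(true, 7));  // short node: inlined RLP, copy it
    }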
@ -251,12 +256,12 @@ enum Status {
 }
 
 #[derive(Clone, Eq, PartialEq)]
-struct Crumb<'a> {
-    node: Node<'a>,
+struct Crumb {
+    node: Node,
     status: Status,
 }
 
-impl<'a> Crumb<'a> {
+impl Crumb {
     /// Move on to next status in the node's sequence.
     fn increment(&mut self) {
         self.status = match (&self.status, &self.node) {
@ -273,7 +278,7 @@ impl<'a> Crumb<'a> {
 #[derive(Clone)]
 pub struct TrieDBIterator<'a> {
     db: &'a TrieDB<'a>,
-    trail: Vec<Crumb<'a>>,
+    trail: Vec<Crumb>,
     key_nibbles: Bytes,
 }
 
@ -286,18 +291,18 @@ impl<'a> TrieDBIterator<'a> {
             key_nibbles: Vec::new(),
         };
 
-        try!(db.root_data(&mut NoOp).and_then(|root| r.descend(root)));
+        try!(db.root_data(&mut NoOp).and_then(|root| r.descend(&root)));
         Ok(r)
     }
 
     /// Descend into a payload.
-    fn descend(&mut self, d: &'a [u8]) -> super::Result<()> {
+    fn descend(&mut self, d: &[u8]) -> super::Result<()> {
         self.trail.push(Crumb {
             status: Status::Entering,
             node: try!(self.db.get_node(d, &mut NoOp, 0)),
         });
-        match self.trail.last().unwrap().node {
-            Node::Leaf(n, _) | Node::Extension(n, _) => { self.key_nibbles.extend(n.iter()); },
+        match self.trail.last().expect("just pushed item; qed").node {
+            Node::Leaf(ref n, _) | Node::Extension(ref n, _) => { self.key_nibbles.extend(NibbleSlice::from_encoded(n).0.iter()); },
             _ => {}
         }
 
@ -325,7 +330,7 @@ impl<'a> Iterator for TrieDBIterator<'a> {
                 match n {
                     Node::Leaf(n, _) | Node::Extension(n, _) => {
                         let l = self.key_nibbles.len();
-                        self.key_nibbles.truncate(l - n.len());
+                        self.key_nibbles.truncate(l - NibbleSlice::from_encoded(&*n).0.len());
                     },
                     Node::Branch(_, _) => { self.key_nibbles.pop(); },
                     _ => {}
@ -337,18 +342,19 @@ impl<'a> Iterator for TrieDBIterator<'a> {
                     return Some(Ok((self.key(), v)));
                 },
                 (Status::At, Node::Extension(_, d)) => {
-                    if let Err(e) = self.descend(d) {
+                    if let Err(e) = self.descend(&*d) {
                         return Some(Err(e));
                     }
                    // continue
                 },
                 (Status::At, Node::Branch(_, _)) => {},
-                (Status::AtChild(i), Node::Branch(children, _)) if children[i].len() > 0 => {
+                (Status::AtChild(i), Node::Branch(ref children, _)) if children[i].len() > 0 => {
                     match i {
                         0 => self.key_nibbles.push(0),
-                        i => *self.key_nibbles.last_mut().unwrap() = i as u8,
+                        i => *self.key_nibbles.last_mut()
+                            .expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8,
                     }
-                    if let Err(e) = self.descend(children[i]) {
+                    if let Err(e) = self.descend(&*children[i]) {
                         return Some(Err(e));
                     }
                    // continue
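The iterator hunks all revolve around the same bookkeeping: `key_nibbles` is a running nibble path that is extended on every `descend` and truncated (or popped, for branches) on the way back out, so the full key can be reassembled at any value. A stand-alone sketch of that mechanism, with the hex-prefix decoding elided:

    // Rebuild bytes from a nibble path (two nibbles per byte).
    fn nibbles_to_key(nibbles: &[u8]) -> Vec<u8> {
        nibbles.chunks(2).map(|p| (p[0] << 4) | p[1]).collect()
    }

    fn main() {
        let mut key_nibbles: Vec<u8> = Vec::new();
        key_nibbles.extend(&[4, 1]); // descend an extension covering b"A" (0x41)
        key_nibbles.extend(&[4, 1]); // descend a leaf adding another 0x41
        assert_eq!(nibbles_to_key(&key_nibbles), b"AA");

        let l = key_nibbles.len();
        key_nibbles.truncate(l - 2); // exit the leaf, as in Iterator::next
        assert_eq!(nibbles_to_key(&key_nibbles), b"A");
    }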
@ -372,7 +378,7 @@ impl<'db> Trie for TrieDB<'db> {
 
     fn root(&self) -> &H256 { self.root }
 
-    fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<&'a [u8]>>
+    fn get_recorded<'a, 'b, R: 'b>(&'a self, key: &'b [u8], rec: &'b mut R) -> super::Result<Option<DBValue>>
         where 'a: 'b, R: Recorder
     {
         self.do_lookup(&NibbleSlice::new(key), rec)
@ -383,7 +389,7 @@ impl<'db> fmt::Debug for TrieDB<'db> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         try!(writeln!(f, "c={:?} [", self.hash_count));
         let root_rlp = self.db.get(self.root).expect("Trie root not found!");
-        try!(self.fmt_all(Node::decoded(root_rlp), f, 0));
+        try!(self.fmt_all(Node::decoded(&root_rlp), f, 0));
         writeln!(f, "]")
     }
 }
@ -394,7 +400,7 @@ fn iterator() {
     use super::TrieMut;
     use super::triedbmut::*;
 
-    let d = vec![ &b"A"[..], &b"AA"[..], &b"AB"[..], &b"B"[..] ];
+    let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ];
 
     let mut memdb = MemoryDB::new();
     let mut root = H256::new();
@ -406,6 +412,6 @@ fn iterator() {
     }
 
     let t = TrieDB::new(&memdb, &root).unwrap();
-    assert_eq!(d.iter().map(|i|i.to_vec()).collect::<Vec<_>>(), t.iter().unwrap().map(|x| x.unwrap().0).collect::<Vec<_>>());
+    assert_eq!(d.iter().map(|i| i.clone().to_vec()).collect::<Vec<_>>(), t.iter().unwrap().map(|x| x.unwrap().0).collect::<Vec<_>>());
     assert_eq!(d, t.iter().unwrap().map(|x| x.unwrap().1).collect::<Vec<_>>());
 }
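The updated test keeps its original shape: the input keys are already in lexicographic order, and the assertions rely on trie iteration yielding entries sorted by key, now as `(key, DBValue)` pairs. The same ordering property, shown stand-alone with a sorted map as a stand-in for the trie (the remaining hunks move on to the mutable `TrieDBMut`):

    use std::collections::BTreeMap;

    fn main() {
        let mut m = BTreeMap::new();
        // Insert in a scrambled order; iteration is sorted regardless.
        for k in [&b"B"[..], &b"A"[..], &b"AB"[..], &b"AA"[..]].iter() {
            m.insert(k.to_vec(), k.to_vec());
        }
        let keys: Vec<Vec<u8>> = m.keys().cloned().collect();
        assert_eq!(keys, vec![b"A".to_vec(), b"AA".to_vec(), b"AB".to_vec(), b"B".to_vec()]);
    }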
@ -18,12 +18,14 @@
 
 use super::{TrieError, TrieMut};
 use super::node::Node as RlpNode;
+use super::node::NodeKey;
 
-use ::{Bytes, HashDB, H256};
+use ::{HashDB, H256};
 use ::bytes::ToPretty;
 use ::nibbleslice::NibbleSlice;
 use ::rlp::{Rlp, RlpStream, View, Stream};
 use ::sha3::SHA3_NULL_RLP;
+use hashdb::DBValue;
 
 use elastic_array::ElasticArray1024;
 
@ -72,14 +74,14 @@ enum Node {
     /// A leaf node contains the end of a key and a value.
     /// This key is encoded from a `NibbleSlice`, meaning it contains
     /// a flag indicating it is a leaf.
-    Leaf(Bytes, Bytes),
+    Leaf(NodeKey, DBValue),
     /// An extension contains a shared portion of a key and a child node.
     /// The shared portion is encoded from a `NibbleSlice` meaning it contains
     /// a flag indicating it is an extension.
     /// The child node is always a branch.
-    Extension(Bytes, NodeHandle),
+    Extension(NodeKey, NodeHandle),
     /// A branch has up to 16 children and an optional value.
-    Branch(Box<[Option<NodeHandle>; 16]>, Option<Bytes>)
+    Branch(Box<[Option<NodeHandle>; 16]>, Option<DBValue>)
 }
 
 impl Node {
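For orientation: the in-memory `Node` now owns its data outright (`NodeKey` for encoded partial keys, `DBValue` for values), and children are held behind `NodeHandle`, which, judging from the match arms further down (`NodeHandle::InMemory` / `NodeHandle::Hash`), refers to a node either by in-memory storage handle or by database hash. A much simplified stand-in of that two-state reference:

    // Simplified stand-ins; the real types live alongside TrieDBMut.
    type H256 = [u8; 32];

    enum NodeHandle {
        Hash(H256),      // still in the HashDB; cache() loads and decodes it
        InMemory(usize), // index into the in-memory NodeStorage
    }

    fn describe(h: &NodeHandle) -> &'static str {
        match h {
            NodeHandle::Hash(_) => "load from database on demand",
            NodeHandle::InMemory(_) => "already decoded in node storage",
        }
    }

    fn main() {
        assert_eq!(describe(&NodeHandle::InMemory(0)), "already decoded in node storage");
        assert_eq!(describe(&NodeHandle::Hash([0u8; 32])), "load from database on demand");
    }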
@ -98,21 +100,18 @@ impl Node {
     fn from_rlp(rlp: &[u8], db: &HashDB, storage: &mut NodeStorage) -> Self {
         match RlpNode::decoded(rlp) {
             RlpNode::Empty => Node::Empty,
-            RlpNode::Leaf(k, v) => Node::Leaf(k.encoded(true), v.to_owned()),
+            RlpNode::Leaf(k, v) => Node::Leaf(k, v),
-            RlpNode::Extension(partial, cb) => {
-                let key = partial.encoded(false);
-
-                Node::Extension(key, Self::inline_or_hash(cb, db, storage))
+            RlpNode::Extension(key, cb) => {
+                Node::Extension(key, Self::inline_or_hash(&*cb, db, storage))
             }
-            RlpNode::Branch(children_rlp, v) => {
-                let val = v.map(|x| x.to_owned());
+            RlpNode::Branch(children_rlp, val) => {
                 let mut children = empty_children();
 
                 for i in 0..16 {
-                    let raw = children_rlp[i];
-                    let child_rlp = Rlp::new(raw);
+                    let raw = &children_rlp[i];
+                    let child_rlp = Rlp::new(&*raw);
                     if !child_rlp.is_empty() {
-                        children[i] = Some(Self::inline_or_hash(raw, db, storage));
+                        children[i] = Some(Self::inline_or_hash(&*raw, db, storage));
                     }
                 }
 
@ -134,13 +133,13 @@ impl Node {
             }
             Node::Leaf(partial, value) => {
                 let mut stream = RlpStream::new_list(2);
-                stream.append(&partial);
-                stream.append(&value);
+                stream.append(&&*partial);
+                stream.append(&&*value);
                 stream.drain()
             }
             Node::Extension(partial, child) => {
                 let mut stream = RlpStream::new_list(2);
-                stream.append(&partial);
+                stream.append(&&*partial);
                 child_cb(child, &mut stream);
                 stream.drain()
             }
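The `&&*value` spelling in the RLP encoding arms looks odd but is mechanical: `*value` derefs the owned buffer to `[u8]`, `&*value` yields a `&[u8]`, and the extra `&` passes that slice by reference to `append`. A stand-alone sketch with a stand-in buffer type (not the real `elastic_array` or `rlp` API):

    use std::ops::Deref;

    // Stand-in for DBValue: an owned buffer that derefs to a byte slice.
    struct DBValue(Vec<u8>);

    impl Deref for DBValue {
        type Target = [u8];
        fn deref(&self) -> &[u8] { &self.0 }
    }

    // Stand-in for RlpStream::append, taking the value by reference.
    fn append(out: &mut Vec<u8>, data: &&[u8]) {
        out.extend_from_slice(data);
    }

    fn main() {
        let value = DBValue(b"bar".to_vec());
        let mut out = Vec::new();
        append(&mut out, &&*value); // &*value: &[u8]; &&*value: &&[u8]
        assert_eq!(out, b"bar");
    }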
@ -154,7 +153,7 @@ impl Node {
                 }
             }
             if let Some(value) = value {
-                stream.append(&value);
+                stream.append(&&*value);
             } else {
                 stream.append_empty_data();
             }
@ -276,7 +275,7 @@ impl<'a> Index<&'a StorageHandle> for NodeStorage {
 /// assert_eq!(*t.root(), ::util::sha3::SHA3_NULL_RLP);
 /// t.insert(b"foo", b"bar").unwrap();
 /// assert!(t.contains(b"foo").unwrap());
-/// assert_eq!(t.get(b"foo").unwrap().unwrap(), b"bar");
+/// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar"));
 /// t.remove(b"foo").unwrap();
 /// assert!(!t.contains(b"foo").unwrap());
 /// }
@ -338,7 +337,7 @@ impl<'a> TrieDBMut<'a> {
     // cache a node by hash
     fn cache(&mut self, hash: H256) -> super::Result<StorageHandle> {
         let node_rlp = try!(self.db.get(&hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash))));
-        let node = Node::from_rlp(node_rlp, &*self.db, &mut self.storage);
+        let node = Node::from_rlp(&node_rlp, &*self.db, &mut self.storage);
         Ok(self.storage.alloc(Stored::Cached(node, hash)))
     }
 
@ -367,7 +366,7 @@ impl<'a> TrieDBMut<'a> {
     }
 
     // walk the trie, attempting to find the key's node.
-    fn lookup<'x, 'key>(&'x self, partial: NibbleSlice<'key>, handle: &NodeHandle) -> super::Result<Option<&'x [u8]>>
+    fn lookup<'x, 'key>(&'x self, partial: NibbleSlice<'key>, handle: &NodeHandle) -> super::Result<Option<DBValue>>
         where 'x: 'key
     {
         match *handle {
@ -376,7 +375,7 @@ impl<'a> TrieDBMut<'a> {
             Node::Empty => Ok(None),
             Node::Leaf(ref key, ref value) => {
                 if NibbleSlice::from_encoded(key).0 == partial {
-                    Ok(Some(value))
+                    Ok(Some(DBValue::from_slice(value)))
                 } else {
                     Ok(None)
                 }
@ -391,7 +390,7 @@ impl<'a> TrieDBMut<'a> {
             }
             Node::Branch(ref children, ref value) => {
                 if partial.is_empty() {
-                    Ok(value.as_ref().map(|v| &v[..]))
+                    Ok(value.as_ref().map(|v| DBValue::from_slice(v)))
                 } else {
                     let idx = partial.at(0);
                     match children[idx as usize].as_ref() {
@ -405,28 +404,33 @@ impl<'a> TrieDBMut<'a> {
     }
 
     /// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists.
-    fn do_db_lookup<'x, 'key>(&'x self, hash: &H256, key: NibbleSlice<'key>) -> super::Result<Option<&'x [u8]>>
+    fn do_db_lookup<'x, 'key>(&'x self, hash: &H256, key: NibbleSlice<'key>) -> super::Result<Option<DBValue>>
         where 'x: 'key
     {
         self.db.get(hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(*hash)))
-            .and_then(|node_rlp| self.get_from_db_node(node_rlp, key))
+            .and_then(|node_rlp| self.get_from_db_node(&node_rlp, key))
     }
 
     /// Recursible function to retrieve the value given a `node` and a partial `key`. `None` if no
     /// value exists for the key.
     ///
     /// Note: Not a public API; use Trie trait functions.
-    fn get_from_db_node<'x, 'key>(&'x self, node: &'x [u8], key: NibbleSlice<'key>) -> super::Result<Option<&'x [u8]>>
+    fn get_from_db_node<'x, 'key>(&'x self, node: &'x [u8], key: NibbleSlice<'key>) -> super::Result<Option<DBValue>>
         where 'x: 'key
     {
         match RlpNode::decoded(node) {
-            RlpNode::Leaf(ref slice, value) if &key == slice => Ok(Some(value)),
+            RlpNode::Leaf(ref slice, ref value) if NibbleSlice::from_encoded(slice).0 == key => Ok(Some(value.clone())),
-            RlpNode::Extension(ref slice, item) if key.starts_with(slice) => {
-                self.get_from_db_node(try!(self.get_raw_or_lookup(item)), key.mid(slice.len()))
+            RlpNode::Extension(ref slice, ref item) => {
+                let slice = &NibbleSlice::from_encoded(slice).0;
+                if key.starts_with(slice) {
+                    self.get_from_db_node(&try!(self.get_raw_or_lookup(&*item)), key.mid(slice.len()))
+                } else {
+                    Ok(None)
+                }
             },
-            RlpNode::Branch(ref nodes, value) => match key.is_empty() {
-                true => Ok(value),
+            RlpNode::Branch(ref nodes, ref value) => match key.is_empty() {
+                true => Ok(value.clone()),
-                false => self.get_from_db_node(try!(self.get_raw_or_lookup(nodes[key.at(0) as usize])), key.mid(1))
+                false => self.get_from_db_node(&try!(self.get_raw_or_lookup(&*nodes[key.at(0) as usize])), key.mid(1))
             },
             _ => Ok(None),
         }
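`NibbleSlice::from_encoded` shows up throughout these lookups because node keys are now stored in their hex-prefix (HP) encoded form and decoded only at comparison time; the `.0` discards the leaf flag that HP encoding carries alongside the nibbles. A simplified stand-alone sketch of HP decoding, assuming the standard Ethereum hex-prefix layout (flag nibble: bit 1 = leaf, bit 0 = odd length):

    // Simplified hex-prefix decode: returns (nibbles, is_leaf).
    fn from_encoded(data: &[u8]) -> (Vec<u8>, bool) {
        let flag = data[0] >> 4;
        let is_leaf = flag & 2 != 0;
        let mut nibbles = Vec::new();
        if flag & 1 != 0 {
            nibbles.push(data[0] & 0x0f); // odd length: low nibble is payload
        }
        for b in &data[1..] {
            nibbles.push(b >> 4);
            nibbles.push(b & 0x0f);
        }
        (nibbles, is_leaf)
    }

    fn main() {
        // 0x20 flag byte: even-length leaf; the key nibbles 0,1,2,3 follow.
        assert_eq!(from_encoded(&[0x20, 0x01, 0x23]), (vec![0, 1, 2, 3], true));
    }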
@ -435,7 +439,7 @@ impl<'a> TrieDBMut<'a> {
     /// Given some node-describing data `node`, return the actual node RLP.
     /// This could be a simple identity operation in the case that the node is sufficiently small, but
     /// may require a database lookup.
-    fn get_raw_or_lookup<'x>(&'x self, node: &'x [u8]) -> super::Result<&'x [u8]> {
+    fn get_raw_or_lookup<'x>(&'x self, node: &'x [u8]) -> super::Result<DBValue> {
         // check if its sha3 + len
         let r = Rlp::new(node);
         match r.is_data() && r.size() == 32 {
@ -443,12 +447,12 @@ impl<'a> TrieDBMut<'a> {
                 let key = r.as_val::<H256>();
                 self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key)))
             }
-            false => Ok(node)
+            false => Ok(DBValue::from_slice(node))
         }
     }
 
     /// insert a key, value pair into the trie, creating new nodes if necessary.
-    fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: Bytes) -> super::Result<(StorageHandle, bool)> {
+    fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: DBValue) -> super::Result<(StorageHandle, bool)> {
         let h = match handle {
             NodeHandle::InMemory(h) => h,
             NodeHandle::Hash(h) => try!(self.cache(h)),
@ -463,7 +467,7 @@ impl<'a> TrieDBMut<'a> {
 
     /// the insertion inspector.
     #[cfg_attr(feature = "dev", allow(cyclomatic_complexity))]
-    fn insert_inspector(&mut self, node: Node, partial: NibbleSlice, value: Bytes) -> super::Result<InsertAction> {
+    fn insert_inspector(&mut self, node: Node, partial: NibbleSlice, value: DBValue) -> super::Result<InsertAction> {
         trace!(target: "trie", "augmented (partial: {:?}, value: {:?})", partial, value.pretty());
 
         Ok(match node {
@ -744,7 +748,8 @@ impl<'a> TrieDBMut<'a> {
                 (UsedIndex::One(a), None) => {
                     // only one onward node. make an extension.
                     let new_partial = NibbleSlice::new_offset(&[a], 1).encoded(false);
-                    let new_node = Node::Extension(new_partial, children[a as usize].take().unwrap());
+                    let child = children[a as usize].take().expect("used_index only set if occupied; qed");
+                    let new_node = Node::Extension(new_partial, child);
                     self.fix(new_node)
                 }
                 (UsedIndex::None, Some(value)) => {
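One stylistic pattern this commit introduces in several places: replacing bare `unwrap()` with `expect("...; qed")`, where the message is a one-line proof of why the value must exist and the trailing "qed" marks it as such. This is a codebase convention rather than a language feature. In miniature:

    fn main() {
        let mut trail = vec![1u8];
        trail.push(2);
        // The message documents the invariant that makes this infallible.
        let last = *trail.last().expect("just pushed item; qed");
        assert_eq!(last, 2);
    }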
@ -897,7 +902,7 @@ impl<'a> TrieMut for TrieDBMut<'a> {
         }
     }
 
-    fn get<'x, 'key>(&'x self, key: &'key [u8]) -> super::Result<Option<&'x [u8]>> where 'x: 'key {
+    fn get<'x, 'key>(&'x self, key: &'key [u8]) -> super::Result<Option<DBValue>> where 'x: 'key {
         self.lookup(NibbleSlice::new(key), &self.root_handle)
     }
 
@ -910,7 +915,7 @@ impl<'a> TrieMut for TrieDBMut<'a> {
         trace!(target: "trie", "insert: key={:?}, value={:?}", key.pretty(), value.pretty());
 
         let root_handle = self.root_handle();
-        let (new_handle, changed) = try!(self.insert_at(root_handle, NibbleSlice::new(key), value.to_owned()));
+        let (new_handle, changed) = try!(self.insert_at(root_handle, NibbleSlice::new(key), DBValue::from_slice(value)));
 
         trace!(target: "trie", "insert: altered trie={}", changed);
         self.root_handle = NodeHandle::InMemory(new_handle);
@ -1179,9 +1184,9 @@ mod tests {
         let mut root = H256::new();
         let mut t = TrieDBMut::new(&mut memdb, &mut root);
         t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
-        assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), &[0x1u8, 0x23]);
+        assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x1u8, 0x23]));
         t.commit();
-        assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), &[0x1u8, 0x23]);
+        assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x1u8, 0x23]));
     }
 
     #[test]
@ -1192,14 +1197,14 @@ mod tests {
         t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
         t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap();
         t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap();
-        assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), &[0x01u8, 0x23]);
-        assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), &[0xf1u8, 0x23]);
-        assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), &[0x81u8, 0x23]);
+        assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
+        assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0xf1u8, 0x23]));
+        assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x81u8, 0x23]));
         assert_eq!(t.get(&[0x82, 0x23]), Ok(None));
         t.commit();
-        assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), &[0x01u8, 0x23]);
-        assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), &[0xf1u8, 0x23]);
-        assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), &[0x81u8, 0x23]);
+        assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
+        assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0xf1u8, 0x23]));
+        assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x81u8, 0x23]));
         assert_eq!(t.get(&[0x82, 0x23]), Ok(None));
     }
 