Merge branch 'master' into auth-round-no-mocknet

keorn committed fc4664ab12 on 2016-10-30 21:18:52 +00:00
81 changed files with 1099 additions and 387 deletions


@@ -5,7 +5,7 @@ variables:
   GIT_DEPTH: "3"
   SIMPLECOV: "true"
   RUST_BACKTRACE: "1"
-  RUSTFLAGS: "-D warnings"
+  RUSTFLAGS: ""
 cache:
   key: "$CI_BUILD_NAME/$CI_BUILD_REF_NAME"
   untracked: true

@@ -316,7 +316,7 @@ windows:
     - set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt
     - set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
     - set RUST_BACKTRACE=1
-    - set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off -D warnings
+    - set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off
    - rustup default stable-x86_64-pc-windows-msvc
    - cargo build --release --verbose
    - curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll

@@ -380,8 +380,8 @@ test-windows:
   before_script:
     - git submodule update --init --recursive
   script:
-    - export RUST_BACKTRACE=1
-    - ./test.sh --verbose
+    - set RUST_BACKTRACE=1
+    - PowerShell ./test.sh --verbose
   tags:
     - rust-windows
   dependencies:

@@ -391,9 +391,6 @@ js-release:
   image: ethcore/javascript:latest
   only:
     - master
-    - beta
-    - tags
-    - stable
   before_script:
     - ./js/scripts/install-deps.sh
   script:

Cargo.lock (generated)

@@ -370,7 +370,7 @@ version = "1.4.0"
 dependencies = [
  "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
+ "mio 0.6.0 (git+https://github.com/carllerche/mio)",
  "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -451,6 +451,7 @@ name = "ethcore-network"
 version = "1.4.0"
 dependencies = [
  "ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ethcore-devtools 1.4.0",
  "ethcore-io 1.4.0",
  "ethcore-util 1.4.0",

@@ -459,7 +460,7 @@ dependencies = [
  "igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
+ "mio 0.6.0 (git+https://github.com/carllerche/mio)",
  "parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
  "rlp 0.1.0",

@@ -516,7 +517,7 @@ dependencies = [
  "parity-ui 1.4.0",
  "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
- "ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
+ "ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
 ]

 [[package]]
@@ -886,6 +887,11 @@ name = "lazy_static"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"

+[[package]]
+name = "lazycell"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "libc"
 version = "0.2.15"

@@ -990,7 +996,7 @@ dependencies = [
 [[package]]
 name = "mio"
 version = "0.6.0-dev"
-source = "git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d#62ec763c9cc34d8a452ed0392c575c50ddd5fc8d"
+source = "git+https://github.com/ethcore/mio?branch=timer-fix#31eccc40ece3d47abaefaf23bb2114033175b972"
 dependencies = [
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -1002,6 +1008,22 @@ dependencies = [
  "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "mio"
+version = "0.6.0"
+source = "git+https://github.com/carllerche/mio#9f17b70d6fecbf912168267ea74cf536f2cba705"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "nix 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "miow"
 version = "0.1.3"
@@ -1213,7 +1235,7 @@ dependencies = [
 [[package]]
 name = "parity-ui-precompiled"
 version = "1.4.0"
-source = "git+https://github.com/ethcore/js-precompiled.git#ba726039185238d6fd604f092b089a7d52c0f436"
+source = "git+https://github.com/ethcore/js-precompiled.git#eec3d41e6fd1a10e4d69470a9e8c2a7b1b464466"
 dependencies = [
  "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -1593,6 +1615,11 @@ name = "slab"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"

+[[package]]
+name = "slab"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "smallvec"
 version = "0.1.8"

@@ -1884,13 +1911,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "ws"
-version = "0.5.2"
-source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#00bd2134b07b4bc8ea47b7f6c7afce16bbe34c8f"
+version = "0.5.3"
+source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#0cd6c5e3e9d5e61a37d53eb8dcbad523dcc69314"
 dependencies = [
  "bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)",
  "httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "mio 0.6.0-dev (git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d)",
+ "mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)",
  "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
  "sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)",
@@ -1984,6 +2011,7 @@ dependencies = [
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
 "checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a"
 "checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
+"checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
 "checksum libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)" = "23e3757828fa702a20072c37ff47938e9dd331b92fac6e223d26d4b7a55f7ee2"
 "checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48"
 "checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd"

@@ -1996,7 +2024,8 @@ dependencies = [
 "checksum miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d1f4d337a01c32e1f2122510fed46393d53ca35a7f429cb0450abaedfa3ed54"
 "checksum mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)" = "<none>"
 "checksum mio 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a637d1ca14eacae06296a008fa7ad955347e34efcb5891cfd8ba05491a37907e"
-"checksum mio 0.6.0-dev (git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d)" = "<none>"
+"checksum mio 0.6.0 (git+https://github.com/carllerche/mio)" = "<none>"
+"checksum mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)" = "<none>"
 "checksum miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d5bfc6782530ac8ace97af10a540054a37126b63b0702ddaaa243b73b5745b9a"
 "checksum msdos_time 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c04b68cc63a8480fb2550343695f7be72effdec953a9d4508161c3e69041c7d8"
 "checksum nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)" = "<none>"

@@ -2062,6 +2091,7 @@ dependencies = [
 "checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
 "checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>"
 "checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
+"checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
 "checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"
 "checksum solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "172382bac9424588d7840732b250faeeef88942e37b6e35317dce98cafdd75b2"
 "checksum spmc 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "93bdab61c1a413e591c4d17388ffa859eaff2df27f1e13a5ec8b716700605adf"

@@ -2099,7 +2129,7 @@ dependencies = [
 "checksum webpki 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "813503a5985585e0812d430cd1328ee322f47f66629c8ed4ecab939cf9e92f91"
 "checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4"
 "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
-"checksum ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
+"checksum ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
 "checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
 "checksum xml-rs 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "65e74b96bd3179209dc70a980da6df843dff09e46eee103a0376c0949257e3ef"
 "checksum xmltree 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "472a9d37c7c53ab2391161df5b89b1f3bf76dab6ab150d7941ecbdd832282082"


@@ -139,7 +139,7 @@ pub struct Client {
     miner: Arc<Miner>,
     sleep_state: Mutex<SleepState>,
     liveness: AtomicBool,
-    io_channel: IoChannel<ClientIoMessage>,
+    io_channel: Mutex<IoChannel<ClientIoMessage>>,
     notify: RwLock<Vec<Weak<ChainNotify>>>,
     queue_transactions: AtomicUsize,
     last_hashes: RwLock<VecDeque<H256>>,

@@ -235,7 +235,7 @@ impl Client {
     import_lock: Mutex::new(()),
     panic_handler: panic_handler,
     miner: miner,
-    io_channel: message_channel,
+    io_channel: Mutex::new(message_channel),
     notify: RwLock::new(Vec::new()),
     queue_transactions: AtomicUsize::new(0),
     last_hashes: RwLock::new(VecDeque::new()),

@@ -1147,9 +1147,13 @@ impl BlockChainClient for Client {
     debug!("Ignoring {} transactions: queue is full", transactions.len());
 } else {
     let len = transactions.len();
-    match self.io_channel.send(ClientIoMessage::NewTransactions(transactions)) {
-        Ok(_) => { self.queue_transactions.fetch_add(len, AtomicOrdering::SeqCst); },
-        Err(e) => debug!("Ignoring {} transactions: error queueing: {}", len, e),
+    match self.io_channel.lock().send(ClientIoMessage::NewTransactions(transactions)) {
+        Ok(_) => {
+            self.queue_transactions.fetch_add(len, AtomicOrdering::SeqCst);
+        }
+        Err(e) => {
+            debug!("Ignoring {} transactions: error queueing: {}", len, e);
+        }
     }
 }
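The change above (repeated in the snapshot service, snapshot watcher and verification queue further down) wraps the `IoChannel` in a `Mutex` and takes a lock before each `send`, presumably so `Client` stays shareable across threads now that the channel type itself is not. A minimal sketch of the pattern, assuming `std::sync::mpsc::Sender` as a stand-in for `IoChannel<ClientIoMessage>` and a stripped-down `Client`:

```rust
// Sketch only, not part of the diff: a Send-but-not-Sync sender wrapped in a
// Mutex so the struct holding it can still be shared between threads.
use std::sync::mpsc::{channel, Sender};
use std::sync::Mutex;

struct Client {
    io_channel: Mutex<Sender<String>>, // previously a bare sender field
}

impl Client {
    fn queue_transactions(&self, payload: String) {
        // lock, send, and log instead of propagating the error
        if let Err(e) = self.io_channel.lock().unwrap().send(payload) {
            println!("Ignoring transactions: error queueing: {}", e);
        }
    }
}

fn main() {
    let (tx, rx) = channel();
    let client = Client { io_channel: Mutex::new(tx) };
    client.queue_transactions("NewTransactions".to_string());
    println!("received: {:?}", rx.recv());
}
```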


@@ -130,7 +130,7 @@ impl BanningTransactionQueue {
     // Ban sender
     let sender_banned = self.ban_sender(sender);
     // Ban recipient and codehash
-    let is_banned = sender_banned || match transaction.action {
+    let recipient_or_code_banned = match transaction.action {
         Action::Call(recipient) => {
             self.ban_recipient(recipient)
         },

@@ -138,7 +138,7 @@ impl BanningTransactionQueue {
             self.ban_codehash(transaction.data.sha3())
         },
     };
-    is_banned
+    sender_banned || recipient_or_code_banned
 },
 None => false,
 }
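The rename above is not cosmetic: in the old code `sender_banned || match ...` short-circuits, so the recipient or codehash ban is skipped whenever the sender is already banned. Computing both flags before combining them keeps both side effects. A small self-contained sketch of the difference, with illustrative functions standing in for the queue's ban methods:

```rust
// Sketch only, not part of the diff: `||` short-circuiting skips the right-hand
// side effect, while evaluating both operands first preserves it.
fn ban_sender(log: &mut Vec<&'static str>) -> bool {
    log.push("sender banned");
    true
}

fn ban_recipient(log: &mut Vec<&'static str>) -> bool {
    log.push("recipient banned");
    false
}

fn main() {
    // old shape: the recipient ban never runs once the sender ban returns true
    let mut log = Vec::new();
    let _ = ban_sender(&mut log) || ban_recipient(&mut log);
    assert_eq!(log, vec!["sender banned"]);

    // new shape: both bans run, then the results are combined
    let mut log = Vec::new();
    let sender_banned = ban_sender(&mut log);
    let recipient_or_code_banned = ban_recipient(&mut log);
    let _ = sender_banned || recipient_or_code_banned;
    assert_eq!(log, vec!["sender banned", "recipient banned"]);
}
```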


@@ -110,6 +110,7 @@ impl PartialOrd for TransactionOrigin {
 }

 impl Ord for TransactionOrigin {
+    #[cfg_attr(feature="dev", allow(match_same_arms))]
     fn cmp(&self, other: &TransactionOrigin) -> Ordering {
         if *other == *self {
             return Ordering::Equal;


@@ -178,7 +178,7 @@ impl Account {
     CodeState::Hash => {
         let code_hash = try!(rlp.val_at(3));
         if let Some(code) = code_map.get(&code_hash) {
-            acct_db.emplace(code_hash.clone(), DBValue::from_slice(&code));
+            acct_db.emplace(code_hash.clone(), DBValue::from_slice(code));
         }

         (code_hash, None)


@@ -213,7 +213,7 @@ pub struct Service {
     restoration: Mutex<Option<Restoration>>,
     snapshot_root: PathBuf,
     db_config: DatabaseConfig,
-    io_channel: Channel,
+    io_channel: Mutex<Channel>,
     pruning: Algorithm,
     status: Mutex<RestorationStatus>,
     reader: RwLock<Option<LooseReader>>,

@@ -233,7 +233,7 @@ impl Service {
     restoration: Mutex::new(None),
     snapshot_root: params.snapshot_root,
     db_config: params.db_config,
-    io_channel: params.channel,
+    io_channel: Mutex::new(params.channel),
     pruning: params.pruning,
     status: Mutex::new(RestorationStatus::Inactive),
     reader: RwLock::new(None),

@@ -567,7 +567,7 @@ impl SnapshotService for Service {
     }

     fn begin_restore(&self, manifest: ManifestData) {
-        if let Err(e) = self.io_channel.send(ClientIoMessage::BeginRestoration(manifest)) {
+        if let Err(e) = self.io_channel.lock().send(ClientIoMessage::BeginRestoration(manifest)) {
             trace!("Error sending snapshot service message: {:?}", e);
         }
     }

@@ -578,13 +578,13 @@ impl SnapshotService for Service {
     }

     fn restore_state_chunk(&self, hash: H256, chunk: Bytes) {
-        if let Err(e) = self.io_channel.send(ClientIoMessage::FeedStateChunk(hash, chunk)) {
+        if let Err(e) = self.io_channel.lock().send(ClientIoMessage::FeedStateChunk(hash, chunk)) {
             trace!("Error sending snapshot service message: {:?}", e);
         }
     }

     fn restore_block_chunk(&self, hash: H256, chunk: Bytes) {
-        if let Err(e) = self.io_channel.send(ClientIoMessage::FeedBlockChunk(hash, chunk)) {
+        if let Err(e) = self.io_channel.lock().send(ClientIoMessage::FeedBlockChunk(hash, chunk)) {
             trace!("Error sending snapshot service message: {:?}", e);
         }
     }


@@ -16,6 +16,7 @@
 //! Watcher for snapshot-related chain events.

+use util::Mutex;
 use client::{BlockChainClient, Client, ChainNotify};
 use ids::BlockID;
 use service::ClientIoMessage;

@@ -55,7 +56,7 @@ trait Broadcast: Send + Sync {
     fn take_at(&self, num: Option<u64>);
 }

-impl Broadcast for IoChannel<ClientIoMessage> {
+impl Broadcast for Mutex<IoChannel<ClientIoMessage>> {
     fn take_at(&self, num: Option<u64>) {
         let num = match num {
             Some(n) => n,

@@ -64,7 +65,7 @@ impl Broadcast for IoChannel<ClientIoMessage> {
         trace!(target: "snapshot_watcher", "broadcast: {}", num);

-        if let Err(e) = self.send(ClientIoMessage::TakeSnapshot(num)) {
+        if let Err(e) = self.lock().send(ClientIoMessage::TakeSnapshot(num)) {
             warn!("Snapshot watcher disconnected from IoService: {}", e);
         }
     }

@@ -91,7 +92,7 @@ impl Watcher {
             client: client,
             sync_status: sync_status,
         }),
-        broadcast: Box::new(channel),
+        broadcast: Box::new(Mutex::new(channel)),
         period: period,
         history: history,
     }


@@ -43,8 +43,6 @@ struct AccountCache {
     // When changing the type of the values here, be sure to update `mem_used` and
     // `new`.
     accounts: LruCache<Address, Option<Account>>,
-    /// DB Code cache. Maps code hashes to shared bytes.
-    code: MemoryLruCache<H256, Arc<Vec<u8>>>,
     /// Information on the modifications in recently committed blocks; specifically which addresses
     /// changed in which block. Ordered by block number.
     modifications: VecDeque<BlockChanges>,

@@ -95,6 +93,8 @@ pub struct StateDB {
     db: Box<JournalDB>,
     /// Shared canonical state cache.
     account_cache: Arc<Mutex<AccountCache>>,
+    /// DB Code cache. Maps code hashes to shared bytes.
+    code_cache: Arc<Mutex<MemoryLruCache<H256, Arc<Vec<u8>>>>>,
     /// Local dirty cache.
     local_cache: Vec<CacheQueueItem>,
     /// Shared account bloom. Does not handle chain reorganizations.

@@ -125,9 +125,9 @@ impl StateDB {
     db: db,
     account_cache: Arc::new(Mutex::new(AccountCache {
         accounts: LruCache::new(cache_items),
-        code: MemoryLruCache::new(code_cache_size),
         modifications: VecDeque::new(),
     })),
+    code_cache: Arc::new(Mutex::new(MemoryLruCache::new(code_cache_size))),
     local_cache: Vec::new(),
     account_bloom: Arc::new(Mutex::new(bloom)),
     cache_size: cache_size,

@@ -320,6 +320,7 @@ impl StateDB {
     StateDB {
         db: self.db.boxed_clone(),
         account_cache: self.account_cache.clone(),
+        code_cache: self.code_cache.clone(),
         local_cache: Vec::new(),
         account_bloom: self.account_bloom.clone(),
         cache_size: self.cache_size,

@@ -334,6 +335,7 @@ impl StateDB {
     StateDB {
         db: self.db.boxed_clone(),
         account_cache: self.account_cache.clone(),
+        code_cache: self.code_cache.clone(),
         local_cache: Vec::new(),
         account_bloom: self.account_bloom.clone(),
         cache_size: self.cache_size,

@@ -352,10 +354,9 @@ impl StateDB {
     pub fn mem_used(&self) -> usize {
         // TODO: account for LRU-cache overhead; this is a close approximation.
         self.db.mem_used() + {
-            let cache = self.account_cache.lock();
-
-            cache.code.current_size() +
-                cache.accounts.len() * ::std::mem::size_of::<Option<Account>>()
+            let accounts = self.account_cache.lock().accounts.len();
+            let code_size = self.code_cache.lock().current_size();
+            code_size + accounts * ::std::mem::size_of::<Option<Account>>()
         }
     }

@@ -380,9 +381,9 @@ impl StateDB {
     /// it simply maps hashes to raw code and will always be correct in the absence of
     /// hash collisions.
     pub fn cache_code(&self, hash: H256, code: Arc<Vec<u8>>) {
-        let mut cache = self.account_cache.lock();
-        cache.code.insert(hash, code);
+        let mut cache = self.code_cache.lock();
+        cache.insert(hash, code);
     }

     /// Get basic copy of the cached account. Does not include storage.

@@ -397,9 +398,9 @@ impl StateDB {
     /// Get cached code based on hash.
     pub fn get_cached_code(&self, hash: &H256) -> Option<Arc<Vec<u8>>> {
-        let mut cache = self.account_cache.lock();
-        cache.code.get_mut(hash).map(|code| code.clone())
+        let mut cache = self.code_cache.lock();
+        cache.get_mut(hash).map(|code| code.clone())
     }

     /// Get value from a cached account.
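The StateDB change above pulls the code cache out of `AccountCache` into its own `Arc<Mutex<..>>`: clones produced by `boxed_clone` share the same code cache, and `cache_code`/`get_cached_code` no longer take the account-cache lock. A rough sketch of that shape, assuming a plain `HashMap` in place of `MemoryLruCache` and `u64` keys in place of `H256`:

```rust
// Sketch only, not part of the diff: a code cache shared between StateDB clones
// behind its own lock, independent of the account cache.
use std::collections::HashMap;
use std::sync::{Arc, Mutex};

struct StateDb {
    code_cache: Arc<Mutex<HashMap<u64, Arc<Vec<u8>>>>>,
}

impl StateDb {
    fn boxed_clone(&self) -> StateDb {
        // the clone points at the same cache, not a copy of it
        StateDb { code_cache: self.code_cache.clone() }
    }

    fn cache_code(&self, hash: u64, code: Arc<Vec<u8>>) {
        self.code_cache.lock().unwrap().insert(hash, code);
    }

    fn get_cached_code(&self, hash: &u64) -> Option<Arc<Vec<u8>>> {
        self.code_cache.lock().unwrap().get(hash).cloned()
    }
}

fn main() {
    let db = StateDb { code_cache: Arc::new(Mutex::new(HashMap::new())) };
    db.cache_code(1, Arc::new(vec![0x60, 0x60]));

    // code cached through the original is visible through the clone
    let snapshot_view = db.boxed_clone();
    assert!(snapshot_view.get_cached_code(&1).is_some());
}
```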


@@ -109,7 +109,7 @@ pub struct VerificationQueue<K: Kind> {
 struct QueueSignal {
     deleting: Arc<AtomicBool>,
     signalled: AtomicBool,
-    message_channel: IoChannel<ClientIoMessage>,
+    message_channel: Mutex<IoChannel<ClientIoMessage>>,
 }

 impl QueueSignal {

@@ -121,7 +121,8 @@ impl QueueSignal {
     }

     if self.signalled.compare_and_swap(false, true, AtomicOrdering::Relaxed) == false {
-        if let Err(e) = self.message_channel.send_sync(ClientIoMessage::BlockVerified) {
+        let channel = self.message_channel.lock().clone();
+        if let Err(e) = channel.send_sync(ClientIoMessage::BlockVerified) {
             debug!("Error sending BlockVerified message: {:?}", e);
         }
     }

@@ -135,7 +136,8 @@ impl QueueSignal {
     }

     if self.signalled.compare_and_swap(false, true, AtomicOrdering::Relaxed) == false {
-        if let Err(e) = self.message_channel.send(ClientIoMessage::BlockVerified) {
+        let channel = self.message_channel.lock().clone();
+        if let Err(e) = channel.send(ClientIoMessage::BlockVerified) {
             debug!("Error sending BlockVerified message: {:?}", e);
         }
     }

@@ -178,7 +180,7 @@ impl<K: Kind> VerificationQueue<K> {
     let ready_signal = Arc::new(QueueSignal {
         deleting: deleting.clone(),
         signalled: AtomicBool::new(false),
-        message_channel: message_channel
+        message_channel: Mutex::new(message_channel),
     });
     let empty = Arc::new(SCondvar::new());
     let panic_handler = PanicHandler::new_in_arc();

js/.gitignore

@@ -5,3 +5,4 @@ build
 .coverage
 .dist
 .happypack
+.npmjs

(binary image file not shown; 12 KiB)

js/assets/images/parity.ico (new binary file, 1.1 KiB; not shown)

@@ -1,9 +1,9 @@
 {
   "name": "parity.js",
-  "version": "0.0.1",
+  "version": "0.1.5",
   "main": "release/index.js",
   "jsnext:main": "src/index.js",
-  "author": "Ethcore Team <admin@ethcore.io>",
+  "author": "Parity Team <admin@parity.io>",
   "maintainers": [
     "Jaco Greeff"
   ],

@@ -11,7 +11,7 @@
   "license": "GPL-3.0",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/ethcore/parity.js.git"
+    "url": "git+https://github.com/ethcore/parity.git"
   },
   "keywords": [
     "Ethereum",

@@ -27,15 +27,13 @@
     "build:app": "webpack --progress",
     "build:lib": "webpack --config webpack.libraries --progress",
     "build:dll": "webpack --config webpack.vendor --progress",
     "ci:build": "npm run ci:build:dll && npm run ci:build:app && npm run ci:build:lib",
     "ci:build:app": "NODE_ENV=production webpack",
     "ci:build:lib": "NODE_ENV=production webpack --config webpack.libraries",
     "ci:build:dll": "NODE_ENV=production webpack --config webpack.vendor",
+    "ci:build:npm": "NODE_ENV=production webpack --config webpack.npm",
     "start": "npm install && npm run build:dll && npm run start:app",
     "start:app": "webpack-dev-server -d --history-api-fallback --open --hot --inline --progress --colors --port 3000",
     "clean": "rm -rf ./build ./coverage",
     "coveralls": "npm run testCoverage && coveralls < coverage/lcov.info",
     "lint": "eslint --ignore-path .gitignore ./src/",

@@ -65,7 +63,7 @@
     "chai": "^3.5.0",
     "chai-enzyme": "0.4.2",
     "cheerio": "0.20.0",
-    "copy-webpack-plugin": "^3.0.1",
+    "copy-webpack-plugin": "^4.0.0",
     "core-js": "^2.4.1",
     "coveralls": "^2.11.11",
     "css-loader": "^0.23.1",

@@ -79,6 +77,7 @@
     "eslint-plugin-standard": "^2.0.0",
     "extract-text-webpack-plugin": "^1.0.1",
     "file-loader": "^0.8.5",
+    "fs-extra": "^0.30.0",
     "happypack": "^2.2.1",
     "history": "^2.0.0",
     "html-loader": "^0.4.3",

@@ -125,6 +124,7 @@
     "material-ui": "^0.16.1",
     "material-ui-chip-input": "^0.8.0",
     "moment": "^2.14.1",
+    "qs": "^6.3.0",
     "react": "^15.2.1",
     "react-addons-css-transition-group": "^15.2.1",
     "react-dom": "^15.2.1",

js/parity.md (new file)

@@ -0,0 +1,81 @@
# parity.js
Parity.js is a thin, fast, Promise-based wrapper around the Ethereum APIs.
## installation
Install the package with `npm install --save @parity/parity.js`
## usage
### initialisation
```javascript
// import the actual Api class
import { Api } from '@parity/parity.js';
// do the setup
const transport = new Api.Transport.Http('http://localhost:8545');
const api = new Api(transport);
```
### making calls
perform a call
```javascript
api.eth
.coinbase()
.then((coinbase) => {
console.log(`The coinbase is ${coinbase}`);
});
```
multiple promises
```javascript
Promise
.all([
api.eth.coinbase(),
api.net.listening()
])
.then(([coinbase, listening]) => {
// do stuff here
});
```
chaining promises
```javascript
api.eth
.newFilter({...})
.then((filterId) => api.eth.getFilterChanges(filterId))
.then((changes) => {
console.log(changes);
});
```
### contracts
attach contract
```javascript
const abi = [{ name: 'callMe', inputs: [{ type: 'bool', ...}, { type: 'string', ...}]}, ...abi...];
const address = '0x123456...9abc';
const contract = new api.newContract(abi, address);
```
find & call a function
```javascript
contract.instance
.callMe
.call({ gas: 21000 }, [true, 'someString']) // or estimateGas or postTransaction
.then((result) => {
console.log(`the result was ${result}`);
});
```
## apis
APIs implement the calls as exposed in the [Ethcore JSON Ethereum RPC](https://github.com/ethcore/ethereum-rpc-json/) definitions. Mapping follows the naming conventions of the originals, i.e. `eth_call` becomes `eth.call`, `personal_accounts` becomes `personal.accounts`, etc.

js/parity.package.json (new file)

@@ -0,0 +1,32 @@
{
"name": "@parity/parity.js",
"description": "The Parity Promise-base API & ABI library for interfacing with Ethereum over RPC",
"version": "0.0.0",
"main": "library.js",
"author": "Parity Team <admin@parity.io>",
"maintainers": [
"Jaco Greeff"
],
"contributors": [],
"license": "GPL-3.0",
"repository": {
"type": "git",
"url": "git+https://github.com/ethcore/parity.git"
},
"keywords": [
"Ethereum",
"ABI",
"API",
"RPC",
"Parity",
"Promise"
],
"scripts": {
},
"devDependencies": {
},
"dependencies": {
"bignumber.js": "^2.3.0",
"js-sha3": "^0.5.2"
}
}


@@ -1,11 +1,18 @@
 #!/bin/bash
 set -e

+# variables
+UTCDATE=`date -u "+%Y%m%d-%H%M%S"`
+PACKAGES=( "parity.js" )
+BRANCH=$CI_BUILD_REF_NAME
+GIT_JS_PRECOMPILED="https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/js-precompiled.git"
+GIT_PARITY="https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/parity.git"
+
 # setup the git user defaults for the current repo
 function setup_git_user {
   git config push.default simple
   git config merge.ours.driver true
-  git config user.email "jaco+gitlab@ethcore.io"
+  git config user.email "$GITHUB_EMAIL"
   git config user.name "GitLab Build Bot"
 }

@@ -15,47 +22,63 @@ GITLOG=./.git/gitcommand.log
 pushd $BASEDIR
 cd ../.dist

-# variables
-UTCDATE=`date -u "+%Y%m%d-%H%M%S"`
+# add local files and send it up
+echo "*** Setting up GitHub config for js-precompiled"

+# init git
 rm -rf ./.git
 git init

-# add local files and send it up
-echo "Setting up GitHub config for js-precompiled"
 setup_git_user

-echo "Checking out $CI_BUILD_REF_NAME branch"
-git remote add origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/js-precompiled.git
+echo "*** Checking out $BRANCH branch"
+git remote add origin $GIT_JS_PRECOMPILED
 git fetch origin 2>$GITLOG
-git checkout -b $CI_BUILD_REF_NAME
+git checkout -b $BRANCH

-echo "Committing compiled files for $UTCDATE"
+echo "*** Committing compiled files for $UTCDATE"
 git add .
 git commit -m "$UTCDATE"

-echo "Merging remote"
-git merge origin/$CI_BUILD_REF_NAME -X ours --commit -m "$UTCDATE [release]"
-git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG
+echo "*** Merging remote"
+git merge origin/$BRANCH -X ours --commit -m "$UTCDATE [release]"
+git push origin HEAD:refs/heads/$BRANCH 2>$GITLOG
 PRECOMPILED_HASH=`git rev-parse HEAD`

-# back to root
-popd
+# move to root
+cd ../..

-echo "Setting up GitHub config for parity"
+echo "*** Setting up GitHub config for parity"
 setup_git_user
-git remote set-url origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/parity.git
-git reset --hard origin/$CI_BUILD_REF_NAME 2>$GITLOG
+git remote set-url origin $GIT_PARITY
+git reset --hard origin/$BRANCH 2>$GITLOG

-echo "Updating cargo package parity-ui-precompiled#$PRECOMPILED_HASH"
+echo "*** Bumping package.json patch version"
+cd js
+npm --no-git-tag-version version
+npm version patch
+cd ..
+
+echo "*** Updating cargo parity-ui-precompiled#$PRECOMPILED_HASH"
 cargo update -p parity-ui-precompiled
 # --precise "$PRECOMPILED_HASH"

-echo "Committing updated files"
-git add .
+echo "*** Committing updated files"
+git add Cargo.lock js/package.json
 git commit -m "[ci skip] js-precompiled $UTCDATE"
-git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG
+git push origin HEAD:refs/heads/$BRANCH 2>$GITLOG
+
+echo "*** Building packages for npmjs"
+cd js
+# echo -e "$NPM_USERNAME\n$NPM_PASSWORD\n$NPM_EMAIL" | npm login
+echo "$NPM_TOKEN" >> ~/.npmrc
+npm run ci:build:npm
+
+echo "*** Publishing $PACKAGE to npmjs"
+cd .npmjs
+npm publish --access public
+cd ..
+
+# back to root
+echo "*** Release completed"
+popd

 # exit with exit code
 exit 0


@@ -14,6 +14,8 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

+import { stringify } from 'qs';
+
 const options = {
   method: 'GET',
   headers: {

@@ -23,19 +25,14 @@ const options = {
 export function call (module, action, _params, test) {
   const host = test ? 'testnet.etherscan.io' : 'api.etherscan.io';

-  let params = '';
-
-  if (_params) {
-    Object.keys(_params).map((param) => {
-      const value = _params[param];
-
-      params = `${params}&${param}=${value}`;
-    });
-  }
-
-  return fetch(`http://${host}/api?module=${module}&action=${action}${params}`, options)
+  const query = stringify(Object.assign({
+    module, action
+  }, _params || {}));
+
+  return fetch(`https://${host}/api?${query}`, options)
     .then((response) => {
-      if (response.status !== 200) {
+      if (!response.ok) {
         throw { code: response.status, message: response.statusText }; // eslint-disable-line
       }


@@ -16,10 +16,13 @@
 import { account } from './account';
 import { stats } from './stats';
+import { txLink, addressLink } from './links';

 const etherscan = {
   account: account,
-  stats: stats
+  stats: stats,
+  txLink: txLink,
+  addressLink: addressLink
 };

 export default etherscan;


@@ -14,8 +14,10 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

-// links to chain explorers
-export const BASE_LINK_ACCOUNT_MORDEN = 'https://testnet.etherscan.io/address/';
-export const BASE_LINK_ACCOUNT_HOMESTEAD = 'https://etherscan.io/address/';
-export const BASE_LINK_TX_MORDEN = 'https://testnet.etherscan.io/tx/';
-export const BASE_LINK_TX_HOMESTEAD = 'https://etherscan.io/tx/';
+export const txLink = (hash, isTestnet = false) => {
+  return `https://${isTestnet ? 'testnet.' : ''}etherscan.io/tx/${hash}`;
+};
+
+export const addressLink = (address, isTestnet = false) => {
+  return `https://${isTestnet ? 'testnet.' : ''}etherscan.io/address/${address}`;
+};


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>Basic Token Deployment</title>
 </head>
 <body>


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>GAVcoin</title>
 </head>
 <body>


@@ -43,7 +43,7 @@ export default class ActionBuyIn extends Component {
     accountError: ERRORS.invalidAccount,
     amount: 0,
     amountError: ERRORS.invalidAmount,
-    maxPrice: api.util.fromWei(this.props.price.mul(1.2)).toString(),
+    maxPrice: api.util.fromWei(this.props.price.mul(1.2)).toFixed(0),
     maxPriceError: null,
     sending: false,
     complete: false


@@ -53,6 +53,7 @@ export default class Application extends Component {
     action: null,
     address: null,
     accounts: [],
+    accountsInfo: {},
     blockNumber: new BigNumber(-1),
     ethBalance: new BigNumber(0),
     gavBalance: new BigNumber(0),

@@ -68,7 +69,7 @@
   }

   render () {
-    const { accounts, address, blockNumber, gavBalance, loading, price, remaining, totalSupply } = this.state;
+    const { accounts, accountsInfo, address, blockNumber, gavBalance, loading, price, remaining, totalSupply } = this.state;

     if (loading) {
       return (

@@ -93,7 +94,7 @@
         gavBalance={ gavBalance }
         onAction={ this.onAction } />
       <Events
-        accounts={ accounts } />
+        accountsInfo={ accountsInfo } />
     </div>
   );
 }

@@ -216,8 +217,8 @@
       api.personal.accountsInfo()
     ]);
   })
-  .then(([address, addresses, infos]) => {
-    infos = infos || {};
+  .then(([address, addresses, accountsInfo]) => {
+    accountsInfo = accountsInfo || {};
     console.log(`gavcoin was found at ${address}`);

     const contract = api.newContract(abis.gavcoin, address);

@@ -226,9 +227,10 @@
     loading: false,
     address,
     contract,
+    accountsInfo,
     instance: contract.instance,
     accounts: addresses.map((address) => {
-      const info = infos[address] || {};
+      const info = accountsInfo[address] || {};

       return {
         address,


@@ -14,10 +14,11 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

+import moment from 'moment';
 import React, { Component, PropTypes } from 'react';

 import IdentityIcon from '../../IdentityIcon';
-import { formatBlockNumber, formatCoins, formatEth } from '../../format';
+import { formatCoins, formatEth, formatHash } from '../../format';

 import styles from '../events.css';

@@ -27,7 +28,8 @@ const EMPTY_COLUMN = (
 export default class Event extends Component {
   static contextTypes = {
-    accounts: PropTypes.array.isRequired
+    accountsInfo: PropTypes.object.isRequired,
+    api: PropTypes.object.isRequired
   }

   static propTypes = {

@@ -38,14 +40,23 @@
     toAddress: PropTypes.string
   }

+  state = {
+    block: null
+  }
+
+  componentDidMount () {
+    this.loadBlock();
+  }
+
   render () {
     const { event, fromAddress, toAddress, price, value } = this.props;
-    const { blockNumber, state, type } = event;
+    const { block } = this.state;
+    const { state, type } = event;
     const cls = `${styles.event} ${styles[state]} ${styles[type.toLowerCase()]}`;

     return (
       <tr className={ cls }>
-        { this.renderBlockNumber(blockNumber) }
+        { this.renderTimestamp(block) }
         { this.renderType(type) }
         { this.renderValue(value) }
         { this.renderPrice(price) }

@@ -55,10 +66,10 @@
     );
   }

-  renderBlockNumber (blockNumber) {
+  renderTimestamp (block) {
     return (
       <td className={ styles.blocknumber }>
-        { formatBlockNumber(blockNumber) }
+        { !block ? ' ' : moment(block.timestamp).fromNow() }
       </td>
     );
   }

@@ -77,8 +88,8 @@
   }

   renderAddressName (address) {
-    const { accounts } = this.context;
-    const account = accounts.find((_account) => _account.address === address);
+    const { accountsInfo } = this.context;
+    const account = accountsInfo[address];

     if (account && account.name) {
       return (

@@ -90,7 +101,7 @@
     return (
       <div className={ styles.address }>
-        { address }
+        { formatHash(address) }
       </div>
     );
   }

@@ -126,4 +137,19 @@
       </td>
     );
   }
+
+  loadBlock () {
+    const { api } = this.context;
+    const { event } = this.props;
+
+    if (!event || !event.blockNumber || event.blockNumber.eq(0)) {
+      return;
+    }
+
+    api.eth
+      .getBlockByNumber(event.blockNumber)
+      .then((block) => {
+        this.setState({ block });
+      });
+  }
 }


@@ -16,18 +16,20 @@
 */

 .events {
   padding: 4em 2em;
+  text-align: center;
 }

 .list {
-  width: 100%;
+  margin: 0 auto;
   border: none;
   border-spacing: 0;
+  text-align: left;
 }

 .list td {
   vertical-align: top;
-  padding: 4px 0.5em;
-  max-height: 32px;
+  padding: 0.25em 1em;
+  max-height: 1.5em;
 }

 .event {

@@ -38,7 +40,6 @@
 .blocknumber,
 .ethvalue,
 .gavvalue {
-  font-family: 'Roboto Mono', monospace;
 }

 .blocknumber,


@@ -27,7 +27,7 @@ import styles from './events.css';
 export default class Events extends Component {
   static childContextTypes = {
-    accounts: PropTypes.array
+    accountsInfo: PropTypes.object
   }

   static contextTypes = {

@@ -36,7 +36,7 @@ export default class Events extends Component {
   }

   static propTypes = {
-    accounts: PropTypes.array
+    accountsInfo: PropTypes.object.isRequired
   }

   state = {

@@ -84,11 +84,9 @@
   }

   getChildContext () {
-    const { accounts } = this.props;
-
-    return {
-      accounts
-    };
+    const { accountsInfo } = this.props;
+
+    return { accountsInfo };
   }

   setupFilters () {


@@ -50,3 +50,7 @@ export function formatCoins (amount, decimals = 6) {
 export function formatEth (eth, decimals = 3) {
   return api.util.fromWei(eth).toFormat(decimals);
 }
+
+export function formatHash (hash) {
+  return `${hash.substr(0, 10)}...${hash.substr(-8)}`;
+}


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>GitHub Hint</title>
 </head>
 <body>


@@ -151,7 +151,7 @@ export default class Application extends Component {
     let urlError = null;

     if (url && url.length) {
-      var re = /^https?:\/\/(?:www\.|(?!www))[^\s\.]+\.[^\s]{2,}/g;
+      const re = /^https?:\/\/(?:www\.|(?!www))[^\s\.]+\.[^\s]{2,}/g; // eslint-disable-line
       urlError = re.test(url)
         ? null
         : 'not matching rexex';


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>Token Registry</title>
 </head>
 <body>


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>Method Signature Registry</title>
 </head>
 <body>


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>Token Registry</title>
 </head>
 <body>


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>dev::Parity.js</title>
 </head>
 <body>


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>dev::Web3</title>
 </head>
 <body>


@@ -4,6 +4,7 @@
   <meta charset="utf-8">
   <meta name="viewport" content="width=device-width">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
+  <link rel="icon" href="parity.ico" type="image/x-icon">
   <title>Parity</title>
   <style>
     html, body, #container {


@@ -39,6 +39,8 @@ import './environment';
 import '../assets/fonts/Roboto/font.css';
 import '../assets/fonts/RobotoMono/font.css';
+import '../assets/images/parity.ico';
+
 import styles from './reset.css';
 import './index.html';


@@ -14,12 +14,10 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

-import { BASE_LINK_ACCOUNT_MORDEN, BASE_LINK_ACCOUNT_HOMESTEAD } from '../constants/constants';
-
-export const getAccountLink = _getAccountLink;
-
-function _getAccountLink (address, chain) {
-  const isTestNet = chain === 'morden' || chain === 'testnet';
-  const base = isTestNet ? BASE_LINK_ACCOUNT_MORDEN : BASE_LINK_ACCOUNT_HOMESTEAD;
-
-  return base + address;
-}
+import Abi from './abi';
+import Api from './api';
+
+export {
+  Abi,
+  Api
+};


@@ -50,6 +50,9 @@ export default class CreationType extends Component {
         <RadioButton
           label='Import account from an Ethereum pre-sale wallet'
           value='fromPresale' />
+        <RadioButton
+          label='Import raw private key'
+          value='fromRaw' />
       </RadioButtonGroup>
     </div>
   );


@@ -26,6 +26,8 @@ import styles from '../createAccount.css';
 const ERRORS = {
   noName: 'you need to specify a valid name for the account',
   noPhrase: 'you need to specify the recovery phrase',
+  noKey: 'you need to provide the raw private key',
+  invalidKey: 'the raw key needs to be hex, 64 characters in length',
   invalidPassword: 'you need to specify a password >= 8 characters',
   noMatchPassword: 'the supplied passwords does not match'
 };

View File

@ -0,0 +1,17 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
export default from './rawKey';

View File

@ -0,0 +1,189 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React, { Component, PropTypes } from 'react';
import { Form, Input } from '../../../ui';
import styles from '../createAccount.css';
import { ERRORS } from '../NewAccount';
export default class RawKey extends Component {
static contextTypes = {
api: PropTypes.object.isRequired
}
static propTypes = {
onChange: PropTypes.func.isRequired
}
state = {
rawKey: '',
rawKeyError: ERRORS.noKey,
accountName: '',
accountNameError: ERRORS.noName,
passwordHint: '',
password1: '',
password1Error: ERRORS.invalidPassword,
password2: '',
password2Error: ERRORS.noMatchPassword,
isValidPass: false,
isValidName: false,
isValidKey: false
}
componentWillMount () {
this.props.onChange(false, {});
}
render () {
const { accountName, accountNameError, passwordHint, password1, password1Error, password2, password2Error, rawKey, rawKeyError } = this.state;
return (
<Form>
<Input
hint='the raw hex encoded private key'
label='private key'
error={ rawKeyError }
value={ rawKey }
onChange={ this.onEditKey } />
<Input
label='account name'
hint='a descriptive name for the account'
error={ accountNameError }
value={ accountName }
onChange={ this.onEditAccountName } />
<Input
label='password hint'
hint='(optional) a hint to help with remembering the password'
value={ passwordHint }
onChange={ this.onEditPasswordHint } />
<div className={ styles.passwords }>
<div className={ styles.password }>
<Input
className={ styles.password }
label='password'
hint='a strong, unique password'
type='password'
error={ password1Error }
value={ password1 }
onChange={ this.onEditPassword1 } />
</div>
<div className={ styles.password }>
<Input
className={ styles.password }
label='password (repeat)'
hint='verify your password'
type='password'
error={ password2Error }
value={ password2 }
onChange={ this.onEditPassword2 } />
</div>
</div>
</Form>
);
}
updateParent = () => {
const { isValidName, isValidPass, isValidKey, accountName, passwordHint, password1, rawKey } = this.state;
const isValid = isValidName && isValidPass && isValidKey;
this.props.onChange(isValid, {
name: accountName,
passwordHint,
password: password1,
rawKey
});
}
onEditPasswordHint = (event, value) => {
this.setState({
passwordHint: value
});
}
onEditKey = (event) => {
const { api } = this.context;
const rawKey = event.target.value;
let rawKeyError = null;
console.log(rawKey.length, rawKey);
if (!rawKey || !rawKey.trim().length) {
rawKeyError = ERRORS.noKey;
} else if (rawKey.substr(0, 2) !== '0x' || rawKey.substr(2).length !== 64 || !api.util.isHex(rawKey)) {
rawKeyError = ERRORS.invalidKey;
}
this.setState({
rawKey,
rawKeyError,
isValidKey: !rawKeyError
}, this.updateParent);
}
onEditAccountName = (event) => {
const accountName = event.target.value;
let accountNameError = null;
if (!accountName || accountName.trim().length < 2) {
accountNameError = ERRORS.noName;
}
this.setState({
accountName,
accountNameError,
isValidName: !accountNameError
}, this.updateParent);
}
onEditPassword1 = (event) => {
const value = event.target.value;
let error1 = null;
let error2 = null;
if (!value || value.trim().length < 8) {
error1 = ERRORS.invalidPassword;
}
if (value !== this.state.password2) {
error2 = ERRORS.noMatchPassword;
}
this.setState({
password1: value,
password1Error: error1,
password2Error: error2,
isValidPass: !error1 && !error2
}, this.updateParent);
}
onEditPassword2 = (event) => {
const value = event.target.value;
let error2 = null;
if (value !== this.state.password1) {
error2 = ERRORS.noMatchPassword;
}
this.setState({
password2: value,
password2Error: error2,
isValidPass: !error2
}, this.updateParent);
}
}

View File

@ -29,6 +29,7 @@ import CreationType from './CreationType';
import NewAccount from './NewAccount'; import NewAccount from './NewAccount';
import NewGeth from './NewGeth'; import NewGeth from './NewGeth';
import NewImport from './NewImport'; import NewImport from './NewImport';
import RawKey from './RawKey';
import RecoveryPhrase from './RecoveryPhrase'; import RecoveryPhrase from './RecoveryPhrase';
const TITLES = { const TITLES = {
@ -58,6 +59,7 @@ export default class CreateAccount extends Component {
passwordHint: null, passwordHint: null,
password: null, password: null,
phrase: null, phrase: null,
rawKey: null,
json: null, json: null,
canCreate: false, canCreate: false,
createType: null, createType: null,
@ -110,6 +112,11 @@ export default class CreateAccount extends Component {
<RecoveryPhrase <RecoveryPhrase
onChange={ this.onChangeDetails } /> onChange={ this.onChangeDetails } />
); );
} else if (createType === 'fromRaw') {
return (
<RawKey
onChange={ this.onChangeDetails } />
);
} }
return ( return (
@ -220,6 +227,28 @@ export default class CreateAccount extends Component {
canCreate: true canCreate: true
}); });
this.newError(error);
});
} else if (createType === 'fromRaw') {
return api.personal
.newAccountFromSecret(this.state.rawKey, this.state.password)
.then((address) => {
this.setState({ address });
return api.personal
.setAccountName(address, this.state.name)
.then(() => api.personal.setAccountMeta(address, { passwordHint: this.state.passwordHint }));
})
.then(() => {
this.onNext();
this.props.onUpdate && this.props.onUpdate();
})
.catch((error) => {
console.error('onCreate', error);
this.setState({
canCreate: true
});
this.newError(error); this.newError(error);
}); });
} else if (createType === 'fromGeth') { } else if (createType === 'fromGeth') {
@ -288,27 +317,35 @@ export default class CreateAccount extends Component {
}); });
} }
onChangeDetails = (valid, { name, passwordHint, address, password, phrase }) => { onChangeDetails = (canCreate, { name, passwordHint, address, password, phrase, rawKey }) => {
this.setState({ this.setState({
canCreate: valid, canCreate,
name, name,
passwordHint, passwordHint,
address, address,
password, password,
phrase phrase,
rawKey
}); });
} }
onChangeGeth = (valid, gethAddresses) => { onChangeRaw = (canCreate, rawKey) => {
this.setState({ this.setState({
canCreate: valid, canCreate,
rawKey
});
}
onChangeGeth = (canCreate, gethAddresses) => {
this.setState({
canCreate,
gethAddresses gethAddresses
}); });
} }
onChangeWallet = (valid, { name, passwordHint, password, json }) => { onChangeWallet = (canCreate, { name, passwordHint, password, json }) => {
this.setState({ this.setState({
canCreate: valid, canCreate,
name, name,
passwordHint, passwordHint,
password, password,

View File

@ -122,7 +122,7 @@ export default class FirstRun extends Component {
icon={ <ActionDoneAll /> } icon={ <ActionDoneAll /> }
label='Close' label='Close'
onClick={ this.onClose } /> onClick={ this.onClose } />
); );
} }
} }

View File

@ -46,6 +46,16 @@
font-size: 1.1rem; font-size: 1.1rem;
} }
.passwords {
display: flex;
flex-wrap: wrap;
}
.password {
flex: 0 1 50%;
width: 50%;
}
.passwordHint { .passwordHint {
font-size: 0.9rem; font-size: 0.9rem;
color: lightgrey; color: lightgrey;

View File

@ -181,33 +181,37 @@ export default class PasswordManager extends Component {
disabled={ disabled } disabled={ disabled }
onSubmit={ this.handleChangePassword } onSubmit={ this.handleChangePassword }
onChange={ this.onEditCurrent } /> onChange={ this.onEditCurrent } />
<Input <Input
label='new password' label='(optional) new password hint'
hint='the new password for this account'
type='password'
submitOnBlur={ false }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditNew } />
<Input
label='repeat new password'
hint='repeat the new password for this account'
type='password'
submitOnBlur={ false }
error={ repeatError }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditRepeatNew } />
<Input
label='new password hint'
hint='hint for the new password' hint='hint for the new password'
submitOnBlur={ false } submitOnBlur={ false }
value={ passwordHint } value={ passwordHint }
disabled={ disabled } disabled={ disabled }
onSubmit={ this.handleChangePassword } onSubmit={ this.handleChangePassword }
onChange={ this.onEditHint } /> onChange={ this.onEditHint } />
<div className={ styles.passwords }>
<div className={ styles.password }>
<Input
label='new password'
hint='the new password for this account'
type='password'
submitOnBlur={ false }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditNew } />
</div>
<div className={ styles.password }>
<Input
label='repeat new password'
hint='repeat the new password for this account'
type='password'
submitOnBlur={ false }
error={ repeatError }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditRepeatNew } />
</div>
</div>
</div> </div>
</Form> </Form>
</Tab> </Tab>

View File

@ -420,13 +420,15 @@ export default class Transfer extends Component {
_sendToken () { _sendToken () {
const { account, balance } = this.props; const { account, balance } = this.props;
const { recipient, value, tag } = this.state; const { gas, gasPrice, recipient, value, tag } = this.state;
const token = balance.tokens.find((balance) => balance.token.tag === tag).token; const token = balance.tokens.find((balance) => balance.token.tag === tag).token;
return token.contract.instance.transfer return token.contract.instance.transfer
.postTransaction({ .postTransaction({
from: account.address, from: account.address,
to: token.address to: token.address,
gas,
gasPrice
}, [ }, [
recipient, recipient,
new BigNumber(value).mul(token.format).toFixed(0) new BigNumber(value).mul(token.format).toFixed(0)
@ -483,7 +485,7 @@ export default class Transfer extends Component {
to: token.address to: token.address
}, [ }, [
recipient, recipient,
new BigNumber(value || 0).mul(token.format).toString() new BigNumber(value || 0).mul(token.format).toFixed(0)
]); ]);
} }

View File

@ -139,7 +139,7 @@ class MethodDecoding extends Component {
return ( return (
<div className={ styles.gasDetails }> <div className={ styles.gasDetails }>
{ historic ? 'Used' : 'Will use' } <span className={ styles.highlight }>{ gas.toFormat(0) } gas ({ gasPrice.div(1000000).toFormat(0) }M/<small>ETH</small>)</span> for a total transaction cost of <span className={ styles.highlight }>{ this.renderEtherValue(gasValue) }</span> { historic ? 'Provided' : 'Provides' } <span className={ styles.highlight }>{ gas.toFormat(0) } gas ({ gasPrice.div(1000000).toFormat(0) }M/<small>ETH</small>)</span> for a total transaction value of <span className={ styles.highlight }>{ this.renderEtherValue(gasValue) }</span>
</div> </div>
); );
} }

View File

@ -23,6 +23,7 @@
.hash { .hash {
padding-top: 1em; padding-top: 1em;
word-break: break-all;
} }
.confirm { .confirm {
@ -31,11 +32,13 @@
} }
.progressbar { .progressbar {
margin: 0.5em !important; margin: 0.5em 0 !important;
width: 30% !important; width: 30% !important;
min-width: 220px;
display: inline-block !important; display: inline-block !important;
height: 0.75em !important; height: 0.75em !important;
} }
.progressinfo { .progressinfo {
text-align: center;
} }

View File

@ -19,6 +19,7 @@ import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux'; import { connect } from 'react-redux';
import { bindActionCreators } from 'redux'; import { bindActionCreators } from 'redux';
import { LinearProgress } from 'material-ui'; import { LinearProgress } from 'material-ui';
import { txLink } from '../../3rdparty/etherscan/links';
import styles from './txHash.css'; import styles from './txHash.css';
@ -29,7 +30,8 @@ class TxHash extends Component {
static propTypes = { static propTypes = {
hash: PropTypes.string.isRequired, hash: PropTypes.string.isRequired,
isTest: PropTypes.bool isTest: PropTypes.bool,
summary: PropTypes.bool
} }
state = { state = {
@ -54,16 +56,22 @@ class TxHash extends Component {
} }
render () { render () {
const { hash, isTest } = this.props; const { hash, isTest, summary } = this.props;
const link = `https://${isTest ? 'testnet.' : ''}etherscan.io/tx/${hash}`; let header = null;
return ( if (!summary) {
<div className={ styles.details }> header = (
<div className={ styles.header }> <div className={ styles.header }>
The transaction has been posted to the network with a transaction hash of The transaction has been posted to the network with a transaction hash of
</div> </div>
);
}
return (
<div className={ styles.details }>
{ header }
<div className={ styles.hash }> <div className={ styles.hash }>
<a href={ link } target='_blank'>{ hash }</a> <a href={ txLink(hash, isTest) } target='_blank'>{ hash }</a>
</div> </div>
{ this.renderConfirmations() } { this.renderConfirmations() }
</div> </div>

View File

@ -23,6 +23,7 @@ import { bindActionCreators } from 'redux';
import { fetchBlock, fetchTransaction } from '../../../../redux/providers/blockchainActions'; import { fetchBlock, fetchTransaction } from '../../../../redux/providers/blockchainActions';
import { IdentityIcon, IdentityName, MethodDecoding } from '../../../../ui'; import { IdentityIcon, IdentityName, MethodDecoding } from '../../../../ui';
import { txLink, addressLink } from '../../../../3rdparty/etherscan/links';
import styles from '../transactions.css'; import styles from '../transactions.css';
@ -55,9 +56,7 @@ class Transaction extends Component {
} }
render () { render () {
const { block, transaction, isTest } = this.props; const { block, transaction } = this.props;
const prefix = `https://${isTest ? 'testnet.' : ''}etherscan.io/`;
return ( return (
<tr> <tr>
@ -65,9 +64,9 @@ class Transaction extends Component {
<div>{ this.formatBlockTimestamp(block) }</div> <div>{ this.formatBlockTimestamp(block) }</div>
<div>{ this.formatNumber(transaction.blockNumber) }</div> <div>{ this.formatNumber(transaction.blockNumber) }</div>
</td> </td>
{ this.renderAddress(prefix, transaction.from) } { this.renderAddress(transaction.from) }
{ this.renderTransaction() } { this.renderTransaction() }
{ this.renderAddress(prefix, transaction.to) } { this.renderAddress(transaction.to) }
<td className={ styles.method }> <td className={ styles.method }>
{ this.renderMethod() } { this.renderMethod() }
</td> </td>
@ -93,15 +92,16 @@ class Transaction extends Component {
renderTransaction () { renderTransaction () {
const { transaction, isTest } = this.props; const { transaction, isTest } = this.props;
const prefix = `https://${isTest ? 'testnet.' : ''}etherscan.io/`;
const hashLink = `${prefix}tx/${transaction.hash}`;
return ( return (
<td className={ styles.transaction }> <td className={ styles.transaction }>
{ this.renderEtherValue() } { this.renderEtherValue() }
<div></div> <div></div>
<div> <div>
<a href={ hashLink } target='_blank' className={ styles.link }> <a
className={ styles.link }
href={ txLink(transaction.hash, isTest) }
target='_blank'
>
{ this.formatHash(transaction.hash) } { this.formatHash(transaction.hash) }
</a> </a>
</div> </div>
@ -109,10 +109,12 @@ class Transaction extends Component {
); );
} }
renderAddress (prefix, address) { renderAddress (address) {
const { isTest } = this.props;
const eslink = address ? ( const eslink = address ? (
<a <a
href={ `${prefix}address/${address}` } href={ addressLink(address, isTest) }
target='_blank' target='_blank'
className={ styles.link }> className={ styles.link }>
<IdentityName address={ address } shorten /> <IdentityName address={ address } shorten />

View File

@ -22,6 +22,7 @@ import { bindActionCreators } from 'redux';
import { fetchBlock, fetchTransaction } from '../../../../redux/providers/blockchainActions'; import { fetchBlock, fetchTransaction } from '../../../../redux/providers/blockchainActions';
import { IdentityIcon, IdentityName, Input, InputAddress } from '../../../../ui'; import { IdentityIcon, IdentityName, Input, InputAddress } from '../../../../ui';
import { txLink } from '../../../../3rdparty/etherscan/links';
import styles from '../../contract.css'; import styles from '../../contract.css';
@ -49,7 +50,7 @@ class Event extends Component {
const block = blocks[event.blockNumber.toString()]; const block = blocks[event.blockNumber.toString()];
const transaction = transactions[event.transactionHash] || {}; const transaction = transactions[event.transactionHash] || {};
const classes = `${styles.event} ${styles[event.state]}`; const classes = `${styles.event} ${styles[event.state]}`;
const url = `https://${isTest ? 'testnet.' : ''}etherscan.io/tx/${event.transactionHash}`; const url = txLink(event.transactionHash, isTest);
const keys = Object.keys(event.params).join(', '); const keys = Object.keys(event.params).join(', ');
const values = Object.keys(event.params).map((name, index) => { const values = Object.keys(event.params).map((name, index) => {
const param = event.params[name]; const param = event.params[name];

View File

@ -16,7 +16,7 @@
import React, { Component, PropTypes } from 'react'; import React, { Component, PropTypes } from 'react';
import { getAccountLink } from '../../util/account'; import { addressLink } from '../../../../../3rdparty/etherscan/links';
import styles from './AccountLink.css'; import styles from './AccountLink.css';
export default class AccountLink extends Component { export default class AccountLink extends Component {
@ -57,7 +57,7 @@ export default class AccountLink extends Component {
} }
updateLink (address, chain) { updateLink (address, chain) {
const link = getAccountLink(address, chain); const link = addressLink(address, chain === 'morden' || chain === 'testnet');
this.setState({ this.setState({
link link

View File

@ -29,7 +29,7 @@
.statusContainer { .statusContainer {
width: 220px; width: 220px;
padding: 20px 40px 0 40px; padding: 0 40px 0 40px;
/*border-left: 1px solid #aaa;*/ /*border-left: 1px solid #aaa;*/
position: absolute; position: absolute;
top: 0; top: 0;
@ -46,7 +46,8 @@
} }
.isRejected { .isRejected {
opacity: 0.7; opacity: 0.5;
padding-top: 2em;
} }
.txHash { .txHash {

View File

@ -17,6 +17,9 @@
import React, { Component, PropTypes } from 'react'; import React, { Component, PropTypes } from 'react';
import CircularProgress from 'material-ui/CircularProgress'; import CircularProgress from 'material-ui/CircularProgress';
import { TxHash } from '../../../../ui';
import TransactionMainDetails from '../TransactionMainDetails'; import TransactionMainDetails from '../TransactionMainDetails';
import TxHashLink from '../TxHashLink'; import TxHashLink from '../TxHashLink';
import TransactionSecondaryDetails from '../TransactionSecondaryDetails'; import TransactionSecondaryDetails from '../TransactionSecondaryDetails';
@ -57,7 +60,7 @@ export default class TransactionFinished extends Component {
}; };
componentWillMount () { componentWillMount () {
const { gas, gasPrice, value } = this.props; const { from, to, gas, gasPrice, value } = this.props;
const fee = tUtil.getFee(gas, gasPrice); // BigNumber object const fee = tUtil.getFee(gas, gasPrice); // BigNumber object
const totalValue = tUtil.getTotalValue(fee, value); const totalValue = tUtil.getTotalValue(fee, value);
this.setState({ totalValue }); this.setState({ totalValue });
@ -70,9 +73,10 @@ export default class TransactionFinished extends Component {
console.error('could not fetch chain', err); console.error('could not fetch chain', err);
}); });
const { from, to } = this.props;
this.fetchBalance(from, 'fromBalance'); this.fetchBalance(from, 'fromBalance');
if (to) this.fetchBalance(to, 'toBalance'); if (to) {
this.fetchBalance(to, 'toBalance');
}
} }
render () { render () {
@ -109,19 +113,27 @@ export default class TransactionFinished extends Component {
} }
renderStatus () { renderStatus () {
const { status } = this.props; const { status, txHash } = this.props;
const klass = status === 'confirmed' ? styles.isConfirmed : styles.isRejected;
if (status !== 'confirmed') {
return (
<div>
<span className={ styles.isRejected }>{ capitalize(status) }</span>
</div>
);
}
return ( return (
<div> <TxHash
<span className={ klass }>{ capitalize(status) }</span> summary
{ this.renderTxHash() } hash={ txHash } />
</div>
); );
} }
renderTxHash () { renderTxHash () {
const { txHash, chain } = this.props; const { txHash } = this.props;
if (!txHash) { const { chain } = this.state;
if (!txHash || !chain) {
return; return;
} }

View File

@ -16,7 +16,7 @@
import React, { Component, PropTypes } from 'react'; import React, { Component, PropTypes } from 'react';
import { getTxLink } from '../util/transaction'; import { txLink } from '../../../../3rdparty/etherscan/links';
export default class TxHashLink extends Component { export default class TxHashLink extends Component {
@ -27,27 +27,12 @@ export default class TxHashLink extends Component {
className: PropTypes.string className: PropTypes.string
} }
state = {
link: null
};
componentWillMount () {
const { txHash, chain } = this.props;
this.updateLink(txHash, chain);
}
componentWillReceiveProps (nextProps) {
const { txHash, chain } = nextProps;
this.updateLink(txHash, chain);
}
render () { render () {
const { children, txHash, className } = this.props; const { children, txHash, className, chain } = this.props;
const { link } = this.state;
return ( return (
<a <a
href={ link } href={ txLink(txHash, chain === 'morden' || chain === 'testnet') }
target='_blank' target='_blank'
className={ className }> className={ className }>
{ children || txHash } { children || txHash }
@ -55,9 +40,4 @@ export default class TxHashLink extends Component {
); );
} }
updateLink (txHash, chain) {
const link = getTxLink(txHash, chain);
this.setState({ link });
}
} }

View File

@ -18,7 +18,6 @@ import BigNumber from 'bignumber.js';
const WEI_TO_ETH_MULTIPLIER = 0.000000000000000001; const WEI_TO_ETH_MULTIPLIER = 0.000000000000000001;
const WEI_TO_SZABU_MULTIPLIER = 0.000000000001; const WEI_TO_SZABU_MULTIPLIER = 0.000000000001;
import { BASE_LINK_TX_MORDEN, BASE_LINK_TX_HOMESTEAD } from '../constants/constants';
export const getShortData = _getShortData; export const getShortData = _getShortData;
// calculations // calculations
@ -33,8 +32,6 @@ export const getTotalValueDisplay = _getTotalValueDisplay;
export const getTotalValueDisplayWei = _getTotalValueDisplayWei; export const getTotalValueDisplayWei = _getTotalValueDisplayWei;
export const getEthmFromWeiDisplay = _getEthmFromWeiDisplay; export const getEthmFromWeiDisplay = _getEthmFromWeiDisplay;
export const getGasDisplay = _getGasDisplay; export const getGasDisplay = _getGasDisplay;
// links
export const getTxLink = _getTxLink;
function _getShortData (data) { function _getShortData (data) {
if (data.length <= 3) { if (data.length <= 3) {
@ -111,11 +108,6 @@ function _getEthmFromWeiDisplay (weiHexString) {
return value.times(WEI_TO_ETH_MULTIPLIER).times(1e7).toFixed(5); return value.times(WEI_TO_ETH_MULTIPLIER).times(1e7).toFixed(5);
} }
function _getTxLink (txHash, chain) {
const base = chain === 'morden' || chain === 'testnet' ? BASE_LINK_TX_MORDEN : BASE_LINK_TX_HOMESTEAD;
return base + txHash;
}
function _getGasDisplay (gas) { function _getGasDisplay (gas) {
return new BigNumber(gas).times(1e-7).toFormat(4); return new BigNumber(gas).times(1e-7).toFormat(4);
} }

View File

@ -83,9 +83,13 @@ module.exports = {
loader: 'style!css' loader: 'style!css'
}, },
{ {
test: /\.(png|jpg|)$/, test: /\.(png|jpg)$/,
loader: 'file-loader' loader: 'file-loader'
}, },
{
test: /\.ico$/,
loader: 'file-loader?name=[name].[ext]'
},
{ {
test: /\.(woff(2)|ttf|eot|svg|otf)(\?v=[0-9]\.[0-9]\.[0-9])?$/, test: /\.(woff(2)|ttf|eot|svg|otf)(\?v=[0-9]\.[0-9]\.[0-9])?$/,
loader: 'file-loader' loader: 'file-loader'

82
js/webpack.npm.js Normal file
View File

@ -0,0 +1,82 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
const path = require('path');
const webpack = require('webpack');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const packageJson = require('./package.json');
const ENV = process.env.NODE_ENV || 'development';
const isProd = ENV === 'production';
module.exports = {
context: path.join(__dirname, './src'),
entry: 'library.js',
output: {
path: path.join(__dirname, '.npmjs'),
filename: 'library.js',
libraryTarget: 'commonjs'
},
module: {
loaders: [
{
test: /(\.jsx|\.js)$/,
loader: 'babel',
exclude: /node_modules/
}
]
},
resolve: {
root: path.resolve('./src'),
extensions: ['', '.js']
},
plugins: (function () {
const plugins = [
new CopyWebpackPlugin([
{
from: '../parity.package.json',
to: 'package.json',
transform: function (content, path) {
const json = JSON.parse(content.toString());
json.version = packageJson.version;
return new Buffer(JSON.stringify(json, null, ' '), 'utf-8');
}
},
{
from: '../LICENSE'
},
{
from: '../parity.md',
to: 'README.md'
}
], { copyUnmodified: true })
];
if (isProd) {
plugins.push(new webpack.optimize.UglifyJsPlugin({
screwIe8: true,
compress: {
warnings: false
},
output: {
comments: false
}
}));
}
return plugins;
}())
};

View File

@ -14,6 +14,13 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
macro_rules! println_stderr(
($($arg:tt)*) => { {
let r = writeln!(&mut ::std::io::stderr(), $($arg)*);
r.expect("failed printing to stderr");
} }
);
macro_rules! otry { macro_rules! otry {
($e: expr) => ( ($e: expr) => (
match $e { match $e {
@ -39,7 +46,7 @@ macro_rules! usage {
) => { ) => {
use toml; use toml;
use std::{fs, io, process}; use std::{fs, io, process};
use std::io::Read; use std::io::{Read, Write};
use util::version; use util::version;
use docopt::{Docopt, Error as DocoptError}; use docopt::{Docopt, Error as DocoptError};
use helpers::replace_home; use helpers::replace_home;
@ -58,20 +65,20 @@ macro_rules! usage {
match self { match self {
ArgsError::Docopt(e) => e.exit(), ArgsError::Docopt(e) => e.exit(),
ArgsError::Parsing(errors) => { ArgsError::Parsing(errors) => {
println!("There is an error in config file."); println_stderr!("There is an error in config file.");
for e in &errors { for e in &errors {
println!("{}", e); println_stderr!("{}", e);
} }
process::exit(2) process::exit(2)
}, },
ArgsError::Decode(e) => { ArgsError::Decode(e) => {
println!("You might have supplied invalid parameters in config file."); println_stderr!("You might have supplied invalid parameters in config file.");
println!("{}", e); println_stderr!("{}", e);
process::exit(2) process::exit(2)
}, },
ArgsError::Config(path, e) => { ArgsError::Config(path, e) => {
println!("There was an error reading your config file at: {}", path); println_stderr!("There was an error reading your config file at: {}", path);
println!("{}", e); println_stderr!("{}", e);
process::exit(2) process::exit(2)
} }
} }
@ -136,7 +143,7 @@ macro_rules! usage {
let config = match (fs::File::open(&config_file), raw_args.flag_config.is_some()) { let config = match (fs::File::open(&config_file), raw_args.flag_config.is_some()) {
// Load config file // Load config file
(Ok(mut file), _) => { (Ok(mut file), _) => {
println!("Loading config file from {}", &config_file); println_stderr!("Loading config file from {}", &config_file);
let mut config = String::new(); let mut config = String::new();
try!(file.read_to_string(&mut config).map_err(|e| ArgsError::Config(config_file, e))); try!(file.read_to_string(&mut config).map_err(|e| ArgsError::Config(config_file, e)));
try!(Self::parse_config(&config)) try!(Self::parse_config(&config))
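The hunk above routes the CLI's config-file diagnostics through a new `println_stderr!` macro instead of `println!`, so these messages go to stderr, presumably keeping stdout free for the command's actual output. A minimal, self-contained sketch of the same idiom; the config path and JSON result below are illustrative only, not taken from Parity:

```rust
// Sketch only: mirrors the macro added in the hunk above; the file path and
// the JSON printed to stdout are made-up examples.
use std::io::Write;

macro_rules! println_stderr(
    ($($arg:tt)*) => { {
        let r = writeln!(&mut ::std::io::stderr(), $($arg)*);
        r.expect("failed printing to stderr");
    } }
);

fn main() {
    // Diagnostics go to stderr...
    println_stderr!("Loading config file from {}", "./config.toml");
    // ...while stdout remains available for machine-readable results.
    println!("{}", "{\"status\": \"ok\"}");
}
```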

View File

@ -184,7 +184,7 @@ impl ChainNotify for Informant {
let ripe = Instant::now() > *last_import + Duration::from_secs(1) && !importing; let ripe = Instant::now() > *last_import + Duration::from_secs(1) && !importing;
let txs_imported = imported.iter() let txs_imported = imported.iter()
.take(imported.len() - if ripe {1} else {0}) .take(imported.len() - if ripe {1} else {0})
.filter_map(|h| self.client.block(BlockID::Hash(h.clone()))) .filter_map(|h| self.client.block(BlockID::Hash(*h)))
.map(|b| BlockView::new(&b).transactions_count()) .map(|b| BlockView::new(&b).transactions_count())
.sum(); .sum();
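The informant change above is a small cleanup: `H256` is a `Copy` type, so dereferencing the borrowed hash is enough and the explicit `clone()` is redundant. A tiny sketch of the idiom with a hypothetical stand-in type (not ethcore's real `H256`):

```rust
// Sketch only: `Hash` stands in for a small Copy type such as H256.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Hash([u8; 4]);

fn main() {
    let hashes = vec![Hash([1, 2, 3, 4]), Hash([5, 6, 7, 8])];
    // Both lines produce the same result; the second avoids a needless clone().
    let cloned: Vec<Hash> = hashes.iter().map(|h| h.clone()).collect();
    let copied: Vec<Hash> = hashes.iter().map(|h| *h).collect();
    assert_eq!(cloned, copied);
}
```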

View File

@ -207,10 +207,10 @@ fn main() {
match start() { match start() {
Ok(result) => { Ok(result) => {
println!("{}", result); info!("{}", result);
}, },
Err(err) => { Err(err) => {
println!("{}", err); info!("{}", err);
process::exit(1); process::exit(1);
} }
} }

View File

@ -53,7 +53,7 @@ use url;
const SNAPSHOT_PERIOD: u64 = 10000; const SNAPSHOT_PERIOD: u64 = 10000;
// how many blocks to wait before starting a periodic snapshot. // how many blocks to wait before starting a periodic snapshot.
const SNAPSHOT_HISTORY: u64 = 500; const SNAPSHOT_HISTORY: u64 = 100;
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct RunCmd { pub struct RunCmd {

View File

@ -58,7 +58,7 @@ impl Default for SyncConfig {
network_id: U256::from(1), network_id: U256::from(1),
subprotocol_name: *b"eth", subprotocol_name: *b"eth",
fork_block: None, fork_block: None,
warp_sync: true, warp_sync: false,
} }
} }
} }

View File

@ -29,7 +29,7 @@ use sync_io::SyncIo;
use blocks::BlockCollection; use blocks::BlockCollection;
const MAX_HEADERS_TO_REQUEST: usize = 128; const MAX_HEADERS_TO_REQUEST: usize = 128;
const MAX_BODIES_TO_REQUEST: usize = 128; const MAX_BODIES_TO_REQUEST: usize = 64;
const MAX_RECEPITS_TO_REQUEST: usize = 128; const MAX_RECEPITS_TO_REQUEST: usize = 128;
const SUBCHAIN_SIZE: u64 = 256; const SUBCHAIN_SIZE: u64 = 256;
const MAX_ROUND_PARENTS: usize = 32; const MAX_ROUND_PARENTS: usize = 32;

View File

@ -123,6 +123,7 @@ const MAX_NEW_BLOCK_AGE: BlockNumber = 20;
const MAX_TRANSACTION_SIZE: usize = 300*1024; const MAX_TRANSACTION_SIZE: usize = 300*1024;
// Min number of blocks to be behind for a snapshot sync // Min number of blocks to be behind for a snapshot sync
const SNAPSHOT_RESTORE_THRESHOLD: BlockNumber = 100000; const SNAPSHOT_RESTORE_THRESHOLD: BlockNumber = 100000;
const SNAPSHOT_MIN_PEERS: usize = 3;
const STATUS_PACKET: u8 = 0x00; const STATUS_PACKET: u8 = 0x00;
const NEW_BLOCK_HASHES_PACKET: u8 = 0x01; const NEW_BLOCK_HASHES_PACKET: u8 = 0x01;
@ -147,21 +148,27 @@ const SNAPSHOT_DATA_PACKET: u8 = 0x14;
pub const SNAPSHOT_SYNC_PACKET_COUNT: u8 = 0x15; pub const SNAPSHOT_SYNC_PACKET_COUNT: u8 = 0x15;
const HEADERS_TIMEOUT_SEC: f64 = 15f64; const MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD: usize = 3;
const BODIES_TIMEOUT_SEC: f64 = 10f64;
const RECEIPTS_TIMEOUT_SEC: f64 = 10f64; const WAIT_PEERS_TIMEOUT_SEC: u64 = 5;
const FORK_HEADER_TIMEOUT_SEC: f64 = 3f64; const STATUS_TIMEOUT_SEC: u64 = 5;
const SNAPSHOT_MANIFEST_TIMEOUT_SEC: f64 = 3f64; const HEADERS_TIMEOUT_SEC: u64 = 15;
const SNAPSHOT_DATA_TIMEOUT_SEC: f64 = 60f64; const BODIES_TIMEOUT_SEC: u64 = 10;
const RECEIPTS_TIMEOUT_SEC: u64 = 10;
const FORK_HEADER_TIMEOUT_SEC: u64 = 3;
const SNAPSHOT_MANIFEST_TIMEOUT_SEC: u64 = 3;
const SNAPSHOT_DATA_TIMEOUT_SEC: u64 = 60;
#[derive(Copy, Clone, Eq, PartialEq, Debug)] #[derive(Copy, Clone, Eq, PartialEq, Debug)]
/// Sync state /// Sync state
pub enum SyncState { pub enum SyncState {
/// Waiting for pv64 peers to start snapshot syncing /// Collecting enough peers to start syncing.
WaitingPeers,
/// Waiting for snapshot manifest download
SnapshotManifest, SnapshotManifest,
/// Downloading snapshot data /// Downloading snapshot data
SnapshotData, SnapshotData,
/// Waiting for snapshot restoration to complete /// Waiting for snapshot restoration progress.
SnapshotWaiting, SnapshotWaiting,
/// Downloading new blocks /// Downloading new blocks
Blocks, Blocks,
@ -276,7 +283,7 @@ struct PeerInfo {
/// Holds requested snapshot chunk hash if any. /// Holds requested snapshot chunk hash if any.
asking_snapshot_data: Option<H256>, asking_snapshot_data: Option<H256>,
/// Request timestamp /// Request timestamp
ask_time: f64, ask_time: u64,
/// Holds a set of transactions recently sent to this peer to avoid spamming. /// Holds a set of transactions recently sent to this peer to avoid spamming.
last_sent_transactions: HashSet<H256>, last_sent_transactions: HashSet<H256>,
/// Pending request is expired and result should be ignored /// Pending request is expired and result should be ignored
@ -324,10 +331,13 @@ pub struct ChainSync {
network_id: U256, network_id: U256,
/// Optional fork block to check /// Optional fork block to check
fork_block: Option<(BlockNumber, H256)>, fork_block: Option<(BlockNumber, H256)>,
/// Snapshot sync allowed.
snapshot_sync_enabled: bool,
/// Snapshot downloader. /// Snapshot downloader.
snapshot: Snapshot, snapshot: Snapshot,
/// Connected peers pending Status message.
/// Value is request timestamp.
handshaking_peers: HashMap<PeerId, u64>,
/// Sync start timestamp. Measured when first peer is connected
sync_start_time: Option<u64>,
} }
type RlpResponseResult = Result<Option<(PacketId, RlpStream)>, PacketDecodeError>; type RlpResponseResult = Result<Option<(PacketId, RlpStream)>, PacketDecodeError>;
@ -337,20 +347,21 @@ impl ChainSync {
pub fn new(config: SyncConfig, chain: &BlockChainClient) -> ChainSync { pub fn new(config: SyncConfig, chain: &BlockChainClient) -> ChainSync {
let chain_info = chain.chain_info(); let chain_info = chain.chain_info();
let mut sync = ChainSync { let mut sync = ChainSync {
state: SyncState::Idle, state: if config.warp_sync { SyncState::WaitingPeers } else { SyncState::Idle },
starting_block: chain.chain_info().best_block_number, starting_block: chain.chain_info().best_block_number,
highest_block: None, highest_block: None,
peers: HashMap::new(), peers: HashMap::new(),
handshaking_peers: HashMap::new(),
active_peers: HashSet::new(), active_peers: HashSet::new(),
new_blocks: BlockDownloader::new(false, &chain_info.best_block_hash, chain_info.best_block_number), new_blocks: BlockDownloader::new(false, &chain_info.best_block_hash, chain_info.best_block_number),
old_blocks: None, old_blocks: None,
last_sent_block_number: 0, last_sent_block_number: 0,
network_id: config.network_id, network_id: config.network_id,
fork_block: config.fork_block, fork_block: config.fork_block,
snapshot_sync_enabled: config.warp_sync,
snapshot: Snapshot::new(), snapshot: Snapshot::new(),
sync_start_time: None,
}; };
sync.init_downloaders(chain); sync.update_targets(chain);
sync sync
} }
@ -442,20 +453,67 @@ impl ChainSync {
self.active_peers.remove(&peer_id); self.active_peers.remove(&peer_id);
} }
fn start_snapshot_sync(&mut self, io: &mut SyncIo, peer_id: PeerId) { fn maybe_start_snapshot_sync(&mut self, io: &mut SyncIo) {
if self.state != SyncState::WaitingPeers {
return;
}
let best_block = io.chain().chain_info().best_block_number;
let (best_hash, max_peers, snapshot_peers) = {
//collect snapshot infos from peers
let snapshots = self.peers.iter()
.filter(|&(_, p)| p.is_allowed() && p.snapshot_number.map_or(false, |sn| best_block < sn && (sn - best_block) > SNAPSHOT_RESTORE_THRESHOLD))
.filter_map(|(p, peer)| peer.snapshot_hash.map(|hash| (p, hash.clone())));
let mut snapshot_peers = HashMap::new();
let mut max_peers: usize = 0;
let mut best_hash = None;
for (p, hash) in snapshots {
let peers = snapshot_peers.entry(hash).or_insert_with(Vec::new);
peers.push(*p);
if peers.len() > max_peers {
max_peers = peers.len();
best_hash = Some(hash);
}
}
(best_hash, max_peers, snapshot_peers)
};
let timeout = self.sync_start_time.map_or(false, |t| ((time::precise_time_ns() - t) / 1_000_000_000) > WAIT_PEERS_TIMEOUT_SEC);
if let (Some(hash), Some(peers)) = (best_hash, best_hash.map_or(None, |h| snapshot_peers.get(&h))) {
if max_peers >= SNAPSHOT_MIN_PEERS {
trace!(target: "sync", "Starting confirmed snapshot sync {:?} with {:?}", hash, peers);
self.start_snapshot_sync(io, peers);
} else if timeout {
trace!(target: "sync", "Starting unconfirmed snapshot sync {:?} with {:?}", hash, peers);
self.start_snapshot_sync(io, peers);
}
} else if timeout {
trace!(target: "sync", "No snapshots found, starting full sync");
self.state = SyncState::Idle;
self.continue_sync(io);
}
}
fn start_snapshot_sync(&mut self, io: &mut SyncIo, peers: &[PeerId]) {
self.snapshot.clear(); self.snapshot.clear();
self.request_snapshot_manifest(io, peer_id); for p in peers {
if self.peers.get(p).map_or(false, |p| p.asking == PeerAsking::Nothing) {
self.request_snapshot_manifest(io, *p);
}
}
self.state = SyncState::SnapshotManifest; self.state = SyncState::SnapshotManifest;
} }
/// Restart sync disregarding the block queue status. May end up re-downloading up to QUEUE_SIZE blocks /// Restart sync disregarding the block queue status. May end up re-downloading up to QUEUE_SIZE blocks
pub fn restart(&mut self, io: &mut SyncIo) { pub fn restart(&mut self, io: &mut SyncIo) {
self.init_downloaders(io.chain()); self.update_targets(io.chain());
self.reset_and_continue(io); self.reset_and_continue(io);
} }
/// Restart sync after bad block has been detected. May end up re-downloading up to QUEUE_SIZE blocks /// Update sync after the blockchain has been changed externally.
fn init_downloaders(&mut self, chain: &BlockChainClient) { pub fn update_targets(&mut self, chain: &BlockChainClient) {
// Do not assume that the block queue/chain still has our last_imported_block // Do not assume that the block queue/chain still has our last_imported_block
let chain = chain.chain_info(); let chain = chain.chain_info();
self.new_blocks = BlockDownloader::new(false, &chain.best_block_hash, chain.best_block_number); self.new_blocks = BlockDownloader::new(false, &chain.best_block_hash, chain.best_block_number);
@ -475,6 +533,7 @@ impl ChainSync {
/// Called by peer to report status /// Called by peer to report status
fn on_peer_status(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> { fn on_peer_status(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
self.handshaking_peers.remove(&peer_id);
let protocol_version: u8 = try!(r.val_at(0)); let protocol_version: u8 = try!(r.val_at(0));
let warp_protocol = io.protocol_version(&WARP_SYNC_PROTOCOL_ID, peer_id) != 0; let warp_protocol = io.protocol_version(&WARP_SYNC_PROTOCOL_ID, peer_id) != 0;
let peer = PeerInfo { let peer = PeerInfo {
@ -486,7 +545,7 @@ impl ChainSync {
asking: PeerAsking::Nothing, asking: PeerAsking::Nothing,
asking_blocks: Vec::new(), asking_blocks: Vec::new(),
asking_hash: None, asking_hash: None,
ask_time: 0f64, ask_time: 0,
last_sent_transactions: HashSet::new(), last_sent_transactions: HashSet::new(),
expired: false, expired: false,
confirmation: if self.fork_block.is_none() { ForkConfirmation::Confirmed } else { ForkConfirmation::Unconfirmed }, confirmation: if self.fork_block.is_none() { ForkConfirmation::Confirmed } else { ForkConfirmation::Unconfirmed },
@ -496,7 +555,12 @@ impl ChainSync {
block_set: None, block_set: None,
}; };
trace!(target: "sync", "New peer {} (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{})", peer_id, peer.protocol_version, peer.network_id, peer.difficulty, peer.latest_hash, peer.genesis); if self.sync_start_time.is_none() {
self.sync_start_time = Some(time::precise_time_ns());
}
trace!(target: "sync", "New peer {} (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{}, snapshot:{:?})",
peer_id, peer.protocol_version, peer.network_id, peer.difficulty, peer.latest_hash, peer.genesis, peer.snapshot_number);
if io.is_expired() { if io.is_expired() {
trace!(target: "sync", "Status packet from expired session {}:{}", peer_id, io.peer_info(peer_id)); trace!(target: "sync", "Status packet from expired session {}:{}", peer_id, io.peer_info(peer_id));
return Ok(()); return Ok(());
@ -578,7 +642,7 @@ impl ChainSync {
} }
let item_count = r.item_count(); let item_count = r.item_count();
trace!(target: "sync", "{} -> BlockHeaders ({} entries), state = {:?}, set = {:?}", peer_id, item_count, self.state, block_set); trace!(target: "sync", "{} -> BlockHeaders ({} entries), state = {:?}, set = {:?}", peer_id, item_count, self.state, block_set);
if self.state == SyncState::Idle && self.old_blocks.is_none() { if (self.state == SyncState::Idle || self.state == SyncState::WaitingPeers) && self.old_blocks.is_none() {
trace!(target: "sync", "Ignored unexpected block headers"); trace!(target: "sync", "Ignored unexpected block headers");
self.continue_sync(io); self.continue_sync(io);
return Ok(()); return Ok(());
@ -875,7 +939,7 @@ impl ChainSync {
} }
self.clear_peer_download(peer_id); self.clear_peer_download(peer_id);
if !self.reset_peer_asking(peer_id, PeerAsking::SnapshotManifest) || self.state != SyncState::SnapshotManifest { if !self.reset_peer_asking(peer_id, PeerAsking::SnapshotManifest) || self.state != SyncState::SnapshotManifest {
trace!(target: "sync", "{}: Ignored unexpected manifest", peer_id); trace!(target: "sync", "{}: Ignored unexpected/expired manifest", peer_id);
self.continue_sync(io); self.continue_sync(io);
return Ok(()); return Ok(());
} }
@ -918,7 +982,7 @@ impl ChainSync {
match io.snapshot_service().status() { match io.snapshot_service().status() {
RestorationStatus::Inactive | RestorationStatus::Failed => { RestorationStatus::Inactive | RestorationStatus::Failed => {
trace!(target: "sync", "{}: Snapshot restoration aborted", peer_id); trace!(target: "sync", "{}: Snapshot restoration aborted", peer_id);
self.state = SyncState::Idle; self.state = SyncState::WaitingPeers;
self.snapshot.clear(); self.snapshot.clear();
self.continue_sync(io); self.continue_sync(io);
return Ok(()); return Ok(());
@ -960,6 +1024,7 @@ impl ChainSync {
/// Called by peer when it is disconnecting /// Called by peer when it is disconnecting
pub fn on_peer_aborting(&mut self, io: &mut SyncIo, peer: PeerId) { pub fn on_peer_aborting(&mut self, io: &mut SyncIo, peer: PeerId) {
trace!(target: "sync", "== Disconnecting {}: {}", peer, io.peer_info(peer)); trace!(target: "sync", "== Disconnecting {}: {}", peer, io.peer_info(peer));
self.handshaking_peers.remove(&peer);
if self.peers.contains_key(&peer) { if self.peers.contains_key(&peer) {
debug!(target: "sync", "Disconnected {}", peer); debug!(target: "sync", "Disconnected {}", peer);
self.clear_peer_download(peer); self.clear_peer_download(peer);
@ -975,12 +1040,14 @@ impl ChainSync {
if let Err(e) = self.send_status(io, peer) { if let Err(e) = self.send_status(io, peer) {
debug!(target:"sync", "Error sending status request: {:?}", e); debug!(target:"sync", "Error sending status request: {:?}", e);
io.disable_peer(peer); io.disable_peer(peer);
} else {
self.handshaking_peers.insert(peer, time::precise_time_ns());
} }
} }
/// Resume downloading /// Resume downloading
fn continue_sync(&mut self, io: &mut SyncIo) { fn continue_sync(&mut self, io: &mut SyncIo) {
if self.state != SyncState::Waiting && self.state != SyncState::SnapshotWaiting if (self.state == SyncState::Blocks || self.state == SyncState::NewBlocks || self.state == SyncState::Idle)
&& !self.peers.values().any(|p| p.asking != PeerAsking::Nothing && p.block_set != Some(BlockSet::OldBlocks) && p.can_sync()) { && !self.peers.values().any(|p| p.asking != PeerAsking::Nothing && p.block_set != Some(BlockSet::OldBlocks) && p.can_sync()) {
self.complete_sync(io); self.complete_sync(io);
} }
@ -1040,11 +1107,9 @@ impl ChainSync {
let higher_difficulty = peer_difficulty.map_or(true, |pd| pd > syncing_difficulty); let higher_difficulty = peer_difficulty.map_or(true, |pd| pd > syncing_difficulty);
if force || self.state == SyncState::NewBlocks || higher_difficulty || self.old_blocks.is_some() { if force || self.state == SyncState::NewBlocks || higher_difficulty || self.old_blocks.is_some() {
match self.state { match self.state {
SyncState::Idle if self.snapshot_sync_enabled SyncState::WaitingPeers => {
&& chain_info.best_block_number < peer_snapshot_number trace!(target: "sync", "Checking snapshot sync: {} vs {}", peer_snapshot_number, chain_info.best_block_number);
&& (peer_snapshot_number - chain_info.best_block_number) > SNAPSHOT_RESTORE_THRESHOLD => { self.maybe_start_snapshot_sync(io);
trace!(target: "sync", "Starting snapshot sync: {} vs {}", peer_snapshot_number, chain_info.best_block_number);
self.start_snapshot_sync(io, peer_id);
}, },
SyncState::Idle | SyncState::Blocks | SyncState::NewBlocks => { SyncState::Idle | SyncState::Blocks | SyncState::NewBlocks => {
if io.chain().queue_info().is_full() { if io.chain().queue_info().is_full() {
@ -1070,6 +1135,13 @@ impl ChainSync {
} }
}, },
SyncState::SnapshotData => { SyncState::SnapshotData => {
if let RestorationStatus::Ongoing { state_chunks: _, block_chunks: _, state_chunks_done, block_chunks_done, } = io.snapshot_service().status() {
if self.snapshot.done_chunks() - (state_chunks_done + block_chunks_done) as usize > MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD {
trace!(target: "sync", "Snapshot queue full, pausing sync");
self.state = SyncState::SnapshotWaiting;
return;
}
}
if peer_snapshot_hash.is_some() && peer_snapshot_hash == self.snapshot.snapshot_hash() { if peer_snapshot_hash.is_some() && peer_snapshot_hash == self.snapshot.snapshot_hash() {
self.request_snapshot_data(io, peer_id); self.request_snapshot_data(io, peer_id);
} }
@ -1253,7 +1325,7 @@ impl ChainSync {
warn!(target:"sync", "Asking {:?} while requesting {:?}", peer.asking, asking); warn!(target:"sync", "Asking {:?} while requesting {:?}", peer.asking, asking);
} }
peer.asking = asking; peer.asking = asking;
peer.ask_time = time::precise_time_s(); peer.ask_time = time::precise_time_ns();
let result = if packet_id >= ETH_PACKET_COUNT { let result = if packet_id >= ETH_PACKET_COUNT {
sync.send_protocol(WARP_SYNC_PROTOCOL_ID, peer_id, packet_id, packet) sync.send_protocol(WARP_SYNC_PROTOCOL_ID, peer_id, packet_id, packet)
} else { } else {
@ -1277,7 +1349,7 @@ impl ChainSync {
/// Called when peer sends us new transactions /// Called when peer sends us new transactions
fn on_peer_transactions(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> { fn on_peer_transactions(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
// Accept transactions only when fully synced // Accept transactions only when fully synced
if !io.is_chain_queue_empty() || self.state != SyncState::Idle || self.state != SyncState::NewBlocks { if !io.is_chain_queue_empty() || (self.state != SyncState::Idle && self.state != SyncState::NewBlocks) {
trace!(target: "sync", "{} Ignoring transactions while syncing", peer_id); trace!(target: "sync", "{} Ignoring transactions while syncing", peer_id);
return Ok(()); return Ok(());
} }
@ -1437,7 +1509,7 @@ impl ChainSync {
} }
trace!(target: "sync", "{} -> GetNodeData: return {} entries", peer_id, added); trace!(target: "sync", "{} -> GetNodeData: return {} entries", peer_id, added);
let mut rlp = RlpStream::new_list(added); let mut rlp = RlpStream::new_list(added);
for d in data.into_iter() { for d in data {
rlp.append(&d); rlp.append(&d);
} }
Ok(Some((NODE_DATA_PACKET, rlp))) Ok(Some((NODE_DATA_PACKET, rlp)))
@ -1590,17 +1662,18 @@ impl ChainSync {
#[cfg_attr(feature="dev", allow(match_same_arms))] #[cfg_attr(feature="dev", allow(match_same_arms))]
pub fn maintain_peers(&mut self, io: &mut SyncIo) { pub fn maintain_peers(&mut self, io: &mut SyncIo) {
let tick = time::precise_time_s(); let tick = time::precise_time_ns();
let mut aborting = Vec::new(); let mut aborting = Vec::new();
for (peer_id, peer) in &self.peers { for (peer_id, peer) in &self.peers {
let elapsed = (tick - peer.ask_time) / 1_000_000_000;
let timeout = match peer.asking { let timeout = match peer.asking {
PeerAsking::BlockHeaders => (tick - peer.ask_time) > HEADERS_TIMEOUT_SEC, PeerAsking::BlockHeaders => elapsed > HEADERS_TIMEOUT_SEC,
PeerAsking::BlockBodies => (tick - peer.ask_time) > BODIES_TIMEOUT_SEC, PeerAsking::BlockBodies => elapsed > BODIES_TIMEOUT_SEC,
PeerAsking::BlockReceipts => (tick - peer.ask_time) > RECEIPTS_TIMEOUT_SEC, PeerAsking::BlockReceipts => elapsed > RECEIPTS_TIMEOUT_SEC,
PeerAsking::Nothing => false, PeerAsking::Nothing => false,
PeerAsking::ForkHeader => (tick - peer.ask_time) > FORK_HEADER_TIMEOUT_SEC, PeerAsking::ForkHeader => elapsed > FORK_HEADER_TIMEOUT_SEC,
PeerAsking::SnapshotManifest => (tick - peer.ask_time) > SNAPSHOT_MANIFEST_TIMEOUT_SEC, PeerAsking::SnapshotManifest => elapsed > SNAPSHOT_MANIFEST_TIMEOUT_SEC,
PeerAsking::SnapshotData => (tick - peer.ask_time) > SNAPSHOT_DATA_TIMEOUT_SEC, PeerAsking::SnapshotData => elapsed > SNAPSHOT_DATA_TIMEOUT_SEC,
}; };
if timeout { if timeout {
trace!(target:"sync", "Timeout {}", peer_id); trace!(target:"sync", "Timeout {}", peer_id);
@ -1611,16 +1684,42 @@ impl ChainSync {
for p in aborting { for p in aborting {
self.on_peer_aborting(io, p); self.on_peer_aborting(io, p);
} }
// Check for handshake timeouts
for (peer, ask_time) in &self.handshaking_peers {
let elapsed = (tick - ask_time) / 1_000_000_000;
if elapsed > STATUS_TIMEOUT_SEC {
trace!(target:"sync", "Status timeout {}", peer);
io.disconnect_peer(*peer);
}
}
} }
fn check_resume(&mut self, io: &mut SyncIo) { fn check_resume(&mut self, io: &mut SyncIo) {
if self.state == SyncState::Waiting && !io.chain().queue_info().is_full() && self.state == SyncState::Waiting { if self.state == SyncState::Waiting && !io.chain().queue_info().is_full() && self.state == SyncState::Waiting {
self.state = SyncState::Blocks; self.state = SyncState::Blocks;
self.continue_sync(io); self.continue_sync(io);
} else if self.state == SyncState::SnapshotWaiting && io.snapshot_service().status() == RestorationStatus::Inactive { } else if self.state == SyncState::SnapshotWaiting {
trace!(target:"sync", "Snapshot restoration is complete"); match io.snapshot_service().status() {
self.restart(io); RestorationStatus::Inactive => {
self.continue_sync(io); trace!(target:"sync", "Snapshot restoration is complete");
self.restart(io);
self.continue_sync(io);
},
RestorationStatus::Ongoing { state_chunks: _, block_chunks: _, state_chunks_done, block_chunks_done, } => {
if !self.snapshot.is_complete() && self.snapshot.done_chunks() - (state_chunks_done + block_chunks_done) as usize <= MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD {
trace!(target:"sync", "Resuming snapshot sync");
self.state = SyncState::SnapshotData;
self.continue_sync(io);
}
},
RestorationStatus::Failed => {
trace!(target: "sync", "Snapshot restoration aborted");
self.state = SyncState::WaitingPeers;
self.snapshot.clear();
self.continue_sync(io);
},
}
} }
} }
@ -1828,6 +1927,7 @@ impl ChainSync {
/// Maintain other peers. Send out any new blocks and transactions /// Maintain other peers. Send out any new blocks and transactions
pub fn maintain_sync(&mut self, io: &mut SyncIo) { pub fn maintain_sync(&mut self, io: &mut SyncIo) {
self.maybe_start_snapshot_sync(io);
self.check_resume(io); self.check_resume(io);
} }
@ -2050,7 +2150,7 @@ mod tests {
asking: PeerAsking::Nothing, asking: PeerAsking::Nothing,
asking_blocks: Vec::new(), asking_blocks: Vec::new(),
asking_hash: None, asking_hash: None,
ask_time: 0f64, ask_time: 0,
last_sent_transactions: HashSet::new(), last_sent_transactions: HashSet::new(),
expired: false, expired: false,
confirmation: super::ForkConfirmation::Confirmed, confirmation: super::ForkConfirmation::Confirmed,
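The chain.rs changes above replace the old behaviour, where the first capable peer triggered a snapshot sync, with a `WaitingPeers` state: the sync collects the snapshot hash each peer advertises and only starts a confirmed snapshot sync once at least `SNAPSHOT_MIN_PEERS` peers agree on the same hash, falling back after `WAIT_PEERS_TIMEOUT_SEC` to an unconfirmed sync (or to a full sync if no snapshots were offered). A simplified, self-contained sketch of the voting step, with plain integers standing in for `PeerId` and `H256`:

```rust
// Sketch only: the grouping-and-voting idea behind maybe_start_snapshot_sync,
// with u64 standing in for H256 and usize for PeerId.
use std::collections::HashMap;

const SNAPSHOT_MIN_PEERS: usize = 3;

/// Picks the snapshot hash advertised by the largest group of peers.
fn pick_snapshot(advertised: &[(usize, u64)]) -> Option<(u64, Vec<usize>)> {
    let mut by_hash: HashMap<u64, Vec<usize>> = HashMap::new();
    for &(peer, hash) in advertised {
        by_hash.entry(hash).or_insert_with(Vec::new).push(peer);
    }
    by_hash.into_iter().max_by_key(|&(_, ref peers)| peers.len())
}

fn main() {
    let advertised = [(1, 0xaa), (2, 0xaa), (3, 0xaa), (4, 0xbb)];
    match pick_snapshot(&advertised) {
        Some((hash, ref peers)) if peers.len() >= SNAPSHOT_MIN_PEERS => {
            println!("confirmed snapshot sync on {:#x} with peers {:?}", hash, peers);
        }
        Some((hash, peers)) => {
            println!("only {} peer(s) offer {:#x}; wait or fall back", peers.len(), hash);
        }
        None => println!("no snapshots advertised; full sync"),
    }
}
```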

View File

@ -113,7 +113,7 @@ impl Snapshot {
} }
pub fn done_chunks(&self) -> usize { pub fn done_chunks(&self) -> usize {
self.total_chunks() - self.completed_chunks.len() self.completed_chunks.len()
} }
pub fn is_complete(&self) -> bool { pub fn is_complete(&self) -> bool {
@ -165,6 +165,7 @@ mod test {
let mut snapshot = Snapshot::new(); let mut snapshot = Snapshot::new();
let (manifest, mhash, state_chunks, block_chunks) = test_manifest(); let (manifest, mhash, state_chunks, block_chunks) = test_manifest();
snapshot.reset_to(&manifest, &mhash); snapshot.reset_to(&manifest, &mhash);
assert_eq!(snapshot.done_chunks(), 0);
assert!(snapshot.validate_chunk(&H256::random().to_vec()).is_err()); assert!(snapshot.validate_chunk(&H256::random().to_vec()).is_err());
let requested: Vec<H256> = (0..40).map(|_| snapshot.needed_chunk().unwrap()).collect(); let requested: Vec<H256> = (0..40).map(|_| snapshot.needed_chunk().unwrap()).collect();
@ -194,6 +195,8 @@ mod test {
} }
assert!(snapshot.is_complete()); assert!(snapshot.is_complete());
assert_eq!(snapshot.done_chunks(), 40);
assert_eq!(snapshot.done_chunks(), snapshot.total_chunks());
assert_eq!(snapshot.snapshot_hash(), Some(manifest.into_rlp().sha3())); assert_eq!(snapshot.snapshot_hash(), Some(manifest.into_rlp().sha3()));
} }
} }
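The snapshot.rs fix above changes `done_chunks()` from "total minus completed" (i.e. the chunks still outstanding) to the number of chunks actually completed, which is what the new download-throttling check in chain.rs compares against the restoration progress. A minimal sketch of the corrected accounting, with field names simplified relative to Parity's real `Snapshot`:

```rust
// Sketch only: simplified chunk bookkeeping; the real Snapshot tracks chunk
// hashes, not integers.
struct Snapshot {
    pending_chunks: Vec<u64>,
    completed_chunks: Vec<u64>,
}

impl Snapshot {
    fn total_chunks(&self) -> usize {
        self.pending_chunks.len() + self.completed_chunks.len()
    }

    fn done_chunks(&self) -> usize {
        // Before the fix this returned total_chunks() - completed_chunks.len(),
        // i.e. the number of chunks *not yet* done.
        self.completed_chunks.len()
    }
}

fn main() {
    let snapshot = Snapshot {
        pending_chunks: vec![1, 2, 3],
        completed_chunks: vec![4],
    };
    assert_eq!(snapshot.total_chunks(), 4);
    assert_eq!(snapshot.done_chunks(), 1);
    println!("{} of {} chunks done", snapshot.done_chunks(), snapshot.total_chunks());
}
```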

View File

@ -18,6 +18,7 @@ use util::*;
use ethcore::client::{TestBlockChainClient, BlockChainClient, BlockID, EachBlockWith}; use ethcore::client::{TestBlockChainClient, BlockChainClient, BlockID, EachBlockWith};
use chain::{SyncState}; use chain::{SyncState};
use super::helpers::*; use super::helpers::*;
use SyncConfig;
#[test] #[test]
fn two_peers() { fn two_peers() {
@ -156,6 +157,10 @@ fn restart() {
fn status_empty() { fn status_empty() {
let net = TestNet::new(2); let net = TestNet::new(2);
assert_eq!(net.peer(0).sync.read().status().state, SyncState::Idle); assert_eq!(net.peer(0).sync.read().status().state, SyncState::Idle);
let mut config = SyncConfig::default();
config.warp_sync = true;
let net = TestNet::new_with_config(2, config);
assert_eq!(net.peer(0).sync.read().status().state, SyncState::WaitingPeers);
} }
#[test] #[test]

View File

@ -127,20 +127,24 @@ pub struct TestNet {
impl TestNet { impl TestNet {
pub fn new(n: usize) -> TestNet { pub fn new(n: usize) -> TestNet {
Self::new_with_fork(n, None) Self::new_with_config(n, SyncConfig::default())
} }
pub fn new_with_fork(n: usize, fork: Option<(BlockNumber, H256)>) -> TestNet { pub fn new_with_fork(n: usize, fork: Option<(BlockNumber, H256)>) -> TestNet {
let mut config = SyncConfig::default();
config.fork_block = fork;
Self::new_with_config(n, config)
}
pub fn new_with_config(n: usize, config: SyncConfig) -> TestNet {
let mut net = TestNet { let mut net = TestNet {
peers: Vec::new(), peers: Vec::new(),
started: false, started: false,
}; };
for _ in 0..n { for _ in 0..n {
let chain = TestBlockChainClient::new(); let chain = TestBlockChainClient::new();
let mut config = SyncConfig::default();
config.fork_block = fork;
let ss = Arc::new(TestSnapshotService::new()); let ss = Arc::new(TestSnapshotService::new());
let sync = ChainSync::new(config, &chain); let sync = ChainSync::new(config.clone(), &chain);
net.peers.push(TestPeer { net.peers.push(TestPeer {
sync: RwLock::new(sync), sync: RwLock::new(sync),
snapshot_service: ss, snapshot_service: ss,
@ -164,7 +168,7 @@ impl TestNet {
for client in 0..self.peers.len() { for client in 0..self.peers.len() {
if peer != client { if peer != client {
let mut p = self.peers.get_mut(peer).unwrap(); let mut p = self.peers.get_mut(peer).unwrap();
p.sync.write().restart(&mut TestIo::new(&mut p.chain, &p.snapshot_service, &mut p.queue, Some(client as PeerId))); p.sync.write().update_targets(&mut p.chain);
p.sync.write().on_peer_connected(&mut TestIo::new(&mut p.chain, &p.snapshot_service, &mut p.queue, Some(client as PeerId)), client as PeerId); p.sync.write().on_peer_connected(&mut TestIo::new(&mut p.chain, &p.snapshot_service, &mut p.queue, Some(client as PeerId)), client as PeerId);
} }
} }
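The helper refactor funnels every TestNet constructor through new_with_config: new() forwards a default SyncConfig, new_with_fork() only sets fork_block, and each peer receives its own clone of the shared config. A rough standalone sketch of that delegation (field and type shapes are simplified stand-ins):

// Simplified stand-ins for SyncConfig and TestNet, showing only the delegation.
#[derive(Clone, Default)]
struct SyncConfig {
    fork_block: Option<(u64, u64)>,
}

struct TestNet {
    peer_configs: Vec<SyncConfig>,
}

impl TestNet {
    fn new(n: usize) -> TestNet {
        Self::new_with_config(n, SyncConfig::default())
    }
    fn new_with_fork(n: usize, fork: Option<(u64, u64)>) -> TestNet {
        let mut config = SyncConfig::default();
        config.fork_block = fork;
        Self::new_with_config(n, config)
    }
    fn new_with_config(n: usize, config: SyncConfig) -> TestNet {
        // each peer gets its own clone, mirroring ChainSync::new(config.clone(), ..)
        TestNet { peer_configs: (0..n).map(|_| config.clone()).collect() }
    }
}

fn main() {
    let plain = TestNet::new(2);
    assert_eq!(plain.peer_configs.len(), 2);
    let forked = TestNet::new_with_fork(3, Some((100, 0xdead)));
    assert_eq!(forked.peer_configs[0].fork_block, Some((100, 0xdead)));
}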

View File

@ -19,6 +19,7 @@ use ethcore::snapshot::{SnapshotService, ManifestData, RestorationStatus};
use ethcore::header::BlockNumber; use ethcore::header::BlockNumber;
use ethcore::client::{EachBlockWith}; use ethcore::client::{EachBlockWith};
use super::helpers::*; use super::helpers::*;
use SyncConfig;
pub struct TestSnapshotService { pub struct TestSnapshotService {
manifest: Option<ManifestData>, manifest: Option<ManifestData>,
@ -122,11 +123,16 @@ impl SnapshotService for TestSnapshotService {
#[test] #[test]
fn snapshot_sync() { fn snapshot_sync() {
::env_logger::init().ok(); ::env_logger::init().ok();
let mut net = TestNet::new(2); let mut config = SyncConfig::default();
net.peer_mut(0).snapshot_service = Arc::new(TestSnapshotService::new_with_snapshot(16, H256::new(), 500000)); config.warp_sync = true;
net.peer_mut(0).chain.add_blocks(1, EachBlockWith::Nothing); let mut net = TestNet::new_with_config(5, config);
net.sync_steps(19); // status + manifest + chunks let snapshot_service = Arc::new(TestSnapshotService::new_with_snapshot(16, H256::new(), 500000));
assert_eq!(net.peer(1).snapshot_service.state_restoration_chunks.lock().len(), net.peer(0).snapshot_service.manifest.as_ref().unwrap().state_hashes.len()); for i in 0..4 {
assert_eq!(net.peer(1).snapshot_service.block_restoration_chunks.lock().len(), net.peer(0).snapshot_service.manifest.as_ref().unwrap().block_hashes.len()); net.peer_mut(i).snapshot_service = snapshot_service.clone();
net.peer_mut(i).chain.add_blocks(1, EachBlockWith::Nothing);
}
net.sync_steps(50);
assert_eq!(net.peer(4).snapshot_service.state_restoration_chunks.lock().len(), net.peer(0).snapshot_service.manifest.as_ref().unwrap().state_hashes.len());
assert_eq!(net.peer(4).snapshot_service.block_restoration_chunks.lock().len(), net.peer(0).snapshot_service.manifest.as_ref().unwrap().block_hashes.len());
} }
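The reworked snapshot_sync test now runs five peers, points four of them at one shared TestSnapshotService, and turns warp sync on, presumably so the remaining peer sees enough matching snapshot offers to begin restoration. A hypothetical sketch of such a threshold check follows; the helper name, the u64 stand-in for snapshot hashes, and the idea of a fixed peer threshold are illustrative assumptions, not the sync crate's actual logic:

use std::collections::HashMap;

// Hypothetical helper: accept a snapshot hash only once at least `min_peers`
// peers advertise the same one.
fn choose_snapshot(offers: &[u64], min_peers: usize) -> Option<u64> {
    let mut counts: HashMap<u64, usize> = HashMap::new();
    for hash in offers {
        *counts.entry(*hash).or_insert(0) += 1;
    }
    counts.into_iter()
        .filter(|&(_, count)| count >= min_peers)
        .map(|(hash, _)| hash)
        .next()
}

fn main() {
    // four peers offering the same snapshot, one offering a different one
    let offers = [0xaaaa, 0xaaaa, 0xaaaa, 0xaaaa, 0xbbbb];
    assert_eq!(choose_snapshot(&offers, 3), Some(0xaaaa));
    assert_eq!(choose_snapshot(&offers, 5), None);
}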

View File

@ -2,7 +2,7 @@
# Running Parity Full Test Sute # Running Parity Full Test Sute
FEATURES="json-tests" FEATURES="json-tests"
OPTIONS="--release" OPTIONS="--verbose --release"
case $1 in case $1 in
--no-json) --no-json)

View File

@ -7,7 +7,7 @@ version = "1.4.0"
authors = ["Ethcore <admin@ethcore.io>"] authors = ["Ethcore <admin@ethcore.io>"]
[dependencies] [dependencies]
mio = { git = "https://github.com/ethcore/mio", branch = "v0.5.x" } mio = { git = "https://github.com/carllerche/mio" }
crossbeam = "0.2" crossbeam = "0.2"
parking_lot = "0.3" parking_lot = "0.3"
log = "0.3" log = "0.3"

View File

@ -65,7 +65,8 @@ mod service;
mod worker; mod worker;
mod panics; mod panics;
use mio::{EventLoop, Token}; use mio::{Token};
use mio::deprecated::{EventLoop, NotifyError};
use std::fmt; use std::fmt;
pub use worker::LOCAL_STACK_SIZE; pub use worker::LOCAL_STACK_SIZE;
@ -96,8 +97,8 @@ impl From<::std::io::Error> for IoError {
} }
} }
impl<Message> From<::mio::NotifyError<service::IoMessage<Message>>> for IoError where Message: Send + Clone { impl<Message> From<NotifyError<service::IoMessage<Message>>> for IoError where Message: Send + Clone {
fn from(_err: ::mio::NotifyError<service::IoMessage<Message>>) -> IoError { fn from(_err: NotifyError<service::IoMessage<Message>>) -> IoError {
IoError::Mio(::std::io::Error::new(::std::io::ErrorKind::ConnectionAborted, "Network IO notification error")) IoError::Mio(::std::io::Error::new(::std::io::ErrorKind::ConnectionAborted, "Network IO notification error"))
} }
} }
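The import changes track mio's reorganisation: EventLoop and NotifyError now live under mio::deprecated, and the From impl still folds any failed event-loop notification into IoError::Mio. The same conversion pattern, sketched with std's mpsc SendError standing in for the mio channel error:

use std::io;
use std::sync::mpsc::SendError;

#[derive(Debug)]
enum IoError { Mio(io::Error) }

// Any failure to notify the loop collapses into one io::Error-backed variant,
// dropping the unsent message, just as the impl above does for NotifyError.
impl<M> From<SendError<M>> for IoError {
    fn from(_err: SendError<M>) -> IoError {
        IoError::Mio(io::Error::new(io::ErrorKind::ConnectionAborted, "Network IO notification error"))
    }
}

fn main() {
    let (tx, rx) = std::sync::mpsc::channel::<u8>();
    drop(rx); // make the send fail
    let err: IoError = tx.send(1).unwrap_err().into();
    println!("{:?}", err);
}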

View File

@ -18,13 +18,16 @@ use std::sync::{Arc, Weak};
use std::thread::{self, JoinHandle}; use std::thread::{self, JoinHandle};
use std::collections::HashMap; use std::collections::HashMap;
use mio::*; use mio::*;
use mio::timer::{Timeout};
use mio::deprecated::{EventLoop, Handler, Sender, EventLoopBuilder};
use crossbeam::sync::chase_lev; use crossbeam::sync::chase_lev;
use slab::Slab; use slab::Slab;
use {IoError, IoHandler}; use {IoError, IoHandler};
use worker::{Worker, Work, WorkType}; use worker::{Worker, Work, WorkType};
use panics::*; use panics::*;
use parking_lot::{RwLock}; use parking_lot::{RwLock, Mutex};
use std::sync::{Condvar as SCondvar, Mutex as SMutex}; use std::sync::{Condvar as SCondvar, Mutex as SMutex};
use std::time::Duration;
/// Timer ID /// Timer ID
pub type TimerToken = usize; pub type TimerToken = usize;
@ -223,9 +226,9 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
type Timeout = Token; type Timeout = Token;
type Message = IoMessage<Message>; type Message = IoMessage<Message>;
fn ready(&mut self, _event_loop: &mut EventLoop<Self>, token: Token, events: EventSet) { fn ready(&mut self, _event_loop: &mut EventLoop<Self>, token: Token, events: Ready) {
let handler_index = token.as_usize() / TOKENS_PER_HANDLER; let handler_index = token.0 / TOKENS_PER_HANDLER;
let token_id = token.as_usize() % TOKENS_PER_HANDLER; let token_id = token.0 % TOKENS_PER_HANDLER;
if let Some(handler) = self.handlers.read().get(handler_index) { if let Some(handler) = self.handlers.read().get(handler_index) {
if events.is_hup() { if events.is_hup() {
self.worker_channel.push(Work { work_type: WorkType::Hup, token: token_id, handler: handler.clone(), handler_id: handler_index }); self.worker_channel.push(Work { work_type: WorkType::Hup, token: token_id, handler: handler.clone(), handler_id: handler_index });
@ -243,15 +246,15 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
} }
fn timeout(&mut self, event_loop: &mut EventLoop<Self>, token: Token) { fn timeout(&mut self, event_loop: &mut EventLoop<Self>, token: Token) {
let handler_index = token.as_usize() / TOKENS_PER_HANDLER; let handler_index = token.0 / TOKENS_PER_HANDLER;
let token_id = token.as_usize() % TOKENS_PER_HANDLER; let token_id = token.0 % TOKENS_PER_HANDLER;
if let Some(handler) = self.handlers.read().get(handler_index) { if let Some(handler) = self.handlers.read().get(handler_index) {
if let Some(timer) = self.timers.read().get(&token.as_usize()) { if let Some(timer) = self.timers.read().get(&token.0) {
if timer.once { if timer.once {
self.timers.write().remove(&token_id); self.timers.write().remove(&token_id);
event_loop.clear_timeout(timer.timeout); event_loop.clear_timeout(&timer.timeout);
} else { } else {
event_loop.timeout_ms(token, timer.delay).expect("Error re-registering user timer"); event_loop.timeout(token, Duration::from_millis(timer.delay)).expect("Error re-registering user timer");
} }
self.worker_channel.push(Work { work_type: WorkType::Timeout, token: token_id, handler: handler.clone(), handler_id: handler_index }); self.worker_channel.push(Work { work_type: WorkType::Timeout, token: token_id, handler: handler.clone(), handler_id: handler_index });
self.work_ready.notify_all(); self.work_ready.notify_all();
@ -277,18 +280,18 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
let to_remove: Vec<_> = timers.keys().cloned().filter(|timer_id| timer_id / TOKENS_PER_HANDLER == handler_id).collect(); let to_remove: Vec<_> = timers.keys().cloned().filter(|timer_id| timer_id / TOKENS_PER_HANDLER == handler_id).collect();
for timer_id in to_remove { for timer_id in to_remove {
let timer = timers.remove(&timer_id).expect("to_remove only contains keys from timers; qed"); let timer = timers.remove(&timer_id).expect("to_remove only contains keys from timers; qed");
event_loop.clear_timeout(timer.timeout); event_loop.clear_timeout(&timer.timeout);
} }
}, },
IoMessage::AddTimer { handler_id, token, delay, once } => { IoMessage::AddTimer { handler_id, token, delay, once } => {
let timer_id = token + handler_id * TOKENS_PER_HANDLER; let timer_id = token + handler_id * TOKENS_PER_HANDLER;
let timeout = event_loop.timeout_ms(Token(timer_id), delay).expect("Error registering user timer"); let timeout = event_loop.timeout(Token(timer_id), Duration::from_millis(delay)).expect("Error registering user timer");
self.timers.write().insert(timer_id, UserTimer { delay: delay, timeout: timeout, once: once }); self.timers.write().insert(timer_id, UserTimer { delay: delay, timeout: timeout, once: once });
}, },
IoMessage::RemoveTimer { handler_id, token } => { IoMessage::RemoveTimer { handler_id, token } => {
let timer_id = token + handler_id * TOKENS_PER_HANDLER; let timer_id = token + handler_id * TOKENS_PER_HANDLER;
if let Some(timer) = self.timers.write().remove(&timer_id) { if let Some(timer) = self.timers.write().remove(&timer_id) {
event_loop.clear_timeout(timer.timeout); event_loop.clear_timeout(&timer.timeout);
} }
}, },
IoMessage::RegisterStream { handler_id, token } => { IoMessage::RegisterStream { handler_id, token } => {
@ -302,7 +305,7 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
// unregister a timer associated with the token (if any) // unregister a timer associated with the token (if any)
let timer_id = token + handler_id * TOKENS_PER_HANDLER; let timer_id = token + handler_id * TOKENS_PER_HANDLER;
if let Some(timer) = self.timers.write().remove(&timer_id) { if let Some(timer) = self.timers.write().remove(&timer_id) {
event_loop.clear_timeout(timer.timeout); event_loop.clear_timeout(&timer.timeout);
} }
} }
}, },
@ -391,7 +394,7 @@ impl<Message> IoChannel<Message> where Message: Send + Clone + Sync + 'static {
pub struct IoService<Message> where Message: Send + Sync + Clone + 'static { pub struct IoService<Message> where Message: Send + Sync + Clone + 'static {
panic_handler: Arc<PanicHandler>, panic_handler: Arc<PanicHandler>,
thread: Option<JoinHandle<()>>, thread: Option<JoinHandle<()>>,
host_channel: Sender<IoMessage<Message>>, host_channel: Mutex<Sender<IoMessage<Message>>>,
handlers: Arc<RwLock<Slab<Arc<IoHandler<Message>>, HandlerId>>>, handlers: Arc<RwLock<Slab<Arc<IoHandler<Message>>, HandlerId>>>,
} }
@ -405,9 +408,9 @@ impl<Message> IoService<Message> where Message: Send + Sync + Clone + 'static {
/// Starts IO event loop /// Starts IO event loop
pub fn start() -> Result<IoService<Message>, IoError> { pub fn start() -> Result<IoService<Message>, IoError> {
let panic_handler = PanicHandler::new_in_arc(); let panic_handler = PanicHandler::new_in_arc();
let mut config = EventLoopConfig::new(); let mut config = EventLoopBuilder::new();
config.messages_per_tick(1024); config.messages_per_tick(1024);
let mut event_loop = EventLoop::configured(config).expect("Error creating event loop"); let mut event_loop = config.build().expect("Error creating event loop");
let channel = event_loop.channel(); let channel = event_loop.channel();
let panic = panic_handler.clone(); let panic = panic_handler.clone();
let handlers = Arc::new(RwLock::new(Slab::new(MAX_HANDLERS))); let handlers = Arc::new(RwLock::new(Slab::new(MAX_HANDLERS)));
@ -421,14 +424,14 @@ impl<Message> IoService<Message> where Message: Send + Sync + Clone + 'static {
Ok(IoService { Ok(IoService {
panic_handler: panic_handler, panic_handler: panic_handler,
thread: Some(thread), thread: Some(thread),
host_channel: channel, host_channel: Mutex::new(channel),
handlers: handlers, handlers: handlers,
}) })
} }
/// Register an IO handler with the event loop. /// Register an IO handler with the event loop.

pub fn register_handler(&self, handler: Arc<IoHandler<Message>+Send>) -> Result<(), IoError> { pub fn register_handler(&self, handler: Arc<IoHandler<Message>+Send>) -> Result<(), IoError> {
try!(self.host_channel.send(IoMessage::AddHandler { try!(self.host_channel.lock().send(IoMessage::AddHandler {
handler: handler, handler: handler,
})); }));
Ok(()) Ok(())
@ -436,20 +439,20 @@ impl<Message> IoService<Message> where Message: Send + Sync + Clone + 'static {
/// Send a message over the network. Normally `HostIo::send` should be used. This can be used from non-io threads. /// Send a message over the network. Normally `HostIo::send` should be used. This can be used from non-io threads.
pub fn send_message(&self, message: Message) -> Result<(), IoError> { pub fn send_message(&self, message: Message) -> Result<(), IoError> {
try!(self.host_channel.send(IoMessage::UserMessage(message))); try!(self.host_channel.lock().send(IoMessage::UserMessage(message)));
Ok(()) Ok(())
} }
/// Create a new message channel /// Create a new message channel
pub fn channel(&self) -> IoChannel<Message> { pub fn channel(&self) -> IoChannel<Message> {
IoChannel::new(self.host_channel.clone(), Arc::downgrade(&self.handlers)) IoChannel::new(self.host_channel.lock().clone(), Arc::downgrade(&self.handlers))
} }
} }
impl<Message> Drop for IoService<Message> where Message: Send + Sync + Clone { impl<Message> Drop for IoService<Message> where Message: Send + Sync + Clone {
fn drop(&mut self) { fn drop(&mut self) {
trace!(target: "shutdown", "[IoService] Closing..."); trace!(target: "shutdown", "[IoService] Closing...");
self.host_channel.send(IoMessage::Shutdown).unwrap_or_else(|e| warn!("Error on IO service shutdown: {:?}", e)); self.host_channel.lock().send(IoMessage::Shutdown).unwrap_or_else(|e| warn!("Error on IO service shutdown: {:?}", e));
self.thread.take().unwrap().join().ok(); self.thread.take().unwrap().join().ok();
trace!(target: "shutdown", "[IoService] Closed."); trace!(target: "shutdown", "[IoService] Closed.");
} }
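Two mio 0.6 adjustments stand out in this file: EventLoopConfig becomes EventLoopBuilder, and host_channel is now a Mutex around the Sender, presumably because the new deprecated Sender is Send but no longer Sync. Wrapping a Send-but-not-Sync handle in a Mutex is enough to make the owning service shareable again, at the price of a lock on every send; a minimal sketch with a stand-in channel type:

use std::marker::PhantomData;
use std::sync::Mutex;

// Stand-in for a channel handle that is Send but not Sync
// (the raw-pointer PhantomData removes the auto traits; Send is re-added).
struct Chan(PhantomData<*const ()>);
unsafe impl Send for Chan {}

// Mutex<Chan> is Sync whenever Chan is Send, so the containing struct is Sync again.
struct Service {
    host_channel: Mutex<Chan>,
}

fn assert_sync<T: Sync>() {}

fn main() {
    assert_sync::<Service>();
    let service = Service { host_channel: Mutex::new(Chan(PhantomData)) };
    let _chan = service.host_channel.lock().unwrap(); // every use pays a lock, as in send_message above
}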

View File

@ -8,7 +8,8 @@ authors = ["Ethcore <admin@ethcore.io>"]
[dependencies] [dependencies]
log = "0.3" log = "0.3"
mio = { git = "https://github.com/ethcore/mio", branch = "v0.5.x" } mio = { git = "https://github.com/carllerche/mio" }
bytes = "0.3.0"
rand = "0.3.12" rand = "0.3.12"
time = "0.1.34" time = "0.1.34"
tiny-keccak = "1.0" tiny-keccak = "1.0"

View File

@ -18,7 +18,8 @@ use std::sync::Arc;
use std::collections::VecDeque; use std::collections::VecDeque;
use std::net::SocketAddr; use std::net::SocketAddr;
use std::sync::atomic::{AtomicBool, Ordering as AtomicOrdering}; use std::sync::atomic::{AtomicBool, Ordering as AtomicOrdering};
use mio::{Handler, Token, EventSet, EventLoop, PollOpt, TryRead, TryWrite}; use mio::{Token, Ready, PollOpt};
use mio::deprecated::{Handler, EventLoop, TryRead, TryWrite};
use mio::tcp::*; use mio::tcp::*;
use util::hash::*; use util::hash::*;
use util::sha3::*; use util::sha3::*;
@ -34,6 +35,7 @@ use rcrypto::aessafe::*;
use rcrypto::symmetriccipher::*; use rcrypto::symmetriccipher::*;
use rcrypto::buffer::*; use rcrypto::buffer::*;
use tiny_keccak::Keccak; use tiny_keccak::Keccak;
use bytes::{Buf, MutBuf};
use crypto; use crypto;
const ENCRYPTED_HEADER_LEN: usize = 32; const ENCRYPTED_HEADER_LEN: usize = 32;
@ -57,7 +59,7 @@ pub struct GenericConnection<Socket: GenericSocket> {
/// Send out packets FIFO /// Send out packets FIFO
send_queue: VecDeque<Cursor<Bytes>>, send_queue: VecDeque<Cursor<Bytes>>,
/// Event flags this connection expects /// Event flags this connection expects
interest: EventSet, interest: Ready,
/// Shared network statistics /// Shared network statistics
stats: Arc<NetworkStats>, stats: Arc<NetworkStats>,
/// Registered flag /// Registered flag
@ -81,8 +83,9 @@ impl<Socket: GenericSocket> GenericConnection<Socket> {
let sock_ref = <Socket as Read>::by_ref(&mut self.socket); let sock_ref = <Socket as Read>::by_ref(&mut self.socket);
loop { loop {
let max = self.rec_size - self.rec_buf.len(); let max = self.rec_size - self.rec_buf.len();
match sock_ref.take(max as u64).try_read_buf(&mut self.rec_buf) { match sock_ref.take(max as u64).try_read(unsafe { self.rec_buf.mut_bytes() }) {
Ok(Some(size)) if size != 0 => { Ok(Some(size)) if size != 0 => {
unsafe { self.rec_buf.advance(size); }
self.stats.inc_recv(size); self.stats.inc_recv(size);
trace!(target:"network", "{}: Read {} of {} bytes", self.token, self.rec_buf.len(), self.rec_size); trace!(target:"network", "{}: Read {} of {} bytes", self.token, self.rec_buf.len(), self.rec_size);
if self.rec_size != 0 && self.rec_buf.len() == self.rec_size { if self.rec_size != 0 && self.rec_buf.len() == self.rec_size {
@ -109,7 +112,7 @@ impl<Socket: GenericSocket> GenericConnection<Socket> {
trace!(target:"network", "{}: Sending {} bytes", self.token, data.len()); trace!(target:"network", "{}: Sending {} bytes", self.token, data.len());
self.send_queue.push_back(Cursor::new(data)); self.send_queue.push_back(Cursor::new(data));
if !self.interest.is_writable() { if !self.interest.is_writable() {
self.interest.insert(EventSet::writable()); self.interest.insert(Ready::writable());
} }
io.update_registration(self.token).ok(); io.update_registration(self.token).ok();
} }
@ -128,16 +131,19 @@ impl<Socket: GenericSocket> GenericConnection<Socket> {
{ {
let buf = self.send_queue.front_mut().unwrap(); let buf = self.send_queue.front_mut().unwrap();
let send_size = buf.get_ref().len(); let send_size = buf.get_ref().len();
if (buf.position() as usize) >= send_size { let pos = buf.position() as usize;
if (pos as usize) >= send_size {
warn!(target:"net", "Unexpected connection data"); warn!(target:"net", "Unexpected connection data");
return Ok(WriteStatus::Complete) return Ok(WriteStatus::Complete)
} }
match self.socket.try_write_buf(buf) { let buf = buf as &mut Buf;
Ok(Some(size)) if (buf.position() as usize) < send_size => { match self.socket.try_write(buf.bytes()) {
Ok(Some(size)) if (pos + size) < send_size => {
buf.advance(size);
self.stats.inc_send(size); self.stats.inc_send(size);
Ok(WriteStatus::Ongoing) Ok(WriteStatus::Ongoing)
}, },
Ok(Some(size)) if (buf.position() as usize) == send_size => { Ok(Some(size)) if (pos + size) == send_size => {
self.stats.inc_send(size); self.stats.inc_send(size);
trace!(target:"network", "{}: Wrote {} bytes", self.token, send_size); trace!(target:"network", "{}: Wrote {} bytes", self.token, send_size);
Ok(WriteStatus::Complete) Ok(WriteStatus::Complete)
@ -151,7 +157,7 @@ impl<Socket: GenericSocket> GenericConnection<Socket> {
self.send_queue.pop_front(); self.send_queue.pop_front();
} }
if self.send_queue.is_empty() { if self.send_queue.is_empty() {
self.interest.remove(EventSet::writable()); self.interest.remove(Ready::writable());
try!(io.update_registration(self.token)); try!(io.update_registration(self.token));
} }
Ok(r) Ok(r)
@ -171,7 +177,7 @@ impl Connection {
send_queue: VecDeque::new(), send_queue: VecDeque::new(),
rec_buf: Bytes::new(), rec_buf: Bytes::new(),
rec_size: 0, rec_size: 0,
interest: EventSet::hup() | EventSet::readable(), interest: Ready::hup() | Ready::readable(),
stats: stats, stats: stats,
registered: AtomicBool::new(false), registered: AtomicBool::new(false),
} }
@ -205,7 +211,7 @@ impl Connection {
rec_buf: Vec::new(), rec_buf: Vec::new(),
rec_size: 0, rec_size: 0,
send_queue: self.send_queue.clone(), send_queue: self.send_queue.clone(),
interest: EventSet::hup(), interest: Ready::hup(),
stats: self.stats.clone(), stats: self.stats.clone(),
registered: AtomicBool::new(false), registered: AtomicBool::new(false),
}) })
@ -499,7 +505,7 @@ mod tests {
use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicBool;
use super::super::stats::*; use super::super::stats::*;
use std::io::{Read, Write, Error, Cursor, ErrorKind}; use std::io::{Read, Write, Error, Cursor, ErrorKind};
use mio::{EventSet}; use mio::{Ready};
use std::collections::VecDeque; use std::collections::VecDeque;
use util::bytes::*; use util::bytes::*;
use devtools::*; use devtools::*;
@ -545,7 +551,7 @@ mod tests {
send_queue: VecDeque::new(), send_queue: VecDeque::new(),
rec_buf: Bytes::new(), rec_buf: Bytes::new(),
rec_size: 0, rec_size: 0,
interest: EventSet::hup() | EventSet::readable(), interest: Ready::hup() | Ready::readable(),
stats: Arc::<NetworkStats>::new(NetworkStats::new()), stats: Arc::<NetworkStats>::new(NetworkStats::new()),
registered: AtomicBool::new(false), registered: AtomicBool::new(false),
} }
@ -568,7 +574,7 @@ mod tests {
send_queue: VecDeque::new(), send_queue: VecDeque::new(),
rec_buf: Bytes::new(), rec_buf: Bytes::new(),
rec_size: 0, rec_size: 0,
interest: EventSet::hup() | EventSet::readable(), interest: Ready::hup() | Ready::readable(),
stats: Arc::<NetworkStats>::new(NetworkStats::new()), stats: Arc::<NetworkStats>::new(NetworkStats::new()),
registered: AtomicBool::new(false), registered: AtomicBool::new(false),
} }
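With mio 0.6 the try_read_buf and try_write_buf helpers are gone, so the connection reads into the buffer's spare capacity via MutBuf::mut_bytes() and advances it by hand (hence the unsafe blocks), and writes by slicing the send cursor itself. A safe-std sketch of the same read-then-advance pattern, with a Vec standing in for the bytes crate's buffer:

use std::io::Read;

// Read at most `max` bytes into the buffer's tail and keep only what arrived.
fn read_chunk<R: Read>(sock: &mut R, buf: &mut Vec<u8>, max: usize) -> std::io::Result<usize> {
    let start = buf.len();
    buf.resize(start + max, 0);             // reserve space for this chunk
    let n = sock.read(&mut buf[start..])?;  // may read fewer than `max` bytes
    buf.truncate(start + n);                // advance by exactly what was read
    Ok(n)
}

fn main() {
    let mut src: &[u8] = b"hello world";
    let mut buf = Vec::new();
    while read_chunk(&mut src, &mut buf, 4).unwrap() > 0 {}
    assert_eq!(buf, b"hello world".to_vec());
}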

View File

@ -20,6 +20,7 @@ use std::collections::{HashSet, HashMap, BTreeMap, VecDeque};
use std::mem; use std::mem;
use std::default::Default; use std::default::Default;
use mio::*; use mio::*;
use mio::deprecated::{Handler, EventLoop};
use mio::udp::*; use mio::udp::*;
use util::sha3::*; use util::sha3::*;
use time; use time;
@ -57,6 +58,7 @@ pub struct NodeEntry {
pub struct BucketEntry { pub struct BucketEntry {
pub address: NodeEntry, pub address: NodeEntry,
pub id_hash: H256,
pub timeout: Option<u64>, pub timeout: Option<u64>,
} }
@ -85,6 +87,7 @@ struct Datagramm {
pub struct Discovery { pub struct Discovery {
id: NodeId, id: NodeId,
id_hash: H256,
secret: Secret, secret: Secret,
public_endpoint: NodeEndpoint, public_endpoint: NodeEndpoint,
udp_socket: UdpSocket, udp_socket: UdpSocket,
@ -106,9 +109,10 @@ pub struct TableUpdates {
impl Discovery { impl Discovery {
pub fn new(key: &KeyPair, listen: SocketAddr, public: NodeEndpoint, token: StreamToken, allow_ips: AllowIP) -> Discovery { pub fn new(key: &KeyPair, listen: SocketAddr, public: NodeEndpoint, token: StreamToken, allow_ips: AllowIP) -> Discovery {
let socket = UdpSocket::bound(&listen).expect("Error binding UDP socket"); let socket = UdpSocket::bind(&listen).expect("Error binding UDP socket");
Discovery { Discovery {
id: key.public().clone(), id: key.public().clone(),
id_hash: key.public().sha3(),
secret: key.secret().clone(), secret: key.secret().clone(),
public_endpoint: public, public_endpoint: public,
token: token, token: token,
@ -150,8 +154,9 @@ impl Discovery {
fn update_node(&mut self, e: NodeEntry) { fn update_node(&mut self, e: NodeEntry) {
trace!(target: "discovery", "Inserting {:?}", &e); trace!(target: "discovery", "Inserting {:?}", &e);
let id_hash = e.id.sha3();
let ping = { let ping = {
let mut bucket = self.node_buckets.get_mut(Discovery::distance(&self.id, &e.id) as usize).unwrap(); let mut bucket = self.node_buckets.get_mut(Discovery::distance(&self.id_hash, &id_hash) as usize).unwrap();
let updated = if let Some(node) = bucket.nodes.iter_mut().find(|n| n.address.id == e.id) { let updated = if let Some(node) = bucket.nodes.iter_mut().find(|n| n.address.id == e.id) {
node.address = e.clone(); node.address = e.clone();
node.timeout = None; node.timeout = None;
@ -159,7 +164,7 @@ impl Discovery {
} else { false }; } else { false };
if !updated { if !updated {
bucket.nodes.push_front(BucketEntry { address: e, timeout: None }); bucket.nodes.push_front(BucketEntry { address: e, timeout: None, id_hash: id_hash, });
} }
if bucket.nodes.len() > BUCKET_SIZE { if bucket.nodes.len() > BUCKET_SIZE {
@ -174,7 +179,7 @@ impl Discovery {
} }
fn clear_ping(&mut self, id: &NodeId) { fn clear_ping(&mut self, id: &NodeId) {
let mut bucket = self.node_buckets.get_mut(Discovery::distance(&self.id, id) as usize).unwrap(); let mut bucket = self.node_buckets.get_mut(Discovery::distance(&self.id_hash, &id.sha3()) as usize).unwrap();
if let Some(node) = bucket.nodes.iter_mut().find(|n| &n.address.id == id) { if let Some(node) = bucket.nodes.iter_mut().find(|n| &n.address.id == id) {
node.timeout = None; node.timeout = None;
} }
@ -224,8 +229,8 @@ impl Discovery {
self.discovery_round += 1; self.discovery_round += 1;
} }
fn distance(a: &NodeId, b: &NodeId) -> u32 { fn distance(a: &H256, b: &H256) -> u32 {
let d = a.sha3() ^ b.sha3(); let d = *a ^ *b;
let mut ret:u32 = 0; let mut ret:u32 = 0;
for i in 0..32 { for i in 0..32 {
let mut v: u8 = d[i]; let mut v: u8 = d[i];
@ -279,11 +284,12 @@ impl Discovery {
fn nearest_node_entries(target: &NodeId, buckets: &[NodeBucket]) -> Vec<NodeEntry> { fn nearest_node_entries(target: &NodeId, buckets: &[NodeBucket]) -> Vec<NodeEntry> {
let mut found: BTreeMap<u32, Vec<&NodeEntry>> = BTreeMap::new(); let mut found: BTreeMap<u32, Vec<&NodeEntry>> = BTreeMap::new();
let mut count = 0; let mut count = 0;
let target_hash = target.sha3();
// Sort nodes by distance to target // Sort nodes by distance to target
for bucket in buckets { for bucket in buckets {
for node in &bucket.nodes { for node in &bucket.nodes {
let distance = Discovery::distance(target, &node.address.id); let distance = Discovery::distance(&target_hash, &node.id_hash);
found.entry(distance).or_insert_with(Vec::new).push(&node.address); found.entry(distance).or_insert_with(Vec::new).push(&node.address);
if count == BUCKET_SIZE { if count == BUCKET_SIZE {
// delete the most distant element // delete the most distant element
@ -527,15 +533,15 @@ impl Discovery {
} }
pub fn register_socket<Host:Handler>(&self, event_loop: &mut EventLoop<Host>) -> Result<(), NetworkError> { pub fn register_socket<Host:Handler>(&self, event_loop: &mut EventLoop<Host>) -> Result<(), NetworkError> {
event_loop.register(&self.udp_socket, Token(self.token), EventSet::all(), PollOpt::edge()).expect("Error registering UDP socket"); event_loop.register(&self.udp_socket, Token(self.token), Ready::all(), PollOpt::edge()).expect("Error registering UDP socket");
Ok(()) Ok(())
} }
pub fn update_registration<Host:Handler>(&self, event_loop: &mut EventLoop<Host>) -> Result<(), NetworkError> { pub fn update_registration<Host:Handler>(&self, event_loop: &mut EventLoop<Host>) -> Result<(), NetworkError> {
let registration = if !self.send_queue.is_empty() { let registration = if !self.send_queue.is_empty() {
EventSet::readable() | EventSet::writable() Ready::readable() | Ready::writable()
} else { } else {
EventSet::readable() Ready::readable()
}; };
event_loop.reregister(&self.udp_socket, Token(self.token), registration, PollOpt::edge()).expect("Error reregistering UDP socket"); event_loop.reregister(&self.udp_socket, Token(self.token), registration, PollOpt::edge()).expect("Error reregistering UDP socket");
Ok(()) Ok(())
@ -546,6 +552,7 @@ impl Discovery {
mod tests { mod tests {
use super::*; use super::*;
use util::hash::*; use util::hash::*;
use util::sha3::*;
use std::net::*; use std::net::*;
use node_table::*; use node_table::*;
use std::str::FromStr; use std::str::FromStr;
@ -626,7 +633,8 @@ mod tests {
for _ in 0..(16 + 10) { for _ in 0..(16 + 10) {
buckets[0].nodes.push_back(BucketEntry { buckets[0].nodes.push_back(BucketEntry {
address: NodeEntry { id: NodeId::new(), endpoint: ep.clone() }, address: NodeEntry { id: NodeId::new(), endpoint: ep.clone() },
timeout: None timeout: None,
id_hash: NodeId::new().sha3(),
}); });
} }
let nearest = Discovery::nearest_node_entries(&NodeId::new(), &buckets); let nearest = Discovery::nearest_node_entries(&NodeId::new(), &buckets);
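The discovery changes cache the keccak hash of every node ID (id_hash on Discovery and on each BucketEntry), so bucket distance becomes an XOR of cached hashes instead of re-hashing both IDs on every lookup. A standalone sketch of that kind of XOR distance over plain 32-byte arrays; the project uses its H256 type and sha3, and the bit-counting loop body sits outside the hunk, so this follows the common per-byte bit-length form:

// For every byte of a XOR b, count the right shifts needed to reach zero
// (the byte's bit length) and sum the counts.
fn distance(a: &[u8; 32], b: &[u8; 32]) -> u32 {
    let mut ret: u32 = 0;
    for i in 0..32 {
        let mut v: u8 = a[i] ^ b[i];
        while v != 0 {
            v >>= 1;
            ret += 1;
        }
    }
    ret
}

fn main() {
    let zero = [0u8; 32];
    let mut low = [0u8; 32];
    low[31] = 0x01;
    let mut high = [0u8; 32];
    high[0] = 0x80;
    assert_eq!(distance(&zero, &zero), 0);
    assert_eq!(distance(&zero, &low), 1);
    assert_eq!(distance(&zero, &high), 8);
    // hashing each ID once and reusing the result is what the id_hash fields buy:
    // distance turns into a cheap XOR-and-count over already computed hashes.
}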

View File

@ -26,6 +26,7 @@ use std::io::{Read, Write};
use std::fs; use std::fs;
use ethkey::{KeyPair, Secret, Random, Generator}; use ethkey::{KeyPair, Secret, Random, Generator};
use mio::*; use mio::*;
use mio::deprecated::{EventLoop};
use mio::tcp::*; use mio::tcp::*;
use util::hash::*; use util::hash::*;
use util::Hashable; use util::Hashable;
@ -61,7 +62,7 @@ const SYS_TIMER: usize = LAST_SESSION + 1;
// Timeouts // Timeouts
const MAINTENANCE_TIMEOUT: u64 = 1000; const MAINTENANCE_TIMEOUT: u64 = 1000;
const DISCOVERY_REFRESH_TIMEOUT: u64 = 7200; const DISCOVERY_REFRESH_TIMEOUT: u64 = 60_000;
const DISCOVERY_ROUND_TIMEOUT: u64 = 300; const DISCOVERY_ROUND_TIMEOUT: u64 = 300;
const NODE_TABLE_TIMEOUT: u64 = 300_000; const NODE_TABLE_TIMEOUT: u64 = 300_000;
@ -744,10 +745,9 @@ impl Host {
trace!(target: "network", "Accepting incoming connection"); trace!(target: "network", "Accepting incoming connection");
loop { loop {
let socket = match self.tcp_listener.lock().accept() { let socket = match self.tcp_listener.lock().accept() {
Ok(None) => break, Ok((sock, _addr)) => sock,
Ok(Some((sock, _addr))) => sock,
Err(e) => { Err(e) => {
warn!("Error accepting connection: {:?}", e); debug!(target: "network", "Error accepting connection: {:?}", e);
break break
}, },
}; };
@ -801,29 +801,31 @@ impl Host {
}, },
Ok(SessionData::Ready) => { Ok(SessionData::Ready) => {
self.num_sessions.fetch_add(1, AtomicOrdering::SeqCst); self.num_sessions.fetch_add(1, AtomicOrdering::SeqCst);
if !s.info.originated { let session_count = self.session_count();
let session_count = self.session_count(); let (min_peers, max_peers, reserved_only) = {
let (max_peers, reserved_only) = { let info = self.info.read();
let info = self.info.read(); let mut max_peers = info.config.max_peers;
let mut max_peers = info.config.max_peers; for cap in s.info.capabilities.iter() {
for cap in s.info.capabilities.iter() { if let Some(num) = info.config.reserved_protocols.get(&cap.protocol) {
if let Some(num) = info.config.reserved_protocols.get(&cap.protocol) { max_peers += *num;
max_peers += *num; break;
break;
}
}
(max_peers, info.config.non_reserved_mode == NonReservedPeerMode::Deny)
};
if session_count >= max_peers as usize || reserved_only {
// only proceed if the connecting peer is reserved.
if !self.reserved_nodes.read().contains(s.id().unwrap()) {
s.disconnect(io, DisconnectReason::TooManyPeers);
return;
} }
} }
(info.config.min_peers as usize, max_peers as usize, info.config.non_reserved_mode == NonReservedPeerMode::Deny)
};
// Add it to the node table if reserved_only ||
(s.info.originated && session_count >= min_peers) ||
(!s.info.originated && session_count >= max_peers) {
// only proceed if the connecting peer is reserved.
if !self.reserved_nodes.read().contains(s.id().unwrap()) {
s.disconnect(io, DisconnectReason::TooManyPeers);
return;
}
}
// Add it to the node table
if !s.info.originated {
if let Ok(address) = s.remote_addr() { if let Ok(address) = s.remote_addr() {
let entry = NodeEntry { id: s.id().unwrap().clone(), endpoint: NodeEndpoint { address: address, udp_port: address.port() } }; let entry = NodeEntry { id: s.id().unwrap().clone(), endpoint: NodeEndpoint { address: address, udp_port: address.port() } };
self.nodes.write().add_node(Node::new(entry.id.clone(), entry.endpoint.clone())); self.nodes.write().add_node(Node::new(entry.id.clone(), entry.endpoint.clone()));
@ -1101,7 +1103,7 @@ impl IoHandler<NetworkIoMessage> for Host {
} }
} }
DISCOVERY => self.discovery.lock().as_ref().unwrap().register_socket(event_loop).expect("Error registering discovery socket"), DISCOVERY => self.discovery.lock().as_ref().unwrap().register_socket(event_loop).expect("Error registering discovery socket"),
TCP_ACCEPT => event_loop.register(&*self.tcp_listener.lock(), Token(TCP_ACCEPT), EventSet::all(), PollOpt::edge()).expect("Error registering stream"), TCP_ACCEPT => event_loop.register(&*self.tcp_listener.lock(), Token(TCP_ACCEPT), Ready::all(), PollOpt::edge()).expect("Error registering stream"),
_ => warn!("Unexpected stream registration") _ => warn!("Unexpected stream registration")
} }
} }
@ -1129,7 +1131,7 @@ impl IoHandler<NetworkIoMessage> for Host {
} }
} }
DISCOVERY => self.discovery.lock().as_ref().unwrap().update_registration(event_loop).expect("Error reregistering discovery socket"), DISCOVERY => self.discovery.lock().as_ref().unwrap().update_registration(event_loop).expect("Error reregistering discovery socket"),
TCP_ACCEPT => event_loop.reregister(&*self.tcp_listener.lock(), Token(TCP_ACCEPT), EventSet::all(), PollOpt::edge()).expect("Error reregistering stream"), TCP_ACCEPT => event_loop.reregister(&*self.tcp_listener.lock(), Token(TCP_ACCEPT), Ready::all(), PollOpt::edge()).expect("Error reregistering stream"),
_ => warn!("Unexpected stream update") _ => warn!("Unexpected stream update")
} }
} }
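The session-ready hunk replaces the incoming-only cap with a single admission rule: a non-reserved peer is dropped when the node runs reserved-only, when an outgoing (originated) session would exceed min_peers, or when an incoming one would exceed max_peers. A small sketch of that predicate, with plain parameters standing in for the host's config and session state:

// Mirrors the admission rule above; parameter names are stand-ins for the
// host's config (min_peers/max_peers, reserved-only mode) and session info.
fn should_disconnect(originated: bool,
                     session_count: usize,
                     min_peers: usize,
                     max_peers: usize,
                     reserved_only: bool,
                     peer_is_reserved: bool) -> bool {
    let over_limit = reserved_only
        || (originated && session_count >= min_peers)
        || (!originated && session_count >= max_peers);
    // reserved peers are always allowed through
    over_limit && !peer_is_reserved
}

fn main() {
    // an incoming session below max_peers is kept
    assert!(!should_disconnect(false, 20, 10, 25, false, false));
    // an outgoing session at or above min_peers is dropped unless reserved
    assert!(should_disconnect(true, 12, 10, 25, false, false));
    assert!(!should_disconnect(true, 12, 10, 25, false, true));
}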

View File

@ -70,6 +70,7 @@ extern crate slab;
extern crate ethkey; extern crate ethkey;
extern crate ethcrypto as crypto; extern crate ethcrypto as crypto;
extern crate rlp; extern crate rlp;
extern crate bytes;
#[macro_use] #[macro_use]
extern crate log; extern crate log;

View File

@ -19,6 +19,7 @@ use std::net::SocketAddr;
use std::cmp::Ordering; use std::cmp::Ordering;
use std::sync::*; use std::sync::*;
use mio::*; use mio::*;
use mio::deprecated::{Handler, EventLoop};
use mio::tcp::*; use mio::tcp::*;
use util::hash::*; use util::hash::*;
use rlp::*; use rlp::*;

View File

@ -133,7 +133,7 @@ impl<'db> TrieDB<'db> {
} }
/// Get the data of the root node. /// Get the data of the root node.
fn root_data<'a, R: 'a + Recorder>(&self, r: &'a mut R) -> super::Result<DBValue> { fn root_data<R: Recorder>(&self, r: &mut R) -> super::Result<DBValue> {
self.db.get(self.root).ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root))) self.db.get(self.root).ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root)))
.map(|node| { r.record(self.root, &*node, 0); node }) .map(|node| { r.record(self.root, &*node, 0); node })
} }