Merge remote-tracking branch 'parity/master'

keorn 2016-10-30 20:45:28 +00:00
commit 37ca1fa497
81 changed files with 1093 additions and 385 deletions


@ -3,9 +3,9 @@ stages:
- test
variables:
GIT_DEPTH: "3"
SIMPLECOV: "true"
SIMPLECOV: "true"
RUST_BACKTRACE: "1"
RUSTFLAGS: "-D warnings"
RUSTFLAGS: ""
cache:
key: "$CI_BUILD_NAME/$CI_BUILD_REF_NAME"
untracked: true
@ -26,7 +26,7 @@ linux-stable:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
- md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-linux-gnu/parity.md5 --body parity.md5
@ -56,7 +56,7 @@ linux-stable-14.04:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_amd64.deb"
- md5sum "parity_"$VER"_amd64.deb" >> "parity_"$VER"_amd64.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-ubuntu_14_04-gnu/parity.md5 --body parity.md5
@ -121,7 +121,7 @@ linux-centos:
- cargo build --release --verbose
- strip target/release/parity
- md5sum target/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-unknown-centos-gnu/parity.md5 --body parity.md5
@ -158,7 +158,7 @@ linux-armv7:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
- md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity --body target/armv7-unknown-linux-gnueabihf/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity.md5 --body parity.md5
@ -198,7 +198,7 @@ linux-arm:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_armhf.deb"
- md5sum "parity_"$VER"_armhf.deb" >> "parity_"$VER"_armhf.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity --body target/arm-unknown-linux-gnueabihf/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity.md5 --body parity.md5
@ -231,8 +231,8 @@ linux-armv6:
- cat .cargo/config
- cargo build --target arm-unknown-linux-gnueabi --release --verbose
- arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity
- md5sum target/arm-unknown-linux-gnueabi/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key
- md5sum target/arm-unknown-linux-gnueabi/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity --body target/arm-unknown-linux-gnueabi/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity.md5 --body parity.md5
@ -270,7 +270,7 @@ linux-aarch64:
- export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n")
- dpkg-deb -b deb "parity_"$VER"_arm64.deb"
- md5sum "parity_"$VER"_arm64.deb" >> "parity_"$VER"_arm64.deb.md5"
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity --body target/aarch64-unknown-linux-gnu/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity.md5 --body parity.md5
@ -295,7 +295,7 @@ darwin:
- cargo build --release --verbose
- rm -rf parity.md5
- md5sum target/release/parity >> parity.md5
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_access_key_id $s3_key
- aws configure set aws_secret_access_key $s3_secret
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-apple-darwin/parity --body target/release/parity
- aws s3api put-object --bucket builds-parity --key $CI_BUILD_REF_NAME/x86_64-apple-darwin/parity.md5 --body parity.md5
@ -316,7 +316,7 @@ windows:
- set INCLUDE=C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Include;C:\vs2015\VC\include;C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt
- set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
- set RUST_BACKTRACE=1
- set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off -D warnings
- set RUSTFLAGS=%RUSTFLAGS% -Zorbit=off
- rustup default stable-x86_64-pc-windows-msvc
- cargo build --release --verbose
- curl -sL --url "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -o nsis\SimpleFC.dll
@ -380,8 +380,8 @@ test-windows:
before_script:
- git submodule update --init --recursive
script:
- export RUST_BACKTRACE=1
- ./test.sh --verbose
- set RUST_BACKTRACE=1
- PowerShell ./test.sh --verbose
tags:
- rust-windows
dependencies:
@ -391,9 +391,6 @@ js-release:
image: ethcore/javascript:latest
only:
- master
- beta
- tags
- stable
before_script:
- ./js/scripts/install-deps.sh
script:

Cargo.lock

@ -370,7 +370,7 @@ version = "1.4.0"
dependencies = [
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"mio 0.6.0 (git+https://github.com/carllerche/mio)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -451,6 +451,7 @@ name = "ethcore-network"
version = "1.4.0"
dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0",
"ethcore-io 1.4.0",
"ethcore-util 1.4.0",
@ -459,7 +460,7 @@ dependencies = [
"igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)",
"mio 0.6.0 (git+https://github.com/carllerche/mio)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.1.0",
@ -516,7 +517,7 @@ dependencies = [
"parity-ui 1.4.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
"ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)",
]
[[package]]
@ -886,6 +887,11 @@ name = "lazy_static"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "lazycell"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "libc"
version = "0.2.15"
@ -990,7 +996,7 @@ dependencies = [
[[package]]
name = "mio"
version = "0.6.0-dev"
source = "git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d#62ec763c9cc34d8a452ed0392c575c50ddd5fc8d"
source = "git+https://github.com/ethcore/mio?branch=timer-fix#31eccc40ece3d47abaefaf23bb2114033175b972"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1002,6 +1008,22 @@ dependencies = [
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "mio"
version = "0.6.0"
source = "git+https://github.com/carllerche/mio#9f17b70d6fecbf912168267ea74cf536f2cba705"
dependencies = [
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)",
"nix 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "miow"
version = "0.1.3"
@ -1213,7 +1235,7 @@ dependencies = [
[[package]]
name = "parity-ui-precompiled"
version = "1.4.0"
source = "git+https://github.com/ethcore/js-precompiled.git#ba726039185238d6fd604f092b089a7d52c0f436"
source = "git+https://github.com/ethcore/js-precompiled.git#eec3d41e6fd1a10e4d69470a9e8c2a7b1b464466"
dependencies = [
"parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1593,6 +1615,11 @@ name = "slab"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "slab"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "smallvec"
version = "0.1.8"
@ -1884,13 +1911,13 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "ws"
version = "0.5.2"
source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#00bd2134b07b4bc8ea47b7f6c7afce16bbe34c8f"
version = "0.5.3"
source = "git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable#0cd6c5e3e9d5e61a37d53eb8dcbad523dcc69314"
dependencies = [
"bytes 0.4.0-dev (git+https://github.com/carllerche/bytes)",
"httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.6.0-dev (git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d)",
"mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)",
@ -1984,6 +2011,7 @@ dependencies = [
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a"
"checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
"checksum lazycell 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ce12306c4739d86ee97c23139f3a34ddf0387bbf181bc7929d287025a8c3ef6b"
"checksum libc 0.2.15 (registry+https://github.com/rust-lang/crates.io-index)" = "23e3757828fa702a20072c37ff47938e9dd331b92fac6e223d26d4b7a55f7ee2"
"checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48"
"checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd"
@ -1996,7 +2024,8 @@ dependencies = [
"checksum miniz-sys 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "9d1f4d337a01c32e1f2122510fed46393d53ca35a7f429cb0450abaedfa3ed54"
"checksum mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)" = "<none>"
"checksum mio 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a637d1ca14eacae06296a008fa7ad955347e34efcb5891cfd8ba05491a37907e"
"checksum mio 0.6.0-dev (git+https://github.com/carllerche/mio?rev=62ec763c9cc34d8a452ed0392c575c50ddd5fc8d)" = "<none>"
"checksum mio 0.6.0 (git+https://github.com/carllerche/mio)" = "<none>"
"checksum mio 0.6.0-dev (git+https://github.com/ethcore/mio?branch=timer-fix)" = "<none>"
"checksum miow 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d5bfc6782530ac8ace97af10a540054a37126b63b0702ddaaa243b73b5745b9a"
"checksum msdos_time 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c04b68cc63a8480fb2550343695f7be72effdec953a9d4508161c3e69041c7d8"
"checksum nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)" = "<none>"
@ -2062,6 +2091,7 @@ dependencies = [
"checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
"checksum slab 0.2.0 (git+https://github.com/carllerche/slab?rev=5476efcafb)" = "<none>"
"checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
"checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
"checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"
"checksum solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "172382bac9424588d7840732b250faeeef88942e37b6e35317dce98cafdd75b2"
"checksum spmc 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "93bdab61c1a413e591c4d17388ffa859eaff2df27f1e13a5ec8b716700605adf"
@ -2099,7 +2129,7 @@ dependencies = [
"checksum webpki 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "813503a5985585e0812d430cd1328ee322f47f66629c8ed4ecab939cf9e92f91"
"checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4"
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
"checksum ws 0.5.2 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
"checksum ws 0.5.3 (git+https://github.com/ethcore/ws-rs.git?branch=mio-upstream-stable)" = "<none>"
"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
"checksum xml-rs 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "65e74b96bd3179209dc70a980da6df843dff09e46eee103a0376c0949257e3ef"
"checksum xmltree 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "472a9d37c7c53ab2391161df5b89b1f3bf76dab6ab150d7941ecbdd832282082"


@ -139,7 +139,7 @@ pub struct Client {
miner: Arc<Miner>,
sleep_state: Mutex<SleepState>,
liveness: AtomicBool,
io_channel: IoChannel<ClientIoMessage>,
io_channel: Mutex<IoChannel<ClientIoMessage>>,
notify: RwLock<Vec<Weak<ChainNotify>>>,
queue_transactions: AtomicUsize,
last_hashes: RwLock<VecDeque<H256>>,
@ -235,7 +235,7 @@ impl Client {
import_lock: Mutex::new(()),
panic_handler: panic_handler,
miner: miner,
io_channel: message_channel,
io_channel: Mutex::new(message_channel),
notify: RwLock::new(Vec::new()),
queue_transactions: AtomicUsize::new(0),
last_hashes: RwLock::new(VecDeque::new()),
@ -1139,7 +1139,7 @@ impl BlockChainClient for Client {
debug!("Ignoring {} transactions: queue is full", transactions.len());
} else {
let len = transactions.len();
match self.io_channel.send(ClientIoMessage::NewTransactions(transactions)) {
match self.io_channel.lock().send(ClientIoMessage::NewTransactions(transactions)) {
Ok(_) => {
self.queue_transactions.fetch_add(len, AtomicOrdering::SeqCst);
}


@ -130,7 +130,7 @@ impl BanningTransactionQueue {
// Ban sender
let sender_banned = self.ban_sender(sender);
// Ban recipient and codehash
let is_banned = sender_banned || match transaction.action {
let recipient_or_code_banned = match transaction.action {
Action::Call(recipient) => {
self.ban_recipient(recipient)
},
@ -138,7 +138,7 @@ impl BanningTransactionQueue {
self.ban_codehash(transaction.data.sha3())
},
};
is_banned
sender_banned || recipient_or_code_banned
},
None => false,
}


@ -110,6 +110,7 @@ impl PartialOrd for TransactionOrigin {
}
impl Ord for TransactionOrigin {
#[cfg_attr(feature="dev", allow(match_same_arms))]
fn cmp(&self, other: &TransactionOrigin) -> Ordering {
if *other == *self {
return Ordering::Equal;


@ -178,7 +178,7 @@ impl Account {
CodeState::Hash => {
let code_hash = try!(rlp.val_at(3));
if let Some(code) = code_map.get(&code_hash) {
acct_db.emplace(code_hash.clone(), DBValue::from_slice(&code));
acct_db.emplace(code_hash.clone(), DBValue::from_slice(code));
}
(code_hash, None)


@ -213,7 +213,7 @@ pub struct Service {
restoration: Mutex<Option<Restoration>>,
snapshot_root: PathBuf,
db_config: DatabaseConfig,
io_channel: Channel,
io_channel: Mutex<Channel>,
pruning: Algorithm,
status: Mutex<RestorationStatus>,
reader: RwLock<Option<LooseReader>>,
@ -233,7 +233,7 @@ impl Service {
restoration: Mutex::new(None),
snapshot_root: params.snapshot_root,
db_config: params.db_config,
io_channel: params.channel,
io_channel: Mutex::new(params.channel),
pruning: params.pruning,
status: Mutex::new(RestorationStatus::Inactive),
reader: RwLock::new(None),
@ -567,7 +567,7 @@ impl SnapshotService for Service {
}
fn begin_restore(&self, manifest: ManifestData) {
if let Err(e) = self.io_channel.send(ClientIoMessage::BeginRestoration(manifest)) {
if let Err(e) = self.io_channel.lock().send(ClientIoMessage::BeginRestoration(manifest)) {
trace!("Error sending snapshot service message: {:?}", e);
}
}
@ -578,13 +578,13 @@ impl SnapshotService for Service {
}
fn restore_state_chunk(&self, hash: H256, chunk: Bytes) {
if let Err(e) = self.io_channel.send(ClientIoMessage::FeedStateChunk(hash, chunk)) {
if let Err(e) = self.io_channel.lock().send(ClientIoMessage::FeedStateChunk(hash, chunk)) {
trace!("Error sending snapshot service message: {:?}", e);
}
}
fn restore_block_chunk(&self, hash: H256, chunk: Bytes) {
if let Err(e) = self.io_channel.send(ClientIoMessage::FeedBlockChunk(hash, chunk)) {
if let Err(e) = self.io_channel.lock().send(ClientIoMessage::FeedBlockChunk(hash, chunk)) {
trace!("Error sending snapshot service message: {:?}", e);
}
}


@ -16,6 +16,7 @@
//! Watcher for snapshot-related chain events.
use util::Mutex;
use client::{BlockChainClient, Client, ChainNotify};
use ids::BlockID;
use service::ClientIoMessage;
@ -55,7 +56,7 @@ trait Broadcast: Send + Sync {
fn take_at(&self, num: Option<u64>);
}
impl Broadcast for IoChannel<ClientIoMessage> {
impl Broadcast for Mutex<IoChannel<ClientIoMessage>> {
fn take_at(&self, num: Option<u64>) {
let num = match num {
Some(n) => n,
@ -64,7 +65,7 @@ impl Broadcast for IoChannel<ClientIoMessage> {
trace!(target: "snapshot_watcher", "broadcast: {}", num);
if let Err(e) = self.send(ClientIoMessage::TakeSnapshot(num)) {
if let Err(e) = self.lock().send(ClientIoMessage::TakeSnapshot(num)) {
warn!("Snapshot watcher disconnected from IoService: {}", e);
}
}
@ -91,7 +92,7 @@ impl Watcher {
client: client,
sync_status: sync_status,
}),
broadcast: Box::new(channel),
broadcast: Box::new(Mutex::new(channel)),
period: period,
history: history,
}


@ -43,8 +43,6 @@ struct AccountCache {
// When changing the type of the values here, be sure to update `mem_used` and
// `new`.
accounts: LruCache<Address, Option<Account>>,
/// DB Code cache. Maps code hashes to shared bytes.
code: MemoryLruCache<H256, Arc<Vec<u8>>>,
/// Information on the modifications in recently committed blocks; specifically which addresses
/// changed in which block. Ordered by block number.
modifications: VecDeque<BlockChanges>,
@ -95,6 +93,8 @@ pub struct StateDB {
db: Box<JournalDB>,
/// Shared canonical state cache.
account_cache: Arc<Mutex<AccountCache>>,
/// DB Code cache. Maps code hashes to shared bytes.
code_cache: Arc<Mutex<MemoryLruCache<H256, Arc<Vec<u8>>>>>,
/// Local dirty cache.
local_cache: Vec<CacheQueueItem>,
/// Shared account bloom. Does not handle chain reorganizations.
@ -125,9 +125,9 @@ impl StateDB {
db: db,
account_cache: Arc::new(Mutex::new(AccountCache {
accounts: LruCache::new(cache_items),
code: MemoryLruCache::new(code_cache_size),
modifications: VecDeque::new(),
})),
code_cache: Arc::new(Mutex::new(MemoryLruCache::new(code_cache_size))),
local_cache: Vec::new(),
account_bloom: Arc::new(Mutex::new(bloom)),
cache_size: cache_size,
@ -320,6 +320,7 @@ impl StateDB {
StateDB {
db: self.db.boxed_clone(),
account_cache: self.account_cache.clone(),
code_cache: self.code_cache.clone(),
local_cache: Vec::new(),
account_bloom: self.account_bloom.clone(),
cache_size: self.cache_size,
@ -334,6 +335,7 @@ impl StateDB {
StateDB {
db: self.db.boxed_clone(),
account_cache: self.account_cache.clone(),
code_cache: self.code_cache.clone(),
local_cache: Vec::new(),
account_bloom: self.account_bloom.clone(),
cache_size: self.cache_size,
@ -352,10 +354,9 @@ impl StateDB {
pub fn mem_used(&self) -> usize {
// TODO: account for LRU-cache overhead; this is a close approximation.
self.db.mem_used() + {
let cache = self.account_cache.lock();
cache.code.current_size() +
cache.accounts.len() * ::std::mem::size_of::<Option<Account>>()
let accounts = self.account_cache.lock().accounts.len();
let code_size = self.code_cache.lock().current_size();
code_size + accounts * ::std::mem::size_of::<Option<Account>>()
}
}
@ -380,9 +381,9 @@ impl StateDB {
/// it simply maps hashes to raw code and will always be correct in the absence of
/// hash collisions.
pub fn cache_code(&self, hash: H256, code: Arc<Vec<u8>>) {
let mut cache = self.account_cache.lock();
let mut cache = self.code_cache.lock();
cache.code.insert(hash, code);
cache.insert(hash, code);
}
/// Get basic copy of the cached account. Does not include storage.
@ -397,9 +398,9 @@ impl StateDB {
/// Get cached code based on hash.
pub fn get_cached_code(&self, hash: &H256) -> Option<Arc<Vec<u8>>> {
let mut cache = self.account_cache.lock();
let mut cache = self.code_cache.lock();
cache.code.get_mut(hash).map(|code| code.clone())
cache.get_mut(hash).map(|code| code.clone())
}
/// Get value from a cached account.


@ -109,7 +109,7 @@ pub struct VerificationQueue<K: Kind> {
struct QueueSignal {
deleting: Arc<AtomicBool>,
signalled: AtomicBool,
message_channel: IoChannel<ClientIoMessage>,
message_channel: Mutex<IoChannel<ClientIoMessage>>,
}
impl QueueSignal {
@ -121,7 +121,8 @@ impl QueueSignal {
}
if self.signalled.compare_and_swap(false, true, AtomicOrdering::Relaxed) == false {
if let Err(e) = self.message_channel.send_sync(ClientIoMessage::BlockVerified) {
let channel = self.message_channel.lock().clone();
if let Err(e) = channel.send_sync(ClientIoMessage::BlockVerified) {
debug!("Error sending BlockVerified message: {:?}", e);
}
}
@ -135,7 +136,8 @@ impl QueueSignal {
}
if self.signalled.compare_and_swap(false, true, AtomicOrdering::Relaxed) == false {
if let Err(e) = self.message_channel.send(ClientIoMessage::BlockVerified) {
let channel = self.message_channel.lock().clone();
if let Err(e) = channel.send(ClientIoMessage::BlockVerified) {
debug!("Error sending BlockVerified message: {:?}", e);
}
}
@ -178,7 +180,7 @@ impl<K: Kind> VerificationQueue<K> {
let ready_signal = Arc::new(QueueSignal {
deleting: deleting.clone(),
signalled: AtomicBool::new(false),
message_channel: message_channel
message_channel: Mutex::new(message_channel),
});
let empty = Arc::new(SCondvar::new());
let panic_handler = PanicHandler::new_in_arc();

js/.gitignore

@ -5,3 +5,4 @@ build
.coverage
.dist
.happypack
.npmjs

Binary image added (12 KiB; file name not shown).

js/assets/images/parity.ico: new binary file (1.1 KiB).


@ -1,9 +1,9 @@
{
"name": "parity.js",
"version": "0.0.1",
"version": "0.1.5",
"main": "release/index.js",
"jsnext:main": "src/index.js",
"author": "Ethcore Team <admin@ethcore.io>",
"author": "Parity Team <admin@parity.io>",
"maintainers": [
"Jaco Greeff"
],
@ -11,7 +11,7 @@
"license": "GPL-3.0",
"repository": {
"type": "git",
"url": "git+https://github.com/ethcore/parity.js.git"
"url": "git+https://github.com/ethcore/parity.git"
},
"keywords": [
"Ethereum",
@ -27,15 +27,13 @@
"build:app": "webpack --progress",
"build:lib": "webpack --config webpack.libraries --progress",
"build:dll": "webpack --config webpack.vendor --progress",
"ci:build": "npm run ci:build:dll && npm run ci:build:app && npm run ci:build:lib",
"ci:build:app": "NODE_ENV=production webpack",
"ci:build:lib": "NODE_ENV=production webpack --config webpack.libraries",
"ci:build:dll": "NODE_ENV=production webpack --config webpack.vendor",
"ci:build:npm": "NODE_ENV=production webpack --config webpack.npm",
"start": "npm install && npm run build:dll && npm run start:app",
"start:app": "webpack-dev-server -d --history-api-fallback --open --hot --inline --progress --colors --port 3000",
"clean": "rm -rf ./build ./coverage",
"coveralls": "npm run testCoverage && coveralls < coverage/lcov.info",
"lint": "eslint --ignore-path .gitignore ./src/",
@ -65,7 +63,7 @@
"chai": "^3.5.0",
"chai-enzyme": "0.4.2",
"cheerio": "0.20.0",
"copy-webpack-plugin": "^3.0.1",
"copy-webpack-plugin": "^4.0.0",
"core-js": "^2.4.1",
"coveralls": "^2.11.11",
"css-loader": "^0.23.1",
@ -79,6 +77,7 @@
"eslint-plugin-standard": "^2.0.0",
"extract-text-webpack-plugin": "^1.0.1",
"file-loader": "^0.8.5",
"fs-extra": "^0.30.0",
"happypack": "^2.2.1",
"history": "^2.0.0",
"html-loader": "^0.4.3",
@ -125,6 +124,7 @@
"material-ui": "^0.16.1",
"material-ui-chip-input": "^0.8.0",
"moment": "^2.14.1",
"qs": "^6.3.0",
"react": "^15.2.1",
"react-addons-css-transition-group": "^15.2.1",
"react-dom": "^15.2.1",

js/parity.md (new file)

@ -0,0 +1,81 @@
# parity.js
Parity.js is a thin, fast, Promise-based wrapper around the Ethereum APIs.
## installation
Install the package with `npm install --save @parity/parity.js`
## usage
### initialisation
```javascript
// import the actual Api class
import { Api } from '@parity/parity.js';
// do the setup
const transport = new Api.Transport.Http('http://localhost:8545');
const api = new Api(transport);
```
### making calls
perform a call
```javascript
api.eth
.coinbase()
.then((coinbase) => {
console.log(`The coinbase is ${coinbase}`);
});
```
multiple promises
```javascript
Promise
.all([
api.eth.coinbase(),
api.net.listening()
])
.then(([coinbase, listening]) => {
// do stuff here
});
```
chaining promises
```javascript
api.eth
.newFilter({...})
.then((filterId) => api.eth.getFilterChanges(filterId))
.then((changes) => {
console.log(changes);
});
```
### contracts
attach contract
```javascript
const abi = [{ name: 'callMe', inputs: [{ type: 'bool', ...}, { type: 'string', ...}]}, ...abi...];
const address = '0x123456...9abc';
const contract = new api.newContract(abi, address);
```
find & call a function
```javascript
contract.instance
.callMe
.call({ gas: 21000 }, [true, 'someString']) // or estimateGas or postTransaction
.then((result) => {
console.log(`the result was ${result}`);
});
```
## apis
APIs implement the calls as exposed in the [Ethcore JSON Ethereum RPC](https://github.com/ethcore/ethereum-rpc-json/) definitions. Mapping follows the naming conventions of the originals, i.e. `eth_call` becomes `eth.call`, `personal_accounts` becomes `personal.accounts`, etc.
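As an illustrative sketch of this naming convention (built only on the mapping described above, not an exhaustive reference), the calls below reuse the `api` instance from the initialisation section; `personal_accounts` and `eth_coinbase` surface as `personal.accounts` and `eth.coinbase`:
```javascript
// personal_accounts -> api.personal.accounts()
// eth_coinbase      -> api.eth.coinbase()
api.personal
  .accounts()
  .then((accounts) => {
    console.log(`the node manages ${accounts.length} accounts`);
    return api.eth.coinbase();
  })
  .then((coinbase) => {
    console.log(`the coinbase is ${coinbase}`);
  });
```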

js/parity.package.json (new file)

@ -0,0 +1,32 @@
{
"name": "@parity/parity.js",
"description": "The Parity Promise-base API & ABI library for interfacing with Ethereum over RPC",
"version": "0.0.0",
"main": "library.js",
"author": "Parity Team <admin@parity.io>",
"maintainers": [
"Jaco Greeff"
],
"contributors": [],
"license": "GPL-3.0",
"repository": {
"type": "git",
"url": "git+https://github.com/ethcore/parity.git"
},
"keywords": [
"Ethereum",
"ABI",
"API",
"RPC",
"Parity",
"Promise"
],
"scripts": {
},
"devDependencies": {
},
"dependencies": {
"bignumber.js": "^2.3.0",
"js-sha3": "^0.5.2"
}
}


@ -1,11 +1,18 @@
#!/bin/bash
set -e
# variables
UTCDATE=`date -u "+%Y%m%d-%H%M%S"`
PACKAGES=( "parity.js" )
BRANCH=$CI_BUILD_REF_NAME
GIT_JS_PRECOMPILED="https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/js-precompiled.git"
GIT_PARITY="https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/parity.git"
# setup the git user defaults for the current repo
function setup_git_user {
git config push.default simple
git config merge.ours.driver true
git config user.email "jaco+gitlab@ethcore.io"
git config user.email "$GITHUB_EMAIL"
git config user.name "GitLab Build Bot"
}
@ -15,47 +22,63 @@ GITLOG=./.git/gitcommand.log
pushd $BASEDIR
cd ../.dist
# variables
UTCDATE=`date -u "+%Y%m%d-%H%M%S"`
# init git
# add local files and send it up
echo "*** Setting up GitHub config for js-precompiled"
rm -rf ./.git
git init
# add local files and send it up
echo "Setting up GitHub config for js-precompiled"
setup_git_user
echo "Checking out $CI_BUILD_REF_NAME branch"
git remote add origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/js-precompiled.git
echo "*** Checking out $BRANCH branch"
git remote add origin $GIT_JS_PRECOMPILED
git fetch origin 2>$GITLOG
git checkout -b $CI_BUILD_REF_NAME
git checkout -b $BRANCH
echo "Committing compiled files for $UTCDATE"
echo "*** Committing compiled files for $UTCDATE"
git add .
git commit -m "$UTCDATE"
echo "Merging remote"
git merge origin/$CI_BUILD_REF_NAME -X ours --commit -m "$UTCDATE [release]"
git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG
echo "*** Merging remote"
git merge origin/$BRANCH -X ours --commit -m "$UTCDATE [release]"
git push origin HEAD:refs/heads/$BRANCH 2>$GITLOG
PRECOMPILED_HASH=`git rev-parse HEAD`
# back to root
popd
# move to root
cd ../..
echo "Setting up GitHub config for parity"
echo "*** Setting up GitHub config for parity"
setup_git_user
git remote set-url origin https://${GITHUB_JS_PRECOMPILED}:@github.com/ethcore/parity.git
git reset --hard origin/$CI_BUILD_REF_NAME 2>$GITLOG
git remote set-url origin $GIT_PARITY
git reset --hard origin/$BRANCH 2>$GITLOG
echo "Updating cargo package parity-ui-precompiled#$PRECOMPILED_HASH"
echo "*** Bumping package.json patch version"
cd js
npm --no-git-tag-version version
npm version patch
cd ..
echo "*** Updating cargo parity-ui-precompiled#$PRECOMPILED_HASH"
cargo update -p parity-ui-precompiled
# --precise "$PRECOMPILED_HASH"
echo "Committing updated files"
git add .
echo "*** Committing updated files"
git add Cargo.lock js/package.json
git commit -m "[ci skip] js-precompiled $UTCDATE"
git push origin HEAD:refs/heads/$CI_BUILD_REF_NAME 2>$GITLOG
git push origin HEAD:refs/heads/$BRANCH 2>$GITLOG
echo "*** Building packages for npmjs"
cd js
# echo -e "$NPM_USERNAME\n$NPM_PASSWORD\n$NPM_EMAIL" | npm login
echo "$NPM_TOKEN" >> ~/.npmrc
npm run ci:build:npm
echo "*** Publishing $PACKAGE to npmjs"
cd .npmjs
npm publish --access public
cd ..
# back to root
echo "*** Release completed"
popd
# exit with exit code
exit 0


@ -14,6 +14,8 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { stringify } from 'qs';
const options = {
method: 'GET',
headers: {
@ -23,19 +25,14 @@ const options = {
export function call (module, action, _params, test) {
const host = test ? 'testnet.etherscan.io' : 'api.etherscan.io';
let params = '';
if (_params) {
Object.keys(_params).map((param) => {
const value = _params[param];
const query = stringify(Object.assign({
module, action
}, _params || {}));
params = `${params}&${param}=${value}`;
});
}
return fetch(`http://${host}/api?module=${module}&action=${action}${params}`, options)
return fetch(`https://${host}/api?${query}`, options)
.then((response) => {
if (response.status !== 200) {
if (!response.ok) {
throw { code: response.status, message: response.statusText }; // eslint-disable-line
}


@ -16,10 +16,13 @@
import { account } from './account';
import { stats } from './stats';
import { txLink, addressLink } from './links';
const etherscan = {
account: account,
stats: stats
stats: stats,
txLink: txLink,
addressLink: addressLink
};
export default etherscan;


@ -14,8 +14,10 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
// links to chain explorers
export const BASE_LINK_ACCOUNT_MORDEN = 'https://testnet.etherscan.io/address/';
export const BASE_LINK_ACCOUNT_HOMESTEAD = 'https://etherscan.io/address/';
export const BASE_LINK_TX_MORDEN = 'https://testnet.etherscan.io/tx/';
export const BASE_LINK_TX_HOMESTEAD = 'https://etherscan.io/tx/';
export const txLink = (hash, isTestnet = false) => {
return `https://${isTestnet ? 'testnet.' : ''}etherscan.io/tx/${hash}`;
};
export const addressLink = (address, isTestnet = false) => {
return `https://${isTestnet ? 'testnet.' : ''}etherscan.io/address/${address}`;
};


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>Basic Token Deployment</title>
</head>
<body>


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>GAVcoin</title>
</head>
<body>


@ -43,7 +43,7 @@ export default class ActionBuyIn extends Component {
accountError: ERRORS.invalidAccount,
amount: 0,
amountError: ERRORS.invalidAmount,
maxPrice: api.util.fromWei(this.props.price.mul(1.2)).toString(),
maxPrice: api.util.fromWei(this.props.price.mul(1.2)).toFixed(0),
maxPriceError: null,
sending: false,
complete: false


@ -53,6 +53,7 @@ export default class Application extends Component {
action: null,
address: null,
accounts: [],
accountsInfo: {},
blockNumber: new BigNumber(-1),
ethBalance: new BigNumber(0),
gavBalance: new BigNumber(0),
@ -68,7 +69,7 @@ export default class Application extends Component {
}
render () {
const { accounts, address, blockNumber, gavBalance, loading, price, remaining, totalSupply } = this.state;
const { accounts, accountsInfo, address, blockNumber, gavBalance, loading, price, remaining, totalSupply } = this.state;
if (loading) {
return (
@ -93,7 +94,7 @@ export default class Application extends Component {
gavBalance={ gavBalance }
onAction={ this.onAction } />
<Events
accounts={ accounts } />
accountsInfo={ accountsInfo } />
</div>
);
}
@ -216,8 +217,8 @@ export default class Application extends Component {
api.personal.accountsInfo()
]);
})
.then(([address, addresses, infos]) => {
infos = infos || {};
.then(([address, addresses, accountsInfo]) => {
accountsInfo = accountsInfo || {};
console.log(`gavcoin was found at ${address}`);
const contract = api.newContract(abis.gavcoin, address);
@ -226,9 +227,10 @@ export default class Application extends Component {
loading: false,
address,
contract,
accountsInfo,
instance: contract.instance,
accounts: addresses.map((address) => {
const info = infos[address] || {};
const info = accountsInfo[address] || {};
return {
address,


@ -14,10 +14,11 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import moment from 'moment';
import React, { Component, PropTypes } from 'react';
import IdentityIcon from '../../IdentityIcon';
import { formatBlockNumber, formatCoins, formatEth } from '../../format';
import { formatCoins, formatEth, formatHash } from '../../format';
import styles from '../events.css';
@ -27,7 +28,8 @@ const EMPTY_COLUMN = (
export default class Event extends Component {
static contextTypes = {
accounts: PropTypes.array.isRequired
accountsInfo: PropTypes.object.isRequired,
api: PropTypes.object.isRequired
}
static propTypes = {
@ -38,14 +40,23 @@ export default class Event extends Component {
toAddress: PropTypes.string
}
state = {
block: null
}
componentDidMount () {
this.loadBlock();
}
render () {
const { event, fromAddress, toAddress, price, value } = this.props;
const { blockNumber, state, type } = event;
const { block } = this.state;
const { state, type } = event;
const cls = `${styles.event} ${styles[state]} ${styles[type.toLowerCase()]}`;
return (
<tr className={ cls }>
{ this.renderBlockNumber(blockNumber) }
{ this.renderTimestamp(block) }
{ this.renderType(type) }
{ this.renderValue(value) }
{ this.renderPrice(price) }
@ -55,10 +66,10 @@ export default class Event extends Component {
);
}
renderBlockNumber (blockNumber) {
renderTimestamp (block) {
return (
<td className={ styles.blocknumber }>
{ formatBlockNumber(blockNumber) }
{ !block ? ' ' : moment(block.timestamp).fromNow() }
</td>
);
}
@ -77,8 +88,8 @@ export default class Event extends Component {
}
renderAddressName (address) {
const { accounts } = this.context;
const account = accounts.find((_account) => _account.address === address);
const { accountsInfo } = this.context;
const account = accountsInfo[address];
if (account && account.name) {
return (
@ -90,7 +101,7 @@ export default class Event extends Component {
return (
<div className={ styles.address }>
{ address }
{ formatHash(address) }
</div>
);
}
@ -126,4 +137,19 @@ export default class Event extends Component {
</td>
);
}
loadBlock () {
const { api } = this.context;
const { event } = this.props;
if (!event || !event.blockNumber || event.blockNumber.eq(0)) {
return;
}
api.eth
.getBlockByNumber(event.blockNumber)
.then((block) => {
this.setState({ block });
});
}
}


@ -16,18 +16,20 @@
*/
.events {
padding: 4em 2em;
text-align: center;
}
.list {
width: 100%;
margin: 0 auto;
border: none;
border-spacing: 0;
text-align: left;
}
.list td {
vertical-align: top;
padding: 4px 0.5em;
max-height: 32px;
padding: 0.25em 1em;
max-height: 1.5em;
}
.event {
@ -38,7 +40,6 @@
.blocknumber,
.ethvalue,
.gavvalue {
font-family: 'Roboto Mono', monospace;
}
.blocknumber,


@ -27,7 +27,7 @@ import styles from './events.css';
export default class Events extends Component {
static childContextTypes = {
accounts: PropTypes.array
accountsInfo: PropTypes.object
}
static contextTypes = {
@ -36,7 +36,7 @@ export default class Events extends Component {
}
static propTypes = {
accounts: PropTypes.array
accountsInfo: PropTypes.object.isRequired
}
state = {
@ -84,11 +84,9 @@ export default class Events extends Component {
}
getChildContext () {
const { accounts } = this.props;
const { accountsInfo } = this.props;
return {
accounts
};
return { accountsInfo };
}
setupFilters () {


@ -50,3 +50,7 @@ export function formatCoins (amount, decimals = 6) {
export function formatEth (eth, decimals = 3) {
return api.util.fromWei(eth).toFormat(decimals);
}
export function formatHash (hash) {
return `${hash.substr(0, 10)}...${hash.substr(-8)}`;
}


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>GitHub Hint</title>
</head>
<body>


@ -151,7 +151,7 @@ export default class Application extends Component {
let urlError = null;
if (url && url.length) {
var re = /^https?:\/\/(?:www\.|(?!www))[^\s\.]+\.[^\s]{2,}/g;
const re = /^https?:\/\/(?:www\.|(?!www))[^\s\.]+\.[^\s]{2,}/g; // eslint-disable-line
urlError = re.test(url)
? null
: 'not matching rexex';


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>Token Registry</title>
</head>
<body>


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>Method Signature Registry</title>
</head>
<body>


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>Token Registry</title>
</head>
<body>


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>dev::Parity.js</title>
</head>
<body>


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>dev::Web3</title>
</head>
<body>


@ -4,6 +4,7 @@
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<link rel="icon" href="parity.ico" type="image/x-icon">
<title>Parity</title>
<style>
html, body, #container {


@ -39,6 +39,8 @@ import './environment';
import '../assets/fonts/Roboto/font.css';
import '../assets/fonts/RobotoMono/font.css';
import '../assets/images/parity.ico';
import styles from './reset.css';
import './index.html';


@ -14,12 +14,10 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { BASE_LINK_ACCOUNT_MORDEN, BASE_LINK_ACCOUNT_HOMESTEAD } from '../constants/constants';
import Abi from './abi';
import Api from './api';
export const getAccountLink = _getAccountLink;
function _getAccountLink (address, chain) {
const isTestNet = chain === 'morden' || chain === 'testnet';
const base = isTestNet ? BASE_LINK_ACCOUNT_MORDEN : BASE_LINK_ACCOUNT_HOMESTEAD;
return base + address;
}
export {
Abi,
Api
};


@ -50,6 +50,9 @@ export default class CreationType extends Component {
<RadioButton
label='Import account from an Ethereum pre-sale wallet'
value='fromPresale' />
<RadioButton
label='Import raw private key'
value='fromRaw' />
</RadioButtonGroup>
</div>
);


@ -26,6 +26,8 @@ import styles from '../createAccount.css';
const ERRORS = {
noName: 'you need to specify a valid name for the account',
noPhrase: 'you need to specify the recovery phrase',
noKey: 'you need to provide the raw private key',
invalidKey: 'the raw key needs to be hex, 64 characters in length',
invalidPassword: 'you need to specify a password >= 8 characters',
noMatchPassword: 'the supplied passwords does not match'
};


@ -0,0 +1,17 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
export default from './rawKey';


@ -0,0 +1,189 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React, { Component, PropTypes } from 'react';
import { Form, Input } from '../../../ui';
import styles from '../createAccount.css';
import { ERRORS } from '../NewAccount';
export default class RawKey extends Component {
static contextTypes = {
api: PropTypes.object.isRequired
}
static propTypes = {
onChange: PropTypes.func.isRequired
}
state = {
rawKey: '',
rawKeyError: ERRORS.noKey,
accountName: '',
accountNameError: ERRORS.noName,
passwordHint: '',
password1: '',
password1Error: ERRORS.invalidPassword,
password2: '',
password2Error: ERRORS.noMatchPassword,
isValidPass: false,
isValidName: false,
isValidKey: false
}
componentWillMount () {
this.props.onChange(false, {});
}
render () {
const { accountName, accountNameError, passwordHint, password1, password1Error, password2, password2Error, rawKey, rawKeyError } = this.state;
return (
<Form>
<Input
hint='the raw hex encoded private key'
label='private key'
error={ rawKeyError }
value={ rawKey }
onChange={ this.onEditKey } />
<Input
label='account name'
hint='a descriptive name for the account'
error={ accountNameError }
value={ accountName }
onChange={ this.onEditAccountName } />
<Input
label='password hint'
hint='(optional) a hint to help with remembering the password'
value={ passwordHint }
onChange={ this.onEditPasswordHint } />
<div className={ styles.passwords }>
<div className={ styles.password }>
<Input
className={ styles.password }
label='password'
hint='a strong, unique password'
type='password'
error={ password1Error }
value={ password1 }
onChange={ this.onEditPassword1 } />
</div>
<div className={ styles.password }>
<Input
className={ styles.password }
label='password (repeat)'
hint='verify your password'
type='password'
error={ password2Error }
value={ password2 }
onChange={ this.onEditPassword2 } />
</div>
</div>
</Form>
);
}
updateParent = () => {
const { isValidName, isValidPass, isValidKey, accountName, passwordHint, password1, rawKey } = this.state;
const isValid = isValidName && isValidPass && isValidKey;
this.props.onChange(isValid, {
name: accountName,
passwordHint,
password: password1,
rawKey
});
}
onEditPasswordHint = (event, value) => {
this.setState({
passwordHint: value
});
}
onEditKey = (event) => {
const { api } = this.context;
const rawKey = event.target.value;
let rawKeyError = null;
console.log(rawKey.length, rawKey);
if (!rawKey || !rawKey.trim().length) {
rawKeyError = ERRORS.noKey;
} else if (rawKey.substr(0, 2) !== '0x' || rawKey.substr(2).length !== 64 || !api.util.isHex(rawKey)) {
rawKeyError = ERRORS.invalidKey;
}
this.setState({
rawKey,
rawKeyError,
isValidKey: !rawKeyError
}, this.updateParent);
}
onEditAccountName = (event) => {
const accountName = event.target.value;
let accountNameError = null;
if (!accountName || accountName.trim().length < 2) {
accountNameError = ERRORS.noName;
}
this.setState({
accountName,
accountNameError,
isValidName: !accountNameError
}, this.updateParent);
}
onEditPassword1 = (event) => {
const value = event.target.value;
let error1 = null;
let error2 = null;
if (!value || value.trim().length < 8) {
error1 = ERRORS.invalidPassword;
}
if (value !== this.state.password2) {
error2 = ERRORS.noMatchPassword;
}
this.setState({
password1: value,
password1Error: error1,
password2Error: error2,
isValidPass: !error1 && !error2
}, this.updateParent);
}
onEditPassword2 = (event) => {
const value = event.target.value;
let error2 = null;
if (value !== this.state.password1) {
error2 = ERRORS.noMatchPassword;
}
this.setState({
password2: value,
password2Error: error2,
isValidPass: !error2
}, this.updateParent);
}
}


@ -29,6 +29,7 @@ import CreationType from './CreationType';
import NewAccount from './NewAccount';
import NewGeth from './NewGeth';
import NewImport from './NewImport';
import RawKey from './RawKey';
import RecoveryPhrase from './RecoveryPhrase';
const TITLES = {
@ -58,6 +59,7 @@ export default class CreateAccount extends Component {
passwordHint: null,
password: null,
phrase: null,
rawKey: null,
json: null,
canCreate: false,
createType: null,
@ -110,6 +112,11 @@ export default class CreateAccount extends Component {
<RecoveryPhrase
onChange={ this.onChangeDetails } />
);
} else if (createType === 'fromRaw') {
return (
<RawKey
onChange={ this.onChangeDetails } />
);
}
return (
@ -220,6 +227,28 @@ export default class CreateAccount extends Component {
canCreate: true
});
this.newError(error);
});
} else if (createType === 'fromRaw') {
return api.personal
.newAccountFromSecret(this.state.rawKey, this.state.password)
.then((address) => {
this.setState({ address });
return api.personal
.setAccountName(address, this.state.name)
.then(() => api.personal.setAccountMeta(address, { passwordHint: this.state.passwordHint }));
})
.then(() => {
this.onNext();
this.props.onUpdate && this.props.onUpdate();
})
.catch((error) => {
console.error('onCreate', error);
this.setState({
canCreate: true
});
this.newError(error);
});
} else if (createType === 'fromGeth') {
@ -288,27 +317,35 @@ export default class CreateAccount extends Component {
});
}
onChangeDetails = (valid, { name, passwordHint, address, password, phrase }) => {
onChangeDetails = (canCreate, { name, passwordHint, address, password, phrase, rawKey }) => {
this.setState({
canCreate: valid,
canCreate,
name,
passwordHint,
address,
password,
phrase
phrase,
rawKey
});
}
onChangeGeth = (valid, gethAddresses) => {
onChangeRaw = (canCreate, rawKey) => {
this.setState({
canCreate: valid,
canCreate,
rawKey
});
}
onChangeGeth = (canCreate, gethAddresses) => {
this.setState({
canCreate,
gethAddresses
});
}
onChangeWallet = (valid, { name, passwordHint, password, json }) => {
onChangeWallet = (canCreate, { name, passwordHint, password, json }) => {
this.setState({
canCreate: valid,
canCreate,
name,
passwordHint,
password,


@ -122,7 +122,7 @@ export default class FirstRun extends Component {
icon={ <ActionDoneAll /> }
label='Close'
onClick={ this.onClose } />
);
);
}
}


@ -46,6 +46,16 @@
font-size: 1.1rem;
}
.passwords {
display: flex;
flex-wrap: wrap;
}
.password {
flex: 0 1 50%;
width: 50%;
}
.passwordHint {
font-size: 0.9rem;
color: lightgrey;


@ -181,33 +181,37 @@ export default class PasswordManager extends Component {
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditCurrent } />
<Input
label='new password'
hint='the new password for this account'
type='password'
submitOnBlur={ false }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditNew } />
<Input
label='repeat new password'
hint='repeat the new password for this account'
type='password'
submitOnBlur={ false }
error={ repeatError }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditRepeatNew } />
<Input
label='new password hint'
label='(optional) new password hint'
hint='hint for the new password'
submitOnBlur={ false }
value={ passwordHint }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditHint } />
<div className={ styles.passwords }>
<div className={ styles.password }>
<Input
label='new password'
hint='the new password for this account'
type='password'
submitOnBlur={ false }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditNew } />
</div>
<div className={ styles.password }>
<Input
label='repeat new password'
hint='repeat the new password for this account'
type='password'
submitOnBlur={ false }
error={ repeatError }
disabled={ disabled }
onSubmit={ this.handleChangePassword }
onChange={ this.onEditRepeatNew } />
</div>
</div>
</div>
</Form>
</Tab>


@ -420,13 +420,15 @@ export default class Transfer extends Component {
_sendToken () {
const { account, balance } = this.props;
const { recipient, value, tag } = this.state;
const { gas, gasPrice, recipient, value, tag } = this.state;
const token = balance.tokens.find((balance) => balance.token.tag === tag).token;
return token.contract.instance.transfer
.postTransaction({
from: account.address,
to: token.address
to: token.address,
gas,
gasPrice
}, [
recipient,
new BigNumber(value).mul(token.format).toFixed(0)
@ -483,7 +485,7 @@ export default class Transfer extends Component {
to: token.address
}, [
recipient,
new BigNumber(value || 0).mul(token.format).toString()
new BigNumber(value || 0).mul(token.format).toFixed(0)
]);
}


@ -139,7 +139,7 @@ class MethodDecoding extends Component {
return (
<div className={ styles.gasDetails }>
{ historic ? 'Used' : 'Will use' } <span className={ styles.highlight }>{ gas.toFormat(0) } gas ({ gasPrice.div(1000000).toFormat(0) }M/<small>ETH</small>)</span> for a total transaction cost of <span className={ styles.highlight }>{ this.renderEtherValue(gasValue) }</span>
{ historic ? 'Provided' : 'Provides' } <span className={ styles.highlight }>{ gas.toFormat(0) } gas ({ gasPrice.div(1000000).toFormat(0) }M/<small>ETH</small>)</span> for a total transaction value of <span className={ styles.highlight }>{ this.renderEtherValue(gasValue) }</span>
</div>
);
}


@ -23,6 +23,7 @@
.hash {
padding-top: 1em;
word-break: break-all;
}
.confirm {
@ -31,11 +32,13 @@
}
.progressbar {
margin: 0.5em !important;
margin: 0.5em 0 !important;
width: 30% !important;
min-width: 220px;
display: inline-block !important;
height: 0.75em !important;
}
.progressinfo {
text-align: center;
}


@ -19,6 +19,7 @@ import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { LinearProgress } from 'material-ui';
import { txLink } from '../../3rdparty/etherscan/links';
import styles from './txHash.css';
@ -29,7 +30,8 @@ class TxHash extends Component {
static propTypes = {
hash: PropTypes.string.isRequired,
isTest: PropTypes.bool
isTest: PropTypes.bool,
summary: PropTypes.bool
}
state = {
@ -54,16 +56,22 @@ class TxHash extends Component {
}
render () {
const { hash, isTest } = this.props;
const link = `https://${isTest ? 'testnet.' : ''}etherscan.io/tx/${hash}`;
const { hash, isTest, summary } = this.props;
let header = null;
return (
<div className={ styles.details }>
if (!summary) {
header = (
<div className={ styles.header }>
The transaction has been posted to the network with a transaction hash of
</div>
);
}
return (
<div className={ styles.details }>
{ header }
<div className={ styles.hash }>
<a href={ link } target='_blank'>{ hash }</a>
<a href={ txLink(hash, isTest) } target='_blank'>{ hash }</a>
</div>
{ this.renderConfirmations() }
</div>


@ -23,6 +23,7 @@ import { bindActionCreators } from 'redux';
import { fetchBlock, fetchTransaction } from '../../../../redux/providers/blockchainActions';
import { IdentityIcon, IdentityName, MethodDecoding } from '../../../../ui';
import { txLink, addressLink } from '../../../../3rdparty/etherscan/links';
import styles from '../transactions.css';
@ -55,9 +56,7 @@ class Transaction extends Component {
}
render () {
const { block, transaction, isTest } = this.props;
const prefix = `https://${isTest ? 'testnet.' : ''}etherscan.io/`;
const { block, transaction } = this.props;
return (
<tr>
@ -65,9 +64,9 @@ class Transaction extends Component {
<div>{ this.formatBlockTimestamp(block) }</div>
<div>{ this.formatNumber(transaction.blockNumber) }</div>
</td>
{ this.renderAddress(prefix, transaction.from) }
{ this.renderAddress(transaction.from) }
{ this.renderTransaction() }
{ this.renderAddress(prefix, transaction.to) }
{ this.renderAddress(transaction.to) }
<td className={ styles.method }>
{ this.renderMethod() }
</td>
@ -93,15 +92,16 @@ class Transaction extends Component {
renderTransaction () {
const { transaction, isTest } = this.props;
const prefix = `https://${isTest ? 'testnet.' : ''}etherscan.io/`;
const hashLink = `${prefix}tx/${transaction.hash}`;
return (
<td className={ styles.transaction }>
{ this.renderEtherValue() }
<div></div>
<div>
<a href={ hashLink } target='_blank' className={ styles.link }>
<a
className={ styles.link }
href={ txLink(transaction.hash, isTest) }
target='_blank'
>
{ this.formatHash(transaction.hash) }
</a>
</div>
@ -109,10 +109,12 @@ class Transaction extends Component {
);
}
renderAddress (prefix, address) {
renderAddress (address) {
const { isTest } = this.props;
const eslink = address ? (
<a
href={ `${prefix}address/${address}` }
href={ addressLink(address, isTest) }
target='_blank'
className={ styles.link }>
<IdentityName address={ address } shorten />

View File

@ -22,6 +22,7 @@ import { bindActionCreators } from 'redux';
import { fetchBlock, fetchTransaction } from '../../../../redux/providers/blockchainActions';
import { IdentityIcon, IdentityName, Input, InputAddress } from '../../../../ui';
import { txLink } from '../../../../3rdparty/etherscan/links';
import styles from '../../contract.css';
@ -49,7 +50,7 @@ class Event extends Component {
const block = blocks[event.blockNumber.toString()];
const transaction = transactions[event.transactionHash] || {};
const classes = `${styles.event} ${styles[event.state]}`;
const url = `https://${isTest ? 'testnet.' : ''}etherscan.io/tx/${event.transactionHash}`;
const url = txLink(event.transactionHash, isTest);
const keys = Object.keys(event.params).join(', ');
const values = Object.keys(event.params).map((name, index) => {
const param = event.params[name];

View File

@ -16,7 +16,7 @@
import React, { Component, PropTypes } from 'react';
import { getAccountLink } from '../../util/account';
import { addressLink } from '../../../../../3rdparty/etherscan/links';
import styles from './AccountLink.css';
export default class AccountLink extends Component {
@ -57,7 +57,7 @@ export default class AccountLink extends Component {
}
updateLink (address, chain) {
const link = getAccountLink(address, chain);
const link = addressLink(address, chain === 'morden' || chain === 'testnet');
this.setState({
link

View File

@ -29,7 +29,7 @@
.statusContainer {
width: 220px;
padding: 20px 40px 0 40px;
padding: 0 40px 0 40px;
/*border-left: 1px solid #aaa;*/
position: absolute;
top: 0;
@ -46,7 +46,8 @@
}
.isRejected {
opacity: 0.7;
opacity: 0.5;
padding-top: 2em;
}
.txHash {

View File

@ -17,6 +17,9 @@
import React, { Component, PropTypes } from 'react';
import CircularProgress from 'material-ui/CircularProgress';
import { TxHash } from '../../../../ui';
import TransactionMainDetails from '../TransactionMainDetails';
import TxHashLink from '../TxHashLink';
import TransactionSecondaryDetails from '../TransactionSecondaryDetails';
@ -57,7 +60,7 @@ export default class TransactionFinished extends Component {
};
componentWillMount () {
const { gas, gasPrice, value } = this.props;
const { from, to, gas, gasPrice, value } = this.props;
const fee = tUtil.getFee(gas, gasPrice); // BigNumber object
const totalValue = tUtil.getTotalValue(fee, value);
this.setState({ totalValue });
@ -70,9 +73,10 @@ export default class TransactionFinished extends Component {
console.error('could not fetch chain', err);
});
const { from, to } = this.props;
this.fetchBalance(from, 'fromBalance');
if (to) this.fetchBalance(to, 'toBalance');
if (to) {
this.fetchBalance(to, 'toBalance');
}
}
render () {
@ -109,19 +113,27 @@ export default class TransactionFinished extends Component {
}
renderStatus () {
const { status } = this.props;
const klass = status === 'confirmed' ? styles.isConfirmed : styles.isRejected;
const { status, txHash } = this.props;
if (status !== 'confirmed') {
return (
<div>
<span className={ styles.isRejected }>{ capitalize(status) }</span>
</div>
);
}
return (
<div>
<span className={ klass }>{ capitalize(status) }</span>
{ this.renderTxHash() }
</div>
<TxHash
summary
hash={ txHash } />
);
}
renderTxHash () {
const { txHash, chain } = this.props;
if (!txHash) {
const { txHash } = this.props;
const { chain } = this.state;
if (!txHash || !chain) {
return;
}

View File

@ -16,7 +16,7 @@
import React, { Component, PropTypes } from 'react';
import { getTxLink } from '../util/transaction';
import { txLink } from '../../../../3rdparty/etherscan/links';
export default class TxHashLink extends Component {
@ -27,27 +27,12 @@ export default class TxHashLink extends Component {
className: PropTypes.string
}
state = {
link: null
};
componentWillMount () {
const { txHash, chain } = this.props;
this.updateLink(txHash, chain);
}
componentWillReceiveProps (nextProps) {
const { txHash, chain } = nextProps;
this.updateLink(txHash, chain);
}
render () {
const { children, txHash, className } = this.props;
const { link } = this.state;
const { children, txHash, className, chain } = this.props;
return (
<a
href={ link }
href={ txLink(txHash, chain === 'morden' || chain === 'testnet') }
target='_blank'
className={ className }>
{ children || txHash }
@ -55,9 +40,4 @@ export default class TxHashLink extends Component {
);
}
updateLink (txHash, chain) {
const link = getTxLink(txHash, chain);
this.setState({ link });
}
}

View File

@ -18,7 +18,6 @@ import BigNumber from 'bignumber.js';
const WEI_TO_ETH_MULTIPLIER = 0.000000000000000001;
const WEI_TO_SZABU_MULTIPLIER = 0.000000000001;
import { BASE_LINK_TX_MORDEN, BASE_LINK_TX_HOMESTEAD } from '../constants/constants';
export const getShortData = _getShortData;
// calculations
@ -33,8 +32,6 @@ export const getTotalValueDisplay = _getTotalValueDisplay;
export const getTotalValueDisplayWei = _getTotalValueDisplayWei;
export const getEthmFromWeiDisplay = _getEthmFromWeiDisplay;
export const getGasDisplay = _getGasDisplay;
// links
export const getTxLink = _getTxLink;
function _getShortData (data) {
if (data.length <= 3) {
@ -111,11 +108,6 @@ function _getEthmFromWeiDisplay (weiHexString) {
return value.times(WEI_TO_ETH_MULTIPLIER).times(1e7).toFixed(5);
}
function _getTxLink (txHash, chain) {
const base = chain === 'morden' || chain === 'testnet' ? BASE_LINK_TX_MORDEN : BASE_LINK_TX_HOMESTEAD;
return base + txHash;
}
function _getGasDisplay (gas) {
return new BigNumber(gas).times(1e-7).toFormat(4);
}

View File

@ -83,9 +83,13 @@ module.exports = {
loader: 'style!css'
},
{
test: /\.(png|jpg|)$/,
test: /\.(png|jpg)$/,
loader: 'file-loader'
},
{
test: /\.ico$/,
loader: 'file-loader?name=[name].[ext]'
},
{
test: /\.(woff(2)|ttf|eot|svg|otf)(\?v=[0-9]\.[0-9]\.[0-9])?$/,
loader: 'file-loader'

js/webpack.npm.js (new file, 82 lines)
View File

@ -0,0 +1,82 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
const path = require('path');
const webpack = require('webpack');
const CopyWebpackPlugin = require('copy-webpack-plugin');
const packageJson = require('./package.json');
const ENV = process.env.NODE_ENV || 'development';
const isProd = ENV === 'production';
module.exports = {
context: path.join(__dirname, './src'),
entry: 'library.js',
output: {
path: path.join(__dirname, '.npmjs'),
filename: 'library.js',
libraryTarget: 'commonjs'
},
module: {
loaders: [
{
test: /(\.jsx|\.js)$/,
loader: 'babel',
exclude: /node_modules/
}
]
},
resolve: {
root: path.resolve('./src'),
extensions: ['', '.js']
},
plugins: (function () {
const plugins = [
new CopyWebpackPlugin([
{
from: '../parity.package.json',
to: 'package.json',
transform: function (content, path) {
const json = JSON.parse(content.toString());
json.version = packageJson.version;
return new Buffer(JSON.stringify(json, null, ' '), 'utf-8');
}
},
{
from: '../LICENSE'
},
{
from: '../parity.md',
to: 'README.md'
}
], { copyUnmodified: true })
];
if (isProd) {
plugins.push(new webpack.optimize.UglifyJsPlugin({
screwIe8: true,
compress: {
warnings: false
},
output: {
comments: false
}
}));
}
return plugins;
}())
};

View File

@ -14,6 +14,13 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
macro_rules! println_stderr(
($($arg:tt)*) => { {
let r = writeln!(&mut ::std::io::stderr(), $($arg)*);
r.expect("failed printing to stderr");
} }
);
macro_rules! otry {
($e: expr) => (
match $e {
@ -39,7 +46,7 @@ macro_rules! usage {
) => {
use toml;
use std::{fs, io, process};
use std::io::Read;
use std::io::{Read, Write};
use util::version;
use docopt::{Docopt, Error as DocoptError};
use helpers::replace_home;
@ -58,20 +65,20 @@ macro_rules! usage {
match self {
ArgsError::Docopt(e) => e.exit(),
ArgsError::Parsing(errors) => {
println!("There is an error in config file.");
println_stderr!("There is an error in config file.");
for e in &errors {
println!("{}", e);
println_stderr!("{}", e);
}
process::exit(2)
},
ArgsError::Decode(e) => {
println!("You might have supplied invalid parameters in config file.");
println!("{}", e);
println_stderr!("You might have supplied invalid parameters in config file.");
println_stderr!("{}", e);
process::exit(2)
},
ArgsError::Config(path, e) => {
println!("There was an error reading your config file at: {}", path);
println!("{}", e);
println_stderr!("There was an error reading your config file at: {}", path);
println_stderr!("{}", e);
process::exit(2)
}
}
@ -136,7 +143,7 @@ macro_rules! usage {
let config = match (fs::File::open(&config_file), raw_args.flag_config.is_some()) {
// Load config file
(Ok(mut file), _) => {
println!("Loading config file from {}", &config_file);
println_stderr!("Loading config file from {}", &config_file);
let mut config = String::new();
try!(file.read_to_string(&mut config).map_err(|e| ArgsError::Config(config_file, e)));
try!(Self::parse_config(&config))
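For reference, a minimal self-contained sketch (not part of this diff) of the idea behind the println_stderr! macro introduced above: routing config-file diagnostics through writeln! to stderr keeps stdout free for the program's normal output, which matters when these errors are reported before logging is set up.

use std::io::Write;

macro_rules! println_stderr {
    ($($arg:tt)*) => {{
        // writeln! returns an io::Result; failing to emit a diagnostic is treated as fatal.
        writeln!(&mut ::std::io::stderr(), $($arg)*).expect("failed printing to stderr");
    }};
}

fn main() {
    println!("regular output (stdout)");
    println_stderr!("There is an error in config file: {}", "example");
}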

View File

@ -184,7 +184,7 @@ impl ChainNotify for Informant {
let ripe = Instant::now() > *last_import + Duration::from_secs(1) && !importing;
let txs_imported = imported.iter()
.take(imported.len() - if ripe {1} else {0})
.filter_map(|h| self.client.block(BlockID::Hash(h.clone())))
.filter_map(|h| self.client.block(BlockID::Hash(*h)))
.map(|b| BlockView::new(&b).transactions_count())
.sum();

View File

@ -207,10 +207,10 @@ fn main() {
match start() {
Ok(result) => {
println!("{}", result);
info!("{}", result);
},
Err(err) => {
println!("{}", err);
info!("{}", err);
process::exit(1);
}
}

View File

@ -53,7 +53,7 @@ use url;
const SNAPSHOT_PERIOD: u64 = 10000;
// how many blocks to wait before starting a periodic snapshot.
const SNAPSHOT_HISTORY: u64 = 500;
const SNAPSHOT_HISTORY: u64 = 100;
#[derive(Debug, PartialEq)]
pub struct RunCmd {

View File

@ -58,7 +58,7 @@ impl Default for SyncConfig {
network_id: U256::from(1),
subprotocol_name: *b"eth",
fork_block: None,
warp_sync: true,
warp_sync: false,
}
}
}

View File

@ -29,7 +29,7 @@ use sync_io::SyncIo;
use blocks::BlockCollection;
const MAX_HEADERS_TO_REQUEST: usize = 128;
const MAX_BODIES_TO_REQUEST: usize = 128;
const MAX_BODIES_TO_REQUEST: usize = 64;
const MAX_RECEPITS_TO_REQUEST: usize = 128;
const SUBCHAIN_SIZE: u64 = 256;
const MAX_ROUND_PARENTS: usize = 32;

View File

@ -123,6 +123,7 @@ const MAX_NEW_BLOCK_AGE: BlockNumber = 20;
const MAX_TRANSACTION_SIZE: usize = 300*1024;
// Min number of blocks to be behind for a snapshot sync
const SNAPSHOT_RESTORE_THRESHOLD: BlockNumber = 100000;
const SNAPSHOT_MIN_PEERS: usize = 3;
const STATUS_PACKET: u8 = 0x00;
const NEW_BLOCK_HASHES_PACKET: u8 = 0x01;
@ -147,21 +148,27 @@ const SNAPSHOT_DATA_PACKET: u8 = 0x14;
pub const SNAPSHOT_SYNC_PACKET_COUNT: u8 = 0x15;
const HEADERS_TIMEOUT_SEC: f64 = 15f64;
const BODIES_TIMEOUT_SEC: f64 = 10f64;
const RECEIPTS_TIMEOUT_SEC: f64 = 10f64;
const FORK_HEADER_TIMEOUT_SEC: f64 = 3f64;
const SNAPSHOT_MANIFEST_TIMEOUT_SEC: f64 = 3f64;
const SNAPSHOT_DATA_TIMEOUT_SEC: f64 = 60f64;
const MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD: usize = 3;
const WAIT_PEERS_TIMEOUT_SEC: u64 = 5;
const STATUS_TIMEOUT_SEC: u64 = 5;
const HEADERS_TIMEOUT_SEC: u64 = 15;
const BODIES_TIMEOUT_SEC: u64 = 10;
const RECEIPTS_TIMEOUT_SEC: u64 = 10;
const FORK_HEADER_TIMEOUT_SEC: u64 = 3;
const SNAPSHOT_MANIFEST_TIMEOUT_SEC: u64 = 3;
const SNAPSHOT_DATA_TIMEOUT_SEC: u64 = 60;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
/// Sync state
pub enum SyncState {
/// Waiting for pv64 peers to start snapshot syncing
/// Collecting enough peers to start syncing.
WaitingPeers,
/// Waiting for snapshot manifest download
SnapshotManifest,
/// Downloading snapshot data
SnapshotData,
/// Waiting for snapshot restoration to complete
/// Waiting for snapshot restoration progress.
SnapshotWaiting,
/// Downloading new blocks
Blocks,
@ -276,7 +283,7 @@ struct PeerInfo {
/// Holds requested snapshot chunk hash if any.
asking_snapshot_data: Option<H256>,
/// Request timestamp
ask_time: f64,
ask_time: u64,
/// Holds a set of transactions recently sent to this peer to avoid spamming.
last_sent_transactions: HashSet<H256>,
/// Pending request is expired and result should be ignored
@ -324,10 +331,13 @@ pub struct ChainSync {
network_id: U256,
/// Optional fork block to check
fork_block: Option<(BlockNumber, H256)>,
/// Snapshot sync allowed.
snapshot_sync_enabled: bool,
/// Snapshot downloader.
snapshot: Snapshot,
/// Connected peers pending Status message.
/// Value is request timestamp.
handshaking_peers: HashMap<PeerId, u64>,
/// Sync start timestamp. Measured when first peer is connected
sync_start_time: Option<u64>,
}
type RlpResponseResult = Result<Option<(PacketId, RlpStream)>, PacketDecodeError>;
@ -337,20 +347,21 @@ impl ChainSync {
pub fn new(config: SyncConfig, chain: &BlockChainClient) -> ChainSync {
let chain_info = chain.chain_info();
let mut sync = ChainSync {
state: SyncState::Idle,
state: if config.warp_sync { SyncState::WaitingPeers } else { SyncState::Idle },
starting_block: chain.chain_info().best_block_number,
highest_block: None,
peers: HashMap::new(),
handshaking_peers: HashMap::new(),
active_peers: HashSet::new(),
new_blocks: BlockDownloader::new(false, &chain_info.best_block_hash, chain_info.best_block_number),
old_blocks: None,
last_sent_block_number: 0,
network_id: config.network_id,
fork_block: config.fork_block,
snapshot_sync_enabled: config.warp_sync,
snapshot: Snapshot::new(),
sync_start_time: None,
};
sync.init_downloaders(chain);
sync.update_targets(chain);
sync
}
@ -442,20 +453,67 @@ impl ChainSync {
self.active_peers.remove(&peer_id);
}
fn start_snapshot_sync(&mut self, io: &mut SyncIo, peer_id: PeerId) {
fn maybe_start_snapshot_sync(&mut self, io: &mut SyncIo) {
if self.state != SyncState::WaitingPeers {
return;
}
let best_block = io.chain().chain_info().best_block_number;
let (best_hash, max_peers, snapshot_peers) = {
// collect snapshot info from peers
let snapshots = self.peers.iter()
.filter(|&(_, p)| p.is_allowed() && p.snapshot_number.map_or(false, |sn| best_block < sn && (sn - best_block) > SNAPSHOT_RESTORE_THRESHOLD))
.filter_map(|(p, peer)| peer.snapshot_hash.map(|hash| (p, hash.clone())));
let mut snapshot_peers = HashMap::new();
let mut max_peers: usize = 0;
let mut best_hash = None;
for (p, hash) in snapshots {
let peers = snapshot_peers.entry(hash).or_insert_with(Vec::new);
peers.push(*p);
if peers.len() > max_peers {
max_peers = peers.len();
best_hash = Some(hash);
}
}
(best_hash, max_peers, snapshot_peers)
};
let timeout = self.sync_start_time.map_or(false, |t| ((time::precise_time_ns() - t) / 1_000_000_000) > WAIT_PEERS_TIMEOUT_SEC);
if let (Some(hash), Some(peers)) = (best_hash, best_hash.map_or(None, |h| snapshot_peers.get(&h))) {
if max_peers >= SNAPSHOT_MIN_PEERS {
trace!(target: "sync", "Starting confirmed snapshot sync {:?} with {:?}", hash, peers);
self.start_snapshot_sync(io, peers);
} else if timeout {
trace!(target: "sync", "Starting unconfirmed snapshot sync {:?} with {:?}", hash, peers);
self.start_snapshot_sync(io, peers);
}
} else if timeout {
trace!(target: "sync", "No snapshots found, starting full sync");
self.state = SyncState::Idle;
self.continue_sync(io);
}
}
fn start_snapshot_sync(&mut self, io: &mut SyncIo, peers: &[PeerId]) {
self.snapshot.clear();
self.request_snapshot_manifest(io, peer_id);
for p in peers {
if self.peers.get(p).map_or(false, |p| p.asking == PeerAsking::Nothing) {
self.request_snapshot_manifest(io, *p);
}
}
self.state = SyncState::SnapshotManifest;
}
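To make the selection logic above easier to follow, here is a standalone sketch (not part of this diff; names and types are illustrative) of how peers are grouped by the snapshot hash they advertise and how the best candidate is chosen: a hash needs at least SNAPSHOT_MIN_PEERS supporters for a confirmed start, and after WAIT_PEERS_TIMEOUT_SEC an unconfirmed snapshot is accepted from fewer peers.

use std::collections::HashMap;

const SNAPSHOT_MIN_PEERS: usize = 3;

// peers: peer id -> advertised snapshot hash (32 bytes)
fn best_snapshot(peers: &HashMap<u64, [u8; 32]>, wait_timed_out: bool) -> Option<([u8; 32], Vec<u64>)> {
    // Tally supporters per snapshot hash.
    let mut votes: HashMap<[u8; 32], Vec<u64>> = HashMap::new();
    for (&peer, &hash) in peers {
        votes.entry(hash).or_insert_with(Vec::new).push(peer);
    }
    // Pick the hash with the most supporters.
    match votes.into_iter().max_by_key(|entry| entry.1.len()) {
        Some((hash, supporters)) if supporters.len() >= SNAPSHOT_MIN_PEERS || wait_timed_out => {
            Some((hash, supporters))
        }
        _ => None, // keep waiting for peers (or fall back to full sync on timeout)
    }
}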
/// Restart sync disregarding the block queue status. May end up re-downloading up to QUEUE_SIZE blocks
pub fn restart(&mut self, io: &mut SyncIo) {
self.init_downloaders(io.chain());
self.update_targets(io.chain());
self.reset_and_continue(io);
}
/// Restart sync after bad block has been detected. May end up re-downloading up to QUEUE_SIZE blocks
fn init_downloaders(&mut self, chain: &BlockChainClient) {
/// Update sync after the blockchain has been changed externally.
pub fn update_targets(&mut self, chain: &BlockChainClient) {
// Do not assume that the block queue/chain still has our last_imported_block
let chain = chain.chain_info();
self.new_blocks = BlockDownloader::new(false, &chain.best_block_hash, chain.best_block_number);
@ -475,6 +533,7 @@ impl ChainSync {
/// Called by peer to report status
fn on_peer_status(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
self.handshaking_peers.remove(&peer_id);
let protocol_version: u8 = try!(r.val_at(0));
let warp_protocol = io.protocol_version(&WARP_SYNC_PROTOCOL_ID, peer_id) != 0;
let peer = PeerInfo {
@ -486,7 +545,7 @@ impl ChainSync {
asking: PeerAsking::Nothing,
asking_blocks: Vec::new(),
asking_hash: None,
ask_time: 0f64,
ask_time: 0,
last_sent_transactions: HashSet::new(),
expired: false,
confirmation: if self.fork_block.is_none() { ForkConfirmation::Confirmed } else { ForkConfirmation::Unconfirmed },
@ -496,7 +555,12 @@ impl ChainSync {
block_set: None,
};
trace!(target: "sync", "New peer {} (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{})", peer_id, peer.protocol_version, peer.network_id, peer.difficulty, peer.latest_hash, peer.genesis);
if self.sync_start_time.is_none() {
self.sync_start_time = Some(time::precise_time_ns());
}
trace!(target: "sync", "New peer {} (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{}, snapshot:{:?})",
peer_id, peer.protocol_version, peer.network_id, peer.difficulty, peer.latest_hash, peer.genesis, peer.snapshot_number);
if io.is_expired() {
trace!(target: "sync", "Status packet from expired session {}:{}", peer_id, io.peer_info(peer_id));
return Ok(());
@ -578,7 +642,7 @@ impl ChainSync {
}
let item_count = r.item_count();
trace!(target: "sync", "{} -> BlockHeaders ({} entries), state = {:?}, set = {:?}", peer_id, item_count, self.state, block_set);
if self.state == SyncState::Idle && self.old_blocks.is_none() {
if (self.state == SyncState::Idle || self.state == SyncState::WaitingPeers) && self.old_blocks.is_none() {
trace!(target: "sync", "Ignored unexpected block headers");
self.continue_sync(io);
return Ok(());
@ -875,7 +939,7 @@ impl ChainSync {
}
self.clear_peer_download(peer_id);
if !self.reset_peer_asking(peer_id, PeerAsking::SnapshotManifest) || self.state != SyncState::SnapshotManifest {
trace!(target: "sync", "{}: Ignored unexpected manifest", peer_id);
trace!(target: "sync", "{}: Ignored unexpected/expired manifest", peer_id);
self.continue_sync(io);
return Ok(());
}
@ -918,7 +982,7 @@ impl ChainSync {
match io.snapshot_service().status() {
RestorationStatus::Inactive | RestorationStatus::Failed => {
trace!(target: "sync", "{}: Snapshot restoration aborted", peer_id);
self.state = SyncState::Idle;
self.state = SyncState::WaitingPeers;
self.snapshot.clear();
self.continue_sync(io);
return Ok(());
@ -960,6 +1024,7 @@ impl ChainSync {
/// Called by peer when it is disconnecting
pub fn on_peer_aborting(&mut self, io: &mut SyncIo, peer: PeerId) {
trace!(target: "sync", "== Disconnecting {}: {}", peer, io.peer_info(peer));
self.handshaking_peers.remove(&peer);
if self.peers.contains_key(&peer) {
debug!(target: "sync", "Disconnected {}", peer);
self.clear_peer_download(peer);
@ -975,12 +1040,14 @@ impl ChainSync {
if let Err(e) = self.send_status(io, peer) {
debug!(target:"sync", "Error sending status request: {:?}", e);
io.disable_peer(peer);
} else {
self.handshaking_peers.insert(peer, time::precise_time_ns());
}
}
/// Resume downloading
fn continue_sync(&mut self, io: &mut SyncIo) {
if self.state != SyncState::Waiting && self.state != SyncState::SnapshotWaiting
if (self.state == SyncState::Blocks || self.state == SyncState::NewBlocks || self.state == SyncState::Idle)
&& !self.peers.values().any(|p| p.asking != PeerAsking::Nothing && p.block_set != Some(BlockSet::OldBlocks) && p.can_sync()) {
self.complete_sync(io);
}
@ -1040,11 +1107,9 @@ impl ChainSync {
let higher_difficulty = peer_difficulty.map_or(true, |pd| pd > syncing_difficulty);
if force || self.state == SyncState::NewBlocks || higher_difficulty || self.old_blocks.is_some() {
match self.state {
SyncState::Idle if self.snapshot_sync_enabled
&& chain_info.best_block_number < peer_snapshot_number
&& (peer_snapshot_number - chain_info.best_block_number) > SNAPSHOT_RESTORE_THRESHOLD => {
trace!(target: "sync", "Starting snapshot sync: {} vs {}", peer_snapshot_number, chain_info.best_block_number);
self.start_snapshot_sync(io, peer_id);
SyncState::WaitingPeers => {
trace!(target: "sync", "Checking snapshot sync: {} vs {}", peer_snapshot_number, chain_info.best_block_number);
self.maybe_start_snapshot_sync(io);
},
SyncState::Idle | SyncState::Blocks | SyncState::NewBlocks => {
if io.chain().queue_info().is_full() {
@ -1070,6 +1135,13 @@ impl ChainSync {
}
},
SyncState::SnapshotData => {
if let RestorationStatus::Ongoing { state_chunks: _, block_chunks: _, state_chunks_done, block_chunks_done, } = io.snapshot_service().status() {
if self.snapshot.done_chunks() - (state_chunks_done + block_chunks_done) as usize > MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD {
trace!(target: "sync", "Snapshot queue full, pausing sync");
self.state = SyncState::SnapshotWaiting;
return;
}
}
if peer_snapshot_hash.is_some() && peer_snapshot_hash == self.snapshot.snapshot_hash() {
self.request_snapshot_data(io, peer_id);
}
@ -1253,7 +1325,7 @@ impl ChainSync {
warn!(target:"sync", "Asking {:?} while requesting {:?}", peer.asking, asking);
}
peer.asking = asking;
peer.ask_time = time::precise_time_s();
peer.ask_time = time::precise_time_ns();
let result = if packet_id >= ETH_PACKET_COUNT {
sync.send_protocol(WARP_SYNC_PROTOCOL_ID, peer_id, packet_id, packet)
} else {
@ -1277,7 +1349,7 @@ impl ChainSync {
/// Called when peer sends us new transactions
fn on_peer_transactions(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
// Accept transactions only when fully synced
if !io.is_chain_queue_empty() || self.state != SyncState::Idle || self.state != SyncState::NewBlocks {
if !io.is_chain_queue_empty() || (self.state != SyncState::Idle && self.state != SyncState::NewBlocks) {
trace!(target: "sync", "{} Ignoring transactions while syncing", peer_id);
return Ok(());
}
@ -1437,7 +1509,7 @@ impl ChainSync {
}
trace!(target: "sync", "{} -> GetNodeData: return {} entries", peer_id, added);
let mut rlp = RlpStream::new_list(added);
for d in data.into_iter() {
for d in data {
rlp.append(&d);
}
Ok(Some((NODE_DATA_PACKET, rlp)))
@ -1590,17 +1662,18 @@ impl ChainSync {
#[cfg_attr(feature="dev", allow(match_same_arms))]
pub fn maintain_peers(&mut self, io: &mut SyncIo) {
let tick = time::precise_time_s();
let tick = time::precise_time_ns();
let mut aborting = Vec::new();
for (peer_id, peer) in &self.peers {
let elapsed = (tick - peer.ask_time) / 1_000_000_000;
let timeout = match peer.asking {
PeerAsking::BlockHeaders => (tick - peer.ask_time) > HEADERS_TIMEOUT_SEC,
PeerAsking::BlockBodies => (tick - peer.ask_time) > BODIES_TIMEOUT_SEC,
PeerAsking::BlockReceipts => (tick - peer.ask_time) > RECEIPTS_TIMEOUT_SEC,
PeerAsking::BlockHeaders => elapsed > HEADERS_TIMEOUT_SEC,
PeerAsking::BlockBodies => elapsed > BODIES_TIMEOUT_SEC,
PeerAsking::BlockReceipts => elapsed > RECEIPTS_TIMEOUT_SEC,
PeerAsking::Nothing => false,
PeerAsking::ForkHeader => (tick - peer.ask_time) > FORK_HEADER_TIMEOUT_SEC,
PeerAsking::SnapshotManifest => (tick - peer.ask_time) > SNAPSHOT_MANIFEST_TIMEOUT_SEC,
PeerAsking::SnapshotData => (tick - peer.ask_time) > SNAPSHOT_DATA_TIMEOUT_SEC,
PeerAsking::ForkHeader => elapsed > FORK_HEADER_TIMEOUT_SEC,
PeerAsking::SnapshotManifest => elapsed > SNAPSHOT_MANIFEST_TIMEOUT_SEC,
PeerAsking::SnapshotData => elapsed > SNAPSHOT_DATA_TIMEOUT_SEC,
};
if timeout {
trace!(target:"sync", "Timeout {}", peer_id);
@ -1611,16 +1684,42 @@ impl ChainSync {
for p in aborting {
self.on_peer_aborting(io, p);
}
// Check for handshake timeouts
for (peer, ask_time) in &self.handshaking_peers {
let elapsed = (tick - ask_time) / 1_000_000_000;
if elapsed > STATUS_TIMEOUT_SEC {
trace!(target:"sync", "Status timeout {}", peer);
io.disconnect_peer(*peer);
}
}
}
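The request timestamps above switch from floating-point seconds (time::precise_time_s) to integer nanoseconds (time::precise_time_ns), with per-request timeouts expressed in whole seconds. A minimal sketch (not part of this diff) of the resulting arithmetic:

const HEADERS_TIMEOUT_SEC: u64 = 15;

fn request_timed_out(now_ns: u64, ask_time_ns: u64, timeout_sec: u64) -> bool {
    // Convert the elapsed nanoseconds to whole seconds before comparing.
    // saturating_sub is a defensive tweak here; the code above uses plain subtraction.
    let elapsed_sec = now_ns.saturating_sub(ask_time_ns) / 1_000_000_000;
    elapsed_sec > timeout_sec
}

fn main() {
    let ask_time = 0u64;
    let now = 16 * 1_000_000_000; // 16 seconds later
    assert!(request_timed_out(now, ask_time, HEADERS_TIMEOUT_SEC));
}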
fn check_resume(&mut self, io: &mut SyncIo) {
if self.state == SyncState::Waiting && !io.chain().queue_info().is_full() && self.state == SyncState::Waiting {
self.state = SyncState::Blocks;
self.continue_sync(io);
} else if self.state == SyncState::SnapshotWaiting && io.snapshot_service().status() == RestorationStatus::Inactive {
trace!(target:"sync", "Snapshot restoration is complete");
self.restart(io);
self.continue_sync(io);
} else if self.state == SyncState::SnapshotWaiting {
match io.snapshot_service().status() {
RestorationStatus::Inactive => {
trace!(target:"sync", "Snapshot restoration is complete");
self.restart(io);
self.continue_sync(io);
},
RestorationStatus::Ongoing { state_chunks: _, block_chunks: _, state_chunks_done, block_chunks_done, } => {
if !self.snapshot.is_complete() && self.snapshot.done_chunks() - (state_chunks_done + block_chunks_done) as usize <= MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD {
trace!(target:"sync", "Resuming snapshot sync");
self.state = SyncState::SnapshotData;
self.continue_sync(io);
}
},
RestorationStatus::Failed => {
trace!(target: "sync", "Snapshot restoration aborted");
self.state = SyncState::WaitingPeers;
self.snapshot.clear();
self.continue_sync(io);
},
}
}
}
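The SnapshotData/SnapshotWaiting transitions above implement a simple back-pressure rule: pause chunk downloads once the downloader is more than MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD chunks ahead of the restoration service, and resume once it catches up. A standalone sketch (not part of this diff):

const MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD: usize = 3;

// downloaded: chunks fetched from peers (Snapshot::done_chunks)
// restored:   chunks already fed through the restoration service
fn should_pause(downloaded: usize, restored: usize) -> bool {
    // saturating_sub is defensive; the code above subtracts directly.
    downloaded.saturating_sub(restored) > MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD
}

fn should_resume(downloaded: usize, restored: usize, complete: bool) -> bool {
    !complete && downloaded.saturating_sub(restored) <= MAX_SNAPSHOT_CHUNKS_DOWNLOAD_AHEAD
}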
@ -1828,6 +1927,7 @@ impl ChainSync {
/// Maintain other peers. Send out any new blocks and transactions
pub fn maintain_sync(&mut self, io: &mut SyncIo) {
self.maybe_start_snapshot_sync(io);
self.check_resume(io);
}
@ -2050,7 +2150,7 @@ mod tests {
asking: PeerAsking::Nothing,
asking_blocks: Vec::new(),
asking_hash: None,
ask_time: 0f64,
ask_time: 0,
last_sent_transactions: HashSet::new(),
expired: false,
confirmation: super::ForkConfirmation::Confirmed,

View File

@ -113,7 +113,7 @@ impl Snapshot {
}
pub fn done_chunks(&self) -> usize {
self.total_chunks() - self.completed_chunks.len()
self.completed_chunks.len()
}
pub fn is_complete(&self) -> bool {
@ -165,6 +165,7 @@ mod test {
let mut snapshot = Snapshot::new();
let (manifest, mhash, state_chunks, block_chunks) = test_manifest();
snapshot.reset_to(&manifest, &mhash);
assert_eq!(snapshot.done_chunks(), 0);
assert!(snapshot.validate_chunk(&H256::random().to_vec()).is_err());
let requested: Vec<H256> = (0..40).map(|_| snapshot.needed_chunk().unwrap()).collect();
@ -194,6 +195,8 @@ mod test {
}
assert!(snapshot.is_complete());
assert_eq!(snapshot.done_chunks(), 40);
assert_eq!(snapshot.done_chunks(), snapshot.total_chunks());
assert_eq!(snapshot.snapshot_hash(), Some(manifest.into_rlp().sha3()));
}
}
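The fix above changes done_chunks() from total minus completed to the number of completed chunks, which is what the download-ahead check in the sync code expects; the new assertions pin the invariant that done_chunks() equals total_chunks() once the snapshot is complete. A tiny sketch (not part of this diff) of the corrected accounting:

// Illustrative progress tracker mirroring the corrected semantics.
struct ChunkProgress {
    completed: usize,
    pending: usize,
}

impl ChunkProgress {
    fn total_chunks(&self) -> usize { self.completed + self.pending }
    fn done_chunks(&self) -> usize { self.completed } // previously: total_chunks() - completed
    fn is_complete(&self) -> bool { self.pending == 0 }
}

fn main() {
    let p = ChunkProgress { completed: 40, pending: 0 };
    assert!(p.is_complete());
    assert_eq!(p.done_chunks(), p.total_chunks());
}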

View File

@ -18,6 +18,7 @@ use util::*;
use ethcore::client::{TestBlockChainClient, BlockChainClient, BlockID, EachBlockWith};
use chain::{SyncState};
use super::helpers::*;
use SyncConfig;
#[test]
fn two_peers() {
@ -156,6 +157,10 @@ fn restart() {
fn status_empty() {
let net = TestNet::new(2);
assert_eq!(net.peer(0).sync.read().status().state, SyncState::Idle);
let mut config = SyncConfig::default();
config.warp_sync = true;
let net = TestNet::new_with_config(2, config);
assert_eq!(net.peer(0).sync.read().status().state, SyncState::WaitingPeers);
}
#[test]

View File

@ -127,20 +127,24 @@ pub struct TestNet {
impl TestNet {
pub fn new(n: usize) -> TestNet {
Self::new_with_fork(n, None)
Self::new_with_config(n, SyncConfig::default())
}
pub fn new_with_fork(n: usize, fork: Option<(BlockNumber, H256)>) -> TestNet {
let mut config = SyncConfig::default();
config.fork_block = fork;
Self::new_with_config(n, config)
}
pub fn new_with_config(n: usize, config: SyncConfig) -> TestNet {
let mut net = TestNet {
peers: Vec::new(),
started: false,
};
for _ in 0..n {
let chain = TestBlockChainClient::new();
let mut config = SyncConfig::default();
config.fork_block = fork;
let ss = Arc::new(TestSnapshotService::new());
let sync = ChainSync::new(config, &chain);
let sync = ChainSync::new(config.clone(), &chain);
net.peers.push(TestPeer {
sync: RwLock::new(sync),
snapshot_service: ss,
@ -164,7 +168,7 @@ impl TestNet {
for client in 0..self.peers.len() {
if peer != client {
let mut p = self.peers.get_mut(peer).unwrap();
p.sync.write().restart(&mut TestIo::new(&mut p.chain, &p.snapshot_service, &mut p.queue, Some(client as PeerId)));
p.sync.write().update_targets(&mut p.chain);
p.sync.write().on_peer_connected(&mut TestIo::new(&mut p.chain, &p.snapshot_service, &mut p.queue, Some(client as PeerId)), client as PeerId);
}
}

View File

@ -19,6 +19,7 @@ use ethcore::snapshot::{SnapshotService, ManifestData, RestorationStatus};
use ethcore::header::BlockNumber;
use ethcore::client::{EachBlockWith};
use super::helpers::*;
use SyncConfig;
pub struct TestSnapshotService {
manifest: Option<ManifestData>,
@ -122,11 +123,16 @@ impl SnapshotService for TestSnapshotService {
#[test]
fn snapshot_sync() {
::env_logger::init().ok();
let mut net = TestNet::new(2);
net.peer_mut(0).snapshot_service = Arc::new(TestSnapshotService::new_with_snapshot(16, H256::new(), 500000));
net.peer_mut(0).chain.add_blocks(1, EachBlockWith::Nothing);
net.sync_steps(19); // status + manifest + chunks
assert_eq!(net.peer(1).snapshot_service.state_restoration_chunks.lock().len(), net.peer(0).snapshot_service.manifest.as_ref().unwrap().state_hashes.len());
assert_eq!(net.peer(1).snapshot_service.block_restoration_chunks.lock().len(), net.peer(0).snapshot_service.manifest.as_ref().unwrap().block_hashes.len());
let mut config = SyncConfig::default();
config.warp_sync = true;
let mut net = TestNet::new_with_config(5, config);
let snapshot_service = Arc::new(TestSnapshotService::new_with_snapshot(16, H256::new(), 500000));
for i in 0..4 {
net.peer_mut(i).snapshot_service = snapshot_service.clone();
net.peer_mut(i).chain.add_blocks(1, EachBlockWith::Nothing);
}
net.sync_steps(50);
assert_eq!(net.peer(4).snapshot_service.state_restoration_chunks.lock().len(), net.peer(0).snapshot_service.manifest.as_ref().unwrap().state_hashes.len());
assert_eq!(net.peer(4).snapshot_service.block_restoration_chunks.lock().len(), net.peer(0).snapshot_service.manifest.as_ref().unwrap().block_hashes.len());
}

View File

@ -2,7 +2,7 @@
# Running Parity Full Test Suite
FEATURES="json-tests"
OPTIONS="--release"
OPTIONS="--verbose --release"
case $1 in
--no-json)

View File

@ -7,7 +7,7 @@ version = "1.4.0"
authors = ["Ethcore <admin@ethcore.io>"]
[dependencies]
mio = { git = "https://github.com/ethcore/mio", branch = "v0.5.x" }
mio = { git = "https://github.com/carllerche/mio" }
crossbeam = "0.2"
parking_lot = "0.3"
log = "0.3"

View File

@ -65,7 +65,8 @@ mod service;
mod worker;
mod panics;
use mio::{EventLoop, Token};
use mio::{Token};
use mio::deprecated::{EventLoop, NotifyError};
use std::fmt;
pub use worker::LOCAL_STACK_SIZE;
@ -96,8 +97,8 @@ impl From<::std::io::Error> for IoError {
}
}
impl<Message> From<::mio::NotifyError<service::IoMessage<Message>>> for IoError where Message: Send + Clone {
fn from(_err: ::mio::NotifyError<service::IoMessage<Message>>) -> IoError {
impl<Message> From<NotifyError<service::IoMessage<Message>>> for IoError where Message: Send + Clone {
fn from(_err: NotifyError<service::IoMessage<Message>>) -> IoError {
IoError::Mio(::std::io::Error::new(::std::io::ErrorKind::ConnectionAborted, "Network IO notification error"))
}
}

View File

@ -18,13 +18,16 @@ use std::sync::{Arc, Weak};
use std::thread::{self, JoinHandle};
use std::collections::HashMap;
use mio::*;
use mio::timer::{Timeout};
use mio::deprecated::{EventLoop, Handler, Sender, EventLoopBuilder};
use crossbeam::sync::chase_lev;
use slab::Slab;
use {IoError, IoHandler};
use worker::{Worker, Work, WorkType};
use panics::*;
use parking_lot::{RwLock};
use parking_lot::{RwLock, Mutex};
use std::sync::{Condvar as SCondvar, Mutex as SMutex};
use std::time::Duration;
/// Timer ID
pub type TimerToken = usize;
@ -223,9 +226,9 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
type Timeout = Token;
type Message = IoMessage<Message>;
fn ready(&mut self, _event_loop: &mut EventLoop<Self>, token: Token, events: EventSet) {
let handler_index = token.as_usize() / TOKENS_PER_HANDLER;
let token_id = token.as_usize() % TOKENS_PER_HANDLER;
fn ready(&mut self, _event_loop: &mut EventLoop<Self>, token: Token, events: Ready) {
let handler_index = token.0 / TOKENS_PER_HANDLER;
let token_id = token.0 % TOKENS_PER_HANDLER;
if let Some(handler) = self.handlers.read().get(handler_index) {
if events.is_hup() {
self.worker_channel.push(Work { work_type: WorkType::Hup, token: token_id, handler: handler.clone(), handler_id: handler_index });
@ -243,15 +246,15 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
}
fn timeout(&mut self, event_loop: &mut EventLoop<Self>, token: Token) {
let handler_index = token.as_usize() / TOKENS_PER_HANDLER;
let token_id = token.as_usize() % TOKENS_PER_HANDLER;
let handler_index = token.0 / TOKENS_PER_HANDLER;
let token_id = token.0 % TOKENS_PER_HANDLER;
if let Some(handler) = self.handlers.read().get(handler_index) {
if let Some(timer) = self.timers.read().get(&token.as_usize()) {
if let Some(timer) = self.timers.read().get(&token.0) {
if timer.once {
self.timers.write().remove(&token_id);
event_loop.clear_timeout(timer.timeout);
event_loop.clear_timeout(&timer.timeout);
} else {
event_loop.timeout_ms(token, timer.delay).expect("Error re-registering user timer");
event_loop.timeout(token, Duration::from_millis(timer.delay)).expect("Error re-registering user timer");
}
self.worker_channel.push(Work { work_type: WorkType::Timeout, token: token_id, handler: handler.clone(), handler_id: handler_index });
self.work_ready.notify_all();
@ -277,18 +280,18 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
let to_remove: Vec<_> = timers.keys().cloned().filter(|timer_id| timer_id / TOKENS_PER_HANDLER == handler_id).collect();
for timer_id in to_remove {
let timer = timers.remove(&timer_id).expect("to_remove only contains keys from timers; qed");
event_loop.clear_timeout(timer.timeout);
event_loop.clear_timeout(&timer.timeout);
}
},
IoMessage::AddTimer { handler_id, token, delay, once } => {
let timer_id = token + handler_id * TOKENS_PER_HANDLER;
let timeout = event_loop.timeout_ms(Token(timer_id), delay).expect("Error registering user timer");
let timeout = event_loop.timeout(Token(timer_id), Duration::from_millis(delay)).expect("Error registering user timer");
self.timers.write().insert(timer_id, UserTimer { delay: delay, timeout: timeout, once: once });
},
IoMessage::RemoveTimer { handler_id, token } => {
let timer_id = token + handler_id * TOKENS_PER_HANDLER;
if let Some(timer) = self.timers.write().remove(&timer_id) {
event_loop.clear_timeout(timer.timeout);
event_loop.clear_timeout(&timer.timeout);
}
},
IoMessage::RegisterStream { handler_id, token } => {
@ -302,7 +305,7 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
// unregister a timer associated with the token (if any)
let timer_id = token + handler_id * TOKENS_PER_HANDLER;
if let Some(timer) = self.timers.write().remove(&timer_id) {
event_loop.clear_timeout(timer.timeout);
event_loop.clear_timeout(&timer.timeout);
}
}
},
@ -391,7 +394,7 @@ impl<Message> IoChannel<Message> where Message: Send + Clone + Sync + 'static {
pub struct IoService<Message> where Message: Send + Sync + Clone + 'static {
panic_handler: Arc<PanicHandler>,
thread: Option<JoinHandle<()>>,
host_channel: Sender<IoMessage<Message>>,
host_channel: Mutex<Sender<IoMessage<Message>>>,
handlers: Arc<RwLock<Slab<Arc<IoHandler<Message>>, HandlerId>>>,
}
@ -405,9 +408,9 @@ impl<Message> IoService<Message> where Message: Send + Sync + Clone + 'static {
/// Starts IO event loop
pub fn start() -> Result<IoService<Message>, IoError> {
let panic_handler = PanicHandler::new_in_arc();
let mut config = EventLoopConfig::new();
let mut config = EventLoopBuilder::new();
config.messages_per_tick(1024);
let mut event_loop = EventLoop::configured(config).expect("Error creating event loop");
let mut event_loop = config.build().expect("Error creating event loop");
let channel = event_loop.channel();
let panic = panic_handler.clone();
let handlers = Arc::new(RwLock::new(Slab::new(MAX_HANDLERS)));
@ -421,14 +424,14 @@ impl<Message> IoService<Message> where Message: Send + Sync + Clone + 'static {
Ok(IoService {
panic_handler: panic_handler,
thread: Some(thread),
host_channel: channel,
host_channel: Mutex::new(channel),
handlers: handlers,
})
}
/// Register an IO handler with the event loop.
pub fn register_handler(&self, handler: Arc<IoHandler<Message>+Send>) -> Result<(), IoError> {
try!(self.host_channel.send(IoMessage::AddHandler {
try!(self.host_channel.lock().send(IoMessage::AddHandler {
handler: handler,
}));
Ok(())
@ -436,20 +439,20 @@ impl<Message> IoService<Message> where Message: Send + Sync + Clone + 'static {
/// Send a message over the network. Normally `HostIo::send` should be used. This can be used from non-io threads.
pub fn send_message(&self, message: Message) -> Result<(), IoError> {
try!(self.host_channel.send(IoMessage::UserMessage(message)));
try!(self.host_channel.lock().send(IoMessage::UserMessage(message)));
Ok(())
}
/// Create a new message channel
pub fn channel(&self) -> IoChannel<Message> {
IoChannel::new(self.host_channel.clone(), Arc::downgrade(&self.handlers))
IoChannel::new(self.host_channel.lock().clone(), Arc::downgrade(&self.handlers))
}
}
impl<Message> Drop for IoService<Message> where Message: Send + Sync + Clone {
fn drop(&mut self) {
trace!(target: "shutdown", "[IoService] Closing...");
self.host_channel.send(IoMessage::Shutdown).unwrap_or_else(|e| warn!("Error on IO service shutdown: {:?}", e));
self.host_channel.lock().send(IoMessage::Shutdown).unwrap_or_else(|e| warn!("Error on IO service shutdown: {:?}", e));
self.thread.take().unwrap().join().ok();
trace!(target: "shutdown", "[IoService] Closed.");
}
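Elsewhere in this file the ready/timeout handlers index into the handler table by splitting the raw token value (token.0 in the new mio API, token.as_usize() in the old one) into a handler index and a per-handler token. A minimal sketch of that mapping (not part of this diff; the constant value is illustrative):

// Each registered handler owns a contiguous block of tokens.
const TOKENS_PER_HANDLER: usize = 16384; // illustrative; the crate defines its own value

fn split_token(raw: usize) -> (usize, usize) {
    let handler_index = raw / TOKENS_PER_HANDLER;
    let token_id = raw % TOKENS_PER_HANDLER;
    (handler_index, token_id)
}

fn main() {
    let (handler, local) = split_token(2 * TOKENS_PER_HANDLER + 7);
    assert_eq!((handler, local), (2, 7));
}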

View File

@ -8,7 +8,8 @@ authors = ["Ethcore <admin@ethcore.io>"]
[dependencies]
log = "0.3"
mio = { git = "https://github.com/ethcore/mio", branch = "v0.5.x" }
mio = { git = "https://github.com/carllerche/mio" }
bytes = "0.3.0"
rand = "0.3.12"
time = "0.1.34"
tiny-keccak = "1.0"

View File

@ -18,7 +18,8 @@ use std::sync::Arc;
use std::collections::VecDeque;
use std::net::SocketAddr;
use std::sync::atomic::{AtomicBool, Ordering as AtomicOrdering};
use mio::{Handler, Token, EventSet, EventLoop, PollOpt, TryRead, TryWrite};
use mio::{Token, Ready, PollOpt};
use mio::deprecated::{Handler, EventLoop, TryRead, TryWrite};
use mio::tcp::*;
use util::hash::*;
use util::sha3::*;
@ -34,6 +35,7 @@ use rcrypto::aessafe::*;
use rcrypto::symmetriccipher::*;
use rcrypto::buffer::*;
use tiny_keccak::Keccak;
use bytes::{Buf, MutBuf};
use crypto;
const ENCRYPTED_HEADER_LEN: usize = 32;
@ -57,7 +59,7 @@ pub struct GenericConnection<Socket: GenericSocket> {
/// Send out packets FIFO
send_queue: VecDeque<Cursor<Bytes>>,
/// Event flags this connection expects
interest: EventSet,
interest: Ready,
/// Shared network statistics
stats: Arc<NetworkStats>,
/// Registered flag
@ -81,8 +83,9 @@ impl<Socket: GenericSocket> GenericConnection<Socket> {
let sock_ref = <Socket as Read>::by_ref(&mut self.socket);
loop {
let max = self.rec_size - self.rec_buf.len();
match sock_ref.take(max as u64).try_read_buf(&mut self.rec_buf) {
match sock_ref.take(max as u64).try_read(unsafe { self.rec_buf.mut_bytes() }) {
Ok(Some(size)) if size != 0 => {
unsafe { self.rec_buf.advance(size); }
self.stats.inc_recv(size);
trace!(target:"network", "{}: Read {} of {} bytes", self.token, self.rec_buf.len(), self.rec_size);
if self.rec_size != 0 && self.rec_buf.len() == self.rec_size {
@ -109,7 +112,7 @@ impl<Socket: GenericSocket> GenericConnection<Socket> {
trace!(target:"network", "{}: Sending {} bytes", self.token, data.len());
self.send_queue.push_back(Cursor::new(data));
if !self.interest.is_writable() {
self.interest.insert(EventSet::writable());
self.interest.insert(Ready::writable());
}
io.update_registration(self.token).ok();
}
@ -128,16 +131,19 @@ impl<Socket: GenericSocket> GenericConnection<Socket> {
{
let buf = self.send_queue.front_mut().unwrap();
let send_size = buf.get_ref().len();
if (buf.position() as usize) >= send_size {
let pos = buf.position() as usize;
if (pos as usize) >= send_size {
warn!(target:"net", "Unexpected connection data");
return Ok(WriteStatus::Complete)
}
match self.socket.try_write_buf(buf) {
Ok(Some(size)) if (buf.position() as usize) < send_size => {
let buf = buf as &mut Buf;
match self.socket.try_write(buf.bytes()) {
Ok(Some(size)) if (pos + size) < send_size => {
buf.advance(size);
self.stats.inc_send(size);
Ok(WriteStatus::Ongoing)
},
Ok(Some(size)) if (buf.position() as usize) == send_size => {
Ok(Some(size)) if (pos + size) == send_size => {
self.stats.inc_send(size);
trace!(target:"network", "{}: Wrote {} bytes", self.token, send_size);
Ok(WriteStatus::Complete)
@ -151,7 +157,7 @@ impl<Socket: GenericSocket> GenericConnection<Socket> {
self.send_queue.pop_front();
}
if self.send_queue.is_empty() {
self.interest.remove(EventSet::writable());
self.interest.remove(Ready::writable());
try!(io.update_registration(self.token));
}
Ok(r)
@ -171,7 +177,7 @@ impl Connection {
send_queue: VecDeque::new(),
rec_buf: Bytes::new(),
rec_size: 0,
interest: EventSet::hup() | EventSet::readable(),
interest: Ready::hup() | Ready::readable(),
stats: stats,
registered: AtomicBool::new(false),
}
@ -205,7 +211,7 @@ impl Connection {
rec_buf: Vec::new(),
rec_size: 0,
send_queue: self.send_queue.clone(),
interest: EventSet::hup(),
interest: Ready::hup(),
stats: self.stats.clone(),
registered: AtomicBool::new(false),
})
@ -499,7 +505,7 @@ mod tests {
use std::sync::atomic::AtomicBool;
use super::super::stats::*;
use std::io::{Read, Write, Error, Cursor, ErrorKind};
use mio::{EventSet};
use mio::{Ready};
use std::collections::VecDeque;
use util::bytes::*;
use devtools::*;
@ -545,7 +551,7 @@ mod tests {
send_queue: VecDeque::new(),
rec_buf: Bytes::new(),
rec_size: 0,
interest: EventSet::hup() | EventSet::readable(),
interest: Ready::hup() | Ready::readable(),
stats: Arc::<NetworkStats>::new(NetworkStats::new()),
registered: AtomicBool::new(false),
}
@ -568,7 +574,7 @@ mod tests {
send_queue: VecDeque::new(),
rec_buf: Bytes::new(),
rec_size: 0,
interest: EventSet::hup() | EventSet::readable(),
interest: Ready::hup() | Ready::readable(),
stats: Arc::<NetworkStats>::new(NetworkStats::new()),
registered: AtomicBool::new(false),
}

View File

@ -20,6 +20,7 @@ use std::collections::{HashSet, HashMap, BTreeMap, VecDeque};
use std::mem;
use std::default::Default;
use mio::*;
use mio::deprecated::{Handler, EventLoop};
use mio::udp::*;
use util::sha3::*;
use time;
@ -57,6 +58,7 @@ pub struct NodeEntry {
pub struct BucketEntry {
pub address: NodeEntry,
pub id_hash: H256,
pub timeout: Option<u64>,
}
@ -85,6 +87,7 @@ struct Datagramm {
pub struct Discovery {
id: NodeId,
id_hash: H256,
secret: Secret,
public_endpoint: NodeEndpoint,
udp_socket: UdpSocket,
@ -106,9 +109,10 @@ pub struct TableUpdates {
impl Discovery {
pub fn new(key: &KeyPair, listen: SocketAddr, public: NodeEndpoint, token: StreamToken, allow_ips: AllowIP) -> Discovery {
let socket = UdpSocket::bound(&listen).expect("Error binding UDP socket");
let socket = UdpSocket::bind(&listen).expect("Error binding UDP socket");
Discovery {
id: key.public().clone(),
id_hash: key.public().sha3(),
secret: key.secret().clone(),
public_endpoint: public,
token: token,
@ -150,8 +154,9 @@ impl Discovery {
fn update_node(&mut self, e: NodeEntry) {
trace!(target: "discovery", "Inserting {:?}", &e);
let id_hash = e.id.sha3();
let ping = {
let mut bucket = self.node_buckets.get_mut(Discovery::distance(&self.id, &e.id) as usize).unwrap();
let mut bucket = self.node_buckets.get_mut(Discovery::distance(&self.id_hash, &id_hash) as usize).unwrap();
let updated = if let Some(node) = bucket.nodes.iter_mut().find(|n| n.address.id == e.id) {
node.address = e.clone();
node.timeout = None;
@ -159,7 +164,7 @@ impl Discovery {
} else { false };
if !updated {
bucket.nodes.push_front(BucketEntry { address: e, timeout: None });
bucket.nodes.push_front(BucketEntry { address: e, timeout: None, id_hash: id_hash, });
}
if bucket.nodes.len() > BUCKET_SIZE {
@ -174,7 +179,7 @@ impl Discovery {
}
fn clear_ping(&mut self, id: &NodeId) {
let mut bucket = self.node_buckets.get_mut(Discovery::distance(&self.id, id) as usize).unwrap();
let mut bucket = self.node_buckets.get_mut(Discovery::distance(&self.id_hash, &id.sha3()) as usize).unwrap();
if let Some(node) = bucket.nodes.iter_mut().find(|n| &n.address.id == id) {
node.timeout = None;
}
@ -224,8 +229,8 @@ impl Discovery {
self.discovery_round += 1;
}
fn distance(a: &NodeId, b: &NodeId) -> u32 {
let d = a.sha3() ^ b.sha3();
fn distance(a: &H256, b: &H256) -> u32 {
let d = *a ^ *b;
let mut ret:u32 = 0;
for i in 0..32 {
let mut v: u8 = d[i];
@ -279,11 +284,12 @@ impl Discovery {
fn nearest_node_entries(target: &NodeId, buckets: &[NodeBucket]) -> Vec<NodeEntry> {
let mut found: BTreeMap<u32, Vec<&NodeEntry>> = BTreeMap::new();
let mut count = 0;
let target_hash = target.sha3();
// Sort nodes by distance to target
for bucket in buckets {
for node in &bucket.nodes {
let distance = Discovery::distance(target, &node.address.id);
let distance = Discovery::distance(&target_hash, &node.id_hash);
found.entry(distance).or_insert_with(Vec::new).push(&node.address);
if count == BUCKET_SIZE {
// delete the most distant element
@ -527,15 +533,15 @@ impl Discovery {
}
pub fn register_socket<Host:Handler>(&self, event_loop: &mut EventLoop<Host>) -> Result<(), NetworkError> {
event_loop.register(&self.udp_socket, Token(self.token), EventSet::all(), PollOpt::edge()).expect("Error registering UDP socket");
event_loop.register(&self.udp_socket, Token(self.token), Ready::all(), PollOpt::edge()).expect("Error registering UDP socket");
Ok(())
}
pub fn update_registration<Host:Handler>(&self, event_loop: &mut EventLoop<Host>) -> Result<(), NetworkError> {
let registration = if !self.send_queue.is_empty() {
EventSet::readable() | EventSet::writable()
Ready::readable() | Ready::writable()
} else {
EventSet::readable()
Ready::readable()
};
event_loop.reregister(&self.udp_socket, Token(self.token), registration, PollOpt::edge()).expect("Error reregistering UDP socket");
Ok(())
@ -546,6 +552,7 @@ impl Discovery {
mod tests {
use super::*;
use util::hash::*;
use util::sha3::*;
use std::net::*;
use node_table::*;
use std::str::FromStr;
@ -626,7 +633,8 @@ mod tests {
for _ in 0..(16 + 10) {
buckets[0].nodes.push_back(BucketEntry {
address: NodeEntry { id: NodeId::new(), endpoint: ep.clone() },
timeout: None
timeout: None,
id_hash: NodeId::new().sha3(),
});
}
let nearest = Discovery::nearest_node_entries(&NodeId::new(), &buckets);
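The distance change in this file XORs precomputed sha3 hashes (cached as id_hash on each bucket entry) instead of re-hashing both node ids on every call. The full metric is not shown in this hunk; as one common formulation, a Kademlia-style log2 distance over two 32-byte hashes can be sketched as follows (not part of this diff):

fn log_distance(a: &[u8; 32], b: &[u8; 32]) -> u32 {
    for i in 0..32 {
        let x = a[i] ^ b[i];
        if x != 0 {
            // 8 bits per remaining byte, plus the position of the highest set bit in this byte.
            return (31 - i as u32) * 8 + (8 - x.leading_zeros());
        }
    }
    0 // identical hashes
}

fn main() {
    let a = [0u8; 32];
    let mut b = [0u8; 32];
    b[31] = 1;
    assert_eq!(log_distance(&a, &b), 1);
}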

View File

@ -26,6 +26,7 @@ use std::io::{Read, Write};
use std::fs;
use ethkey::{KeyPair, Secret, Random, Generator};
use mio::*;
use mio::deprecated::{EventLoop};
use mio::tcp::*;
use util::hash::*;
use util::Hashable;
@ -61,7 +62,7 @@ const SYS_TIMER: usize = LAST_SESSION + 1;
// Timeouts
const MAINTENANCE_TIMEOUT: u64 = 1000;
const DISCOVERY_REFRESH_TIMEOUT: u64 = 7200;
const DISCOVERY_REFRESH_TIMEOUT: u64 = 60_000;
const DISCOVERY_ROUND_TIMEOUT: u64 = 300;
const NODE_TABLE_TIMEOUT: u64 = 300_000;
@ -744,10 +745,9 @@ impl Host {
trace!(target: "network", "Accepting incoming connection");
loop {
let socket = match self.tcp_listener.lock().accept() {
Ok(None) => break,
Ok(Some((sock, _addr))) => sock,
Ok((sock, _addr)) => sock,
Err(e) => {
warn!("Error accepting connection: {:?}", e);
debug!(target: "network", "Error accepting connection: {:?}", e);
break
},
};
@ -801,29 +801,31 @@ impl Host {
},
Ok(SessionData::Ready) => {
self.num_sessions.fetch_add(1, AtomicOrdering::SeqCst);
if !s.info.originated {
let session_count = self.session_count();
let (max_peers, reserved_only) = {
let info = self.info.read();
let mut max_peers = info.config.max_peers;
for cap in s.info.capabilities.iter() {
if let Some(num) = info.config.reserved_protocols.get(&cap.protocol) {
max_peers += *num;
break;
}
}
(max_peers, info.config.non_reserved_mode == NonReservedPeerMode::Deny)
};
if session_count >= max_peers as usize || reserved_only {
// only proceed if the connecting peer is reserved.
if !self.reserved_nodes.read().contains(s.id().unwrap()) {
s.disconnect(io, DisconnectReason::TooManyPeers);
return;
let session_count = self.session_count();
let (min_peers, max_peers, reserved_only) = {
let info = self.info.read();
let mut max_peers = info.config.max_peers;
for cap in s.info.capabilities.iter() {
if let Some(num) = info.config.reserved_protocols.get(&cap.protocol) {
max_peers += *num;
break;
}
}
(info.config.min_peers as usize, max_peers as usize, info.config.non_reserved_mode == NonReservedPeerMode::Deny)
};
// Add it no node table
if reserved_only ||
(s.info.originated && session_count >= min_peers) ||
(!s.info.originated && session_count >= max_peers) {
// only proceed if the connecting peer is reserved.
if !self.reserved_nodes.read().contains(s.id().unwrap()) {
s.disconnect(io, DisconnectReason::TooManyPeers);
return;
}
}
// Add it to the node table
if !s.info.originated {
if let Ok(address) = s.remote_addr() {
let entry = NodeEntry { id: s.id().unwrap().clone(), endpoint: NodeEndpoint { address: address, udp_port: address.port() } };
self.nodes.write().add_node(Node::new(entry.id.clone(), entry.endpoint.clone()));
@ -1101,7 +1103,7 @@ impl IoHandler<NetworkIoMessage> for Host {
}
}
DISCOVERY => self.discovery.lock().as_ref().unwrap().register_socket(event_loop).expect("Error registering discovery socket"),
TCP_ACCEPT => event_loop.register(&*self.tcp_listener.lock(), Token(TCP_ACCEPT), EventSet::all(), PollOpt::edge()).expect("Error registering stream"),
TCP_ACCEPT => event_loop.register(&*self.tcp_listener.lock(), Token(TCP_ACCEPT), Ready::all(), PollOpt::edge()).expect("Error registering stream"),
_ => warn!("Unexpected stream registration")
}
}
@ -1129,7 +1131,7 @@ impl IoHandler<NetworkIoMessage> for Host {
}
}
DISCOVERY => self.discovery.lock().as_ref().unwrap().update_registration(event_loop).expect("Error reregistering discovery socket"),
TCP_ACCEPT => event_loop.reregister(&*self.tcp_listener.lock(), Token(TCP_ACCEPT), EventSet::all(), PollOpt::edge()).expect("Error reregistering stream"),
TCP_ACCEPT => event_loop.reregister(&*self.tcp_listener.lock(), Token(TCP_ACCEPT), Ready::all(), PollOpt::edge()).expect("Error reregistering stream"),
_ => warn!("Unexpected stream update")
}
}
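The session-acceptance change in this file can be summarised as: once a session is Ready, egress (locally originated) connections are capped at min_peers, ingress connections at max_peers (plus any reserved-protocol allowance), and reserved peers are always kept. A standalone sketch (not part of this diff) of that rule:

fn should_disconnect(
    originated: bool,      // did we dial this peer?
    session_count: usize,
    min_peers: usize,
    max_peers: usize,
    reserved_only: bool,   // non-reserved peers are denied outright
    peer_is_reserved: bool,
) -> bool {
    let over_limit = reserved_only
        || (originated && session_count >= min_peers)
        || (!originated && session_count >= max_peers);
    over_limit && !peer_is_reserved
}

fn main() {
    // An incoming peer is dropped once max_peers is reached, unless it is reserved.
    assert!(should_disconnect(false, 25, 10, 25, false, false));
    assert!(!should_disconnect(false, 25, 10, 25, false, true));
}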

View File

@ -70,6 +70,7 @@ extern crate slab;
extern crate ethkey;
extern crate ethcrypto as crypto;
extern crate rlp;
extern crate bytes;
#[macro_use]
extern crate log;

View File

@ -19,6 +19,7 @@ use std::net::SocketAddr;
use std::cmp::Ordering;
use std::sync::*;
use mio::*;
use mio::deprecated::{Handler, EventLoop};
use mio::tcp::*;
use util::hash::*;
use rlp::*;

View File

@ -133,7 +133,7 @@ impl<'db> TrieDB<'db> {
}
/// Get the data of the root node.
fn root_data<'a, R: 'a + Recorder>(&self, r: &'a mut R) -> super::Result<DBValue> {
fn root_data<R: Recorder>(&self, r: &mut R) -> super::Result<DBValue> {
self.db.get(self.root).ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root)))
.map(|node| { r.record(self.root, &*node, 0); node })
}