Merge branch 'master' into fix-ci

commit 12df3adfeb
Gav Wood, 2016-12-11 00:02:48 +01:00
GPG Key ID: C49C1ACA1CC9B252 (no known key found for this signature in database)
167 changed files with 3284 additions and 1089 deletions


@@ -423,12 +423,8 @@ test-rust-stable:
   before_script:
     - git submodule update --init --recursive
     - export RUST_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep -v ^js/ | wc -l)
-    - export JS_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep ^js/ | wc -l)
-    - echo "rust/js modified: $RUST_FILES_MODIFIED / $JS_FILES_MODIFIED"
-    - if [ $JS_FILES_MODIFIED = 0 ]; then echo "Skipping JS deps install since no JS files modified."; else ./js/scripts/install-deps.sh;fi
   script:
     - export RUST_BACKTRACE=1
-    - if [ $JS_FILES_MODIFIED = 0 ]; then echo "Skipping JS lint since no JS files modified."; else ./js/scripts/lint.sh && ./js/scripts/test.sh && ./js/scripts/build.sh; fi
     - if [ $RUST_FILES_MODIFIED = 0 ]; then echo "Skipping Rust tests since no Rust files modified."; else ./test.sh $CARGOFLAGS; fi
   tags:
     - rust
@@ -439,11 +435,8 @@ js-test:
   before_script:
     - git submodule update --init --recursive
     - export JS_FILES_MODIFIED=$(git --no-pager diff --name-only $CI_BUILD_REF^ $CI_BUILD_REF | grep ^js/ | wc -l)
-    - echo $JS_FILES_MODIFIED
     - if [ $JS_FILES_MODIFIED = 0 ]; then echo "Skipping JS deps install since no JS files modified."; else ./js/scripts/install-deps.sh;fi
   script:
-    - export RUST_BACKTRACE=1
-    - echo $JS_FILES_MODIFIED
     - if [ $JS_FILES_MODIFIED = 0 ]; then echo "Skipping JS lint since no JS files modified."; else ./js/scripts/lint.sh && ./js/scripts/test.sh && ./js/scripts/build.sh; fi
   tags:
     - rust

Cargo.lock (generated)

@@ -18,6 +18,7 @@ dependencies = [
  "ethcore-ipc-hypervisor 1.2.0",
  "ethcore-ipc-nano 1.5.0",
  "ethcore-ipc-tests 0.1.0",
+ "ethcore-light 1.5.0",
  "ethcore-logger 1.5.0",
  "ethcore-rpc 1.5.0",
  "ethcore-signer 1.5.0",
@@ -456,6 +457,21 @@ dependencies = [
  "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "ethcore-light"
+version = "1.5.0"
+dependencies = [
+ "ethcore 1.5.0",
+ "ethcore-io 1.5.0",
+ "ethcore-ipc 1.5.0",
+ "ethcore-ipc-codegen 1.5.0",
+ "ethcore-network 1.5.0",
+ "ethcore-util 1.5.0",
+ "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rlp 0.1.0",
+ "time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "ethcore-logger"
 version = "1.5.0"
@@ -665,6 +681,7 @@ dependencies = [
  "ethcore-ipc 1.5.0",
  "ethcore-ipc-codegen 1.5.0",
  "ethcore-ipc-nano 1.5.0",
+ "ethcore-light 1.5.0",
  "ethcore-network 1.5.0",
  "ethcore-util 1.5.0",
  "ethkey 0.2.0",
@@ -1273,7 +1290,7 @@ dependencies = [
 [[package]]
 name = "parity-ui-precompiled"
 version = "1.4.0"
-source = "git+https://github.com/ethcore/js-precompiled.git#3d3b2f9e8e8b0fd62c172240bfd001a317cf2979"
+source = "git+https://github.com/ethcore/js-precompiled.git#74aca23de55f84b2fcf6fe80d30277fa6449f645"
 dependencies = [
  "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]


@@ -47,6 +47,7 @@ rlp = { path = "util/rlp" }
 ethcore-stratum = { path = "stratum" }
 ethcore-dapps = { path = "dapps", optional = true }
 clippy = { version = "0.0.103", optional = true}
+ethcore-light = { path = "ethcore/light" }
 
 [target.'cfg(windows)'.dependencies]
 winapi = "0.2"


@@ -8,7 +8,7 @@ authors = ["Ethcore <admin@ethcore.io>"]
 build = "build.rs"
 
 [build-dependencies]
-"ethcore-ipc-codegen" = { path = "../../ipc/codegen" }
+"ethcore-ipc-codegen" = { path = "../../ipc/codegen", optional = true }
 
 [dependencies]
 log = "0.3"
@@ -16,6 +16,10 @@ ethcore = { path = ".." }
 ethcore-util = { path = "../../util" }
 ethcore-network = { path = "../../util/network" }
 ethcore-io = { path = "../../util/io" }
-ethcore-ipc = { path = "../../ipc/rpc" }
+ethcore-ipc = { path = "../../ipc/rpc", optional = true }
 rlp = { path = "../../util/rlp" }
 time = "0.1"
+
+[features]
+default = []
+ipc = ["ethcore-ipc", "ethcore-ipc-codegen"]


@@ -14,8 +14,14 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
+#[cfg(feature = "ipc")]
 extern crate ethcore_ipc_codegen;
 
+#[cfg(feature = "ipc")]
 fn main() {
 	ethcore_ipc_codegen::derive_binary("src/types/mod.rs.in").unwrap();
+	ethcore_ipc_codegen::derive_ipc_cond("src/provider.rs", true).unwrap();
 }
+
+#[cfg(not(feature = "ipc"))]
+fn main() { }


@@ -20,7 +20,7 @@
 use std::sync::Arc;
 
 use ethcore::engines::Engine;
-use ethcore::ids::BlockID;
+use ethcore::ids::BlockId;
 use ethcore::service::ClientIoMessage;
 use ethcore::block_import_error::BlockImportError;
 use ethcore::block_status::BlockStatus;
@@ -51,7 +51,7 @@ impl Client {
 	}
 
 	/// Whether the block is already known (but not necessarily part of the canonical chain)
-	pub fn is_known(&self, _id: BlockID) -> bool {
+	pub fn is_known(&self, _id: BlockId) -> bool {
 		false
 	}
 
@@ -61,7 +61,7 @@ impl Client {
 	}
 
 	/// Inquire about the status of a given block.
-	pub fn status(&self, _id: BlockID) -> BlockStatus {
+	pub fn status(&self, _id: BlockId) -> BlockStatus {
 		BlockStatus::Unknown
 	}
 


@@ -33,8 +33,21 @@
 pub mod client;
 pub mod net;
 
+#[cfg(not(feature = "ipc"))]
 pub mod provider;
 
+#[cfg(feature = "ipc")]
+pub mod provider {
+	#![allow(dead_code, unused_assignments, unused_variables, missing_docs)] // codegen issues
+
+	include!(concat!(env!("OUT_DIR"), "/provider.rs"));
+}
+
+#[cfg(feature = "ipc")]
+pub mod remote {
+	pub use provider::LightProviderClient;
+}
+
 mod types;
 
 pub use self::provider::Provider;
@@ -47,6 +60,8 @@ extern crate ethcore;
 extern crate ethcore_util as util;
 extern crate ethcore_network as network;
 extern crate ethcore_io as io;
-extern crate ethcore_ipc as ipc;
 extern crate rlp;
 extern crate time;
+
+#[cfg(feature = "ipc")]
+extern crate ethcore_ipc as ipc;
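Downstream crates see the same public surface either way; a minimal sketch, assuming the crate is consumed under the name ethcore_light implied by the Cargo metadata above (builds made with the ipc feature additionally expose the remote::LightProviderClient re-export):

// Sketch only: `provider` resolves to the plain module or to the
// codegen-produced one, so the import looks identical in both builds.
extern crate ethcore_light;

use ethcore_light::provider::Provider;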


@@ -22,6 +22,9 @@
 //!
 //! This module provides an interface for configuration of buffer
 //! flow costs and recharge rates.
+//!
+//! Current default costs are picked completely arbitrarily, not based
+//! on any empirical timings or mathematical models.
 
 use request;
 use super::packet;
@@ -273,6 +276,16 @@ impl FlowParams {
 	}
 }
 
+impl Default for FlowParams {
+	fn default() -> Self {
+		FlowParams {
+			limit: 50_000_000.into(),
+			costs: CostTable::default(),
+			recharge: 100_000.into(),
+		}
+	}
+}
+
 #[cfg(test)]
 mod tests {
 	use super::*;
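To show how the new Default values plug into the buffer-flow API used later in this commit (create_buffer, recharge, max_amount), a hedged usage sketch; the function name is hypothetical and the surrounding crate items are assumed to be in scope:

// Hypothetical sketch: estimate how many header requests a peer could afford
// under the default flow parameters.
fn sketch_default_flow() {
	let flow = FlowParams::default();       // limit 50M, recharge 100k, per the impl above
	let mut buffer = flow.create_buffer();  // fresh buffer, starts at the limit
	flow.recharge(&mut buffer);             // apply time-based recharge
	let _max = flow.max_amount(&buffer, request::Kind::Headers);
}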


@ -0,0 +1,120 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! I/O and event context generalizations.
use network::{NetworkContext, PeerId};
use super::{Announcement, LightProtocol, ReqId};
use super::error::Error;
use request::Request;
/// An I/O context which allows sending and receiving packets as well as
/// disconnecting peers. This is used as a generalization of the portions
/// of a p2p network which the light protocol structure makes use of.
pub trait IoContext {
/// Send a packet to a specific peer.
fn send(&self, peer: PeerId, packet_id: u8, packet_body: Vec<u8>);
/// Respond to a peer's message. Only works if this context is a byproduct
/// of a packet handler.
fn respond(&self, packet_id: u8, packet_body: Vec<u8>);
/// Disconnect a peer.
fn disconnect_peer(&self, peer: PeerId);
/// Disable a peer -- this is a disconnect + a time-out.
fn disable_peer(&self, peer: PeerId);
/// Get a peer's protocol version.
fn protocol_version(&self, peer: PeerId) -> Option<u8>;
}
impl<'a> IoContext for NetworkContext<'a> {
fn send(&self, peer: PeerId, packet_id: u8, packet_body: Vec<u8>) {
if let Err(e) = self.send(peer, packet_id, packet_body) {
debug!(target: "les", "Error sending packet to peer {}: {}", peer, e);
}
}
fn respond(&self, packet_id: u8, packet_body: Vec<u8>) {
if let Err(e) = self.respond(packet_id, packet_body) {
debug!(target: "les", "Error responding to peer message: {}", e);
}
}
fn disconnect_peer(&self, peer: PeerId) {
NetworkContext::disconnect_peer(self, peer);
}
fn disable_peer(&self, peer: PeerId) {
NetworkContext::disable_peer(self, peer);
}
fn protocol_version(&self, peer: PeerId) -> Option<u8> {
self.protocol_version(self.subprotocol_name(), peer)
}
}
/// Context for a protocol event.
pub trait EventContext {
/// Get the peer relevant to the event e.g. message sender,
/// disconnected/connected peer.
fn peer(&self) -> PeerId;
/// Make a request from a peer.
fn request_from(&self, peer: PeerId, request: Request) -> Result<ReqId, Error>;
/// Make an announcement of new capabilities to the rest of the peers.
// TODO: maybe just put this on a timer in LightProtocol?
fn make_announcement(&self, announcement: Announcement);
/// Disconnect a peer.
fn disconnect_peer(&self, peer: PeerId);
/// Disable a peer.
fn disable_peer(&self, peer: PeerId);
}
/// Concrete implementation of `EventContext` over the light protocol struct and
/// an io context.
pub struct Ctx<'a> {
/// Io context to enable immediate response to events.
pub io: &'a IoContext,
/// Protocol implementation.
pub proto: &'a LightProtocol,
/// Relevant peer for event.
pub peer: PeerId,
}
impl<'a> EventContext for Ctx<'a> {
fn peer(&self) -> PeerId { self.peer }
fn request_from(&self, peer: PeerId, request: Request) -> Result<ReqId, Error> {
self.proto.request_from(self.io, &peer, request)
}
fn make_announcement(&self, announcement: Announcement) {
self.proto.make_announcement(self.io, announcement);
}
fn disconnect_peer(&self, peer: PeerId) {
self.io.disconnect_peer(peer);
}
fn disable_peer(&self, peer: PeerId) {
self.io.disable_peer(peer);
}
}
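For a sense of what implementing IoContext takes, a minimal no-op sketch (the test harness added later in this commit uses a richer Expect enum for the same role); NullIo is a hypothetical name:

// Hypothetical no-op IoContext, e.g. for dry runs or simple tests.
struct NullIo;

impl IoContext for NullIo {
	fn send(&self, _peer: PeerId, _packet_id: u8, _packet_body: Vec<u8>) { }
	fn respond(&self, _packet_id: u8, _packet_body: Vec<u8>) { }
	fn disconnect_peer(&self, _peer: PeerId) { }
	fn disable_peer(&self, _peer: PeerId) { }
	fn protocol_version(&self, _peer: PeerId) -> Option<u8> { Some(1) }
}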


@@ -54,6 +54,14 @@ pub enum Error {
 	WrongNetwork,
 	/// Unknown peer.
 	UnknownPeer,
+	/// Unsolicited response.
+	UnsolicitedResponse,
+	/// Not a server.
+	NotServer,
+	/// Unsupported protocol version.
+	UnsupportedProtocolVersion(u8),
+	/// Bad protocol version.
+	BadProtocolVersion,
 }
 
 impl Error {
@@ -67,6 +75,10 @@ impl Error {
 			Error::UnexpectedHandshake => Punishment::Disconnect,
 			Error::WrongNetwork => Punishment::Disable,
 			Error::UnknownPeer => Punishment::Disconnect,
+			Error::UnsolicitedResponse => Punishment::Disable,
+			Error::NotServer => Punishment::Disable,
+			Error::UnsupportedProtocolVersion(_) => Punishment::Disable,
+			Error::BadProtocolVersion => Punishment::Disable,
 		}
 	}
 }
@@ -92,7 +104,11 @@ impl fmt::Display for Error {
 			Error::UnrecognizedPacket(code) => write!(f, "Unrecognized packet: 0x{:x}", code),
 			Error::UnexpectedHandshake => write!(f, "Unexpected handshake"),
 			Error::WrongNetwork => write!(f, "Wrong network"),
-			Error::UnknownPeer => write!(f, "unknown peer"),
+			Error::UnknownPeer => write!(f, "Unknown peer"),
+			Error::UnsolicitedResponse => write!(f, "Peer provided unsolicited data"),
+			Error::NotServer => write!(f, "Peer not a server."),
+			Error::UnsupportedProtocolVersion(pv) => write!(f, "Unsupported protocol version: {}", pv),
+			Error::BadProtocolVersion => write!(f, "Bad protocol version in handshake"),
 		}
 	}
 }
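The new variants feed the same punish-or-ignore flow that the packet dispatcher (handle_packet, later in this commit) already follows; a condensed sketch with the peer and IO plumbing reduced to parameters:

// Sketch: react to a protocol error according to its punishment level.
fn react(io: &IoContext, peer: PeerId, err: Error) {
	match err.punishment() {
		Punishment::None => {}
		Punishment::Disconnect => io.disconnect_peer(peer),
		Punishment::Disable => io.disable_peer(peer),
	}
}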


@ -20,36 +20,51 @@
//! See https://github.com/ethcore/parity/wiki/Light-Ethereum-Subprotocol-(LES) //! See https://github.com/ethcore/parity/wiki/Light-Ethereum-Subprotocol-(LES)
use ethcore::transaction::SignedTransaction; use ethcore::transaction::SignedTransaction;
use ethcore::receipt::Receipt;
use io::TimerToken; use io::TimerToken;
use network::{NetworkProtocolHandler, NetworkContext, NetworkError, PeerId}; use network::{NetworkProtocolHandler, NetworkContext, PeerId};
use rlp::{RlpStream, Stream, UntrustedRlp, View}; use rlp::{RlpStream, Stream, UntrustedRlp, View};
use util::hash::H256; use util::hash::H256;
use util::{Mutex, RwLock, U256}; use util::{Bytes, Mutex, RwLock, U256};
use time::SteadyTime; use time::{Duration, SteadyTime};
use std::collections::{HashMap, HashSet}; use std::collections::HashMap;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::atomic::{AtomicUsize, Ordering};
use provider::Provider; use provider::Provider;
use request::{self, Request}; use request::{self, Request};
use self::buffer_flow::{Buffer, FlowParams}; use self::buffer_flow::{Buffer, FlowParams};
use self::context::Ctx;
use self::error::{Error, Punishment}; use self::error::{Error, Punishment};
mod buffer_flow; mod buffer_flow;
mod context;
mod error; mod error;
mod status; mod status;
pub use self::status::{Status, Capabilities, Announcement, NetworkId}; #[cfg(test)]
mod tests;
pub use self::context::{EventContext, IoContext};
pub use self::status::{Status, Capabilities, Announcement};
const TIMEOUT: TimerToken = 0; const TIMEOUT: TimerToken = 0;
const TIMEOUT_INTERVAL_MS: u64 = 1000; const TIMEOUT_INTERVAL_MS: u64 = 1000;
// LPV1 // minimum interval between updates.
const PROTOCOL_VERSION: u32 = 1; const UPDATE_INTERVAL_MS: i64 = 5000;
// TODO [rob] make configurable. // Supported protocol versions.
const PROTOCOL_ID: [u8; 3] = *b"les"; pub const PROTOCOL_VERSIONS: &'static [u8] = &[1];
// Max protocol version.
pub const MAX_PROTOCOL_VERSION: u8 = 1;
// Packet count for LES.
pub const PACKET_COUNT: u8 = 15;
// packet ID definitions. // packet ID definitions.
mod packet { mod packet {
@ -95,17 +110,19 @@ pub struct ReqId(usize);
// may not have received one for. // may not have received one for.
struct PendingPeer { struct PendingPeer {
sent_head: H256, sent_head: H256,
last_update: SteadyTime,
proto_version: u8,
} }
// data about each peer. // data about each peer.
struct Peer { struct Peer {
local_buffer: Buffer, // their buffer relative to us local_buffer: Buffer, // their buffer relative to us
remote_buffer: Buffer, // our buffer relative to them
current_asking: HashSet<usize>, // pending request ids.
status: Status, status: Status,
capabilities: Capabilities, capabilities: Capabilities,
remote_flow: FlowParams, remote_flow: Option<(Buffer, FlowParams)>,
sent_head: H256, // last head we've given them. sent_head: H256, // last head we've given them.
last_update: SteadyTime,
proto_version: u8,
} }
impl Peer { impl Peer {
@ -126,38 +143,56 @@ impl Peer {
self.local_buffer.current() self.local_buffer.current()
} }
// recharge remote buffer with remote flow params.
fn recharge_remote(&mut self) {
let flow = &mut self.remote_flow;
flow.recharge(&mut self.remote_buffer);
}
} }
/// An LES event handler. /// An LES event handler.
///
/// Each handler function takes a context which describes the relevant peer
/// and gives references to the IO layer and protocol structure so new messages
/// can be dispatched immediately.
///
/// Request responses are not guaranteed to be complete or valid, but passed IDs will be correct.
/// Response handlers are not given a copy of the original request; it is assumed
/// that relevant data will be stored by interested handlers.
pub trait Handler: Send + Sync { pub trait Handler: Send + Sync {
/// Called when a peer connects. /// Called when a peer connects.
fn on_connect(&self, _id: PeerId, _status: &Status, _capabilities: &Capabilities) { } fn on_connect(&self, _ctx: &EventContext, _status: &Status, _capabilities: &Capabilities) { }
/// Called when a peer disconnects /// Called when a peer disconnects, with a list of unfulfilled request IDs as
fn on_disconnect(&self, _id: PeerId) { } /// of yet.
fn on_disconnect(&self, _ctx: &EventContext, _unfulfilled: &[ReqId]) { }
/// Called when a peer makes an announcement. /// Called when a peer makes an announcement.
fn on_announcement(&self, _id: PeerId, _announcement: &Announcement) { } fn on_announcement(&self, _ctx: &EventContext, _announcement: &Announcement) { }
/// Called when a peer requests relay of some transactions. /// Called when a peer requests relay of some transactions.
fn on_transactions(&self, _id: PeerId, _relay: &[SignedTransaction]) { } fn on_transactions(&self, _ctx: &EventContext, _relay: &[SignedTransaction]) { }
/// Called when a peer responds with block bodies.
fn on_block_bodies(&self, _ctx: &EventContext, _req_id: ReqId, _bodies: &[Bytes]) { }
/// Called when a peer responds with block headers.
fn on_block_headers(&self, _ctx: &EventContext, _req_id: ReqId, _headers: &[Bytes]) { }
/// Called when a peer responds with block receipts.
fn on_receipts(&self, _ctx: &EventContext, _req_id: ReqId, _receipts: &[Vec<Receipt>]) { }
/// Called when a peer responds with state proofs. Each proof is a series of trie
/// nodes in ascending order by distance from the root.
fn on_state_proofs(&self, _ctx: &EventContext, _req_id: ReqId, _proofs: &[Vec<Bytes>]) { }
/// Called when a peer responds with contract code.
fn on_code(&self, _ctx: &EventContext, _req_id: ReqId, _codes: &[Bytes]) { }
/// Called when a peer responds with header proofs. Each proof is a block header coupled
/// with a series of trie nodes in ascending order by distance from the root.
fn on_header_proofs(&self, _ctx: &EventContext, _req_id: ReqId, _proofs: &[(Bytes, Vec<Bytes>)]) { }
fn on_header_proofs(&self, _ctx: &EventContext, _req_id: ReqId, _proofs: &[(Bytes, Vec<Bytes>)]) { }
/// Called on abort.
fn on_abort(&self) { }
} }
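Because every Handler method has a no-op default, an implementation only overrides what it cares about; a hypothetical example (HeaderCollector is not part of this commit):

// Hypothetical handler that only listens for header responses.
struct HeaderCollector;

impl Handler for HeaderCollector {
	fn on_block_headers(&self, ctx: &EventContext, _req_id: ReqId, headers: &[Bytes]) {
		println!("peer {} answered with {} raw headers", ctx.peer(), headers.len());
	}
}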
// a request and the time it was made. // a request, the peer who it was made to, and the time it was made.
struct Requested { struct Requested {
request: Request, request: Request,
timestamp: SteadyTime, timestamp: SteadyTime,
peer_id: PeerId,
} }
/// Protocol parameters. /// Protocol parameters.
pub struct Params { pub struct Params {
/// Genesis hash.
pub genesis_hash: H256,
/// Network id. /// Network id.
pub network_id: NetworkId, pub network_id: u64,
/// Buffer flow parameters. /// Buffer flow parameters.
pub flow_params: FlowParams, pub flow_params: FlowParams,
/// Initial capabilities. /// Initial capabilities.
@ -175,9 +210,9 @@ pub struct Params {
// Locks must be acquired in the order declared, and when holding a read lock // Locks must be acquired in the order declared, and when holding a read lock
// on the peers, only one peer may be held at a time. // on the peers, only one peer may be held at a time.
pub struct LightProtocol { pub struct LightProtocol {
provider: Box<Provider>, provider: Arc<Provider>,
genesis_hash: H256, genesis_hash: H256,
network_id: NetworkId, network_id: u64,
pending_peers: RwLock<HashMap<PeerId, PendingPeer>>, pending_peers: RwLock<HashMap<PeerId, PendingPeer>>,
peers: RwLock<HashMap<PeerId, Mutex<Peer>>>, peers: RwLock<HashMap<PeerId, Mutex<Peer>>>,
pending_requests: RwLock<HashMap<usize, Requested>>, pending_requests: RwLock<HashMap<usize, Requested>>,
@ -189,10 +224,13 @@ pub struct LightProtocol {
impl LightProtocol { impl LightProtocol {
/// Create a new instance of the protocol manager. /// Create a new instance of the protocol manager.
pub fn new(provider: Box<Provider>, params: Params) -> Self { pub fn new(provider: Arc<Provider>, params: Params) -> Self {
debug!(target: "les", "Initializing LES handler");
let genesis_hash = provider.chain_info().genesis_hash;
LightProtocol { LightProtocol {
provider: provider, provider: provider,
genesis_hash: params.genesis_hash, genesis_hash: genesis_hash,
network_id: params.network_id, network_id: params.network_id,
pending_peers: RwLock::new(HashMap::new()), pending_peers: RwLock::new(HashMap::new()),
peers: RwLock::new(HashMap::new()), peers: RwLock::new(HashMap::new()),
@ -207,28 +245,37 @@ impl LightProtocol {
/// Check the maximum amount of requests of a specific type /// Check the maximum amount of requests of a specific type
/// which a peer would be able to serve. /// which a peer would be able to serve.
pub fn max_requests(&self, peer: PeerId, kind: request::Kind) -> Option<usize> { pub fn max_requests(&self, peer: PeerId, kind: request::Kind) -> Option<usize> {
self.peers.read().get(&peer).map(|peer| { self.peers.read().get(&peer).and_then(|peer| {
let mut peer = peer.lock(); let mut peer = peer.lock();
peer.recharge_remote(); match peer.remote_flow.as_mut() {
peer.remote_flow.max_amount(&peer.remote_buffer, kind) Some(&mut (ref mut buf, ref flow)) => {
flow.recharge(buf);
Some(flow.max_amount(&*buf, kind))
}
None => None,
}
}) })
} }
/// Make a request to a peer. /// Make a request to a peer.
/// ///
/// Fails on: nonexistent peer, network error, /// Fails on: nonexistent peer, network error, peer not server,
/// insufficient buffer. Does not check capabilities before sending. /// insufficient buffer. Does not check capabilities before sending.
/// On success, returns a request id which can later be coordinated /// On success, returns a request id which can later be coordinated
/// with an event. /// with an event.
pub fn request_from(&self, io: &NetworkContext, peer_id: &PeerId, request: Request) -> Result<ReqId, Error> { pub fn request_from(&self, io: &IoContext, peer_id: &PeerId, request: Request) -> Result<ReqId, Error> {
let peers = self.peers.read(); let peers = self.peers.read();
let peer = try!(peers.get(peer_id).ok_or_else(|| Error::UnknownPeer)); let peer = try!(peers.get(peer_id).ok_or_else(|| Error::UnknownPeer));
let mut peer = peer.lock(); let mut peer = peer.lock();
peer.recharge_remote(); match peer.remote_flow.as_mut() {
Some(&mut (ref mut buf, ref flow)) => {
let max = peer.remote_flow.compute_cost(request.kind(), request.amount()); flow.recharge(buf);
try!(peer.remote_buffer.deduct_cost(max)); let max = flow.compute_cost(request.kind(), request.amount());
try!(buf.deduct_cost(max));
}
None => return Err(Error::NotServer),
}
let req_id = self.req_id.fetch_add(1, Ordering::SeqCst); let req_id = self.req_id.fetch_add(1, Ordering::SeqCst);
let packet_data = encode_request(&request, req_id); let packet_data = encode_request(&request, req_id);
@ -242,12 +289,12 @@ impl LightProtocol {
request::Kind::HeaderProofs => packet::GET_HEADER_PROOFS, request::Kind::HeaderProofs => packet::GET_HEADER_PROOFS,
}; };
try!(io.send(*peer_id, packet_id, packet_data)); io.send(*peer_id, packet_id, packet_data);
peer.current_asking.insert(req_id);
self.pending_requests.write().insert(req_id, Requested { self.pending_requests.write().insert(req_id, Requested {
request: request, request: request,
timestamp: SteadyTime::now(), timestamp: SteadyTime::now(),
peer_id: *peer_id,
}); });
Ok(ReqId(req_id)) Ok(ReqId(req_id))
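Handler code reaches the same path through EventContext::request_from; a hedged sketch of dispatching a prepared request (dispatch is a hypothetical helper, and error handling is reduced to discarding the failure):

// Hypothetical helper: send a request and keep the id to match the response later.
fn dispatch(ctx: &EventContext, peer: PeerId, request: Request) -> Option<ReqId> {
	match ctx.request_from(peer, request) {
		Ok(req_id) => Some(req_id), // pair this with the response callback
		Err(_) => None,             // e.g. NotServer or insufficient buffer
	}
}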
@ -255,8 +302,9 @@ impl LightProtocol {
/// Make an announcement of new chain head and capabilities to all peers. /// Make an announcement of new chain head and capabilities to all peers.
/// The announcement is expected to be valid. /// The announcement is expected to be valid.
pub fn make_announcement(&self, io: &NetworkContext, mut announcement: Announcement) { pub fn make_announcement(&self, io: &IoContext, mut announcement: Announcement) {
let mut reorgs_map = HashMap::new(); let mut reorgs_map = HashMap::new();
let now = SteadyTime::now();
// update stored capabilities // update stored capabilities
self.capabilities.write().update_from(&announcement); self.capabilities.write().update_from(&announcement);
@ -264,6 +312,17 @@ impl LightProtocol {
// calculate reorg info and send packets // calculate reorg info and send packets
for (peer_id, peer_info) in self.peers.read().iter() { for (peer_id, peer_info) in self.peers.read().iter() {
let mut peer_info = peer_info.lock(); let mut peer_info = peer_info.lock();
// TODO: "urgent" announcements like new blocks?
// the timer approach will skip 1 (possibly 2) in rare occasions.
if peer_info.sent_head == announcement.head_hash ||
peer_info.status.head_num >= announcement.head_num ||
now - peer_info.last_update < Duration::milliseconds(UPDATE_INTERVAL_MS) {
continue
}
peer_info.last_update = now;
let reorg_depth = reorgs_map.entry(peer_info.sent_head) let reorg_depth = reorgs_map.entry(peer_info.sent_head)
.or_insert_with(|| { .or_insert_with(|| {
match self.provider.reorg_depth(&announcement.head_hash, &peer_info.sent_head) { match self.provider.reorg_depth(&announcement.head_hash, &peer_info.sent_head) {
@ -281,26 +340,133 @@ impl LightProtocol {
peer_info.sent_head = announcement.head_hash; peer_info.sent_head = announcement.head_hash;
announcement.reorg_depth = *reorg_depth; announcement.reorg_depth = *reorg_depth;
if let Err(e) = io.send(*peer_id, packet::ANNOUNCE, status::write_announcement(&announcement)) { io.send(*peer_id, packet::ANNOUNCE, status::write_announcement(&announcement));
debug!(target: "les", "Error sending to peer {}: {}", peer_id, e);
}
} }
} }
/// Add an event handler. /// Add an event handler.
/// Ownership will be transferred to the protocol structure, /// Ownership will be transferred to the protocol structure,
/// and the handler will be kept alive as long as it is. /// and the handler will be kept alive as long as it is.
/// These are intended to be added at the beginning of the /// These are intended to be added when the protocol structure
/// is initialized as a means of customizing its behavior.
pub fn add_handler(&mut self, handler: Box<Handler>) { pub fn add_handler(&mut self, handler: Box<Handler>) {
self.handlers.push(handler); self.handlers.push(handler);
} }
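Putting construction and registration together, a hedged sketch; provider and params are assumed to be built elsewhere (the full Params field list sits outside this hunk), and HeaderCollector is the hypothetical handler from the earlier sketch:

// Sketch: handlers are registered once, right after construction.
fn build_protocol(provider: Arc<Provider>, params: Params) -> LightProtocol {
	let mut proto = LightProtocol::new(provider, params);
	proto.add_handler(Box::new(HeaderCollector));
	proto
}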
/// Signal to handlers that network activity is being aborted
/// and clear peer data.
pub fn abort(&self) {
for handler in &self.handlers {
handler.on_abort();
}
// acquire in order and hold.
let mut pending_peers = self.pending_peers.write();
let mut peers = self.peers.write();
let mut pending_requests = self.pending_requests.write();
pending_peers.clear();
peers.clear();
pending_requests.clear();
}
// Does the common pre-verification of responses before the response itself
// is actually decoded:
// - check whether peer exists
// - check whether request was made
// - check whether request kinds match
fn pre_verify_response(&self, peer: &PeerId, kind: request::Kind, raw: &UntrustedRlp) -> Result<ReqId, Error> {
let req_id: usize = try!(raw.val_at(0));
let cur_buffer: U256 = try!(raw.val_at(1));
trace!(target: "les", "pre-verifying response from peer {}, kind={:?}", peer, kind);
match self.pending_requests.write().remove(&req_id) {
None => return Err(Error::UnsolicitedResponse),
Some(requested) => {
if requested.peer_id != *peer || requested.request.kind() != kind {
return Err(Error::UnsolicitedResponse)
}
}
}
let peers = self.peers.read();
match peers.get(peer) {
Some(peer_info) => {
let mut peer_info = peer_info.lock();
match peer_info.remote_flow.as_mut() {
Some(&mut (ref mut buf, ref mut flow)) => {
let actual_buffer = ::std::cmp::min(cur_buffer, *flow.limit());
buf.update_to(actual_buffer)
}
None => return Err(Error::NotServer), // this really should be impossible.
}
Ok(ReqId(req_id))
}
None => Err(Error::UnknownPeer), // probably only occurs in a race of some kind.
}
}
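The framing assumed here is: item 0 of a response packet is the request id, item 1 the peer's reported buffer level, and items 2 onward are the payload later handed to handlers. A small hedged decoding sketch (split_response is hypothetical):

// Hypothetical helper mirroring the layout read above.
fn split_response(raw: &UntrustedRlp) -> Result<(usize, U256), ::rlp::DecoderError> {
	let req_id: usize = try!(raw.val_at(0));     // which request this answers
	let cur_buffer: U256 = try!(raw.val_at(1));  // peer-reported buffer
	Ok((req_id, cur_buffer))
}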
// handle a packet using the given io context.
fn handle_packet(&self, io: &IoContext, peer: &PeerId, packet_id: u8, data: &[u8]) {
let rlp = UntrustedRlp::new(data);
trace!(target: "les", "Incoming packet {} from peer {}", packet_id, peer);
// handle the packet
let res = match packet_id {
packet::STATUS => self.status(peer, io, rlp),
packet::ANNOUNCE => self.announcement(peer, io, rlp),
packet::GET_BLOCK_HEADERS => self.get_block_headers(peer, io, rlp),
packet::BLOCK_HEADERS => self.block_headers(peer, io, rlp),
packet::GET_BLOCK_BODIES => self.get_block_bodies(peer, io, rlp),
packet::BLOCK_BODIES => self.block_bodies(peer, io, rlp),
packet::GET_RECEIPTS => self.get_receipts(peer, io, rlp),
packet::RECEIPTS => self.receipts(peer, io, rlp),
packet::GET_PROOFS => self.get_proofs(peer, io, rlp),
packet::PROOFS => self.proofs(peer, io, rlp),
packet::GET_CONTRACT_CODES => self.get_contract_code(peer, io, rlp),
packet::CONTRACT_CODES => self.contract_code(peer, io, rlp),
packet::GET_HEADER_PROOFS => self.get_header_proofs(peer, io, rlp),
packet::HEADER_PROOFS => self.header_proofs(peer, io, rlp),
packet::SEND_TRANSACTIONS => self.relay_transactions(peer, io, rlp),
other => {
Err(Error::UnrecognizedPacket(other))
}
};
// if something went wrong, figure out how much to punish the peer.
if let Err(e) = res {
match e.punishment() {
Punishment::None => {}
Punishment::Disconnect => {
debug!(target: "les", "Disconnecting peer {}: {}", peer, e);
io.disconnect_peer(*peer)
}
Punishment::Disable => {
debug!(target: "les", "Disabling peer {}: {}", peer, e);
io.disable_peer(*peer)
}
}
}
}
} }
impl LightProtocol { impl LightProtocol {
// called when a peer connects. // called when a peer connects.
fn on_connect(&self, peer: &PeerId, io: &NetworkContext) { fn on_connect(&self, peer: &PeerId, io: &IoContext) {
let peer = *peer; let peer = *peer;
trace!(target: "les", "Peer {} connecting", peer);
match self.send_status(peer, io) { match self.send_status(peer, io) {
Ok(pending_peer) => { Ok(pending_peer) => {
self.pending_peers.write().insert(peer, pending_peer); self.pending_peers.write().insert(peer, pending_peer);
@ -313,44 +479,69 @@ impl LightProtocol {
} }
// called when a peer disconnects. // called when a peer disconnects.
fn on_disconnect(&self, peer: PeerId) { fn on_disconnect(&self, peer: PeerId, io: &IoContext) {
// TODO: reassign all requests assigned to this peer. trace!(target: "les", "Peer {} disconnecting", peer);
self.pending_peers.write().remove(&peer); self.pending_peers.write().remove(&peer);
if self.peers.write().remove(&peer).is_some() { if self.peers.write().remove(&peer).is_some() {
let unfulfilled: Vec<_> = self.pending_requests.read()
.iter()
.filter(|&(_, r)| r.peer_id == peer)
.map(|(&id, _)| ReqId(id))
.collect();
{
let mut pending = self.pending_requests.write();
for &ReqId(ref inner) in &unfulfilled {
pending.remove(inner);
}
}
for handler in &self.handlers { for handler in &self.handlers {
handler.on_disconnect(peer) handler.on_disconnect(&Ctx {
peer: peer,
io: io,
proto: self,
}, &unfulfilled)
} }
} }
} }
// send status to a peer. // send status to a peer.
fn send_status(&self, peer: PeerId, io: &NetworkContext) -> Result<PendingPeer, NetworkError> { fn send_status(&self, peer: PeerId, io: &IoContext) -> Result<PendingPeer, Error> {
let chain_info = self.provider.chain_info(); let proto_version = try!(io.protocol_version(peer).ok_or(Error::WrongNetwork));
// TODO: could update capabilities here. if PROTOCOL_VERSIONS.iter().find(|x| **x == proto_version).is_none() {
return Err(Error::UnsupportedProtocolVersion(proto_version));
}
let chain_info = self.provider.chain_info();
let status = Status { let status = Status {
head_td: chain_info.total_difficulty, head_td: chain_info.total_difficulty,
head_hash: chain_info.best_block_hash, head_hash: chain_info.best_block_hash,
head_num: chain_info.best_block_number, head_num: chain_info.best_block_number,
genesis_hash: chain_info.genesis_hash, genesis_hash: chain_info.genesis_hash,
protocol_version: PROTOCOL_VERSION, protocol_version: proto_version as u32, // match peer proto version
network_id: self.network_id, network_id: self.network_id,
last_head: None, last_head: None,
}; };
let capabilities = self.capabilities.read().clone(); let capabilities = self.capabilities.read().clone();
let status_packet = status::write_handshake(&status, &capabilities, &self.flow_params); let status_packet = status::write_handshake(&status, &capabilities, Some(&self.flow_params));
try!(io.send(peer, packet::STATUS, status_packet)); io.send(peer, packet::STATUS, status_packet);
Ok(PendingPeer { Ok(PendingPeer {
sent_head: chain_info.best_block_hash, sent_head: chain_info.best_block_hash,
last_update: SteadyTime::now(),
proto_version: proto_version,
}) })
} }
// Handle status message from peer. // Handle status message from peer.
fn status(&self, peer: &PeerId, data: UntrustedRlp) -> Result<(), Error> { fn status(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
let pending = match self.pending_peers.write().remove(peer) { let pending = match self.pending_peers.write().remove(peer) {
Some(pending) => pending, Some(pending) => pending,
None => { None => {
@ -366,33 +557,45 @@ impl LightProtocol {
return Err(Error::WrongNetwork); return Err(Error::WrongNetwork);
} }
if Some(status.protocol_version as u8) != io.protocol_version(*peer) {
return Err(Error::BadProtocolVersion);
}
let remote_flow = flow_params.map(|params| (params.create_buffer(), params));
self.peers.write().insert(*peer, Mutex::new(Peer { self.peers.write().insert(*peer, Mutex::new(Peer {
local_buffer: self.flow_params.create_buffer(), local_buffer: self.flow_params.create_buffer(),
remote_buffer: flow_params.create_buffer(),
current_asking: HashSet::new(),
status: status.clone(), status: status.clone(),
capabilities: capabilities.clone(), capabilities: capabilities.clone(),
remote_flow: flow_params, remote_flow: remote_flow,
sent_head: pending.sent_head, sent_head: pending.sent_head,
last_update: pending.last_update,
proto_version: pending.proto_version,
})); }));
for handler in &self.handlers { for handler in &self.handlers {
handler.on_connect(*peer, &status, &capabilities) handler.on_connect(&Ctx {
peer: *peer,
io: io,
proto: self,
}, &status, &capabilities)
} }
Ok(()) Ok(())
} }
// Handle an announcement. // Handle an announcement.
fn announcement(&self, peer: &PeerId, data: UntrustedRlp) -> Result<(), Error> { fn announcement(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
if !self.peers.read().contains_key(peer) { if !self.peers.read().contains_key(peer) {
debug!(target: "les", "Ignoring announcement from unknown peer"); debug!(target: "les", "Ignoring announcement from unknown peer");
return Ok(()) return Ok(())
} }
let announcement = try!(status::parse_announcement(data)); let announcement = try!(status::parse_announcement(data));
let peers = self.peers.read();
// scope to ensure locks are dropped before moving into handler-space.
{
let peers = self.peers.read();
let peer_info = match peers.get(peer) { let peer_info = match peers.get(peer) {
Some(info) => info, Some(info) => info,
None => return Ok(()), None => return Ok(()),
@ -413,16 +616,21 @@ impl LightProtocol {
// update capabilities. // update capabilities.
peer_info.capabilities.update_from(&announcement); peer_info.capabilities.update_from(&announcement);
}
for handler in &self.handlers { for handler in &self.handlers {
handler.on_announcement(*peer, &announcement); handler.on_announcement(&Ctx {
peer: *peer,
io: io,
proto: self,
}, &announcement);
} }
Ok(()) Ok(())
} }
// Handle a request for block headers. // Handle a request for block headers.
fn get_block_headers(&self, peer: &PeerId, io: &NetworkContext, data: UntrustedRlp) -> Result<(), Error> { fn get_block_headers(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_HEADERS: usize = 512; const MAX_HEADERS: usize = 512;
let peers = self.peers.read(); let peers = self.peers.read();
@ -467,16 +675,29 @@ impl LightProtocol {
} }
stream.out() stream.out()
}).map_err(Into::into) });
Ok(())
} }
// Receive a response for block headers. // Receive a response for block headers.
fn block_headers(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> { fn block_headers(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
unimplemented!() let req_id = try!(self.pre_verify_response(peer, request::Kind::Headers, &raw));
let raw_headers: Vec<_> = raw.iter().skip(2).map(|x| x.as_raw().to_owned()).collect();
for handler in &self.handlers {
handler.on_block_headers(&Ctx {
peer: *peer,
io: io,
proto: self,
}, req_id, &raw_headers);
}
Ok(())
} }
// Handle a request for block bodies. // Handle a request for block bodies.
fn get_block_bodies(&self, peer: &PeerId, io: &NetworkContext, data: UntrustedRlp) -> Result<(), Error> { fn get_block_bodies(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_BODIES: usize = 256; const MAX_BODIES: usize = 256;
let peers = self.peers.read(); let peers = self.peers.read();
@ -513,16 +734,29 @@ impl LightProtocol {
} }
stream.out() stream.out()
}).map_err(Into::into) });
Ok(())
} }
// Receive a response for block bodies. // Receive a response for block bodies.
fn block_bodies(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> { fn block_bodies(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
unimplemented!() let req_id = try!(self.pre_verify_response(peer, request::Kind::Bodies, &raw));
let raw_bodies: Vec<Bytes> = raw.iter().skip(2).map(|x| x.as_raw().to_owned()).collect();
for handler in &self.handlers {
handler.on_block_bodies(&Ctx {
peer: *peer,
io: io,
proto: self,
}, req_id, &raw_bodies);
}
Ok(())
} }
// Handle a request for receipts. // Handle a request for receipts.
fn get_receipts(&self, peer: &PeerId, io: &NetworkContext, data: UntrustedRlp) -> Result<(), Error> { fn get_receipts(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_RECEIPTS: usize = 256; const MAX_RECEIPTS: usize = 256;
let peers = self.peers.read(); let peers = self.peers.read();
@ -559,16 +793,33 @@ impl LightProtocol {
} }
stream.out() stream.out()
}).map_err(Into::into) });
Ok(())
} }
// Receive a response for receipts. // Receive a response for receipts.
fn receipts(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> { fn receipts(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
unimplemented!() let req_id = try!(self.pre_verify_response(peer, request::Kind::Receipts, &raw));
let raw_receipts: Vec<Vec<Receipt>> = try!(raw
.iter()
.skip(2)
.map(|x| x.as_val())
.collect());
for handler in &self.handlers {
handler.on_receipts(&Ctx {
peer: *peer,
io: io,
proto: self,
}, req_id, &raw_receipts);
}
Ok(())
} }
// Handle a request for proofs. // Handle a request for proofs.
fn get_proofs(&self, peer: &PeerId, io: &NetworkContext, data: UntrustedRlp) -> Result<(), Error> { fn get_proofs(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_PROOFS: usize = 128; const MAX_PROOFS: usize = 128;
let peers = self.peers.read(); let peers = self.peers.read();
@ -616,16 +867,33 @@ impl LightProtocol {
} }
stream.out() stream.out()
}).map_err(Into::into) });
Ok(())
} }
// Receive a response for proofs. // Receive a response for proofs.
fn proofs(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> { fn proofs(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
unimplemented!() let req_id = try!(self.pre_verify_response(peer, request::Kind::StateProofs, &raw));
let raw_proofs: Vec<Vec<Bytes>> = raw.iter()
.skip(2)
.map(|x| x.iter().map(|node| node.as_raw().to_owned()).collect())
.collect();
for handler in &self.handlers {
handler.on_state_proofs(&Ctx {
peer: *peer,
io: io,
proto: self,
}, req_id, &raw_proofs);
}
Ok(())
} }
// Handle a request for contract code. // Handle a request for contract code.
fn get_contract_code(&self, peer: &PeerId, io: &NetworkContext, data: UntrustedRlp) -> Result<(), Error> { fn get_contract_code(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_CODES: usize = 256; const MAX_CODES: usize = 256;
let peers = self.peers.read(); let peers = self.peers.read();
@ -667,20 +935,34 @@ impl LightProtocol {
stream.append(&req_id).append(&cur_buffer); stream.append(&req_id).append(&cur_buffer);
for code in response { for code in response {
stream.append_raw(&code, 1); stream.append(&code);
} }
stream.out() stream.out()
}).map_err(Into::into) });
Ok(())
} }
// Receive a response for contract code. // Receive a response for contract code.
fn contract_code(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> { fn contract_code(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
unimplemented!() let req_id = try!(self.pre_verify_response(peer, request::Kind::Codes, &raw));
let raw_code: Vec<Bytes> = try!(raw.iter().skip(2).map(|x| x.as_val()).collect());
for handler in &self.handlers {
handler.on_code(&Ctx {
peer: *peer,
io: io,
proto: self,
}, req_id, &raw_code);
}
Ok(())
} }
// Handle a request for header proofs // Handle a request for header proofs
fn get_header_proofs(&self, peer: &PeerId, io: &NetworkContext, data: UntrustedRlp) -> Result<(), Error> { fn get_header_proofs(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_PROOFS: usize = 256; const MAX_PROOFS: usize = 256;
let peers = self.peers.read(); let peers = self.peers.read();
@ -727,16 +1009,37 @@ impl LightProtocol {
} }
stream.out() stream.out()
}).map_err(Into::into) });
Ok(())
} }
// Receive a response for header proofs // Receive a response for header proofs
fn header_proofs(&self, _: &PeerId, _: &NetworkContext, _: UntrustedRlp) -> Result<(), Error> { fn header_proofs(&self, peer: &PeerId, io: &IoContext, raw: UntrustedRlp) -> Result<(), Error> {
unimplemented!() fn decode_res(raw: UntrustedRlp) -> Result<(Bytes, Vec<Bytes>), ::rlp::DecoderError> {
Ok((
try!(raw.val_at(0)),
try!(raw.at(1)).iter().map(|x| x.as_raw().to_owned()).collect(),
))
}
let req_id = try!(self.pre_verify_response(peer, request::Kind::HeaderProofs, &raw));
let raw_proofs: Vec<_> = try!(raw.iter().skip(2).map(decode_res).collect());
for handler in &self.handlers {
handler.on_header_proofs(&Ctx {
peer: *peer,
io: io,
proto: self,
}, req_id, &raw_proofs);
}
Ok(())
} }
// Receive a set of transactions to relay. // Receive a set of transactions to relay.
fn relay_transactions(&self, peer: &PeerId, data: UntrustedRlp) -> Result<(), Error> { fn relay_transactions(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> {
const MAX_TRANSACTIONS: usize = 256; const MAX_TRANSACTIONS: usize = 256;
let txs: Vec<_> = try!(data.iter().take(MAX_TRANSACTIONS).map(|x| x.as_val::<SignedTransaction>()).collect()); let txs: Vec<_> = try!(data.iter().take(MAX_TRANSACTIONS).map(|x| x.as_val::<SignedTransaction>()).collect());
@ -744,7 +1047,11 @@ impl LightProtocol {
debug!(target: "les", "Received {} transactions to relay from peer {}", txs.len(), peer); debug!(target: "les", "Received {} transactions to relay from peer {}", txs.len(), peer);
for handler in &self.handlers { for handler in &self.handlers {
handler.on_transactions(*peer, &txs); handler.on_transactions(&Ctx {
peer: *peer,
io: io,
proto: self,
}, &txs);
} }
Ok(()) Ok(())
@ -757,60 +1064,15 @@ impl NetworkProtocolHandler for LightProtocol {
} }
fn read(&self, io: &NetworkContext, peer: &PeerId, packet_id: u8, data: &[u8]) { fn read(&self, io: &NetworkContext, peer: &PeerId, packet_id: u8, data: &[u8]) {
let rlp = UntrustedRlp::new(data); self.handle_packet(io, peer, packet_id, data);
// handle the packet
let res = match packet_id {
packet::STATUS => self.status(peer, rlp),
packet::ANNOUNCE => self.announcement(peer, rlp),
packet::GET_BLOCK_HEADERS => self.get_block_headers(peer, io, rlp),
packet::BLOCK_HEADERS => self.block_headers(peer, io, rlp),
packet::GET_BLOCK_BODIES => self.get_block_bodies(peer, io, rlp),
packet::BLOCK_BODIES => self.block_bodies(peer, io, rlp),
packet::GET_RECEIPTS => self.get_receipts(peer, io, rlp),
packet::RECEIPTS => self.receipts(peer, io, rlp),
packet::GET_PROOFS => self.get_proofs(peer, io, rlp),
packet::PROOFS => self.proofs(peer, io, rlp),
packet::GET_CONTRACT_CODES => self.get_contract_code(peer, io, rlp),
packet::CONTRACT_CODES => self.contract_code(peer, io, rlp),
packet::GET_HEADER_PROOFS => self.get_header_proofs(peer, io, rlp),
packet::HEADER_PROOFS => self.header_proofs(peer, io, rlp),
packet::SEND_TRANSACTIONS => self.relay_transactions(peer, rlp),
other => {
Err(Error::UnrecognizedPacket(other))
}
};
// if something went wrong, figure out how much to punish the peer.
if let Err(e) = res {
match e.punishment() {
Punishment::None => {}
Punishment::Disconnect => {
debug!(target: "les", "Disconnecting peer {}: {}", peer, e);
io.disconnect_peer(*peer)
}
Punishment::Disable => {
debug!(target: "les", "Disabling peer {}: {}", peer, e);
io.disable_peer(*peer)
}
}
}
} }
fn connected(&self, io: &NetworkContext, peer: &PeerId) { fn connected(&self, io: &NetworkContext, peer: &PeerId) {
self.on_connect(peer, io); self.on_connect(peer, io);
} }
fn disconnected(&self, _io: &NetworkContext, peer: &PeerId) { fn disconnected(&self, io: &NetworkContext, peer: &PeerId) {
self.on_disconnect(*peer); self.on_disconnect(*peer, io);
} }
fn timeout(&self, _io: &NetworkContext, timer: TimerToken) { fn timeout(&self, _io: &NetworkContext, timer: TimerToken) {


@ -82,26 +82,6 @@ impl Key {
} }
} }
/// Network ID structure.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u32)]
pub enum NetworkId {
/// ID for the mainnet
Mainnet = 1,
/// ID for the testnet
Testnet = 0,
}
impl NetworkId {
fn from_raw(raw: u32) -> Option<Self> {
match raw {
0 => Some(NetworkId::Testnet),
1 => Some(NetworkId::Mainnet),
_ => None,
}
}
}
// helper for decoding key-value pairs in the handshake or an announcement. // helper for decoding key-value pairs in the handshake or an announcement.
struct Parser<'a> { struct Parser<'a> {
pos: usize, pos: usize,
@ -118,6 +98,7 @@ impl<'a> Parser<'a> {
// expect a specific next key, and get the value's RLP. // expect a specific next key, and get the value's RLP.
// if the key isn't found, the position isn't advanced. // if the key isn't found, the position isn't advanced.
fn expect_raw(&mut self, key: Key) -> Result<UntrustedRlp<'a>, DecoderError> { fn expect_raw(&mut self, key: Key) -> Result<UntrustedRlp<'a>, DecoderError> {
trace!(target: "les", "Expecting key {}", key.as_str());
let pre_pos = self.pos; let pre_pos = self.pos;
if let Some((k, val)) = try!(self.get_next()) { if let Some((k, val)) = try!(self.get_next()) {
if k == key { return Ok(val) } if k == key { return Ok(val) }
@ -164,7 +145,7 @@ pub struct Status {
/// Protocol version. /// Protocol version.
pub protocol_version: u32, pub protocol_version: u32,
/// Network id of this peer. /// Network id of this peer.
pub network_id: NetworkId, pub network_id: u64,
/// Total difficulty of the head of the chain. /// Total difficulty of the head of the chain.
pub head_td: U256, pub head_td: U256,
/// Hash of the best block. /// Hash of the best block.
@ -217,7 +198,7 @@ impl Capabilities {
/// - chain status /// - chain status
/// - serving capabilities /// - serving capabilities
/// - buffer flow parameters /// - buffer flow parameters
pub fn parse_handshake(rlp: UntrustedRlp) -> Result<(Status, Capabilities, FlowParams), DecoderError> { pub fn parse_handshake(rlp: UntrustedRlp) -> Result<(Status, Capabilities, Option<FlowParams>), DecoderError> {
let mut parser = Parser { let mut parser = Parser {
pos: 0, pos: 0,
rlp: rlp, rlp: rlp,
@ -225,8 +206,7 @@ pub fn parse_handshake(rlp: UntrustedRlp) -> Result<(Status, Capabilities, FlowP
let status = Status { let status = Status {
protocol_version: try!(parser.expect(Key::ProtocolVersion)), protocol_version: try!(parser.expect(Key::ProtocolVersion)),
network_id: try!(parser.expect(Key::NetworkId) network_id: try!(parser.expect(Key::NetworkId)),
.and_then(|id: u32| NetworkId::from_raw(id).ok_or(DecoderError::Custom("Invalid network ID")))),
head_td: try!(parser.expect(Key::HeadTD)), head_td: try!(parser.expect(Key::HeadTD)),
head_hash: try!(parser.expect(Key::HeadHash)), head_hash: try!(parser.expect(Key::HeadHash)),
head_num: try!(parser.expect(Key::HeadNum)), head_num: try!(parser.expect(Key::HeadNum)),
@ -241,20 +221,23 @@ pub fn parse_handshake(rlp: UntrustedRlp) -> Result<(Status, Capabilities, FlowP
tx_relay: parser.expect_raw(Key::TxRelay).is_ok(), tx_relay: parser.expect_raw(Key::TxRelay).is_ok(),
}; };
let flow_params = FlowParams::new( let flow_params = match (
try!(parser.expect(Key::BufferLimit)), parser.expect(Key::BufferLimit),
try!(parser.expect(Key::BufferCostTable)), parser.expect(Key::BufferCostTable),
try!(parser.expect(Key::BufferRechargeRate)), parser.expect(Key::BufferRechargeRate)
); ) {
(Ok(bl), Ok(bct), Ok(brr)) => Some(FlowParams::new(bl, bct, brr)),
_ => None,
};
Ok((status, capabilities, flow_params)) Ok((status, capabilities, flow_params))
} }
/// Write a handshake, given status, capabilities, and flow parameters. /// Write a handshake, given status, capabilities, and flow parameters.
pub fn write_handshake(status: &Status, capabilities: &Capabilities, flow_params: &FlowParams) -> Vec<u8> { pub fn write_handshake(status: &Status, capabilities: &Capabilities, flow_params: Option<&FlowParams>) -> Vec<u8> {
let mut pairs = Vec::new(); let mut pairs = Vec::new();
pairs.push(encode_pair(Key::ProtocolVersion, &status.protocol_version)); pairs.push(encode_pair(Key::ProtocolVersion, &status.protocol_version));
pairs.push(encode_pair(Key::NetworkId, &(status.network_id as u32))); pairs.push(encode_pair(Key::NetworkId, &(status.network_id as u64)));
pairs.push(encode_pair(Key::HeadTD, &status.head_td)); pairs.push(encode_pair(Key::HeadTD, &status.head_td));
pairs.push(encode_pair(Key::HeadHash, &status.head_hash)); pairs.push(encode_pair(Key::HeadHash, &status.head_hash));
pairs.push(encode_pair(Key::HeadNum, &status.head_num)); pairs.push(encode_pair(Key::HeadNum, &status.head_num));
@ -273,9 +256,11 @@ pub fn write_handshake(status: &Status, capabilities: &Capabilities, flow_params
pairs.push(encode_flag(Key::TxRelay)); pairs.push(encode_flag(Key::TxRelay));
} }
if let Some(flow_params) = flow_params {
pairs.push(encode_pair(Key::BufferLimit, flow_params.limit())); pairs.push(encode_pair(Key::BufferLimit, flow_params.limit()));
pairs.push(encode_pair(Key::BufferCostTable, flow_params.cost_table())); pairs.push(encode_pair(Key::BufferCostTable, flow_params.cost_table()));
pairs.push(encode_pair(Key::BufferRechargeRate, flow_params.recharge_rate())); pairs.push(encode_pair(Key::BufferRechargeRate, flow_params.recharge_rate()));
}
let mut stream = RlpStream::new_list(pairs.len()); let mut stream = RlpStream::new_list(pairs.len());
@ -385,7 +370,7 @@ mod tests {
fn full_handshake() { fn full_handshake() {
let status = Status { let status = Status {
protocol_version: 1, protocol_version: 1,
network_id: NetworkId::Mainnet, network_id: 1,
head_td: U256::default(), head_td: U256::default(),
head_hash: H256::default(), head_hash: H256::default(),
head_num: 10, head_num: 10,
@ -406,21 +391,21 @@ mod tests {
1000.into(), 1000.into(),
); );
let handshake = write_handshake(&status, &capabilities, &flow_params); let handshake = write_handshake(&status, &capabilities, Some(&flow_params));
let (read_status, read_capabilities, read_flow) let (read_status, read_capabilities, read_flow)
= parse_handshake(UntrustedRlp::new(&handshake)).unwrap(); = parse_handshake(UntrustedRlp::new(&handshake)).unwrap();
assert_eq!(read_status, status); assert_eq!(read_status, status);
assert_eq!(read_capabilities, capabilities); assert_eq!(read_capabilities, capabilities);
assert_eq!(read_flow, flow_params); assert_eq!(read_flow.unwrap(), flow_params);
} }
#[test] #[test]
fn partial_handshake() { fn partial_handshake() {
let status = Status { let status = Status {
protocol_version: 1, protocol_version: 1,
network_id: NetworkId::Mainnet, network_id: 1,
head_td: U256::default(), head_td: U256::default(),
head_hash: H256::default(), head_hash: H256::default(),
head_num: 10, head_num: 10,
@ -441,21 +426,21 @@ mod tests {
1000.into(), 1000.into(),
); );
let handshake = write_handshake(&status, &capabilities, &flow_params); let handshake = write_handshake(&status, &capabilities, Some(&flow_params));
let (read_status, read_capabilities, read_flow) let (read_status, read_capabilities, read_flow)
= parse_handshake(UntrustedRlp::new(&handshake)).unwrap(); = parse_handshake(UntrustedRlp::new(&handshake)).unwrap();
assert_eq!(read_status, status); assert_eq!(read_status, status);
assert_eq!(read_capabilities, capabilities); assert_eq!(read_capabilities, capabilities);
assert_eq!(read_flow, flow_params); assert_eq!(read_flow.unwrap(), flow_params);
} }
#[test] #[test]
fn skip_unknown_keys() { fn skip_unknown_keys() {
let status = Status { let status = Status {
protocol_version: 1, protocol_version: 1,
network_id: NetworkId::Mainnet, network_id: 1,
head_td: U256::default(), head_td: U256::default(),
head_hash: H256::default(), head_hash: H256::default(),
head_num: 10, head_num: 10,
@ -476,7 +461,7 @@ mod tests {
1000.into(), 1000.into(),
); );
let handshake = write_handshake(&status, &capabilities, &flow_params); let handshake = write_handshake(&status, &capabilities, Some(&flow_params));
let interleaved = { let interleaved = {
let handshake = UntrustedRlp::new(&handshake); let handshake = UntrustedRlp::new(&handshake);
let mut stream = RlpStream::new_list(handshake.item_count() * 3); let mut stream = RlpStream::new_list(handshake.item_count() * 3);
@ -498,7 +483,7 @@ mod tests {
assert_eq!(read_status, status); assert_eq!(read_status, status);
assert_eq!(read_capabilities, capabilities); assert_eq!(read_capabilities, capabilities);
assert_eq!(read_flow, flow_params); assert_eq!(read_flow.unwrap(), flow_params);
} }
#[test] #[test]
@ -548,4 +533,33 @@ mod tests {
let out = stream.drain(); let out = stream.drain();
assert!(parse_announcement(UntrustedRlp::new(&out)).is_ok()); assert!(parse_announcement(UntrustedRlp::new(&out)).is_ok());
} }
#[test]
fn optional_flow() {
let status = Status {
protocol_version: 1,
network_id: 1,
head_td: U256::default(),
head_hash: H256::default(),
head_num: 10,
genesis_hash: H256::zero(),
last_head: None,
};
let capabilities = Capabilities {
serve_headers: true,
serve_chain_since: Some(5),
serve_state_since: Some(8),
tx_relay: true,
};
let handshake = write_handshake(&status, &capabilities, None);
let (read_status, read_capabilities, read_flow)
= parse_handshake(UntrustedRlp::new(&handshake)).unwrap();
assert_eq!(read_status, status);
assert_eq!(read_capabilities, capabilities);
assert!(read_flow.is_none());
}
} }
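The tests above exercise the new optional flow-control handshake: write_handshake now takes Option<&FlowParams> and parse_handshake hands the peer's parameters back as an Option. A minimal sketch of the calling pattern, reusing the status/capabilities/flow_params values from the tests (variable names here are illustrative, not part of the diff):

// advertise flow control to the peer...
let with_flow = write_handshake(&status, &capabilities, Some(&flow_params));
// ...or leave it out entirely.
let without_flow = write_handshake(&status, &capabilities, None);

let (_peer_status, _peer_caps, peer_flow) = parse_handshake(UntrustedRlp::new(&with_flow)).unwrap();
match peer_flow {
    Some(params) => { /* requests to this peer are limited by `params` */ }
    None => { /* peer advertised no flow control */ }
}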
View File
@ -0,0 +1,512 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Tests for the `LightProtocol` implementation.
//! These don't cover the higher-level logic built on top of the protocol.
use ethcore::blockchain_info::BlockChainInfo;
use ethcore::client::{BlockChainClient, EachBlockWith, TestBlockChainClient};
use ethcore::ids::BlockId;
use ethcore::transaction::SignedTransaction;
use network::PeerId;
use net::buffer_flow::FlowParams;
use net::context::IoContext;
use net::status::{Capabilities, Status, write_handshake};
use net::{encode_request, LightProtocol, Params, packet};
use provider::Provider;
use request::{self, Request, Headers};
use rlp::*;
use util::{Bytes, H256, U256};
use std::sync::Arc;
// expected result from a call.
#[derive(Debug, PartialEq, Eq)]
enum Expect {
/// Expect to have message sent to peer.
Send(PeerId, u8, Vec<u8>),
/// Expect this response.
Respond(u8, Vec<u8>),
/// Expect a punishment (disconnect/disable)
Punish(PeerId),
/// Expect nothing.
Nothing,
}
impl IoContext for Expect {
fn send(&self, peer: PeerId, packet_id: u8, packet_body: Vec<u8>) {
assert_eq!(self, &Expect::Send(peer, packet_id, packet_body));
}
fn respond(&self, packet_id: u8, packet_body: Vec<u8>) {
assert_eq!(self, &Expect::Respond(packet_id, packet_body));
}
fn disconnect_peer(&self, peer: PeerId) {
assert_eq!(self, &Expect::Punish(peer));
}
fn disable_peer(&self, peer: PeerId) {
assert_eq!(self, &Expect::Punish(peer));
}
fn protocol_version(&self, _peer: PeerId) -> Option<u8> {
Some(super::MAX_PROTOCOL_VERSION)
}
}
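// Each test hands the protocol a single `Expect` value as its mock `IoContext`:
// every callback asserts that the action the protocol actually takes matches the
// expectation, so `Expect::Nothing` fails the test if the protocol sends,
// responds, or punishes at all.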
// can't implement directly for Arc due to cross-crate orphan rules.
struct TestProvider(Arc<TestProviderInner>);
struct TestProviderInner {
client: TestBlockChainClient,
}
impl Provider for TestProvider {
fn chain_info(&self) -> BlockChainInfo {
self.0.client.chain_info()
}
fn reorg_depth(&self, a: &H256, b: &H256) -> Option<u64> {
self.0.client.tree_route(a, b).map(|route| route.index as u64)
}
fn earliest_state(&self) -> Option<u64> {
None
}
fn block_headers(&self, req: request::Headers) -> Vec<Bytes> {
let best_num = self.0.client.chain_info().best_block_number;
let start_num = req.block_num;
match self.0.client.block_hash(BlockId::Number(req.block_num)) {
Some(hash) if hash == req.block_hash => {}
_ => {
trace!(target: "les_provider", "unknown/non-canonical start block in header request: {:?}", (req.block_num, req.block_hash));
return vec![]
}
}
(0u64..req.max as u64)
.map(|x: u64| x.saturating_mul(req.skip + 1))
.take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num >= *x })
.map(|x| if req.reverse { start_num - x } else { start_num + x })
.map(|x| self.0.client.block_header(BlockId::Number(x)))
.take_while(|x| x.is_some())
.flat_map(|x| x)
.collect()
}
fn block_bodies(&self, req: request::Bodies) -> Vec<Bytes> {
req.block_hashes.into_iter()
.map(|hash| self.0.client.block_body(BlockId::Hash(hash)))
.map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec()))
.collect()
}
fn receipts(&self, req: request::Receipts) -> Vec<Bytes> {
req.block_hashes.into_iter()
.map(|hash| self.0.client.block_receipts(&hash))
.map(|receipts| receipts.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec()))
.collect()
}
fn proofs(&self, req: request::StateProofs) -> Vec<Bytes> {
req.requests.into_iter()
.map(|req| {
match req.key2 {
Some(_) => ::util::sha3::SHA3_NULL_RLP.to_vec(),
None => {
// sort of a leaf node
let mut stream = RlpStream::new_list(2);
stream.append(&req.key1).append_empty_data();
stream.out()
}
}
})
.collect()
}
fn contract_code(&self, req: request::ContractCodes) -> Vec<Bytes> {
req.code_requests.into_iter()
.map(|req| {
req.account_key.iter().chain(req.account_key.iter()).cloned().collect()
})
.collect()
}
fn header_proofs(&self, req: request::HeaderProofs) -> Vec<Bytes> {
req.requests.into_iter().map(|_| ::rlp::EMPTY_LIST_RLP.to_vec()).collect()
}
fn pending_transactions(&self) -> Vec<SignedTransaction> {
self.0.client.pending_transactions()
}
}
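// Flow parameters shared by most of the tests below; the arguments are assumed
// to be the buffer limit, the (default) request cost table, and the recharge rate.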
fn make_flow_params() -> FlowParams {
FlowParams::new(5_000_000.into(), Default::default(), 100_000.into())
}
fn capabilities() -> Capabilities {
Capabilities {
serve_headers: true,
serve_chain_since: Some(1),
serve_state_since: Some(1),
tx_relay: true,
}
}
// helper for setting up the protocol handler and provider.
fn setup(flow_params: FlowParams, capabilities: Capabilities) -> (Arc<TestProviderInner>, LightProtocol) {
let provider = Arc::new(TestProviderInner {
client: TestBlockChainClient::new(),
});
let proto = LightProtocol::new(Arc::new(TestProvider(provider.clone())), Params {
network_id: 2,
flow_params: flow_params,
capabilities: capabilities,
});
(provider, proto)
}
fn status(chain_info: BlockChainInfo) -> Status {
Status {
protocol_version: 1,
network_id: 2,
head_td: chain_info.total_difficulty,
head_hash: chain_info.best_block_hash,
head_num: chain_info.best_block_number,
genesis_hash: chain_info.genesis_hash,
last_head: None,
}
}
#[test]
fn handshake_expected() {
let flow_params = make_flow_params();
let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let status = status(provider.client.chain_info());
let packet_body = write_handshake(&status, &capabilities, Some(&flow_params));
proto.on_connect(&1, &Expect::Send(1, packet::STATUS, packet_body));
}
#[test]
#[should_panic]
fn genesis_mismatch() {
let flow_params = make_flow_params();
let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let mut status = status(provider.client.chain_info());
status.genesis_hash = H256::default();
let packet_body = write_handshake(&status, &capabilities, Some(&flow_params));
proto.on_connect(&1, &Expect::Send(1, packet::STATUS, packet_body));
}
#[test]
fn buffer_overflow() {
let flow_params = make_flow_params();
let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let status = status(provider.client.chain_info());
{
let packet_body = write_handshake(&status, &capabilities, Some(&flow_params));
proto.on_connect(&1, &Expect::Send(1, packet::STATUS, packet_body));
}
{
let my_status = write_handshake(&status, &capabilities, Some(&flow_params));
proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &my_status);
}
// 1000 requests is far too many for the default flow params.
let request = encode_request(&Request::Headers(Headers {
block_num: 1,
block_hash: provider.client.chain_info().genesis_hash,
max: 1000,
skip: 0,
reverse: false,
}), 111);
proto.handle_packet(&Expect::Punish(1), &1, packet::GET_BLOCK_HEADERS, &request);
}
// Tests for the basic request types -- these make sure that requests are parsed
// and dispatched to the provider correctly, and that responses are formatted as expected.
#[test]
fn get_block_headers() {
let flow_params = FlowParams::new(5_000_000.into(), Default::default(), 0.into());
let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let cur_status = status(provider.client.chain_info());
let my_status = write_handshake(&cur_status, &capabilities, Some(&flow_params));
provider.client.add_blocks(100, EachBlockWith::Nothing);
let cur_status = status(provider.client.chain_info());
{
let packet_body = write_handshake(&cur_status, &capabilities, Some(&flow_params));
proto.on_connect(&1, &Expect::Send(1, packet::STATUS, packet_body));
proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &my_status);
}
let request = Headers {
block_num: 1,
block_hash: provider.client.block_hash(BlockId::Number(1)).unwrap(),
max: 10,
skip: 0,
reverse: false,
};
let req_id = 111;
let request_body = encode_request(&Request::Headers(request.clone()), req_id);
let response = {
let headers: Vec<_> = (0..10).map(|i| provider.client.block_header(BlockId::Number(i + 1)).unwrap()).collect();
assert_eq!(headers.len(), 10);
let new_buf = *flow_params.limit() - flow_params.compute_cost(request::Kind::Headers, 10);
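// expected response layout: [req_id, new_buffer_estimate, header_0, ..., header_9],
// i.e. a 12-item RLP list.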
let mut response_stream = RlpStream::new_list(12);
response_stream.append(&req_id).append(&new_buf);
for header in headers {
response_stream.append_raw(&header, 1);
}
response_stream.out()
};
let expected = Expect::Respond(packet::BLOCK_HEADERS, response);
proto.handle_packet(&expected, &1, packet::GET_BLOCK_HEADERS, &request_body);
}
#[test]
fn get_block_bodies() {
let flow_params = FlowParams::new(5_000_000.into(), Default::default(), 0.into());
let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let cur_status = status(provider.client.chain_info());
let my_status = write_handshake(&cur_status, &capabilities, Some(&flow_params));
provider.client.add_blocks(100, EachBlockWith::Nothing);
let cur_status = status(provider.client.chain_info());
{
let packet_body = write_handshake(&cur_status, &capabilities, Some(&flow_params));
proto.on_connect(&1, &Expect::Send(1, packet::STATUS, packet_body));
proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &my_status);
}
let request = request::Bodies {
block_hashes: (0..10).map(|i| provider.client.block_hash(BlockId::Number(i)).unwrap()).collect(),
};
let req_id = 111;
let request_body = encode_request(&Request::Bodies(request.clone()), req_id);
let response = {
let bodies: Vec<_> = (0..10).map(|i| provider.client.block_body(BlockId::Number(i + 1)).unwrap()).collect();
assert_eq!(bodies.len(), 10);
let new_buf = *flow_params.limit() - flow_params.compute_cost(request::Kind::Bodies, 10);
let mut response_stream = RlpStream::new_list(12);
response_stream.append(&req_id).append(&new_buf);
for body in bodies {
response_stream.append_raw(&body, 1);
}
response_stream.out()
};
let expected = Expect::Respond(packet::BLOCK_BODIES, response);
proto.handle_packet(&expected, &1, packet::GET_BLOCK_BODIES, &request_body);
}
#[test]
fn get_block_receipts() {
let flow_params = FlowParams::new(5_000_000.into(), Default::default(), 0.into());
let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let cur_status = status(provider.client.chain_info());
let my_status = write_handshake(&cur_status, &capabilities, Some(&flow_params));
provider.client.add_blocks(1000, EachBlockWith::Nothing);
let cur_status = status(provider.client.chain_info());
{
let packet_body = write_handshake(&cur_status, &capabilities, Some(&flow_params));
proto.on_connect(&1, &Expect::Send(1, packet::STATUS, packet_body));
proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &my_status);
}
// find the first 10 block hashes starting with `f` because receipts are only provided
// by the test client in that case.
let block_hashes: Vec<_> = (0..1000).map(|i| provider.client.block_hash(BlockId::Number(i)).unwrap())
.filter(|hash| format!("{}", hash).starts_with("f")).take(10).collect();
let request = request::Receipts {
block_hashes: block_hashes.clone(),
};
let req_id = 111;
let request_body = encode_request(&Request::Receipts(request.clone()), req_id);
let response = {
let receipts: Vec<_> = block_hashes.iter()
.map(|hash| provider.client.block_receipts(hash).unwrap())
.collect();
let new_buf = *flow_params.limit() - flow_params.compute_cost(request::Kind::Receipts, receipts.len());
let mut response_stream = RlpStream::new_list(2 + receipts.len());
response_stream.append(&req_id).append(&new_buf);
for block_receipts in receipts {
response_stream.append_raw(&block_receipts, 1);
}
response_stream.out()
};
let expected = Expect::Respond(packet::RECEIPTS, response);
proto.handle_packet(&expected, &1, packet::GET_RECEIPTS, &request_body);
}
#[test]
fn get_state_proofs() {
let flow_params = FlowParams::new(5_000_000.into(), Default::default(), 0.into());
let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let cur_status = status(provider.client.chain_info());
{
let packet_body = write_handshake(&cur_status, &capabilities, Some(&flow_params));
proto.on_connect(&1, &Expect::Send(1, packet::STATUS, packet_body.clone()));
proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &packet_body);
}
let req_id = 112;
let key1 = U256::from(11223344).into();
let key2 = U256::from(99988887).into();
let request = Request::StateProofs(request::StateProofs {
requests: vec![
request::StateProof { block: H256::default(), key1: key1, key2: None, from_level: 0 },
request::StateProof { block: H256::default(), key1: key1, key2: Some(key2), from_level: 0},
]
});
let request_body = encode_request(&request, req_id);
let response = {
let proofs = vec![
{ let mut stream = RlpStream::new_list(2); stream.append(&key1).append_empty_data(); stream.out() },
::util::sha3::SHA3_NULL_RLP.to_vec(),
];
let new_buf = *flow_params.limit() - flow_params.compute_cost(request::Kind::StateProofs, 2);
let mut response_stream = RlpStream::new_list(4);
response_stream.append(&req_id).append(&new_buf);
for proof in proofs {
response_stream.append_raw(&proof, 1);
}
response_stream.out()
};
let expected = Expect::Respond(packet::PROOFS, response);
proto.handle_packet(&expected, &1, packet::GET_PROOFS, &request_body);
}
#[test]
fn get_contract_code() {
let flow_params = FlowParams::new(5_000_000.into(), Default::default(), 0.into());
let capabilities = capabilities();
let (provider, proto) = setup(flow_params.clone(), capabilities.clone());
let cur_status = status(provider.client.chain_info());
{
let packet_body = write_handshake(&cur_status, &capabilities, Some(&flow_params));
proto.on_connect(&1, &Expect::Send(1, packet::STATUS, packet_body.clone()));
proto.handle_packet(&Expect::Nothing, &1, packet::STATUS, &packet_body);
}
let req_id = 112;
let key1 = U256::from(11223344).into();
let key2 = U256::from(99988887).into();
let request = Request::Codes(request::ContractCodes {
code_requests: vec![
request::ContractCode { block_hash: H256::default(), account_key: key1 },
request::ContractCode { block_hash: H256::default(), account_key: key2 },
],
});
let request_body = encode_request(&request, req_id);
let response = {
let codes: Vec<Vec<_>> = vec![
key1.iter().chain(key1.iter()).cloned().collect(),
key2.iter().chain(key2.iter()).cloned().collect(),
];
let new_buf = *flow_params.limit() - flow_params.compute_cost(request::Kind::Codes, 2);
let mut response_stream = RlpStream::new_list(4);
response_stream.append(&req_id).append(&new_buf);
for code in codes {
response_stream.append(&code);
}
response_stream.out()
};
let expected = Expect::Respond(packet::CONTRACT_CODES, response);
proto.handle_packet(&expected, &1, packet::GET_CONTRACT_CODES, &request_body);
}
View File
@ -20,7 +20,7 @@
use ethcore::blockchain_info::BlockChainInfo; use ethcore::blockchain_info::BlockChainInfo;
use ethcore::client::{BlockChainClient, ProvingBlockChainClient}; use ethcore::client::{BlockChainClient, ProvingBlockChainClient};
use ethcore::transaction::SignedTransaction; use ethcore::transaction::SignedTransaction;
use ethcore::ids::BlockID; use ethcore::ids::BlockId;
use util::{Bytes, H256}; use util::{Bytes, H256};
@ -33,6 +33,7 @@ use request;
/// or empty vector where appropriate. /// or empty vector where appropriate.
/// ///
/// [1]: https://github.com/ethcore/parity/wiki/Light-Ethereum-Subprotocol-(LES) /// [1]: https://github.com/ethcore/parity/wiki/Light-Ethereum-Subprotocol-(LES)
#[cfg_attr(feature = "ipc", ipc(client_ident="LightProviderClient"))]
pub trait Provider: Send + Sync { pub trait Provider: Send + Sync {
/// Provide current blockchain info. /// Provide current blockchain info.
fn chain_info(&self) -> BlockChainInfo; fn chain_info(&self) -> BlockChainInfo;
@ -71,7 +72,10 @@ pub trait Provider: Send + Sync {
/// Each item in the resulting vector is either the raw bytecode or empty. /// Each item in the resulting vector is either the raw bytecode or empty.
fn contract_code(&self, req: request::ContractCodes) -> Vec<Bytes>; fn contract_code(&self, req: request::ContractCodes) -> Vec<Bytes>;
/// Provide header proofs from the Canonical Hash Tries. /// Provide header proofs from the Canonical Hash Tries as well as the headers
/// they correspond to -- each element in the returned vector is a 2-tuple.
/// The first element is a block header and the second a merkle proof of
/// the header in a requested CHT.
fn header_proofs(&self, req: request::HeaderProofs) -> Vec<Bytes>; fn header_proofs(&self, req: request::HeaderProofs) -> Vec<Bytes>;
/// Provide pending transactions. /// Provide pending transactions.
@ -96,7 +100,7 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
let best_num = self.chain_info().best_block_number; let best_num = self.chain_info().best_block_number;
let start_num = req.block_num; let start_num = req.block_num;
match self.block_hash(BlockID::Number(req.block_num)) { match self.block_hash(BlockId::Number(req.block_num)) {
Some(hash) if hash == req.block_hash => {} Some(hash) if hash == req.block_hash => {}
_=> { _=> {
trace!(target: "les_provider", "unknown/non-canonical start block in header request: {:?}", (req.block_num, req.block_hash)); trace!(target: "les_provider", "unknown/non-canonical start block in header request: {:?}", (req.block_num, req.block_hash));
@ -105,10 +109,10 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
} }
(0u64..req.max as u64) (0u64..req.max as u64)
.map(|x: u64| x.saturating_mul(req.skip)) .map(|x: u64| x.saturating_mul(req.skip + 1))
.take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num < *x }) .take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num >= *x })
.map(|x| if req.reverse { start_num - x } else { start_num + x }) .map(|x| if req.reverse { start_num - x } else { start_num + x })
.map(|x| self.block_header(BlockID::Number(x))) .map(|x| self.block_header(BlockId::Number(x)))
.take_while(|x| x.is_some()) .take_while(|x| x.is_some())
.flat_map(|x| x) .flat_map(|x| x)
.collect() .collect()
@ -116,7 +120,7 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
fn block_bodies(&self, req: request::Bodies) -> Vec<Bytes> { fn block_bodies(&self, req: request::Bodies) -> Vec<Bytes> {
req.block_hashes.into_iter() req.block_hashes.into_iter()
.map(|hash| self.block_body(BlockID::Hash(hash))) .map(|hash| self.block_body(BlockId::Hash(hash)))
.map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) .map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec()))
.collect() .collect()
} }
@ -135,8 +139,8 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
for request in req.requests { for request in req.requests {
let proof = match request.key2 { let proof = match request.key2 {
Some(key2) => self.prove_storage(request.key1, key2, request.from_level, BlockID::Hash(request.block)), Some(key2) => self.prove_storage(request.key1, key2, request.from_level, BlockId::Hash(request.block)),
None => self.prove_account(request.key1, request.from_level, BlockID::Hash(request.block)), None => self.prove_account(request.key1, request.from_level, BlockId::Hash(request.block)),
}; };
let mut stream = RlpStream::new_list(proof.len()); let mut stream = RlpStream::new_list(proof.len());
@ -153,7 +157,7 @@ impl<T: ProvingBlockChainClient + ?Sized> Provider for T {
fn contract_code(&self, req: request::ContractCodes) -> Vec<Bytes> { fn contract_code(&self, req: request::ContractCodes) -> Vec<Bytes> {
req.code_requests.into_iter() req.code_requests.into_iter()
.map(|req| { .map(|req| {
self.code_by_hash(req.account_key, BlockID::Hash(req.block_hash)) self.code_by_hash(req.account_key, BlockId::Hash(req.block_hash))
}) })
.collect() .collect()
} }
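The two iterator fixes above change how header offsets are generated for a Headers request: offsets now step by req.skip + 1 (so skip = 0 walks consecutive blocks), and the forward-direction bound keeps an offset while best_num - start_num >= offset rather than the previous, inverted < comparison. A small self-contained sketch of the corrected offset generation, using illustrative values rather than the real request type:

#[test]
fn corrected_header_offsets() {
    // start at block 1, ask for up to 5 headers, skip 0, walking forward
    let (start_num, best_num, max, skip, reverse) = (1u64, 100u64, 5u64, 0u64, false);
    let offsets: Vec<u64> = (0u64..max)
        .map(|x| x.saturating_mul(skip + 1))
        .take_while(|x| if reverse { x < &start_num } else { best_num - start_num >= *x })
        .collect();
    // with the old `skip` multiplier these would all have collapsed to 0
    assert_eq!(offsets, vec![0, 1, 2, 3, 4]);
}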
View File
@ -19,7 +19,8 @@
use util::H256; use util::H256;
/// A request for block headers. /// A request for block headers.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct Headers { pub struct Headers {
/// Starting block number /// Starting block number
pub block_num: u64, pub block_num: u64,
@ -35,7 +36,8 @@ pub struct Headers {
} }
/// A request for specific block bodies. /// A request for specific block bodies.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct Bodies { pub struct Bodies {
/// Hashes which bodies are being requested for. /// Hashes which bodies are being requested for.
pub block_hashes: Vec<H256> pub block_hashes: Vec<H256>
@ -45,14 +47,16 @@ pub struct Bodies {
/// ///
/// This request is answered with a list of transaction receipts for each block /// This request is answered with a list of transaction receipts for each block
/// requested. /// requested.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct Receipts { pub struct Receipts {
/// Block hashes to return receipts for. /// Block hashes to return receipts for.
pub block_hashes: Vec<H256>, pub block_hashes: Vec<H256>,
} }
/// A request for a state proof /// A request for a state proof
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct StateProof { pub struct StateProof {
/// Block hash to query state from. /// Block hash to query state from.
pub block: H256, pub block: H256,
@ -66,14 +70,16 @@ pub struct StateProof {
} }
/// A request for state proofs. /// A request for state proofs.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct StateProofs { pub struct StateProofs {
/// All the proof requests. /// All the proof requests.
pub requests: Vec<StateProof>, pub requests: Vec<StateProof>,
} }
/// A request for contract code. /// A request for contract code.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct ContractCode { pub struct ContractCode {
/// Block hash /// Block hash
pub block_hash: H256, pub block_hash: H256,
@ -82,14 +88,16 @@ pub struct ContractCode {
} }
/// A request for contract code. /// A request for contract code.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct ContractCodes { pub struct ContractCodes {
/// Block hash and account key (== sha3(address)) pairs to fetch code for. /// Block hash and account key (== sha3(address)) pairs to fetch code for.
pub code_requests: Vec<ContractCode>, pub code_requests: Vec<ContractCode>,
} }
/// A request for a header proof from the Canonical Hash Trie. /// A request for a header proof from the Canonical Hash Trie.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct HeaderProof { pub struct HeaderProof {
/// Number of the CHT. /// Number of the CHT.
pub cht_number: u64, pub cht_number: u64,
@ -100,14 +108,16 @@ pub struct HeaderProof {
} }
/// A request for header proofs from the CHT. /// A request for header proofs from the CHT.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct HeaderProofs { pub struct HeaderProofs {
/// All the proof requests. /// All the proof requests.
pub requests: Vec<HeaderProof>, pub requests: Vec<HeaderProof>,
} }
/// Kinds of requests. /// Kinds of requests.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Binary)] #[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub enum Kind { pub enum Kind {
/// Requesting headers. /// Requesting headers.
Headers, Headers,
@ -124,7 +134,8 @@ pub enum Kind {
} }
/// Encompasses all possible types of requests in a single structure. /// Encompasses all possible types of requests in a single structure.
#[derive(Debug, Clone, PartialEq, Eq, Binary)] #[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub enum Request { pub enum Request {
/// Requesting headers. /// Requesting headers.
Headers(Headers), Headers(Headers),
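Each request type above swaps an unconditional derive(Binary) for a cfg_attr gate, so the IPC serialization derive is only expanded when the crate is built with the ipc feature. The same pattern in isolation, on a hypothetical type that is not part of the diff:

/// Illustrative only.
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "ipc", derive(Binary))]
pub struct Example {
    /// Opaque payload bytes.
    pub data: Vec<u8>,
}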
View File
@ -15,6 +15,11 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Types used in the public (IPC) api which require custom code generation. //! Types used in the public (IPC) api which require custom code generation.
#![cfg_attr(feature = "ipc", allow(dead_code, unused_assignments, unused_variables))] // codegen issues
#![allow(dead_code, unused_assignments, unused_variables)] // codegen issues
#[cfg(feature = "ipc")]
include!(concat!(env!("OUT_DIR"), "/mod.rs.in")); include!(concat!(env!("OUT_DIR"), "/mod.rs.in"));
#[cfg(not(feature = "ipc"))]
include!("mod.rs.in");
View File
@ -170,9 +170,6 @@
"enode://cadc6e573b6bc2a9128f2f635ac0db3353e360b56deef239e9be7e7fce039502e0ec670b595f6288c0d2116812516ad6b6ff8d5728ff45eba176989e40dead1e@37.128.191.230:30303", "enode://cadc6e573b6bc2a9128f2f635ac0db3353e360b56deef239e9be7e7fce039502e0ec670b595f6288c0d2116812516ad6b6ff8d5728ff45eba176989e40dead1e@37.128.191.230:30303",
"enode://595a9a06f8b9bc9835c8723b6a82105aea5d55c66b029b6d44f229d6d135ac3ecdd3e9309360a961ea39d7bee7bac5d03564077a4e08823acc723370aace65ec@46.20.235.22:30303", "enode://595a9a06f8b9bc9835c8723b6a82105aea5d55c66b029b6d44f229d6d135ac3ecdd3e9309360a961ea39d7bee7bac5d03564077a4e08823acc723370aace65ec@46.20.235.22:30303",
"enode://029178d6d6f9f8026fc0bc17d5d1401aac76ec9d86633bba2320b5eed7b312980c0a210b74b20c4f9a8b0b2bf884b111fa9ea5c5f916bb9bbc0e0c8640a0f56c@216.158.85.185:30303", "enode://029178d6d6f9f8026fc0bc17d5d1401aac76ec9d86633bba2320b5eed7b312980c0a210b74b20c4f9a8b0b2bf884b111fa9ea5c5f916bb9bbc0e0c8640a0f56c@216.158.85.185:30303",
"enode://84f5d5957b4880a8b0545e32e05472318898ad9fc8ebe1d56c90c12334a98e12351eccfdf3a2bf72432ac38b57e9d348400d17caa083879ade3822390f89773f@10.1.52.78:30303",
"enode://f90dc9b9bf7b8db97726b7849e175f1eb2707f3d8f281c929336e398dd89b0409fc6aeceb89e846278e9d3ecc3857cebfbe6758ff352ece6fe5d42921ee761db@10.1.173.87:30303",
"enode://6a868ced2dec399c53f730261173638a93a40214cf299ccf4d42a76e3fa54701db410669e8006347a4b3a74fa090bb35af0320e4bc8d04cf5b7f582b1db285f5@10.3.149.199:30303",
"enode://fdd1b9bb613cfbc200bba17ce199a9490edc752a833f88d4134bf52bb0d858aa5524cb3ec9366c7a4ef4637754b8b15b5dc913e4ed9fdb6022f7512d7b63f181@212.47.247.103:30303", "enode://fdd1b9bb613cfbc200bba17ce199a9490edc752a833f88d4134bf52bb0d858aa5524cb3ec9366c7a4ef4637754b8b15b5dc913e4ed9fdb6022f7512d7b63f181@212.47.247.103:30303",
"enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303", "enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303",
"enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303", "enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303",
View File
@ -213,7 +213,7 @@ impl AccountProvider {
Ok(AccountMeta { Ok(AccountMeta {
name: try!(self.sstore.name(&account)), name: try!(self.sstore.name(&account)),
meta: try!(self.sstore.meta(&account)), meta: try!(self.sstore.meta(&account)),
uuid: self.sstore.uuid(&account).ok().map(Into::into), // allowed to not have a UUID uuid: self.sstore.uuid(&account).ok().map(Into::into), // allowed to not have a Uuid
}) })
} }
View File
@ -146,7 +146,7 @@ pub trait BlockProvider {
} }
#[derive(Debug, Hash, Eq, PartialEq, Clone)] #[derive(Debug, Hash, Eq, PartialEq, Clone)]
enum CacheID { enum CacheId {
BlockHeader(H256), BlockHeader(H256),
BlockBody(H256), BlockBody(H256),
BlockDetails(H256), BlockDetails(H256),
@ -160,7 +160,7 @@ impl bc::group::BloomGroupDatabase for BlockChain {
fn blooms_at(&self, position: &bc::group::GroupPosition) -> Option<bc::group::BloomGroup> { fn blooms_at(&self, position: &bc::group::GroupPosition) -> Option<bc::group::BloomGroup> {
let position = LogGroupPosition::from(position.clone()); let position = LogGroupPosition::from(position.clone());
let result = self.db.read_with_cache(db::COL_EXTRA, &self.blocks_blooms, &position).map(Into::into); let result = self.db.read_with_cache(db::COL_EXTRA, &self.blocks_blooms, &position).map(Into::into);
self.cache_man.lock().note_used(CacheID::BlocksBlooms(position)); self.cache_man.lock().note_used(CacheId::BlocksBlooms(position));
result result
} }
} }
@ -193,7 +193,7 @@ pub struct BlockChain {
db: Arc<Database>, db: Arc<Database>,
cache_man: Mutex<CacheManager<CacheID>>, cache_man: Mutex<CacheManager<CacheId>>,
pending_best_block: RwLock<Option<BestBlock>>, pending_best_block: RwLock<Option<BestBlock>>,
pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>, pending_block_hashes: RwLock<HashMap<BlockNumber, H256>>,
@ -270,7 +270,7 @@ impl BlockProvider for BlockChain {
None => None None => None
}; };
self.cache_man.lock().note_used(CacheID::BlockHeader(hash.clone())); self.cache_man.lock().note_used(CacheId::BlockHeader(hash.clone()));
result result
} }
@ -306,7 +306,7 @@ impl BlockProvider for BlockChain {
None => None None => None
}; };
self.cache_man.lock().note_used(CacheID::BlockBody(hash.clone())); self.cache_man.lock().note_used(CacheId::BlockBody(hash.clone()));
result result
} }
@ -314,28 +314,28 @@ impl BlockProvider for BlockChain {
/// Get the familial details concerning a block. /// Get the familial details concerning a block.
fn block_details(&self, hash: &H256) -> Option<BlockDetails> { fn block_details(&self, hash: &H256) -> Option<BlockDetails> {
let result = self.db.read_with_cache(db::COL_EXTRA, &self.block_details, hash); let result = self.db.read_with_cache(db::COL_EXTRA, &self.block_details, hash);
self.cache_man.lock().note_used(CacheID::BlockDetails(hash.clone())); self.cache_man.lock().note_used(CacheId::BlockDetails(hash.clone()));
result result
} }
/// Get the hash of given block's number. /// Get the hash of given block's number.
fn block_hash(&self, index: BlockNumber) -> Option<H256> { fn block_hash(&self, index: BlockNumber) -> Option<H256> {
let result = self.db.read_with_cache(db::COL_EXTRA, &self.block_hashes, &index); let result = self.db.read_with_cache(db::COL_EXTRA, &self.block_hashes, &index);
self.cache_man.lock().note_used(CacheID::BlockHashes(index)); self.cache_man.lock().note_used(CacheId::BlockHashes(index));
result result
} }
/// Get the address of transaction with given hash. /// Get the address of transaction with given hash.
fn transaction_address(&self, hash: &H256) -> Option<TransactionAddress> { fn transaction_address(&self, hash: &H256) -> Option<TransactionAddress> {
let result = self.db.read_with_cache(db::COL_EXTRA, &self.transaction_addresses, hash); let result = self.db.read_with_cache(db::COL_EXTRA, &self.transaction_addresses, hash);
self.cache_man.lock().note_used(CacheID::TransactionAddresses(hash.clone())); self.cache_man.lock().note_used(CacheId::TransactionAddresses(hash.clone()));
result result
} }
/// Get receipts of block with given hash. /// Get receipts of block with given hash.
fn block_receipts(&self, hash: &H256) -> Option<BlockReceipts> { fn block_receipts(&self, hash: &H256) -> Option<BlockReceipts> {
let result = self.db.read_with_cache(db::COL_EXTRA, &self.block_receipts, hash); let result = self.db.read_with_cache(db::COL_EXTRA, &self.block_receipts, hash);
self.cache_man.lock().note_used(CacheID::BlockReceipts(hash.clone())); self.cache_man.lock().note_used(CacheId::BlockReceipts(hash.clone()));
result result
} }
@ -809,7 +809,7 @@ impl BlockChain {
let mut write_details = self.block_details.write(); let mut write_details = self.block_details.write();
batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update, CacheUpdatePolicy::Overwrite); batch.extend_with_cache(db::COL_EXTRA, &mut *write_details, update, CacheUpdatePolicy::Overwrite);
self.cache_man.lock().note_used(CacheID::BlockDetails(block_hash)); self.cache_man.lock().note_used(CacheId::BlockDetails(block_hash));
} }
#[cfg_attr(feature="dev", allow(similar_names))] #[cfg_attr(feature="dev", allow(similar_names))]
@ -968,15 +968,15 @@ impl BlockChain {
let mut cache_man = self.cache_man.lock(); let mut cache_man = self.cache_man.lock();
for n in pending_hashes_keys { for n in pending_hashes_keys {
cache_man.note_used(CacheID::BlockHashes(n)); cache_man.note_used(CacheId::BlockHashes(n));
} }
for hash in enacted_txs_keys { for hash in enacted_txs_keys {
cache_man.note_used(CacheID::TransactionAddresses(hash)); cache_man.note_used(CacheId::TransactionAddresses(hash));
} }
for hash in pending_block_hashes { for hash in pending_block_hashes {
cache_man.note_used(CacheID::BlockDetails(hash)); cache_man.note_used(CacheId::BlockDetails(hash));
} }
} }
@ -1244,13 +1244,13 @@ impl BlockChain {
cache_man.collect_garbage(current_size, | ids | { cache_man.collect_garbage(current_size, | ids | {
for id in &ids { for id in &ids {
match *id { match *id {
CacheID::BlockHeader(ref h) => { block_headers.remove(h); }, CacheId::BlockHeader(ref h) => { block_headers.remove(h); },
CacheID::BlockBody(ref h) => { block_bodies.remove(h); }, CacheId::BlockBody(ref h) => { block_bodies.remove(h); },
CacheID::BlockDetails(ref h) => { block_details.remove(h); } CacheId::BlockDetails(ref h) => { block_details.remove(h); }
CacheID::BlockHashes(ref h) => { block_hashes.remove(h); } CacheId::BlockHashes(ref h) => { block_hashes.remove(h); }
CacheID::TransactionAddresses(ref h) => { transaction_addresses.remove(h); } CacheId::TransactionAddresses(ref h) => { transaction_addresses.remove(h); }
CacheID::BlocksBlooms(ref h) => { blocks_blooms.remove(h); } CacheId::BlocksBlooms(ref h) => { blocks_blooms.remove(h); }
CacheID::BlockReceipts(ref h) => { block_receipts.remove(h); } CacheId::BlockReceipts(ref h) => { block_receipts.remove(h); }
} }
} }
View File
@ -50,9 +50,9 @@ use log_entry::LocalizedLogEntry;
use verification::queue::BlockQueue; use verification::queue::BlockQueue;
use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute}; use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute};
use client::{ use client::{
BlockID, TransactionID, UncleID, TraceId, ClientConfig, BlockChainClient, BlockId, TransactionId, UncleId, TraceId, ClientConfig, BlockChainClient,
MiningBlockChainClient, TraceFilter, CallAnalytics, BlockImportError, Mode, MiningBlockChainClient, TraceFilter, CallAnalytics, BlockImportError, Mode,
ChainNotify, PruningInfo, ProvingBlockChainClient, ChainNotify, PruningInfo,
}; };
use client::Error as ClientError; use client::Error as ClientError;
use env_info::EnvInfo; use env_info::EnvInfo;
@ -580,13 +580,13 @@ impl Client {
/// Attempt to get a copy of a specific block's final state. /// Attempt to get a copy of a specific block's final state.
/// ///
/// This will not fail if given BlockID::Latest. /// This will not fail if given BlockId::Latest.
/// Otherwise, this can fail (but may not) if the DB prunes state. /// Otherwise, this can fail (but may not) if the DB prunes state.
pub fn state_at(&self, id: BlockID) -> Option<State> { pub fn state_at(&self, id: BlockId) -> Option<State> {
// fast path for latest state. // fast path for latest state.
match id.clone() { match id.clone() {
BlockID::Pending => return self.miner.pending_state().or_else(|| Some(self.state())), BlockId::Pending => return self.miner.pending_state().or_else(|| Some(self.state())),
BlockID::Latest => return Some(self.state()), BlockId::Latest => return Some(self.state()),
_ => {}, _ => {},
} }
@ -611,15 +611,15 @@ impl Client {
/// Attempt to get a copy of a specific block's beginning state. /// Attempt to get a copy of a specific block's beginning state.
/// ///
/// This will not fail if given BlockID::Latest. /// This will not fail if given BlockId::Latest.
/// Otherwise, this can fail (but may not) if the DB prunes state. /// Otherwise, this can fail (but may not) if the DB prunes state.
pub fn state_at_beginning(&self, id: BlockID) -> Option<State> { pub fn state_at_beginning(&self, id: BlockId) -> Option<State> {
// fast path for latest state. // fast path for latest state.
match id { match id {
BlockID::Pending => self.state_at(BlockID::Latest), BlockId::Pending => self.state_at(BlockId::Latest),
id => match self.block_number(id) { id => match self.block_number(id) {
None | Some(0) => None, None | Some(0) => None,
Some(n) => self.state_at(BlockID::Number(n - 1)), Some(n) => self.state_at(BlockId::Number(n - 1)),
} }
} }
} }
@ -689,18 +689,18 @@ impl Client {
} }
/// Look up the block number for the given block ID. /// Look up the block number for the given block ID.
pub fn block_number(&self, id: BlockID) -> Option<BlockNumber> { pub fn block_number(&self, id: BlockId) -> Option<BlockNumber> {
match id { match id {
BlockID::Number(number) => Some(number), BlockId::Number(number) => Some(number),
BlockID::Hash(ref hash) => self.chain.read().block_number(hash), BlockId::Hash(ref hash) => self.chain.read().block_number(hash),
BlockID::Earliest => Some(0), BlockId::Earliest => Some(0),
BlockID::Latest | BlockID::Pending => Some(self.chain.read().best_block_number()), BlockId::Latest | BlockId::Pending => Some(self.chain.read().best_block_number()),
} }
} }
/// Take a snapshot at the given block. /// Take a snapshot at the given block.
/// If the ID given is "latest", this will default to 1000 blocks behind. /// If the ID given is "latest", this will default to 1000 blocks behind.
pub fn take_snapshot<W: snapshot_io::SnapshotWriter + Send>(&self, writer: W, at: BlockID, p: &snapshot::Progress) -> Result<(), EthcoreError> { pub fn take_snapshot<W: snapshot_io::SnapshotWriter + Send>(&self, writer: W, at: BlockId, p: &snapshot::Progress) -> Result<(), EthcoreError> {
let db = self.state_db.lock().journal_db().boxed_clone(); let db = self.state_db.lock().journal_db().boxed_clone();
let best_block_number = self.chain_info().best_block_number; let best_block_number = self.chain_info().best_block_number;
let block_number = try!(self.block_number(at).ok_or(snapshot::Error::InvalidStartingBlock(at))); let block_number = try!(self.block_number(at).ok_or(snapshot::Error::InvalidStartingBlock(at)));
@ -712,13 +712,13 @@ impl Client {
let history = ::std::cmp::min(self.history, 1000); let history = ::std::cmp::min(self.history, 1000);
let start_hash = match at { let start_hash = match at {
BlockID::Latest => { BlockId::Latest => {
let start_num = match db.earliest_era() { let start_num = match db.earliest_era() {
Some(era) => ::std::cmp::max(era, best_block_number - history), Some(era) => ::std::cmp::max(era, best_block_number - history),
None => best_block_number - history, None => best_block_number - history,
}; };
match self.block_hash(BlockID::Number(start_num)) { match self.block_hash(BlockId::Number(start_num)) {
Some(h) => h, Some(h) => h,
None => return Err(snapshot::Error::InvalidStartingBlock(at).into()), None => return Err(snapshot::Error::InvalidStartingBlock(at).into()),
} }
@ -739,19 +739,19 @@ impl Client {
self.history self.history
} }
fn block_hash(chain: &BlockChain, id: BlockID) -> Option<H256> { fn block_hash(chain: &BlockChain, id: BlockId) -> Option<H256> {
match id { match id {
BlockID::Hash(hash) => Some(hash), BlockId::Hash(hash) => Some(hash),
BlockID::Number(number) => chain.block_hash(number), BlockId::Number(number) => chain.block_hash(number),
BlockID::Earliest => chain.block_hash(0), BlockId::Earliest => chain.block_hash(0),
BlockID::Latest | BlockID::Pending => Some(chain.best_block_hash()), BlockId::Latest | BlockId::Pending => Some(chain.best_block_hash()),
} }
} }
fn transaction_address(&self, id: TransactionID) -> Option<TransactionAddress> { fn transaction_address(&self, id: TransactionId) -> Option<TransactionAddress> {
match id { match id {
TransactionID::Hash(ref hash) => self.chain.read().transaction_address(hash), TransactionId::Hash(ref hash) => self.chain.read().transaction_address(hash),
TransactionID::Location(id, index) => Self::block_hash(&self.chain.read(), id).map(|hash| TransactionAddress { TransactionId::Location(id, index) => Self::block_hash(&self.chain.read(), id).map(|hash| TransactionAddress {
block_hash: hash, block_hash: hash,
index: index, index: index,
}) })
@ -805,7 +805,7 @@ impl snapshot::DatabaseRestore for Client {
impl BlockChainClient for Client { impl BlockChainClient for Client {
fn call(&self, t: &SignedTransaction, block: BlockID, analytics: CallAnalytics) -> Result<Executed, CallError> { fn call(&self, t: &SignedTransaction, block: BlockId, analytics: CallAnalytics) -> Result<Executed, CallError> {
let header = try!(self.block_header(block).ok_or(CallError::StatePruned)); let header = try!(self.block_header(block).ok_or(CallError::StatePruned));
let view = HeaderView::new(&header); let view = HeaderView::new(&header);
let last_hashes = self.build_last_hashes(view.parent_hash()); let last_hashes = self.build_last_hashes(view.parent_hash());
@ -841,11 +841,11 @@ impl BlockChainClient for Client {
Ok(ret) Ok(ret)
} }
fn replay(&self, id: TransactionID, analytics: CallAnalytics) -> Result<Executed, CallError> { fn replay(&self, id: TransactionId, analytics: CallAnalytics) -> Result<Executed, CallError> {
let address = try!(self.transaction_address(id).ok_or(CallError::TransactionNotFound)); let address = try!(self.transaction_address(id).ok_or(CallError::TransactionNotFound));
let header_data = try!(self.block_header(BlockID::Hash(address.block_hash)).ok_or(CallError::StatePruned)); let header_data = try!(self.block_header(BlockId::Hash(address.block_hash)).ok_or(CallError::StatePruned));
let body_data = try!(self.block_body(BlockID::Hash(address.block_hash)).ok_or(CallError::StatePruned)); let body_data = try!(self.block_body(BlockId::Hash(address.block_hash)).ok_or(CallError::StatePruned));
let mut state = try!(self.state_at_beginning(BlockID::Hash(address.block_hash)).ok_or(CallError::StatePruned)); let mut state = try!(self.state_at_beginning(BlockId::Hash(address.block_hash)).ok_or(CallError::StatePruned));
let txs = BodyView::new(&body_data).transactions(); let txs = BodyView::new(&body_data).transactions();
if address.index >= txs.len() { if address.index >= txs.len() {
@ -918,18 +918,18 @@ impl BlockChainClient for Client {
self.chain.read().best_block_header() self.chain.read().best_block_header()
} }
fn block_header(&self, id: BlockID) -> Option<Bytes> { fn block_header(&self, id: BlockId) -> Option<Bytes> {
let chain = self.chain.read(); let chain = self.chain.read();
Self::block_hash(&chain, id).and_then(|hash| chain.block_header_data(&hash)) Self::block_hash(&chain, id).and_then(|hash| chain.block_header_data(&hash))
} }
fn block_body(&self, id: BlockID) -> Option<Bytes> { fn block_body(&self, id: BlockId) -> Option<Bytes> {
let chain = self.chain.read(); let chain = self.chain.read();
Self::block_hash(&chain, id).and_then(|hash| chain.block_body(&hash)) Self::block_hash(&chain, id).and_then(|hash| chain.block_body(&hash))
} }
fn block(&self, id: BlockID) -> Option<Bytes> { fn block(&self, id: BlockId) -> Option<Bytes> {
if let BlockID::Pending = id { if let BlockId::Pending = id {
if let Some(block) = self.miner.pending_block() { if let Some(block) = self.miner.pending_block() {
return Some(block.rlp_bytes(Seal::Without)); return Some(block.rlp_bytes(Seal::Without));
} }
@ -940,7 +940,7 @@ impl BlockChainClient for Client {
}) })
} }
fn block_status(&self, id: BlockID) -> BlockStatus { fn block_status(&self, id: BlockId) -> BlockStatus {
let chain = self.chain.read(); let chain = self.chain.read();
match Self::block_hash(&chain, id) { match Self::block_hash(&chain, id) {
Some(ref hash) if chain.is_known(hash) => BlockStatus::InChain, Some(ref hash) if chain.is_known(hash) => BlockStatus::InChain,
@ -949,42 +949,42 @@ impl BlockChainClient for Client {
} }
} }
fn block_total_difficulty(&self, id: BlockID) -> Option<U256> { fn block_total_difficulty(&self, id: BlockId) -> Option<U256> {
if let BlockID::Pending = id { if let BlockId::Pending = id {
if let Some(block) = self.miner.pending_block() { if let Some(block) = self.miner.pending_block() {
return Some(*block.header.difficulty() + self.block_total_difficulty(BlockID::Latest).expect("blocks in chain have details; qed")); return Some(*block.header.difficulty() + self.block_total_difficulty(BlockId::Latest).expect("blocks in chain have details; qed"));
} }
} }
let chain = self.chain.read(); let chain = self.chain.read();
Self::block_hash(&chain, id).and_then(|hash| chain.block_details(&hash)).map(|d| d.total_difficulty) Self::block_hash(&chain, id).and_then(|hash| chain.block_details(&hash)).map(|d| d.total_difficulty)
} }
fn nonce(&self, address: &Address, id: BlockID) -> Option<U256> { fn nonce(&self, address: &Address, id: BlockId) -> Option<U256> {
self.state_at(id).map(|s| s.nonce(address)) self.state_at(id).map(|s| s.nonce(address))
} }
fn storage_root(&self, address: &Address, id: BlockID) -> Option<H256> { fn storage_root(&self, address: &Address, id: BlockId) -> Option<H256> {
self.state_at(id).and_then(|s| s.storage_root(address)) self.state_at(id).and_then(|s| s.storage_root(address))
} }
fn block_hash(&self, id: BlockID) -> Option<H256> { fn block_hash(&self, id: BlockId) -> Option<H256> {
let chain = self.chain.read(); let chain = self.chain.read();
Self::block_hash(&chain, id) Self::block_hash(&chain, id)
} }
fn code(&self, address: &Address, id: BlockID) -> Option<Option<Bytes>> { fn code(&self, address: &Address, id: BlockId) -> Option<Option<Bytes>> {
self.state_at(id).map(|s| s.code(address).map(|c| (*c).clone())) self.state_at(id).map(|s| s.code(address).map(|c| (*c).clone()))
} }
fn balance(&self, address: &Address, id: BlockID) -> Option<U256> { fn balance(&self, address: &Address, id: BlockId) -> Option<U256> {
self.state_at(id).map(|s| s.balance(address)) self.state_at(id).map(|s| s.balance(address))
} }
fn storage_at(&self, address: &Address, position: &H256, id: BlockID) -> Option<H256> { fn storage_at(&self, address: &Address, position: &H256, id: BlockId) -> Option<H256> {
self.state_at(id).map(|s| s.storage_at(address, position)) self.state_at(id).map(|s| s.storage_at(address, position))
} }
fn list_accounts(&self, id: BlockID, after: Option<&Address>, count: u64) -> Option<Vec<Address>> { fn list_accounts(&self, id: BlockId, after: Option<&Address>, count: u64) -> Option<Vec<Address>> {
if !self.factories.trie.is_fat() { if !self.factories.trie.is_fat() {
trace!(target: "fatdb", "list_accounts: Not a fat DB"); trace!(target: "fatdb", "list_accounts: Not a fat DB");
return None; return None;
@ -1022,7 +1022,7 @@ impl BlockChainClient for Client {
Some(accounts) Some(accounts)
} }
fn list_storage(&self, id: BlockID, account: &Address, after: Option<&H256>, count: u64) -> Option<Vec<H256>> { fn list_storage(&self, id: BlockId, account: &Address, after: Option<&H256>, count: u64) -> Option<Vec<H256>> {
if !self.factories.trie.is_fat() { if !self.factories.trie.is_fat() {
trace!(target: "fatdb", "list_stroage: Not a fat DB"); trace!(target: "fatdb", "list_stroage: Not a fat DB");
return None; return None;
@ -1066,20 +1066,20 @@ impl BlockChainClient for Client {
Some(keys) Some(keys)
} }
fn transaction(&self, id: TransactionID) -> Option<LocalizedTransaction> { fn transaction(&self, id: TransactionId) -> Option<LocalizedTransaction> {
self.transaction_address(id).and_then(|address| self.chain.read().transaction(&address)) self.transaction_address(id).and_then(|address| self.chain.read().transaction(&address))
} }
fn transaction_block(&self, id: TransactionID) -> Option<H256> { fn transaction_block(&self, id: TransactionId) -> Option<H256> {
self.transaction_address(id).map(|addr| addr.block_hash) self.transaction_address(id).map(|addr| addr.block_hash)
} }
fn uncle(&self, id: UncleID) -> Option<Bytes> { fn uncle(&self, id: UncleId) -> Option<Bytes> {
let index = id.position; let index = id.position;
self.block_body(id.block).and_then(|body| BodyView::new(&body).uncle_rlp_at(index)) self.block_body(id.block).and_then(|body| BodyView::new(&body).uncle_rlp_at(index))
} }
fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt> { fn transaction_receipt(&self, id: TransactionId) -> Option<LocalizedReceipt> {
let chain = self.chain.read(); let chain = self.chain.read();
self.transaction_address(id) self.transaction_address(id)
.and_then(|address| chain.block_number(&address.block_hash).and_then(|block_number| { .and_then(|address| chain.block_number(&address.block_hash).and_then(|block_number| {
@ -1163,7 +1163,7 @@ impl BlockChainClient for Client {
if self.chain.read().is_known(&unverified.hash()) { if self.chain.read().is_known(&unverified.hash()) {
return Err(BlockImportError::Import(ImportError::AlreadyInChain)); return Err(BlockImportError::Import(ImportError::AlreadyInChain));
} }
if self.block_status(BlockID::Hash(unverified.parent_hash())) == BlockStatus::Unknown { if self.block_status(BlockId::Hash(unverified.parent_hash())) == BlockStatus::Unknown {
return Err(BlockImportError::Block(BlockError::UnknownParent(unverified.parent_hash()))); return Err(BlockImportError::Block(BlockError::UnknownParent(unverified.parent_hash())));
} }
} }
@ -1177,7 +1177,7 @@ impl BlockChainClient for Client {
if self.chain.read().is_known(&header.hash()) { if self.chain.read().is_known(&header.hash()) {
return Err(BlockImportError::Import(ImportError::AlreadyInChain)); return Err(BlockImportError::Import(ImportError::AlreadyInChain));
} }
if self.block_status(BlockID::Hash(header.parent_hash())) == BlockStatus::Unknown { if self.block_status(BlockId::Hash(header.parent_hash())) == BlockStatus::Unknown {
return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash()))); return Err(BlockImportError::Block(BlockError::UnknownParent(header.parent_hash())));
} }
} }
@ -1200,7 +1200,7 @@ impl BlockChainClient for Client {
self.engine.additional_params().into_iter().collect() self.engine.additional_params().into_iter().collect()
} }
fn blocks_with_bloom(&self, bloom: &H2048, from_block: BlockID, to_block: BlockID) -> Option<Vec<BlockNumber>> { fn blocks_with_bloom(&self, bloom: &H2048, from_block: BlockId, to_block: BlockId) -> Option<Vec<BlockNumber>> {
match (self.block_number(from_block), self.block_number(to_block)) { match (self.block_number(from_block), self.block_number(to_block)) {
(Some(from), Some(to)) => Some(self.chain.read().blocks_with_bloom(bloom, from, to)), (Some(from), Some(to)) => Some(self.chain.read().blocks_with_bloom(bloom, from, to)),
_ => None _ => None
@ -1242,20 +1242,20 @@ impl BlockChainClient for Client {
let trace_address = trace.address; let trace_address = trace.address;
self.transaction_address(trace.transaction) self.transaction_address(trace.transaction)
.and_then(|tx_address| { .and_then(|tx_address| {
self.block_number(BlockID::Hash(tx_address.block_hash)) self.block_number(BlockId::Hash(tx_address.block_hash))
.and_then(|number| self.tracedb.read().trace(number, tx_address.index, trace_address)) .and_then(|number| self.tracedb.read().trace(number, tx_address.index, trace_address))
}) })
} }
fn transaction_traces(&self, transaction: TransactionID) -> Option<Vec<LocalizedTrace>> { fn transaction_traces(&self, transaction: TransactionId) -> Option<Vec<LocalizedTrace>> {
self.transaction_address(transaction) self.transaction_address(transaction)
.and_then(|tx_address| { .and_then(|tx_address| {
self.block_number(BlockID::Hash(tx_address.block_hash)) self.block_number(BlockId::Hash(tx_address.block_hash))
.and_then(|number| self.tracedb.read().transaction_traces(number, tx_address.index)) .and_then(|number| self.tracedb.read().transaction_traces(number, tx_address.index))
}) })
} }
fn block_traces(&self, block: BlockID) -> Option<Vec<LocalizedTrace>> { fn block_traces(&self, block: BlockId) -> Option<Vec<LocalizedTrace>> {
self.block_number(block) self.block_number(block)
.and_then(|number| self.tracedb.read().block_traces(number)) .and_then(|number| self.tracedb.read().block_traces(number))
} }
@ -1290,13 +1290,13 @@ impl BlockChainClient for Client {
self.engine.signing_network_id(&self.latest_env_info()) self.engine.signing_network_id(&self.latest_env_info())
} }
fn block_extra_info(&self, id: BlockID) -> Option<BTreeMap<String, String>> { fn block_extra_info(&self, id: BlockId) -> Option<BTreeMap<String, String>> {
self.block_header(id) self.block_header(id)
.map(|block| decode(&block)) .map(|block| decode(&block))
.map(|header| self.engine.extra_info(&header)) .map(|header| self.engine.extra_info(&header))
} }
fn uncle_extra_info(&self, id: UncleID) -> Option<BTreeMap<String, String>> { fn uncle_extra_info(&self, id: UncleId) -> Option<BTreeMap<String, String>> {
self.uncle(id) self.uncle(id)
.map(|header| self.engine.extra_info(&decode(&header))) .map(|header| self.engine.extra_info(&decode(&header)))
} }
@ -1391,20 +1391,20 @@ impl MayPanic for Client {
} }
} }
impl ProvingBlockChainClient for Client { impl ::client::ProvingBlockChainClient for Client {
fn prove_storage(&self, key1: H256, key2: H256, from_level: u32, id: BlockID) -> Vec<Bytes> { fn prove_storage(&self, key1: H256, key2: H256, from_level: u32, id: BlockId) -> Vec<Bytes> {
self.state_at(id) self.state_at(id)
.and_then(move |state| state.prove_storage(key1, key2, from_level).ok()) .and_then(move |state| state.prove_storage(key1, key2, from_level).ok())
.unwrap_or_else(Vec::new) .unwrap_or_else(Vec::new)
} }
fn prove_account(&self, key1: H256, from_level: u32, id: BlockID) -> Vec<Bytes> { fn prove_account(&self, key1: H256, from_level: u32, id: BlockId) -> Vec<Bytes> {
self.state_at(id) self.state_at(id)
.and_then(move |state| state.prove_account(key1, from_level).ok()) .and_then(move |state| state.prove_account(key1, from_level).ok())
.unwrap_or_else(Vec::new) .unwrap_or_else(Vec::new)
} }
fn code_by_hash(&self, account_key: H256, id: BlockID) -> Bytes { fn code_by_hash(&self, account_key: H256, id: BlockId) -> Bytes {
self.state_at(id) self.state_at(id)
.and_then(move |state| state.code_by_address_hash(account_key).ok()) .and_then(move |state| state.code_by_address_hash(account_key).ok())
.and_then(|x| x) .and_then(|x| x)
View File
@ -27,7 +27,9 @@ pub use self::config::{Mode, ClientConfig, DatabaseCompactionProfile, BlockChain
pub use self::error::Error; pub use self::error::Error;
pub use self::test_client::{TestBlockChainClient, EachBlockWith}; pub use self::test_client::{TestBlockChainClient, EachBlockWith};
pub use self::chain_notify::ChainNotify; pub use self::chain_notify::ChainNotify;
pub use self::traits::{BlockChainClient, MiningBlockChainClient, ProvingBlockChainClient}; pub use self::traits::{BlockChainClient, MiningBlockChainClient};
pub use self::traits::ProvingBlockChainClient;
pub use types::ids::*; pub use types::ids::*;
pub use types::trace_filter::Filter as TraceFilter; pub use types::trace_filter::Filter as TraceFilter;
View File
@ -24,8 +24,8 @@ use devtools::*;
use transaction::{Transaction, LocalizedTransaction, SignedTransaction, Action}; use transaction::{Transaction, LocalizedTransaction, SignedTransaction, Action};
use blockchain::TreeRoute; use blockchain::TreeRoute;
use client::{ use client::{
BlockChainClient, MiningBlockChainClient, BlockChainInfo, BlockStatus, BlockID, BlockChainClient, MiningBlockChainClient, BlockChainInfo, BlockStatus, BlockId,
TransactionID, UncleID, TraceId, TraceFilter, LastHashes, CallAnalytics, BlockImportError, TransactionId, UncleId, TraceId, TraceFilter, LastHashes, CallAnalytics, BlockImportError,
}; };
use db::{NUM_COLUMNS, COL_STATE}; use db::{NUM_COLUMNS, COL_STATE};
use header::{Header as BlockHeader, BlockNumber}; use header::{Header as BlockHeader, BlockNumber};
@ -73,7 +73,7 @@ pub struct TestBlockChainClient {
/// Execution result. /// Execution result.
pub execution_result: RwLock<Option<Result<Executed, CallError>>>, pub execution_result: RwLock<Option<Result<Executed, CallError>>>,
/// Transaction receipts. /// Transaction receipts.
pub receipts: RwLock<HashMap<TransactionID, LocalizedReceipt>>, pub receipts: RwLock<HashMap<TransactionId, LocalizedReceipt>>,
/// Logs /// Logs
pub logs: RwLock<Vec<LocalizedLogEntry>>, pub logs: RwLock<Vec<LocalizedLogEntry>>,
/// Block queue size. /// Block queue size.
@ -92,8 +92,8 @@ pub struct TestBlockChainClient {
pub first_block: RwLock<Option<(H256, u64)>>, pub first_block: RwLock<Option<(H256, u64)>>,
} }
#[derive(Clone)]
/// Used for generating test client blocks. /// Used for generating test client blocks.
#[derive(Clone)]
pub enum EachBlockWith { pub enum EachBlockWith {
/// Plain block. /// Plain block.
Nothing, Nothing,
@ -158,7 +158,7 @@ impl TestBlockChainClient {
} }
/// Set the transaction receipt result /// Set the transaction receipt result
pub fn set_transaction_receipt(&self, id: TransactionID, receipt: LocalizedReceipt) { pub fn set_transaction_receipt(&self, id: TransactionId, receipt: LocalizedReceipt) {
self.receipts.write().insert(id, receipt); self.receipts.write().insert(id, receipt);
} }
@ -256,8 +256,8 @@ impl TestBlockChainClient {
/// Make a bad block by setting invalid extra data. /// Make a bad block by setting invalid extra data.
pub fn corrupt_block(&self, n: BlockNumber) { pub fn corrupt_block(&self, n: BlockNumber) {
let hash = self.block_hash(BlockID::Number(n)).unwrap(); let hash = self.block_hash(BlockId::Number(n)).unwrap();
let mut header: BlockHeader = decode(&self.block_header(BlockID::Number(n)).unwrap()); let mut header: BlockHeader = decode(&self.block_header(BlockId::Number(n)).unwrap());
header.set_extra_data(b"This extra data is way too long to be considered valid".to_vec()); header.set_extra_data(b"This extra data is way too long to be considered valid".to_vec());
let mut rlp = RlpStream::new_list(3); let mut rlp = RlpStream::new_list(3);
rlp.append(&header); rlp.append(&header);
@ -268,8 +268,8 @@ impl TestBlockChainClient {
/// Make a bad block by setting invalid parent hash. /// Make a bad block by setting invalid parent hash.
pub fn corrupt_block_parent(&self, n: BlockNumber) { pub fn corrupt_block_parent(&self, n: BlockNumber) {
let hash = self.block_hash(BlockID::Number(n)).unwrap(); let hash = self.block_hash(BlockId::Number(n)).unwrap();
let mut header: BlockHeader = decode(&self.block_header(BlockID::Number(n)).unwrap()); let mut header: BlockHeader = decode(&self.block_header(BlockId::Number(n)).unwrap());
header.set_parent_hash(H256::from(42)); header.set_parent_hash(H256::from(42));
let mut rlp = RlpStream::new_list(3); let mut rlp = RlpStream::new_list(3);
rlp.append(&header); rlp.append(&header);
@ -285,12 +285,12 @@ impl TestBlockChainClient {
blocks_read[&index].clone() blocks_read[&index].clone()
} }
fn block_hash(&self, id: BlockID) -> Option<H256> { fn block_hash(&self, id: BlockId) -> Option<H256> {
match id { match id {
BlockID::Hash(hash) => Some(hash), BlockId::Hash(hash) => Some(hash),
BlockID::Number(n) => self.numbers.read().get(&(n as usize)).cloned(), BlockId::Number(n) => self.numbers.read().get(&(n as usize)).cloned(),
BlockID::Earliest => self.numbers.read().get(&0).cloned(), BlockId::Earliest => self.numbers.read().get(&0).cloned(),
BlockID::Latest | BlockID::Pending => self.numbers.read().get(&(self.numbers.read().len() - 1)).cloned() BlockId::Latest | BlockId::Pending => self.numbers.read().get(&(self.numbers.read().len() - 1)).cloned()
} }
} }
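The `block_hash` resolver above is a compact illustration of how the renamed `BlockId` variants are interpreted: an explicit hash passes through, a number is looked up, and `Earliest`/`Latest` map to the ends of the known range. A self-contained sketch of that resolution logic, under stated assumptions (plain `u64` hashes, a `BTreeMap` standing in for the locked maps, and the `Pending` variant omitted):

```rust
use std::collections::BTreeMap;

type Hash = u64; // stand-in for the real H256

// Simplified mirror of the renamed identifier enum (Pending omitted).
#[derive(Debug, Clone, Copy)]
enum BlockId {
    Hash(Hash),
    Number(u64),
    Earliest,
    Latest,
}

// Resolve a BlockId against an in-memory number -> hash map,
// the same way the test client above does.
fn block_hash(numbers: &BTreeMap<u64, Hash>, id: BlockId) -> Option<Hash> {
    match id {
        BlockId::Hash(hash) => Some(hash),
        BlockId::Number(n) => numbers.get(&n).cloned(),
        BlockId::Earliest => numbers.values().next().cloned(),
        BlockId::Latest => numbers.values().next_back().cloned(),
    }
}

fn main() {
    let mut numbers = BTreeMap::new();
    numbers.insert(0, 0xaa);
    numbers.insert(1, 0xbb);
    assert_eq!(block_hash(&numbers, BlockId::Earliest), Some(0xaa));
    assert_eq!(block_hash(&numbers, BlockId::Number(1)), Some(0xbb));
    assert_eq!(block_hash(&numbers, BlockId::Latest), Some(0xbb));
    assert_eq!(block_hash(&numbers, BlockId::Hash(0xcc)), Some(0xcc));
}
```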
@ -363,46 +363,46 @@ impl MiningBlockChainClient for TestBlockChainClient {
} }
impl BlockChainClient for TestBlockChainClient { impl BlockChainClient for TestBlockChainClient {
fn call(&self, _t: &SignedTransaction, _block: BlockID, _analytics: CallAnalytics) -> Result<Executed, CallError> { fn call(&self, _t: &SignedTransaction, _block: BlockId, _analytics: CallAnalytics) -> Result<Executed, CallError> {
self.execution_result.read().clone().unwrap() self.execution_result.read().clone().unwrap()
} }
fn replay(&self, _id: TransactionID, _analytics: CallAnalytics) -> Result<Executed, CallError> { fn replay(&self, _id: TransactionId, _analytics: CallAnalytics) -> Result<Executed, CallError> {
self.execution_result.read().clone().unwrap() self.execution_result.read().clone().unwrap()
} }
fn block_total_difficulty(&self, _id: BlockID) -> Option<U256> { fn block_total_difficulty(&self, _id: BlockId) -> Option<U256> {
Some(U256::zero()) Some(U256::zero())
} }
fn block_hash(&self, id: BlockID) -> Option<H256> { fn block_hash(&self, id: BlockId) -> Option<H256> {
Self::block_hash(self, id) Self::block_hash(self, id)
} }
fn nonce(&self, address: &Address, id: BlockID) -> Option<U256> { fn nonce(&self, address: &Address, id: BlockId) -> Option<U256> {
match id { match id {
BlockID::Latest => Some(self.nonces.read().get(address).cloned().unwrap_or(self.spec.params.account_start_nonce)), BlockId::Latest => Some(self.nonces.read().get(address).cloned().unwrap_or(self.spec.params.account_start_nonce)),
_ => None, _ => None,
} }
} }
fn storage_root(&self, _address: &Address, _id: BlockID) -> Option<H256> { fn storage_root(&self, _address: &Address, _id: BlockId) -> Option<H256> {
None None
} }
fn latest_nonce(&self, address: &Address) -> U256 { fn latest_nonce(&self, address: &Address) -> U256 {
self.nonce(address, BlockID::Latest).unwrap() self.nonce(address, BlockId::Latest).unwrap()
} }
fn code(&self, address: &Address, id: BlockID) -> Option<Option<Bytes>> { fn code(&self, address: &Address, id: BlockId) -> Option<Option<Bytes>> {
match id { match id {
BlockID::Latest => Some(self.code.read().get(address).cloned()), BlockId::Latest => Some(self.code.read().get(address).cloned()),
_ => None, _ => None,
} }
} }
fn balance(&self, address: &Address, id: BlockID) -> Option<U256> { fn balance(&self, address: &Address, id: BlockId) -> Option<U256> {
if let BlockID::Latest = id { if let BlockId::Latest = id {
Some(self.balances.read().get(address).cloned().unwrap_or_else(U256::zero)) Some(self.balances.read().get(address).cloned().unwrap_or_else(U256::zero))
} else { } else {
None None
@ -410,45 +410,45 @@ impl BlockChainClient for TestBlockChainClient {
} }
fn latest_balance(&self, address: &Address) -> U256 { fn latest_balance(&self, address: &Address) -> U256 {
self.balance(address, BlockID::Latest).unwrap() self.balance(address, BlockId::Latest).unwrap()
} }
fn storage_at(&self, address: &Address, position: &H256, id: BlockID) -> Option<H256> { fn storage_at(&self, address: &Address, position: &H256, id: BlockId) -> Option<H256> {
if let BlockID::Latest = id { if let BlockId::Latest = id {
Some(self.storage.read().get(&(address.clone(), position.clone())).cloned().unwrap_or_else(H256::new)) Some(self.storage.read().get(&(address.clone(), position.clone())).cloned().unwrap_or_else(H256::new))
} else { } else {
None None
} }
} }
fn list_accounts(&self, _id: BlockID, _after: Option<&Address>, _count: u64) -> Option<Vec<Address>> { fn list_accounts(&self, _id: BlockId, _after: Option<&Address>, _count: u64) -> Option<Vec<Address>> {
None None
} }
fn list_storage(&self, _id: BlockID, _account: &Address, _after: Option<&H256>, _count: u64) -> Option<Vec<H256>> { fn list_storage(&self, _id: BlockId, _account: &Address, _after: Option<&H256>, _count: u64) -> Option<Vec<H256>> {
None None
} }
fn transaction(&self, _id: TransactionID) -> Option<LocalizedTransaction> { fn transaction(&self, _id: TransactionId) -> Option<LocalizedTransaction> {
None // Simple default. None // Simple default.
} }
fn transaction_block(&self, _id: TransactionID) -> Option<H256> { fn transaction_block(&self, _id: TransactionId) -> Option<H256> {
None // Simple default. None // Simple default.
} }
fn uncle(&self, _id: UncleID) -> Option<Bytes> { fn uncle(&self, _id: UncleId) -> Option<Bytes> {
None // Simple default. None // Simple default.
} }
fn uncle_extra_info(&self, _id: UncleID) -> Option<BTreeMap<String, String>> { fn uncle_extra_info(&self, _id: UncleId) -> Option<BTreeMap<String, String>> {
None None
} }
fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt> { fn transaction_receipt(&self, id: TransactionId) -> Option<LocalizedReceipt> {
self.receipts.read().get(&id).cloned() self.receipts.read().get(&id).cloned()
} }
fn blocks_with_bloom(&self, _bloom: &H2048, _from_block: BlockID, _to_block: BlockID) -> Option<Vec<BlockNumber>> { fn blocks_with_bloom(&self, _bloom: &H2048, _from_block: BlockId, _to_block: BlockId) -> Option<Vec<BlockNumber>> {
unimplemented!(); unimplemented!();
} }
@ -466,14 +466,14 @@ impl BlockChainClient for TestBlockChainClient {
} }
fn best_block_header(&self) -> Bytes { fn best_block_header(&self) -> Bytes {
self.block_header(BlockID::Hash(self.chain_info().best_block_hash)).expect("Best block always have header.") self.block_header(BlockId::Hash(self.chain_info().best_block_hash)).expect("Best block always have header.")
} }
fn block_header(&self, id: BlockID) -> Option<Bytes> { fn block_header(&self, id: BlockId) -> Option<Bytes> {
self.block_hash(id).and_then(|hash| self.blocks.read().get(&hash).map(|r| Rlp::new(r).at(0).as_raw().to_vec())) self.block_hash(id).and_then(|hash| self.blocks.read().get(&hash).map(|r| Rlp::new(r).at(0).as_raw().to_vec()))
} }
fn block_body(&self, id: BlockID) -> Option<Bytes> { fn block_body(&self, id: BlockId) -> Option<Bytes> {
self.block_hash(id).and_then(|hash| self.blocks.read().get(&hash).map(|r| { self.block_hash(id).and_then(|hash| self.blocks.read().get(&hash).map(|r| {
let mut stream = RlpStream::new_list(2); let mut stream = RlpStream::new_list(2);
stream.append_raw(Rlp::new(r).at(1).as_raw(), 1); stream.append_raw(Rlp::new(r).at(1).as_raw(), 1);
@ -482,21 +482,21 @@ impl BlockChainClient for TestBlockChainClient {
})) }))
} }
fn block(&self, id: BlockID) -> Option<Bytes> { fn block(&self, id: BlockId) -> Option<Bytes> {
self.block_hash(id).and_then(|hash| self.blocks.read().get(&hash).cloned()) self.block_hash(id).and_then(|hash| self.blocks.read().get(&hash).cloned())
} }
fn block_extra_info(&self, id: BlockID) -> Option<BTreeMap<String, String>> { fn block_extra_info(&self, id: BlockId) -> Option<BTreeMap<String, String>> {
self.block(id) self.block(id)
.map(|block| BlockView::new(&block).header()) .map(|block| BlockView::new(&block).header())
.map(|header| self.spec.engine.extra_info(&header)) .map(|header| self.spec.engine.extra_info(&header))
} }
fn block_status(&self, id: BlockID) -> BlockStatus { fn block_status(&self, id: BlockId) -> BlockStatus {
match id { match id {
BlockID::Number(number) if (number as usize) < self.blocks.read().len() => BlockStatus::InChain, BlockId::Number(number) if (number as usize) < self.blocks.read().len() => BlockStatus::InChain,
BlockID::Hash(ref hash) if self.blocks.read().get(hash).is_some() => BlockStatus::InChain, BlockId::Hash(ref hash) if self.blocks.read().get(hash).is_some() => BlockStatus::InChain,
_ => BlockStatus::Unknown _ => BlockStatus::Unknown
} }
} }
@ -649,11 +649,11 @@ impl BlockChainClient for TestBlockChainClient {
unimplemented!(); unimplemented!();
} }
fn transaction_traces(&self, _trace: TransactionID) -> Option<Vec<LocalizedTrace>> { fn transaction_traces(&self, _trace: TransactionId) -> Option<Vec<LocalizedTrace>> {
unimplemented!(); unimplemented!();
} }
fn block_traces(&self, _trace: BlockID) -> Option<Vec<LocalizedTrace>> { fn block_traces(&self, _trace: BlockId) -> Option<Vec<LocalizedTrace>> {
unimplemented!(); unimplemented!();
} }
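Most of this file is the mechanical rename from `BlockID`, `TransactionID` and `UncleID` to the conventional `BlockId`, `TransactionId` and `UncleId`. Purely as an aside (the commit itself simply renames every call site), such a rename can also be staged with a deprecated type alias so downstream code keeps compiling while it migrates; a minimal sketch:

```rust
// New, convention-following name.
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BlockId {
    Hash([u8; 32]),
    Number(u64),
    Earliest,
    Latest,
}

// Hypothetical bridge for code still using the old name; using it warns.
// (Illustrative only -- the diff above renames call sites directly.)
#[deprecated(note = "use BlockId instead")]
#[allow(dead_code)]
pub type BlockID = BlockId;

fn main() {
    let id = BlockId::Number(42);
    println!("{:?}", id);
}
```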


@ -50,93 +50,93 @@ pub trait BlockChainClient : Sync + Send {
fn keep_alive(&self) {} fn keep_alive(&self) {}
/// Get raw block header data by block id. /// Get raw block header data by block id.
fn block_header(&self, id: BlockID) -> Option<Bytes>; fn block_header(&self, id: BlockId) -> Option<Bytes>;
/// Get raw block body data by block id. /// Get raw block body data by block id.
/// Block body is an RLP list of two items: uncles and transactions. /// Block body is an RLP list of two items: uncles and transactions.
fn block_body(&self, id: BlockID) -> Option<Bytes>; fn block_body(&self, id: BlockId) -> Option<Bytes>;
/// Get raw block data by block header hash. /// Get raw block data by block header hash.
fn block(&self, id: BlockID) -> Option<Bytes>; fn block(&self, id: BlockId) -> Option<Bytes>;
/// Get block status by block header hash. /// Get block status by block header hash.
fn block_status(&self, id: BlockID) -> BlockStatus; fn block_status(&self, id: BlockId) -> BlockStatus;
/// Get block total difficulty. /// Get block total difficulty.
fn block_total_difficulty(&self, id: BlockID) -> Option<U256>; fn block_total_difficulty(&self, id: BlockId) -> Option<U256>;
/// Attempt to get address nonce at given block. /// Attempt to get address nonce at given block.
/// May not fail on BlockID::Latest. /// May not fail on BlockId::Latest.
fn nonce(&self, address: &Address, id: BlockID) -> Option<U256>; fn nonce(&self, address: &Address, id: BlockId) -> Option<U256>;
/// Attempt to get address storage root at given block. /// Attempt to get address storage root at given block.
/// May not fail on BlockID::Latest. /// May not fail on BlockId::Latest.
fn storage_root(&self, address: &Address, id: BlockID) -> Option<H256>; fn storage_root(&self, address: &Address, id: BlockId) -> Option<H256>;
/// Get address nonce at the latest block's state. /// Get address nonce at the latest block's state.
fn latest_nonce(&self, address: &Address) -> U256 { fn latest_nonce(&self, address: &Address) -> U256 {
self.nonce(address, BlockID::Latest) self.nonce(address, BlockId::Latest)
.expect("nonce will return Some when given BlockID::Latest. nonce was given BlockID::Latest. \ .expect("nonce will return Some when given BlockId::Latest. nonce was given BlockId::Latest. \
Therefore nonce has returned Some; qed") Therefore nonce has returned Some; qed")
} }
/// Get block hash. /// Get block hash.
fn block_hash(&self, id: BlockID) -> Option<H256>; fn block_hash(&self, id: BlockId) -> Option<H256>;
/// Get address code at given block's state. /// Get address code at given block's state.
fn code(&self, address: &Address, id: BlockID) -> Option<Option<Bytes>>; fn code(&self, address: &Address, id: BlockId) -> Option<Option<Bytes>>;
/// Get address code at the latest block's state. /// Get address code at the latest block's state.
fn latest_code(&self, address: &Address) -> Option<Bytes> { fn latest_code(&self, address: &Address) -> Option<Bytes> {
self.code(address, BlockID::Latest) self.code(address, BlockId::Latest)
.expect("code will return Some if given BlockID::Latest; qed") .expect("code will return Some if given BlockId::Latest; qed")
} }
/// Get address balance at the given block's state. /// Get address balance at the given block's state.
/// ///
/// May not return None if given BlockID::Latest. /// May not return None if given BlockId::Latest.
/// Returns None if and only if the block's root hash has been pruned from the DB. /// Returns None if and only if the block's root hash has been pruned from the DB.
fn balance(&self, address: &Address, id: BlockID) -> Option<U256>; fn balance(&self, address: &Address, id: BlockId) -> Option<U256>;
/// Get address balance at the latest block's state. /// Get address balance at the latest block's state.
fn latest_balance(&self, address: &Address) -> U256 { fn latest_balance(&self, address: &Address) -> U256 {
self.balance(address, BlockID::Latest) self.balance(address, BlockId::Latest)
.expect("balance will return Some if given BlockID::Latest. balance was given BlockID::Latest \ .expect("balance will return Some if given BlockId::Latest. balance was given BlockId::Latest \
Therefore balance has returned Some; qed") Therefore balance has returned Some; qed")
} }
/// Get value of the storage at given position at the given block's state. /// Get value of the storage at given position at the given block's state.
/// ///
/// May not return None if given BlockID::Latest. /// May not return None if given BlockId::Latest.
/// Returns None if and only if the block's root hash has been pruned from the DB. /// Returns None if and only if the block's root hash has been pruned from the DB.
fn storage_at(&self, address: &Address, position: &H256, id: BlockID) -> Option<H256>; fn storage_at(&self, address: &Address, position: &H256, id: BlockId) -> Option<H256>;
/// Get value of the storage at given position at the latest block's state. /// Get value of the storage at given position at the latest block's state.
fn latest_storage_at(&self, address: &Address, position: &H256) -> H256 { fn latest_storage_at(&self, address: &Address, position: &H256) -> H256 {
self.storage_at(address, position, BlockID::Latest) self.storage_at(address, position, BlockId::Latest)
.expect("storage_at will return Some if given BlockID::Latest. storage_at was given BlockID::Latest. \ .expect("storage_at will return Some if given BlockId::Latest. storage_at was given BlockId::Latest. \
Therefore storage_at has returned Some; qed") Therefore storage_at has returned Some; qed")
} }
/// Get a list of all accounts in the block `id`, if fat DB is in operation, otherwise `None`. /// Get a list of all accounts in the block `id`, if fat DB is in operation, otherwise `None`.
/// If `after` is set the list starts with the following item. /// If `after` is set the list starts with the following item.
fn list_accounts(&self, id: BlockID, after: Option<&Address>, count: u64) -> Option<Vec<Address>>; fn list_accounts(&self, id: BlockId, after: Option<&Address>, count: u64) -> Option<Vec<Address>>;
/// Get a list of all storage keys in the block `id`, if fat DB is in operation, otherwise `None`. /// Get a list of all storage keys in the block `id`, if fat DB is in operation, otherwise `None`.
/// If `after` is set the list starts with the following item. /// If `after` is set the list starts with the following item.
fn list_storage(&self, id: BlockID, account: &Address, after: Option<&H256>, count: u64) -> Option<Vec<H256>>; fn list_storage(&self, id: BlockId, account: &Address, after: Option<&H256>, count: u64) -> Option<Vec<H256>>;
/// Get transaction with given hash. /// Get transaction with given hash.
fn transaction(&self, id: TransactionID) -> Option<LocalizedTransaction>; fn transaction(&self, id: TransactionId) -> Option<LocalizedTransaction>;
/// Get the hash of block that contains the transaction, if any. /// Get the hash of block that contains the transaction, if any.
fn transaction_block(&self, id: TransactionID) -> Option<H256>; fn transaction_block(&self, id: TransactionId) -> Option<H256>;
/// Get uncle with given id. /// Get uncle with given id.
fn uncle(&self, id: UncleID) -> Option<Bytes>; fn uncle(&self, id: UncleId) -> Option<Bytes>;
/// Get transaction receipt with given hash. /// Get transaction receipt with given hash.
fn transaction_receipt(&self, id: TransactionID) -> Option<LocalizedReceipt>; fn transaction_receipt(&self, id: TransactionId) -> Option<LocalizedReceipt>;
/// Get a tree route between `from` and `to`. /// Get a tree route between `from` and `to`.
/// See `BlockChain::tree_route`. /// See `BlockChain::tree_route`.
@ -173,16 +173,16 @@ pub trait BlockChainClient : Sync + Send {
fn best_block_header(&self) -> Bytes; fn best_block_header(&self) -> Bytes;
/// Returns numbers of blocks containing given bloom. /// Returns numbers of blocks containing given bloom.
fn blocks_with_bloom(&self, bloom: &H2048, from_block: BlockID, to_block: BlockID) -> Option<Vec<BlockNumber>>; fn blocks_with_bloom(&self, bloom: &H2048, from_block: BlockId, to_block: BlockId) -> Option<Vec<BlockNumber>>;
/// Returns logs matching given filter. /// Returns logs matching given filter.
fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry>; fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry>;
/// Makes a non-persistent transaction call. /// Makes a non-persistent transaction call.
fn call(&self, t: &SignedTransaction, block: BlockID, analytics: CallAnalytics) -> Result<Executed, CallError>; fn call(&self, t: &SignedTransaction, block: BlockId, analytics: CallAnalytics) -> Result<Executed, CallError>;
/// Replays a given transaction for inspection. /// Replays a given transaction for inspection.
fn replay(&self, t: TransactionID, analytics: CallAnalytics) -> Result<Executed, CallError>; fn replay(&self, t: TransactionId, analytics: CallAnalytics) -> Result<Executed, CallError>;
/// Returns traces matching given filter. /// Returns traces matching given filter.
fn filter_traces(&self, filter: TraceFilter) -> Option<Vec<LocalizedTrace>>; fn filter_traces(&self, filter: TraceFilter) -> Option<Vec<LocalizedTrace>>;
@ -191,10 +191,10 @@ pub trait BlockChainClient : Sync + Send {
fn trace(&self, trace: TraceId) -> Option<LocalizedTrace>; fn trace(&self, trace: TraceId) -> Option<LocalizedTrace>;
/// Returns traces created by transaction. /// Returns traces created by transaction.
fn transaction_traces(&self, trace: TransactionID) -> Option<Vec<LocalizedTrace>>; fn transaction_traces(&self, trace: TransactionId) -> Option<Vec<LocalizedTrace>>;
/// Returns traces created by transaction from block. /// Returns traces created by transaction from block.
fn block_traces(&self, trace: BlockID) -> Option<Vec<LocalizedTrace>>; fn block_traces(&self, trace: BlockId) -> Option<Vec<LocalizedTrace>>;
/// Get last hashes starting from best block. /// Get last hashes starting from best block.
fn last_hashes(&self) -> LastHashes; fn last_hashes(&self) -> LastHashes;
@ -211,7 +211,7 @@ pub trait BlockChainClient : Sync + Send {
let mut corpus = Vec::new(); let mut corpus = Vec::new();
while corpus.is_empty() { while corpus.is_empty() {
for _ in 0..sample_size { for _ in 0..sample_size {
let block_bytes = self.block(BlockID::Hash(h)).expect("h is either the best_block_hash or an ancestor; qed"); let block_bytes = self.block(BlockId::Hash(h)).expect("h is either the best_block_hash or an ancestor; qed");
let block = BlockView::new(&block_bytes); let block = BlockView::new(&block_bytes);
let header = block.header_view(); let header = block.header_view();
if header.number() == 0 { if header.number() == 0 {
@ -249,11 +249,11 @@ pub trait BlockChainClient : Sync + Send {
/// Set the mode. /// Set the mode.
fn set_mode(&self, mode: Mode); fn set_mode(&self, mode: Mode);
/// Returns engine-related extra info for `BlockID`. /// Returns engine-related extra info for `BlockId`.
fn block_extra_info(&self, id: BlockID) -> Option<BTreeMap<String, String>>; fn block_extra_info(&self, id: BlockId) -> Option<BTreeMap<String, String>>;
/// Returns engine-related extra info for `UncleID`. /// Returns engine-related extra info for `UncleId`.
fn uncle_extra_info(&self, id: UncleID) -> Option<BTreeMap<String, String>>; fn uncle_extra_info(&self, id: UncleId) -> Option<BTreeMap<String, String>>;
/// Returns information about pruning/data availability. /// Returns information about pruning/data availability.
fn pruning_info(&self) -> PruningInfo; fn pruning_info(&self) -> PruningInfo;
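The `latest_nonce`, `latest_code`, `latest_balance` and `latest_storage_at` helpers in this trait all lean on the documented invariant that a query at `BlockId::Latest` never returns `None`, and spell the reasoning out in their `expect("...; qed")` messages. A stripped-down sketch of that default-method pattern, with stand-in types rather than the real `BlockChainClient`:

```rust
#[derive(Debug, Clone, Copy)]
enum BlockId {
    Number(u64),
    Latest,
}

type U256 = u128; // stand-in for the real 256-bit integer

trait BalanceProvider {
    /// May return None only for pruned historical blocks, never for BlockId::Latest.
    fn balance(&self, id: BlockId) -> Option<U256>;

    /// Default helper mirroring `latest_balance` above; the invariant on
    /// `balance` is what justifies the expect.
    fn latest_balance(&self) -> U256 {
        self.balance(BlockId::Latest)
            .expect("balance returns Some when given BlockId::Latest; qed")
    }
}

struct Dummy;

impl BalanceProvider for Dummy {
    fn balance(&self, id: BlockId) -> Option<U256> {
        match id {
            BlockId::Latest => Some(1_000),
            BlockId::Number(_) => None, // pretend everything historical is pruned
        }
    }
}

fn main() {
    assert_eq!(Dummy.latest_balance(), 1_000);
    assert_eq!(Dummy.balance(BlockId::Number(1)), None);
}
```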
@ -288,15 +288,15 @@ pub trait ProvingBlockChainClient: BlockChainClient {
/// Returns a vector of raw trie nodes (in order from the root) proving the storage query. /// Returns a vector of raw trie nodes (in order from the root) proving the storage query.
/// Nodes after `from_level` may be omitted. /// Nodes after `from_level` may be omitted.
/// An empty vector indicates unservable query. /// An empty vector indicates unservable query.
fn prove_storage(&self, key1: H256, key2: H256, from_level: u32, id: BlockID) -> Vec<Bytes>; fn prove_storage(&self, key1: H256, key2: H256, from_level: u32, id: BlockId) -> Vec<Bytes>;
/// Prove account existence at a specific block id. /// Prove account existence at a specific block id.
/// The key is the keccak hash of the account's address. /// The key is the keccak hash of the account's address.
/// Returns a vector of raw trie nodes (in order from the root) proving the query. /// Returns a vector of raw trie nodes (in order from the root) proving the query.
/// Nodes after `from_level` may be omitted. /// Nodes after `from_level` may be omitted.
/// An empty vector indicates unservable query. /// An empty vector indicates unservable query.
fn prove_account(&self, key1: H256, from_level: u32, id: BlockID) -> Vec<Bytes>; fn prove_account(&self, key1: H256, from_level: u32, id: BlockId) -> Vec<Bytes>;
/// Get code by address hash. /// Get code by address hash.
fn code_by_hash(&self, account_key: H256, id: BlockID) -> Bytes; fn code_by_hash(&self, account_key: H256, id: BlockId) -> Bytes;
} }
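`ProvingBlockChainClient` hands back ordered lists of raw trie nodes, with an empty vector meaning the query cannot be served (for example because the state is pruned). A hedged usage sketch against a simplified version of that interface; the `H256`/`Bytes`/`BlockId` stand-ins and the stub client are assumptions for illustration, not the real ethcore types:

```rust
type H256 = [u8; 32];
type Bytes = Vec<u8>;

#[allow(dead_code)]
#[derive(Debug, Clone, Copy)]
enum BlockId {
    Number(u64),
    Latest,
}

// Simplified mirror of the proving interface added above.
trait ProvingClient {
    /// Trie nodes from the root proving the account query;
    /// an empty vector means the query is unservable.
    fn prove_account(&self, address_hash: H256, from_level: u32, id: BlockId) -> Vec<Bytes>;
}

// A caller only needs to distinguish "proof delivered" from "unservable".
fn fetch_account_proof<C: ProvingClient>(client: &C, address_hash: H256) -> Option<Vec<Bytes>> {
    let proof = client.prove_account(address_hash, 0, BlockId::Latest);
    if proof.is_empty() { None } else { Some(proof) }
}

struct StubClient;

impl ProvingClient for StubClient {
    fn prove_account(&self, _address_hash: H256, _from_level: u32, _id: BlockId) -> Vec<Bytes> {
        vec![vec![0xf8, 0x44]] // pretend this is the root node
    }
}

fn main() {
    let proof = fetch_account_proof(&StubClient, [0u8; 32]);
    assert!(proof.is_some());
}
```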


@ -347,7 +347,6 @@ mod tests {
use tests::helpers::*; use tests::helpers::*;
use account_provider::AccountProvider; use account_provider::AccountProvider;
use spec::Spec; use spec::Spec;
use std::time::UNIX_EPOCH;
#[test] #[test]
fn has_valid_metadata() { fn has_valid_metadata() {
@ -442,13 +441,30 @@ mod tests {
let engine = Spec::new_test_round().engine; let engine = Spec::new_test_round().engine;
let signature = tap.sign(addr, Some("0".into()), header.bare_hash()).unwrap(); let signature = tap.sign(addr, Some("0".into()), header.bare_hash()).unwrap();
let time = UNIX_EPOCH.elapsed().unwrap().as_secs();
// Two authorities. // Two authorities.
let mut step = time - time % 2; // Spec starts with step 2.
header.set_seal(vec![encode(&step).to_vec(), encode(&(&*signature as &[u8])).to_vec()]); header.set_seal(vec![encode(&2usize).to_vec(), encode(&(&*signature as &[u8])).to_vec()]);
assert!(engine.verify_block_seal(&header).is_err()); assert!(engine.verify_block_seal(&header).is_err());
step = step + 1; header.set_seal(vec![encode(&1usize).to_vec(), encode(&(&*signature as &[u8])).to_vec()]);
header.set_seal(vec![encode(&step).to_vec(), encode(&(&*signature as &[u8])).to_vec()]);
assert!(engine.verify_block_seal(&header).is_ok()); assert!(engine.verify_block_seal(&header).is_ok());
} }
#[test]
fn rejects_future_block() {
let mut header: Header = Header::default();
let tap = AccountProvider::transient_provider();
let addr = tap.insert_account("0".sha3(), "0").unwrap();
header.set_author(addr);
let engine = Spec::new_test_round().engine;
let signature = tap.sign(addr, Some("0".into()), header.bare_hash()).unwrap();
// Two authorities.
// Spec starts with step 2.
header.set_seal(vec![encode(&1usize).to_vec(), encode(&(&*signature as &[u8])).to_vec()]);
assert!(engine.verify_block_seal(&header).is_ok());
header.set_seal(vec![encode(&5usize).to_vec(), encode(&(&*signature as &[u8])).to_vec()]);
assert!(engine.verify_block_seal(&header).is_err());
}
} }
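The new `rejects_future_block` test pins down the rule its name suggests: a seal carrying a step ahead of the one the verifier currently considers valid must be rejected. A simplified illustration of that check, assuming steps are derived from wall-clock seconds and a fixed step length (a sketch of the idea, not Parity's AuthorityRound implementation):

```rust
use std::time::{SystemTime, UNIX_EPOCH};

const STEP_DURATION_SECS: u64 = 2; // assumed step length for the sketch

// Current step: seconds since the epoch divided by the step duration.
fn current_step() -> u64 {
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .expect("system clock before unix epoch")
        .as_secs();
    now / STEP_DURATION_SECS
}

// Reject seals claiming a step from the future.
fn verify_step(sealed_step: u64) -> Result<(), String> {
    if sealed_step > current_step() {
        Err(format!("block from the future: step {}", sealed_step))
    } else {
        Ok(())
    }
}

fn main() {
    assert!(verify_step(current_step()).is_ok());
    assert!(verify_step(current_step() + 5).is_err());
}
```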


@ -23,7 +23,7 @@ use account_provider::{AccountProvider, Error as AccountError};
use views::{BlockView, HeaderView}; use views::{BlockView, HeaderView};
use header::Header; use header::Header;
use state::{State, CleanupMode}; use state::{State, CleanupMode};
use client::{MiningBlockChainClient, Executive, Executed, EnvInfo, TransactOptions, BlockID, CallAnalytics, TransactionID}; use client::{MiningBlockChainClient, Executive, Executed, EnvInfo, TransactOptions, BlockId, CallAnalytics, TransactionId};
use client::TransactionImportResult; use client::TransactionImportResult;
use executive::contract_address; use executive::contract_address;
use block::{ClosedBlock, SealedBlock, IsBlock, Block}; use block::{ClosedBlock, SealedBlock, IsBlock, Block};
@ -585,7 +585,7 @@ impl Miner {
let best_block_header: Header = ::rlp::decode(&chain.best_block_header()); let best_block_header: Header = ::rlp::decode(&chain.best_block_header());
transactions.into_iter() transactions.into_iter()
.map(|tx| { .map(|tx| {
if chain.transaction_block(TransactionID::Hash(tx.hash())).is_some() { if chain.transaction_block(TransactionId::Hash(tx.hash())).is_some() {
debug!(target: "miner", "Rejected tx {:?}: already in the blockchain", tx.hash()); debug!(target: "miner", "Rejected tx {:?}: already in the blockchain", tx.hash());
return Err(Error::Transaction(TransactionError::AlreadyImported)); return Err(Error::Transaction(TransactionError::AlreadyImported));
} }
@ -701,7 +701,7 @@ impl MinerService for Miner {
Ok(ret) Ok(ret)
}, },
None => { None => {
chain.call(t, BlockID::Latest, analytics) chain.call(t, BlockId::Latest, analytics)
} }
} }
} }
@ -1092,20 +1092,6 @@ impl MinerService for Miner {
fn chain_new_blocks(&self, chain: &MiningBlockChainClient, _imported: &[H256], _invalid: &[H256], enacted: &[H256], retracted: &[H256]) { fn chain_new_blocks(&self, chain: &MiningBlockChainClient, _imported: &[H256], _invalid: &[H256], enacted: &[H256], retracted: &[H256]) {
trace!(target: "miner", "chain_new_blocks"); trace!(target: "miner", "chain_new_blocks");
fn fetch_transactions(chain: &MiningBlockChainClient, hash: &H256) -> Vec<SignedTransaction> {
let block = chain
.block(BlockID::Hash(*hash))
// Client should send message after commit to db and inserting to chain.
.expect("Expected in-chain blocks.");
let block = BlockView::new(&block);
let txs = block.transactions();
// populate sender
for tx in &txs {
let _sender = tx.sender();
}
txs
}
// 1. We ignore blocks that were `imported` (because it means that they are not in canon-chain, and transactions // 1. We ignore blocks that were `imported` (because it means that they are not in canon-chain, and transactions
// should be still available in the queue. // should be still available in the queue.
// 2. We ignore blocks that are `invalid` because it doesn't have any meaning in terms of the transactions that // 2. We ignore blocks that are `invalid` because it doesn't have any meaning in terms of the transactions that
@ -1116,10 +1102,18 @@ impl MinerService for Miner {
// Then import all transactions... // Then import all transactions...
{ {
let out_of_chain = retracted retracted.par_iter()
.par_iter() .map(|hash| {
.map(|h| fetch_transactions(chain, h)); let block = chain.block(BlockId::Hash(*hash))
out_of_chain.for_each(|txs| { .expect("Client is sending message after commit to db and inserting to chain; the block is available; qed");
let block = BlockView::new(&block);
let txs = block.transactions();
// populate sender
for tx in &txs {
let _sender = tx.sender();
}
txs
}).for_each(|txs| {
let mut transaction_queue = self.transaction_queue.lock(); let mut transaction_queue = self.transaction_queue.lock();
let _ = self.add_transactions_to_queue( let _ = self.add_transactions_to_queue(
chain, txs, TransactionOrigin::RetractedBlock, &mut transaction_queue chain, txs, TransactionOrigin::RetractedBlock, &mut transaction_queue
@ -1127,24 +1121,10 @@ impl MinerService for Miner {
}); });
} }
// ...and at the end remove old ones // ...and at the end remove the old ones
{ {
let in_chain = enacted
.par_iter()
.map(|h: &H256| fetch_transactions(chain, h));
in_chain.for_each(|mut txs| {
let mut transaction_queue = self.transaction_queue.lock(); let mut transaction_queue = self.transaction_queue.lock();
transaction_queue.remove_old(|sender| chain.latest_nonce(sender));
let to_remove = txs.drain(..)
.map(|tx| {
tx.sender().expect("Transaction is in block, so sender has to be defined.")
})
.collect::<HashSet<Address>>();
for sender in to_remove {
transaction_queue.remove_all(sender, chain.latest_nonce(&sender));
}
});
} }
if enacted.len() > 0 { if enacted.len() > 0 {
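The reworked `chain_new_blocks` above drops the old `fetch_transactions` helper: retracted block hashes are mapped to their transactions in parallel and pushed back into the queue, while enacted blocks now just trigger `transaction_queue.remove_old(...)`. A minimal sketch of that map-then-reinject shape, assuming the `rayon` crate (the apparent source of `par_iter` here) and toy stand-ins for blocks, transactions and the shared queue:

```rust
use rayon::prelude::*;
use std::sync::Mutex;

type Hash = u64;
type Tx = String;

// Stand-in for "look the block up in the chain and decode its transactions".
fn transactions_of(hash: Hash) -> Vec<Tx> {
    vec![format!("tx-from-block-{}", hash)]
}

fn main() {
    let retracted: Vec<Hash> = vec![1, 2, 3];
    let queue: Mutex<Vec<Tx>> = Mutex::new(Vec::new());

    // Parallel over retracted blocks, same shape as the miner code above:
    // map each hash to its transactions, then push them back into the queue.
    retracted
        .par_iter()
        .map(|hash| transactions_of(*hash))
        .for_each(|txs| {
            let mut queue = queue.lock().unwrap();
            queue.extend(txs);
        });

    assert_eq!(queue.lock().unwrap().len(), 3);
}
```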


@ -81,6 +81,8 @@
//! 3. `remove_all` is used to inform the queue about client (state) nonce changes. //! 3. `remove_all` is used to inform the queue about client (state) nonce changes.
//! - It removes all transactions (either from `current` or `future`) with nonce < client nonce //! - It removes all transactions (either from `current` or `future`) with nonce < client nonce
//! - It moves matching `future` transactions to `current` //! - It moves matching `future` transactions to `current`
//! 4. `remove_old` is used as a convenient method to update the state nonce for all senders in the queue.
//! - Invokes `remove_all` with latest state nonce for all senders.
use std::ops::Deref; use std::ops::Deref;
use std::cmp::Ordering; use std::cmp::Ordering;
@ -752,6 +754,26 @@ impl TransactionQueue {
/// Removes all transactions from particular sender up to (excluding) given client (state) nonce. /// Removes all transactions from particular sender up to (excluding) given client (state) nonce.
/// Client (State) Nonce = next valid nonce for this sender. /// Client (State) Nonce = next valid nonce for this sender.
pub fn remove_all(&mut self, sender: Address, client_nonce: U256) { pub fn remove_all(&mut self, sender: Address, client_nonce: U256) {
// Check if there is anything in current...
let should_check_in_current = self.current.by_address.row(&sender)
// If nonce == client_nonce nothing is changed
.and_then(|by_nonce| by_nonce.keys().find(|nonce| *nonce < &client_nonce))
.map(|_| ());
// ... or future
let should_check_in_future = self.future.by_address.row(&sender)
// if nonce == client_nonce we need to promote to current
.and_then(|by_nonce| by_nonce.keys().find(|nonce| *nonce <= &client_nonce))
.map(|_| ());
if should_check_in_current.or(should_check_in_future).is_none() {
return;
}
self.remove_all_internal(sender, client_nonce);
}
/// Always updates future and moves transactions from current to future.
fn remove_all_internal(&mut self, sender: Address, client_nonce: U256) {
// We will either move transaction to future or remove it completely // We will either move transaction to future or remove it completely
// so there will be no transactions from this sender in current // so there will be no transactions from this sender in current
self.last_nonces.remove(&sender); self.last_nonces.remove(&sender);
@ -765,6 +787,20 @@ impl TransactionQueue {
assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len()); assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len());
} }
/// Checks the current nonce for all transactions' senders in the queue and removes the old transactions.
pub fn remove_old<F>(&mut self, fetch_nonce: F) where
F: Fn(&Address) -> U256,
{
let senders = self.current.by_address.keys()
.chain(self.future.by_address.keys())
.cloned()
.collect::<HashSet<_>>();
for sender in senders {
self.remove_all(sender, fetch_nonce(&sender));
}
}
/// Penalize transactions from sender of transaction with given hash. /// Penalize transactions from sender of transaction with given hash.
/// I.e. it should change the priority of the transaction in the queue. /// I.e. it should change the priority of the transaction in the queue.
/// ///
@ -847,7 +883,7 @@ impl TransactionQueue {
if order.is_some() { if order.is_some() {
// This will keep consistency in queue // This will keep consistency in queue
// Moves all to future and then promotes a batch from current: // Moves all to future and then promotes a batch from current:
self.remove_all(sender, current_nonce); self.remove_all_internal(sender, current_nonce);
assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len()); assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len());
return; return;
} }
@ -2438,7 +2474,7 @@ mod test {
} }
#[test] #[test]
fn should_reject_transactions_below_bas_gas() { fn should_reject_transactions_below_base_gas() {
// given // given
let mut txq = TransactionQueue::default(); let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into()); let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
@ -2457,4 +2493,26 @@ mod test {
} }
#[test]
fn should_clear_all_old_transactions() {
// given
let mut txq = TransactionQueue::default();
let (tx1, tx2) = new_tx_pair_default(1.into(), 0.into());
let (tx3, tx4) = new_tx_pair_default(1.into(), 0.into());
let nonce1 = tx1.nonce;
// Insert all transactions
txq.add(tx1, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx2, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx3, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
txq.add(tx4, TransactionOrigin::External, &default_account_details, &gas_estimator).unwrap();
assert_eq!(txq.top_transactions().len(), 4);
// when
txq.remove_old(|_| nonce1 + U256::one());
// then
assert_eq!(txq.top_transactions().len(), 2);
}
} }
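The `should_clear_all_old_transactions` test above shows the intended contract of `remove_old`: hand it a closure that reports the latest state nonce per sender, and everything below that nonce disappears from the queue. A self-contained toy version of that contract, collapsing the real current/future split into a single per-sender list:

```rust
use std::collections::{HashMap, HashSet};

type Address = u64;
type Nonce = u64;

#[derive(Default)]
struct ToyQueue {
    // sender -> pending transaction nonces
    pending: HashMap<Address, Vec<Nonce>>,
}

impl ToyQueue {
    fn add(&mut self, sender: Address, nonce: Nonce) {
        self.pending.entry(sender).or_default().push(nonce);
    }

    // Mirror of `remove_all`: drop every pending nonce below the client nonce.
    fn remove_all(&mut self, sender: Address, client_nonce: Nonce) {
        if let Some(nonces) = self.pending.get_mut(&sender) {
            nonces.retain(|n| *n >= client_nonce);
            if nonces.is_empty() {
                self.pending.remove(&sender);
            }
        }
    }

    // Mirror of `remove_old`: refresh every known sender against the latest state nonce.
    fn remove_old<F>(&mut self, fetch_nonce: F)
    where
        F: Fn(&Address) -> Nonce,
    {
        let senders: HashSet<Address> = self.pending.keys().cloned().collect();
        for sender in senders {
            let nonce = fetch_nonce(&sender);
            self.remove_all(sender, nonce);
        }
    }

    fn len(&self) -> usize {
        self.pending.values().map(Vec::len).sum()
    }
}

fn main() {
    let mut q = ToyQueue::default();
    q.add(0xA, 0);
    q.add(0xA, 1);
    q.add(0xB, 0);
    // Pretend the chain now reports nonce 1 for 0xA and 0 for 0xB.
    q.remove_old(|sender| if *sender == 0xA { 1 } else { 0 });
    assert_eq!(q.len(), 2); // 0xA keeps nonce 1, 0xB keeps nonce 0
}
```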


@ -18,7 +18,7 @@
use std::fmt; use std::fmt;
use ids::BlockID; use ids::BlockId;
use util::H256; use util::H256;
use util::trie::TrieError; use util::trie::TrieError;
@ -28,7 +28,7 @@ use rlp::DecoderError;
#[derive(Debug)] #[derive(Debug)]
pub enum Error { pub enum Error {
/// Invalid starting block for snapshot. /// Invalid starting block for snapshot.
InvalidStartingBlock(BlockID), InvalidStartingBlock(BlockId),
/// Block not found. /// Block not found.
BlockNotFound(H256), BlockNotFound(H256),
/// Incomplete chain. /// Incomplete chain.


@ -27,7 +27,7 @@ use account_db::{AccountDB, AccountDBMut};
use blockchain::{BlockChain, BlockProvider}; use blockchain::{BlockChain, BlockProvider};
use engines::Engine; use engines::Engine;
use header::Header; use header::Header;
use ids::BlockID; use ids::BlockId;
use views::BlockView; use views::BlockView;
use util::{Bytes, Hashable, HashDB, DBValue, snappy, U256, Uint}; use util::{Bytes, Hashable, HashDB, DBValue, snappy, U256, Uint};
@ -129,7 +129,7 @@ pub fn take_snapshot<W: SnapshotWriter + Send>(
p: &Progress p: &Progress
) -> Result<(), Error> { ) -> Result<(), Error> {
let start_header = try!(chain.block_header(&block_at) let start_header = try!(chain.block_header(&block_at)
.ok_or(Error::InvalidStartingBlock(BlockID::Hash(block_at)))); .ok_or(Error::InvalidStartingBlock(BlockId::Hash(block_at))));
let state_root = start_header.state_root(); let state_root = start_header.state_root();
let number = start_header.number(); let number = start_header.number();


@ -30,7 +30,7 @@ use blockchain::BlockChain;
use client::{BlockChainClient, Client}; use client::{BlockChainClient, Client};
use engines::Engine; use engines::Engine;
use error::Error; use error::Error;
use ids::BlockID; use ids::BlockId;
use service::ClientIoMessage; use service::ClientIoMessage;
use io::IoChannel; use io::IoChannel;
@ -354,7 +354,7 @@ impl Service {
let writer = try!(LooseWriter::new(temp_dir.clone())); let writer = try!(LooseWriter::new(temp_dir.clone()));
let guard = Guard::new(temp_dir.clone()); let guard = Guard::new(temp_dir.clone());
let res = client.take_snapshot(writer, BlockID::Number(num), &self.progress); let res = client.take_snapshot(writer, BlockId::Number(num), &self.progress);
self.taking_snapshot.store(false, Ordering::SeqCst); self.taking_snapshot.store(false, Ordering::SeqCst);
if let Err(e) = res { if let Err(e) = res {


@ -19,7 +19,7 @@
use std::sync::Arc; use std::sync::Arc;
use client::{BlockChainClient, Client}; use client::{BlockChainClient, Client};
use ids::BlockID; use ids::BlockId;
use snapshot::service::{Service, ServiceParams}; use snapshot::service::{Service, ServiceParams};
use snapshot::{self, ManifestData, SnapshotService}; use snapshot::{self, ManifestData, SnapshotService};
use spec::Spec; use spec::Spec;
@ -96,8 +96,8 @@ fn restored_is_equivalent() {
assert_eq!(service.status(), ::snapshot::RestorationStatus::Inactive); assert_eq!(service.status(), ::snapshot::RestorationStatus::Inactive);
for x in 0..NUM_BLOCKS { for x in 0..NUM_BLOCKS {
let block1 = client.block(BlockID::Number(x as u64)).unwrap(); let block1 = client.block(BlockId::Number(x as u64)).unwrap();
let block2 = client2.block(BlockID::Number(x as u64)).unwrap(); let block2 = client2.block(BlockId::Number(x as u64)).unwrap();
assert_eq!(block1, block2); assert_eq!(block1, block2);
} }


@ -18,7 +18,7 @@
use util::Mutex; use util::Mutex;
use client::{BlockChainClient, Client, ChainNotify}; use client::{BlockChainClient, Client, ChainNotify};
use ids::BlockID; use ids::BlockId;
use service::ClientIoMessage; use service::ClientIoMessage;
use views::HeaderView; use views::HeaderView;
@ -43,7 +43,7 @@ impl<F> Oracle for StandardOracle<F>
where F: Send + Sync + Fn() -> bool where F: Send + Sync + Fn() -> bool
{ {
fn to_number(&self, hash: H256) -> Option<u64> { fn to_number(&self, hash: H256) -> Option<u64> {
self.client.block_header(BlockID::Hash(hash)).map(|h| HeaderView::new(&h).number()) self.client.block_header(BlockId::Hash(hash)).map(|h| HeaderView::new(&h).number())
} }
fn is_major_importing(&self) -> bool { fn is_major_importing(&self) -> bool {


@ -31,6 +31,7 @@ use transaction::SignedTransaction;
use state_db::StateDB; use state_db::StateDB;
use util::*; use util::*;
use util::trie::recorder::{Recorder, BasicRecorder as TrieRecorder}; use util::trie::recorder::{Recorder, BasicRecorder as TrieRecorder};
mod account; mod account;


@ -15,7 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use io::IoChannel; use io::IoChannel;
use client::{BlockChainClient, MiningBlockChainClient, Client, ClientConfig, BlockID}; use client::{BlockChainClient, MiningBlockChainClient, Client, ClientConfig, BlockId};
use state::CleanupMode; use state::CleanupMode;
use ethereum; use ethereum;
use block::IsBlock; use block::IsBlock;
@ -99,7 +99,7 @@ fn imports_good_block() {
client.flush_queue(); client.flush_queue();
client.import_verified_blocks(); client.import_verified_blocks();
let block = client.block_header(BlockID::Number(1)).unwrap(); let block = client.block_header(BlockId::Number(1)).unwrap();
assert!(!block.is_empty()); assert!(!block.is_empty());
} }
@ -117,7 +117,7 @@ fn query_none_block() {
IoChannel::disconnected(), IoChannel::disconnected(),
&db_config &db_config
).unwrap(); ).unwrap();
let non_existant = client.block_header(BlockID::Number(188)); let non_existant = client.block_header(BlockId::Number(188));
assert!(non_existant.is_none()); assert!(non_existant.is_none());
} }
@ -125,7 +125,7 @@ fn query_none_block() {
fn query_bad_block() { fn query_bad_block() {
let client_result = get_test_client_with_blocks(vec![get_bad_state_dummy_block()]); let client_result = get_test_client_with_blocks(vec![get_bad_state_dummy_block()]);
let client = client_result.reference(); let client = client_result.reference();
let bad_block:Option<Bytes> = client.block_header(BlockID::Number(1)); let bad_block:Option<Bytes> = client.block_header(BlockId::Number(1));
assert!(bad_block.is_none()); assert!(bad_block.is_none());
} }
@ -146,8 +146,8 @@ fn returns_logs() {
let client_result = get_test_client_with_blocks(vec![dummy_block.clone()]); let client_result = get_test_client_with_blocks(vec![dummy_block.clone()]);
let client = client_result.reference(); let client = client_result.reference();
let logs = client.logs(Filter { let logs = client.logs(Filter {
from_block: BlockID::Earliest, from_block: BlockId::Earliest,
to_block: BlockID::Latest, to_block: BlockId::Latest,
address: None, address: None,
topics: vec![], topics: vec![],
limit: None, limit: None,
@ -161,8 +161,8 @@ fn returns_logs_with_limit() {
let client_result = get_test_client_with_blocks(vec![dummy_block.clone()]); let client_result = get_test_client_with_blocks(vec![dummy_block.clone()]);
let client = client_result.reference(); let client = client_result.reference();
let logs = client.logs(Filter { let logs = client.logs(Filter {
from_block: BlockID::Earliest, from_block: BlockId::Earliest,
to_block: BlockID::Latest, to_block: BlockId::Latest,
address: None, address: None,
topics: vec![], topics: vec![],
limit: Some(2), limit: Some(2),
@ -176,7 +176,7 @@ fn returns_block_body() {
let client_result = get_test_client_with_blocks(vec![dummy_block.clone()]); let client_result = get_test_client_with_blocks(vec![dummy_block.clone()]);
let client = client_result.reference(); let client = client_result.reference();
let block = BlockView::new(&dummy_block); let block = BlockView::new(&dummy_block);
let body = client.block_body(BlockID::Hash(block.header().hash())).unwrap(); let body = client.block_body(BlockId::Hash(block.header().hash())).unwrap();
let body = Rlp::new(&body); let body = Rlp::new(&body);
assert_eq!(body.item_count(), 2); assert_eq!(body.item_count(), 2);
assert_eq!(body.at(0).as_raw()[..], block.rlp().at(1).as_raw()[..]); assert_eq!(body.at(0).as_raw()[..], block.rlp().at(1).as_raw()[..]);
@ -187,7 +187,7 @@ fn returns_block_body() {
fn imports_block_sequence() { fn imports_block_sequence() {
let client_result = generate_dummy_client(6); let client_result = generate_dummy_client(6);
let client = client_result.reference(); let client = client_result.reference();
let block = client.block_header(BlockID::Number(5)).unwrap(); let block = client.block_header(BlockId::Number(5)).unwrap();
assert!(!block.is_empty()); assert!(!block.is_empty());
} }


@ -19,7 +19,7 @@
use nanoipc; use nanoipc;
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::{Ordering, AtomicBool}; use std::sync::atomic::{Ordering, AtomicBool};
use client::{Client, BlockChainClient, ClientConfig, BlockID}; use client::{Client, BlockChainClient, ClientConfig, BlockId};
use client::remote::RemoteClient; use client::remote::RemoteClient;
use tests::helpers::*; use tests::helpers::*;
use devtools::*; use devtools::*;
@ -71,7 +71,7 @@ fn can_query_block() {
run_test_worker(scope, stop_guard.share(), socket_path); run_test_worker(scope, stop_guard.share(), socket_path);
let remote_client = nanoipc::generic_client::<RemoteClient<_>>(socket_path).unwrap(); let remote_client = nanoipc::generic_client::<RemoteClient<_>>(socket_path).unwrap();
let non_existant_block = remote_client.block_header(BlockID::Number(999)); let non_existant_block = remote_client.block_header(BlockId::Number(999));
assert!(non_existant_block.is_none()); assert!(non_existant_block.is_none());
}) })


@ -94,7 +94,7 @@ impl Key<blooms::BloomGroup> for TraceGroupPosition {
} }
#[derive(Debug, Hash, Eq, PartialEq)] #[derive(Debug, Hash, Eq, PartialEq)]
enum CacheID { enum CacheId {
Trace(H256), Trace(H256),
Bloom(TraceGroupPosition), Bloom(TraceGroupPosition),
} }
@ -104,7 +104,7 @@ pub struct TraceDB<T> where T: DatabaseExtras {
// cache // cache
traces: RwLock<HashMap<H256, FlatBlockTraces>>, traces: RwLock<HashMap<H256, FlatBlockTraces>>,
blooms: RwLock<HashMap<TraceGroupPosition, blooms::BloomGroup>>, blooms: RwLock<HashMap<TraceGroupPosition, blooms::BloomGroup>>,
cache_manager: RwLock<CacheManager<CacheID>>, cache_manager: RwLock<CacheManager<CacheId>>,
// db // db
tracesdb: Arc<Database>, tracesdb: Arc<Database>,
// config, // config,
@ -119,7 +119,7 @@ impl<T> BloomGroupDatabase for TraceDB<T> where T: DatabaseExtras {
fn blooms_at(&self, position: &GroupPosition) -> Option<BloomGroup> { fn blooms_at(&self, position: &GroupPosition) -> Option<BloomGroup> {
let position = TraceGroupPosition::from(position.clone()); let position = TraceGroupPosition::from(position.clone());
let result = self.tracesdb.read_with_cache(db::COL_TRACE, &self.blooms, &position).map(Into::into); let result = self.tracesdb.read_with_cache(db::COL_TRACE, &self.blooms, &position).map(Into::into);
self.note_used(CacheID::Bloom(position)); self.note_used(CacheId::Bloom(position));
result result
} }
} }
@ -152,7 +152,7 @@ impl<T> TraceDB<T> where T: DatabaseExtras {
} }
/// Let the cache system know that a cacheable item has been used. /// Let the cache system know that a cacheable item has been used.
fn note_used(&self, id: CacheID) { fn note_used(&self, id: CacheId) {
let mut cache_manager = self.cache_manager.write(); let mut cache_manager = self.cache_manager.write();
cache_manager.note_used(id); cache_manager.note_used(id);
} }
@ -168,8 +168,8 @@ impl<T> TraceDB<T> where T: DatabaseExtras {
cache_manager.collect_garbage(current_size, | ids | { cache_manager.collect_garbage(current_size, | ids | {
for id in &ids { for id in &ids {
match *id { match *id {
CacheID::Trace(ref h) => { traces.remove(h); }, CacheId::Trace(ref h) => { traces.remove(h); },
CacheID::Bloom(ref h) => { blooms.remove(h); }, CacheId::Bloom(ref h) => { blooms.remove(h); },
} }
} }
traces.shrink_to_fit(); traces.shrink_to_fit();
@ -182,7 +182,7 @@ impl<T> TraceDB<T> where T: DatabaseExtras {
/// Returns traces for block with hash. /// Returns traces for block with hash.
fn traces(&self, block_hash: &H256) -> Option<FlatBlockTraces> { fn traces(&self, block_hash: &H256) -> Option<FlatBlockTraces> {
let result = self.tracesdb.read_with_cache(db::COL_TRACE, &self.traces, block_hash); let result = self.tracesdb.read_with_cache(db::COL_TRACE, &self.traces, block_hash);
self.note_used(CacheID::Trace(block_hash.clone())); self.note_used(CacheId::Trace(block_hash.clone()));
result result
} }
@ -289,7 +289,7 @@ impl<T> TraceDatabase for TraceDB<T> where T: DatabaseExtras {
batch.extend_with_cache(db::COL_TRACE, &mut *blooms, blooms_to_insert, CacheUpdatePolicy::Remove); batch.extend_with_cache(db::COL_TRACE, &mut *blooms, blooms_to_insert, CacheUpdatePolicy::Remove);
// note_used must be called after locking blooms to avoid cache/traces deadlock on garbage collection // note_used must be called after locking blooms to avoid cache/traces deadlock on garbage collection
for key in blooms_keys { for key in blooms_keys {
self.note_used(CacheID::Bloom(key)); self.note_used(CacheId::Bloom(key));
} }
} }
@ -300,7 +300,7 @@ impl<T> TraceDatabase for TraceDB<T> where T: DatabaseExtras {
// cause this value might be queried by hash later // cause this value might be queried by hash later
batch.write_with_cache(db::COL_TRACE, &mut *traces, request.block_hash, request.traces, CacheUpdatePolicy::Overwrite); batch.write_with_cache(db::COL_TRACE, &mut *traces, request.block_hash, request.traces, CacheUpdatePolicy::Overwrite);
// note_used must be called after locking traces to avoid cache/traces deadlock on garbage collection // note_used must be called after locking traces to avoid cache/traces deadlock on garbage collection
self.note_used(CacheID::Trace(request.block_hash.clone())); self.note_used(CacheId::Trace(request.block_hash.clone()));
} }
} }
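The trace DB routes both trace and bloom cache usage through the renamed `CacheId` enum, so a single manager can decide which entries of either kind to evict. A toy sketch of that "note usage, evict via callback" shape, with a `VecDeque` LRU list standing in for Parity's `CacheManager`:

```rust
use std::collections::{HashMap, VecDeque};

type H256 = u64; // stand-in hash type

// Mirror of the renamed CacheId: one key type spanning both caches.
#[derive(Debug, Clone, PartialEq, Eq)]
enum CacheId {
    Trace(H256),
    Bloom(u64),
}

#[derive(Default)]
struct ToyCacheManager {
    // Most recently used at the back; toy stand-in for the real usage tracking.
    usage: VecDeque<CacheId>,
}

impl ToyCacheManager {
    fn note_used(&mut self, id: CacheId) {
        self.usage.retain(|existing| *existing != id);
        self.usage.push_back(id);
    }

    // Evict least recently used ids until at most `keep` remain, handing each
    // evicted id back so the caller can drop it from the matching cache.
    fn collect_garbage<F: FnMut(CacheId)>(&mut self, keep: usize, mut evict: F) {
        while self.usage.len() > keep {
            if let Some(id) = self.usage.pop_front() {
                evict(id);
            }
        }
    }
}

fn main() {
    let mut traces: HashMap<H256, &str> = HashMap::new();
    let mut blooms: HashMap<u64, &str> = HashMap::new();
    let mut manager = ToyCacheManager::default();

    traces.insert(1, "trace-1");
    manager.note_used(CacheId::Trace(1));
    blooms.insert(7, "bloom-7");
    manager.note_used(CacheId::Bloom(7));
    traces.insert(2, "trace-2");
    manager.note_used(CacheId::Trace(2));

    manager.collect_garbage(2, |id| match id {
        CacheId::Trace(h) => { traces.remove(&h); }
        CacheId::Bloom(p) => { blooms.remove(&p); }
    });

    assert!(traces.get(&1).is_none()); // oldest entry was evicted
    assert_eq!(traces.len() + blooms.len(), 2);
}
```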


@ -18,17 +18,17 @@
use util::{Address, H256, Hashable, H2048}; use util::{Address, H256, Hashable, H2048};
use util::bloom::Bloomable; use util::bloom::Bloomable;
use client::BlockID; use client::BlockId;
use log_entry::LogEntry; use log_entry::LogEntry;
/// Blockchain Filter. /// Blockchain Filter.
#[derive(Binary, Debug, PartialEq)] #[derive(Binary, Debug, PartialEq)]
pub struct Filter { pub struct Filter {
/// Blockchain will be searched from this block. /// Blockchain will be searched from this block.
pub from_block: BlockID, pub from_block: BlockId,
/// Till this block. /// Till this block.
pub to_block: BlockID, pub to_block: BlockId,
/// Search addresses. /// Search addresses.
/// ///
@ -114,14 +114,14 @@ impl Filter {
mod tests { mod tests {
use util::FixedHash; use util::FixedHash;
use filter::Filter; use filter::Filter;
use client::BlockID; use client::BlockId;
use log_entry::LogEntry; use log_entry::LogEntry;
#[test] #[test]
fn test_bloom_possibilities_none() { fn test_bloom_possibilities_none() {
let none_filter = Filter { let none_filter = Filter {
from_block: BlockID::Earliest, from_block: BlockId::Earliest,
to_block: BlockID::Latest, to_block: BlockId::Latest,
address: None, address: None,
topics: vec![None, None, None, None], topics: vec![None, None, None, None],
limit: None, limit: None,
@ -136,8 +136,8 @@ mod tests {
#[test] #[test]
fn test_bloom_possibilities_single_address_and_topic() { fn test_bloom_possibilities_single_address_and_topic() {
let filter = Filter { let filter = Filter {
from_block: BlockID::Earliest, from_block: BlockId::Earliest,
to_block: BlockID::Latest, to_block: BlockId::Latest,
address: Some(vec!["b372018f3be9e171df0581136b59d2faf73a7d5d".into()]), address: Some(vec!["b372018f3be9e171df0581136b59d2faf73a7d5d".into()]),
topics: vec![ topics: vec![
Some(vec!["ff74e91598aed6ae5d2fdcf8b24cd2c7be49a0808112a305069355b7160f23f9".into()]), Some(vec!["ff74e91598aed6ae5d2fdcf8b24cd2c7be49a0808112a305069355b7160f23f9".into()]),
@ -155,8 +155,8 @@ mod tests {
#[test] #[test]
fn test_bloom_possibilities_single_address_and_many_topics() { fn test_bloom_possibilities_single_address_and_many_topics() {
let filter = Filter { let filter = Filter {
from_block: BlockID::Earliest, from_block: BlockId::Earliest,
to_block: BlockID::Latest, to_block: BlockId::Latest,
address: Some(vec!["b372018f3be9e171df0581136b59d2faf73a7d5d".into()]), address: Some(vec!["b372018f3be9e171df0581136b59d2faf73a7d5d".into()]),
topics: vec![ topics: vec![
Some(vec!["ff74e91598aed6ae5d2fdcf8b24cd2c7be49a0808112a305069355b7160f23f9".into()]), Some(vec!["ff74e91598aed6ae5d2fdcf8b24cd2c7be49a0808112a305069355b7160f23f9".into()]),
@ -174,8 +174,8 @@ mod tests {
#[test] #[test]
fn test_bloom_possibilites_multiple_addresses_and_topics() { fn test_bloom_possibilites_multiple_addresses_and_topics() {
let filter = Filter { let filter = Filter {
from_block: BlockID::Earliest, from_block: BlockId::Earliest,
to_block: BlockID::Latest, to_block: BlockId::Latest,
address: Some(vec![ address: Some(vec![
"b372018f3be9e171df0581136b59d2faf73a7d5d".into(), "b372018f3be9e171df0581136b59d2faf73a7d5d".into(),
"b372018f3be9e171df0581136b59d2faf73a7d5d".into(), "b372018f3be9e171df0581136b59d2faf73a7d5d".into(),
@ -204,8 +204,8 @@ mod tests {
#[test] #[test]
fn test_filter_matches() { fn test_filter_matches() {
let filter = Filter { let filter = Filter {
from_block: BlockID::Earliest, from_block: BlockId::Earliest,
to_block: BlockID::Latest, to_block: BlockId::Latest,
address: Some(vec!["b372018f3be9e171df0581136b59d2faf73a7d5d".into()]), address: Some(vec!["b372018f3be9e171df0581136b59d2faf73a7d5d".into()]),
topics: vec![ topics: vec![
Some(vec!["ff74e91598aed6ae5d2fdcf8b24cd2c7be49a0808112a305069355b7160f23f9".into()]), Some(vec!["ff74e91598aed6ae5d2fdcf8b24cd2c7be49a0808112a305069355b7160f23f9".into()]),


@ -21,7 +21,7 @@ use header::BlockNumber;
/// Uniquely identifies block. /// Uniquely identifies block.
#[derive(Debug, PartialEq, Copy, Clone, Hash, Eq, Binary)] #[derive(Debug, PartialEq, Copy, Clone, Hash, Eq, Binary)]
pub enum BlockID { pub enum BlockId {
/// Block's sha3. /// Block's sha3.
/// Querying by hash is always faster. /// Querying by hash is always faster.
Hash(H256), Hash(H256),
@ -37,28 +37,28 @@ pub enum BlockID {
/// Uniquely identifies transaction. /// Uniquely identifies transaction.
#[derive(Debug, PartialEq, Clone, Hash, Eq, Binary)] #[derive(Debug, PartialEq, Clone, Hash, Eq, Binary)]
pub enum TransactionID { pub enum TransactionId {
/// Transaction's sha3. /// Transaction's sha3.
Hash(H256), Hash(H256),
/// Block id and transaction index within this block. /// Block id and transaction index within this block.
/// Querying by block position is always faster. /// Querying by block position is always faster.
Location(BlockID, usize) Location(BlockId, usize)
} }
/// Uniquely identifies Trace. /// Uniquely identifies Trace.
#[derive(Binary)] #[derive(Binary)]
pub struct TraceId { pub struct TraceId {
/// Transaction /// Transaction
pub transaction: TransactionID, pub transaction: TransactionId,
/// Trace address within transaction. /// Trace address within transaction.
pub address: Vec<usize>, pub address: Vec<usize>,
} }
/// Uniquely identifies Uncle. /// Uniquely identifies Uncle.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Binary)] #[derive(Debug, PartialEq, Eq, Copy, Clone, Binary)]
pub struct UncleID { pub struct UncleId {
/// Block id. /// Block id.
pub block: BlockID, pub block: BlockId,
/// Position in block. /// Position in block.
pub position: usize pub position: usize
} }
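These identifier types compose: a transaction can be addressed either by hash or by its position inside a block, and an uncle by block plus index. A small sketch of that composition with stand-in types (plain byte arrays instead of `H256`, no `Binary` derive):

```rust
type H256 = [u8; 32];

#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum BlockId {
    Hash(H256),
    Number(u64),
    Earliest,
    Latest,
}

#[allow(dead_code)]
#[derive(Debug, Clone, PartialEq, Eq)]
enum TransactionId {
    Hash(H256),
    /// Block id plus transaction index; position lookups skip the hash index.
    Location(BlockId, usize),
}

#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct UncleId {
    block: BlockId,
    position: usize,
}

fn main() {
    // "Third transaction of block 1,000,000", no transaction hash needed.
    let by_position = TransactionId::Location(BlockId::Number(1_000_000), 2);
    // "First uncle of the latest block".
    let uncle = UncleId { block: BlockId::Latest, position: 0 };
    println!("{:?} / {:?}", by_position, uncle);
}
```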


@ -18,13 +18,13 @@
use std::ops::Range; use std::ops::Range;
use util::{Address}; use util::{Address};
use types::ids::BlockID; use types::ids::BlockId;
/// Easy to use trace filter. /// Easy to use trace filter.
#[derive(Binary)] #[derive(Binary)]
pub struct Filter { pub struct Filter {
/// Range of filtering. /// Range of filtering.
pub range: Range<BlockID>, pub range: Range<BlockId>,
/// From address. /// From address.
pub from_address: Vec<Address>, pub from_address: Vec<Address>,
/// To address. /// To address.


@ -20,7 +20,7 @@ use std::collections::HashMap;
use time; use time;
use ethkey::Address; use ethkey::Address;
use {json, SafeAccount, Error}; use {json, SafeAccount, Error};
use json::UUID; use json::Uuid;
use super::KeyDirectory; use super::KeyDirectory;
const IGNORED_FILES: &'static [&'static str] = &["thumbs.db", "address_book.json"]; const IGNORED_FILES: &'static [&'static str] = &["thumbs.db", "address_book.json"];
@ -113,7 +113,7 @@ impl KeyDirectory for DiskDirectory {
// build file path // build file path
let filename = account.filename.as_ref().cloned().unwrap_or_else(|| { let filename = account.filename.as_ref().cloned().unwrap_or_else(|| {
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid."); let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
format!("UTC--{}Z--{}", timestamp, UUID::from(account.id)) format!("UTC--{}Z--{}", timestamp, Uuid::from(account.id))
}); });
// update account filename // update account filename


@ -24,7 +24,7 @@ use dir::KeyDirectory;
use account::SafeAccount; use account::SafeAccount;
use {Error, SecretStore}; use {Error, SecretStore};
use json; use json;
use json::UUID; use json::Uuid;
use parking_lot::RwLock; use parking_lot::RwLock;
use presale::PresaleWallet; use presale::PresaleWallet;
use import; use import;
@ -154,7 +154,7 @@ impl SecretStore for EthStore {
account.public(password) account.public(password)
} }
fn uuid(&self, address: &Address) -> Result<UUID, Error> { fn uuid(&self, address: &Address) -> Result<Uuid, Error> {
let account = try!(self.get(address)); let account = try!(self.get(address));
Ok(account.id.into()) Ok(account.id.into())
} }


@ -21,7 +21,7 @@ pub enum Error {
UnsupportedCipher, UnsupportedCipher,
InvalidCipherParams, InvalidCipherParams,
UnsupportedKdf, UnsupportedKdf,
InvalidUUID, InvalidUuid,
UnsupportedVersion, UnsupportedVersion,
InvalidCiphertext, InvalidCiphertext,
InvalidH256, InvalidH256,
@ -31,7 +31,7 @@ pub enum Error {
impl fmt::Display for Error { impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
match *self { match *self {
Error::InvalidUUID => write!(f, "Invalid UUID"), Error::InvalidUuid => write!(f, "Invalid Uuid"),
Error::UnsupportedVersion => write!(f, "Unsupported version"), Error::UnsupportedVersion => write!(f, "Unsupported version"),
Error::UnsupportedKdf => write!(f, "Unsupported kdf"), Error::UnsupportedKdf => write!(f, "Unsupported kdf"),
Error::InvalidCiphertext => write!(f, "Invalid ciphertext"), Error::InvalidCiphertext => write!(f, "Invalid ciphertext"),


@ -23,15 +23,15 @@ use super::Error;
/// Universally unique identifier. /// Universally unique identifier.
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct UUID([u8; 16]); pub struct Uuid([u8; 16]);
impl From<[u8; 16]> for UUID { impl From<[u8; 16]> for Uuid {
fn from(uuid: [u8; 16]) -> Self { fn from(uuid: [u8; 16]) -> Self {
UUID(uuid) Uuid(uuid)
} }
} }
impl<'a> Into<String> for &'a UUID { impl<'a> Into<String> for &'a Uuid {
fn into(self) -> String { fn into(self) -> String {
let d1 = &self.0[0..4]; let d1 = &self.0[0..4];
let d2 = &self.0[4..6]; let d2 = &self.0[4..6];
@ -42,44 +42,44 @@ impl<'a> Into<String> for &'a UUID {
} }
} }
impl Into<String> for UUID { impl Into<String> for Uuid {
fn into(self) -> String { fn into(self) -> String {
Into::into(&self) Into::into(&self)
} }
} }
impl Into<[u8; 16]> for UUID { impl Into<[u8; 16]> for Uuid {
fn into(self) -> [u8; 16] { fn into(self) -> [u8; 16] {
self.0 self.0
} }
} }
impl fmt::Display for UUID { impl fmt::Display for Uuid {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let s: String = (self as &UUID).into(); let s: String = (self as &Uuid).into();
write!(f, "{}", s) write!(f, "{}", s)
} }
} }
fn copy_into(from: &str, into: &mut [u8]) -> Result<(), Error> { fn copy_into(from: &str, into: &mut [u8]) -> Result<(), Error> {
let from = try!(from.from_hex().map_err(|_| Error::InvalidUUID)); let from = try!(from.from_hex().map_err(|_| Error::InvalidUuid));
if from.len() != into.len() { if from.len() != into.len() {
return Err(Error::InvalidUUID); return Err(Error::InvalidUuid);
} }
into.copy_from_slice(&from); into.copy_from_slice(&from);
Ok(()) Ok(())
} }
impl str::FromStr for UUID { impl str::FromStr for Uuid {
type Err = Error; type Err = Error;
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
let parts: Vec<&str> = s.split("-").collect(); let parts: Vec<&str> = s.split("-").collect();
if parts.len() != 5 { if parts.len() != 5 {
return Err(Error::InvalidUUID); return Err(Error::InvalidUuid);
} }
let mut uuid = [0u8; 16]; let mut uuid = [0u8; 16];
@ -90,17 +90,17 @@ impl str::FromStr for UUID {
try!(copy_into(parts[3], &mut uuid[8..10])); try!(copy_into(parts[3], &mut uuid[8..10]));
try!(copy_into(parts[4], &mut uuid[10..16])); try!(copy_into(parts[4], &mut uuid[10..16]));
Ok(UUID(uuid)) Ok(Uuid(uuid))
} }
} }
impl From<&'static str> for UUID { impl From<&'static str> for Uuid {
fn from(s: &'static str) -> Self { fn from(s: &'static str) -> Self {
s.parse().expect(&format!("invalid string literal for {}: '{}'", stringify!(Self), s)) s.parse().expect(&format!("invalid string literal for {}: '{}'", stringify!(Self), s))
} }
} }
impl Serialize for UUID { impl Serialize for Uuid {
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error> fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
where S: Serializer { where S: Serializer {
let s: String = self.into(); let s: String = self.into();
@ -108,17 +108,17 @@ impl Serialize for UUID {
} }
} }
impl Deserialize for UUID { impl Deserialize for Uuid {
fn deserialize<D>(deserializer: &mut D) -> Result<Self, D::Error> fn deserialize<D>(deserializer: &mut D) -> Result<Self, D::Error>
where D: Deserializer { where D: Deserializer {
deserializer.deserialize(UUIDVisitor) deserializer.deserialize(UuidVisitor)
} }
} }
struct UUIDVisitor; struct UuidVisitor;
impl Visitor for UUIDVisitor { impl Visitor for UuidVisitor {
type Value = UUID; type Value = Uuid;
fn visit_str<E>(&mut self, value: &str) -> Result<Self::Value, E> where E: SerdeError { fn visit_str<E>(&mut self, value: &str) -> Result<Self::Value, E> where E: SerdeError {
value.parse().map_err(SerdeError::custom) value.parse().map_err(SerdeError::custom)
@ -131,18 +131,18 @@ impl Visitor for UUIDVisitor {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::UUID; use super::Uuid;
#[test] #[test]
fn uuid_from_str() { fn uuid_from_str() {
let uuid: UUID = "3198bc9c-6672-5ab3-d995-4942343ae5b6".into(); let uuid: Uuid = "3198bc9c-6672-5ab3-d995-4942343ae5b6".into();
assert_eq!(uuid, UUID::from([0x31, 0x98, 0xbc, 0x9c, 0x66, 0x72, 0x5a, 0xb3, 0xd9, 0x95, 0x49, 0x42, 0x34, 0x3a, 0xe5, 0xb6])); assert_eq!(uuid, Uuid::from([0x31, 0x98, 0xbc, 0x9c, 0x66, 0x72, 0x5a, 0xb3, 0xd9, 0x95, 0x49, 0x42, 0x34, 0x3a, 0xe5, 0xb6]));
} }
#[test] #[test]
fn uuid_from_and_to_str() { fn uuid_from_and_to_str() {
let from = "3198bc9c-6672-5ab3-d995-4942343ae5b6"; let from = "3198bc9c-6672-5ab3-d995-4942343ae5b6";
let uuid: UUID = from.into(); let uuid: Uuid = from.into();
let to: String = uuid.into(); let to: String = uuid.into();
assert_eq!(from, &to); assert_eq!(from, &to);
} }


@ -18,11 +18,11 @@ use std::io::{Read, Write};
use serde::{Deserialize, Deserializer, Error}; use serde::{Deserialize, Deserializer, Error};
use serde::de::{Visitor, MapVisitor}; use serde::de::{Visitor, MapVisitor};
use serde_json; use serde_json;
use super::{UUID, Version, Crypto, H160}; use super::{Uuid, Version, Crypto, H160};
#[derive(Debug, PartialEq, Serialize)] #[derive(Debug, PartialEq, Serialize)]
pub struct KeyFile { pub struct KeyFile {
pub id: UUID, pub id: Uuid,
pub version: Version, pub version: Version,
pub crypto: Crypto, pub crypto: Crypto,
pub address: H160, pub address: H160,
@ -31,7 +31,7 @@ pub struct KeyFile {
} }
enum KeyFileField { enum KeyFileField {
ID, Id,
Version, Version,
Crypto, Crypto,
Address, Address,
@ -56,7 +56,7 @@ impl Visitor for KeyFileFieldVisitor {
where E: Error where E: Error
{ {
match value { match value {
"id" => Ok(KeyFileField::ID), "id" => Ok(KeyFileField::Id),
"version" => Ok(KeyFileField::Version), "version" => Ok(KeyFileField::Version),
"crypto" => Ok(KeyFileField::Crypto), "crypto" => Ok(KeyFileField::Crypto),
"Crypto" => Ok(KeyFileField::Crypto), "Crypto" => Ok(KeyFileField::Crypto),
@ -94,7 +94,7 @@ impl Visitor for KeyFileVisitor {
loop { loop {
match try!(visitor.visit_key()) { match try!(visitor.visit_key()) {
Some(KeyFileField::ID) => { id = Some(try!(visitor.visit_value())); } Some(KeyFileField::Id) => { id = Some(try!(visitor.visit_value())); }
Some(KeyFileField::Version) => { version = Some(try!(visitor.visit_value())); } Some(KeyFileField::Version) => { version = Some(try!(visitor.visit_value())); }
Some(KeyFileField::Crypto) => { crypto = Some(try!(visitor.visit_value())); } Some(KeyFileField::Crypto) => { crypto = Some(try!(visitor.visit_value())); }
Some(KeyFileField::Address) => { address = Some(try!(visitor.visit_value())); } Some(KeyFileField::Address) => { address = Some(try!(visitor.visit_value())); }
@ -153,7 +153,7 @@ impl KeyFile {
mod tests { mod tests {
use std::str::FromStr; use std::str::FromStr;
use serde_json; use serde_json;
use json::{KeyFile, UUID, Version, Crypto, Cipher, Aes128Ctr, Kdf, Scrypt}; use json::{KeyFile, Uuid, Version, Crypto, Cipher, Aes128Ctr, Kdf, Scrypt};
#[test] #[test]
fn basic_keyfile() { fn basic_keyfile() {
@ -183,7 +183,7 @@ mod tests {
}"#; }"#;
let expected = KeyFile { let expected = KeyFile {
id: UUID::from_str("8777d9f6-7860-4b9b-88b7-0b57ee6b3a73").unwrap(), id: Uuid::from_str("8777d9f6-7860-4b9b-88b7-0b57ee6b3a73").unwrap(),
version: Version::V3, version: Version::V3,
address: "6edddfc6349aff20bc6467ccf276c5b52487f7a8".into(), address: "6edddfc6349aff20bc6467ccf276c5b52487f7a8".into(),
crypto: Crypto { crypto: Crypto {


@ -14,7 +14,7 @@ pub use self::cipher::{Cipher, CipherSer, CipherSerParams, Aes128Ctr};
pub use self::crypto::{Crypto, CipherText}; pub use self::crypto::{Crypto, CipherText};
pub use self::error::Error; pub use self::error::Error;
pub use self::hash::{H128, H160, H256}; pub use self::hash::{H128, H160, H256};
pub use self::id::UUID; pub use self::id::Uuid;
pub use self::kdf::{Kdf, KdfSer, Prf, Pbkdf2, Scrypt, KdfSerParams}; pub use self::kdf::{Kdf, KdfSer, Prf, Pbkdf2, Scrypt, KdfSerParams};
pub use self::key_file::KeyFile; pub use self::key_file::KeyFile;
pub use self::presale::{PresaleWallet, Encseed}; pub use self::presale::{PresaleWallet, Encseed};


@ -16,7 +16,7 @@
use ethkey::{Address, Message, Signature, Secret, Public}; use ethkey::{Address, Message, Signature, Secret, Public};
use Error; use Error;
use json::UUID; use json::Uuid;
pub trait SecretStore: Send + Sync { pub trait SecretStore: Send + Sync {
fn insert_account(&self, secret: Secret, password: &str) -> Result<Address, Error>; fn insert_account(&self, secret: Secret, password: &str) -> Result<Address, Error>;
@ -30,7 +30,7 @@ pub trait SecretStore: Send + Sync {
fn public(&self, account: &Address, password: &str) -> Result<Public, Error>; fn public(&self, account: &Address, password: &str) -> Result<Public, Error>;
fn accounts(&self) -> Result<Vec<Address>, Error>; fn accounts(&self) -> Result<Vec<Address>, Error>;
fn uuid(&self, account: &Address) -> Result<UUID, Error>; fn uuid(&self, account: &Address) -> Result<Uuid, Error>;
fn name(&self, account: &Address) -> Result<String, Error>; fn name(&self, account: &Address) -> Result<String, Error>;
fn meta(&self, account: &Address) -> Result<String, Error>; fn meta(&self, account: &Address) -> Result<String, Error>;


@ -17,6 +17,15 @@
}, },
"development": { "development": {
"plugins": ["react-hot-loader/babel"] "plugins": ["react-hot-loader/babel"]
},
"test": {
"plugins": [
[
"babel-plugin-webpack-alias", {
"config": "webpack/test.js"
}
]
]
} }
} }
} }


@ -1,6 +1,6 @@
{ {
"name": "parity.js", "name": "parity.js",
"version": "0.2.105", "version": "0.2.106",
"main": "release/index.js", "main": "release/index.js",
"jsnext:main": "src/index.js", "jsnext:main": "src/index.js",
"author": "Parity Team <admin@parity.io>", "author": "Parity Team <admin@parity.io>",
@ -40,9 +40,9 @@
"coveralls": "npm run testCoverage && coveralls < coverage/lcov.info", "coveralls": "npm run testCoverage && coveralls < coverage/lcov.info",
"lint": "eslint --ignore-path .gitignore ./src/", "lint": "eslint --ignore-path .gitignore ./src/",
"lint:cached": "eslint --cache --ignore-path .gitignore ./src/", "lint:cached": "eslint --cache --ignore-path .gitignore ./src/",
"test": "mocha 'src/**/*.spec.js'", "test": "NODE_ENV=test mocha 'src/**/*.spec.js'",
"test:coverage": "istanbul cover _mocha -- 'src/**/*.spec.js'", "test:coverage": "NODE_ENV=test istanbul cover _mocha -- 'src/**/*.spec.js'",
"test:e2e": "mocha 'src/**/*.e2e.js'", "test:e2e": "NODE_ENV=test mocha 'src/**/*.e2e.js'",
"test:npm": "(cd .npmjs && npm i) && node test/npmLibrary && (rm -rf .npmjs/node_modules)", "test:npm": "(cd .npmjs && npm i) && node test/npmLibrary && (rm -rf .npmjs/node_modules)",
"prepush": "npm run lint:cached" "prepush": "npm run lint:cached"
}, },
@ -57,6 +57,7 @@
"babel-plugin-transform-object-rest-spread": "6.20.2", "babel-plugin-transform-object-rest-spread": "6.20.2",
"babel-plugin-transform-react-remove-prop-types": "0.2.11", "babel-plugin-transform-react-remove-prop-types": "0.2.11",
"babel-plugin-transform-runtime": "6.15.0", "babel-plugin-transform-runtime": "6.15.0",
"babel-plugin-webpack-alias": "2.1.2",
"babel-polyfill": "6.20.0", "babel-polyfill": "6.20.0",
"babel-preset-es2015": "6.18.0", "babel-preset-es2015": "6.18.0",
"babel-preset-es2016": "6.16.0", "babel-preset-es2016": "6.16.0",
@ -66,6 +67,7 @@
"babel-register": "6.18.0", "babel-register": "6.18.0",
"babel-runtime": "6.20.0", "babel-runtime": "6.20.0",
"chai": "3.5.0", "chai": "3.5.0",
"chai-as-promised": "6.0.0",
"chai-enzyme": "0.6.1", "chai-enzyme": "0.6.1",
"circular-dependency-plugin": "2.0.0", "circular-dependency-plugin": "2.0.0",
"copy-webpack-plugin": "4.0.1", "copy-webpack-plugin": "4.0.1",
@ -99,8 +101,8 @@
"mock-local-storage": "1.0.2", "mock-local-storage": "1.0.2",
"mock-socket": "6.0.3", "mock-socket": "6.0.3",
"nock": "9.0.2", "nock": "9.0.2",
"postcss-import": "8.1.0", "postcss-import": "9.0.0",
"postcss-loader": "1.1.1", "postcss-loader": "1.2.0",
"postcss-nested": "1.0.0", "postcss-nested": "1.0.0",
"postcss-simple-vars": "3.0.0", "postcss-simple-vars": "3.0.0",
"progress": "1.1.8", "progress": "1.1.8",
@ -137,13 +139,14 @@
"js-sha3": "0.5.5", "js-sha3": "0.5.5",
"lodash": "4.17.2", "lodash": "4.17.2",
"marked": "0.3.6", "marked": "0.3.6",
"material-ui": "0.16.4", "material-ui": "0.16.5",
"material-ui-chip-input": "0.11.1", "material-ui-chip-input": "0.11.1",
"mobx": "2.6.4", "mobx": "2.6.4",
"mobx-react": "4.0.3", "mobx-react": "4.0.3",
"mobx-react-devtools": "4.2.10", "mobx-react-devtools": "4.2.10",
"moment": "2.17.0", "moment": "2.17.0",
"phoneformat.js": "1.0.3", "phoneformat.js": "1.0.3",
"push.js": "0.0.11",
"qs": "6.3.0", "qs": "6.3.0",
"react": "15.4.1", "react": "15.4.1",
"react-ace": "4.1.0", "react-ace": "4.1.0",


@ -19,7 +19,10 @@ import * as abis from './abi';
export default class Registry { export default class Registry {
constructor (api) { constructor (api) {
this._api = api; this._api = api;
this._contracts = [];
this._contracts = {};
this._pendingContracts = {};
this._instance = null; this._instance = null;
this._fetching = false; this._fetching = false;
this._queue = []; this._queue = [];
@ -59,20 +62,25 @@ export default class Registry {
getContract (_name) { getContract (_name) {
const name = _name.toLowerCase(); const name = _name.toLowerCase();
return new Promise((resolve, reject) => {
if (this._contracts[name]) { if (this._contracts[name]) {
resolve(this._contracts[name]); return Promise.resolve(this._contracts[name]);
return;
} }
this if (this._pendingContracts[name]) {
return this._pendingContracts[name];
}
const promise = this
.lookupAddress(name) .lookupAddress(name)
.then((address) => { .then((address) => {
this._contracts[name] = this._api.newContract(abis[name], address); this._contracts[name] = this._api.newContract(abis[name], address);
resolve(this._contracts[name]); delete this._pendingContracts[name];
}) return this._contracts[name];
.catch(reject);
}); });
this._pendingContracts[name] = promise;
return promise;
} }
getContractInstance (_name) { getContractInstance (_name) {
@ -89,7 +97,7 @@ export default class Registry {
return instance.getAddress.call({}, [sha3, 'A']); return instance.getAddress.call({}, [sha3, 'A']);
}) })
.then((address) => { .then((address) => {
console.log('lookupAddress', name, sha3, address); console.log('[lookupAddress]', `(${sha3}) ${name}: ${address}`);
return address; return address;
}); });
} }


@ -21,7 +21,7 @@ import '../../../environment/tests';
import Application from './application'; import Application from './application';
describe('localtx/Application', () => { describe('dapps/localtx/Application', () => {
describe('rendering', () => { describe('rendering', () => {
it('renders without crashing', () => { it('renders without crashing', () => {
const rendered = shallow(<Application />); const rendered = shallow(<Application />);


@ -29,7 +29,7 @@ Api.api = {
import BigNumber from 'bignumber.js'; import BigNumber from 'bignumber.js';
import { Transaction, LocalTransaction } from './transaction'; import { Transaction, LocalTransaction } from './transaction';
describe('localtx/Transaction', () => { describe('dapps/localtx/Transaction', () => {
describe('rendering', () => { describe('rendering', () => {
it('renders without crashing', () => { it('renders without crashing', () => {
const transaction = { const transaction = {
@ -51,7 +51,7 @@ describe('localtx/Transaction', () => {
}); });
}); });
describe('localtx/LocalTransaction', () => { describe('dapps/localtx/LocalTransaction', () => {
describe('rendering', () => { describe('rendering', () => {
it('renders without crashing', () => { it('renders without crashing', () => {
const rendered = shallow( const rendered = shallow(


@ -67,7 +67,7 @@ if (window.location.hash && window.location.hash.indexOf(AUTH_HASH) === 0) {
const api = new SecureApi(`ws://${parityUrl}`, token); const api = new SecureApi(`ws://${parityUrl}`, token);
ContractInstances.create(api); ContractInstances.create(api);
const store = initStore(api); const store = initStore(api, hashHistory);
store.dispatch({ type: 'initAll', api }); store.dispatch({ type: 'initAll', api });
store.dispatch(setApi(api)); store.dispatch(setApi(api));


@ -43,7 +43,7 @@ export default {
}, },
uuid: { uuid: {
type: String, type: String,
desc: 'The account UUID, or null if not available/unknown/not applicable.' desc: 'The account Uuid, or null if not available/unknown/not applicable.'
} }
} }
} }
@ -66,7 +66,7 @@ export default {
}, },
uuid: { uuid: {
type: String, type: String,
desc: 'The account UUID, or null if not available/unknown/not applicable.' desc: 'The account Uuid, or null if not available/unknown/not applicable.'
} }
} }
} }


@ -23,6 +23,7 @@ import { wallet as walletAbi } from '~/contracts/abi';
import { wallet as walletCode, walletLibraryRegKey, fullWalletCode } from '~/contracts/code/wallet'; import { wallet as walletCode, walletLibraryRegKey, fullWalletCode } from '~/contracts/code/wallet';
import { validateUint, validateAddress, validateName } from '~/util/validation'; import { validateUint, validateAddress, validateName } from '~/util/validation';
import { toWei } from '~/api/util/wei';
import WalletsUtils from '~/util/wallets'; import WalletsUtils from '~/util/wallets';
const STEPS = { const STEPS = {
@ -47,7 +48,7 @@ export default class CreateWalletStore {
address: '', address: '',
owners: [], owners: [],
required: 1, required: 1,
daylimit: 0, daylimit: toWei(1),
name: '', name: '',
description: '' description: ''


@ -107,10 +107,9 @@ export default class TransferStore {
constructor (api, props) { constructor (api, props) {
this.api = api; this.api = api;
const { account, balance, gasLimit, senders, onClose, newError, sendersBalances } = props; const { account, balance, gasLimit, senders, newError, sendersBalances } = props;
this.account = account; this.account = account;
this.balance = balance; this.balance = balance;
this.onClose = onClose;
this.isWallet = account && account.wallet; this.isWallet = account && account.wallet;
this.newError = newError; this.newError = newError;
@ -136,8 +135,7 @@ export default class TransferStore {
this.stage -= 1; this.stage -= 1;
} }
@action onClose = () => { @action handleClose = () => {
this.onClose && this.onClose();
this.stage = 0; this.stage = 0;
} }


@ -208,7 +208,7 @@ class Transfer extends Component {
<Button <Button
icon={ <ContentClear /> } icon={ <ContentClear /> }
label='Cancel' label='Cancel'
onClick={ this.store.onClose } /> onClick={ this.handleClose } />
); );
const nextBtn = ( const nextBtn = (
<Button <Button
@ -234,7 +234,7 @@ class Transfer extends Component {
<Button <Button
icon={ <ActionDoneAll /> } icon={ <ActionDoneAll /> }
label='Close' label='Close'
onClick={ this.store.onClose } /> onClick={ this.handleClose } />
); );
switch (stage) { switch (stage) {
@ -264,6 +264,13 @@ class Transfer extends Component {
</div> </div>
); );
} }
handleClose = () => {
const { onClose } = this.props;
this.store.handleClose();
typeof onClose === 'function' && onClose();
}
} }
function mapStateToProps (initState, initProps) { function mapStateToProps (initState, initProps) {


@ -14,6 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
import thunk from 'redux-thunk'; import thunk from 'redux-thunk';
import { routerMiddleware } from 'react-router-redux';
import ErrorsMiddleware from '~/ui/Errors/middleware'; import ErrorsMiddleware from '~/ui/Errors/middleware';
import SettingsMiddleware from '~/views/Settings/middleware'; import SettingsMiddleware from '~/views/Settings/middleware';
@ -22,12 +23,13 @@ import SignerMiddleware from './providers/signerMiddleware';
import statusMiddleware from '~/views/Status/middleware'; import statusMiddleware from '~/views/Status/middleware';
import CertificationsMiddleware from './providers/certifications/middleware'; import CertificationsMiddleware from './providers/certifications/middleware';
export default function (api) { export default function (api, browserHistory) {
const errors = new ErrorsMiddleware(); const errors = new ErrorsMiddleware();
const signer = new SignerMiddleware(api); const signer = new SignerMiddleware(api);
const settings = new SettingsMiddleware(); const settings = new SettingsMiddleware();
const status = statusMiddleware(); const status = statusMiddleware();
const certifications = new CertificationsMiddleware(); const certifications = new CertificationsMiddleware();
const routeMiddleware = routerMiddleware(browserHistory);
const middleware = [ const middleware = [
settings.toMiddleware(), settings.toMiddleware(),
@ -36,5 +38,5 @@ export default function (api) {
certifications.toMiddleware() certifications.toMiddleware()
]; ];
return middleware.concat(status, thunk); return middleware.concat(status, routeMiddleware, thunk);
} }


@ -15,11 +15,14 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { range, uniq, isEqual } from 'lodash'; import { range, uniq, isEqual } from 'lodash';
import BigNumber from 'bignumber.js';
import { push } from 'react-router-redux';
import { hashToImageUrl } from './imagesReducer'; import { hashToImageUrl } from './imagesReducer';
import { setAddressImage } from './imagesActions'; import { setAddressImage } from './imagesActions';
import * as ABIS from '~/contracts/abi'; import * as ABIS from '~/contracts/abi';
import { notifyTransaction } from '~/util/notifications';
import imagesEthereum from '../../../assets/images/contracts/ethereum-black-64x64.png'; import imagesEthereum from '../../../assets/images/contracts/ethereum-black-64x64.png';
const ETH = { const ETH = {
@ -28,7 +31,64 @@ const ETH = {
image: imagesEthereum image: imagesEthereum
}; };
export function setBalances (balances) { function setBalances (_balances) {
return (dispatch, getState) => {
const state = getState();
const accounts = state.personal.accounts;
const nextBalances = _balances;
const prevBalances = state.balances.balances;
const balances = { ...prevBalances };
Object.keys(nextBalances).forEach((address) => {
if (!balances[address]) {
balances[address] = Object.assign({}, nextBalances[address]);
return;
}
const balance = Object.assign({}, balances[address]);
const { tokens, txCount = balance.txCount } = nextBalances[address];
const nextTokens = [].concat(balance.tokens);
tokens.forEach((t) => {
const { token, value } = t;
const { tag } = token;
const tokenIndex = nextTokens.findIndex((tok) => tok.token.tag === tag);
if (tokenIndex === -1) {
nextTokens.push({
token,
value
});
} else {
const oldValue = nextTokens[tokenIndex].value;
// If received a token/eth (old value < new value), notify
if (oldValue.lt(value) && accounts[address]) {
const account = accounts[address];
const txValue = value.minus(oldValue);
const redirectToAccount = () => {
const route = `/account/${account.address}`;
dispatch(push(route));
};
notifyTransaction(account, token, txValue, redirectToAccount);
}
nextTokens[tokenIndex] = { token, value };
}
});
balances[address] = { txCount: txCount || new BigNumber(0), tokens: nextTokens };
});
dispatch(_setBalances(balances));
};
}
function _setBalances (balances) {
return { return {
type: 'setBalances', type: 'setBalances',
balances balances
@ -123,14 +183,14 @@ export function fetchBalances (_addresses) {
const fullFetch = addresses.length === 1; const fullFetch = addresses.length === 1;
const fetchedAddresses = uniq(addresses.concat(Object.keys(accounts))); const addressesToFetch = uniq(addresses.concat(Object.keys(accounts)));
return Promise return Promise
.all(fetchedAddresses.map((addr) => fetchAccount(addr, api, fullFetch))) .all(addressesToFetch.map((addr) => fetchAccount(addr, api, fullFetch)))
.then((accountsBalances) => { .then((accountsBalances) => {
const balances = {}; const balances = {};
fetchedAddresses.forEach((addr, idx) => { addressesToFetch.forEach((addr, idx) => {
balances[addr] = accountsBalances[idx]; balances[addr] = accountsBalances[idx];
}); });
@ -146,10 +206,12 @@ export function fetchBalances (_addresses) {
export function updateTokensFilter (_addresses, _tokens) { export function updateTokensFilter (_addresses, _tokens) {
return (dispatch, getState) => { return (dispatch, getState) => {
const { api, balances, personal } = getState(); const { api, balances, personal } = getState();
const { visibleAccounts } = personal; const { visibleAccounts, accounts } = personal;
const { tokensFilter } = balances; const { tokensFilter } = balances;
const addresses = uniq(_addresses || visibleAccounts || []).sort(); const addressesToFetch = uniq(visibleAccounts.concat(Object.keys(accounts)));
const addresses = uniq(_addresses || addressesToFetch || []).sort();
const tokens = _tokens || Object.values(balances.tokens) || []; const tokens = _tokens || Object.values(balances.tokens) || [];
const tokenAddresses = tokens.map((t) => t.address).sort(); const tokenAddresses = tokens.map((t) => t.address).sort();
@ -221,8 +283,10 @@ export function updateTokensFilter (_addresses, _tokens) {
export function queryTokensFilter (tokensFilter) { export function queryTokensFilter (tokensFilter) {
return (dispatch, getState) => { return (dispatch, getState) => {
const { api, personal, balances } = getState(); const { api, personal, balances } = getState();
const { visibleAccounts } = personal; const { visibleAccounts, accounts } = personal;
const visibleAddresses = visibleAccounts.map((a) => a.toLowerCase()); const visibleAddresses = visibleAccounts.map((a) => a.toLowerCase());
const addressesToFetch = uniq(visibleAddresses.concat(Object.keys(accounts)));
Promise Promise
.all([ .all([
@ -237,18 +301,16 @@ export function queryTokensFilter (tokensFilter) {
.concat(logsTo) .concat(logsTo)
.forEach((log) => { .forEach((log) => {
const tokenAddress = log.address; const tokenAddress = log.address;
const fromAddress = '0x' + log.topics[1].slice(-40); const fromAddress = '0x' + log.topics[1].slice(-40);
const toAddress = '0x' + log.topics[2].slice(-40); const toAddress = '0x' + log.topics[2].slice(-40);
const fromIdx = visibleAddresses.indexOf(fromAddress); if (addressesToFetch.includes(fromAddress)) {
const toIdx = visibleAddresses.indexOf(toAddress); addresses.push(fromAddress);
if (fromIdx > -1) {
addresses.push(visibleAccounts[fromIdx]);
} }
if (toIdx > -1) { if (addressesToFetch.includes(toAddress)) {
addresses.push(visibleAccounts[toIdx]); addresses.push(toAddress);
} }
tokenAddresses.push(tokenAddress); tokenAddresses.push(tokenAddress);
@ -269,9 +331,10 @@ export function queryTokensFilter (tokensFilter) {
export function fetchTokensBalances (_addresses = null, _tokens = null) { export function fetchTokensBalances (_addresses = null, _tokens = null) {
return (dispatch, getState) => { return (dispatch, getState) => {
const { api, personal, balances } = getState(); const { api, personal, balances } = getState();
const { visibleAccounts } = personal; const { visibleAccounts, accounts } = personal;
const addresses = _addresses || visibleAccounts; const addressesToFetch = uniq(visibleAccounts.concat(Object.keys(accounts)));
const addresses = _addresses || addressesToFetch;
const tokens = _tokens || Object.values(balances.tokens); const tokens = _tokens || Object.values(balances.tokens);
if (addresses.length === 0) { if (addresses.length === 0) {


@ -15,7 +15,6 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { handleActions } from 'redux-actions'; import { handleActions } from 'redux-actions';
import BigNumber from 'bignumber.js';
const initialState = { const initialState = {
balances: {}, balances: {},
@ -26,39 +25,7 @@ const initialState = {
export default handleActions({ export default handleActions({
setBalances (state, action) { setBalances (state, action) {
const nextBalances = action.balances; const { balances } = action;
const prevBalances = state.balances;
const balances = { ...prevBalances };
Object.keys(nextBalances).forEach((address) => {
if (!balances[address]) {
balances[address] = Object.assign({}, nextBalances[address]);
return;
}
const balance = Object.assign({}, balances[address]);
const { tokens, txCount = balance.txCount } = nextBalances[address];
const nextTokens = [].concat(balance.tokens);
tokens.forEach((t) => {
const { token, value } = t;
const { tag } = token;
const tokenIndex = nextTokens.findIndex((tok) => tok.token.tag === tag);
if (tokenIndex === -1) {
nextTokens.push({
token,
value
});
} else {
nextTokens[tokenIndex] = { token, value };
}
});
balances[address] = Object.assign({}, { txCount: txCount || new BigNumber(0), tokens: nextTokens });
});
return Object.assign({}, state, { balances }); return Object.assign({}, state, { balances });
}, },


@ -21,11 +21,11 @@ export Status from './status';
export apiReducer from './apiReducer'; export apiReducer from './apiReducer';
export balancesReducer from './balancesReducer'; export balancesReducer from './balancesReducer';
export blockchainReducer from './blockchainReducer';
export compilerReducer from './compilerReducer';
export imagesReducer from './imagesReducer'; export imagesReducer from './imagesReducer';
export personalReducer from './personalReducer'; export personalReducer from './personalReducer';
export signerReducer from './signerReducer'; export signerReducer from './signerReducer';
export statusReducer from './statusReducer';
export blockchainReducer from './blockchainReducer';
export compilerReducer from './compilerReducer';
export snackbarReducer from './snackbarReducer'; export snackbarReducer from './snackbarReducer';
export statusReducer from './statusReducer';
export walletReducer from './walletReducer'; export walletReducer from './walletReducer';


@ -36,9 +36,6 @@ export default class Personal {
} }
this._store.dispatch(personalAccountsInfo(accountsInfo)); this._store.dispatch(personalAccountsInfo(accountsInfo));
})
.then((subscriptionId) => {
console.log('personal._subscribeAccountsInfo', 'subscriptionId', subscriptionId);
}); });
} }


@ -34,9 +34,6 @@ export default class Signer {
} }
this._store.dispatch(signerRequestsToConfirm(pending || [])); this._store.dispatch(signerRequestsToConfirm(pending || []));
})
.then((subscriptionId) => {
console.log('signer._subscribeRequestsToConfirm', 'subscriptionId', subscriptionId);
}); });
} }
} }


@ -54,14 +54,14 @@ export default class Status {
this._api.eth this._api.eth
.getBlockByNumber(blockNumber) .getBlockByNumber(blockNumber)
.then((block) => { .then((block) => {
this._store.dispatch(statusCollection({ gasLimit: block.gasLimit })); this._store.dispatch(statusCollection({
blockTimestamp: block.timestamp,
gasLimit: block.gasLimit
}));
}) })
.catch((error) => { .catch((error) => {
console.warn('status._subscribeBlockNumber', 'getBlockByNumber', error); console.warn('status._subscribeBlockNumber', 'getBlockByNumber', error);
}); });
})
.then((subscriptionId) => {
console.log('status._subscribeBlockNumber', 'subscriptionId', subscriptionId);
}); });
} }


@ -19,6 +19,7 @@ import { handleActions } from 'redux-actions';
const initialState = { const initialState = {
blockNumber: new BigNumber(0), blockNumber: new BigNumber(0),
blockTimestamp: new Date(),
devLogs: [], devLogs: [],
devLogsLevels: null, devLogsLevels: null,
devLogsEnabled: false, devLogsEnabled: false,


@ -17,7 +17,12 @@
import { combineReducers } from 'redux'; import { combineReducers } from 'redux';
import { routerReducer } from 'react-router-redux'; import { routerReducer } from 'react-router-redux';
import { apiReducer, balancesReducer, blockchainReducer, compilerReducer, imagesReducer, personalReducer, signerReducer, statusReducer as nodeStatusReducer, snackbarReducer, walletReducer } from './providers'; import {
apiReducer, balancesReducer, blockchainReducer,
compilerReducer, imagesReducer, personalReducer,
signerReducer, statusReducer as nodeStatusReducer,
snackbarReducer, walletReducer
} from './providers';
import certificationsReducer from './providers/certifications/reducer'; import certificationsReducer from './providers/certifications/reducer';
import errorReducer from '~/ui/Errors/reducers'; import errorReducer from '~/ui/Errors/reducers';


@ -32,9 +32,9 @@ const storeCreation = window.devToolsExtension
? window.devToolsExtension()(createStore) ? window.devToolsExtension()(createStore)
: createStore; : createStore;
export default function (api) { export default function (api, browserHistory) {
const reducers = initReducers(); const reducers = initReducers();
const middleware = initMiddleware(api); const middleware = initMiddleware(api, browserHistory);
const store = applyMiddleware(...middleware)(storeCreation)(reducers); const store = applyMiddleware(...middleware)(storeCreation)(reducers);
new BalancesProvider(store, api).start(); new BalancesProvider(store, api).start();


@ -17,11 +17,13 @@
import React, { Component, PropTypes } from 'react'; import React, { Component, PropTypes } from 'react';
import { Toolbar, ToolbarGroup } from 'material-ui/Toolbar'; import { Toolbar, ToolbarGroup } from 'material-ui/Toolbar';
import { nodeOrStringProptype } from '~/util/proptypes';
import styles from './actionbar.css'; import styles from './actionbar.css';
export default class Actionbar extends Component { export default class Actionbar extends Component {
static propTypes = { static propTypes = {
title: PropTypes.string, title: nodeOrStringProptype(),
buttons: PropTypes.array, buttons: PropTypes.array,
children: PropTypes.node, children: PropTypes.node,
className: PropTypes.string className: PropTypes.string


@ -0,0 +1,38 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React from 'react';
import { shallow } from 'enzyme';
import Actionbar from './actionbar';
function renderShallow (props) {
return shallow(
<Actionbar { ...props } />
);
}
describe('ui/Actionbar', () => {
describe('rendering', () => {
it('renders defaults', () => {
expect(renderShallow()).to.be.ok;
});
it('renders with the specified className', () => {
expect(renderShallow({ className: 'testClass' })).to.have.className('testClass');
});
});
});


@ -0,0 +1,38 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React from 'react';
import { shallow } from 'enzyme';
import Badge from './badge';
function renderShallow (props) {
return shallow(
<Badge { ...props } />
);
}
describe('ui/Badge', () => {
describe('rendering', () => {
it('renders defaults', () => {
expect(renderShallow()).to.be.ok;
});
it('renders with the specified className', () => {
expect(renderShallow({ className: 'testClass' })).to.have.className('testClass');
});
});
});


@ -17,16 +17,15 @@
import React, { Component, PropTypes } from 'react'; import React, { Component, PropTypes } from 'react';
import { FlatButton } from 'material-ui'; import { FlatButton } from 'material-ui';
import { nodeOrStringProptype } from '~/util/proptypes';
export default class Button extends Component { export default class Button extends Component {
static propTypes = { static propTypes = {
backgroundColor: PropTypes.string, backgroundColor: PropTypes.string,
className: PropTypes.string, className: PropTypes.string,
disabled: PropTypes.bool, disabled: PropTypes.bool,
icon: PropTypes.node, icon: PropTypes.node,
label: PropTypes.oneOfType([ label: nodeOrStringProptype(),
React.PropTypes.string,
React.PropTypes.object
]),
onClick: PropTypes.func, onClick: PropTypes.func,
primary: PropTypes.bool primary: PropTypes.bool
} }


@ -0,0 +1,38 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React from 'react';
import { shallow } from 'enzyme';
import Button from './button';
function renderShallow (props) {
return shallow(
<Button { ...props } />
);
}
describe('ui/Button', () => {
describe('rendering', () => {
it('renders defaults', () => {
expect(renderShallow()).to.be.ok;
});
it('renders with the specified className', () => {
expect(renderShallow({ className: 'testClass' })).to.have.className('testClass');
});
});
});


@ -27,10 +27,6 @@ export default class Title extends Component {
byline: nodeOrStringProptype() byline: nodeOrStringProptype()
} }
state = {
name: 'Unnamed'
}
render () { render () {
const { className, title, byline } = this.props; const { className, title, byline } = this.props;


@ -0,0 +1,52 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React from 'react';
import { mount, shallow } from 'enzyme';
import Title from './title';
function renderShallow (props) {
return shallow(
<Title { ...props } />
);
}
function renderMount (props) {
return mount(
<Title { ...props } />
);
}
describe('ui/Container/Title', () => {
describe('rendering', () => {
it('renders defaults', () => {
expect(renderShallow()).to.be.ok;
});
it('renders with the specified className', () => {
expect(renderShallow({ className: 'testClass' })).to.have.className('testClass');
});
it('renders the specified title', () => {
expect(renderMount({ title: 'titleText' })).to.contain.text('titleText');
});
it('renders the specified byline', () => {
expect(renderMount({ byline: 'bylineText' })).to.contain.text('bylineText');
});
});
});


@ -0,0 +1,38 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React from 'react';
import { shallow } from 'enzyme';
import Container from './container';
function renderShallow (props) {
return shallow(
<Container { ...props } />
);
}
describe('ui/Container', () => {
describe('rendering', () => {
it('renders defaults', () => {
expect(renderShallow()).to.be.ok;
});
it('renders with the specified className', () => {
expect(renderShallow({ className: 'testClass' })).to.have.className('testClass');
});
});
});


@ -113,32 +113,38 @@ export default class Input extends Component {
<TextField <TextField
autoComplete='off' autoComplete='off'
className={ className } className={ className }
style={ textFieldStyle }
readOnly={ readOnly }
errorText={ error } errorText={ error }
floatingLabelFixed floatingLabelFixed
floatingLabelText={ label } floatingLabelText={ label }
fullWidth
hintText={ hint } hintText={ hint }
id={ NAME_ID }
inputStyle={ inputStyle }
fullWidth
max={ max }
min={ min }
multiLine={ multiLine } multiLine={ multiLine }
name={ NAME_ID } name={ NAME_ID }
id={ NAME_ID }
rows={ rows }
type={ type || 'text' }
underlineDisabledStyle={ UNDERLINE_DISABLED }
underlineStyle={ readOnly ? UNDERLINE_READONLY : UNDERLINE_NORMAL }
underlineFocusStyle={ readOnly ? { display: 'none' } : null }
underlineShow={ !hideUnderline }
value={ value }
onBlur={ this.onBlur } onBlur={ this.onBlur }
onChange={ this.onChange } onChange={ this.onChange }
onKeyDown={ this.onKeyDown } onKeyDown={ this.onKeyDown }
onPaste={ this.onPaste } onPaste={ this.onPaste }
inputStyle={ inputStyle }
min={ min } readOnly={ readOnly }
max={ max } rows={ rows }
style={ textFieldStyle }
type={ type || 'text' }
underlineDisabledStyle={ UNDERLINE_DISABLED }
underlineStyle={ readOnly ? UNDERLINE_READONLY : UNDERLINE_NORMAL }
underlineFocusStyle={ readOnly ? { display: 'none' } : null }
underlineShow={ !hideUnderline }
value={ value }
> >
{ children } { children }
</TextField> </TextField>


@ -53,13 +53,13 @@ export default class TypedInput extends Component {
}; };
state = { state = {
isEth: true, isEth: false,
ethValue: 0 ethValue: 0
}; };
componentDidMount () { componentWillMount () {
if (this.props.isEth && this.props.value) { if (this.props.isEth && this.props.value) {
this.setState({ ethValue: fromWei(this.props.value) }); this.setState({ isEth: true, ethValue: fromWei(this.props.value) });
} }
} }
@ -164,28 +164,32 @@ export default class TypedInput extends Component {
} }
if (type === ABI_TYPES.INT) { if (type === ABI_TYPES.INT) {
return this.renderNumber(); return this.renderEth();
} }
if (type === ABI_TYPES.FIXED) { if (type === ABI_TYPES.FIXED) {
return this.renderNumber(); return this.renderFloat();
} }
return this.renderDefault(); return this.renderDefault();
} }
renderEth () { renderEth () {
const { ethValue } = this.state; const { ethValue, isEth } = this.state;
const value = ethValue && typeof ethValue.toNumber === 'function' const value = ethValue && typeof ethValue.toNumber === 'function'
? ethValue.toNumber() ? ethValue.toNumber()
: ethValue; : ethValue;
const input = isEth
? this.renderFloat(value, this.onEthValueChange)
: this.renderInteger(value, this.onEthValueChange);
return ( return (
<div className={ styles.ethInput }> <div className={ styles.ethInput }>
<div className={ styles.input }> <div className={ styles.input }>
{ this.renderNumber(value, this.onEthValueChange) } { input }
{ this.state.isEth ? (<div className={ styles.label }>ETH</div>) : null } { isEth ? (<div className={ styles.label }>ETH</div>) : null }
</div> </div>
<div className={ styles.toggle }> <div className={ styles.toggle }>
<Toggle <Toggle
@ -198,8 +202,9 @@ export default class TypedInput extends Component {
); );
} }
renderNumber (value = this.props.value, onChange = this.onChange) { renderInteger (value = this.props.value, onChange = this.onChange) {
const { label, error, param, hint, min, max } = this.props; const { label, error, param, hint, min, max } = this.props;
const realValue = value && typeof value.toNumber === 'function' const realValue = value && typeof value.toNumber === 'function'
? value.toNumber() ? value.toNumber()
: value; : value;
@ -212,6 +217,35 @@ export default class TypedInput extends Component {
error={ error } error={ error }
onChange={ onChange } onChange={ onChange }
type='number' type='number'
step={ 1 }
min={ min !== null ? min : (param.signed ? null : 0) }
max={ max !== null ? max : null }
/>
);
}
/**
* Decimal numbers have to be input via text field
* because of some react issues with input number fields.
* Once the issue is fixed, this could be a number again.
*
* @see https://github.com/facebook/react/issues/1549
*/
renderFloat (value = this.props.value, onChange = this.onChange) {
const { label, error, param, hint, min, max } = this.props;
const realValue = value && typeof value.toNumber === 'function'
? value.toNumber()
: value;
return (
<Input
label={ label }
hint={ hint }
value={ realValue }
error={ error }
onChange={ onChange }
type='text'
min={ min !== null ? min : (param.signed ? null : 0) } min={ min !== null ? min : (param.signed ? null : 0) }
max={ max !== null ? max : null } max={ max !== null ? max : null }
/> />


@ -0,0 +1,76 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React from 'react';
import { mount } from 'enzyme';
import sinon from 'sinon';
import IdentityName from './identityName';
const ADDR_A = '0x123456789abcdef0123456789A';
const ADDR_B = '0x123456789abcdef0123456789B';
const ADDR_C = '0x123456789abcdef0123456789C';
const STORE = {
dispatch: sinon.stub(),
subscribe: sinon.stub(),
getState: () => {
return {
balances: {
tokens: {}
},
personal: {
accountsInfo: {
[ADDR_A]: { name: 'testing' },
[ADDR_B]: {}
}
}
};
}
};
function render (props) {
return mount(
<IdentityName
store={ STORE }
{ ...props } />
);
}
describe('ui/IdentityName', () => {
describe('rendering', () => {
it('renders defaults', () => {
expect(render()).to.be.ok;
});
describe('account not found', () => {
it('renders null with empty', () => {
expect(render({ address: ADDR_C, empty: true }).html()).to.be.null;
});
it('renders address without empty', () => {
expect(render({ address: ADDR_C }).text()).to.equal(ADDR_C);
});
it('renders short address with shorten', () => {
expect(render({ address: ADDR_C, shorten: true }).text()).to.equal('123456…56789c');
});
it('renders unknown with flag', () => {
expect(render({ address: ADDR_C, unknown: true }).text()).to.equal('UNNAMED');
});
});
});
});


@ -17,8 +17,8 @@
.layout { .layout {
padding: 0.25em 0.25em 1em 0.25em; padding: 0.25em 0.25em 1em 0.25em;
}
.layout>div { &>div {
padding-bottom: 0.75em; margin-bottom: 0.75em;
}
} }


@ -16,22 +16,39 @@
import React, { Component, PropTypes } from 'react'; import React, { Component, PropTypes } from 'react';
import Actionbar from '../Actionbar';
import { nodeOrStringProptype } from '~/util/proptypes';
import styles from './page.css'; import styles from './page.css';
export default class Page extends Component { export default class Page extends Component {
static propTypes = { static propTypes = {
buttons: PropTypes.array,
className: PropTypes.string, className: PropTypes.string,
children: PropTypes.node children: PropTypes.node,
title: nodeOrStringProptype()
}; };
render () { render () {
const { className, children } = this.props; const { buttons, className, children, title } = this.props;
const classes = `${styles.layout} ${className}`; const classes = `${styles.layout} ${className}`;
let actionbar = null;
if (title || buttons) {
actionbar = (
<Actionbar
buttons={ buttons }
title={ title } />
);
}
return ( return (
<div>
{ actionbar }
<div className={ classes }> <div className={ classes }>
{ children } { children }
</div> </div>
</div>
); );
} }
} }


@ -0,0 +1,55 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import getMuiTheme from 'material-ui/styles/getMuiTheme';
import lightBaseTheme from 'material-ui/styles/baseThemes/lightBaseTheme';
const muiTheme = getMuiTheme(lightBaseTheme);
import theme from './theme';
describe('ui/Theme', () => {
it('is MUI-based', () => {
expect(Object.keys(theme)).to.deep.equal(Object.keys(muiTheme).concat('parity'));
});
it('allows setting of Parity backgrounds', () => {
expect(typeof theme.parity.setBackgroundSeed === 'function').to.be.true;
expect(typeof theme.parity.getBackgroundStyle === 'function').to.be.true;
});
describe('parity', () => {
describe('setBackgroundSeed', () => {
const SEED = 'testseed';
beforeEach(() => {
theme.parity.setBackgroundSeed(SEED);
});
it('sets the correct theme values', () => {
expect(theme.parity.backgroundSeed).to.equal(SEED);
});
});
describe('getBackgroundStyle', () => {
it('generates a style containing background', () => {
const style = theme.parity.getBackgroundStyle();
expect(style).to.have.property('background');
});
});
});
});


@ -0,0 +1,17 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
export default from './txRow';


@ -0,0 +1,133 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import moment from 'moment';
import React, { Component, PropTypes } from 'react';
import { txLink, addressLink } from '~/3rdparty/etherscan/links';
import IdentityIcon from '../../IdentityIcon';
import IdentityName from '../../IdentityName';
import MethodDecoding from '../../MethodDecoding';
import styles from '../txList.css';
export default class TxRow extends Component {
static contextTypes = {
api: PropTypes.object.isRequired
};
static propTypes = {
tx: PropTypes.object.isRequired,
address: PropTypes.string.isRequired,
isTest: PropTypes.bool.isRequired,
block: PropTypes.object,
historic: PropTypes.bool,
className: PropTypes.string
};
static defaultProps = {
historic: true
};
render () {
const { tx, address, isTest, historic, className } = this.props;
return (
<tr className={ className || '' }>
{ this.renderBlockNumber(tx.blockNumber) }
{ this.renderAddress(tx.from) }
<td className={ styles.transaction }>
{ this.renderEtherValue(tx.value) }
<div></div>
<div>
<a
className={ styles.link }
href={ txLink(tx.hash, isTest) }
target='_blank'>
{ `${tx.hash.substr(2, 6)}...${tx.hash.slice(-6)}` }
</a>
</div>
</td>
{ this.renderAddress(tx.to) }
<td className={ styles.method }>
<MethodDecoding
historic={ historic }
address={ address }
transaction={ tx } />
</td>
</tr>
);
}
renderAddress (address) {
const { isTest } = this.props;
let esLink = null;
if (address) {
esLink = (
<a
href={ addressLink(address, isTest) }
target='_blank'
className={ styles.link }>
<IdentityName address={ address } shorten />
</a>
);
}
return (
<td className={ styles.address }>
<div className={ styles.center }>
<IdentityIcon
center
className={ styles.icon }
address={ address } />
</div>
<div className={ styles.center }>
{ esLink || 'DEPLOY' }
</div>
</td>
);
}
renderEtherValue (_value) {
const { api } = this.context;
const value = api.util.fromWei(_value);
if (value.eq(0)) {
return <div className={ styles.value }>{ ' ' }</div>;
}
return (
<div className={ styles.value }>
{ value.toFormat(5) }<small>ETH</small>
</div>
);
}
renderBlockNumber (_blockNumber) {
const { block } = this.props;
const blockNumber = _blockNumber.toNumber();
return (
<td className={ styles.timestamp }>
<div>{ blockNumber && block ? moment(block.timestamp).fromNow() : null }</div>
<div>{ blockNumber ? _blockNumber.toFormat() : 'Pending' }</div>
</td>
);
}
}

View File

@ -0,0 +1,51 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import BigNumber from 'bignumber.js';
import React from 'react';
import { shallow } from 'enzyme';
import sinon from 'sinon';
import Api from '~/api';
import TxRow from './txRow';
const api = new Api({ execute: sinon.stub() });
function renderShallow (props) {
return shallow(
<TxRow
{ ...props } />,
{ context: { api } }
);
}
describe('ui/TxRow', () => {
describe('rendering', () => {
it('renders defaults', () => {
const block = {
timestamp: new Date()
};
const tx = {
blockNumber: new BigNumber(123),
hash: '0x123456789abcdef0123456789abcdef0123456789abcdef',
value: new BigNumber(1)
};
expect(renderShallow({ block, tx })).to.be.ok;
});
});
});

View File

@ -45,6 +45,8 @@ export default class Store {
if (bnB.eq(0)) {
return bnB.eq(bnA) ? 0 : 1;
} else if (bnA.eq(0)) {
return -1;
}
return bnB.comparedTo(bnA);
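The added branch makes pending transactions (blockNumber 0) sort ahead of everything else, while mined transactions stay ordered newest-first. A minimal sketch of the comparator in isolation, using the same data shape as the store spec below (illustrative only, not the store itself):

import BigNumber from 'bignumber.js';

// Standalone copy of the comparator above, for illustration only.
function compareBlockNumbers (bnA, bnB) {
  if (bnB.eq(0)) {
    return bnB.eq(bnA) ? 0 : 1;
  } else if (bnA.eq(0)) {
    return -1;
  }
  return bnB.comparedTo(bnA);
}

const order = [0, 2, 0, 1]
  .map((n) => new BigNumber(n))
  .sort(compareBlockNumbers)
  .map((bn) => bn.toNumber());

console.log(order); // [ 0, 0, 2, 1 ] -- pending (block 0) first, then mined, newest block first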

View File

@ -0,0 +1,90 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import BigNumber from 'bignumber.js';
import sinon from 'sinon';
import Store from './store';
const SUBID = 123;
const BLOCKS = {
1: { blockhash: '0x1' },
2: { blockhash: '0x2' }
};
const TRANSACTIONS = {
'0x123': { blockNumber: new BigNumber(1) },
'0x234': { blockNumber: new BigNumber(0) },
'0x345': { blockNumber: new BigNumber(2) },
'0x456': { blockNumber: new BigNumber(0) }
};
describe('ui/TxList/store', () => {
let api;
let store;
beforeEach(() => {
api = {
subscribe: sinon.stub().resolves(SUBID),
eth: {
getBlockByNumber: (blockNumber) => {
return Promise.resolve(BLOCKS[blockNumber]);
}
}
};
store = new Store(api);
});
describe('create', () => {
it('has empty storage', () => {
expect(store.blocks).to.deep.equal({});
expect(store.sortedHashes.peek()).to.deep.equal([]);
expect(store.transactions).to.deep.equal({});
});
it('subscribes to eth_blockNumber', () => {
expect(api.subscribe).to.have.been.calledWith('eth_blockNumber');
expect(store._subscriptionId).to.equal(SUBID);
});
});
describe('addBlocks', () => {
beforeEach(() => {
store.addBlocks(BLOCKS);
});
it('adds the blocks to the list', () => {
expect(store.blocks).to.deep.equal(BLOCKS);
});
});
describe('addTransactions', () => {
beforeEach(() => {
store.addTransactions(TRANSACTIONS);
});
it('adds all transactions to the list', () => {
expect(store.transactions).to.deep.equal(TRANSACTIONS);
});
it('sorts transactions based on blockNumber', () => {
expect(store.sortedHashes.peek()).to.deep.equal(['0x234', '0x456', '0x345', '0x123']);
});
it('adds pending transactions to the pending queue', () => {
expect(store._pendingHashes).to.deep.equal(['0x234', '0x456']);
});
});
});

View File

@ -14,128 +14,16 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import moment from 'moment';
import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux';
import { bindActionCreators } from 'redux';
import { observer } from 'mobx-react';
import { txLink, addressLink } from '~/3rdparty/etherscan/links';
import IdentityIcon from '../IdentityIcon';
import IdentityName from '../IdentityName';
import MethodDecoding from '../MethodDecoding';
import Store from './store';
import TxRow from './TxRow';
import styles from './txList.css';
export class TxRow extends Component {
static contextTypes = {
api: PropTypes.object.isRequired
};
static propTypes = {
tx: PropTypes.object.isRequired,
address: PropTypes.string.isRequired,
isTest: PropTypes.bool.isRequired,
block: PropTypes.object,
historic: PropTypes.bool,
className: PropTypes.string
};
static defaultProps = {
historic: true
};
render () {
const { tx, address, isTest, historic, className } = this.props;
return (
<tr className={ className || '' }>
{ this.renderBlockNumber(tx.blockNumber) }
{ this.renderAddress(tx.from) }
<td className={ styles.transaction }>
{ this.renderEtherValue(tx.value) }
<div></div>
<div>
<a
className={ styles.link }
href={ txLink(tx.hash, isTest) }
target='_blank'>
{ `${tx.hash.substr(2, 6)}...${tx.hash.slice(-6)}` }
</a>
</div>
</td>
{ this.renderAddress(tx.to) }
<td className={ styles.method }>
<MethodDecoding
historic={ historic }
address={ address }
transaction={ tx } />
</td>
</tr>
);
}
renderAddress (address) {
const { isTest } = this.props;
let esLink = null;
if (address) {
esLink = (
<a
href={ addressLink(address, isTest) }
target='_blank'
className={ styles.link }>
<IdentityName address={ address } shorten />
</a>
);
}
return (
<td className={ styles.address }>
<div className={ styles.center }>
<IdentityIcon
center
className={ styles.icon }
address={ address } />
</div>
<div className={ styles.center }>
{ esLink || 'DEPLOY' }
</div>
</td>
);
}
renderEtherValue (_value) {
const { api } = this.context;
const value = api.util.fromWei(_value);
if (value.eq(0)) {
return <div className={ styles.value }>{ ' ' }</div>;
}
return (
<div className={ styles.value }>
{ value.toFormat(5) }<small>ETH</small>
</div>
);
}
renderBlockNumber (_blockNumber) {
const { block } = this.props;
const blockNumber = _blockNumber.toNumber();
return (
<td className={ styles.timestamp }>
<div>{ blockNumber && block ? moment(block.timestamp).fromNow() : null }</div>
<div>{ blockNumber ? _blockNumber.toFormat() : 'Pending' }</div>
</td>
);
}
}
@observer
class TxList extends Component {
static contextTypes = {

View File

@ -0,0 +1,54 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React from 'react';
import { shallow } from 'enzyme';
import sinon from 'sinon';
import Api from '~/api';
import TxList from './txList';
const api = new Api({ execute: sinon.stub() });
const STORE = {
dispatch: sinon.stub(),
subscribe: sinon.stub(),
getState: () => {
return {
nodeStatus: {
isTest: true
}
};
}
};
function renderShallow (props) {
return shallow(
<TxList
store={ STORE }
{ ...props } />,
{ context: { api } }
);
}
describe('ui/TxList', () => {
describe('rendering', () => {
it('renders defaults', () => {
expect(renderShallow()).to.be.ok;
});
});
});

View File

@ -0,0 +1,45 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import Push from 'push.js';
import BigNumber from 'bignumber.js';
import { noop } from 'lodash';
import { fromWei } from '~/api/util/wei';
import ethereumIcon from '~/../assets/images/contracts/ethereum-black-64x64.png';
import unkownIcon from '~/../assets/images/contracts/unknown-64x64.png';
export function notifyTransaction (account, token, _value, onClick) {
const name = account.name || account.address;
const value = token.tag.toLowerCase() === 'eth'
? fromWei(_value)
: _value.div(new BigNumber(token.format || 1));
const icon = token.tag.toLowerCase() === 'eth'
? ethereumIcon
: (token.image || unkownIcon);
Push.create(`${name}`, {
body: `You just received ${value.toFormat()} ${token.tag.toUpperCase()}`,
icon: {
x16: icon,
x32: icon
},
timeout: 20000,
onClick: onClick || noop
});
}
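A minimal usage sketch, assuming an account/token shape like the one the app passes in (the import path and the concrete values here are hypothetical):

import BigNumber from 'bignumber.js';
import { notifyTransaction } from './notifications'; // hypothetical path to this module

const account = { name: 'Main account', address: '0x0000000000000000000000000000000000000001' };
const token = { tag: 'ETH' };

// Fires a desktop notification for an incoming 1 ETH transfer (the value is passed in wei).
notifyTransaction(account, token, new BigNumber('1000000000000000000'), () => {
  console.log('notification clicked');
});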

View File

@ -14,9 +14,10 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { range } from 'lodash';
import { range, uniq } from 'lodash';
import { bytesToHex, toHex } from '~/api/util/format';
import { validateAddress } from '~/util/validation';
export default class WalletsUtils {
@ -26,10 +27,82 @@ export default class WalletsUtils {
static fetchOwners (walletContract) {
const walletInstance = walletContract.instance;
return walletInstance
.m_numOwners.call()
.then((mNumOwners) => {
return Promise.all(range(mNumOwners.toNumber()).map((idx) => walletInstance.getOwner.call({}, [ idx ])));
const promises = range(mNumOwners.toNumber())
.map((idx) => walletInstance.getOwner.call({}, [ idx ]));
return Promise
.all(promises)
.then((owners) => {
const uniqOwners = uniq(owners);
// If all owners are the zero account, this must be a Mist wallet contract
if (uniqOwners.length === 1 && /^(0x)?0*$/.test(owners[0])) {
return WalletsUtils.fetchMistOwners(walletContract, mNumOwners.toNumber());
}
return owners;
});
});
}
static fetchMistOwners (walletContract, mNumOwners) {
const walletAddress = walletContract.address;
return WalletsUtils
.getMistOwnersOffset(walletContract)
.then((result) => {
if (!result || result.offset === -1) {
return [];
}
const owners = [ result.address ];
if (mNumOwners === 1) {
return owners;
}
const initOffset = result.offset + 1;
let promise = Promise.resolve();
range(initOffset, initOffset + mNumOwners - 1).forEach((offset) => {
promise = promise
.then(() => {
return walletContract.api.eth.getStorageAt(walletAddress, offset);
})
.then((result) => {
const resultAddress = '0x' + (result || '').slice(-40);
const { address } = validateAddress(resultAddress);
owners.push(address);
});
});
return promise.then(() => owners);
});
}
static getMistOwnersOffset (walletContract, offset = 3) {
return walletContract.api.eth
.getStorageAt(walletContract.address, offset)
.then((result) => {
if (result && !/^(0x)?0*$/.test(result)) {
const resultAddress = '0x' + result.slice(-40);
const { address, addressError } = validateAddress(resultAddress);
if (!addressError) {
return { offset, address };
}
}
if (offset >= 100) {
return { offset: -1 };
}
return WalletsUtils.getMistOwnersOffset(walletContract, offset + 1);
});
}
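In short: if every getOwner slot returns the zero address, the contract is assumed to be a Mist wallet, so the owners are read straight from contract storage instead. getMistOwnersOffset probes slots from offset 3 upwards (giving up at 100) until one validates as an address, and the remaining m_numOwners - 1 owners are read from the slots that follow. A rough usage sketch, where walletContract stands for a contract object as used elsewhere in the UI (e.g. from api.newContract):

WalletsUtils
  .fetchOwners(walletContract)
  .then((owners) => {
    // Falls back to the storage probing above for Mist-style wallets.
    console.log(`wallet has ${owners.length} owner(s):`, owners);
  });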

View File

@ -31,12 +31,14 @@ export default class Header extends Component {
account: PropTypes.object,
balance: PropTypes.object,
className: PropTypes.string,
children: PropTypes.node
children: PropTypes.node,
isContract: PropTypes.bool
};
static defaultProps = {
className: '',
children: null
children: null,
isContract: false
};
render () {
@ -88,9 +90,9 @@ export default class Header extends Component {
}
renderTxCount () {
const { balance } = this.props;
const { balance, isContract } = this.props;
if (!balance) {
if (!balance || isContract) {
return null;
}

View File

@ -75,6 +75,13 @@ export default class Summary extends Component {
return true;
}
const prevOwners = this.props.owners;
const nextOwners = nextProps.owners;
if (!isEqual(prevOwners, nextOwners)) {
return true;
}
return false;
}
@ -123,8 +130,8 @@ export default class Summary extends Component {
return (
<div className={ styles.owners }>
{
ownersValid.map((owner) => (
ownersValid.map((owner, index) => (
<div key={ owner.address }>
<div key={ `${index}_${owner.address}` }>
<div
data-tip
data-for={ `owner_${owner.address}` }

View File

@ -188,7 +188,7 @@ class TabBar extends Component {
return (
<ToolbarGroup>
<div className={ styles.logo }>
<img src={ imagesEthcoreBlock } />
<img src={ imagesEthcoreBlock } height={ 28 } />
</div>
</ToolbarGroup>
);

View File

@ -17,7 +17,7 @@
import React, { Component, PropTypes } from 'react';
import { uniq } from 'lodash';
import { Container } from '~/ui';
import { Container, Loading } from '~/ui';
import Event from './Event';
import styles from '../contract.css';
@ -25,18 +25,38 @@ import styles from '../contract.css';
export default class Events extends Component {
static contextTypes = {
api: PropTypes.object
}
};
static propTypes = {
events: PropTypes.array,
isTest: PropTypes.bool.isRequired
}
isTest: PropTypes.bool.isRequired,
isLoading: PropTypes.bool,
events: PropTypes.array
};
static defaultProps = {
isLoading: false,
events: []
};
render () {
const { events, isTest } = this.props;
const { events, isTest, isLoading } = this.props;
if (isLoading) {
return (
<Container title='events'>
<div>
<Loading size={ 2 } />
</div>
</Container>
);
}
if (!events || !events.length) {
return null;
return (
<Container title='events'>
<p>No events have been sent from this contract.</p>
</Container>
);
}
const eventsKey = uniq(events.map((e) => e.key));

View File

@ -54,6 +54,10 @@ export default class Queries extends Component {
.filter((fn) => fn.inputs.length > 0)
.map((fn) => this.renderInputQuery(fn));
if (queries.length + noInputQueries.length + withInputQueries.length === 0) {
return null;
}
return (
<Container title='queries'>
<div className={ styles.methods }>

View File

@ -40,7 +40,7 @@ import styles from './contract.css';
class Contract extends Component {
static contextTypes = {
api: React.PropTypes.object.isRequired
}
};
static propTypes = {
setVisibleAccounts: PropTypes.func.isRequired,
@ -50,7 +50,7 @@ class Contract extends Component {
contracts: PropTypes.object,
isTest: PropTypes.bool,
params: PropTypes.object
}
};
state = {
contract: null,
@ -64,8 +64,9 @@ class Contract extends Component {
allEvents: [],
minedEvents: [],
pendingEvents: [],
queryValues: {}
}
queryValues: {},
loadingEvents: true
};
componentDidMount () {
const { api } = this.context;
@ -115,7 +116,7 @@ class Contract extends Component {
render () {
const { balances, contracts, params, isTest } = this.props;
const { allEvents, contract, queryValues } = this.state;
const { allEvents, contract, queryValues, loadingEvents } = this.state;
const account = contracts[params.address];
const balance = balances[params.address];
@ -133,13 +134,19 @@ class Contract extends Component {
<Header
account={ account }
balance={ balance }
isContract
/>
<Queries
contract={ contract }
values={ queryValues } />
values={ queryValues }
/>
<Events
isTest={ isTest }
events={ allEvents } />
isLoading={ loadingEvents }
events={ allEvents }
/>
{ this.renderDetails(account) }
</Page>
@ -358,6 +365,10 @@ class Contract extends Component {
}
_receiveEvents = (error, logs) => {
if (this.state.loadingEvents) {
this.setState({ loadingEvents: false });
}
if (error) {
console.error('_receiveEvents', error);
return;

View File

@ -51,9 +51,16 @@ export default class Dapp extends Component {
src = `${dappsUrl}/${app.contentHash}/`;
break;
default:
const dapphost = process.env.NODE_ENV === 'production' && !app.secure
let dapphost = process.env.DAPPS_URL || (
process.env.NODE_ENV === 'production' && !app.secure
? `${dappsUrl}/ui`
: '';
: ''
);
if (dapphost === '/') {
dapphost = '';
}
src = `${dapphost}/${app.url}.html`;
break;
}
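A small, self-contained illustration of the resolution above; DAPPS_URL and NODE_ENV stand in for the values injected into process.env at build time, and the concrete URLs are hypothetical:

function resolveDappSrc (app, dappsUrl, env) {
  let dapphost = env.DAPPS_URL || (
    env.NODE_ENV === 'production' && !app.secure
      ? `${dappsUrl}/ui`
      : ''
  );
  if (dapphost === '/') {
    dapphost = '';
  }
  return `${dapphost}/${app.url}.html`;
}

// No override, production build, non-secure app:
console.log(resolveDappSrc({ url: 'myapp', secure: false }, 'http://127.0.0.1:8080', { NODE_ENV: 'production' }));
// -> 'http://127.0.0.1:8080/ui/myapp.html'

// DAPPS_URL='/' serves the dapp from the UI origin itself:
console.log(resolveDappSrc({ url: 'myapp', secure: false }, 'http://127.0.0.1:8080', { NODE_ENV: 'production', DAPPS_URL: '/' }));
// -> '/myapp.html'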

View File

@ -17,7 +17,7 @@
import BigNumber from 'bignumber.js';
import { getShortData, getFee, getTotalValue } from './transaction';
describe('util/transaction', () => {
describe('views/Signer/components/util/transaction', () => {
describe('getEstimatedMiningTime', () => {
it('should return estimated mining time', () => {
});

View File

@ -23,8 +23,7 @@ export default class Signer extends Component {
render () {
return (
<div>
<Actionbar
title='Trusted Signer' />
<Actionbar title='Trusted Signer' />
<RequestsPage />
</div>
);

View File

@ -30,12 +30,6 @@ export default class Store {
}
}
@action unsubscribe () {
if (this._timeoutId) {
clearTimeout(this._timeoutId);
}
}
@action setBalance = (address, balance) => {
this.setBalances({ [address]: balance });
}
@ -50,6 +44,12 @@ export default class Store {
}
}
@action unsubscribe () {
if (this._timeoutId) {
clearTimeout(this._timeoutId);
}
}
fetchBalance (address) {
this._api.eth
.getBalance(address)

View File

@ -21,7 +21,7 @@ import getMuiTheme from 'material-ui/styles/getMuiTheme';
import WrappedAutoComplete from './AutoComplete';
describe('components/AutoComplete', () => {
describe('views/Status/components/AutoComplete', () => {
describe('rendering', () => {
let rendered;

View File

@ -19,7 +19,7 @@ import { shallow } from 'enzyme';
import Box from './Box';
describe('components/Box', () => {
describe('views/Status/components/Box', () => {
describe('rendering', () => {
const title = 'test title';
let rendered;

Some files were not shown because too many files have changed in this diff.