Merge branch 'master' into fix_suicide

Conflicts:
	src/externalities.rs

commit 6403287aa8
13  Cargo.toml

@@ -17,20 +17,13 @@ heapsize = "0.2.0"
 rust-crypto = "0.2.34"
 time = "0.1"
 #interpolate_idents = { git = "https://github.com/SkylerLipthay/interpolate_idents" }
-evmjit = { path = "rust-evmjit", optional = true }
+evmjit = { path = "evmjit", optional = true }
 ethash = { path = "ethash" }
 num_cpus = "0.2"
-docopt = "0.6"
-docopt_macros = "0.6"
-ctrlc = "1.0"
-crossbeam = "0.1.5"
 clippy = "0.0.37"
+crossbeam = "0.1.5"
 
 [features]
 jit = ["evmjit"]
+evm_debug = []
 test-heavy = []
-evm-debug = []
-
-[[bin]]
-name = "client"
-path = "src/bin/client/main.rs"
21  bin/Cargo.toml  (new file)

[package]
description = "Ethcore client."
name = "ethcore-client"
version = "0.1.0"
license = "GPL-3.0"
authors = ["Ethcore <admin@ethcore.io>"]

[dependencies]
log = "0.3"
env_logger = "0.3"
rustc-serialize = "0.3"
docopt = "0.6"
docopt_macros = "0.6"
ctrlc = "1.0"
ethcore-util = { path = "../util" }
ethcore-rpc = { path = "../rpc", optional = true }
ethcore = { path = ".." }
clippy = "0.0.37"

[features]
rpc = ["ethcore-rpc"]
@@ -1,5 +1,9 @@
+//! Ethcore client application.
+
+#![warn(missing_docs)]
 #![feature(plugin)]
 #![plugin(docopt_macros)]
+#![plugin(clippy)]
 extern crate docopt;
 extern crate rustc_serialize;
 extern crate ethcore_util as util;
@@ -8,6 +12,9 @@ extern crate log;
 extern crate env_logger;
 extern crate ctrlc;
+
+#[cfg(feature = "rpc")]
+extern crate ethcore_rpc as rpc;
 
 use std::env;
 use log::{LogLevelFilter};
 use env_logger::LogBuilder;
@@ -31,7 +38,7 @@ Options:
   -h --help  Show this screen.
 ");
 
-fn setup_log(init: &String) {
+fn setup_log(init: &str) {
 	let mut builder = LogBuilder::new();
 	builder.filter(None, LogLevelFilter::Info);
 
@@ -44,6 +51,23 @@ fn setup_log(init: &String) {
 	builder.init().unwrap();
 }
 
+#[cfg(feature = "rpc")]
+fn setup_rpc_server(client: Arc<Client>) {
+	use rpc::v1::*;
+
+	let mut server = rpc::HttpServer::new(1);
+	server.add_delegate(Web3Client::new().to_delegate());
+	server.add_delegate(EthClient::new(client.clone()).to_delegate());
+	server.add_delegate(EthFilterClient::new(client).to_delegate());
+	server.add_delegate(NetClient::new().to_delegate());
+	server.start_async("127.0.0.1:3030");
+}
+
+#[cfg(not(feature = "rpc"))]
+fn setup_rpc_server(_client: Arc<Client>) {
+}
+
 fn main() {
 	let args: Args = Args::docopt().decode().unwrap_or_else(|e| e.exit());
 
@@ -57,6 +81,7 @@ fn main() {
 	let mut net_settings = NetworkConfiguration::new();
 	net_settings.boot_nodes = init_nodes;
 	let mut service = ClientService::start(spec, net_settings).unwrap();
+	setup_rpc_server(service.client());
 	let io_handler = Arc::new(ClientIoHandler { client: service.client(), info: Default::default(), sync: service.sync() });
 	service.io().register_handler(io_handler).expect("Error registering IO handler");
19  rpc/Cargo.toml  (new file)

[package]
description = "Ethcore jsonrpc"
name = "ethcore-rpc"
version = "0.1.0"
license = "GPL-3.0"
authors = ["Ethcore <admin@ethcore.io"]

[lib]

[dependencies]
serde = "0.6.7"
serde_macros = "0.6.10"
serde_json = "0.6.0"
jsonrpc-core = "1.1"
jsonrpc-http-server = "1.1"
ethcore-util = { path = "../util" }
ethcore = { path = ".." }
clippy = "0.0.37"
47  rpc/src/lib.rs  (new file)

//! Ethcore rpc.
#![warn(missing_docs)]
#![feature(custom_derive, custom_attribute, plugin)]
#![plugin(serde_macros)]
#![plugin(clippy)]

extern crate serde;
extern crate serde_json;
extern crate jsonrpc_core;
extern crate jsonrpc_http_server;
extern crate ethcore_util as util;
extern crate ethcore;

use self::jsonrpc_core::{IoHandler, IoDelegate};

macro_rules! rpcerr {
	() => (Err(Error::internal_error()))
}

pub mod v1;

/// Http server.
pub struct HttpServer {
	handler: IoHandler,
	threads: usize
}

impl HttpServer {
	/// Construct new http server object with given number of threads.
	pub fn new(threads: usize) -> HttpServer {
		HttpServer {
			handler: IoHandler::new(),
			threads: threads
		}
	}

	/// Add io delegate.
	pub fn add_delegate<D>(&mut self, delegate: IoDelegate<D>) where D: Send + Sync + 'static {
		self.handler.add_delegate(delegate);
	}

	/// Start server asynchronously in new thread
	pub fn start_async(self, addr: &str) {
		let server = jsonrpc_http_server::Server::new(self.handler, self.threads);
		server.start_async(addr)
	}
}
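A minimal usage sketch (not part of the diff) of the HttpServer wrapper above; it mirrors the setup_rpc_server function added to the client binary and assumes the v1 delegates introduced later in this commit:

	// Aggregate several IoDelegates into one JSON-RPC endpoint and serve it.
	let mut server = HttpServer::new(1);
	server.add_delegate(v1::Web3Client::new().to_delegate());
	server.add_delegate(v1::NetClient::new().to_delegate());
	server.start_async("127.0.0.1:3030"); // consumes the server; it runs on a background thread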
132  rpc/src/v1/impls/eth.rs  (new file)

//! Eth rpc implementation.
use std::sync::Arc;
use jsonrpc_core::*;
use util::hash::*;
use util::uint::*;
use util::sha3::*;
use ethcore::client::*;
use ethcore::views::*;
use v1::traits::{Eth, EthFilter};
use v1::types::Block;

/// Eth rpc implementation.
pub struct EthClient {
	client: Arc<Client>,
}

impl EthClient {
	/// Creates new EthClient.
	pub fn new(client: Arc<Client>) -> Self {
		EthClient {
			client: client
		}
	}
}

impl Eth for EthClient {
	fn protocol_version(&self, params: Params) -> Result<Value, Error> {
		match params {
			Params::None => Ok(Value::U64(63)),
			_ => Err(Error::invalid_params())
		}
	}

	fn author(&self, params: Params) -> Result<Value, Error> {
		match params {
			Params::None => to_value(&Address::new()),
			_ => Err(Error::invalid_params())
		}
	}

	fn gas_price(&self, params: Params) -> Result<Value, Error> {
		match params {
			Params::None => Ok(Value::U64(0)),
			_ => Err(Error::invalid_params())
		}
	}

	fn block_number(&self, params: Params) -> Result<Value, Error> {
		match params {
			Params::None => Ok(Value::U64(self.client.chain_info().best_block_number)),
			_ => Err(Error::invalid_params())
		}
	}

	fn is_mining(&self, params: Params) -> Result<Value, Error> {
		match params {
			Params::None => Ok(Value::Bool(false)),
			_ => Err(Error::invalid_params())
		}
	}

	fn hashrate(&self, params: Params) -> Result<Value, Error> {
		match params {
			Params::None => Ok(Value::U64(0)),
			_ => Err(Error::invalid_params())
		}
	}

	fn block_transaction_count(&self, _: Params) -> Result<Value, Error> {
		Ok(Value::U64(0))
	}

	fn block(&self, params: Params) -> Result<Value, Error> {
		match from_params::<(H256, bool)>(params) {
			Ok((hash, _include_txs)) => match (self.client.block_header(&hash), self.client.block_total_difficulty(&hash)) {
				(Some(bytes), Some(total_difficulty)) => {
					let view = HeaderView::new(&bytes);
					let block = Block {
						hash: view.sha3(),
						parent_hash: view.parent_hash(),
						uncles_hash: view.uncles_hash(),
						author: view.author(),
						miner: view.author(),
						state_root: view.state_root(),
						transactions_root: view.transactions_root(),
						receipts_root: view.receipts_root(),
						number: U256::from(view.number()),
						gas_used: view.gas_used(),
						gas_limit: view.gas_limit(),
						logs_bloom: view.log_bloom(),
						timestamp: U256::from(view.timestamp()),
						difficulty: view.difficulty(),
						total_difficulty: total_difficulty,
						uncles: vec![],
						transactions: vec![]
					};
					to_value(&block)
				},
				_ => Ok(Value::Null)
			},
			Err(err) => Err(err)
		}
	}
}

/// Eth filter rpc implementation.
pub struct EthFilterClient {
	client: Arc<Client>
}

impl EthFilterClient {
	/// Creates new Eth filter client.
	pub fn new(client: Arc<Client>) -> Self {
		EthFilterClient {
			client: client
		}
	}
}

impl EthFilter for EthFilterClient {
	fn new_block_filter(&self, _params: Params) -> Result<Value, Error> {
		Ok(Value::U64(0))
	}

	fn new_pending_transaction_filter(&self, _params: Params) -> Result<Value, Error> {
		Ok(Value::U64(1))
	}

	fn filter_changes(&self, _: Params) -> Result<Value, Error> {
		to_value(&self.client.chain_info().best_block_hash).map(|v| Value::Array(vec![v]))
	}
}
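A hedged sketch (not part of the diff) of the parameter decoding used by EthClient::block above: from_params turns the positional JSON params of eth_getBlockByHash into a typed tuple, and malformed input surfaces as Error::invalid_params from jsonrpc_core; the hash literal is only an illustration.

	// Assumes the same imports as the file above (jsonrpc_core::*, util::hash::*).
	fn decode_block_params(params: Params) -> Result<(H256, bool), Error> {
		// e.g. params = ["0x3d6122660cc824376f11ee842f83addc3525e2dd6756b9bcf0affa6aa88cf741", true]
		from_params::<(H256, bool)>(params)
	}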
8  rpc/src/v1/impls/mod.rs  (new file)

//! Ethereum rpc interface implementation.
mod web3;
mod eth;
mod net;

pub use self::web3::Web3Client;
pub use self::eth::{EthClient, EthFilterClient};
pub use self::net::NetClient;
21  rpc/src/v1/impls/net.rs  (new file)

//! Net rpc implementation.
use jsonrpc_core::*;
use v1::traits::Net;

/// Net rpc implementation.
pub struct NetClient;

impl NetClient {
	/// Creates new NetClient.
	pub fn new() -> Self { NetClient }
}

impl Net for NetClient {
	fn version(&self, _: Params) -> Result<Value, Error> {
		Ok(Value::U64(63))
	}

	fn peer_count(&self, _params: Params) -> Result<Value, Error> {
		Ok(Value::U64(0))
	}
}
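A hedged sketch of how a delegate built from these implementations answers a request once registered; it assumes jsonrpc-core 1.1's synchronous IoHandler::handle_request(&str) -> Option<String> (the HttpServer added in rpc/src/lib.rs wraps the same handler):

	let mut handler = IoHandler::new();
	handler.add_delegate(NetClient::new().to_delegate());
	let request = r#"{"jsonrpc": "2.0", "method": "net_peerCount", "params": [], "id": 1}"#;
	let response = handler.handle_request(request);
	// expected: Some(r#"{"jsonrpc":"2.0","result":0,"id":1}"#)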
21  rpc/src/v1/impls/web3.rs  (new file)

//! Web3 rpc implementation.
use jsonrpc_core::*;
use v1::traits::Web3;

/// Web3 rpc implementation.
pub struct Web3Client;

impl Web3Client {
	/// Creates new Web3Client.
	pub fn new() -> Self { Web3Client }
}

impl Web3 for Web3Client {
	fn client_version(&self, params: Params) -> Result<Value, Error> {
		match params {
			//Params::None => Ok(Value::String("parity/0.1.0/-/rust1.7-nightly".to_owned())),
			Params::None => Ok(Value::String("surprise/0.1.0/surprise/surprise".to_owned())),
			_ => Err(Error::invalid_params())
		}
	}
}
10  rpc/src/v1/mod.rs  (new file)

//! Ethcore rpc v1.
//!
//! Compliant with ethereum rpc.

pub mod traits;
mod impls;
mod types;

pub use self::traits::{Web3, Eth, EthFilter, Net};
pub use self::impls::*;
67  rpc/src/v1/traits/eth.rs  (new file)

//! Eth rpc interface.
use std::sync::Arc;
use jsonrpc_core::*;

/// Eth rpc interface.
pub trait Eth: Sized + Send + Sync + 'static {
	/// Returns protocol version.
	fn protocol_version(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns block author.
	fn author(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns current gas price.
	fn gas_price(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns highest block number.
	fn block_number(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns block with given index / hash.
	fn block(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns true if client is actively mining new blocks.
	fn is_mining(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns the number of hashes per second that the node is mining with.
	fn hashrate(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns the number of transactions in a block.
	fn block_transaction_count(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Should be used to convert object to io delegate.
	fn to_delegate(self) -> IoDelegate<Self> {
		let mut delegate = IoDelegate::new(Arc::new(self));
		delegate.add_method("eth_protocolVersion", Eth::protocol_version);
		delegate.add_method("eth_coinbase", Eth::author);
		delegate.add_method("eth_gasPrice", Eth::gas_price);
		delegate.add_method("eth_blockNumber", Eth::block_number);
		delegate.add_method("eth_getBlockByHash", Eth::block);
		delegate.add_method("eth_getBlockByNumber", Eth::block);
		delegate.add_method("eth_mining", Eth::is_mining);
		delegate.add_method("eth_hashrate", Eth::hashrate);
		delegate.add_method("eth_getBlockTransactionCountByNumber", Eth::block_transaction_count);
		delegate
	}
}

/// Eth filters rpc api (polling).
// TODO: do filters api properly
pub trait EthFilter: Sized + Send + Sync + 'static {
	/// Returns id of new block filter.
	fn new_block_filter(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns id of new pending transaction filter.
	fn new_pending_transaction_filter(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns filter changes since last poll.
	fn filter_changes(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Should be used to convert object to io delegate.
	fn to_delegate(self) -> IoDelegate<Self> {
		let mut delegate = IoDelegate::new(Arc::new(self));
		delegate.add_method("eth_newBlockFilter", EthFilter::new_block_filter);
		delegate.add_method("eth_newPendingTransactionFilter", EthFilter::new_pending_transaction_filter);
		delegate.add_method("eth_getFilterChanges", EthFilter::filter_changes);
		delegate
	}
}
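A minimal sketch (an assumption, not part of the diff) of what the default method bodies above buy an implementor: only the calls a type actually supports need overriding, while every other registered eth_* method falls back to rpcerr!(), i.e. an internal error.

	struct NullEth; // hypothetical implementor, used only for illustration
	impl Eth for NullEth {
		fn protocol_version(&self, _: Params) -> Result<Value, Error> { Ok(Value::U64(63)) }
	}
	let delegate = NullEth.to_delegate(); // still exposes every eth_* name wired up in to_delegate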
8  rpc/src/v1/traits/mod.rs  (new file)

//! Ethereum rpc interfaces.
pub mod web3;
pub mod eth;
pub mod net;

pub use self::web3::Web3;
pub use self::eth::{Eth, EthFilter};
pub use self::net::Net;
20  rpc/src/v1/traits/net.rs  (new file)

//! Net rpc interface.
use std::sync::Arc;
use jsonrpc_core::*;

/// Net rpc interface.
pub trait Net: Sized + Send + Sync + 'static {
	/// Returns protocol version.
	fn version(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Returns number of peers connected to node.
	fn peer_count(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Should be used to convert object to io delegate.
	fn to_delegate(self) -> IoDelegate<Self> {
		let mut delegate = IoDelegate::new(Arc::new(self));
		delegate.add_method("net_version", Net::version);
		delegate.add_method("net_peerCount", Net::peer_count);
		delegate
	}
}
16  rpc/src/v1/traits/web3.rs  (new file)

//! Web3 rpc interface.
use std::sync::Arc;
use jsonrpc_core::*;

/// Web3 rpc interface.
pub trait Web3: Sized + Send + Sync + 'static {
	/// Returns current client version.
	fn client_version(&self, _: Params) -> Result<Value, Error> { rpcerr!() }

	/// Should be used to convert object to io delegate.
	fn to_delegate(self) -> IoDelegate<Self> {
		let mut delegate = IoDelegate::new(Arc::new(self));
		delegate.add_method("web3_clientVersion", Web3::client_version);
		delegate
	}
}
36  rpc/src/v1/types/block.rs  (new file)

use util::hash::*;
use util::uint::*;

#[derive(Default, Debug, Serialize)]
pub struct Block {
	pub hash: H256,
	#[serde(rename="parentHash")]
	pub parent_hash: H256,
	#[serde(rename="sha3Uncles")]
	pub uncles_hash: H256,
	pub author: Address,
	// TODO: get rid of this one
	pub miner: Address,
	#[serde(rename="stateRoot")]
	pub state_root: H256,
	#[serde(rename="transactionsRoot")]
	pub transactions_root: H256,
	#[serde(rename="receiptsRoot")]
	pub receipts_root: H256,
	pub number: U256,
	#[serde(rename="gasUsed")]
	pub gas_used: U256,
	#[serde(rename="gasLimit")]
	pub gas_limit: U256,
	// TODO: figure out how to properly serialize bytes
	//#[serde(rename="extraData")]
	//extra_data: Vec<u8>,
	#[serde(rename="logsBloom")]
	pub logs_bloom: H2048,
	pub timestamp: U256,
	pub difficulty: U256,
	#[serde(rename="totalDifficulty")]
	pub total_difficulty: U256,
	pub uncles: Vec<U256>,
	pub transactions: Vec<U256>
}
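A hedged sketch of the JSON this struct produces once serialized (assuming serde_json 0.6's to_string and the serde_macros-generated Serialize impl above); the rename attributes are what give the ethereum-style camelCase keys:

	let json = serde_json::to_string(&Block::default()).unwrap();
	// renamed fields come out as "parentHash", "sha3Uncles", "stateRoot", "transactionsRoot",
	// "receiptsRoot", "gasUsed", "gasLimit", "logsBloom" and "totalDifficulty";
	// un-renamed fields ("hash", "author", "miner", ...) keep their Rust names.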
3  rpc/src/v1/types/mod.rs  (new file)

mod block;

pub use self::block::Block;

@@ -158,7 +158,6 @@ impl BlockQueue {
 				},
 				Err(err) => {
 					let mut v = verification.lock().unwrap();
-					flushln!("Stage 2 block verification failed for {}\nError: {:?}", block_hash, err);
 					warn!(target: "client", "Stage 2 block verification failed for {}\nError: {:?}", block_hash, err);
 					v.bad.insert(block_hash.clone());
 					v.verifying.retain(|e| e.hash != block_hash);
@@ -200,34 +199,35 @@ impl BlockQueue {
 	/// Add a block to the queue.
 	pub fn import_block(&mut self, bytes: Bytes) -> ImportResult {
 		let header = BlockView::new(&bytes).header();
-		if self.processing.contains(&header.hash()) {
+		let h = header.hash();
+		if self.processing.contains(&h) {
 			return Err(ImportError::AlreadyQueued);
 		}
 		{
 			let mut verification = self.verification.lock().unwrap();
-			if verification.bad.contains(&header.hash()) {
+			if verification.bad.contains(&h) {
 				return Err(ImportError::Bad(None));
 			}
 
 			if verification.bad.contains(&header.parent_hash) {
-				verification.bad.insert(header.hash());
+				verification.bad.insert(h.clone());
 				return Err(ImportError::Bad(None));
 			}
 		}
 
 		match verify_block_basic(&header, &bytes, self.engine.deref().deref()) {
 			Ok(()) => {
-				self.processing.insert(header.hash());
+				self.processing.insert(h.clone());
 				self.verification.lock().unwrap().unverified.push_back(UnVerifiedBlock { header: header, bytes: bytes });
 				self.more_to_verify.notify_all();
+				Ok(h)
 			},
 			Err(err) => {
-				flushln!("Stage 1 block verification failed for {}\nError: {:?}", BlockView::new(&bytes).header_view().sha3(), err);
 				warn!(target: "client", "Stage 1 block verification failed for {}\nError: {:?}", BlockView::new(&bytes).header_view().sha3(), err);
-				self.verification.lock().unwrap().bad.insert(header.hash());
+				self.verification.lock().unwrap().bad.insert(h.clone());
+				Err(From::from(err))
 			}
 		}
-		Ok(())
 	}
 
 	/// Mark given block and all its children as bad. Stops verification.

@@ -565,15 +565,6 @@ impl BlockChain {
 		}
 	}
 
-	/// Tries to squeeze the cache if its too big.
-	pub fn squeeze_to_fit(&self, size: CacheSize) {
-		self.blocks.write().unwrap().squeeze(size.blocks);
-		self.block_details.write().unwrap().squeeze(size.block_details);
-		self.transaction_addresses.write().unwrap().squeeze(size.transaction_addresses);
-		self.block_logs.write().unwrap().squeeze(size.block_logs);
-		self.blocks_blooms.write().unwrap().squeeze(size.blocks_blooms);
-	}
-
 	/// Let the cache system know that a cacheable item has been used.
 	fn note_used(&self, id: CacheID) {
 		let mut cache_man = self.cache_man.write().unwrap();

@@ -66,6 +66,9 @@ pub trait BlockChainClient : Sync + Send {
 	/// Get block status by block header hash.
 	fn block_status(&self, hash: &H256) -> BlockStatus;
 
+	/// Get block total difficulty.
+	fn block_total_difficulty(&self, hash: &H256) -> Option<U256>;
+
 	/// Get raw block header data by block number.
 	fn block_header_at(&self, n: BlockNumber) -> Option<Bytes>;
 
@@ -79,6 +82,9 @@ pub trait BlockChainClient : Sync + Send {
 	/// Get block status by block number.
 	fn block_status_at(&self, n: BlockNumber) -> BlockStatus;
 
+	/// Get block total difficulty.
+	fn block_total_difficulty_at(&self, n: BlockNumber) -> Option<U256>;
+
 	/// Get a tree route between `from` and `to`.
 	/// See `BlockChain::tree_route`.
 	fn tree_route(&self, from: &H256, to: &H256) -> Option<TreeRoute>;
@@ -193,12 +199,12 @@ impl Client {
 	}
 
 	/// This is triggered by a message coming from a block queue when the block is ready for insertion
-	pub fn import_verified_blocks(&self, _io: &IoChannel<NetSyncMessage>) {
+	pub fn import_verified_blocks(&self, _io: &IoChannel<NetSyncMessage>) -> usize {
+		let mut ret = 0;
 		let mut bad = HashSet::new();
 		let _import_lock = self.import_lock.lock();
 		let blocks = self.block_queue.write().unwrap().drain(128);
 		for block in blocks {
-			// flushln!("Importing {}...", block.header.hash());
 			if bad.contains(&block.header.parent_hash) {
 				self.block_queue.write().unwrap().mark_as_bad(&block.header.hash());
 				bad.insert(block.header.hash());
@@ -207,20 +213,18 @@ impl Client {
 
 			let header = &block.header;
 			if let Err(e) = verify_block_family(&header, &block.bytes, self.engine.deref().deref(), self.chain.read().unwrap().deref()) {
-				flushln!("Stage 3 block verification failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
 				warn!(target: "client", "Stage 3 block verification failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
 				self.block_queue.write().unwrap().mark_as_bad(&header.hash());
 				bad.insert(block.header.hash());
-				return;
+				break;
 			};
 			let parent = match self.chain.read().unwrap().block_header(&header.parent_hash) {
 				Some(p) => p,
 				None => {
-					flushln!("Block import failed for #{} ({}): Parent not found ({}) ", header.number(), header.hash(), header.parent_hash);
 					warn!(target: "client", "Block import failed for #{} ({}): Parent not found ({}) ", header.number(), header.hash(), header.parent_hash);
 					self.block_queue.write().unwrap().mark_as_bad(&header.hash());
 					bad.insert(block.header.hash());
-					return;
+					break;
 				},
 			};
 			// build last hashes
@@ -240,18 +244,16 @@ impl Client {
 			let result = match enact_verified(&block, self.engine.deref().deref(), db, &parent, &last_hashes) {
 				Ok(b) => b,
 				Err(e) => {
-					flushln!("Block import failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
 					warn!(target: "client", "Block import failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
 					bad.insert(block.header.hash());
 					self.block_queue.write().unwrap().mark_as_bad(&header.hash());
-					return;
+					break;
 				}
 			};
 			if let Err(e) = verify_block_final(&header, result.block().header()) {
-				flushln!("Stage 4 block verification failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
 				warn!(target: "client", "Stage 4 block verification failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
 				self.block_queue.write().unwrap().mark_as_bad(&header.hash());
-				return;
+				break;
 			}
 
 			self.chain.write().unwrap().insert_block(&block.bytes); //TODO: err here?
@@ -260,12 +262,14 @@ impl Client {
 				Ok(_) => (),
 				Err(e) => {
 					warn!(target: "client", "State DB commit failed: {:?}", e);
-					return;
+					break;
 				}
 			}
 			self.report.write().unwrap().accrue_block(&block);
 			trace!(target: "client", "Imported #{} ({})", header.number(), header.hash());
+			ret += 1;
 		}
+		ret
 	}
 
 	/// Clear cached state overlay
@@ -317,6 +321,10 @@ impl BlockChainClient for Client {
 		if self.chain.read().unwrap().is_known(&hash) { BlockStatus::InChain } else { BlockStatus::Unknown }
 	}
 
+	fn block_total_difficulty(&self, hash: &H256) -> Option<U256> {
+		self.chain.read().unwrap().block_details(hash).map(|d| d.total_difficulty)
+	}
+
 	fn block_header_at(&self, n: BlockNumber) -> Option<Bytes> {
 		self.chain.read().unwrap().block_hash(n).and_then(|h| self.block_header(&h))
 	}
@@ -336,6 +344,10 @@ impl BlockChainClient for Client {
 		}
 	}
 
+	fn block_total_difficulty_at(&self, n: BlockNumber) -> Option<U256> {
+		self.chain.read().unwrap().block_hash(n).and_then(|h| self.block_total_difficulty(&h))
+	}
+
 	fn tree_route(&self, from: &H256, to: &H256) -> Option<TreeRoute> {
 		self.chain.read().unwrap().tree_route(from.clone(), to.clone())
 	}
@@ -120,7 +120,9 @@ pub enum BlockError {
 	/// TODO [arkpar] Please document me
 	InvalidParentHash(Mismatch<H256>),
 	/// TODO [arkpar] Please document me
-	InvalidNumber(OutOfBounds<BlockNumber>),
+	InvalidNumber(Mismatch<BlockNumber>),
+	/// Block number isn't sensible.
+	RidiculousNumber(OutOfBounds<BlockNumber>),
 	/// TODO [arkpar] Please document me
 	UnknownParent(H256),
 	/// TODO [Gav Wood] Please document me
@@ -145,7 +147,7 @@ impl From<Error> for ImportError {
 }
 
 /// Result of import block operation.
-pub type ImportResult = Result<(), ImportError>;
+pub type ImportResult = Result<H256, ImportError>;
 
 #[derive(Debug)]
 /// General error type which should be capable of representing all errors in ethcore.

@@ -101,7 +101,7 @@ impl Engine for Ethash {
 	fn verify_block_basic(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
 		// check the seal fields.
 		try!(UntrustedRlp::new(&header.seal[0]).as_val::<H256>());
-		try!(UntrustedRlp::new(&header.seal[1]).as_val::<u64>());
+		try!(UntrustedRlp::new(&header.seal[1]).as_val::<H64>());
 
 		let min_difficulty = decode(self.spec().engine_params.get("minimumDifficulty").unwrap());
 		if header.difficulty < min_difficulty {
@@ -109,7 +109,7 @@ impl Engine for Ethash {
 		}
 		let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(quick_get_difficulty(
 			&Ethash::to_ethash(header.bare_hash()),
-			header.nonce(),
+			header.nonce().low_u64(),
 			&Ethash::to_ethash(header.mix_hash()))));
 		if difficulty < header.difficulty {
 			return Err(From::from(BlockError::InvalidEthashDifficulty(Mismatch { expected: header.difficulty, found: difficulty })));
@@ -118,7 +118,7 @@ impl Engine for Ethash {
 	}
 
 	fn verify_block_unordered(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
-		let result = self.pow.compute_light(header.number as u64, &Ethash::to_ethash(header.bare_hash()), header.nonce());
+		let result = self.pow.compute_light(header.number as u64, &Ethash::to_ethash(header.bare_hash()), header.nonce().low_u64());
 		let mix = Ethash::from_ethash(result.mix_hash);
 		let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(result.value));
 		if mix != header.mix_hash() {
@@ -208,7 +208,7 @@ impl Ethash {
 }
 
 impl Header {
-	fn nonce(&self) -> u64 {
+	fn nonce(&self) -> H64 {
 		decode(&self.seal()[1])
 	}
 	fn mix_hash(&self) -> H256 {

@@ -415,7 +415,7 @@ impl ChainSync {
 			Err(ImportError::AlreadyQueued) => {
 				trace!(target: "sync", "New block already queued {:?}", h);
 			},
-			Ok(()) => {
+			Ok(_) => {
 				trace!(target: "sync", "New block queued {:?}", h);
 			},
 			Err(e) => {
@@ -680,7 +680,7 @@ impl ChainSync {
 					self.last_imported_block = headers.0 + i as BlockNumber;
 					self.last_imported_hash = h.clone();
 				},
-				Ok(()) => {
+				Ok(_) => {
 					trace!(target: "sync", "Block queued {:?}", h);
 					self.last_imported_block = headers.0 + i as BlockNumber;
 					self.last_imported_hash = h.clone();

@@ -51,6 +51,10 @@ impl TestBlockChainClient {
 }
 
 impl BlockChainClient for TestBlockChainClient {
+	fn block_total_difficulty(&self, _h: &H256) -> Option<U256> {
+		unimplemented!();
+	}
+
 	fn block_header(&self, h: &H256) -> Option<Bytes> {
 		self.blocks.read().unwrap().get(h).map(|r| Rlp::new(r).at(0).as_raw().to_vec())
 
@@ -76,6 +80,10 @@ impl BlockChainClient for TestBlockChainClient {
 		}
 	}
 
+	fn block_total_difficulty_at(&self, _number: BlockNumber) -> Option<U256> {
+		unimplemented!();
+	}
+
 	fn block_header_at(&self, n: BlockNumber) -> Option<Bytes> {
 		self.numbers.read().unwrap().get(&(n as usize)).and_then(|h| self.block_header(h))
 	}
@@ -114,6 +122,7 @@ impl BlockChainClient for TestBlockChainClient {
 
 	fn import_block(&self, b: Bytes) -> ImportResult {
 		let header = Rlp::new(&b).val_at::<BlockHeader>(0);
+		let h = header.hash();
 		let number: usize = header.number as usize;
 		if number > self.blocks.read().unwrap().len() {
 			panic!("Unexpected block number. Expected {}, got {}", self.blocks.read().unwrap().len(), number);
@@ -134,9 +143,9 @@ impl BlockChainClient for TestBlockChainClient {
 		let len = self.numbers.read().unwrap().len();
 		if number == len {
 			*self.difficulty.write().unwrap().deref_mut() += header.difficulty;
-			mem::replace(self.last_hash.write().unwrap().deref_mut(), header.hash());
-			self.blocks.write().unwrap().insert(header.hash(), b);
-			self.numbers.write().unwrap().insert(number, header.hash());
+			mem::replace(self.last_hash.write().unwrap().deref_mut(), h.clone());
+			self.blocks.write().unwrap().insert(h.clone(), b);
+			self.numbers.write().unwrap().insert(number, h.clone());
 			let mut parent_hash = header.parent_hash;
 			if number > 0 {
 				let mut n = number - 1;
@@ -148,9 +157,9 @@ impl BlockChainClient for TestBlockChainClient {
 			}
 		}
 		else {
-			self.blocks.write().unwrap().insert(header.hash(), b.to_vec());
+			self.blocks.write().unwrap().insert(h.clone(), b.to_vec());
 		}
-		Ok(())
+		Ok(h)
 	}
 
 	fn queue_info(&self) -> BlockQueueInfo {

@@ -21,7 +21,7 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
 
 		flush(format!(" - {}...", name));
 
-		let blocks: Vec<Bytes> = test["blocks"].as_array().unwrap().iter().map(|e| xjson!(&e["rlp"])).collect();
+		let blocks: Vec<(Bytes, bool)> = test["blocks"].as_array().unwrap().iter().map(|e| (xjson!(&e["rlp"]), e.find("blockHeader").is_some())).collect();
 		let mut spec = ethereum::new_frontier_like_test();
 		let s = PodState::from_json(test.find("pre").unwrap());
 		spec.set_genesis_state(s);
@@ -32,11 +32,14 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
 		dir.push(H32::random().hex());
 		{
 			let client = Client::new(spec, &dir, IoChannel::disconnected()).unwrap();
-			for b in blocks.into_iter().filter(|ref b| Block::is_good(b)) {
-				client.import_block(b).unwrap();
+			for (b, is_valid) in blocks.into_iter() {
+				if Block::is_good(&b) {
+					let _ = client.import_block(b.clone());
+				}
+				client.flush_queue();
+				let imported_ok = client.import_verified_blocks(&IoChannel::disconnected()) > 0;
+				assert_eq!(imported_ok, is_valid);
 			}
-			client.flush_queue();
-			client.import_verified_blocks(&IoChannel::disconnected());
 			fail_unless(client.chain_info().best_block_hash == H256::from_json(&test["lastblockhash"]));
 		}
 		fs::remove_dir_all(&dir).unwrap();
@@ -51,16 +54,16 @@ fn do_json_test(json_data: &[u8]) -> Vec<String> {
 
 declare_test!{BlockchainTests_bcBlockGasLimitTest, "BlockchainTests/bcBlockGasLimitTest"}
 declare_test!{BlockchainTests_bcForkBlockTest, "BlockchainTests/bcForkBlockTest"}
-declare_test!{BlockchainTests_bcForkStressTest, "BlockchainTests/bcForkStressTest"} // STILL FAILS
+declare_test!{BlockchainTests_bcForkStressTest, "BlockchainTests/bcForkStressTest"}
-declare_test!{BlockchainTests_bcForkUncle, "BlockchainTests/bcForkUncle"} // STILL FAILS
+declare_test!{BlockchainTests_bcForkUncle, "BlockchainTests/bcForkUncle"}
 declare_test!{BlockchainTests_bcGasPricerTest, "BlockchainTests/bcGasPricerTest"}
 declare_test!{BlockchainTests_bcInvalidHeaderTest, "BlockchainTests/bcInvalidHeaderTest"}
-declare_test!{BlockchainTests_bcInvalidRLPTest, "BlockchainTests/bcInvalidRLPTest"} // FAILS
+declare_test!{BlockchainTests_bcInvalidRLPTest, "BlockchainTests/bcInvalidRLPTest"}
-declare_test!{BlockchainTests_bcMultiChainTest, "BlockchainTests/bcMultiChainTest"} // FAILS
+declare_test!{BlockchainTests_bcMultiChainTest, "BlockchainTests/bcMultiChainTest"}
 declare_test!{BlockchainTests_bcRPC_API_Test, "BlockchainTests/bcRPC_API_Test"}
 declare_test!{BlockchainTests_bcStateTest, "BlockchainTests/bcStateTest"}
 declare_test!{BlockchainTests_bcTotalDifficultyTest, "BlockchainTests/bcTotalDifficultyTest"}
-declare_test!{BlockchainTests_bcUncleHeaderValiditiy, "BlockchainTests/bcUncleHeaderValiditiy"} // FAILS
+declare_test!{BlockchainTests_bcUncleHeaderValiditiy, "BlockchainTests/bcUncleHeaderValiditiy"}
-declare_test!{BlockchainTests_bcUncleTest, "BlockchainTests/bcUncleTest"} // FAILS
+declare_test!{BlockchainTests_bcUncleTest, "BlockchainTests/bcUncleTest"}
-declare_test!{BlockchainTests_bcValidBlockTest, "BlockchainTests/bcValidBlockTest"} // FAILS
+declare_test!{BlockchainTests_bcValidBlockTest, "BlockchainTests/bcValidBlockTest"}
-declare_test!{BlockchainTests_bcWalletTest, "BlockchainTests/bcWalletTest"} // FAILS
+declare_test!{BlockchainTests_bcWalletTest, "BlockchainTests/bcWalletTest"}
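Taken together, the queue and client changes above alter the import flow's return values: import_block now yields the imported block's hash and import_verified_blocks reports how many blocks were actually inserted. A hedged sketch of the resulting calling convention, mirroring the sync code and the blockchain test rewrite above:

	match client.import_block(bytes) {
		Ok(hash) => trace!(target: "sync", "Block queued {:?}", hash),
		Err(ImportError::AlreadyQueued) => (), // harmless duplicate
		Err(e) => warn!(target: "sync", "Import failed: {:?}", e),
	}
	client.flush_queue();
	let imported = client.import_verified_blocks(&IoChannel::disconnected());
	assert!(imported > 0); // at least one block made it into the chain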
117  src/tests/client.rs  (new file)

use client::{BlockChainClient,Client};
use super::test_common::*;
use super::helpers::*;

fn get_good_dummy_block() -> Bytes {
	let mut block_header = Header::new();
	let test_spec = get_test_spec();
	let test_engine = test_spec.to_engine().unwrap();
	block_header.gas_limit = decode(test_engine.spec().engine_params.get("minGasLimit").unwrap());
	block_header.difficulty = decode(test_engine.spec().engine_params.get("minimumDifficulty").unwrap());
	block_header.timestamp = 40;
	block_header.number = 1;
	block_header.parent_hash = test_engine.spec().genesis_header().hash();
	block_header.state_root = test_engine.spec().genesis_header().state_root;

	create_test_block(&block_header)
}

fn get_bad_state_dummy_block() -> Bytes {
	let mut block_header = Header::new();
	let test_spec = get_test_spec();
	let test_engine = test_spec.to_engine().unwrap();
	block_header.gas_limit = decode(test_engine.spec().engine_params.get("minGasLimit").unwrap());
	block_header.difficulty = decode(test_engine.spec().engine_params.get("minimumDifficulty").unwrap());
	block_header.timestamp = 40;
	block_header.number = 1;
	block_header.parent_hash = test_engine.spec().genesis_header().hash();
	block_header.state_root = x!(0xbad);

	create_test_block(&block_header)
}

fn get_test_client_with_blocks(blocks: Vec<Bytes>) -> Arc<Client> {
	let dir = RandomTempPath::new();
	let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
	for block in &blocks {
		if let Err(_) = client.import_block(block.clone()) {
			panic!("panic importing block which is well-formed");
		}
	}
	client.flush_queue();
	client.import_verified_blocks(&IoChannel::disconnected());
	client
}

#[test]
fn created() {
	let dir = RandomTempPath::new();
	let client_result = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected());
	assert!(client_result.is_ok());
}

#[test]
fn imports_from_empty() {
	let dir = RandomTempPath::new();
	let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
	client.import_verified_blocks(&IoChannel::disconnected());
	client.flush_queue();
}

#[test]
fn imports_good_block() {
	let dir = RandomTempPath::new();
	let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
	let good_block = get_good_dummy_block();
	if let Err(_) = client.import_block(good_block) {
		panic!("error importing block being good by definition");
	}
	client.flush_queue();
	client.import_verified_blocks(&IoChannel::disconnected());

	let block = client.block_header_at(1).unwrap();
	assert!(!block.is_empty());
}

#[test]
fn query_none_block() {
	let dir = RandomTempPath::new();
	let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();

	let non_existant = client.block_header_at(188);
	assert!(non_existant.is_none());
}

#[test]
fn query_bad_block() {
	let client = get_test_client_with_blocks(vec![get_bad_state_dummy_block()]);
	let bad_block: Option<Bytes> = client.block_header_at(1);

	assert!(bad_block.is_none());
}

#[test]
fn returns_chain_info() {
	let dummy_block = get_good_dummy_block();
	let client = get_test_client_with_blocks(vec![dummy_block.clone()]);
	let block = BlockView::new(&dummy_block);
	let info = client.chain_info();
	assert_eq!(info.best_block_hash, block.header().hash());
}

#[test]
fn imports_block_sequence() {
	let client = generate_dummy_client(6);
	let block = client.block_header_at(5).unwrap();

	assert!(!block.is_empty());
}

#[test]
fn can_collect_garbage() {
	let client = generate_dummy_client(100);
	client.tick();
	assert!(client.cache_info().blocks < 100 * 1024);
}
80  src/tests/helpers.rs  (new file)

use client::{BlockChainClient,Client};
use std::env;
use super::test_common::*;
use std::path::PathBuf;
use spec::*;
use std::fs::{remove_dir_all};

pub struct RandomTempPath {
	path: PathBuf
}

impl RandomTempPath {
	pub fn new() -> RandomTempPath {
		let mut dir = env::temp_dir();
		dir.push(H32::random().hex());
		RandomTempPath {
			path: dir.clone()
		}
	}

	pub fn as_path(&self) -> &PathBuf {
		&self.path
	}
}

impl Drop for RandomTempPath {
	fn drop(&mut self) {
		if let Err(e) = remove_dir_all(self.as_path()) {
			panic!("failed to remove temp directory, probably something failed to destroyed ({})", e);
		}
	}
}

pub fn get_test_spec() -> Spec {
	Spec::new_test()
}

pub fn create_test_block(header: &Header) -> Bytes {
	let mut rlp = RlpStream::new_list(3);
	rlp.append(header);
	rlp.append_raw(&rlp::EMPTY_LIST_RLP, 1);
	rlp.append_raw(&rlp::EMPTY_LIST_RLP, 1);
	rlp.out()
}

pub fn generate_dummy_client(block_number: usize) -> Arc<Client> {
	let dir = RandomTempPath::new();

	let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
	let test_spec = get_test_spec();
	let test_engine = test_spec.to_engine().unwrap();
	let state_root = test_engine.spec().genesis_header().state_root;
	let mut rolling_hash = test_engine.spec().genesis_header().hash();
	let mut rolling_block_number = 1;
	let mut rolling_timestamp = 40;

	for _ in 0..block_number {
		let mut header = Header::new();

		header.gas_limit = decode(test_engine.spec().engine_params.get("minGasLimit").unwrap());
		header.difficulty = decode(test_engine.spec().engine_params.get("minimumDifficulty").unwrap());
		header.timestamp = rolling_timestamp;
		header.number = rolling_block_number;
		header.parent_hash = rolling_hash;
		header.state_root = state_root.clone();

		rolling_hash = header.hash();
		rolling_block_number = rolling_block_number + 1;
		rolling_timestamp = rolling_timestamp + 10;

		if let Err(_) = client.import_block(create_test_block(&header)) {
			panic!("error importing block which is valid by definition");
		}
	}
	client.flush_queue();
	client.import_verified_blocks(&IoChannel::disconnected());
	client
}
@@ -4,4 +4,6 @@ mod test_common;
 mod transaction;
 mod executive;
 mod state;
+mod client;
 mod chain;
+mod helpers;
@ -162,7 +162,7 @@ pub fn verify_block_final(expected: &Header, got: &Header) -> Result<(), Error>
 /// Check basic header parameters.
 fn verify_header(header: &Header, engine: &Engine) -> Result<(), Error> {
 	if header.number >= From::from(BlockNumber::max_value()) {
-		return Err(From::from(BlockError::InvalidNumber(OutOfBounds { max: Some(From::from(BlockNumber::max_value())), min: None, found: header.number })))
+		return Err(From::from(BlockError::RidiculousNumber(OutOfBounds { max: Some(From::from(BlockNumber::max_value())), min: None, found: header.number })))
 	}
 	if header.gas_used > header.gas_limit {
 		return Err(From::from(BlockError::TooMuchGasUsed(OutOfBounds { max: Some(header.gas_limit), min: None, found: header.gas_used })));
@ -186,8 +186,8 @@ fn verify_parent(header: &Header, parent: &Header) -> Result<(), Error> {
 	if header.timestamp <= parent.timestamp {
 		return Err(From::from(BlockError::InvalidTimestamp(OutOfBounds { max: None, min: Some(parent.timestamp + 1), found: header.timestamp })))
 	}
-	if header.number <= parent.number {
-		return Err(From::from(BlockError::InvalidNumber(OutOfBounds { max: None, min: Some(parent.number + 1), found: header.number })));
+	if header.number != parent.number + 1 {
+		return Err(From::from(BlockError::InvalidNumber(Mismatch { expected: parent.number + 1, found: header.number })));
 	}
 	Ok(())
 }
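This hunk tightens the parent check: the old predicate only rejected block numbers at or below the parent's, so a child numbered several blocks ahead would still have passed, while the new one requires exactly `parent.number + 1` and reports the expectation as a `Mismatch`. A standalone sketch of the two predicates over plain `u64`s (illustration only, not the project's `BlockNumber` type):

	// Old rule: anything strictly greater than the parent was accepted.
	fn old_rule(header: u64, parent: u64) -> bool { header > parent }
	// New rule: only the direct successor is accepted.
	fn new_rule(header: u64, parent: u64) -> bool { header == parent + 1 }

	fn main() {
		assert!(old_rule(15, 9));    // a gap of six blocks used to slip through
		assert!(!new_rule(15, 9));   // now rejected: Mismatch { expected: 10, found: 15 }
		assert!(new_rule(10, 9));    // the only value the new check allows
	}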
@ -400,7 +400,7 @@ mod tests {
 		header = good.clone();
 		header.number = BlockNumber::max_value();
 		check_fail(basic_test(&create_test_block(&header), engine.deref()),
-			InvalidNumber(OutOfBounds { max: Some(BlockNumber::max_value()), min: None, found: header.number }));
+			RidiculousNumber(OutOfBounds { max: Some(BlockNumber::max_value()), min: None, found: header.number }));

 		header = good.clone();
 		header.gas_used = header.gas_limit + From::from(1);
@ -443,7 +443,7 @@ mod tests {
 		header = good.clone();
 		header.number = 9;
 		check_fail(family_test(&create_test_block_with_data(&header, &good_transactions, &good_uncles), engine.deref(), &bc),
-			InvalidNumber(OutOfBounds { max: None, min: Some(parent.number + 1), found: header.number }));
+			InvalidNumber(Mismatch { expected: parent.number + 1, found: header.number }));

 		header = good.clone();
 		let mut bad_uncles = good_uncles.clone();
@ -25,7 +25,8 @@ itertools = "0.4"
 crossbeam = "0.2"
 slab = { git = "https://github.com/arkpar/slab.git" }
 sha3 = { path = "sha3" }
-clippy = "*" # Always newest, since we use nightly
+serde = "0.6.7"
+clippy = "0.0.37"

 [dev-dependencies]
 json-tests = { path = "json-tests" }
@ -8,6 +8,8 @@ use rand::os::OsRng;
 use bytes::{BytesConvertable,Populatable};
 use from_json::*;
 use uint::{Uint, U256};
+use rustc_serialize::hex::ToHex;
+use serde;

 /// Trait for a fixed-size byte array to be used as the output of hash functions.
 ///
@ -41,6 +43,8 @@ pub trait FixedHash: Sized + BytesConvertable + Populatable + FromStr + Default
 	fn contains<'a>(&'a self, b: &'a Self) -> bool;
 	/// TODO [debris] Please document me
 	fn is_zero(&self) -> bool;
+	/// Return the lowest 8 bytes interpreted as a BigEndian integer.
+	fn low_u64(&self) -> u64;
 }

 fn clean_0x(s: &str) -> &str {
@ -71,8 +75,8 @@ macro_rules! impl_hash {
 				&self.0
 			}
 		}
-		impl DerefMut for $from {
+
+		impl DerefMut for $from {
 			#[inline]
 			fn deref_mut(&mut self) -> &mut [u8] {
 				&mut self.0
@ -190,6 +194,14 @@ macro_rules! impl_hash {
 			fn is_zero(&self) -> bool {
 				self.eq(&Self::new())
 			}
+
+			fn low_u64(&self) -> u64 {
+				let mut ret = 0u64;
+				for i in 0..min($size, 8) {
+					ret |= (self.0[$size - 1 - i] as u64) << (i * 8);
+				}
+				ret
+			}
 		}

 		impl FromStr for $from {
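`low_u64` folds the trailing `min($size, 8)` bytes of the hash into a `u64`, reading them as big-endian. A self-contained sketch of the same loop over a fixed 32-byte array (the constant 32 stands in for `$size` of an H256):

	fn low_u64(bytes: &[u8; 32]) -> u64 {
		let mut ret = 0u64;
		// Walk the trailing bytes from least to most significant.
		for i in 0..std::cmp::min(32, 8) {
			ret |= (bytes[32 - 1 - i] as u64) << (i * 8);
		}
		ret
	}

	fn main() {
		let mut h = [0u8; 32];
		h[31] = 0x01;
		h[30] = 0x02;
		// Big-endian: byte 30 sits one position above byte 31.
		assert_eq!(low_u64(&h), 0x0201);
	}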
@ -205,6 +217,41 @@ macro_rules! impl_hash {
 			}
 		}
+
+		impl serde::Serialize for $from {
+			fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
+			where S: serde::Serializer {
+				let mut hex = "0x".to_owned();
+				hex.push_str(self.to_hex().as_ref());
+				serializer.visit_str(hex.as_ref())
+			}
+		}
+
+		impl serde::Deserialize for $from {
+			fn deserialize<D>(deserializer: &mut D) -> Result<$from, D::Error>
+			where D: serde::Deserializer {
+				struct HashVisitor;
+
+				impl serde::de::Visitor for HashVisitor {
+					type Value = $from;
+
+					fn visit_str<E>(&mut self, value: &str) -> Result<Self::Value, E> where E: serde::Error {
+						// 0x + len
+						if value.len() != 2 + $size * 2 {
+							return Err(serde::Error::syntax("Invalid length."));
+						}
+
+						value[2..].from_hex().map(|ref v| $from::from_slice(v)).map_err(|_| serde::Error::syntax("Invalid hex."))
+					}
+
+					fn visit_string<E>(&mut self, value: String) -> Result<Self::Value, E> where E: serde::Error {
+						self.visit_str(value.as_ref())
+					}
+				}
+
+				deserializer.visit(HashVisitor)
+			}
+		}
+
 		impl FromJson for $from {
 			fn from_json(json: &Json) -> Self {
 				match *json {
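The new impls frame a hash as a `0x`-prefixed hex string and, on deserialization, insist on the exact width of `2 + $size * 2` characters (66 for an H256) before decoding. A hedged sketch of just that framing logic, independent of the serde 0.6 visitor machinery; `encode_hash` and `check_len` are illustrative names, not part of the crate:

	// Mirrors the "0x" + hex framing and the strict length check; not the real impl.
	fn encode_hash(bytes: &[u8]) -> String {
		let mut s = String::from("0x");
		for b in bytes {
			s.push_str(&format!("{:02x}", b));
		}
		s
	}

	fn check_len(s: &str, size: usize) -> bool {
		// "0x" prefix plus two hex characters per byte.
		s.len() == 2 + size * 2
	}

	fn main() {
		let h = [0u8; 32];              // stand-in for an H256
		let s = encode_hash(&h);
		assert!(s.starts_with("0x"));
		assert!(check_len(&s, 32));     // 66 characters in total
	}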
@ -55,6 +55,7 @@ extern crate secp256k1;
 extern crate arrayvec;
 extern crate elastic_array;
 extern crate crossbeam;
+extern crate serde;

 /// TODO [Gav Wood] Please document me
 pub mod standard;
@ -23,6 +23,8 @@

 use standard::*;
 use from_json::*;
+use rustc_serialize::hex::ToHex;
+use serde;

 macro_rules! impl_map_from {
 	($thing:ident, $from:ty, $to:ty) => {
@ -436,6 +438,17 @@ macro_rules! construct_uint {
 			}
 		}
+
+		impl serde::Serialize for $name {
+			fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
+			where S: serde::Serializer {
+				let mut hex = "0x".to_owned();
+				let mut bytes = [0u8; 8 * $n_words];
+				self.to_bytes(&mut bytes);
+				hex.push_str(bytes.to_hex().as_ref());
+				serializer.visit_str(hex.as_ref())
+			}
+		}
+
 		impl From<u64> for $name {
 			fn from(value: u64) -> $name {
 				let mut ret = [0; $n_words];
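Because the serializer hex-encodes the whole fixed `8 * $n_words`-byte buffer filled by `to_bytes`, every value comes out at full width with leading zeros rather than as a minimal-length hex string. A quick illustration of the resulting string length (plain arithmetic, not the macro itself):

	fn serialized_len(n_words: usize) -> usize {
		// "0x" prefix plus two hex characters per byte of the fixed-width buffer.
		2 + 8 * n_words * 2
	}

	fn main() {
		assert_eq!(serialized_len(4), 66);   // U256: "0x" followed by 64 zero-padded digits
		assert_eq!(serialized_len(2), 34);   // U128: "0x" followed by 32 digits
	}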