Merge branch 'master' of github.com:ethcore/parity into ark

arkpar 2016-01-29 13:59:37 +01:00
commit 8ee59d30c4
12 changed files with 284 additions and 62 deletions

View File

@@ -22,6 +22,7 @@ ethash = { path = "ethash" }
num_cpus = "0.2"
clippy = "0.0.37"
crossbeam = "0.1.5"
lazy_static = "0.1"
[features]
jit = ["evmjit"]

View File

@@ -3,7 +3,7 @@
"engineName": "Ethash",
"params": {
"accountStartNonce": "0x00",
"frontierCompatibilityModeLimit": "0xDBBA0",
"frontierCompatibilityModeLimit": "0xdbba0",
"maximumExtraDataSize": "0x20",
"tieBreakingGas": false,
"minGasLimit": "0x1388",

View File

@@ -3,7 +3,7 @@
"engineName": "Ethash",
"params": {
"accountStartNonce": "0x00",
"frontierCompatibilityModeLimit": "0",
"frontierCompatibilityModeLimit": 0,
"maximumExtraDataSize": "0x20",
"minGasLimit": "0x1388",
"tieBreakingGas": false,

View File

@@ -3,7 +3,7 @@
"engineName": "Ethash",
"params": {
"accountStartNonce": "0x0100000",
"frontierCompatibilityModeLimit": "0xDBBA0",
"frontierCompatibilityModeLimit": "0xdbba0",
"maximumExtraDataSize": "0x20",
"tieBreakingGas": false,
"minGasLimit": "0x1388",

View File

@@ -292,12 +292,68 @@ mod tests {
use util::*;
use spec::*;
use block_queue::*;
use tests::helpers::*;
use error::*;
fn get_test_queue() -> BlockQueue {
let spec = get_test_spec();
let engine = spec.to_engine().unwrap();
BlockQueue::new(Arc::new(engine), IoChannel::disconnected())
}
#[test]
fn test_block_queue() {
fn can_be_created() {
// TODO better test
let spec = Spec::new_test();
let engine = spec.to_engine().unwrap();
let _ = BlockQueue::new(Arc::new(engine), IoChannel::disconnected());
}
#[test]
fn can_import_blocks() {
let mut queue = get_test_queue();
if let Err(e) = queue.import_block(get_good_dummy_block()) {
panic!("error importing block that is valid by definition({:?})", e);
}
}
#[test]
fn returns_error_for_duplicates() {
let mut queue = get_test_queue();
if let Err(e) = queue.import_block(get_good_dummy_block()) {
panic!("error importing block that is valid by definition({:?})", e);
}
let duplicate_import = queue.import_block(get_good_dummy_block());
match duplicate_import {
Err(e) => {
match e {
ImportError::AlreadyQueued => {},
_ => { panic!("must return AlreadyQueued error"); }
}
}
Ok(_) => { panic!("must produce error"); }
}
}
#[test]
fn returns_error_for_verified_duplicates() {
let mut queue = get_test_queue();
if let Err(e) = queue.import_block(get_good_dummy_block()) {
panic!("error importing block that is valid by definition({:?})", e);
}
queue.drain(10);
queue.flush();
let duplicate_import = queue.import_block(get_good_dummy_block());
match duplicate_import {
Err(e) => {
match e {
ImportError::AlreadyQueued => {},
_ => { panic!("must return AlreadyQueued error"); }
}
}
Ok(_) => { panic!("must produce error"); }
}
}
}

View File

@@ -763,4 +763,44 @@ mod tests {
assert_eq!(bc.best_block_hash(), b1_hash);
}
}
#[test]
fn can_contain_arbitrary_block_sequence() {
let bc_result = generate_dummy_blockchain(50);
let bc = bc_result.reference();
assert_eq!(bc.best_block_number(), 49);
}
#[test]
fn can_collect_garbage() {
let bc_result = generate_dummy_blockchain(3000);
let bc = bc_result.reference();
assert_eq!(bc.best_block_number(), 2999);
let best_hash = bc.best_block_hash();
let mut block_header = bc.block_header(&best_hash);
while !block_header.is_none() {
block_header = bc.block_header(&block_header.unwrap().parent_hash);
}
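// the walk above exists only to pull every header through block_header() and warm
// the block cache, so cache_size().blocks climbs past the 1MB threshold asserted next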
assert!(bc.cache_size().blocks > 1024 * 1024);
bc.collect_garbage(true);
assert!(bc.cache_size().blocks < 1024 * 1024);
}
#[test]
fn can_contain_arbitrary_block_sequence_with_extra() {
let bc_result = generate_dummy_blockchain_with_extra(25);
let bc = bc_result.reference();
assert_eq!(bc.best_block_number(), 24);
}
#[test]
fn can_contain_only_genesis_block() {
let bc_result = generate_dummy_empty_blockchain();
let bc = bc_result.reference();
assert_eq!(bc.best_block_number(), 0);
}
}

View File

@@ -59,9 +59,14 @@ impl Engine for Ethash {
}
fn schedule(&self, env_info: &EnvInfo) -> Schedule {
trace!(target: "client", "Creating schedule. param={:?}, fCML={}", self.spec().engine_params.get("frontierCompatibilityModeLimit"), self.u64_param("frontierCompatibilityModeLimit"));
match env_info.number < self.u64_param("frontierCompatibilityModeLimit") {
true => Schedule::new_frontier(),
_ => Schedule::new_homestead(),
true => {
Schedule::new_frontier()
},
_ => {
Schedule::new_homestead()
},
}
}
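// 0xdbba0 is 900_000: block numbers below frontierCompatibilityModeLimit keep the
// Frontier gas schedule, anything at or above it gets the Homestead schedule.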
@@ -178,12 +183,13 @@ impl Ethash {
}
}
else {
trace!(target: "ethash", "Calculating difficulty parent.difficulty={}, header.timestamp={}, parent.timestamp={}", parent.difficulty, header.timestamp, parent.timestamp);
//block_diff = parent_diff + parent_diff // 2048 * max(1 - (block_timestamp - parent_timestamp) // 10, -99)
let diff_inc = (header.timestamp - parent.timestamp) / 10;
if diff_inc <= 1 {
parent.difficulty + parent.difficulty / From::from(2048) * From::from(1 - diff_inc)
}
else {
parent.difficulty - parent.difficulty / From::from(2048) * From::from(max(diff_inc - 1, 99))
} else {
parent.difficulty - parent.difficulty / From::from(2048) * From::from(min(diff_inc - 1, 99))
}
};
target = max(min_difficulty, target);
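// Illustrative sketch (not part of the diff above; hypothetical free function using
// plain u64 instead of the crate's U256) of the Frontier difficulty rule this hunk
// corrects. The decrease branch must cap the adjustment at 99 steps of parent_diff/2048;
// with max(diff_inc - 1, 99) a long gap between blocks would subtract diff_inc - 1 steps
// instead, collapsing (or underflowing) the difficulty.
fn frontier_difficulty_sketch(parent_diff: u64, parent_ts: u64, ts: u64, min_difficulty: u64) -> u64 {
    use std::cmp::{max, min};
    // block_diff = parent_diff + parent_diff/2048 * max(1 - (ts - parent_ts)/10, -99)
    let diff_inc = (ts - parent_ts) / 10;
    let target = if diff_inc <= 1 {
        parent_diff + parent_diff / 2048 * (1 - diff_inc)
    } else {
        parent_diff - parent_diff / 2048 * min(diff_inc - 1, 99)
    };
    max(min_difficulty, target)
}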

View File

@@ -91,6 +91,8 @@ extern crate evmjit;
#[macro_use]
extern crate ethcore_util as util;
extern crate crossbeam;
#[macro_use]
extern crate lazy_static;
// NOTE: Add doc parser exception for these pub declarations.

View File

@@ -16,7 +16,7 @@ pub fn gzip64res_to_json(source: &[u8]) -> Json {
Json::from_str(&s).expect("Json is invalid")
}
/// Convert JSON value to equivlaent RLP representation.
/// Convert JSON value to equivalent RLP representation.
// TODO: handle container types.
fn json_to_rlp(json: &Json) -> Bytes {
match *json {
@@ -77,6 +77,10 @@ pub struct Spec {
pub gas_used: U256,
/// TODO [Gav Wood] Please document me
pub timestamp: u64,
/// Transactions root of the genesis block. Should be SHA3_NULL_RLP.
pub transactions_root: H256,
/// Receipts root of the genesis block. Should be SHA3_NULL_RLP.
pub receipts_root: H256,
/// TODO [arkpar] Please document me
pub extra_data: Bytes,
/// TODO [Gav Wood] Please document me
@@ -120,11 +124,11 @@ impl Spec {
timestamp: self.timestamp,
number: 0,
author: self.author.clone(),
transactions_root: SHA3_NULL_RLP.clone(),
transactions_root: self.transactions_root.clone(),
uncles_hash: RlpStream::new_list(0).out().sha3(),
extra_data: self.extra_data.clone(),
state_root: self.state_root().clone(),
receipts_root: SHA3_NULL_RLP.clone(),
receipts_root: self.receipts_root.clone(),
log_bloom: H2048::new().clone(),
gas_used: self.gas_used.clone(),
gas_limit: self.gas_limit.clone(),
@@ -172,6 +176,8 @@ impl Spec {
};
self.parent_hash = H256::from_json(&genesis["parentHash"]);
self.transactions_root = genesis.find("transactionsTrie").and_then(|_| Some(H256::from_json(&genesis["transactionsTrie"]))).unwrap_or(SHA3_NULL_RLP.clone());
self.receipts_root = genesis.find("receiptTrie").and_then(|_| Some(H256::from_json(&genesis["receiptTrie"]))).unwrap_or(SHA3_NULL_RLP.clone());
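// when the test fixture omits "transactionsTrie"/"receiptTrie", both roots fall back
// to SHA3_NULL_RLP, the empty-trie root, matching the defaults set in from_json below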
self.author = Address::from_json(&genesis["coinbase"]);
self.difficulty = U256::from_json(&genesis["difficulty"]);
self.gas_limit = U256::from_json(&genesis["gasLimit"]);
@@ -248,6 +254,8 @@ impl FromJson for Spec {
gas_limit: U256::from_str(&genesis["gasLimit"].as_string().unwrap()[2..]).unwrap(),
gas_used: U256::from(0u8),
timestamp: u64::from_str(&genesis["timestamp"].as_string().unwrap()[2..]).unwrap(),
transactions_root: SHA3_NULL_RLP.clone(),
receipts_root: SHA3_NULL_RLP.clone(),
extra_data: genesis["extraData"].as_string().unwrap()[2..].from_hex().unwrap(),
genesis_state: state,
seal_fields: seal_fields,

View File

@@ -1,3 +1,6 @@
use std::env;
use log::{LogLevelFilter};
use env_logger::LogBuilder;
use super::test_common::*;
use client::{BlockChainClient,Client};
use pod_state::*;
@@ -10,7 +13,22 @@ pub enum ChainEra {
Homestead,
}
lazy_static! {
static ref LOG_DUMMY: bool = {
let mut builder = LogBuilder::new();
builder.filter(None, LogLevelFilter::Info);
if env::var("RUST_LOG").is_ok() {
builder.parse(&env::var("RUST_LOG").unwrap());
}
builder.init().unwrap();
true
};
}
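// LOG_DUMMY gives the test binary a single, idempotent logger: the first test to touch
// the static builds an env_logger (honouring RUST_LOG when set) and installs it via
// init(); every later access just reads the cached `true`, avoiding a double-init panic.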
pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
let _ = LOG_DUMMY.deref();
let json = Json::from_str(::std::str::from_utf8(json_data).unwrap()).expect("Json is invalid");
let mut failed = Vec::new();
@@ -35,10 +53,13 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
spec.set_genesis_state(s);
spec.overwrite_genesis(test.find("genesisBlockHeader").unwrap());
assert!(spec.is_state_root_valid());
let genesis_hash = spec.genesis_header().hash();
assert_eq!(genesis_hash, H256::from_json(&test.find("genesisBlockHeader").unwrap()["hash"]));
let temp = RandomTempPath::new();
{
let client = Client::new(spec, temp.as_path(), IoChannel::disconnected()).unwrap();
assert_eq!(client.chain_info().best_block_hash, genesis_hash);
for (b, is_valid) in blocks.into_iter() {
if Block::is_good(&b) {
let _ = client.import_block(b.clone());

View File

@@ -2,49 +2,6 @@ use client::{BlockChainClient,Client};
use super::test_common::*;
use super::helpers::*;
fn get_good_dummy_block() -> Bytes {
let mut block_header = Header::new();
let test_spec = get_test_spec();
let test_engine = test_spec.to_engine().unwrap();
block_header.gas_limit = decode(test_engine.spec().engine_params.get("minGasLimit").unwrap());
block_header.difficulty = decode(test_engine.spec().engine_params.get("minimumDifficulty").unwrap());
block_header.timestamp = 40;
block_header.number = 1;
block_header.parent_hash = test_engine.spec().genesis_header().hash();
block_header.state_root = test_engine.spec().genesis_header().state_root;
create_test_block(&block_header)
}
fn get_bad_state_dummy_block() -> Bytes {
let mut block_header = Header::new();
let test_spec = get_test_spec();
let test_engine = test_spec.to_engine().unwrap();
block_header.gas_limit = decode(test_engine.spec().engine_params.get("minGasLimit").unwrap());
block_header.difficulty = decode(test_engine.spec().engine_params.get("minimumDifficulty").unwrap());
block_header.timestamp = 40;
block_header.number = 1;
block_header.parent_hash = test_engine.spec().genesis_header().hash();
block_header.state_root = x!(0xbad);
create_test_block(&block_header)
}
fn get_test_client_with_blocks(blocks: Vec<Bytes>) -> Arc<Client> {
let dir = RandomTempPath::new();
let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
for block in &blocks {
if let Err(_) = client.import_block(block.clone()) {
panic!("panic importing block which is well-formed");
}
}
client.flush_queue();
client.import_verified_blocks(&IoChannel::disconnected());
client
}
#[test]
fn created() {
let dir = RandomTempPath::new();
@@ -86,7 +43,8 @@ fn query_none_block() {
#[test]
fn query_bad_block() {
let client = get_test_client_with_blocks(vec![get_bad_state_dummy_block()]);
let client_result = get_test_client_with_blocks(vec![get_bad_state_dummy_block()]);
let client = client_result.reference();
let bad_block:Option<Bytes> = client.block_header_at(1);
assert!(bad_block.is_none());
@@ -95,7 +53,8 @@ fn query_bad_block() {
#[test]
fn returns_chain_info() {
let dummy_block = get_good_dummy_block();
let client = get_test_client_with_blocks(vec![dummy_block.clone()]);
let client_result = get_test_client_with_blocks(vec![dummy_block.clone()]);
let client = client_result.reference();
let block = BlockView::new(&dummy_block);
let info = client.chain_info();
assert_eq!(info.best_block_hash, block.header().hash());
@@ -103,7 +62,8 @@ fn returns_chain_info() {
#[test]
fn imports_block_sequence() {
let client = generate_dummy_client(6);
let client_result = generate_dummy_client(6);
let client = client_result.reference();
let block = client.block_header_at(5).unwrap();
assert!(!block.is_empty());
@@ -111,7 +71,8 @@ fn imports_block_sequence() {
#[test]
fn can_collect_garbage() {
let client = generate_dummy_client(100);
let client_result = generate_dummy_client(100);
let client = client_result.reference();
client.tick();
assert!(client.cache_info().blocks < 100 * 1024);
}

View File

@@ -4,6 +4,7 @@ use super::test_common::*;
use std::path::PathBuf;
use spec::*;
use std::fs::{remove_dir_all};
use blockchain::{BlockChain};
pub struct RandomTempPath {
@@ -32,6 +33,18 @@ impl Drop for RandomTempPath {
}
}
#[allow(dead_code)]
pub struct GuardedTempResult<T> {
result: T,
temp: RandomTempPath
}
impl<T> GuardedTempResult<T> {
pub fn reference(&self) -> &T {
&self.result
}
}
pub fn get_test_spec() -> Spec {
Spec::new_test()
}
@@ -44,7 +57,36 @@ pub fn create_test_block(header: &Header) -> Bytes {
rlp.out()
}
pub fn generate_dummy_client(block_number: usize) -> Arc<Client> {
fn create_unverifiable_block_header(order: u32, parent_hash: H256) -> Header {
let mut header = Header::new();
header.gas_limit = x!(0);
header.difficulty = x!(order * 100);
header.timestamp = (order * 10) as u64;
header.number = order as u64;
header.parent_hash = parent_hash;
header.state_root = H256::zero();
header
}
fn create_unverifiable_block_with_extra(order: u32, parent_hash: H256, extra: Option<Bytes>) -> Bytes {
let mut header = create_unverifiable_block_header(order, parent_hash);
header.extra_data = match extra {
Some(extra_data) => extra_data,
None => {
let base = (order & 0x000000ff) as u8;
let generated: Vec<u8> = vec![base + 1, base + 2, base + 3];
generated
}
};
create_test_block(&header)
}
fn create_unverifiable_block(order: u32, parent_hash: H256) -> Bytes {
create_test_block(&create_unverifiable_block_header(order, parent_hash))
}
pub fn generate_dummy_client(block_number: u32) -> GuardedTempResult<Arc<Client>> {
let dir = RandomTempPath::new();
let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
@@ -76,5 +118,90 @@ pub fn generate_dummy_client(block_number: usize) -> Arc<Client> {
}
client.flush_queue();
client.import_verified_blocks(&IoChannel::disconnected());
client
GuardedTempResult::<Arc<Client>> {
temp: dir,
result: client
}
}
pub fn get_test_client_with_blocks(blocks: Vec<Bytes>) -> GuardedTempResult<Arc<Client>> {
let dir = RandomTempPath::new();
let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
for block in &blocks {
if let Err(_) = client.import_block(block.clone()) {
panic!("panic importing block which is well-formed");
}
}
client.flush_queue();
client.import_verified_blocks(&IoChannel::disconnected());
GuardedTempResult::<Arc<Client>> {
temp: dir,
result: client
}
}
pub fn generate_dummy_blockchain(block_number: u32) -> GuardedTempResult<BlockChain> {
let temp = RandomTempPath::new();
let bc = BlockChain::new(&create_unverifiable_block(0, H256::zero()), temp.as_path());
for block_order in 1..block_number {
bc.insert_block(&create_unverifiable_block(block_order, bc.best_block_hash()));
}
GuardedTempResult::<BlockChain> {
temp: temp,
result: bc
}
}
pub fn generate_dummy_blockchain_with_extra(block_number: u32) -> GuardedTempResult<BlockChain> {
let temp = RandomTempPath::new();
let bc = BlockChain::new(&create_unverifiable_block(0, H256::zero()), temp.as_path());
for block_order in 1..block_number {
bc.insert_block(&create_unverifiable_block_with_extra(block_order, bc.best_block_hash(), None));
}
GuardedTempResult::<BlockChain> {
temp: temp,
result: bc
}
}
pub fn generate_dummy_empty_blockchain() -> GuardedTempResult<BlockChain> {
let temp = RandomTempPath::new();
let bc = BlockChain::new(&create_unverifiable_block(0, H256::zero()), temp.as_path());
GuardedTempResult::<BlockChain> {
temp: temp,
result: bc
}
}
pub fn get_good_dummy_block() -> Bytes {
let mut block_header = Header::new();
let test_spec = get_test_spec();
let test_engine = test_spec.to_engine().unwrap();
block_header.gas_limit = decode(test_engine.spec().engine_params.get("minGasLimit").unwrap());
block_header.difficulty = decode(test_engine.spec().engine_params.get("minimumDifficulty").unwrap());
block_header.timestamp = 40;
block_header.number = 1;
block_header.parent_hash = test_engine.spec().genesis_header().hash();
block_header.state_root = test_engine.spec().genesis_header().state_root;
create_test_block(&block_header)
}
pub fn get_bad_state_dummy_block() -> Bytes {
let mut block_header = Header::new();
let test_spec = get_test_spec();
let test_engine = test_spec.to_engine().unwrap();
block_header.gas_limit = decode(test_engine.spec().engine_params.get("minGasLimit").unwrap());
block_header.difficulty = decode(test_engine.spec().engine_params.get("minimumDifficulty").unwrap());
block_header.timestamp = 40;
block_header.number = 1;
block_header.parent_hash = test_engine.spec().genesis_header().hash();
block_header.state_root = x!(0xbad);
create_test_block(&block_header)
}
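// Usage sketch (hypothetical test, not part of the diff) showing why the helpers now
// return GuardedTempResult<T>: the RandomTempPath guard travels with the result, so the
// temporary database directory is only removed once the Client (or BlockChain) borrowed
// from it has gone out of scope.
#[test]
fn uses_guarded_helper() {
    let client_result = generate_dummy_client(6); // GuardedTempResult<Arc<Client>>
    let client = client_result.reference();       // borrow; the temp-dir guard stays alive
    assert!(client.block_header_at(5).is_some());
} // client_result drops here, removing the temp dir only after the client is done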