// Copyright 2015-2019 Parity Technologies (UK) Ltd.
// This file is part of Parity Ethereum.

// Parity Ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity Ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.

//! Set of different helpers for client tests

use std::path::Path;
use std::sync::Arc;
use std::{fs, io};

use blockchain::{BlockChain, BlockChainDB, BlockChainDBHandler, Config as BlockChainConfig, ExtrasInsert};
use blooms_db;
use bytes::Bytes;
use ethereum_types::{H256, U256, Address};
use ethkey::KeyPair;
use evm::Factory as EvmFactory;
use hash::keccak;
use io::IoChannel;
use kvdb::KeyValueDB;
use kvdb_rocksdb::{self, Database, DatabaseConfig};
use parking_lot::RwLock;
use rlp::{self, RlpStream};
use tempdir::TempDir;
use types::transaction::{Action, Transaction, SignedTransaction};
use types::encoded;
use types::header::Header;
use types::view;
use types::views::BlockView;

use account_provider::AccountProvider;
use block::{OpenBlock, Drain};
use client::{Client, ClientConfig, ChainInfo, ImportBlock, ChainNotify, ChainMessageType, PrepareOpenBlock};
use factory::Factories;
use miner::Miner;
use spec::Spec;
use state::*;
use state_db::StateDB;
use verification::queue::kind::blocks::Unverified;

/// Creates an RLP-encoded test block with the given header and empty
/// transaction and uncle lists
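///
/// # Example
///
/// A minimal sketch (marked `ignore` since it relies on this crate's test
/// context); the header setters mirror those used elsewhere in this module:
///
/// ```ignore
/// let mut header = Header::new();
/// header.set_number(1);
/// header.set_parent_hash(H256::zero());
/// let block_rlp: Bytes = create_test_block(&header);
/// assert!(!block_rlp.is_empty());
/// ```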
pub fn create_test_block(header: &Header) -> Bytes {
	let mut rlp = RlpStream::new_list(3);
	rlp.append(header);
	rlp.append_raw(&rlp::EMPTY_LIST_RLP, 1);
	rlp.append_raw(&rlp::EMPTY_LIST_RLP, 1);
	rlp.out()
}

fn create_unverifiable_block_header(order: u32, parent_hash: H256) -> Header {
	let mut header = Header::new();
	header.set_gas_limit(0.into());
	header.set_difficulty((order * 100).into());
	header.set_timestamp((order * 10) as u64);
	header.set_number(order as u64);
	header.set_parent_hash(parent_hash);
	header.set_state_root(H256::zero());

	header
}

fn create_unverifiable_block_with_extra(order: u32, parent_hash: H256, extra: Option<Bytes>) -> Bytes {
	let mut header = create_unverifiable_block_header(order, parent_hash);
	header.set_extra_data(match extra {
		Some(extra_data) => extra_data,
		None => {
			let base = (order & 0x000000ff) as u8;
			let generated: Vec<u8> = vec![base + 1, base + 2, base + 3];
			generated
		}
	});
	create_test_block(&header)
}

fn create_unverifiable_block(order: u32, parent_hash: H256) -> Bytes {
	create_test_block(&create_unverifiable_block_header(order, parent_hash))
}

/// Creates an RLP-encoded test block with the given header, transactions and uncles
pub fn create_test_block_with_data(header: &Header, transactions: &[SignedTransaction], uncles: &[Header]) -> Bytes {
	let mut rlp = RlpStream::new_list(3);
	rlp.append(header);
	rlp.begin_list(transactions.len());
	for t in transactions {
		rlp.append_raw(&rlp::encode(t), 1);
	}
	rlp.append_list(&uncles);
	rlp.out()
}

/// Generates a dummy client (not a test client) with the given number of blocks
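///
/// # Example
///
/// A minimal sketch (`ignore`d because it needs this crate's test context);
/// `chain_info()` comes from the `ChainInfo` trait imported above:
///
/// ```ignore
/// let client = generate_dummy_client(5);
/// assert_eq!(client.chain_info().best_block_number, 5);
/// ```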
pub fn generate_dummy_client(block_number: u32) -> Arc<Client> {
	generate_dummy_client_with_spec_and_data(Spec::new_test, block_number, 0, &[])
}

/// Generates a dummy client (not a test client) with the given number of blocks
/// and the given number of transactions per block
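///
/// # Example
///
/// Sketch only (`ignore`d); the supplied gas prices are cycled through for the
/// generated transactions:
///
/// ```ignore
/// let client = generate_dummy_client_with_data(3, 1, &[U256::from(1)]);
/// ```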
pub fn generate_dummy_client_with_data(block_number: u32, txs_per_block: usize, tx_gas_prices: &[U256]) -> Arc<Client> {
	generate_dummy_client_with_spec_and_data(Spec::new_null, block_number, txs_per_block, tx_gas_prices)
}

/// Generates a dummy client (not a test client) with the given spec, number of
/// blocks and transactions per block
pub fn generate_dummy_client_with_spec_and_data<F>(test_spec: F, block_number: u32, txs_per_block: usize, tx_gas_prices: &[U256]) -> Arc<Client> where F: Fn()->Spec {
	generate_dummy_client_with_spec_accounts_and_data(test_spec, None, block_number, txs_per_block, tx_gas_prices)
}

/// Generates a dummy client (not a test client) with the given spec and accounts
pub fn generate_dummy_client_with_spec_and_accounts<F>(test_spec: F, accounts: Option<Arc<AccountProvider>>) -> Arc<Client> where F: Fn()->Spec {
	generate_dummy_client_with_spec_accounts_and_data(test_spec, accounts, 0, 0, &[])
}

/// Generates a dummy client (not a test client) with the given spec, accounts,
/// number of blocks and transactions per block
pub fn generate_dummy_client_with_spec_accounts_and_data<F>(test_spec: F, accounts: Option<Arc<AccountProvider>>, block_number: u32, txs_per_block: usize, tx_gas_prices: &[U256]) -> Arc<Client> where F: Fn()->Spec {
	let test_spec = test_spec();
	let client_db = new_db();

	let client = Client::new(
		ClientConfig::default(),
		&test_spec,
		client_db,
		Arc::new(Miner::new_for_tests(&test_spec, accounts)),
		IoChannel::disconnected(),
	).unwrap();
	let test_engine = &*test_spec.engine;

	let mut db = test_spec.ensure_db_good(get_temp_state_db(), &Default::default()).unwrap();
	let genesis_header = test_spec.genesis_header();

	let mut rolling_timestamp = 40;
	let mut last_hashes = vec![];
	let mut last_header = genesis_header.clone();

	let kp = KeyPair::from_secret_slice(&keccak("")).unwrap();
	let author = kp.address();

	let mut n = 0;
	for _ in 0..block_number {
		last_hashes.push(last_header.hash());

		// forge block.
		let mut b = OpenBlock::new(
			test_engine,
			Default::default(),
			false,
			db,
			&last_header,
			Arc::new(last_hashes.clone()),
			author.clone(),
			(3141562.into(), 31415620.into()),
			vec![],
			false,
			&mut Vec::new().into_iter(),
		).unwrap();
		rolling_timestamp += 10;
		b.set_timestamp(rolling_timestamp);

		// In the first block there is no balance yet, so no transactions can be sent.
		for _ in 0..txs_per_block {
			b.push_transaction(Transaction {
				nonce: n.into(),
				gas_price: tx_gas_prices[n % tx_gas_prices.len()],
				gas: 100000.into(),
				action: Action::Create,
				data: vec![],
				value: U256::zero(),
			}.sign(kp.secret(), Some(test_spec.chain_id())), None).unwrap();
			n += 1;
		}

		let b = b.close_and_lock().unwrap().seal(test_engine, vec![]).unwrap();

		if let Err(e) = client.import_block(Unverified::from_rlp(b.rlp_bytes()).unwrap()) {
			panic!("error importing block which is valid by definition: {:?}", e);
		}

		last_header = view!(BlockView, &b.rlp_bytes()).header();
		db = b.drain().state.drop().1;
	}
	client.flush_queue();
	client.import_verified_blocks();
	client
}

/// Adds `block_number` empty blocks to the client, starting at `starting_number`
pub fn push_blocks_to_client(client: &Arc<Client>, timestamp_salt: u64, starting_number: usize, block_number: usize) {
	let test_spec = Spec::new_test();
	let state_root = test_spec.genesis_header().state_root().clone();
	let genesis_gas = test_spec.genesis_header().gas_limit().clone();

	let mut rolling_hash = client.chain_info().best_block_hash;
	let mut rolling_block_number = starting_number as u64;
	let mut rolling_timestamp = timestamp_salt + starting_number as u64 * 10;

	for _ in 0..block_number {
		let mut header = Header::new();

		header.set_gas_limit(genesis_gas);
		header.set_difficulty(U256::from(0x20000));
		header.set_timestamp(rolling_timestamp);
		header.set_number(rolling_block_number);
		header.set_parent_hash(rolling_hash);
		header.set_state_root(state_root);

		rolling_hash = header.hash();
		rolling_block_number = rolling_block_number + 1;
		rolling_timestamp = rolling_timestamp + 10;

		if let Err(e) = client.import_block(Unverified::from_rlp(create_test_block(&header)).unwrap()) {
			panic!("error importing block which is valid by definition: {:?}", e);
		}
	}
}

/// Adds one block with the given transactions to the client
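///
/// # Example
///
/// Sketch only (`ignore`d); the transaction is built the same way as in
/// `generate_dummy_client_with_spec_accounts_and_data` above:
///
/// ```ignore
/// let client = generate_dummy_client(0);
/// let kp = KeyPair::from_secret_slice(&keccak("")).unwrap();
/// let tx = Transaction {
/// 	nonce: 0.into(),
/// 	gas_price: 0.into(),
/// 	gas: 100000.into(),
/// 	action: Action::Create,
/// 	data: vec![],
/// 	value: U256::zero(),
/// }.sign(kp.secret(), None);
/// push_block_with_transactions(&client, &[tx]);
/// ```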
pub fn push_block_with_transactions(client: &Arc<Client>, transactions: &[SignedTransaction]) {
	let test_spec = Spec::new_test();
	let test_engine = &*test_spec.engine;
	let block_number = client.chain_info().best_block_number as u64 + 1;

	let mut b = client.prepare_open_block(Address::default(), (0.into(), 5000000.into()), Bytes::new()).unwrap();
	b.set_timestamp(block_number * 10);

	for t in transactions {
		b.push_transaction(t.clone(), None).unwrap();
	}
	let b = b.close_and_lock().unwrap().seal(test_engine, vec![]).unwrap();

	if let Err(e) = client.import_block(Unverified::from_rlp(b.rlp_bytes()).unwrap()) {
		panic!("error importing block which is valid by definition: {:?}", e);
	}

	client.flush_queue();
	client.import_verified_blocks();
}

/// Creates a dummy client (not a test client) preloaded with the given blocks
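///
/// # Example
///
/// Sketch only (`ignore`d), feeding in blocks produced by
/// `get_good_dummy_block_seq` below:
///
/// ```ignore
/// let client = get_test_client_with_blocks(get_good_dummy_block_seq(5));
/// assert!(client.chain_info().best_block_number > 0);
/// ```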
pub fn get_test_client_with_blocks(blocks: Vec<Bytes>) -> Arc<Client> {
	let test_spec = Spec::new_test();
	let client_db = new_db();

	let client = Client::new(
		ClientConfig::default(),
		&test_spec,
		client_db,
		Arc::new(Miner::new_for_tests(&test_spec, None)),
		IoChannel::disconnected(),
	).unwrap();

	for block in blocks {
		if let Err(e) = client.import_block(Unverified::from_rlp(block).unwrap()) {
			panic!("error importing block which is well-formed: {:?}", e);
		}
	}
	client.flush_queue();
	client.import_verified_blocks();
	client
}

struct TestBlockChainDB {
	_blooms_dir: TempDir,
	_trace_blooms_dir: TempDir,
	blooms: blooms_db::Database,
	trace_blooms: blooms_db::Database,
	key_value: Arc<KeyValueDB>,
}

impl BlockChainDB for TestBlockChainDB {
	fn key_value(&self) -> &Arc<KeyValueDB> {
		&self.key_value
	}

	fn blooms(&self) -> &blooms_db::Database {
		&self.blooms
	}

	fn trace_blooms(&self) -> &blooms_db::Database {
		&self.trace_blooms
	}
}

/// Creates a new test instance of `BlockChainDB` with an in-memory key-value database
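///
/// # Example
///
/// Sketch only (`ignore`d); the usual transaction/write pattern used elsewhere
/// in this module applies to the key-value part:
///
/// ```ignore
/// let db = new_db();
/// let batch = db.key_value().transaction();
/// db.key_value().write(batch).unwrap();
/// ```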
pub fn new_db() -> Arc<BlockChainDB> {
	let blooms_dir = TempDir::new("").unwrap();
	let trace_blooms_dir = TempDir::new("").unwrap();

	let db = TestBlockChainDB {
		blooms: blooms_db::Database::open(blooms_dir.path()).unwrap(),
		trace_blooms: blooms_db::Database::open(trace_blooms_dir.path()).unwrap(),
		_blooms_dir: blooms_dir,
		_trace_blooms_dir: trace_blooms_dir,
		key_value: Arc::new(::kvdb_memorydb::create(::db::NUM_COLUMNS.unwrap()))
	};

	Arc::new(db)
}

/// Creates a new temporary `BlockChainDB` on the filesystem
pub fn new_temp_db(tempdir: &Path) -> Arc<BlockChainDB> {
	let blooms_dir = TempDir::new("").unwrap();
	let trace_blooms_dir = TempDir::new("").unwrap();
	let key_value_dir = tempdir.join("key_value");

	let db_config = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
	let key_value_db = Database::open(&db_config, key_value_dir.to_str().unwrap()).unwrap();

	let db = TestBlockChainDB {
		blooms: blooms_db::Database::open(blooms_dir.path()).unwrap(),
		trace_blooms: blooms_db::Database::open(trace_blooms_dir.path()).unwrap(),
		_blooms_dir: blooms_dir,
		_trace_blooms_dir: trace_blooms_dir,
		key_value: Arc::new(key_value_db)
	};

	Arc::new(db)
}

/// Creates a new instance of `BlockChainDBHandler` that opens RocksDB-backed databases
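///
/// # Example
///
/// Sketch only (`ignore`d); the config mirrors the one used in `new_temp_db`
/// above, and `db_path` is assumed to be a `&Path` pointing at a temporary
/// directory:
///
/// ```ignore
/// let handler = restoration_db_handler(DatabaseConfig::with_columns(::db::NUM_COLUMNS));
/// let db = handler.open(db_path).unwrap();
/// let _key_value = db.key_value();
/// ```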
pub fn restoration_db_handler(config: kvdb_rocksdb::DatabaseConfig) -> Box<BlockChainDBHandler> {
	struct RestorationDBHandler {
		config: kvdb_rocksdb::DatabaseConfig,
	}

	struct RestorationDB {
		blooms: blooms_db::Database,
		trace_blooms: blooms_db::Database,
		key_value: Arc<KeyValueDB>,
	}

	impl BlockChainDB for RestorationDB {
		fn key_value(&self) -> &Arc<KeyValueDB> {
			&self.key_value
		}

		fn blooms(&self) -> &blooms_db::Database {
			&self.blooms
		}

		fn trace_blooms(&self) -> &blooms_db::Database {
			&self.trace_blooms
		}
	}

	impl BlockChainDBHandler for RestorationDBHandler {
		fn open(&self, db_path: &Path) -> io::Result<Arc<BlockChainDB>> {
			let key_value = Arc::new(kvdb_rocksdb::Database::open(&self.config, &db_path.to_string_lossy())?);
			let blooms_path = db_path.join("blooms");
			let trace_blooms_path = db_path.join("trace_blooms");
			fs::create_dir_all(&blooms_path)?;
			fs::create_dir_all(&trace_blooms_path)?;
			let blooms = blooms_db::Database::open(blooms_path).unwrap();
			let trace_blooms = blooms_db::Database::open(trace_blooms_path).unwrap();
			let db = RestorationDB {
				blooms,
				trace_blooms,
				key_value,
			};
			Ok(Arc::new(db))
		}
	}

	Box::new(RestorationDBHandler { config })
}

/// Generates a dummy blockchain with the given number of blocks
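///
/// # Example
///
/// Sketch only (`ignore`d):
///
/// ```ignore
/// let bc = generate_dummy_blockchain(10);
/// let _best = bc.best_block_hash();
/// ```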
pub fn generate_dummy_blockchain(block_number: u32) -> BlockChain {
	let db = new_db();
	let bc = BlockChain::new(BlockChainConfig::default(), &create_unverifiable_block(0, H256::zero()), db.clone());

	let mut batch = db.key_value().transaction();
	for block_order in 1..block_number {
		// Total difficulty is always 0 here.
		bc.insert_block(&mut batch, encoded::Block::new(create_unverifiable_block(block_order, bc.best_block_hash())), vec![], ExtrasInsert {
			fork_choice: ::engines::ForkChoice::New,
			is_finalized: false,
		});
		bc.commit();
	}
	db.key_value().write(batch).unwrap();
	bc
}

/// Generates a dummy blockchain with the given number of blocks, where each
/// block carries generated extra data
pub fn generate_dummy_blockchain_with_extra(block_number: u32) -> BlockChain {
	let db = new_db();
	let bc = BlockChain::new(BlockChainConfig::default(), &create_unverifiable_block(0, H256::zero()), db.clone());

	let mut batch = db.key_value().transaction();
	for block_order in 1..block_number {
		// Total difficulty is always 0 here.
		bc.insert_block(&mut batch, encoded::Block::new(create_unverifiable_block_with_extra(block_order, bc.best_block_hash(), None)), vec![], ExtrasInsert {
			fork_choice: ::engines::ForkChoice::New,
			is_finalized: false,
		});
		bc.commit();
	}
	db.key_value().write(batch).unwrap();
	bc
}

/// Returns an empty dummy blockchain
pub fn generate_dummy_empty_blockchain() -> BlockChain {
	let db = new_db();
	let bc = BlockChain::new(BlockChainConfig::default(), &create_unverifiable_block(0, H256::zero()), db.clone());
	bc
}

/// Returns a temporary `State` backed by a fresh `StateDB`
pub fn get_temp_state() -> State<::state_db::StateDB> {
	let journal_db = get_temp_state_db();
	State::new(journal_db, U256::from(0), Default::default())
}

/// Returns a temporary `State` using the given EVM factory
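///
/// # Example
///
/// Sketch only (`ignore`d); `EvmFactory::default()` is assumed here purely for
/// illustration:
///
/// ```ignore
/// let state = get_temp_state_with_factory(EvmFactory::default());
/// ```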
pub fn get_temp_state_with_factory(factory: EvmFactory) -> State<::state_db::StateDB> {
	let journal_db = get_temp_state_db();
	let mut factories = Factories::default();
	factories.vm = factory.into();
	State::new(journal_db, U256::from(0), factories)
}

/// Returns a temporary `StateDB`
pub fn get_temp_state_db() -> StateDB {
	let db = new_db();
	let journal_db = ::journaldb::new(db.key_value().clone(), ::journaldb::Algorithm::EarlyMerge, ::db::COL_STATE);
	StateDB::new(journal_db, 5 * 1024 * 1024)
}

/// Returns a sequence of RLP-encoded dummy blocks built on top of the test spec genesis
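///
/// # Example
///
/// Sketch only (`ignore`d); the blocks import cleanly into a test-spec client,
/// as `get_test_client_with_blocks` above does:
///
/// ```ignore
/// let blocks = get_good_dummy_block_seq(3);
/// let client = get_test_client_with_blocks(blocks);
/// ```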
pub fn get_good_dummy_block_seq(count: usize) -> Vec<Bytes> {
	let test_spec = Spec::new_test();
	get_good_dummy_block_fork_seq(1, count, &test_spec.genesis_header().hash())
}

/// Returns a sequence of RLP-encoded dummy blocks starting from the given parent hash
pub fn get_good_dummy_block_fork_seq(start_number: usize, count: usize, parent_hash: &H256) -> Vec<Bytes> {
	let test_spec = Spec::new_test();
	let genesis_gas = test_spec.genesis_header().gas_limit().clone();
	let mut rolling_timestamp = start_number as u64 * 10;
	let mut parent = *parent_hash;
	let mut r = Vec::new();
	for i in start_number .. start_number + count + 1 {
		let mut block_header = Header::new();
		block_header.set_gas_limit(genesis_gas);
		block_header.set_difficulty(U256::from(i) * U256([0, 1, 0, 0]));
		block_header.set_timestamp(rolling_timestamp);
		block_header.set_number(i as u64);
		block_header.set_parent_hash(parent);
		block_header.set_state_root(test_spec.genesis_header().state_root().clone());

		parent = block_header.hash();
		rolling_timestamp = rolling_timestamp + 10;

		r.push(create_test_block(&block_header));
	}
	r
}

/// Returns the hash and RLP encoding of a valid dummy block
pub fn get_good_dummy_block_hash() -> (H256, Bytes) {
	let mut block_header = Header::new();
	let test_spec = Spec::new_test();
	let genesis_gas = test_spec.genesis_header().gas_limit().clone();
	block_header.set_gas_limit(genesis_gas);
	block_header.set_difficulty(U256::from(0x20000));
	block_header.set_timestamp(40);
	block_header.set_number(1);
	block_header.set_parent_hash(test_spec.genesis_header().hash());
	block_header.set_state_root(test_spec.genesis_header().state_root().clone());

	(block_header.hash(), create_test_block(&block_header))
}

/// Returns the RLP encoding of a valid dummy block
pub fn get_good_dummy_block() -> Bytes {
	let (_, bytes) = get_good_dummy_block_hash();
	bytes
}

/// Returns the RLP encoding of a dummy block with an incorrect state root
pub fn get_bad_state_dummy_block() -> Bytes {
	let mut block_header = Header::new();
	let test_spec = Spec::new_test();
	let genesis_gas = test_spec.genesis_header().gas_limit().clone();

	block_header.set_gas_limit(genesis_gas);
	block_header.set_difficulty(U256::from(0x20000));
	block_header.set_timestamp(40);
	block_header.set_number(1);
	block_header.set_parent_hash(test_spec.genesis_header().hash());
	block_header.set_state_root(0xbad.into());

	create_test_block(&block_header)
}

/// Test actor for chain events
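///
/// # Example
///
/// Sketch only (`ignore`d); `broadcast` comes from the `ChainNotify` trait
/// implemented below:
///
/// ```ignore
/// let notify = TestNotify::default();
/// notify.broadcast(ChainMessageType::Consensus(vec![1, 2, 3]));
/// assert_eq!(notify.messages.read().len(), 1);
/// ```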
#[derive(Default)]
pub struct TestNotify {
	/// Messages store
	pub messages: RwLock<Vec<Bytes>>,
}

impl ChainNotify for TestNotify {
	fn broadcast(&self, message: ChainMessageType) {
		let data = match message {
			ChainMessageType::Consensus(data) => data,
			ChainMessageType::SignedPrivateTransaction(_, data) => data,
			ChainMessageType::PrivateTransaction(_, data) => data,
		};
		self.messages.write().push(data);
	}
}