Use standard paths for Ethash cache (#5881)

* Use cache path to store ethash files.

* Fixing tests, more flexible API.

* Use AsRef<Path> everywhere.

* Fixing ethcore tests.

* Fix RPC tests.
Author: Tomasz Drwięga
Date: 2017-07-10 12:57:40 +02:00
Committed by: Arkadiy Paronyan
Parent: 125aa0aeb4
Commit: a24b6ad983
20 changed files with 210 additions and 142 deletions


@@ -48,7 +48,7 @@ pub trait Fetcher: Send + Sync + 'static {
}

pub struct ContentFetcher<F: Fetch = FetchClient, R: URLHint + 'static = URLHintContract> {
-	dapps_path: PathBuf,
+	cache_path: PathBuf,
	resolver: R,
	cache: Arc<Mutex<ContentCache>>,
	sync: Arc<SyncStatus>,
@@ -61,7 +61,7 @@ pub struct ContentFetcher<F: Fetch = FetchClient, R: URLHint + 'static = URLHint
impl<R: URLHint + 'static, F: Fetch> Drop for ContentFetcher<F, R> {
	fn drop(&mut self) {
		// Clear cache path
-		let _ = fs::remove_dir_all(&self.dapps_path);
+		let _ = fs::remove_dir_all(&self.cache_path);
	}
}
@@ -73,11 +73,11 @@ impl<R: URLHint + 'static, F: Fetch> ContentFetcher<F, R> {
		remote: Remote,
		fetch: F,
	) -> Self {
-		let mut dapps_path = env::temp_dir();
-		dapps_path.push(random_filename());
+		let mut cache_path = env::temp_dir();
+		cache_path.push(random_filename());

		ContentFetcher {
-			dapps_path: dapps_path,
+			cache_path: cache_path,
			resolver: resolver,
			sync: sync_status,
			cache: Arc::new(Mutex::new(ContentCache::default())),
@@ -200,7 +200,7 @@ impl<R: URLHint + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
				control,
				installers::Dapp::new(
					content_id.clone(),
-					self.dapps_path.clone(),
+					self.cache_path.clone(),
					Box::new(on_done),
					self.embeddable_on.clone(),
				),
@@ -219,7 +219,7 @@ impl<R: URLHint + 'static, F: Fetch> Fetcher for ContentFetcher<F, R> {
				installers::Content::new(
					content_id.clone(),
					content.mime,
-					self.dapps_path.clone(),
+					self.cache_path.clone(),
					Box::new(on_done),
				),
				self.embeddable_on.clone(),


@@ -25,7 +25,7 @@ use std::mem;
use std::ptr;
use sha3;
use std::slice;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
use std::io::{self, Read, Write};
use std::fs::{self, File};
@@ -86,6 +86,7 @@ impl Node {
pub type H256 = [u8; 32];

pub struct Light {
+	cache_dir: PathBuf,
	block_number: u64,
	cache: Vec<Node>,
	seed_compute: Mutex<SeedHashCompute>,
@@ -94,8 +95,8 @@ pub struct Light {
/// Light cache structure
impl Light {
	/// Create a new light cache for a given block number
-	pub fn new(block_number: u64) -> Light {
-		light_new(block_number)
+	pub fn new<T: AsRef<Path>>(cache_dir: T, block_number: u64) -> Light {
+		light_new(cache_dir, block_number)
	}

	/// Calculate the light boundary data
@@ -105,17 +106,15 @@ impl Light {
		light_compute(self, header_hash, nonce)
	}

-	pub fn file_path(seed_hash: H256) -> PathBuf {
-		let mut home = ::std::env::home_dir().unwrap();
-		home.push(".ethash");
-		home.push("light");
-		home.push(to_hex(&seed_hash));
-		home
+	pub fn file_path<T: AsRef<Path>>(cache_dir: T, seed_hash: H256) -> PathBuf {
+		let mut cache_dir = cache_dir.as_ref().to_path_buf();
+		cache_dir.push(to_hex(&seed_hash));
+		cache_dir
	}

-	pub fn from_file(block_number: u64) -> io::Result<Light> {
+	pub fn from_file<T: AsRef<Path>>(cache_dir: T, block_number: u64) -> io::Result<Light> {
		let seed_compute = SeedHashCompute::new();
-		let path = Light::file_path(seed_compute.get_seedhash(block_number));
+		let path = Light::file_path(&cache_dir, seed_compute.get_seedhash(block_number));
		let mut file = File::open(path)?;

		let cache_size = get_cache_size(block_number);
@@ -128,19 +127,22 @@ impl Light {
		let buf = unsafe { slice::from_raw_parts_mut(nodes.as_mut_ptr() as *mut u8, cache_size) };
		file.read_exact(buf)?;
		Ok(Light {
+			block_number,
+			cache_dir: cache_dir.as_ref().to_path_buf(),
			cache: nodes,
-			block_number: block_number,
			seed_compute: Mutex::new(seed_compute),
		})
	}

	pub fn to_file(&self) -> io::Result<PathBuf> {
		let seed_compute = self.seed_compute.lock();
-		let path = Light::file_path(seed_compute.get_seedhash(self.block_number));
+		let path = Light::file_path(&self.cache_dir, seed_compute.get_seedhash(self.block_number));

		if self.block_number >= ETHASH_EPOCH_LENGTH * 2 {
			let deprecated = Light::file_path(
-				seed_compute.get_seedhash(self.block_number - ETHASH_EPOCH_LENGTH * 2));
+				&self.cache_dir,
+				seed_compute.get_seedhash(self.block_number - ETHASH_EPOCH_LENGTH * 2)
+			);
			if deprecated.exists() {
				debug!(target: "ethash", "removing: {:?}", &deprecated);
@@ -341,14 +343,12 @@ fn calculate_dag_item(node_index: u32, cache: &[Node]) -> Node {
	}
}

-fn light_new(block_number: u64) -> Light {
+fn light_new<T: AsRef<Path>>(cache_dir: T, block_number: u64) -> Light {
	let seed_compute = SeedHashCompute::new();
	let seedhash = seed_compute.get_seedhash(block_number);
	let cache_size = get_cache_size(block_number);

-	if cache_size % NODE_BYTES != 0 {
-		panic!("Unaligned cache size");
-	}
+	assert!(cache_size % NODE_BYTES == 0, "Unaligned cache size");
	let num_nodes = cache_size / NODE_BYTES;

	let mut nodes = Vec::with_capacity(num_nodes);
@@ -372,8 +372,9 @@ fn light_new(block_number: u64) -> Light {
	}

	Light {
+		block_number,
+		cache_dir: cache_dir.as_ref().to_path_buf(),
		cache: nodes,
-		block_number: block_number,
		seed_compute: Mutex::new(seed_compute),
	}
}
@@ -432,7 +433,7 @@ fn test_light_compute() {
	let boundary = [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2, 0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a, 0xe9, 0x7e, 0x53, 0x84];
	let nonce = 0xd7b3ac70a301a249;
	// difficulty = 0x085657254bd9u64;
-	let light = Light::new(486382);
+	let light = Light::new(&::std::env::temp_dir(), 486382);
	let result = light_compute(&light, &hash, nonce);
	assert_eq!(result.mix_hash[..], mix_hash[..]);
	assert_eq!(result.value[..], boundary[..]);
@@ -471,15 +472,16 @@ fn test_seed_compute_after_newer() {

#[test]
fn test_drop_old_data() {
-	let first = Light::new(0).to_file().unwrap();
+	let path = ::std::env::temp_dir();
+	let first = Light::new(&path, 0).to_file().unwrap();

-	let second = Light::new(ETHASH_EPOCH_LENGTH).to_file().unwrap();
+	let second = Light::new(&path, ETHASH_EPOCH_LENGTH).to_file().unwrap();
	assert!(fs::metadata(&first).is_ok());

-	let _ = Light::new(ETHASH_EPOCH_LENGTH * 2).to_file();
+	let _ = Light::new(&path, ETHASH_EPOCH_LENGTH * 2).to_file();
	assert!(fs::metadata(&first).is_err());
	assert!(fs::metadata(&second).is_ok());

-	let _ = Light::new(ETHASH_EPOCH_LENGTH * 3).to_file();
+	let _ = Light::new(&path, ETHASH_EPOCH_LENGTH * 3).to_file();
	assert!(fs::metadata(&second).is_err());
}
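Taken together, the compute changes turn the cache location into an explicit argument instead of the hard-coded ~/.ethash location under the home directory. A minimal sketch of the resulting lifecycle, written as if from inside the ethash crate (Light is not re-exported publicly; the directory and block number are illustrative):

	use std::io;
	use std::path::Path;

	// Illustrative only: load the light cache for `block_number` from `cache_dir`,
	// regenerating and persisting it if it is not on disk yet.
	fn warm_cache(cache_dir: &Path, block_number: u64) -> io::Result<()> {
		let light = match Light::from_file(cache_dir, block_number) {
			Ok(light) => light,                            // found <cache_dir>/<hex seed hash>
			Err(_) => Light::new(cache_dir, block_number), // recompute in memory
		};
		// Writes to Light::file_path(cache_dir, seed_hash) and, as shown above,
		// deletes the cache from two epochs earlier if it exists.
		let _written_to = light.to_file()?;
		Ok(())
	}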


@@ -25,6 +25,7 @@ extern crate log;
mod compute;

use std::mem;
+use std::path::{Path, PathBuf};
use compute::Light;
pub use compute::{ETHASH_EPOCH_LENGTH, H256, ProofOfWork, SeedHashCompute, quick_get_difficulty, slow_get_seedhash};
@@ -41,12 +42,14 @@ struct LightCache {
/// Light/Full cache manager.
pub struct EthashManager {
	cache: Mutex<LightCache>,
+	cache_dir: PathBuf,
}

impl EthashManager {
	/// Create a new new instance of ethash manager
-	pub fn new() -> EthashManager {
+	pub fn new<T: AsRef<Path>>(cache_dir: T) -> EthashManager {
		EthashManager {
+			cache_dir: cache_dir.as_ref().to_path_buf(),
			cache: Mutex::new(LightCache {
				recent_epoch: None,
				recent: None,
@@ -88,11 +91,11 @@ impl EthashManager {
		};
		match light {
			None => {
-				let light = match Light::from_file(block_number) {
+				let light = match Light::from_file(&self.cache_dir, block_number) {
					Ok(light) => Arc::new(light),
					Err(e) => {
						debug!("Light cache file not found for {}:{}", block_number, e);
-						let light = Light::new(block_number);
+						let light = Light::new(&self.cache_dir, block_number);
						if let Err(e) = light.to_file() {
							warn!("Light cache file write error: {}", e);
						}
@@ -112,7 +115,7 @@ impl EthashManager {
#[test]
fn test_lru() {
-	let ethash = EthashManager::new();
+	let ethash = EthashManager::new(&::std::env::temp_dir());
	let hash = [0u8; 32];
	ethash.compute_light(1, &hash, 1);
	ethash.compute_light(50000, &hash, 1);
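From the caller's side the only visible difference is that EthashManager now takes the cache directory once at construction; the from_file/new fallback above stays internal. A hedged usage sketch mirroring the updated test_lru test (the directory name is illustrative):

	use std::env;

	fn example() {
		// Parity passes its per-user cache directory here; any writable path works.
		let cache_dir = env::temp_dir().join("ethash-example");
		let manager = ethash::EthashManager::new(&cache_dir);

		// Unchanged call: block number, header hash, nonce.
		let header_hash = [0u8; 32];
		let _ = manager.compute_light(1, &header_hash, 1);
	}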


@@ -266,7 +266,7 @@ mod tests {
	/// Create a new test chain spec with `BasicAuthority` consensus engine.
	fn new_test_authority() -> Spec {
		let bytes: &[u8] = include_bytes!("../../res/basic_authority.json");
-		Spec::load(bytes).expect("invalid chain spec")
+		Spec::load(::std::env::temp_dir(), bytes).expect("invalid chain spec")
	}

	#[test]


@@ -386,7 +386,6 @@ pub trait Engine : Sync + Send {
	}
}

/// Common engine utilities
pub mod common {
	use block::ExecutedBlock;


@@ -14,6 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

+use std::path::Path;
use ethash::{quick_get_difficulty, slow_get_seedhash, EthashManager};
use util::*;
use block::*;
@@ -24,7 +25,7 @@ use header::{Header, BlockNumber};
use state::CleanupMode;
use spec::CommonParams;
use transaction::UnverifiedTransaction;
-use engines::Engine;
+use engines::{self, Engine};
use evm::Schedule;
use ethjson;
use rlp::{self, UntrustedRlp};
@@ -147,12 +148,17 @@ pub struct Ethash {
impl Ethash {
	/// Create a new instance of Ethash engine
-	pub fn new(params: CommonParams, ethash_params: EthashParams, builtins: BTreeMap<Address, Builtin>) -> Arc<Self> {
+	pub fn new<T: AsRef<Path>>(
+		cache_dir: T,
+		params: CommonParams,
+		ethash_params: EthashParams,
+		builtins: BTreeMap<Address, Builtin>,
+	) -> Arc<Self> {
		Arc::new(Ethash {
-			params: params,
-			ethash_params: ethash_params,
-			builtins: builtins,
-			pow: EthashManager::new(),
+			params,
+			ethash_params,
+			builtins,
+			pow: EthashManager::new(cache_dir),
		})
	}
}
@@ -165,7 +171,7 @@ impl Ethash {
// for any block in the chain.
// in the future, we might move the Ethash epoch
// caching onto this mechanism as well.
-impl ::engines::EpochVerifier for Arc<Ethash> {
+impl engines::EpochVerifier for Arc<Ethash> {
	fn verify_light(&self, _header: &Header) -> Result<(), Error> { Ok(()) }
	fn verify_heavy(&self, header: &Header) -> Result<(), Error> {
		self.verify_block_unordered(header, None)
@@ -262,7 +268,7 @@ impl Engine for Arc<Ethash> {
		_begins_epoch: bool,
	) -> Result<(), Error> {
		let parent_hash = block.fields().header.parent_hash().clone();
-		::engines::common::push_last_hash(block, last_hashes, self, &parent_hash)?;
+		engines::common::push_last_hash(block, last_hashes, self, &parent_hash)?;
		if block.fields().header.number() == self.ethash_params.dao_hardfork_transition {
			let state = block.fields_mut().state;
			for child in &self.ethash_params.dao_hardfork_accounts {
@@ -404,8 +410,8 @@ impl Engine for Arc<Ethash> {
		Ok(())
	}

-	fn epoch_verifier<'a>(&self, _header: &Header, _proof: &'a [u8]) -> ::engines::ConstructedVerifier<'a> {
-		::engines::ConstructedVerifier::Trusted(Box::new(self.clone()))
+	fn epoch_verifier<'a>(&self, _header: &Header, _proof: &'a [u8]) -> engines::ConstructedVerifier<'a> {
+		engines::ConstructedVerifier::Trusted(Box::new(self.clone()))
	}

	fn snapshot_components(&self) -> Option<Box<::snapshot::SnapshotComponents>> {
@@ -558,13 +564,18 @@ mod tests {
	use engines::Engine;
	use error::{BlockError, Error};
	use header::Header;
+	use spec::Spec;
	use super::super::{new_morden, new_homestead_test};
	use super::{Ethash, EthashParams, PARITY_GAS_LIMIT_DETERMINANT, ecip1017_eras_block_reward};
	use rlp;

+	fn test_spec() -> Spec {
+		new_morden(&::std::env::temp_dir())
+	}
+
	#[test]
	fn on_close_block() {
-		let spec = new_morden();
+		let spec = test_spec();
		let engine = &*spec.engine;
		let genesis_header = spec.genesis_header();
		let db = spec.ensure_db_good(get_temp_state_db(), &Default::default()).unwrap();
@@ -576,7 +587,7 @@ mod tests {
	#[test]
	fn on_close_block_with_uncle() {
-		let spec = new_morden();
+		let spec = test_spec();
		let engine = &*spec.engine;
		let genesis_header = spec.genesis_header();
		let db = spec.ensure_db_good(get_temp_state_db(), &Default::default()).unwrap();
@@ -594,14 +605,14 @@ mod tests {
	#[test]
	fn has_valid_metadata() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		assert!(!engine.name().is_empty());
		assert!(engine.version().major >= 1);
	}

	#[test]
	fn can_return_schedule() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let schedule = engine.schedule(10000000);
		assert!(schedule.stack_limit > 0);
@@ -611,8 +622,8 @@ mod tests {
	#[test]
	fn can_do_seal_verification_fail() {
-		let engine = new_morden().engine;
-		//let engine = Ethash::new_test(new_morden());
+		let engine = test_spec().engine;
+		//let engine = Ethash::new_test(test_spec());
		let header: Header = Header::default();

		let verify_result = engine.verify_block_basic(&header, None);
@@ -626,7 +637,7 @@ mod tests {
	#[test]
	fn can_do_difficulty_verification_fail() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let mut header: Header = Header::default();
		header.set_seal(vec![rlp::encode(&H256::zero()).into_vec(), rlp::encode(&H64::zero()).into_vec()]);
@@ -641,7 +652,7 @@ mod tests {
	#[test]
	fn can_do_proof_of_work_verification_fail() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let mut header: Header = Header::default();
		header.set_seal(vec![rlp::encode(&H256::zero()).into_vec(), rlp::encode(&H64::zero()).into_vec()]);
		header.set_difficulty(U256::from_str("ffffffffffffffffffffffffffffffffffffffffffffaaaaaaaaaaaaaaaaaaaa").unwrap());
@@ -657,7 +668,7 @@ mod tests {
	#[test]
	fn can_do_seal_unordered_verification_fail() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let header: Header = Header::default();

		let verify_result = engine.verify_block_unordered(&header, None);
@@ -671,7 +682,7 @@ mod tests {
	#[test]
	fn can_do_seal256_verification_fail() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let mut header: Header = Header::default();
		header.set_seal(vec![rlp::encode(&H256::zero()).into_vec(), rlp::encode(&H64::zero()).into_vec()]);
		let verify_result = engine.verify_block_unordered(&header, None);
@@ -685,7 +696,7 @@ mod tests {
	#[test]
	fn can_do_proof_of_work_unordered_verification_fail() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let mut header: Header = Header::default();
		header.set_seal(vec![rlp::encode(&H256::from("b251bd2e0283d0658f2cadfdc8ca619b5de94eca5742725e2e757dd13ed7503d")).into_vec(), rlp::encode(&H64::zero()).into_vec()]);
		header.set_difficulty(U256::from_str("ffffffffffffffffffffffffffffffffffffffffffffaaaaaaaaaaaaaaaaaaaa").unwrap());
@@ -701,7 +712,7 @@ mod tests {
	#[test]
	fn can_verify_block_family_genesis_fail() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let header: Header = Header::default();
		let parent_header: Header = Header::default();
@@ -716,7 +727,7 @@ mod tests {
	#[test]
	fn can_verify_block_family_difficulty_fail() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let mut header: Header = Header::default();
		header.set_number(2);
		let mut parent_header: Header = Header::default();
@@ -733,7 +744,7 @@ mod tests {
	#[test]
	fn can_verify_block_family_gas_fail() {
-		let engine = new_morden().engine;
+		let engine = test_spec().engine;
		let mut header: Header = Header::default();
		header.set_number(2);
		header.set_difficulty(U256::from_str("0000000000000000000000000000000000000000000000000000000000020000").unwrap());
@@ -763,7 +774,7 @@ mod tests {
	fn difficulty_frontier() {
		let spec = new_homestead_test();
		let ethparams = get_default_ethash_params();
-		let ethash = Ethash::new(spec.params().clone(), ethparams, BTreeMap::new());
+		let ethash = Ethash::new(&::std::env::temp_dir(), spec.params().clone(), ethparams, BTreeMap::new());
		let mut parent_header = Header::default();
		parent_header.set_number(1000000);
@@ -781,7 +792,7 @@ mod tests {
	fn difficulty_homestead() {
		let spec = new_homestead_test();
		let ethparams = get_default_ethash_params();
-		let ethash = Ethash::new(spec.params().clone(), ethparams, BTreeMap::new());
+		let ethash = Ethash::new(&::std::env::temp_dir(), spec.params().clone(), ethparams, BTreeMap::new());
		let mut parent_header = Header::default();
		parent_header.set_number(1500000);
@@ -838,7 +849,7 @@ mod tests {
			ecip1010_pause_transition: 3000000,
			..get_default_ethash_params()
		};
-		let ethash = Ethash::new(spec.params().clone(), ethparams, BTreeMap::new());
+		let ethash = Ethash::new(&::std::env::temp_dir(), spec.params().clone(), ethparams, BTreeMap::new());
		let mut parent_header = Header::default();
		parent_header.set_number(3500000);
@@ -872,7 +883,7 @@ mod tests {
			ecip1010_continue_transition: 5000000,
			..get_default_ethash_params()
		};
-		let ethash = Ethash::new(spec.params().clone(), ethparams, BTreeMap::new());
+		let ethash = Ethash::new(&::std::env::temp_dir(), spec.params().clone(), ethparams, BTreeMap::new());
		let mut parent_header = Header::default();
		parent_header.set_number(5000102);
@@ -917,7 +928,8 @@ mod tests {
	#[test]
	fn gas_limit_is_multiple_of_determinant() {
		let spec = new_homestead_test();
-		let ethash = Ethash::new(spec.params().clone(), get_default_ethash_params(), BTreeMap::new());
+		let ethparams = get_default_ethash_params();
+		let ethash = Ethash::new(&::std::env::temp_dir(), spec.params().clone(), ethparams, BTreeMap::new());
		let mut parent = Header::new();
		let mut header = Header::new();
		header.set_number(1);
@@ -961,7 +973,7 @@ mod tests {
	fn difficulty_max_timestamp() {
		let spec = new_homestead_test();
		let ethparams = get_default_ethash_params();
-		let ethash = Ethash::new(spec.params().clone(), ethparams, BTreeMap::new());
+		let ethash = Ethash::new(&::std::env::temp_dir(), spec.params().clone(), ethparams, BTreeMap::new());
		let mut parent_header = Header::default();
		parent_header.set_number(1000000);
@@ -989,7 +1001,7 @@ mod tests {
		header.set_number(parent_header.number() + 1);
		header.set_gas_limit(100_001.into());
		header.set_difficulty(ethparams.minimum_difficulty);
-		let ethash = Ethash::new(spec.params().clone(), ethparams, BTreeMap::new());
+		let ethash = Ethash::new(&::std::env::temp_dir(), spec.params().clone(), ethparams, BTreeMap::new());
		assert!(ethash.verify_block_family(&header, &parent_header, None).is_ok());

		parent_header.set_number(9);
@@ -1044,7 +1056,7 @@ mod tests {
			nonce: U256::zero(),
		}.sign(keypair.secret(), None).into();

-		let ethash = Ethash::new(spec.params().clone(), ethparams, BTreeMap::new());
+		let ethash = Ethash::new(&::std::env::temp_dir(), spec.params().clone(), ethparams, BTreeMap::new());
		assert!(ethash.verify_transaction_basic(&tx1, &header).is_ok());
		assert!(ethash.verify_transaction_basic(&tx2, &header).is_ok());
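The call-site migration for the engine is mechanical: the cache directory becomes the first argument of Ethash::new and everything else keeps its position, as the updated tests above show. In compact form:

	// Before: Ethash::new(params, ethash_params, builtins)
	// After:  the ethash light-cache directory comes first.
	let ethash = Ethash::new(
		&::std::env::temp_dir(),   // tests use a temp dir; Parity itself wires in its cache directory
		spec.params().clone(),
		get_default_ethash_params(),
		BTreeMap::new(),
	);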


@@ -27,6 +27,7 @@ pub mod denominations;
pub use self::ethash::{Ethash};
pub use self::denominations::*;

+use std::path::Path;
use super::spec::*;

/// Most recent fork block that we support on Mainnet.
@@ -38,51 +39,56 @@ pub const FORK_SUPPORTED_ROPSTEN: u64 = 10;
/// Most recent fork block that we support on Kovan.
pub const FORK_SUPPORTED_KOVAN: u64 = 0;

-fn load(b: &[u8]) -> Spec {
-	Spec::load(b).expect("chain spec is invalid")
+fn load<'a, T: 'a + Into<Option<&'a Path>>>(cache_dir: T, b: &[u8]) -> Spec {
+	match cache_dir.into() {
+		Some(path) => Spec::load(path, b),
+		None => Spec::load(&::std::env::temp_dir(), b)
+	}.expect("chain spec is invalid")
}

/// Create a new Foundation Olympic chain spec.
-pub fn new_olympic() -> Spec { load(include_bytes!("../../res/ethereum/olympic.json")) }
+pub fn new_olympic(cache_dir: &Path) -> Spec { load(cache_dir, include_bytes!("../../res/ethereum/olympic.json")) }

/// Create a new Foundation Mainnet chain spec.
-pub fn new_foundation() -> Spec { load(include_bytes!("../../res/ethereum/foundation.json")) }
+pub fn new_foundation(cache_dir: &Path) -> Spec { load(cache_dir, include_bytes!("../../res/ethereum/foundation.json")) }

/// Create a new Classic Mainnet chain spec without the DAO hardfork.
-pub fn new_classic() -> Spec { load(include_bytes!("../../res/ethereum/classic.json")) }
+pub fn new_classic(cache_dir: &Path) -> Spec { load(cache_dir, include_bytes!("../../res/ethereum/classic.json")) }

/// Create a new Expanse mainnet chain spec.
-pub fn new_expanse() -> Spec { load(include_bytes!("../../res/ethereum/expanse.json")) }
+pub fn new_expanse(cache_dir: &Path) -> Spec { load(cache_dir, include_bytes!("../../res/ethereum/expanse.json")) }

/// Create a new Kovan testnet chain spec.
-pub fn new_kovan() -> Spec { load(include_bytes!("../../res/ethereum/kovan.json")) }
+pub fn new_kovan(cache_dir: &Path) -> Spec { load(cache_dir, include_bytes!("../../res/ethereum/kovan.json")) }

-/// Create a new Foundation Frontier-era chain spec as though it never changes to Homestead.
-pub fn new_frontier_test() -> Spec { load(include_bytes!("../../res/ethereum/frontier_test.json")) }
-/// Create a new Foundation Homestead-era chain spec as though it never changed from Frontier.
-pub fn new_homestead_test() -> Spec { load(include_bytes!("../../res/ethereum/homestead_test.json")) }
-/// Create a new Foundation Homestead-EIP150-era chain spec as though it never changed from Homestead/Frontier.
-pub fn new_eip150_test() -> Spec { load(include_bytes!("../../res/ethereum/eip150_test.json")) }
-/// Create a new Foundation Homestead-EIP161-era chain spec as though it never changed from Homestead/Frontier.
-pub fn new_eip161_test() -> Spec { load(include_bytes!("../../res/ethereum/eip161_test.json")) }
-/// Create a new Foundation Frontier/Homestead/DAO chain spec with transition points at #5 and #8.
-pub fn new_transition_test() -> Spec { load(include_bytes!("../../res/ethereum/transition_test.json")) }
-/// Create a new Foundation Mainnet chain spec without genesis accounts.
-pub fn new_mainnet_like() -> Spec { load(include_bytes!("../../res/ethereum/frontier_like_test.json")) }
-/// Create a new Foundation Metropolis era spec.
-pub fn new_metropolis_test() -> Spec { load(include_bytes!("../../res/ethereum/metropolis_test.json")) }
-
/// Create a new Foundation Ropsten chain spec.
-pub fn new_ropsten() -> Spec { load(include_bytes!("../../res/ethereum/ropsten.json")) }
+pub fn new_ropsten(cache_dir: &Path) -> Spec { load(cache_dir, include_bytes!("../../res/ethereum/ropsten.json")) }

/// Create a new Morden chain spec.
-pub fn new_morden() -> Spec { load(include_bytes!("../../res/ethereum/morden.json")) }
+pub fn new_morden(cache_dir: &Path) -> Spec { load(cache_dir, include_bytes!("../../res/ethereum/morden.json")) }

+// For tests
+
+/// Create a new Foundation Frontier-era chain spec as though it never changes to Homestead.
+pub fn new_frontier_test() -> Spec { load(None, include_bytes!("../../res/ethereum/frontier_test.json")) }
+/// Create a new Foundation Homestead-era chain spec as though it never changed from Frontier.
+pub fn new_homestead_test() -> Spec { load(None, include_bytes!("../../res/ethereum/homestead_test.json")) }
+/// Create a new Foundation Homestead-EIP150-era chain spec as though it never changed from Homestead/Frontier.
+pub fn new_eip150_test() -> Spec { load(None, include_bytes!("../../res/ethereum/eip150_test.json")) }
+/// Create a new Foundation Homestead-EIP161-era chain spec as though it never changed from Homestead/Frontier.
+pub fn new_eip161_test() -> Spec { load(None, include_bytes!("../../res/ethereum/eip161_test.json")) }
+/// Create a new Foundation Frontier/Homestead/DAO chain spec with transition points at #5 and #8.
+pub fn new_transition_test() -> Spec { load(None, include_bytes!("../../res/ethereum/transition_test.json")) }
+/// Create a new Foundation Mainnet chain spec without genesis accounts.
+pub fn new_mainnet_like() -> Spec { load(None, include_bytes!("../../res/ethereum/frontier_like_test.json")) }
+/// Create a new Foundation Metropolis era spec.
+pub fn new_metropolis_test() -> Spec { load(None, include_bytes!("../../res/ethereum/metropolis_test.json")) }

#[cfg(test)]
mod tests {
@@ -94,7 +100,7 @@ mod tests {
	#[test]
	fn ensure_db_good() {
-		let spec = new_morden();
+		let spec = new_morden(&::std::env::temp_dir());
		let engine = &spec.engine;
		let genesis_header = spec.genesis_header();
		let db = spec.ensure_db_good(get_temp_state_db(), &Default::default()).unwrap();
@@ -109,7 +115,7 @@ mod tests {
	#[test]
	fn morden() {
-		let morden = new_morden();
+		let morden = new_morden(&::std::env::temp_dir());
		assert_eq!(morden.state_root(), "f3f4696bbf3b3b07775128eb7a3763279a394e382130f27c21e70233e04946a9".into());
		let genesis = morden.genesis_block();
@@ -120,7 +126,7 @@ mod tests {
	#[test]
	fn frontier() {
-		let frontier = new_foundation();
+		let frontier = new_foundation(&::std::env::temp_dir());
		assert_eq!(frontier.state_root(), "d7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544".into());
		let genesis = frontier.genesis_block();
@@ -128,4 +134,23 @@ mod tests {
		let _ = frontier.engine;
	}

+	#[test]
+	fn all_spec_files_valid() {
+		let tmp = ::std::env::temp_dir();
+		new_olympic(&tmp);
+		new_foundation(&tmp);
+		new_classic(&tmp);
+		new_expanse(&tmp);
+		new_kovan(&tmp);
+		new_ropsten(&tmp);
+		new_morden(&tmp);
+		new_frontier_test();
+		new_homestead_test();
+		new_eip150_test();
+		new_eip161_test();
+		new_transition_test();
+		new_mainnet_like();
+		new_metropolis_test();
+	}
}
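The reworked load helper uses Into&lt;Option&lt;&Path&gt;&gt; so that the network constructors take a real cache directory while the test-only constructors pass None and silently fall back to a temporary directory. A standalone sketch of the same pattern (the function name is illustrative, not from the codebase):

	use std::env;
	use std::path::{Path, PathBuf};

	// `None` means "use a throwaway temp dir", exactly as in `load` above.
	fn resolve_cache_dir<'a, T: 'a + Into<Option<&'a Path>>>(cache_dir: T) -> PathBuf {
		match cache_dir.into() {
			Some(path) => path.to_path_buf(),
			None => env::temp_dir(),
		}
	}

	fn main() {
		let for_node = resolve_cache_dir(Path::new("/tmp/parity-cache"));
		let for_test = resolve_cache_dir(None);
		println!("{:?} vs {:?}", for_node, for_test);
	}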


@@ -32,7 +32,7 @@
//! use ethcore::miner::{Miner, MinerService};
//!
//! fn main() {
-//! 	let miner: Miner = Miner::with_spec(&ethereum::new_foundation());
+//! 	let miner: Miner = Miner::with_spec(&ethereum::new_foundation(&env::temp_dir()));
//! 	// get status
//! 	assert_eq!(miner.status().transactions_in_pending_queue, 0);
//!


@@ -59,7 +59,7 @@ lazy_static! {
/// `native_contracts::test_contracts::ValidatorSet` provides a native wrapper for the ABi.
fn spec_fixed_to_contract() -> Spec {
	let data = include_bytes!("test_validator_contract.json");
-	Spec::load(&data[..]).unwrap()
+	Spec::load(&::std::env::temp_dir(), &data[..]).unwrap()
}

// creates an account provider, filling it with accounts from all the given


@@ -158,7 +158,7 @@ pub struct Spec {
	genesis_state: PodState,
}

-fn load_from(s: ethjson::spec::Spec) -> Result<Spec, Error> {
+fn load_from<T: AsRef<Path>>(cache_dir: T, s: ethjson::spec::Spec) -> Result<Spec, Error> {
	let builtins = s.accounts.builtins().into_iter().map(|p| (p.0.into(), From::from(p.1))).collect();
	let g = Genesis::from(s.genesis);
	let GenericSeal(seal_rlp) = g.seal.into();
@@ -166,7 +166,7 @@ fn load_from(s: ethjson::spec::Spec) -> Result<Spec, Error> {
	let mut s = Spec {
		name: s.name.clone().into(),
-		engine: Spec::engine(s.engine, params, builtins),
+		engine: Spec::engine(cache_dir, s.engine, params, builtins),
		data_dir: s.data_dir.unwrap_or(s.name).into(),
		nodes: s.nodes.unwrap_or_else(Vec::new),
		parent_hash: g.parent_hash,
@@ -195,18 +195,26 @@ fn load_from(s: ethjson::spec::Spec) -> Result<Spec, Error> {
macro_rules! load_bundled {
	($e:expr) => {
-		Spec::load(include_bytes!(concat!("../../res/", $e, ".json")) as &[u8]).expect(concat!("Chain spec ", $e, " is invalid."))
+		Spec::load(
+			&::std::env::temp_dir(),
+			include_bytes!(concat!("../../res/", $e, ".json")) as &[u8]
+		).expect(concat!("Chain spec ", $e, " is invalid."))
	};
}

impl Spec {
	/// Convert engine spec into a arc'd Engine of the right underlying type.
	/// TODO avoid this hard-coded nastiness - use dynamic-linked plugin framework instead.
-	fn engine(engine_spec: ethjson::spec::Engine, params: CommonParams, builtins: BTreeMap<Address, Builtin>) -> Arc<Engine> {
+	fn engine<T: AsRef<Path>>(
+		cache_dir: T,
+		engine_spec: ethjson::spec::Engine,
+		params: CommonParams,
+		builtins: BTreeMap<Address, Builtin>,
+	) -> Arc<Engine> {
		match engine_spec {
			ethjson::spec::Engine::Null => Arc::new(NullEngine::new(params, builtins)),
			ethjson::spec::Engine::InstantSeal(instant) => Arc::new(InstantSeal::new(params, instant.params.registrar.map_or_else(Address::new, Into::into), builtins)),
-			ethjson::spec::Engine::Ethash(ethash) => Arc::new(ethereum::Ethash::new(params, From::from(ethash.params), builtins)),
+			ethjson::spec::Engine::Ethash(ethash) => Arc::new(ethereum::Ethash::new(cache_dir, params, From::from(ethash.params), builtins)),
			ethjson::spec::Engine::BasicAuthority(basic_authority) => Arc::new(BasicAuthority::new(params, From::from(basic_authority.params), builtins)),
			ethjson::spec::Engine::AuthorityRound(authority_round) => AuthorityRound::new(params, From::from(authority_round.params), builtins).expect("Failed to start AuthorityRound consensus engine."),
			ethjson::spec::Engine::Tendermint(tendermint) => Tendermint::new(params, From::from(tendermint.params), builtins).expect("Failed to start the Tendermint consensus engine."),
@@ -397,13 +405,13 @@ impl Spec {
	/// Loads spec from json file. Provide factories for executing contracts and ensuring
	/// storage goes to the right place.
-	pub fn load<R>(reader: R) -> Result<Self, String> where R: Read {
+	pub fn load<T: AsRef<Path>, R>(cache_dir: T, reader: R) -> Result<Self, String> where R: Read {
		fn fmt<F: ::std::fmt::Display>(f: F) -> String {
			format!("Spec json is invalid: {}", f)
		}

		ethjson::spec::Spec::load(reader).map_err(fmt)
-			.and_then(|x| load_from(x).map_err(fmt))
+			.and_then(|x| load_from(cache_dir, x).map_err(fmt))
	}

	/// Create a new Spec which conforms to the Frontier-era Morden chain except that it's a NullEngine consensus.
@@ -453,7 +461,7 @@ mod tests {
	// https://github.com/paritytech/parity/issues/1840
	#[test]
	fn test_load_empty() {
-		assert!(Spec::load(&[] as &[u8]).is_err());
+		assert!(Spec::load(::std::env::temp_dir(), &[] as &[u8]).is_err());
	}

	#[test]


@@ -52,7 +52,7 @@ fn imports_from_empty() {
#[test]
fn should_return_registrar() {
	let dir = RandomTempPath::new();
-	let spec = ethereum::new_morden();
+	let spec = ethereum::new_morden(&dir);
	let db_config = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
	let client_db = Arc::new(Database::open(&db_config, dir.as_path().to_str().unwrap()).unwrap());


@@ -71,7 +71,7 @@ pub fn execute(cmd: AccountCmd) -> Result<String, String> {
}

fn keys_dir(path: String, spec: SpecType) -> Result<RootDiskDirectory, String> {
-	let spec = spec.spec()?;
+	let spec = spec.spec(&::std::env::temp_dir())?;
	let mut path = PathBuf::from(&path);
	path.push(spec.data_dir);
	RootDiskDirectory::create(path).map_err(|e| format!("Could not open keys directory: {}", e))


@@ -148,7 +148,7 @@ fn execute_import(cmd: ImportBlockchain) -> Result<(), String> {
	let timer = Instant::now();

	// load spec file
-	let spec = cmd.spec.spec()?;
+	let spec = cmd.spec.spec(&cmd.dirs.cache)?;

	// load genesis hash
	let genesis_hash = spec.genesis_header().hash();
@@ -320,7 +320,7 @@ fn start_client(
) -> Result<ClientService, String> {
	// load spec file
-	let spec = spec.spec()?;
+	let spec = spec.spec(&dirs.cache)?;

	// load genesis hash
	let genesis_hash = spec.genesis_header().hash();
@@ -517,7 +517,7 @@ fn execute_export_state(cmd: ExportState) -> Result<(), String> {
}

pub fn kill_db(cmd: KillBlockchain) -> Result<(), String> {
-	let spec = cmd.spec.spec()?;
+	let spec = cmd.spec.spec(&cmd.dirs.cache)?;
	let genesis_hash = spec.genesis_header().hash();
	let db_dirs = cmd.dirs.database(genesis_hash, None, spec.data_dir);
	let user_defaults_path = db_dirs.user_defaults_path();


@@ -34,7 +34,7 @@ use rpc::{IpcConfiguration, HttpConfiguration, WsConfiguration, UiConfiguration}
use rpc_apis::ApiSet;
use parity_rpc::NetworkSettings;
use cache::CacheConfig;
-use helpers::{to_duration, to_mode, to_block_id, to_u256, to_pending_set, to_price, replace_home, replace_home_for_db,
+use helpers::{to_duration, to_mode, to_block_id, to_u256, to_pending_set, to_price, replace_home, replace_home_and_local,
	geth_ipc_path, parity_ipc_path, to_bootnodes, to_addresses, to_address, to_gas_limit, to_queue_strategy};
use params::{SpecType, ResealPolicy, AccountsConfig, GasPricerConfig, MinerExtras, Pruning, Switch};
use ethcore_logger::Config as LogConfig;
@@ -893,14 +893,20 @@ impl Configuration {
		let local_path = default_local_path();
		let base_path = self.args.flag_base_path.as_ref().or_else(|| self.args.flag_datadir.as_ref()).map_or_else(|| default_data_path(), |s| s.clone());
		let data_path = replace_home("", &base_path);
-		let base_db_path = if self.args.flag_base_path.is_some() && self.args.flag_db_path.is_none() {
-			// If base_path is set and db_path is not we default to base path subdir instead of LOCAL.
+		let is_using_base_path = self.args.flag_base_path.is_some();
+		// If base_path is set and db_path is not we default to base path subdir instead of LOCAL.
+		let base_db_path = if is_using_base_path && self.args.flag_db_path.is_none() {
			"$BASE/chains"
		} else {
			self.args.flag_db_path.as_ref().map_or(dir::CHAINS_PATH, |s| &s)
		};
+		let cache_path = if is_using_base_path {
+			"$BASE/cache".into()
+		} else {
+			replace_home_and_local(&data_path, &local_path, &dir::CACHE_PATH)
+		};

-		let db_path = replace_home_for_db(&data_path, &local_path, &base_db_path);
+		let db_path = replace_home_and_local(&data_path, &local_path, &base_db_path);
		let keys_path = replace_home(&data_path, &self.args.flag_keys_path);
		let dapps_path = replace_home(&data_path, &self.args.flag_dapps_path);
		let secretstore_path = replace_home(&data_path, &self.args.flag_secretstore_path);
@@ -924,6 +930,7 @@ impl Configuration {
		Directories {
			keys: keys_path,
			base: data_path,
+			cache: cache_path,
			db: db_path,
			dapps: dapps_path,
			signer: ui_path,


@@ -18,7 +18,7 @@ use std::fs;
use std::path::{PathBuf, Path};
use util::{H64, H256};
use util::journaldb::Algorithm;
-use helpers::{replace_home, replace_home_for_db};
+use helpers::{replace_home, replace_home_and_local};
use app_dirs::{AppInfo, get_app_root, AppDataType};

#[cfg(target_os = "macos")] const AUTHOR: &'static str = "Parity";
@@ -34,6 +34,9 @@ use app_dirs::{AppInfo, get_app_root, AppDataType};
#[cfg(target_os = "windows")] pub const CHAINS_PATH: &'static str = "$LOCAL/chains";
#[cfg(not(target_os = "windows"))] pub const CHAINS_PATH: &'static str = "$BASE/chains";

+#[cfg(target_os = "windows")] pub const CACHE_PATH: &'static str = "$LOCAL/cache";
+#[cfg(not(target_os = "windows"))] pub const CACHE_PATH: &'static str = "$BASE/cache";
+
// this const is irrelevent cause we do have migrations now,
// but we still use it for backwards compatibility
const LEGACY_CLIENT_DB_VER_STR: &'static str = "5.3";
@@ -42,6 +45,7 @@ const LEGACY_CLIENT_DB_VER_STR: &'static str = "5.3";
pub struct Directories {
	pub base: String,
	pub db: String,
+	pub cache: String,
	pub keys: String,
	pub signer: String,
	pub dapps: String,
@@ -54,7 +58,8 @@ impl Default for Directories {
		let local_dir = default_local_path();
		Directories {
			base: replace_home(&data_dir, "$BASE"),
-			db: replace_home_for_db(&data_dir, &local_dir, CHAINS_PATH),
+			db: replace_home_and_local(&data_dir, &local_dir, CHAINS_PATH),
+			cache: replace_home_and_local(&data_dir, &local_dir, CACHE_PATH),
			keys: replace_home(&data_dir, "$BASE/keys"),
			signer: replace_home(&data_dir, "$BASE/signer"),
			dapps: replace_home(&data_dir, "$BASE/dapps"),
@@ -67,6 +72,7 @@ impl Directories {
	pub fn create_dirs(&self, dapps_enabled: bool, signer_enabled: bool, secretstore_enabled: bool) -> Result<(), String> {
		fs::create_dir_all(&self.base).map_err(|e| e.to_string())?;
		fs::create_dir_all(&self.db).map_err(|e| e.to_string())?;
+		fs::create_dir_all(&self.cache).map_err(|e| e.to_string())?;
		fs::create_dir_all(&self.keys).map_err(|e| e.to_string())?;
		if signer_enabled {
			fs::create_dir_all(&self.signer).map_err(|e| e.to_string())?;
@@ -231,7 +237,7 @@ pub fn default_hypervisor_path() -> String {
#[cfg(test)]
mod tests {
	use super::Directories;
-	use helpers::{replace_home, replace_home_for_db};
+	use helpers::{replace_home, replace_home_and_local};

	#[test]
	fn test_default_directories() {
@@ -239,10 +245,14 @@ mod tests {
		let local_dir = super::default_local_path();
		let expected = Directories {
			base: replace_home(&data_dir, "$BASE"),
-			db: replace_home_for_db(&data_dir, &local_dir,
+			db: replace_home_and_local(&data_dir, &local_dir,
				if cfg!(target_os = "windows") { "$LOCAL/chains" }
				else { "$BASE/chains" }
			),
+			cache: replace_home_and_local(&data_dir, &local_dir,
+				if cfg!(target_os = "windows") { "$LOCAL/cache" }
+				else { "$BASE/cache" }
+			),
			keys: replace_home(&data_dir, "$BASE/keys"),
			signer: replace_home(&data_dir, "$BASE/signer"),
			dapps: replace_home(&data_dir, "$BASE/dapps"),


@@ -140,7 +140,7 @@ pub fn replace_home(base: &str, arg: &str) -> String {
	r.replace("/", &::std::path::MAIN_SEPARATOR.to_string())
}

-pub fn replace_home_for_db(base: &str, local: &str, arg: &str) -> String {
+pub fn replace_home_and_local(base: &str, local: &str, arg: &str) -> String {
	let r = replace_home(base, arg);
	r.replace("$LOCAL", local)
}
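The rename spells out what the helper actually does: run the usual replace_home substitution first, then expand $LOCAL to the platform's local-data directory. A rough illustration with made-up Unix paths (the $BASE/$HOME handling itself lives in replace_home above, so only the $LOCAL expansion is shown as certain):

	// Hypothetical inputs, for illustration only (Unix-style separators assumed).
	let base  = "/home/alice/.local/share/io.parity.ethereum";
	let local = "/home/alice/.cache/io.parity.ethereum";

	// "$LOCAL/cache" -> "/home/alice/.cache/io.parity.ethereum/cache"
	let cache = replace_home_and_local(base, local, "$LOCAL/cache");

	// "$BASE/chains" contains no "$LOCAL", so only replace_home's substitutions apply.
	let chains = replace_home_and_local(base, local, "$BASE/chains");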


@@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

-use std::{str, fs, fmt};
+use std::{str, fs, fmt, path};
use std::time::Duration;
use util::{Address, U256, version_data};
use util::journaldb::Algorithm;
@@ -79,19 +79,20 @@ impl fmt::Display for SpecType {
}

impl SpecType {
-	pub fn spec(&self) -> Result<Spec, String> {
+	pub fn spec<T: AsRef<path::Path>>(&self, cache_dir: T) -> Result<Spec, String> {
+		let cache_dir = cache_dir.as_ref();
		match *self {
-			SpecType::Foundation => Ok(ethereum::new_foundation()),
-			SpecType::Morden => Ok(ethereum::new_morden()),
-			SpecType::Ropsten => Ok(ethereum::new_ropsten()),
-			SpecType::Olympic => Ok(ethereum::new_olympic()),
-			SpecType::Classic => Ok(ethereum::new_classic()),
-			SpecType::Expanse => Ok(ethereum::new_expanse()),
-			SpecType::Kovan => Ok(ethereum::new_kovan()),
+			SpecType::Foundation => Ok(ethereum::new_foundation(cache_dir)),
+			SpecType::Morden => Ok(ethereum::new_morden(cache_dir)),
+			SpecType::Ropsten => Ok(ethereum::new_ropsten(cache_dir)),
+			SpecType::Olympic => Ok(ethereum::new_olympic(cache_dir)),
+			SpecType::Classic => Ok(ethereum::new_classic(cache_dir)),
+			SpecType::Expanse => Ok(ethereum::new_expanse(cache_dir)),
+			SpecType::Kovan => Ok(ethereum::new_kovan(cache_dir)),
			SpecType::Dev => Ok(Spec::new_instant()),
			SpecType::Custom(ref filename) => {
-				let file = fs::File::open(filename).map_err(|_| "Could not load specification file.")?;
-				Spec::load(file)
+				let file = fs::File::open(filename).map_err(|e| format!("Could not load specification file at {}: {}", filename, e))?;
+				Spec::load(cache_dir, file)
			}
		}
	}
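Every SpecType::spec caller now has to say where engine caches belong; the CLI commands below pass the new dirs.cache entry. A hedged sketch of resolving a custom chain file the way those call sites do (assuming, as the surrounding code suggests, that Custom wraps the JSON path as a String and that the relevant types are in scope):

	fn open_custom_spec(dirs: &Directories) -> Result<Spec, String> {
		// Illustrative path; errors now report the offending file and the io error.
		let spec_type = SpecType::Custom("/path/to/chain.json".into());
		let spec = spec_type.spec(&dirs.cache)?;
		Ok(spec)
	}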


@@ -168,7 +168,7 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
	use util::RwLock;

	// load spec
-	let spec = cmd.spec.spec()?;
+	let spec = cmd.spec.spec(&cmd.dirs.cache)?;

	// load genesis hash
	let genesis_hash = spec.genesis_header().hash();
@@ -352,7 +352,7 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
	}

	// load spec
-	let spec = cmd.spec.spec()?;
+	let spec = cmd.spec.spec(&cmd.dirs.cache)?;

	// load genesis hash
	let genesis_hash = spec.genesis_header().hash();


@@ -133,7 +133,7 @@ impl SnapshotCommand {
	// shared portion of snapshot commands: start the client service
	fn start_service(self) -> Result<ClientService, String> {
		// load spec file
-		let spec = self.spec.spec()?;
+		let spec = self.spec.spec(&self.dirs.cache)?;

		// load genesis hash
		let genesis_hash = spec.genesis_header().hash();


@@ -15,6 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! rpc integration tests.

+use std::env;
use std::sync::Arc;
use std::time::Duration;
@@ -318,7 +319,7 @@ const POSITIVE_NONCE_SPEC: &'static [u8] = br#"{
#[test]
fn eth_transaction_count() {
	let secret = "8a283037bb19c4fed7b1c569e40c7dcff366165eb869110a1b11532963eb9cb2".parse().unwrap();
-	let tester = EthTester::from_spec(Spec::load(TRANSACTION_COUNT_SPEC).expect("invalid chain spec"));
+	let tester = EthTester::from_spec(Spec::load(&env::temp_dir(), TRANSACTION_COUNT_SPEC).expect("invalid chain spec"));
	let address = tester.accounts.insert_account(secret, "").unwrap();
	tester.accounts.unlock_account_permanently(address, "".into()).unwrap();
@@ -444,7 +445,7 @@ fn verify_transaction_counts(name: String, chain: BlockChain) {
#[test]
fn starting_nonce_test() {
-	let tester = EthTester::from_spec(Spec::load(POSITIVE_NONCE_SPEC).expect("invalid chain spec"));
+	let tester = EthTester::from_spec(Spec::load(&env::temp_dir(), POSITIVE_NONCE_SPEC).expect("invalid chain spec"));
	let address = Address::from(10);

	let sample = tester.handler.handle_request_sync(&(r#"