Merge branch 'master' into sync-svc

NikVolf 2016-07-18 15:20:57 +02:00
commit 028d6f6853
26 changed files with 380 additions and 258 deletions

View File

@ -22,7 +22,6 @@ matrix:
env:
global:
- TRAVIS_NODE_VERSION="6"
- CXX="g++-4.8"
- CC="gcc-4.8"
- RUST_BACKTRACE="1"
@ -51,7 +50,6 @@ addons:
- g++-4.8
install:
- rm -rf ~/.nvm && git clone https://github.com/creationix/nvm.git ~/.nvm && (cd ~/.nvm && git checkout `git describe --abbrev=0 --tags`) && source ~/.nvm/nvm.sh && nvm install $TRAVIS_NODE_VERSION
- ([ "$RUN_COVERAGE" = "false" ]) || (test -x $KCOV_CMD) || (
wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz &&
tar xzf master.tar.gz &&

Cargo.lock (generated)
View File

@ -279,10 +279,10 @@ dependencies = [
"jsonrpc-http-server 5.1.0 (git+https://github.com/ethcore/jsonrpc-http-server.git)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"mime_guess 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-dapps 0.3.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps-home 0.5.2 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps-status 0.5.1 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps-wallet 0.6.1 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps-home 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps-status 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps-wallet 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_codegen 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)",
@ -377,7 +377,7 @@ dependencies = [
"ethcore-util 1.3.0",
"jsonrpc-core 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-dapps-signer 0.2.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps-signer 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ws 0.5.0 (git+https://github.com/ethcore/ws-rs.git?branch=stable)",
@ -882,8 +882,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "parity-dapps"
version = "0.3.0"
source = "git+https://github.com/ethcore/parity-ui.git#f16a7e8b7f1ea4fe4da12af22f36a745a07513d6"
version = "0.6.0"
source = "git+https://github.com/ethcore/parity-ui.git#fb88ca259fa8eda6e54d9a04b325abd9eec2818b"
dependencies = [
"aster 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
@ -896,34 +896,34 @@ dependencies = [
[[package]]
name = "parity-dapps-home"
version = "0.5.2"
source = "git+https://github.com/ethcore/parity-ui.git#f16a7e8b7f1ea4fe4da12af22f36a745a07513d6"
version = "0.6.0"
source = "git+https://github.com/ethcore/parity-ui.git#fb88ca259fa8eda6e54d9a04b325abd9eec2818b"
dependencies = [
"parity-dapps 0.3.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
]
[[package]]
name = "parity-dapps-signer"
version = "0.2.0"
source = "git+https://github.com/ethcore/parity-ui.git#f16a7e8b7f1ea4fe4da12af22f36a745a07513d6"
version = "0.6.0"
source = "git+https://github.com/ethcore/parity-ui.git#fb88ca259fa8eda6e54d9a04b325abd9eec2818b"
dependencies = [
"parity-dapps 0.3.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
]
[[package]]
name = "parity-dapps-status"
version = "0.5.1"
source = "git+https://github.com/ethcore/parity-ui.git#f16a7e8b7f1ea4fe4da12af22f36a745a07513d6"
version = "0.6.0"
source = "git+https://github.com/ethcore/parity-ui.git#fb88ca259fa8eda6e54d9a04b325abd9eec2818b"
dependencies = [
"parity-dapps 0.3.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
]
[[package]]
name = "parity-dapps-wallet"
version = "0.6.1"
source = "git+https://github.com/ethcore/parity-ui.git#f16a7e8b7f1ea4fe4da12af22f36a745a07513d6"
version = "0.6.0"
source = "git+https://github.com/ethcore/parity-ui.git#fb88ca259fa8eda6e54d9a04b325abd9eec2818b"
dependencies = [
"parity-dapps 0.3.0 (git+https://github.com/ethcore/parity-ui.git)",
"parity-dapps 0.6.0 (git+https://github.com/ethcore/parity-ui.git)",
]
[[package]]
@ -1078,7 +1078,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "rocksdb"
version = "0.4.5"
source = "git+https://github.com/ethcore/rust-rocksdb#6472a9dce16c267a3acec2ee6fd01d1bf8de4913"
source = "git+https://github.com/ethcore/rust-rocksdb#dd597245bfcb621c6ffc45478e1fda0b05d2f409"
dependencies = [
"libc 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
"rocksdb-sys 0.3.0 (git+https://github.com/ethcore/rust-rocksdb)",
@ -1087,7 +1087,7 @@ dependencies = [
[[package]]
name = "rocksdb-sys"
version = "0.3.0"
source = "git+https://github.com/ethcore/rust-rocksdb#6472a9dce16c267a3acec2ee6fd01d1bf8de4913"
source = "git+https://github.com/ethcore/rust-rocksdb#dd597245bfcb621c6ffc45478e1fda0b05d2f409"
dependencies = [
"gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@ -51,7 +51,9 @@ version = "0.8"
default-features = false
[features]
default = ["dapps", "ethcore-signer/ui"]
default = ["ui", "use-precompiled-js"]
ui = ["dapps", "ethcore-signer/ui"]
use-precompiled-js = ["ethcore-dapps/use-precompiled-js", "ethcore-signer/use-precompiled-js"]
dapps = ["ethcore-dapps"]
dev = ["clippy", "ethcore/dev", "ethcore-util/dev", "ethsync/dev", "ethcore-rpc/dev", "ethcore-dapps/dev", "ethcore-signer/dev"]
travis-beta = ["ethcore/json-tests"]

View File

@ -18,10 +18,10 @@ branches:
install:
- git submodule update --init --recursive
- ps: Install-Product node 6
- ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-1.9.0-x86_64-pc-windows-msvc.exe"
- ps: Start-FileDownload "https://static.rust-lang.org/dist/rust-1.10.0-x86_64-pc-windows-msvc.exe"
- ps: Start-FileDownload "https://github.com/ethcore/win-build/raw/master/SimpleFC.dll" -FileName nsis\SimpleFC.dll
- ps: Start-FileDownload "https://github.com/ethcore/win-build/raw/master/vc_redist.x64.exe" -FileName nsis\vc_redist.x64.exe
- rust-1.9.0-x86_64-pc-windows-msvc.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
- rust-1.10.0-x86_64-pc-windows-msvc.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
- SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin;C:\Program Files (x86)\NSIS;C:\Program Files (x86)\Microsoft SDKs\Windows\v7.1A\Bin
- rustc -V
- cargo -V

View File

@ -21,11 +21,11 @@ serde_json = "0.7.0"
serde_macros = { version = "0.7.0", optional = true }
ethcore-rpc = { path = "../rpc" }
ethcore-util = { path = "../util" }
parity-dapps = { git = "https://github.com/ethcore/parity-ui.git", version = "0.3" }
parity-dapps = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6" }
# List of apps
parity-dapps-status = { git = "https://github.com/ethcore/parity-ui.git", version = "0.5.1" }
parity-dapps-home = { git = "https://github.com/ethcore/parity-ui.git", version = "0.5.2" }
parity-dapps-wallet = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6.0", optional = true }
parity-dapps-status = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6" }
parity-dapps-home = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6" }
parity-dapps-wallet = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6", optional = true }
mime_guess = { version = "1.6.1" }
clippy = { version = "0.0.79", optional = true}
@ -38,3 +38,9 @@ default = ["serde_codegen", "extra-dapps"]
extra-dapps = ["parity-dapps-wallet"]
nightly = ["serde_macros"]
dev = ["clippy", "ethcore-rpc/dev", "ethcore-util/dev"]
use-precompiled-js = [
"parity-dapps-status/use-precompiled-js",
"parity-dapps-home/use-precompiled-js",
"parity-dapps-wallet/use-precompiled-js"
]

View File

@ -271,11 +271,12 @@ fn hash_compute(light: &Light, full_size: usize, header_hash: &H256, nonce: u64)
let page_size = 4 * MIX_WORDS;
let num_full_pages = (full_size / page_size) as u32;
let cache: &[Node] = &light.cache; // deref once for better performance
for i in 0..(ETHASH_ACCESSES as u32) {
let index = fnv_hash(f_mix.get_unchecked(0).as_words().get_unchecked(0) ^ i, *mix.get_unchecked(0).as_words().get_unchecked((i as usize) % MIX_WORDS)) % num_full_pages;
for n in 0..MIX_NODES {
let tmp_node = calculate_dag_item(index * MIX_NODES as u32 + n as u32, light);
let tmp_node = calculate_dag_item(index * MIX_NODES as u32 + n as u32, cache);
for w in 0..NODE_WORDS {
*mix.get_unchecked_mut(n).as_words_mut().get_unchecked_mut(w) = fnv_hash(*mix.get_unchecked(n).as_words().get_unchecked(w), *tmp_node.as_words().get_unchecked(w));
}
@ -306,18 +307,17 @@ fn hash_compute(light: &Light, full_size: usize, header_hash: &H256, nonce: u64)
}
}
fn calculate_dag_item(node_index: u32, light: &Light) -> Node {
fn calculate_dag_item(node_index: u32, cache: &[Node]) -> Node {
unsafe {
let num_parent_nodes = light.cache.len();
let cache_nodes = &light.cache;
let init = cache_nodes.get_unchecked(node_index as usize % num_parent_nodes);
let num_parent_nodes = cache.len();
let init = cache.get_unchecked(node_index as usize % num_parent_nodes);
let mut ret = init.clone();
*ret.as_words_mut().get_unchecked_mut(0) ^= node_index;
sha3::sha3_512(ret.bytes.as_mut_ptr(), ret.bytes.len(), ret.bytes.as_ptr(), ret.bytes.len());
for i in 0..ETHASH_DATASET_PARENTS {
let parent_index = fnv_hash(node_index ^ i, *ret.as_words().get_unchecked(i as usize % NODE_WORDS)) % num_parent_nodes as u32;
let parent = cache_nodes.get_unchecked(parent_index as usize);
let parent = cache.get_unchecked(parent_index as usize);
for w in 0..NODE_WORDS {
*ret.as_words_mut().get_unchecked_mut(w) = fnv_hash(*ret.as_words().get_unchecked(w), *parent.as_words().get_unchecked(w));
}
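
The refactor above threads the cache slice (&[Node]) straight into calculate_dag_item instead of re-borrowing it from Light on every call, matching the "deref once" comment in the mixing loop. For context, a minimal sketch of the FNV-style mixing function both loops lean on, with made-up inputs; this is the assumed shape of fnv_hash, not a copy of the crate's code:

const FNV_PRIME: u32 = 0x0100_0193;

fn fnv_hash(x: u32, y: u32) -> u32 {
    // FNV-1-like step used by ethash: wrapping multiply by the prime, then XOR
    x.wrapping_mul(FNV_PRIME) ^ y
}

fn main() {
    // Deriving a parent index the way calculate_dag_item does, with illustrative values
    let (node_index, i, word, num_parent_nodes) = (42u32, 3u32, 0xdead_beef_u32, 1024u32);
    let parent_index = fnv_hash(node_index ^ i, word) % num_parent_nodes;
    println!("parent index: {}", parent_index);
}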

View File

@ -35,6 +35,8 @@ pub struct Account {
code_hash: Option<H256>,
// Code cache of the account.
code_cache: Bytes,
// Account is new or has been modified
dirty: bool,
}
impl Account {
@ -47,7 +49,8 @@ impl Account {
storage_root: SHA3_NULL_RLP,
storage_overlay: RefCell::new(storage.into_iter().map(|(k, v)| (k, (Filth::Dirty, v))).collect()),
code_hash: Some(code.sha3()),
code_cache: code
code_cache: code,
dirty: true,
}
}
@ -59,7 +62,8 @@ impl Account {
storage_root: SHA3_NULL_RLP,
storage_overlay: RefCell::new(pod.storage.into_iter().map(|(k, v)| (k, (Filth::Dirty, v))).collect()),
code_hash: Some(pod.code.sha3()),
code_cache: pod.code
code_cache: pod.code,
dirty: true,
}
}
@ -72,6 +76,7 @@ impl Account {
storage_overlay: RefCell::new(HashMap::new()),
code_hash: Some(SHA3_EMPTY),
code_cache: vec![],
dirty: true,
}
}
@ -85,6 +90,7 @@ impl Account {
storage_overlay: RefCell::new(HashMap::new()),
code_hash: Some(r.val_at(3)),
code_cache: vec![],
dirty: false,
}
}
@ -98,6 +104,7 @@ impl Account {
storage_overlay: RefCell::new(HashMap::new()),
code_hash: None,
code_cache: vec![],
dirty: true,
}
}
@ -106,6 +113,7 @@ impl Account {
pub fn init_code(&mut self, code: Bytes) {
assert!(self.code_hash.is_none());
self.code_cache = code;
self.dirty = true;
}
/// Reset this account's code to the given code.
@ -117,6 +125,7 @@ impl Account {
/// Set (and cache) the contents of the trie's storage at `key` to `value`.
pub fn set_storage(&mut self, key: H256, value: H256) {
self.storage_overlay.borrow_mut().insert(key, (Filth::Dirty, value));
self.dirty = true;
}
/// Get (and cache) the contents of the trie's storage at `key`.
@ -172,6 +181,10 @@ impl Account {
!self.code_cache.is_empty() || (self.code_cache.is_empty() && self.code_hash == Some(SHA3_EMPTY))
}
/// Is this a new or modified account?
pub fn is_dirty(&self) -> bool {
self.dirty
}
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &AccountDB) -> bool {
// TODO: fill out self.code_cache;
@ -201,16 +214,23 @@ impl Account {
pub fn storage_overlay(&self) -> Ref<HashMap<H256, (Filth, H256)>> { self.storage_overlay.borrow() }
/// Increment the nonce of the account by one.
pub fn inc_nonce(&mut self) { self.nonce = self.nonce + U256::from(1u8); }
pub fn inc_nonce(&mut self) {
self.nonce = self.nonce + U256::from(1u8);
self.dirty = true;
}
/// Increment the nonce of the account by one.
pub fn add_balance(&mut self, x: &U256) { self.balance = self.balance + *x; }
pub fn add_balance(&mut self, x: &U256) {
self.balance = self.balance + *x;
self.dirty = true;
}
/// Increment the nonce of the account by one.
/// Panics if balance is less than `x`
pub fn sub_balance(&mut self, x: &U256) {
assert!(self.balance >= *x);
self.balance = self.balance - *x;
self.dirty = true;
}
/// Commit the `storage_overlay` to the backing DB and update `storage_root`.
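
The dirty flag added above lets the state commit skip accounts that were only read: every mutating method (init_code, set_storage, inc_nonce, add_balance, sub_balance) now sets it, freshly created accounts start dirty, and accounts decoded from RLP start clean. A minimal sketch of the pattern with illustrative names rather than the ethcore types:

struct CachedEntry {
    balance: u64,
    dirty: bool,
}

impl CachedEntry {
    // Loaded from the backing store: nothing to write back yet.
    fn from_storage(balance: u64) -> Self { CachedEntry { balance, dirty: false } }

    // Any mutation marks the entry for the next commit.
    fn add_balance(&mut self, x: u64) {
        self.balance += x;
        self.dirty = true;
    }

    fn is_dirty(&self) -> bool { self.dirty }
}

fn main() {
    let mut entry = CachedEntry::from_storage(100);
    assert!(!entry.is_dirty());
    entry.add_balance(5);
    assert!(entry.is_dirty());
}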

View File

@ -296,32 +296,20 @@ impl BlockChain {
// load best block
let best_block_hash = match bc.extras_db.get(b"best").unwrap() {
Some(best) => {
let best = H256::from_slice(&best);
let mut b = best.clone();
let mut removed = 0;
let mut best_num = 0;
while !bc.blocks_db.get(&b).unwrap().is_some() {
// track back to the best block we have in the blocks database
let extras: BlockDetails = bc.extras_db.read(&b).unwrap();
type DetailsKey = Key<BlockDetails, Target=H264>;
bc.extras_db.delete(&(DetailsKey::key(&b))).unwrap();
b = extras.parent;
best_num = extras.number;
removed += 1;
}
if b != best {
let batch = DBTransaction::new();
let range = (best_num + 1) as bc::Number .. (best_num + removed) as bc::Number;
let chain = bc::group::BloomGroupChain::new(bc.blooms_config, &bc);
let changes = chain.replace(&range, vec![]);
for (k, v) in changes.into_iter() {
batch.write(&LogGroupPosition::from(k), &BloomGroup::from(v));
let mut new_best = H256::from_slice(&best);
while !bc.blocks_db.get(&new_best).unwrap().is_some() {
match bc.rewind() {
Some(h) => {
new_best = h;
}
None => {
warn!("Can't rewind blockchain");
break;
}
}
batch.put(b"best", &b).unwrap();
bc.extras_db.write(batch).unwrap();
info!("Restored mismatched best block. Was: {}, new: {}", best.hex(), b.hex());
info!("Restored mismatched best block. Was: {}, new: {}", H256::from_slice(&best).hex(), new_best.hex());
}
b
new_best
}
None => {
// best block does not exist
@ -359,6 +347,52 @@ impl BlockChain {
bc
}
/// Returns true if the given parent block has given child
/// (though not necessarily a part of the canon chain).
fn is_known_child(&self, parent: &H256, hash: &H256) -> bool {
self.extras_db.read_with_cache(&self.block_details, parent).map_or(false, |d| d.children.contains(hash))
}
/// Rewind to a previous block
pub fn rewind(&self) -> Option<H256> {
let batch = DBTransaction::new();
// track back to the best block we have in the blocks database
if let Some(best_block_hash) = self.extras_db.get(b"best").unwrap() {
let best_block_hash = H256::from_slice(&best_block_hash);
if best_block_hash == self.genesis_hash() {
return None;
}
if let Some(extras) = self.extras_db.read(&best_block_hash) as Option<BlockDetails> {
type DetailsKey = Key<BlockDetails, Target=H264>;
batch.delete(&(DetailsKey::key(&best_block_hash))).unwrap();
let hash = extras.parent;
let range = extras.number as bc::Number .. extras.number as bc::Number;
let chain = bc::group::BloomGroupChain::new(self.blooms_config, self);
let changes = chain.replace(&range, vec![]);
for (k, v) in changes.into_iter() {
batch.write(&LogGroupPosition::from(k), &BloomGroup::from(v));
}
batch.put(b"best", &hash).unwrap();
let mut best_block = self.best_block.write();
best_block.number = extras.number - 1;
best_block.total_difficulty = self.block_details(&hash).unwrap().total_difficulty;
best_block.hash = hash;
// update parent extras
if let Some(mut details) = self.extras_db.read(&hash) as Option<BlockDetails> {
details.children.clear();
batch.write(&hash, &details);
}
self.extras_db.write(batch).unwrap();
self.block_details.write().clear();
self.block_hashes.write().clear();
self.blocks.write().clear();
self.block_receipts.write().clear();
return Some(hash);
}
}
return None;
}
/// Set the cache configuration.
pub fn configure_cache(&self, pref_cache_size: usize, max_cache_size: usize) {
self.pref_cache_size.store(pref_cache_size, AtomicOrder::Relaxed);
@ -463,7 +497,7 @@ impl BlockChain {
let header = block.header_view();
let hash = header.sha3();
if self.is_known(&hash) {
if self.is_known_child(&header.parent_hash(), &hash) {
return ImportRoute::none();
}
@ -508,14 +542,15 @@ impl BlockChain {
batch.extend_with_cache(&mut *write_blocks_blooms, update.blocks_blooms, CacheUpdatePolicy::Remove);
}
// These cached values must be updated last and togeterh
// These cached values must be updated last with all three locks taken to avoid
// cache decoherence
{
let mut best_block = self.best_block.write();
// update best block
match update.info.location {
BlockLocation::Branch => (),
_ => {
batch.put(b"best", &update.info.hash).unwrap();
let mut best_block = self.best_block.write();
*best_block = BestBlock {
hash: update.info.hash,
number: update.info.number,
@ -1216,4 +1251,30 @@ mod tests {
let bc = BlockChain::new(Config::default(), &genesis, temp.as_path());
assert_eq!(bc.best_block_number(), 5);
}
#[test]
fn test_rewind() {
let mut canon_chain = ChainGenerator::default();
let mut finalizer = BlockFinalizer::default();
let genesis = canon_chain.generate(&mut finalizer).unwrap();
let first = canon_chain.generate(&mut finalizer).unwrap();
let second = canon_chain.generate(&mut finalizer).unwrap();
let genesis_hash = BlockView::new(&genesis).header_view().sha3();
let first_hash = BlockView::new(&first).header_view().sha3();
let second_hash = BlockView::new(&second).header_view().sha3();
let temp = RandomTempPath::new();
let bc = BlockChain::new(Config::default(), &genesis, temp.as_path());
bc.insert_block(&first, vec![]);
bc.insert_block(&second, vec![]);
assert_eq!(bc.rewind(), Some(first_hash.clone()));
assert!(!bc.is_known(&second_hash));
assert_eq!(bc.best_block_number(), 1);
assert_eq!(bc.best_block_hash(), first_hash.clone());
assert_eq!(bc.rewind(), Some(genesis_hash.clone()));
assert_eq!(bc.rewind(), None);
}
}
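
The new rewind() drops the current best block's extras, repoints the stored best hash at its parent, clears the per-block caches and returns the new best hash (or None once genesis is reached). Both the constructor above and Client::new in the next file call it in a loop to walk back to the last block whose data is actually present. A sketch of that recovery shape, using a mock chain instead of the real BlockChain:

struct MockChain { best: u64 }

impl MockChain {
    // Stand-in for BlockChain::rewind: step the best block back, None at genesis.
    fn rewind(&mut self) -> Option<u64> {
        if self.best == 0 { None } else { self.best -= 1; Some(self.best) }
    }
}

fn main() {
    let mut chain = MockChain { best: 10 };
    let last_good = 7; // pretend block bodies above #7 are missing
    while chain.best > last_good {
        match chain.rewind() {
            Some(new_best) => println!("rewound to #{}", new_best),
            None => { println!("cannot rewind past genesis"); break; }
        }
    }
}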

View File

@ -22,21 +22,18 @@ use std::sync::{Arc, Weak};
use std::path::{Path, PathBuf};
use std::fmt;
use std::sync::atomic::{AtomicUsize, AtomicBool, Ordering as AtomicOrdering};
use std::time::Instant;
use std::time::{Instant, Duration};
use time::precise_time_ns;
// util
use util::{journaldb, rlp, Bytes, Stream, View, PerfTimer, Itertools, Mutex, RwLock, Colour};
use util::journaldb::JournalDB;
use util::rlp::{RlpStream, Rlp, UntrustedRlp};
use util::numbers::*;
use util::panics::*;
use util::io::*;
use util::rlp;
use util::sha3::*;
use util::Bytes;
use util::rlp::{RlpStream, Rlp, UntrustedRlp};
use util::journaldb;
use util::journaldb::JournalDB;
use util::kvdb::*;
use util::{Stream, View, PerfTimer, Itertools};
use util::{Mutex, RwLock};
// other
use views::BlockView;
@ -145,6 +142,9 @@ pub struct Client {
notify: RwLock<Option<Weak<ChainNotify>>>,
queue_transactions: AtomicUsize,
previous_enode: Mutex<Option<String>>,
skipped: AtomicUsize,
last_import: Mutex<Instant>,
last_hashes: RwLock<VecDeque<H256>>,
}
const HISTORY: u64 = 1200;
@ -205,6 +205,11 @@ impl Client {
state_db.commit(0, &spec.genesis_header().hash(), None).expect("Error commiting genesis state to state DB");
}
while !chain.block_header(&chain.best_block_hash()).map_or(true, |h| state_db.contains(h.state_root())) {
warn!("State root not found for block #{} ({}), recovering...", chain.best_block_number(), chain.best_block_hash().hex());
chain.rewind();
}
let engine = Arc::new(spec.engine);
let block_queue = BlockQueue::new(config.queue, engine.clone(), message_channel.clone());
@ -232,6 +237,9 @@ impl Client {
notify: RwLock::new(None),
queue_transactions: AtomicUsize::new(0),
previous_enode: Mutex::new(None),
skipped: AtomicUsize::new(0),
last_import: Mutex::new(Instant::now()),
last_hashes: RwLock::new(VecDeque::new()),
};
Ok(Arc::new(client))
}
@ -253,6 +261,14 @@ impl Client {
}
fn build_last_hashes(&self, parent_hash: H256) -> LastHashes {
{
let hashes = self.last_hashes.read();
if hashes.front().map_or(false, |h| h == &parent_hash) {
let mut res = Vec::from(hashes.clone());
res.resize(256, H256::default());
return res;
}
}
let mut last_hashes = LastHashes::new();
last_hashes.resize(256, H256::new());
last_hashes[0] = parent_hash;
@ -264,6 +280,8 @@ impl Client {
None => break,
}
}
let mut cached_hashes = self.last_hashes.write();
*cached_hashes = VecDeque::from(last_hashes.clone());
last_hashes
}
@ -355,16 +373,21 @@ impl Client {
for block in blocks {
let header = &block.header;
let start = precise_time_ns();
if invalid_blocks.contains(&header.parent_hash) {
invalid_blocks.insert(header.hash());
continue;
}
let tx_count = block.transactions.len();
let size = block.bytes.len();
let closed_block = self.check_and_close_block(&block);
if let Err(_) = closed_block {
invalid_blocks.insert(header.hash());
continue;
}
let closed_block = closed_block.unwrap();
imported_blocks.push(header.hash());
@ -372,7 +395,30 @@ impl Client {
import_results.push(route);
self.report.write().accrue_block(&block);
trace!(target: "client", "Imported #{} ({})", header.number(), header.hash());
let duration_ns = precise_time_ns() - start;
let mut last_import = self.last_import.lock();
if Instant::now() > *last_import + Duration::from_secs(1) {
let queue_info = self.queue_info();
let importing = queue_info.unverified_queue_size + queue_info.verified_queue_size > 3;
if !importing {
let skipped = self.skipped.load(AtomicOrdering::Relaxed);
info!(target: "import", "Imported {} {} ({} txs, {} Mgas, {} ms, {} KiB){}",
Colour::White.bold().paint(format!("#{}", header.number())),
Colour::White.bold().paint(format!("{}", header.hash())),
Colour::Yellow.bold().paint(format!("{}", tx_count)),
Colour::Yellow.bold().paint(format!("{:.2}", header.gas_used.low_u64() as f32 / 1000000f32)),
Colour::Purple.bold().paint(format!("{:.2}", duration_ns as f32 / 1000000f32)),
Colour::Blue.bold().paint(format!("{:.2}", size as f32 / 1024f32)),
if skipped > 0 { format!(" + another {} block(s)", Colour::Red.bold().paint(format!("{}", skipped))) } else { String::new() }
);
*last_import = Instant::now();
}
self.skipped.store(0, AtomicOrdering::Relaxed);
} else {
self.skipped.fetch_add(1, AtomicOrdering::Relaxed);
}
}
let imported = imported_blocks.len();
@ -418,6 +464,7 @@ impl Client {
fn commit_block<B>(&self, block: B, hash: &H256, block_data: &[u8]) -> ImportRoute where B: IsBlock + Drain {
let number = block.header().number();
let parent = block.header().parent_hash().clone();
// Are we committing an era?
let ancient = if number >= HISTORY {
let n = number - HISTORY;
@ -445,9 +492,20 @@ impl Client {
enacted: route.enacted.clone(),
retracted: route.retracted.len()
});
self.update_last_hashes(&parent, hash);
route
}
fn update_last_hashes(&self, parent: &H256, hash: &H256) {
let mut hashes = self.last_hashes.write();
if hashes.front().map_or(false, |h| h == parent) {
if hashes.len() > 255 {
hashes.pop_back();
}
hashes.push_front(hash.clone());
}
}
/// Import transactions from the IO queue
pub fn import_queued_transactions(&self, transactions: &[Bytes]) -> usize {
let _timer = PerfTimer::new("import_queued_transactions");
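
The last_hashes cache added above avoids rebuilding the 256-entry LastHashes vector via chain lookups for every block: build_last_hashes answers from the cached deque whenever its front is the requested parent, and update_last_hashes pushes the freshly committed hash on top while capping the deque at 256 entries. A minimal sketch of the same idea, with u64 standing in for H256:

use std::collections::VecDeque;

struct LastHashesCache { hashes: VecDeque<u64> }

impl LastHashesCache {
    fn new() -> Self { LastHashesCache { hashes: VecDeque::new() } }

    // Called after a commit: only extend the cache if it is empty or still
    // contiguous with the parent of the block just written.
    fn update(&mut self, parent: u64, hash: u64) {
        if self.hashes.is_empty() || self.hashes.front() == Some(&parent) {
            if self.hashes.len() > 255 { self.hashes.pop_back(); }
            self.hashes.push_front(hash);
        }
    }

    // Serve the request from the cache when possible, otherwise signal a miss
    // (the real build_last_hashes falls back to walking block headers).
    fn build(&self, parent: u64) -> Option<Vec<u64>> {
        if self.hashes.front() == Some(&parent) {
            let mut res: Vec<u64> = self.hashes.iter().cloned().collect();
            res.resize(256, 0); // pad with a default, as the client pads with H256::default()
            Some(res)
        } else {
            None
        }
    }
}

fn main() {
    let mut cache = LastHashesCache::new();
    cache.update(0, 1);
    cache.update(1, 2);
    assert_eq!(cache.build(2).map(|v| v[0]), Some(2));
}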

View File

@ -59,8 +59,6 @@ pub enum TransactionError {
},
/// Transaction's gas limit (aka gas) is invalid.
InvalidGasLimit(OutOfBounds<U256>),
/// Transaction is invalid for some other reason.
DAORescue,
}
impl fmt::Display for TransactionError {
@ -79,7 +77,6 @@ impl fmt::Display for TransactionError {
GasLimitExceeded { limit, got } =>
format!("Gas limit exceeded. Limit={}, Given={}", limit, got),
InvalidGasLimit(ref err) => format!("Invalid gas limit. {}", err),
DAORescue => "Transaction is invalid due to the DAO rescue.".into(),
};
f.write_fmt(format_args!("Transaction error ({})", msg))

View File

@ -80,8 +80,6 @@ pub struct Schedule {
pub tx_data_non_zero_gas: usize,
/// Gas price for copying memory
pub copy_gas: usize,
/// DAO Rescue softfork block
pub reject_dao_transactions: bool,
}
impl Schedule {
@ -128,7 +126,6 @@ impl Schedule {
tx_data_zero_gas: 4,
tx_data_non_zero_gas: 68,
copy_gas: 3,
reject_dao_transactions: false,
}
}
}

View File

@ -232,36 +232,10 @@ impl State {
let options = TransactOptions { tracing: tracing, vm_tracing: false, check_nonce: true };
let e = try!(Executive::new(self, env_info, engine, vm_factory).transact(t, options));
let broken_dao = H256::from("6a5d24750f78441e56fec050dc52fe8e911976485b7472faac7464a176a67caa");
// dao attack soft fork
if engine.schedule(&env_info).reject_dao_transactions {
let whitelisted = if let Action::Call(to) = t.action {
to == Address::from("Da4a4626d3E16e094De3225A751aAb7128e96526") ||
to == Address::from("2ba9D006C1D72E67A70b5526Fc6b4b0C0fd6D334")
} else { false };
if !whitelisted {
// collect all the addresses which have changed.
let addresses = self.cache.borrow().iter().map(|(addr, _)| addr.clone()).collect::<Vec<_>>();
for a in &addresses {
if self.code(a).map_or(false, |c| c.sha3() == broken_dao) {
// Figure out if the balance has been reduced.
let maybe_original = self.trie_factory
.readonly(self.db.as_hashdb(), &self.root)
.expect(SEC_TRIE_DB_UNWRAP_STR)
.get(&a).map(Account::from_rlp);
if maybe_original.map_or(false, |original| *original.balance() > self.balance(a)) {
return Err(Error::Transaction(TransactionError::DAORescue));
}
}
}
}
}
// TODO uncomment once to_pod() works correctly.
// trace!("Applied transaction. Diff:\n{}\n", state_diff::diff_pod(&old, &self.to_pod()));
self.commit();
self.clear();
let receipt = Receipt::new(self.root().clone(), e.cumulative_gas_used, e.logs);
// trace!("Transaction receipt: {:?}", receipt);
Ok(ApplyOutcome{receipt: receipt, trace: e.trace})
@ -275,12 +249,12 @@ impl State {
// TODO: is this necessary or can we dispense with the `ref mut a` for just `a`?
for (address, ref mut a) in accounts.iter_mut() {
match a {
&mut&mut Some(ref mut account) => {
&mut&mut Some(ref mut account) if account.is_dirty() => {
let mut account_db = AccountDBMut::new(db, address);
account.commit_storage(trie_factory, &mut account_db);
account.commit_code(&mut account_db);
}
&mut&mut None => {}
_ => {}
}
}
@ -288,8 +262,9 @@ impl State {
let mut trie = trie_factory.from_existing(db, root).unwrap();
for (address, ref a) in accounts.iter() {
match **a {
Some(ref account) => trie.insert(address, &account.rlp()),
Some(ref account) if account.is_dirty() => trie.insert(address, &account.rlp()),
None => trie.remove(address),
_ => (),
}
}
}
@ -301,6 +276,11 @@ impl State {
Self::commit_into(&self.trie_factory, self.db.as_hashdb_mut(), &mut self.root, self.cache.borrow_mut().deref_mut());
}
/// Clear state cache
pub fn clear(&mut self) {
self.cache.borrow_mut().clear();
}
#[cfg(test)]
#[cfg(feature = "json-tests")]
/// Populate the state from `accounts`.
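
With the dirty flag in place, commit_into only writes accounts that were actually created or modified into the trie, and apply() now calls the new clear() after committing the per-transaction changes. A sketch of the same filtering, with a HashMap standing in for the trie and plain values standing in for accounts:

use std::collections::HashMap;

struct Entry { value: u64, dirty: bool }

fn commit_into(backing: &mut HashMap<String, u64>, cache: &HashMap<String, Option<Entry>>) {
    for (addr, slot) in cache {
        match slot {
            Some(e) if e.dirty => { backing.insert(addr.clone(), e.value); } // new or modified
            None => { backing.remove(addr); }                                // deleted account
            _ => {}                                                          // clean: leave the backing store untouched
        }
    }
}

fn main() {
    let mut backing = HashMap::new();
    backing.insert("a".to_string(), 1);

    let mut cache: HashMap<String, Option<Entry>> = HashMap::new();
    cache.insert("a".to_string(), Some(Entry { value: 1, dirty: false }));
    cache.insert("b".to_string(), Some(Entry { value: 9, dirty: true }));
    cache.insert("c".to_string(), None);

    commit_into(&mut backing, &cache);
    assert_eq!(backing.get("a"), Some(&1)); // untouched
    assert_eq!(backing.get("b"), Some(&9)); // written
    assert!(!backing.contains_key("c"));    // removed
}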

View File

@ -1,6 +1,6 @@
#!/usr/bin/env bash
PARITY_DEB_URL=https://github.com/ethcore/parity/releases/download/v1.2.1/parity_linux_1.2.1-0_amd64.deb
PARITY_DEB_URL=https://github.com/ethcore/parity/releases/download/v1.2.2/parity_linux_1.2.2-0_amd64.deb
function run_installer()

View File

@ -15,7 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
extern crate ansi_term;
use self::ansi_term::Colour::{White, Yellow, Green, Cyan, Blue, Purple};
use self::ansi_term::Colour::{White, Yellow, Green, Cyan, Blue};
use self::ansi_term::Style;
use std::time::{Instant, Duration};
@ -83,12 +83,17 @@ impl Informant {
return;
}
*self.last_tick.write() = Instant::now();
let chain_info = client.chain_info();
let queue_info = client.queue_info();
let cache_info = client.blockchain_cache_info();
let importing = queue_info.unverified_queue_size + queue_info.verified_queue_size > 3;
if !importing && elapsed < Duration::from_secs(30) {
return;
}
*self.last_tick.write() = Instant::now();
let mut write_report = self.report.write();
let report = client.report();
@ -97,42 +102,46 @@ impl Informant {
false => t,
};
if let (_, _, &Some(ref last_report)) = (
self.chain_info.read().deref(),
self.cache_info.read().deref(),
write_report.deref()
) {
println!("{} {} {} blk/s {} tx/s {} Mgas/s {}{}+{} Qed {} db {} chain {} queue{}",
paint(White.bold(), format!("{:>8}", format!("#{}", chain_info.best_block_number))),
paint(White.bold(), format!("{}", chain_info.best_block_hash)),
paint(Yellow.bold(), format!("{:4}", ((report.blocks_imported - last_report.blocks_imported) * 1000) as u64 / elapsed.as_milliseconds())),
paint(Yellow.bold(), format!("{:4}", ((report.transactions_applied - last_report.transactions_applied) * 1000) as u64 / elapsed.as_milliseconds())),
paint(Yellow.bold(), format!("{:3}", ((report.gas_processed - last_report.gas_processed) / From::from(elapsed.as_milliseconds() * 1000)).low_u64())),
match maybe_status {
Some((ref sync_info, ref net_config)) => {
format!("{}/{}/{} peers {} ",
paint(Green.bold(), format!("{:2}", sync_info.num_active_peers)),
paint(Green.bold(), format!("{:2}", sync_info.num_peers)),
paint(Green.bold(), format!("{:2}", net_config.ideal_peers)),
paint(Cyan.bold(), format!("{:>8}", format!("#{}", sync_info.last_imported_block_number.unwrap_or(chain_info.best_block_number)))),
info!("{} {} {}",
match importing {
true => format!("{} {} {} {}+{} Qed",
paint(White.bold(), format!("{:>8}", format!("#{}", chain_info.best_block_number))),
paint(White.bold(), format!("{}", chain_info.best_block_hash)),
{
let last_report = match write_report.deref() { &Some(ref last_report) => last_report.clone(), _ => ClientReport::default() };
format!("{} blk/s {} tx/s {} Mgas/s",
paint(Yellow.bold(), format!("{:4}", ((report.blocks_imported - last_report.blocks_imported) * 1000) as u64 / elapsed.as_milliseconds())),
paint(Yellow.bold(), format!("{:4}", ((report.transactions_applied - last_report.transactions_applied) * 1000) as u64 / elapsed.as_milliseconds())),
paint(Yellow.bold(), format!("{:3}", ((report.gas_processed - last_report.gas_processed) / From::from(elapsed.as_milliseconds() * 1000)).low_u64()))
)
}
None => String::new()
},
paint(Blue.bold(), format!("{:5}", queue_info.unverified_queue_size)),
paint(Blue.bold(), format!("{:5}", queue_info.verified_queue_size)),
paint(Purple.bold(), format!("{:>8}", Informant::format_bytes(report.state_db_mem))),
paint(Purple.bold(), format!("{:>8}", Informant::format_bytes(cache_info.total()))),
paint(Purple.bold(), format!("{:>8}", Informant::format_bytes(queue_info.mem_used))),
if let Some((ref sync_info, _)) = maybe_status {
format!(" {} sync", paint(Purple.bold(), format!("{:>8}", Informant::format_bytes(sync_info.mem_used))))
} else { String::new() },
);
}
},
paint(Green.bold(), format!("{:5}", queue_info.unverified_queue_size)),
paint(Green.bold(), format!("{:5}", queue_info.verified_queue_size))
),
false => String::new(),
},
match maybe_status {
Some((ref sync_info, ref net_config)) => format!("{}{}/{}/{} peers",
match importing {
true => format!("{} ", paint(Green.bold(), format!("{:>8}", format!("#{}", sync_info.last_imported_block_number.unwrap_or(chain_info.best_block_number))))),
false => String::new(),
},
paint(Cyan.bold(), format!("{:2}", sync_info.num_active_peers)),
paint(Cyan.bold(), format!("{:2}", sync_info.num_peers)),
paint(Cyan.bold(), format!("{:2}", net_config.ideal_peers))
),
None => String::new(),
},
format!("{} db {} chain {} queue{}",
paint(Blue.bold(), format!("{:>8}", Informant::format_bytes(report.state_db_mem))),
paint(Blue.bold(), format!("{:>8}", Informant::format_bytes(cache_info.total()))),
paint(Blue.bold(), format!("{:>8}", Informant::format_bytes(queue_info.mem_used))),
match maybe_status {
Some((ref sync_info, _)) => format!(" {} sync", paint(Blue.bold(), format!("{:>8}", Informant::format_bytes(sync_info.mem_used)))),
_ => String::new(),
}
)
);
*self.chain_info.write().deref_mut() = Some(chain_info);
*self.cache_info.write().deref_mut() = Some(cache_info);
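
The figures in the new log line are simple deltas scaled by the tick interval: blocks (or transactions) per second are delta * 1000 / elapsed_ms, and Mgas/s divides the gas delta by elapsed_ms * 1000. A worked example with plain integers in place of ClientReport and U256:

fn main() {
    let elapsed_ms: u64 = 5_000;       // time since the last printed report
    let blocks_delta: u64 = 60;        // report.blocks_imported - last_report.blocks_imported
    let gas_delta: u64 = 240_000_000;  // report.gas_processed - last_report.gas_processed

    let blk_per_s = blocks_delta * 1000 / elapsed_ms;   // 12 blk/s
    let mgas_per_s = gas_delta / (elapsed_ms * 1000);   // 240e6 / 5e6 = 48 Mgas/s
    println!("{} blk/s, {} Mgas/s", blk_per_s, mgas_per_s);
}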

View File

@ -84,7 +84,7 @@ use std::thread::sleep;
use std::time::Duration;
use rustc_serialize::hex::FromHex;
use ctrlc::CtrlC;
use util::{H256, ToPretty, PayloadInfo, Bytes, Colour, version, journaldb};
use util::{H256, ToPretty, PayloadInfo, Bytes, Colour, version, journaldb, RotatingLogger};
use util::panics::{MayPanic, ForwardPanic, PanicHandler};
use ethcore::client::{BlockID, BlockChainClient, ClientConfig, get_db_path, BlockImportError, Mode};
use ethcore::error::{ImportError};
@ -129,6 +129,13 @@ fn execute(conf: Configuration) {
daemonize(&conf);
}
// Setup panic handler
let panic_handler = PanicHandler::new_in_arc();
// Setup logging
let logger = setup_log::setup_log(&conf.args.flag_logging, conf.have_color(), &conf.args.flag_log_file);
// Raise fdlimit
unsafe { ::fdlimit::raise_fd_limit(); }
if conf.args.cmd_account {
execute_account_cli(conf);
return;
@ -140,16 +147,16 @@ fn execute(conf: Configuration) {
}
if conf.args.cmd_export {
execute_export(conf);
execute_export(conf, panic_handler);
return;
}
if conf.args.cmd_import {
execute_import(conf);
execute_import(conf, panic_handler);
return;
}
execute_client(conf, spec, client_config);
execute_client(conf, spec, client_config, panic_handler, logger);
}
#[cfg(not(windows))]
@ -169,7 +176,7 @@ fn daemonize(_conf: &Configuration) {
fn execute_upgrades(conf: &Configuration, spec: &Spec, client_config: &ClientConfig) {
match ::upgrade::upgrade(Some(&conf.path())) {
Ok(upgrades_applied) if upgrades_applied > 0 => {
println!("Executed {} upgrade scripts - ok", upgrades_applied);
debug!("Executed {} upgrade scripts - ok", upgrades_applied);
},
Err(e) => {
die!("Error upgrading parity data: {:?}", e);
@ -184,15 +191,7 @@ fn execute_upgrades(conf: &Configuration, spec: &Spec, client_config: &ClientCon
}
}
fn execute_client(conf: Configuration, spec: Spec, client_config: ClientConfig) {
// Setup panic handler
let panic_handler = PanicHandler::new_in_arc();
// Setup logging
let logger = setup_log::setup_log(&conf.args.flag_logging, conf.have_color(), &conf.args.flag_log_file);
// Raise fdlimit
unsafe { ::fdlimit::raise_fd_limit(); }
fn execute_client(conf: Configuration, spec: Spec, client_config: ClientConfig, panic_handler: Arc<PanicHandler>, logger: Arc<RotatingLogger>) {
info!("Starting {}", Colour::White.bold().paint(format!("{}", version())));
info!("Using state DB journalling strategy {}", Colour::White.bold().paint(match client_config.pruning {
journaldb::Algorithm::Archive => "archive",
@ -340,15 +339,7 @@ enum DataFormat {
Binary,
}
fn execute_export(conf: Configuration) {
// Setup panic handler
let panic_handler = PanicHandler::new_in_arc();
// Setup logging
let _logger = setup_log::setup_log(&conf.args.flag_logging, conf.have_color(), &conf.args.flag_log_file);
// Raise fdlimit
unsafe { ::fdlimit::raise_fd_limit(); }
fn execute_export(conf: Configuration, panic_handler: Arc<PanicHandler>) {
let spec = conf.spec();
let client_config = conf.client_config(&spec);
@ -401,15 +392,7 @@ fn execute_export(conf: Configuration) {
}
}
fn execute_import(conf: Configuration) {
// Setup panic handler
let panic_handler = PanicHandler::new_in_arc();
// Setup logging
let _logger = setup_log::setup_log(&conf.args.flag_logging, conf.have_color(), &conf.args.flag_log_file);
// Raise fdlimit
unsafe { ::fdlimit::raise_fd_limit(); }
fn execute_import(conf: Configuration, panic_handler: Arc<PanicHandler>) {
let spec = conf.spec();
let client_config = conf.client_config(&spec);
@ -444,11 +427,11 @@ fn execute_import(conf: Configuration) {
first_read = instream.read(&mut(first_bytes[..])).unwrap_or_else(|_| die!("Error reading from the file/stream."));
match first_bytes[0] {
0xf9 => {
println!("Autodetected binary data format.");
info!("Autodetected binary data format.");
DataFormat::Binary
}
_ => {
println!("Autodetected hex data format.");
info!("Autodetected hex data format.");
DataFormat::Hex
}
}
@ -499,9 +482,10 @@ fn execute_signer(conf: Configuration) {
}
let path = conf.directories().signer;
new_token(path).unwrap_or_else(|e| {
let code = new_token(path).unwrap_or_else(|e| {
die!("Error generating token: {:?}", e)
});
println!("This key code will authorise your System Signer UI: {}", if conf.args.flag_no_color { code } else { format!("{}", Colour::White.bold().paint(code)) });
}
fn execute_account_cli(conf: Configuration) {

View File

@ -54,13 +54,13 @@ fn codes_path(path: String) -> PathBuf {
p
}
pub fn new_token(path: String) -> io::Result<()> {
pub fn new_token(path: String) -> io::Result<String> {
let path = codes_path(path);
let mut codes = try!(signer::AuthCodes::from_file(&path));
let code = try!(codes.generate_new());
try!(codes.to_file(&path));
info!("This key code will authorise your System Signer UI: {}", Colour::White.bold().paint(code));
Ok(())
trace!("New key code created: {}", Colour::White.bold().paint(&code[..]));
Ok(code)
}
fn do_start(conf: Configuration, deps: Dependencies) -> SignerServer {

View File

@ -64,7 +64,6 @@ impl UpgradeKey {
// dummy upgrade (remove when the first one is in)
fn dummy_upgrade() -> Result<(), Error> {
println!("Adding ver.lock");
Ok(())
}

View File

@ -170,7 +170,6 @@ fn transaction_error(error: EthcoreError) -> Error {
format!("Transaction cost exceeds current gas limit. Limit: {}, got: {}. Try decreasing supplied gas.", limit, got)
},
InvalidGasLimit(_) => "Supplied gas is beyond limit.".into(),
DAORescue => "Transaction removes funds from a DAO.".into(),
};
Error {
code: ErrorCode::ServerError(error_codes::TRANSACTION_ERROR),

View File

@ -71,25 +71,25 @@ impl<C: 'static, M: 'static> PersonalSigner for SignerClient<C, M> where C: Mini
let client = take_weak!(self.client);
let miner = take_weak!(self.miner);
queue.peek(&id).and_then(|confirmation| {
let mut request = confirmation.transaction;
// apply modification
if let Some(gas_price) = modification.gas_price {
request.gas_price = Some(gas_price.into());
}
let mut request = confirmation.transaction;
// apply modification
if let Some(gas_price) = modification.gas_price {
request.gas_price = Some(gas_price.into());
}
let sender = request.from;
let sender = request.from;
match unlock_sign_and_dispatch(&*client, &*miner, request, &*accounts, sender, pass) {
Ok(hash) => {
queue.request_confirmed(id, Ok(hash.clone()));
Some(to_value(&hash))
},
_ => None
}
})
.unwrap_or_else(|| {
to_value(&false)
})
match unlock_sign_and_dispatch(&*client, &*miner, request, &*accounts, sender, pass) {
Ok(hash) => {
queue.request_confirmed(id, Ok(hash.clone()));
Some(to_value(&hash))
},
_ => None
}
})
.unwrap_or_else(|| {
to_value(&false)
})
}
)
}

View File

@ -21,7 +21,7 @@ if ! type $KCOV > /dev/null; then
fi
. ./scripts/targets.sh
cargo test $TARGETS --no-default-features --no-run || exit $?
cargo test $TARGETS --no-run || exit $?

View File

@ -3,5 +3,5 @@
. ./scripts/targets.sh
cargo doc --no-deps --verbose --no-default-features $TARGETS &&
cargo doc --no-deps --verbose $TARGETS &&
echo '<meta http-equiv=refresh content=0;url=ethcore/index.html>' > target/doc/index.html

View File

@ -1,14 +1,14 @@
#!/bin/bash
export TARGETS="
-p ethkey \
-p ethstore \
-p bigint\
-p ethash \
-p ethcore-util \
-p ethcore \
-p ethsync \
-p ethcore-dapps \
-p ethcore-rpc \
-p ethcore-signer \
-p parity \
-p bigint"
# TODO [ToDr] add ethcore-dapps back
-p ethcore-util \
-p ethkey \
-p ethstore \
-p ethsync \
-p parity"

View File

@ -18,10 +18,11 @@ env_logger = "0.3"
ws = { git = "https://github.com/ethcore/ws-rs.git", branch = "stable" }
ethcore-util = { path = "../util" }
ethcore-rpc = { path = "../rpc" }
parity-dapps-signer = { git = "https://github.com/ethcore/parity-ui.git", version = "0.2.0", optional = true}
parity-dapps-signer = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6", optional = true}
clippy = { version = "0.0.79", optional = true}
[features]
dev = ["clippy"]
ui = ["parity-dapps-signer"]
use-precompiled-js = ["parity-dapps-signer/use-precompiled-js"]

View File

@ -14,5 +14,5 @@ case $1 in
esac
. ./scripts/targets.sh
cargo test --no-default-features $FEATURES $TARGETS $1 \
cargo test --release --verbose $FEATURES $TARGETS $1 \

View File

@ -177,16 +177,13 @@ impl Database {
opts.set_block_based_table_factory(&block_opts);
opts.set_prefix_extractor_fixed_size(size);
if let Some(cache_size) = config.cache_size {
block_opts.set_cache(Cache::new(cache_size * 1024 * 256));
opts.set_write_buffer_size(cache_size * 1024 * 256);
block_opts.set_cache(Cache::new(cache_size * 1024 * 1024));
}
} else if let Some(cache_size) = config.cache_size {
let mut block_opts = BlockBasedOptions::new();
// half goes to read cache
block_opts.set_cache(Cache::new(cache_size * 1024 * 256));
block_opts.set_cache(Cache::new(cache_size * 1024 * 1024));
opts.set_block_based_table_factory(&block_opts);
// quarter goes to each of the two write buffers
opts.set_write_buffer_size(cache_size * 1024 * 256);
}
let mut write_opts = WriteOptions::new();
@ -207,12 +204,12 @@ impl Database {
/// Insert a key-value pair in the transaction. Any existing value will be overwritten.
pub fn put(&self, key: &[u8], value: &[u8]) -> Result<(), String> {
self.db.put(key, value)
self.db.put_opt(key, value, &self.write_opts)
}
/// Delete value by key.
pub fn delete(&self, key: &[u8]) -> Result<(), String> {
self.db.delete(key)
self.db.delete_opt(key, &self.write_opts)
}
/// Commit transaction to database.

View File

@ -16,51 +16,65 @@
//! Path utilities
use std::path::Path;
use std::path::PathBuf;
#[cfg(target_os = "macos")]
/// Get the config path for application `name`.
/// `name` should be capitalized, e.g. `"Ethereum"`, `"Parity"`.
pub fn config_path(name: &str) -> PathBuf {
let mut home = ::std::env::home_dir().expect("Failed to get home dir");
home.push("Library");
home.push(name);
home
}
#[cfg(windows)]
/// Get the config path for application `name`.
/// `name` should be capitalized, e.g. `"Ethereum"`, `"Parity"`.
pub fn config_path(name: &str) -> PathBuf {
let mut home = ::std::env::home_dir().expect("Failed to get home dir");
home.push("AppData");
home.push("Roaming");
home.push(name);
home
}
#[cfg(not(any(target_os = "macos", windows)))]
/// Get the config path for application `name`.
/// `name` should be capitalized, e.g. `"Ethereum"`, `"Parity"`.
pub fn config_path(name: &str) -> PathBuf {
let mut home = ::std::env::home_dir().expect("Failed to get home dir");
home.push(format!(".{}", name.to_lowercase()));
home
}
/// Get the specific folder inside a config path.
pub fn config_path_with(name: &str, then: &str) -> PathBuf {
let mut path = config_path(name);
path.push(then);
path
}
/// Default ethereum paths
pub mod ethereum {
use std::path::PathBuf;
#[cfg(target_os = "macos")]
/// Default path for ethereum installation on Mac Os
pub fn default() -> PathBuf {
let mut home = ::std::env::home_dir().expect("Failed to get home dir");
home.push("Library");
home.push("Ethereum");
home
}
#[cfg(windows)]
/// Default path for ethereum installation on Windows
pub fn default() -> PathBuf {
let mut home = ::std::env::home_dir().expect("Failed to get home dir");
home.push("AppData");
home.push("Roaming");
home.push("Ethereum");
home
}
#[cfg(not(any(target_os = "macos", windows)))]
/// Default path for ethereum installation on posix system which is not Mac OS
pub fn default() -> PathBuf {
let mut home = ::std::env::home_dir().expect("Failed to get home dir");
home.push(".ethereum");
home
}
pub fn default() -> PathBuf { super::config_path("Ethereum") }
/// Get the specific folder inside default ethereum installation
pub fn with_default(s: &str) -> PathBuf {
let mut pth = default();
pth.push(s);
pth
let mut path = default();
path.push(s);
path
}
/// Get the specific folder inside default ethereum installation configured for testnet
pub fn with_testnet(s: &str) -> PathBuf {
let mut pth = default();
pth.push("testnet");
pth.push(s);
pth
let mut path = default();
path.push("testnet");
path.push(s);
path
}
}
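
The platform-specific defaults are now expressed once: config_path picks the per-OS base directory with #[cfg], and ethereum::default() simply delegates to config_path("Ethereum"). A self-contained reduction of the helpers above, keeping only the non-macOS/non-Windows branch for brevity:

use std::path::PathBuf;

// Simplified copy of config_path / config_path_with (Linux/BSD branch only);
// the real module selects the macOS and Windows variants with #[cfg] attributes.
fn config_path(name: &str) -> PathBuf {
    let mut home = std::env::home_dir().expect("Failed to get home dir");
    home.push(format!(".{}", name.to_lowercase()));
    home
}

fn config_path_with(name: &str, then: &str) -> PathBuf {
    let mut path = config_path(name);
    path.push(then);
    path
}

fn main() {
    // e.g. /home/user/.parity/dapps
    println!("{:?}", config_path_with("Parity", "dapps"));
}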