Merge branch 'master' of github.com:ethcore/parity into beta

arkpar 2016-06-24 09:16:49 +02:00
commit 84ded6f43c
45 changed files with 519 additions and 335 deletions

Cargo.lock generated
View File

@ -3,7 +3,7 @@ name = "parity"
version = "1.2.0"
dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.76 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.77 (registry+https://github.com/rust-lang/crates.io-index)",
"ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
@ -129,15 +129,15 @@ dependencies = [
[[package]]
name = "clippy"
version = "0.0.76"
version = "0.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"clippy_lints 0.0.76 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy_lints 0.0.77 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "clippy_lints"
version = "0.0.76"
version = "0.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -250,7 +250,7 @@ name = "ethcore"
version = "1.2.0"
dependencies = [
"bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.76 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.77 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.2.0",
@ -275,7 +275,7 @@ dependencies = [
name = "ethcore-dapps"
version = "1.2.0"
dependencies = [
"clippy 0.0.76 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.77 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-rpc 1.2.0",
"ethcore-util 1.2.0",
"hyper 0.9.3 (git+https://github.com/ethcore/hyper)",
@ -337,7 +337,7 @@ dependencies = [
name = "ethcore-rpc"
version = "1.2.0"
dependencies = [
"clippy 0.0.76 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.77 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.2.0",
"ethcore 1.2.0",
"ethcore-devtools 1.2.0",
@ -360,7 +360,7 @@ dependencies = [
name = "ethcore-signer"
version = "1.2.0"
dependencies = [
"clippy 0.0.76 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.77 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-rpc 1.2.0",
"ethcore-util 1.2.0",
@ -379,7 +379,7 @@ dependencies = [
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"bigint 0.1.0",
"chrono 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.76 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.77 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -451,7 +451,7 @@ dependencies = [
name = "ethsync"
version = "1.2.0"
dependencies = [
"clippy 0.0.76 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.77 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.2.0",
"ethcore-util 1.2.0",

View File

@ -22,7 +22,7 @@ fdlimit = { path = "util/fdlimit" }
num_cpus = "0.2"
number_prefix = "0.2"
rpassword = "0.2.1"
clippy = { version = "0.0.76", optional = true}
clippy = { version = "0.0.77", optional = true}
ethcore = { path = "ethcore" }
ethcore-util = { path = "util" }
ethsync = { path = "sync" }

View File

@ -28,7 +28,7 @@ parity-dapps-wallet = { git = "https://github.com/ethcore/parity-dapps-wallet-rs
parity-dapps-dao = { git = "https://github.com/ethcore/parity-dapps-dao-rs.git", version = "0.4.0", optional = true }
parity-dapps-makerotc = { git = "https://github.com/ethcore/parity-dapps-makerotc-rs.git", version = "0.3.0", optional = true }
mime_guess = { version = "1.6.1" }
clippy = { version = "0.0.76", optional = true}
clippy = { version = "0.0.77", optional = true}
[build-dependencies]
serde_codegen = { version = "0.7.0", optional = true }

View File

@ -12,7 +12,7 @@ syntex = "*"
ethcore-ipc-codegen = { path = "../ipc/codegen" }
[dependencies]
clippy = { version = "0.0.67", optional = true}
clippy = { version = "0.0.77", optional = true}
ethcore-devtools = { path = "../devtools" }
ethcore-ipc = { path = "../ipc/rpc" }
rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }

View File

@ -22,7 +22,7 @@ ethcore-util = { path = "../util" }
evmjit = { path = "../evmjit", optional = true }
ethash = { path = "../ethash" }
num_cpus = "0.2"
clippy = { version = "0.0.76", optional = true}
clippy = { version = "0.0.77", optional = true}
crossbeam = "0.2.9"
lazy_static = "0.2"
ethcore-devtools = { path = "../devtools" }

View File

@ -166,16 +166,16 @@ impl Account {
!self.code_cache.is_empty() || (self.code_cache.is_empty() && self.code_hash == Some(SHA3_EMPTY))
}
/// Provide a database to lookup `code_hash`. Should not be called if it is a contract without code.
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
pub fn cache_code(&mut self, db: &AccountDB) -> bool {
// TODO: fill out self.code_cache;
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.is_cached() ||
match self.code_hash {
Some(ref h) => match db.lookup(h) {
Some(ref h) => match db.get(h) {
Some(x) => { self.code_cache = x.to_vec(); true },
_ => {
warn!("Failed reverse lookup of {}", h);
warn!("Failed reverse get of {}", h);
false
},
},

View File

@ -30,18 +30,18 @@ impl<'db> HashDB for AccountDB<'db>{
unimplemented!()
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<&[u8]> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
}
self.db.lookup(&combine_key(&self.address, key))
self.db.get(&combine_key(&self.address, key))
}
fn exists(&self, key: &H256) -> bool {
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.exists(&combine_key(&self.address, key))
self.db.contains(&combine_key(&self.address, key))
}
fn insert(&mut self, _value: &[u8]) -> H256 {
@ -52,7 +52,7 @@ impl<'db> HashDB for AccountDB<'db>{
unimplemented!()
}
fn kill(&mut self, _key: &H256) {
fn remove(&mut self, _key: &H256) {
unimplemented!()
}
}
@ -82,18 +82,18 @@ impl<'db> HashDB for AccountDBMut<'db>{
unimplemented!()
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<&[u8]> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
}
self.db.lookup(&combine_key(&self.address, key))
self.db.get(&combine_key(&self.address, key))
}
fn exists(&self, key: &H256) -> bool {
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
self.db.exists(&combine_key(&self.address, key))
self.db.contains(&combine_key(&self.address, key))
}
fn insert(&mut self, value: &[u8]) -> H256 {
@ -114,12 +114,12 @@ impl<'db> HashDB for AccountDBMut<'db>{
self.db.emplace(key, value.to_vec())
}
fn kill(&mut self, key: &H256) {
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
let key = combine_key(&self.address, key);
self.db.kill(&key)
self.db.remove(&key)
}
}
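
The two wrappers above scope every key to a particular account address before touching the shared backing store, which is why `get`, `contains` and `remove` all route through `combine_key`. Below is a minimal, self-contained sketch of that namespacing idea over a plain `HashMap`; the `combine` function, the `u64` key type, and `DefaultHasher` are stand-ins for illustration, not the actual `combine_key`/Keccak logic.

```
use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Stand-in for a 32-byte hash; the real code uses H256/Keccak.
type Key = u64;

/// Derive a per-account key from (address, key) so entries from
/// different accounts never collide in the shared store.
fn combine(address: &str, key: Key) -> Key {
    let mut h = DefaultHasher::new();
    address.hash(&mut h);
    key.hash(&mut h);
    h.finish()
}

struct ScopedDb<'a> {
    address: &'a str,
    backing: &'a mut HashMap<Key, Vec<u8>>,
}

impl<'a> ScopedDb<'a> {
    fn get(&self, key: Key) -> Option<&Vec<u8>> {
        self.backing.get(&combine(self.address, key))
    }
    fn contains(&self, key: Key) -> bool {
        self.backing.contains_key(&combine(self.address, key))
    }
    fn emplace(&mut self, key: Key, value: Vec<u8>) {
        self.backing.insert(combine(self.address, key), value);
    }
    fn remove(&mut self, key: Key) {
        self.backing.remove(&combine(self.address, key));
    }
}

fn main() {
    let mut backing = HashMap::new();
    {
        let mut a = ScopedDb { address: "0xaa", backing: &mut backing };
        a.emplace(1, b"alpha".to_vec());
        assert_eq!(a.get(1).map(|v| &v[..]), Some(&b"alpha"[..]));
    }
    let mut b = ScopedDb { address: "0xbb", backing: &mut backing };
    // Same raw key, different address: no collision with account 0xaa.
    assert!(!b.contains(1));
    b.remove(1); // removes nothing; 0xaa's entry is untouched
}
```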

View File

@ -205,6 +205,13 @@ impl AccountProvider {
self.unlock_account(account, password, Unlock::Temp)
}
/// Checks if the given account is unlocked.
pub fn is_unlocked<A>(&self, account: A) -> bool where Address: From<A> {
let account = Address::from(account).into();
let unlocked = self.unlocked.read().unwrap();
unlocked.get(&account).is_some()
}
/// Signs the message. Account must be unlocked.
pub fn sign<A, M>(&self, account: A, message: M) -> Result<H520, Error> where Address: From<A>, Message: From<M> {
let account = Address::from(account).into();
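
The new `is_unlocked` accessor lets callers check the unlocked-accounts map without attempting a signature, which the `EthSigningQueueClient` change later in this diff uses to bypass the confirmation queue. A minimal sketch of such a registry behind an `RwLock` follows; the `Address` alias, the `Unlock` variants, and the `Accounts` struct are simplified placeholders rather than the real `AccountProvider` types.

```
use std::collections::HashMap;
use std::sync::RwLock;

type Address = [u8; 20];

#[derive(Clone, Copy)]
enum Unlock { Perm, Temp }

#[derive(Default)]
struct Accounts {
    unlocked: RwLock<HashMap<Address, Unlock>>,
}

impl Accounts {
    fn unlock(&self, account: Address, kind: Unlock) {
        self.unlocked.write().unwrap().insert(account, kind);
    }

    /// Mirrors `AccountProvider::is_unlocked`: a read lock and a map probe.
    fn is_unlocked(&self, account: &Address) -> bool {
        self.unlocked.read().unwrap().get(account).is_some()
    }
}

fn main() {
    let accounts = Accounts::default();
    let alice = [1u8; 20];
    assert!(!accounts.is_unlocked(&alice));
    accounts.unlock(alice, Unlock::Perm);
    assert!(accounts.is_unlocked(&alice));
}
```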

View File

@ -288,6 +288,7 @@ impl<'x> OpenBlock<'x> {
/// Get the environment info concerning this block.
pub fn env_info(&self) -> EnvInfo {
// TODO: memoise.
const SOFT_FORK_BLOCK: u64 = 1775000;
EnvInfo {
number: self.block.base.header.number,
author: self.block.base.header.author.clone(),
@ -296,7 +297,7 @@ impl<'x> OpenBlock<'x> {
last_hashes: self.last_hashes.clone(), // TODO: should be a reference.
gas_used: self.block.receipts.last().map_or(U256::zero(), |r| r.gas_used),
gas_limit: self.block.base.header.gas_limit.clone(),
dao_rescue_block_gas_limit: if self.block.base.header.number == 1760000 { Some(self.block.base.header.gas_limit) } else { self.dao_rescue_block_gas_limit },
dao_rescue_block_gas_limit: if self.block.base.header.number == SOFT_FORK_BLOCK { Some(self.block.base.header.gas_limit) } else { self.dao_rescue_block_gas_limit },
}
}
@ -489,6 +490,7 @@ pub fn enact(
}
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
#[cfg_attr(feature="dev", allow(too_many_arguments))]
pub fn enact_bytes(
block_bytes: &[u8],
engine: &Engine,
@ -505,6 +507,7 @@ pub fn enact_bytes(
}
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
#[cfg_attr(feature="dev", allow(too_many_arguments))]
pub fn enact_verified(
block: &PreverifiedBlock,
engine: &Engine,
@ -520,6 +523,7 @@ pub fn enact_verified(
}
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header. Seal the block afterwards
#[cfg_attr(feature="dev", allow(too_many_arguments))]
pub fn enact_and_seal(
block_bytes: &[u8],
engine: &Engine,

View File

@ -230,7 +230,7 @@ impl<V> Client<V> where V: Verifier {
let last_hashes = self.build_last_hashes(header.parent_hash.clone());
let db = self.state_db.lock().unwrap().boxed_clone();
let enact_result = enact_verified(&block, engine, self.tracedb.tracing_enabled(), db, &parent, last_hashes, self.dao_rescue_block_gas_limit(), &self.vm_factory);
let enact_result = enact_verified(&block, engine, self.tracedb.tracing_enabled(), db, &parent, last_hashes, self.dao_rescue_block_gas_limit(header.parent_hash.clone()), &self.vm_factory);
if let Err(e) = enact_result {
warn!(target: "client", "Block import failed for #{} ({})\nError: {:?}", header.number(), header.hash(), e);
return Err(());
@ -486,7 +486,7 @@ impl<V> BlockChainClient for Client<V> where V: Verifier {
last_hashes: last_hashes,
gas_used: U256::zero(),
gas_limit: U256::max_value(),
dao_rescue_block_gas_limit: self.dao_rescue_block_gas_limit(),
dao_rescue_block_gas_limit: self.dao_rescue_block_gas_limit(view.parent_hash()),
};
// that's just a copy of the state.
let mut state = self.state();
@ -808,7 +808,7 @@ impl<V> MiningBlockChainClient for Client<V> where V: Verifier {
self.state_db.lock().unwrap().boxed_clone(),
&self.chain.block_header(&h).expect("h is best block hash: so its header must exist: qed"),
self.build_last_hashes(h.clone()),
self.dao_rescue_block_gas_limit(),
self.dao_rescue_block_gas_limit(h.clone()),
author,
gas_floor_target,
extra_data,

View File

@ -225,11 +225,26 @@ pub trait BlockChainClient : Sync + Send {
}
}
/// Get `Some` gas limit of block 1_760_000, or `None` if chain is not yet that long.
fn dao_rescue_block_gas_limit(&self) -> Option<U256> {
self.block_header(BlockID::Number(1_760_000))
.map(|header| HeaderView::new(&header).gas_limit())
/// Get `Some` gas limit of SOFT_FORK_BLOCK, or `None` if chain is not yet that long.
fn dao_rescue_block_gas_limit(&self, chain_hash: H256) -> Option<U256> {
const SOFT_FORK_BLOCK: u64 = 1775000;
// shortcut if the canon chain is already known.
if self.chain_info().best_block_number > SOFT_FORK_BLOCK + 1000 {
return self.block_header(BlockID::Number(SOFT_FORK_BLOCK)).map(|header| HeaderView::new(&header).gas_limit());
}
// otherwise check according to `chain_hash`.
if let Some(mut header) = self.block_header(BlockID::Hash(chain_hash)) {
if HeaderView::new(&header).number() < SOFT_FORK_BLOCK {
None
} else {
while HeaderView::new(&header).number() != SOFT_FORK_BLOCK {
header = self.block_header(BlockID::Hash(HeaderView::new(&header).parent_hash())).expect("chain is complete; parent of chain entry must be in chain; qed");
}
Some(HeaderView::new(&header).gas_limit())
}
} else {
None
}
}
}
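
The reworked default method no longer assumes the queried block sits on the canonical chain: unless the best block is comfortably past the fork, it walks parent hashes from `chain_hash` back to `SOFT_FORK_BLOCK` and reads the gas limit there. The sketch below reproduces that walk over a toy header map; the `Header` struct and the `u64` stand-ins for hashes and gas limits are assumptions made to keep the example self-contained.

```
use std::collections::HashMap;

const SOFT_FORK_BLOCK: u64 = 1_775_000;

/// Toy header: block number, parent hash, gas limit. Hashes are just u64s here.
struct Header { number: u64, parent: u64, gas_limit: u64 }

/// Walk back from `chain_hash` until the soft-fork block is reached,
/// returning its gas limit, or None if this branch does not reach it.
fn rescue_gas_limit(headers: &HashMap<u64, Header>, chain_hash: u64) -> Option<u64> {
    let mut header = headers.get(&chain_hash)?;
    if header.number < SOFT_FORK_BLOCK {
        return None;
    }
    while header.number != SOFT_FORK_BLOCK {
        header = headers.get(&header.parent)
            .expect("chain is complete; parent of chain entry must be in chain");
    }
    Some(header.gas_limit)
}

fn main() {
    let mut chain = HashMap::new();
    chain.insert(10, Header { number: SOFT_FORK_BLOCK, parent: 9, gas_limit: 4_712_388 });
    chain.insert(11, Header { number: SOFT_FORK_BLOCK + 1, parent: 10, gas_limit: 4_712_390 });
    assert_eq!(rescue_gas_limit(&chain, 11), Some(4_712_388));
    // A branch that is still below the fork block yields None.
    chain.insert(5, Header { number: SOFT_FORK_BLOCK - 1, parent: 4, gas_limit: 0 });
    assert_eq!(rescue_gas_limit(&chain, 5), None);
}
```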

View File

@ -40,7 +40,7 @@ pub struct EnvInfo {
/// The gas used.
pub gas_used: U256,
/// Block gas limit at DAO rescue block #1760000 or None if not yet there.
/// Block gas limit at DAO rescue block SOFT_FORK_BLOCK or None if not yet there.
pub dao_rescue_block_gas_limit: Option<U256>,
}

View File

@ -228,7 +228,7 @@ pub enum Error {
/// The value of the nonce or mix hash is invalid.
PowInvalid,
/// Error concerning TrieDBs
TrieError(TrieError),
Trie(TrieError),
}
impl fmt::Display for Error {
@ -244,7 +244,7 @@ impl fmt::Display for Error {
f.write_fmt(format_args!("Unknown engine name ({})", name)),
Error::PowHashInvalid => f.write_str("Invalid or out of date PoW hash."),
Error::PowInvalid => f.write_str("Invalid nonce or mix hash"),
Error::TrieError(ref err) => f.write_fmt(format_args!("{}", err)),
Error::Trie(ref err) => f.write_fmt(format_args!("{}", err)),
}
}
}
@ -308,7 +308,7 @@ impl From<IoError> for Error {
impl From<TrieError> for Error {
fn from(err: TrieError) -> Error {
Error::TrieError(err)
Error::Trie(err)
}
}
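
Renaming the variant from `TrieError` to `Trie` quiets clippy's `enum_variant_names` lint without changing behaviour; the `From<TrieError>` impl is what lets `try!` (or `?`) convert the inner error automatically. A small sketch of that conversion pattern, using made-up error types rather than the real ones, is below.

```
#[derive(Debug)]
struct TrieError(String);

#[derive(Debug)]
enum Error {
    Trie(TrieError),
}

impl From<TrieError> for Error {
    fn from(err: TrieError) -> Error {
        Error::Trie(err)
    }
}

fn read_node(missing: bool) -> Result<u32, TrieError> {
    if missing { Err(TrieError("incomplete database".into())) } else { Ok(7) }
}

/// `?` (the successor of `try!`) calls `From::from` on the `TrieError`,
/// so callers only ever see the outer `Error` type.
fn lookup(missing: bool) -> Result<u32, Error> {
    Ok(read_node(missing)?)
}

fn main() {
    match lookup(true) {
        Err(Error::Trie(TrieError(msg))) => println!("converted automatically: {}", msg),
        other => panic!("unexpected: {:?}", other),
    }
    assert_eq!(lookup(false).unwrap(), 7);
}
```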

View File

@ -106,7 +106,7 @@ impl Engine for Ethash {
} else {
let mut s = Schedule::new_homestead();
if self.ethash_params.dao_rescue_soft_fork {
s.reject_dao_transactions = env_info.dao_rescue_block_gas_limit.map(|x| x <= 4_000_000.into()).unwrap_or(false);
s.reject_dao_transactions = env_info.dao_rescue_block_gas_limit.map_or(false, |x| x <= 4_000_000.into());
}
s
}
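
The one-line change above swaps `map(..).unwrap_or(false)` for `map_or(false, ..)`, the form clippy prefers; the two expressions are equivalent. A quick check of that equivalence, using placeholder numbers rather than real block gas limits:

```
fn main() {
    let cases: [Option<u64>; 3] = [None, Some(3_000_000), Some(5_000_000)];
    for dao_rescue_block_gas_limit in cases.iter().cloned() {
        let a = dao_rescue_block_gas_limit.map(|x| x <= 4_000_000).unwrap_or(false);
        let b = dao_rescue_block_gas_limit.map_or(false, |x| x <= 4_000_000);
        assert_eq!(a, b);
    }
}
```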

View File

@ -16,9 +16,8 @@
//! Just in time compiler execution environment.
use common::*;
use trace::VMTracer;
use evmjit;
use evm::{self, Error, GasLeft};
use evm::{self, GasLeft};
/// Should be used to convert jit types to ethcore
trait FromJit<T>: Sized {
@ -303,7 +302,7 @@ impl<'a> evmjit::Ext for ExtAdapter<'a> {
#[derive(Default)]
pub struct JitEvm {
ctxt: Option<evmjit::ContextHandle>,
context: Option<evmjit::ContextHandle>,
}
impl evm::Evm for JitEvm {
@ -347,7 +346,7 @@ impl evm::Evm for JitEvm {
data.timestamp = ext.env_info().timestamp as i64;
self.context = Some(unsafe { evmjit::ContextHandle::new(data, schedule, &mut ext_handle) });
let context = self.context.as_ref_mut().unwrap();
let mut context = self.context.as_mut().unwrap();
let res = context.exec();
match res {

View File

@ -274,7 +274,7 @@ impl MinerService for Miner {
last_hashes: last_hashes,
gas_used: U256::zero(),
gas_limit: U256::max_value(),
dao_rescue_block_gas_limit: chain.dao_rescue_block_gas_limit(),
dao_rescue_block_gas_limit: chain.dao_rescue_block_gas_limit(header.parent_hash().clone()),
};
// that's just a copy of the state.
let mut state = block.state().clone();

View File

@ -239,6 +239,7 @@ impl TransactionSet {
if let Some(ref old_order) = r {
self.by_priority.remove(old_order);
}
assert_eq!(self.by_priority.len(), self.by_address.len());
r
}
@ -279,8 +280,10 @@ impl TransactionSet {
fn drop(&mut self, sender: &Address, nonce: &U256) -> Option<TransactionOrder> {
if let Some(tx_order) = self.by_address.remove(sender, nonce) {
self.by_priority.remove(&tx_order);
assert_eq!(self.by_priority.len(), self.by_address.len());
return Some(tx_order);
}
assert_eq!(self.by_priority.len(), self.by_address.len());
None
}
@ -468,7 +471,9 @@ impl TransactionQueue {
}));
}
self.import_tx(vtx, client_account.nonce).map_err(Error::Transaction)
let r = self.import_tx(vtx, client_account.nonce).map_err(Error::Transaction);
assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len());
r
}
/// Removes all transactions from particular sender up to (excluding) given client (state) nonce.
@ -484,6 +489,7 @@ impl TransactionQueue {
// And now lets check if there is some batch of transactions in future
// that should be placed in current. It should also update last_nonces.
self.move_matching_future_to_current(sender, client_nonce, client_nonce);
assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len());
}
/// Removes invalid transaction identified by hash from queue.
@ -493,6 +499,8 @@ impl TransactionQueue {
/// If gap is introduced marks subsequent transactions as future
pub fn remove_invalid<T>(&mut self, transaction_hash: &H256, fetch_account: &T)
where T: Fn(&Address) -> AccountDetails {
assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len());
let transaction = self.by_hash.remove(transaction_hash);
if transaction.is_none() {
// We don't know this transaction
@ -511,6 +519,7 @@ impl TransactionQueue {
// And now lets check if there is some chain of transactions in future
// that should be placed in current
self.move_matching_future_to_current(sender, current_nonce, current_nonce);
assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len());
return;
}
@ -520,6 +529,7 @@ impl TransactionQueue {
// This will keep consistency in queue
// Moves all to future and then promotes a batch from current:
self.remove_all(sender, current_nonce);
assert_eq!(self.future.by_priority.len() + self.current.by_priority.len(), self.by_hash.len());
return;
}
}
@ -538,7 +548,7 @@ impl TransactionQueue {
} else {
trace!(target: "miner", "Removing old transaction: {:?} (nonce: {} < {})", order.hash, k, current_nonce);
// Remove the transaction completely
self.by_hash.remove(&order.hash);
self.by_hash.remove(&order.hash).expect("All transactions in `future` are also in `by_hash`");
}
}
}
@ -558,7 +568,7 @@ impl TransactionQueue {
self.future.insert(*sender, k, order.update_height(k, current_nonce));
} else {
trace!(target: "miner", "Removing old transaction: {:?} (nonce: {} < {})", order.hash, k, current_nonce);
self.by_hash.remove(&order.hash);
self.by_hash.remove(&order.hash).expect("All transactions in `future` are also in `by_hash`");
}
}
self.future.enforce_limit(&mut self.by_hash);
@ -686,7 +696,7 @@ impl TransactionQueue {
// same (sender, nonce), but above function would not move it.
if let Some(order) = self.future.drop(&address, &nonce) {
// Let's insert that transaction to current (if it has higher gas_price)
let future_tx = self.by_hash.remove(&order.hash).unwrap();
let future_tx = self.by_hash.remove(&order.hash).expect("All transactions in `future` are always in `by_hash`.");
// if the transaction in `current` (the one we are importing) is replaced it means that it has too low a gas_price
try!(check_too_cheap(!Self::replace_transaction(future_tx, state_nonce, &mut self.current, &mut self.by_hash)));
}
@ -728,7 +738,9 @@ impl TransactionQueue {
let address = tx.sender();
let nonce = tx.nonce();
by_hash.insert(hash, tx);
let old_hash = by_hash.insert(hash, tx);
assert!(old_hash.is_none(), "Each hash has to be inserted exactly once.");
if let Some(old) = set.insert(address, nonce, order.clone()) {
// There was already transaction in queue. Let's check which one should stay
@ -738,11 +750,11 @@ impl TransactionQueue {
// Put back old transaction since it has greater priority (higher gas_price)
set.insert(address, nonce, old);
// and remove new one
by_hash.remove(&hash);
by_hash.remove(&hash).expect("The hash has been just inserted and no other line is altering `by_hash`.");
false
} else {
// Make sure we remove old transaction entirely
by_hash.remove(&old.hash);
by_hash.remove(&old.hash).expect("The hash is coming from `future` so it has to be in `by_hash`.");
true
}
} else {
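
The `assert_eq!` calls added throughout this file pin down two invariants: `by_priority` and `by_address` always hold the same number of entries, and every transaction in `current` or `future` has exactly one entry in `by_hash`. The sketch below shows the same keep-two-structures-consistent pattern in isolation, with `u64` hashes and a `String` payload standing in for real transactions; it does not reproduce the actual ordering logic.

```
use std::collections::{BTreeSet, HashMap};

/// Toy queue: `by_priority` and `by_hash` must always describe the same set
/// of transactions, which is exactly what the new assertions enforce.
#[derive(Default)]
struct Queue {
    by_priority: BTreeSet<u64>,
    by_hash: HashMap<u64, String>,
}

impl Queue {
    fn insert(&mut self, hash: u64, tx: String) {
        let old = self.by_hash.insert(hash, tx);
        assert!(old.is_none(), "Each hash has to be inserted exactly once.");
        self.by_priority.insert(hash);
        self.check();
    }

    fn remove(&mut self, hash: u64) {
        if self.by_priority.remove(&hash) {
            self.by_hash.remove(&hash)
                .expect("every queued transaction is also in `by_hash`");
        }
        self.check();
    }

    /// The consistency check the diff adds after every mutation.
    fn check(&self) {
        assert_eq!(self.by_priority.len(), self.by_hash.len());
    }
}

fn main() {
    let mut q = Queue::default();
    q.insert(1, "tx-1".to_owned());
    q.insert(2, "tx-2".to_owned());
    q.remove(1);
    q.remove(42); // unknown hash: a no-op, invariant still holds
}
```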

View File

@ -20,6 +20,7 @@ Usage:
ethstore change-pwd <address> <old-pwd> <new-pwd> [--dir DIR]
ethstore list [--dir DIR]
ethstore import [--src DIR] [--dir DIR]
ethstore import-wallet <path> <password> [--dir DIR]
ethstore remove <address> <password> [--dir DIR]
ethstore sign <address> <password> <message> [--dir DIR]
ethstore [-h | --help]
@ -38,6 +39,7 @@ Commands:
change-pwd Change account password.
list List accounts.
import Import accounts from src.
import-wallet Import presale wallet.
remove Remove account.
sign Sign message.
```
@ -48,11 +50,11 @@ Commands:
*Encrypt secret with a password and save it in secret store.*
- `<secret>` - ethereum secret, 32 bytes long
- `<password>` - account password, any string
- `<password>` - account password, file path
- `[--dir DIR]` - secret store directory; it may be either parity, parity-test, geth, geth-test, or a path. Default: parity
```
ethstore insert 7d29fab185a33e2cd955812397354c472d2b84615b645aa135ff539f6b0d70d5 "this is sparta"
ethstore insert 7d29fab185a33e2cd955812397354c472d2b84615b645aa135ff539f6b0d70d5 password.txt
```
```
@ -75,12 +77,12 @@ ethstore insert `ethkey generate random -s` "this is sparta"
*Change account password.*
- `<address>` - ethereum address, 20 bytes long
- `<old-pwd>` - old account password, any string
- `<new-pwd>` - new account password, any string
- `<old-pwd>` - old account password, file path
- `<new-pwd>` - new account password, file path
- `[--dir DIR]` - secret store directory; it may be either parity, parity-test, geth, geth-test, or a path. Default: parity
```
ethstore change-pwd a8fa5dd30a87bb9e3288d604eb74949c515ab66e "this is sparta" "hello world"
ethstore change-pwd a8fa5dd30a87bb9e3288d604eb74949c515ab66e old_pwd.txt new_pwd.txt
```
```
@ -112,6 +114,10 @@ ethstore list
- `[--src DIR]` - secret store directory; it may be either parity, parity-test, geth, geth-test, or a path. Default: geth
- `[--dir DIR]` - secret store directory; it may be either parity, parity-test, geth, geth-test, or a path. Default: parity
```
ethstore import
```
```
0: e6a3d25a7cb7cd21cb720df5b5e8afd154af1bbb
1: 6edddfc6349aff20bc6467ccf276c5b52487f7a8
@ -119,15 +125,32 @@ ethstore list
--
#### `import-wallet <path> <password> [--dir DIR]`
*Import account from presale wallet.*
- `<path>` - presale wallet path
- `<password>` - account password, file path
- `[--dir DIR]` - secret store directory; it may be either parity, parity-test, geth, geth-test, or a path. Default: parity
```
ethstore import-wallet ethwallet.json password.txt
```
```
e6a3d25a7cb7cd21cb720df5b5e8afd154af1bbb
```
--
#### `remove <address> <password> [--dir DIR]`
*Remove account from secret store.*
- `<address>` - ethereum address, 20 bytes long
- `<password>` - account password, any string
- `<password>` - account password, file path
- `[--dir DIR]` - secret store directory; it may be either parity, parity-test, geth, geth-test, or a path. Default: parity
```
ethstore remove a8fa5dd30a87bb9e3288d604eb74949c515ab66e "hello world"
ethstore remove a8fa5dd30a87bb9e3288d604eb74949c515ab66e password.txt
```
```
@ -140,12 +163,12 @@ true
*Sign message with account's secret.*
- `<address>` - ethereum address, 20 bytes long
- `<password>` - account password, any string
- `<password>` - account password, file path
- `<message>` - message to sign, 32 bytes long
- `[--dir DIR]` - secret store directory; it may be either parity, parity-test, geth, geth-test, or a path. Default: parity
```
ethstore sign 24edfff680d536a5f6fe862d36df6f8f6f40f115 "this is sparta" 7d29fab185a33e2cd955812397354c472d2b84615b645aa135ff539f6b0d70d5
ethstore sign 24edfff680d536a5f6fe862d36df6f8f6f40f115 password.txt 7d29fab185a33e2cd955812397354c472d2b84615b645aa135ff539f6b0d70d5
```
```

View File

@ -18,13 +18,14 @@ extern crate rustc_serialize;
extern crate docopt;
extern crate ethstore;
use std::{env, process};
use std::{env, process, fs};
use std::io::Read;
use std::ops::Deref;
use std::str::FromStr;
use docopt::Docopt;
use ethstore::ethkey::{Secret, Address, Message};
use ethstore::dir::{KeyDirectory, ParityDirectory, DiskDirectory, GethDirectory, DirectoryType};
use ethstore::{EthStore, SecretStore, import_accounts, Error};
use ethstore::{EthStore, SecretStore, import_accounts, Error, PresaleWallet};
pub const USAGE: &'static str = r#"
Ethereum key management.
@ -35,6 +36,7 @@ Usage:
ethstore change-pwd <address> <old-pwd> <new-pwd> [--dir DIR]
ethstore list [--dir DIR]
ethstore import [--src DIR] [--dir DIR]
ethstore import-wallet <path> <password> [--dir DIR]
ethstore remove <address> <password> [--dir DIR]
ethstore sign <address> <password> <message> [--dir DIR]
ethstore [-h | --help]
@ -53,6 +55,7 @@ Commands:
change-pwd Change password.
list List accounts.
import Import accounts from src.
import-wallet Import presale wallet.
remove Remove account.
sign Sign message.
"#;
@ -63,6 +66,7 @@ struct Args {
cmd_change_pwd: bool,
cmd_list: bool,
cmd_import: bool,
cmd_import_wallet: bool,
cmd_remove: bool,
cmd_sign: bool,
arg_secret: String,
@ -71,6 +75,7 @@ struct Args {
arg_new_pwd: String,
arg_address: String,
arg_message: String,
arg_path: String,
flag_src: String,
flag_dir: String,
}
@ -105,6 +110,15 @@ fn format_accounts(accounts: &[Address]) -> String {
.join("\n")
}
fn load_password(path: &str) -> Result<String, Error> {
let mut file = try!(fs::File::open(path));
let mut password = String::new();
try!(file.read_to_string(&mut password));
// drop EOF
let _ = password.pop();
Ok(password)
}
fn execute<S, I>(command: I) -> Result<String, Error> where I: IntoIterator<Item=S>, S: AsRef<str> {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.argv(command).decode())
@ -114,11 +128,14 @@ fn execute<S, I>(command: I) -> Result<String, Error> where I: IntoIterator<Item
return if args.cmd_insert {
let secret = try!(Secret::from_str(&args.arg_secret));
let address = try!(store.insert_account(secret, &args.arg_password));
let password = try!(load_password(&args.arg_password));
let address = try!(store.insert_account(secret, &password));
Ok(format!("{}", address))
} else if args.cmd_change_pwd {
let address = try!(Address::from_str(&args.arg_address));
let ok = store.change_password(&address, &args.arg_old_pwd, &args.arg_new_pwd).is_ok();
let old_pwd = try!(load_password(&args.arg_old_pwd));
let new_pwd = try!(load_password(&args.arg_new_pwd));
let ok = store.change_password(&address, &old_pwd, &new_pwd).is_ok();
Ok(format!("{}", ok))
} else if args.cmd_list {
let accounts = store.accounts();
@ -128,14 +145,22 @@ fn execute<S, I>(command: I) -> Result<String, Error> where I: IntoIterator<Item
let dst = try!(key_dir(&args.flag_dir));
let accounts = try!(import_accounts(src.deref(), dst.deref()));
Ok(format_accounts(&accounts))
} else if args.cmd_import_wallet {
let wallet = try!(PresaleWallet::open(&args.arg_path));
let password = try!(load_password(&args.arg_password));
let kp = try!(wallet.decrypt(&password));
let address = try!(store.insert_account(kp.secret().clone(), &password));
Ok(format!("{}", address))
} else if args.cmd_remove {
let address = try!(Address::from_str(&args.arg_address));
let ok = store.remove_account(&address, &args.arg_password).is_ok();
let password = try!(load_password(&args.arg_password));
let ok = store.remove_account(&address, &password).is_ok();
Ok(format!("{}", ok))
} else if args.cmd_sign {
let address = try!(Address::from_str(&args.arg_address));
let message = try!(Message::from_str(&args.arg_message));
let signature = try!(store.sign(&address, &args.arg_password, &message));
let password = try!(load_password(&args.arg_password));
let signature = try!(store.sign(&address, &password, &message));
Ok(format!("{}", signature))
} else {
unreachable!();

View File

@ -67,7 +67,7 @@ impl Keccak256<[u8; 32]> for [u8] {
pub mod aes {
use rcrypto::blockmodes::{CtrMode, CbcDecryptor, PkcsPadding};
use rcrypto::aessafe::{AesSafe128Encryptor, AesSafe128Decryptor};
use rcrypto::symmetriccipher::{Encryptor, Decryptor};
use rcrypto::symmetriccipher::{Encryptor, Decryptor, SymmetricCipherError};
use rcrypto::buffer::{RefReadBuffer, RefWriteBuffer};
/// Encrypt a message
@ -83,9 +83,10 @@ pub mod aes {
}
/// Decrypt a message using cbc mode
pub fn decrypt_cbc(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) {
pub fn decrypt_cbc(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) -> Result<(), SymmetricCipherError> {
let mut encryptor = CbcDecryptor::new(AesSafe128Decryptor::new(k), PkcsPadding, iv.to_vec());
encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut RefWriteBuffer::new(dest), true).expect("Invalid length or padding");
try!(encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut RefWriteBuffer::new(dest), true));
Ok(())
}
}

View File

@ -43,7 +43,7 @@ impl PresaleWallet {
pbkdf2(&mut h_mac, password.as_bytes(), 2000, &mut derived_key);
let mut key = [0u8; 64];
crypto::aes::decrypt_cbc(&derived_key, &self.iv, &self.ciphertext, &mut key);
try!(crypto::aes::decrypt_cbc(&derived_key, &self.iv, &self.ciphertext, &mut key).map_err(|_| Error::InvalidPassword));
let secret = Secret::from(key.keccak256());
if let Ok(kp) = KeyPair::from_secret(secret) {
@ -58,7 +58,6 @@ impl PresaleWallet {
#[cfg(test)]
mod tests {
use ethkey::Address;
use super::PresaleWallet;
use json;
@ -74,7 +73,7 @@ mod tests {
let wallet = json::PresaleWallet::load(json.as_bytes()).unwrap();
let wallet = PresaleWallet::from(wallet);
let kp = wallet.decrypt("123").unwrap();
assert_eq!(kp.address(), Address::from(wallet.address));
assert!(wallet.decrypt("123").is_ok());
assert!(wallet.decrypt("124").is_err());
}
}

View File

@ -10,7 +10,7 @@ rustc-serialize = "0.3"
serde = "0.7.0"
serde_json = "0.7.0"
serde_macros = { version = "0.7.0", optional = true }
clippy = { version = "0.0.76", optional = true}
clippy = { version = "0.0.77", optional = true}
[build-dependencies]
serde_codegen = { version = "0.7.0", optional = true }

View File

@ -25,6 +25,7 @@ Usage:
parity daemon <pid-file> [options]
parity account (new | list ) [options]
parity account import <path>... [options]
parity wallet import <path> --password FILE [options]
parity import [ <file> ] [options]
parity export [ <file> ] [options]
parity signer new-token [options]
@ -55,7 +56,7 @@ Account Options:
ACCOUNTS is a comma-delimited list of addresses.
--password FILE Provide a file containing a password for unlocking
an account.
--keys-iterations NUM Specify the number of iterations to use when
deriving key from the password (bigger is more
secure) [default: 10240].
--no-import-keys Do not import keys from legacy clients.
@ -81,7 +82,7 @@ Networking Options:
--reserved-only Connect only to reserved nodes.
API and Console Options:
--jsonrpc-off Disable the JSON-RPC API server.
--no-jsonrpc Disable the JSON-RPC API server.
--jsonrpc-port PORT Specify the port portion of the JSONRPC API server
[default: 8545].
--jsonrpc-interface IP Specify the hostname portion of the JSONRPC API
@ -94,13 +95,13 @@ API and Console Options:
ethcore, ethcore_set, traces.
[default: web3,eth,net,ethcore,personal,traces].
--ipc-off Disable JSON-RPC over IPC service.
--no-ipc Disable JSON-RPC over IPC service.
--ipc-path PATH Specify custom path for JSON-RPC over IPC service
[default: $HOME/.parity/jsonrpc.ipc].
--ipc-apis APIS Specify custom API set available via JSON-RPC over
IPC [default: web3,eth,net,ethcore,personal,traces].
IPC [default: web3,eth,net,ethcore,personal,traces,rpc].
--dapps-off Disable the Dapps server (e.g. status page).
--no-dapps Disable the Dapps server (e.g. status page).
--dapps-port PORT Specify the port portion of the Dapps server
[default: 8080].
--dapps-interface IP Specify the hostname portion of the Dapps
@ -159,7 +160,7 @@ Footprint Options:
light - early merges with partial tracking. Fast,
light, and experimental!
auto - use the method most recently synced or
default to archive if none synced [default: auto].
default to fast if none synced [default: auto].
--cache-pref-size BYTES Specify the preferred size of the blockchain cache in
bytes [default: 16384].
--cache-max-size BYTES Specify the maximum size of the blockchain cache in
@ -196,13 +197,16 @@ Legacy Options:
--nodekey KEY Equivalent to --node-key KEY.
--nodiscover Equivalent to --no-discovery.
-j --jsonrpc Does nothing; JSON-RPC is on by default now.
--jsonrpc-off Equivalent to --no-jsonrpc.
-w --webapp Does nothing; dapps server is on by default now.
--dapps-off Equivalent to --no-dapps.
--rpc Does nothing; JSON-RPC is on by default now.
--rpcaddr IP Equivalent to --jsonrpc-interface IP.
--rpcport PORT Equivalent to --jsonrpc-port PORT.
--rpcapi APIS Equivalent to --jsonrpc-apis APIS.
--rpccorsdomain URL Equivalent to --jsonrpc-cors URL.
--ipcdisable Equivalent to --ipc-off.
--ipcdisable Equivalent to --no-ipc.
--ipc-off Equivalent to --no-ipc.
--ipcapi APIS Equivalent to --ipc-apis APIS.
--ipcpath PATH Equivalent to --ipc-path PATH.
--gasprice WEI Minimum amount of Wei per GAS to be paid for a
@ -223,6 +227,7 @@ Miscellaneous Options:
pub struct Args {
pub cmd_daemon: bool,
pub cmd_account: bool,
pub cmd_wallet: bool,
pub cmd_new: bool,
pub cmd_list: bool,
pub cmd_export: bool,
@ -258,15 +263,15 @@ pub struct Args {
pub flag_cache_pref_size: usize,
pub flag_cache_max_size: usize,
pub flag_queue_max_size: usize,
pub flag_jsonrpc_off: bool,
pub flag_no_jsonrpc: bool,
pub flag_jsonrpc_interface: String,
pub flag_jsonrpc_port: u16,
pub flag_jsonrpc_cors: Option<String>,
pub flag_jsonrpc_apis: String,
pub flag_ipc_off: bool,
pub flag_no_ipc: bool,
pub flag_ipc_path: String,
pub flag_ipc_apis: String,
pub flag_dapps_off: bool,
pub flag_no_dapps: bool,
pub flag_dapps_port: u16,
pub flag_dapps_interface: String,
pub flag_dapps_user: Option<String>,
@ -310,6 +315,9 @@ pub struct Args {
pub flag_testnet: bool,
pub flag_networkid: Option<String>,
pub flag_ipcdisable: bool,
pub flag_ipc_off: bool,
pub flag_jsonrpc_off: bool,
pub flag_dapps_off: bool,
pub flag_ipcpath: Option<String>,
pub flag_ipcapi: Option<String>,
pub flag_db_cache_size: Option<usize>,

View File

@ -265,7 +265,7 @@ impl Configuration {
"light" => journaldb::Algorithm::EarlyMerge,
"fast" => journaldb::Algorithm::OverlayRecent,
"basic" => journaldb::Algorithm::RefCounted,
"auto" => self.find_best_db(spec).unwrap_or(journaldb::Algorithm::Archive),
"auto" => self.find_best_db(spec).unwrap_or(journaldb::Algorithm::OverlayRecent),
_ => { die!("Invalid pruning method given."); }
};
@ -359,7 +359,7 @@ impl Configuration {
pub fn ipc_settings(&self) -> IpcConfiguration {
IpcConfiguration {
enabled: !(self.args.flag_ipcdisable || self.args.flag_ipc_off),
enabled: !(self.args.flag_ipcdisable || self.args.flag_ipc_off || self.args.flag_no_ipc),
socket_addr: self.ipc_path(),
apis: self.args.flag_ipcapi.clone().unwrap_or(self.args.flag_ipc_apis.clone()),
}
@ -372,7 +372,7 @@ impl Configuration {
chain: self.chain(),
max_peers: self.max_peers(),
network_port: self.net_port(),
rpc_enabled: !self.args.flag_jsonrpc_off,
rpc_enabled: !self.args.flag_jsonrpc_off && !self.args.flag_no_jsonrpc,
rpc_interface: self.args.flag_rpcaddr.clone().unwrap_or(self.args.flag_jsonrpc_interface.clone()),
rpc_port: self.args.flag_rpcport.unwrap_or(self.args.flag_jsonrpc_port),
}
@ -432,10 +432,10 @@ impl Configuration {
}
pub fn signer_port(&self) -> Option<u16> {
if self.args.flag_signer {
Some(self.args.flag_signer_port)
} else {
if !self.args.flag_signer {
None
} else {
Some(self.args.flag_signer_port)
}
}
}
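
Two behavioural points are worth calling out here: `--pruning auto` now falls back to `OverlayRecent` ("fast") rather than `Archive` when no existing database is detected, and the `signer_port` condition is merely inverted for readability. A hedged sketch of the flag-to-algorithm mapping follows; the `pruning` function and its `Option` argument standing in for `find_best_db(spec)` are illustrative assumptions.

```
#[derive(Debug, PartialEq, Clone, Copy)]
enum Algorithm { Archive, EarlyMerge, OverlayRecent, RefCounted }

/// `existing` stands in for `find_best_db(spec)`: the algorithm of a database
/// that is already on disk, if any.
fn pruning(flag: &str, existing: Option<Algorithm>) -> Result<Algorithm, String> {
    match flag {
        "archive" => Ok(Algorithm::Archive),
        "light" => Ok(Algorithm::EarlyMerge),
        "fast" => Ok(Algorithm::OverlayRecent),
        "basic" => Ok(Algorithm::RefCounted),
        // previously defaulted to Archive; now defaults to the "fast" backend
        "auto" => Ok(existing.unwrap_or(Algorithm::OverlayRecent)),
        other => Err(format!("Invalid pruning method given: {}", other)),
    }
}

fn main() {
    assert_eq!(pruning("auto", None).unwrap(), Algorithm::OverlayRecent);
    assert_eq!(pruning("auto", Some(Algorithm::Archive)).unwrap(), Algorithm::Archive);
    assert!(pruning("bogus", None).is_err());
}
```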

View File

@ -129,6 +129,11 @@ fn execute(conf: Configuration) {
return;
}
if conf.args.cmd_wallet {
execute_wallet_cli(conf);
return;
}
if conf.args.cmd_export {
execute_export(conf);
return;
@ -193,6 +198,12 @@ fn execute_client(conf: Configuration, spec: Spec, client_config: ClientConfig)
});
}
// Display warning about using unlock with signer
if conf.args.flag_signer && conf.args.flag_unlock.is_some() {
warn!("Using Trusted Signer and --unlock is not recommended!");
warn!("NOTE that Signer will not ask you to confirm transactions from unlocked account.");
}
// Secret Store
let account_service = Arc::new(conf.account_service());
@ -253,7 +264,7 @@ fn execute_client(conf: Configuration, spec: Spec, client_config: ClientConfig)
if conf.args.flag_webapp { println!("WARNING: Flag -w/--webapp is deprecated. Dapps server is now on by default. Ignoring."); }
let dapps_server = dapps::new(dapps::Configuration {
enabled: !conf.args.flag_dapps_off,
enabled: !conf.args.flag_dapps_off && !conf.args.flag_no_dapps,
interface: conf.args.flag_dapps_interface.clone(),
port: conf.args.flag_dapps_port,
user: conf.args.flag_dapps_user.clone(),
@ -534,6 +545,30 @@ fn execute_account_cli(conf: Configuration) {
}
}
fn execute_wallet_cli(conf: Configuration) {
use ethcore::ethstore::{PresaleWallet, EthStore};
use ethcore::ethstore::dir::DiskDirectory;
use ethcore::account_provider::AccountProvider;
let wallet_path = conf.args.arg_path.first().unwrap();
let filename = conf.args.flag_password.first().unwrap();
let mut file = File::open(filename).unwrap_or_else(|_| die!("{} Unable to read password file.", filename));
let mut file_content = String::new();
file.read_to_string(&mut file_content).unwrap_or_else(|_| die!("{} Unable to read password file.", filename));
let dir = Box::new(DiskDirectory::create(conf.keys_path()).unwrap());
let iterations = conf.keys_iterations();
let store = AccountProvider::new(Box::new(EthStore::open_with_iterations(dir, iterations).unwrap()));
// remove eof
let pass = &file_content[..file_content.len() - 1];
let wallet = PresaleWallet::open(wallet_path).unwrap_or_else(|_| die!("Unable to open presale wallet."));
let kp = wallet.decrypt(pass).unwrap_or_else(|_| die!("Invalid password"));
let address = store.insert_account(kp.secret().clone(), pass).unwrap();
println!("Imported account: {}", address);
}
fn wait_for_exit(
panic_handler: Arc<PanicHandler>,
_rpc_server: Option<RpcServer>,

View File

@ -150,7 +150,7 @@ pub fn setup_rpc<T: Extendable>(server: T, deps: Arc<Dependencies>, apis: ApiSet
server.add_delegate(EthFilterClient::new(&deps.client, &deps.miner).to_delegate());
if deps.signer_port.is_some() {
server.add_delegate(EthSigningQueueClient::new(&deps.signer_queue, &deps.client, &deps.miner).to_delegate());
server.add_delegate(EthSigningQueueClient::new(&deps.signer_queue, &deps.client, &deps.miner, &deps.secret_store).to_delegate());
} else {
server.add_delegate(EthSigningUnsafeClient::new(&deps.client, &deps.secret_store, &deps.miner).to_delegate());
}

View File

@ -23,7 +23,7 @@ ethcore-devtools = { path = "../devtools" }
rustc-serialize = "0.3"
transient-hashmap = "0.1"
serde_macros = { version = "0.7.0", optional = true }
clippy = { version = "0.0.76", optional = true}
clippy = { version = "0.0.77", optional = true}
json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
[build-dependencies]

View File

@ -43,15 +43,17 @@ fn fill_optional_fields<C, M>(request: &mut TransactionRequest, client: &C, mine
/// Implementation of functions that require signing when no trusted signer is used.
pub struct EthSigningQueueClient<C, M> where C: MiningBlockChainClient, M: MinerService {
queue: Weak<ConfirmationsQueue>,
accounts: Weak<AccountProvider>,
client: Weak<C>,
miner: Weak<M>,
}
impl<C, M> EthSigningQueueClient<C, M> where C: MiningBlockChainClient, M: MinerService {
/// Creates a new signing queue client given shared signing queue.
pub fn new(queue: &Arc<ConfirmationsQueue>, client: &Arc<C>, miner: &Arc<M>) -> Self {
pub fn new(queue: &Arc<ConfirmationsQueue>, client: &Arc<C>, miner: &Arc<M>, accounts: &Arc<AccountProvider>) -> Self {
EthSigningQueueClient {
queue: Arc::downgrade(queue),
accounts: Arc::downgrade(accounts),
client: Arc::downgrade(client),
miner: Arc::downgrade(miner),
}
@ -71,9 +73,18 @@ impl<C, M> EthSigning for EthSigningQueueClient<C, M>
fn send_transaction(&self, params: Params) -> Result<Value, Error> {
from_params::<(TransactionRequest, )>(params)
.and_then(|(mut request, )| {
let queue = take_weak!(self.queue);
let accounts = take_weak!(self.accounts);
let (client, miner) = (take_weak!(self.client), take_weak!(self.miner));
if accounts.is_unlocked(request.from) {
let sender = request.from;
return match sign_and_dispatch(&*client, &*miner, request, &*accounts, sender) {
Ok(hash) => to_value(&hash),
_ => to_value(&H256::zero()),
}
}
let queue = take_weak!(self.queue);
fill_optional_fields(&mut request, &*client, &*miner);
let id = queue.add_request(request);
let result = id.wait_with_timeout();

View File

@ -14,6 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::str::FromStr;
use std::sync::Arc;
use jsonrpc_core::IoHandler;
use v1::impls::EthSigningQueueClient;
@ -21,12 +22,16 @@ use v1::traits::EthSigning;
use v1::helpers::{ConfirmationsQueue, SigningQueue};
use v1::tests::helpers::TestMinerService;
use util::{Address, FixedHash};
use util::numbers::{Uint, U256};
use ethcore::account_provider::AccountProvider;
use ethcore::client::TestBlockChainClient;
use ethcore::transaction::{Transaction, Action};
struct EthSigningTester {
pub queue: Arc<ConfirmationsQueue>,
pub client: Arc<TestBlockChainClient>,
pub miner: Arc<TestMinerService>,
pub accounts: Arc<AccountProvider>,
pub io: IoHandler,
}
@ -35,13 +40,15 @@ impl Default for EthSigningTester {
let queue = Arc::new(ConfirmationsQueue::default());
let client = Arc::new(TestBlockChainClient::default());
let miner = Arc::new(TestMinerService::default());
let accounts = Arc::new(AccountProvider::transient_provider());
let io = IoHandler::new();
io.add_delegate(EthSigningQueueClient::new(&queue, &client, &miner).to_delegate());
io.add_delegate(EthSigningQueueClient::new(&queue, &client, &miner, &accounts).to_delegate());
EthSigningTester {
queue: queue,
client: client,
miner: miner,
accounts: accounts,
io: io,
}
}
@ -78,5 +85,41 @@ fn should_add_transaction_to_queue() {
// then
assert_eq!(tester.io.handle_request(&request), Some(response.to_owned()));
assert_eq!(tester.queue.requests().len(), 1);
}
#[test]
fn should_dispatch_transaction_if_account_is_unlocked() {
// given
let tester = eth_signing();
let acc = tester.accounts.new_account("test").unwrap();
tester.accounts.unlock_account_permanently(acc, "test".into()).unwrap();
let t = Transaction {
nonce: U256::zero(),
gas_price: U256::from(0x9184e72a000u64),
gas: U256::from(0x76c0),
action: Action::Call(Address::from_str("d46e8dd67c5d32be8058bb8eb970870f07244567").unwrap()),
value: U256::from(0x9184e72au64),
data: vec![]
};
let signature = tester.accounts.sign(acc, t.hash()).unwrap();
let t = t.with_signature(signature);
// when
let request = r#"{
"jsonrpc": "2.0",
"method": "eth_sendTransaction",
"params": [{
"from": ""#.to_owned() + format!("0x{:?}", acc).as_ref() + r#"",
"to": "0xd46e8dd67c5d32be8058bb8eb970870f07244567",
"gas": "0x76c0",
"gasPrice": "0x9184e72a000",
"value": "0x9184e72a"
}],
"id": 1
}"#;
let response = r#"{"jsonrpc":"2.0","result":""#.to_owned() + format!("0x{:?}", t.hash()).as_ref() + r#"","id":1}"#;
// then
assert_eq!(tester.io.handle_request(&request), Some(response.to_owned()));
}

View File

@ -20,7 +20,7 @@ ethcore-util = { path = "../util" }
ethcore-rpc = { path = "../rpc" }
parity-minimal-sysui = { git = "https://github.com/ethcore/parity-dapps-minimal-sysui-rs.git" }
clippy = { version = "0.0.76", optional = true}
clippy = { version = "0.0.77", optional = true}
[features]
dev = ["clippy"]

View File

@ -93,7 +93,6 @@ impl Server {
let config = {
let mut config = ws::Settings::default();
// It's also used for handling min-sysui requests (browser can make many of them in parallel)
config.max_connections = 15;
config.method_strict = true;
// Was shutting down server when suspending on linux:
config.shutdown_on_interrupt = false;

View File

@ -10,7 +10,7 @@ authors = ["Ethcore <admin@ethcore.io>"]
[dependencies]
ethcore-util = { path = "../util" }
ethcore = { path = "../ethcore" }
clippy = { version = "0.0.76", optional = true}
clippy = { version = "0.0.77", optional = true}
log = "0.3"
env_logger = "0.3"
time = "0.1.34"

View File

@ -946,7 +946,7 @@ impl ChainSync {
let tx = try!(r.at(i)).as_raw().to_vec();
transactions.push(tx);
}
let _ = io.chain().queue_transactions(transactions);
io.chain().queue_transactions(transactions);
Ok(())
}

View File

@ -28,7 +28,7 @@ crossbeam = "0.2"
slab = "0.2"
sha3 = { path = "sha3" }
serde = "0.7.0"
clippy = { version = "0.0.76", optional = true}
clippy = { version = "0.0.77", optional = true}
json-tests = { path = "json-tests" }
igd = "0.4.2"
ethcore-devtools = { path = "../devtools" }

View File

@ -20,12 +20,10 @@ use bytes::*;
use std::collections::HashMap;
/// Trait modelling datastore keyed by a 32-byte Keccak hash.
pub trait HashDB : AsHashDB {
pub trait HashDB: AsHashDB {
/// Get the keys in the database together with number of underlying references.
fn keys(&self) -> HashMap<H256, i32>;
/// Deprecated. use `get`.
fn lookup(&self, key: &H256) -> Option<&[u8]>; // TODO: rename to get.
/// Look up a given hash into the bytes that hash to it, returning None if the
/// hash is not known.
///
@ -41,10 +39,8 @@ pub trait HashDB : AsHashDB {
/// assert_eq!(m.get(&hash).unwrap(), hello_bytes);
/// }
/// ```
fn get(&self, key: &H256) -> Option<&[u8]> { self.lookup(key) }
fn get(&self, key: &H256) -> Option<&[u8]>;
/// Deprecated. Use `contains`.
fn exists(&self, key: &H256) -> bool; // TODO: rename to contains.
/// Check for the existence of a hash-key.
///
/// # Examples
@ -63,10 +59,10 @@ pub trait HashDB : AsHashDB {
/// assert!(!m.contains(&key));
/// }
/// ```
fn contains(&self, key: &H256) -> bool { self.exists(key) }
fn contains(&self, key: &H256) -> bool;
/// Insert a datum item into the DB and return the datum's hash for a later lookup. Insertions
/// are counted and the equivalent number of `kill()`s must be performed before the data
/// are counted and the equivalent number of `remove()`s must be performed before the data
/// is considered dead.
///
/// # Examples
@ -86,8 +82,6 @@ pub trait HashDB : AsHashDB {
/// Like `insert()` , except you provide the key and the data is all moved.
fn emplace(&mut self, key: H256, value: Bytes);
/// Deprecated - use `remove`.
fn kill(&mut self, key: &H256); // TODO: rename to remove.
/// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may
/// happen without the data eventually being inserted into the DB.
///
@ -109,7 +103,7 @@ pub trait HashDB : AsHashDB {
/// assert_eq!(m.get(key).unwrap(), d);
/// }
/// ```
fn remove(&mut self, key: &H256) { self.kill(key) }
fn remove(&mut self, key: &H256);
}
/// Upcast trait.
@ -121,6 +115,10 @@ pub trait AsHashDB {
}
impl<T: HashDB> AsHashDB for T {
fn as_hashdb(&self) -> &HashDB { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB { self }
fn as_hashdb(&self) -> &HashDB {
self
}
fn as_hashdb_mut(&mut self) -> &mut HashDB {
self
}
}
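
With the deprecated `lookup`/`exists`/`kill` shims removed, `HashDB` is reduced to the `get`/`contains`/`insert`/`emplace`/`remove` set that every backend in this diff now implements directly. A toy in-memory illustration of that method set follows; the `u64` keys and `DefaultHasher` hash are stand-ins for `H256`/Keccak, chosen only to keep the sketch dependency-free.

```
use std::collections::HashMap;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Stand-in for sha3/Keccak: any stable content hash works for the sketch.
fn hash(value: &[u8]) -> u64 {
    let mut h = DefaultHasher::new();
    value.hash(&mut h);
    h.finish()
}

#[derive(Default)]
struct MemoryDb {
    data: HashMap<u64, Vec<u8>>,
}

impl MemoryDb {
    fn get(&self, key: &u64) -> Option<&[u8]> {
        self.data.get(key).map(|v| &v[..])
    }
    fn contains(&self, key: &u64) -> bool {
        self.data.contains_key(key)
    }
    fn insert(&mut self, value: &[u8]) -> u64 {
        let key = hash(value);
        self.data.insert(key, value.to_vec());
        key
    }
    fn emplace(&mut self, key: u64, value: Vec<u8>) {
        self.data.insert(key, value);
    }
    fn remove(&mut self, key: &u64) {
        self.data.remove(key);
    }
}

fn main() {
    let mut m = MemoryDb::default();
    let key = m.insert(b"Hello world!");
    assert_eq!(m.get(&key).unwrap(), &b"Hello world!"[..]);
    assert!(m.contains(&key));
    m.remove(&key);
    assert!(!m.contains(&key));
    m.emplace(7, b"direct".to_vec());
    assert!(m.contains(&7));
}
```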

View File

@ -98,7 +98,7 @@ impl HashDB for ArchiveDB {
ret
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<&[u8]> {
let k = self.overlay.raw(key);
match k {
Some(&(ref d, rc)) if rc > 0 => Some(d),
@ -113,8 +113,8 @@ impl HashDB for ArchiveDB {
}
}
fn exists(&self, key: &H256) -> bool {
self.lookup(key).is_some()
fn contains(&self, key: &H256) -> bool {
self.get(key).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
@ -123,8 +123,8 @@ impl HashDB for ArchiveDB {
fn emplace(&mut self, key: H256, value: Bytes) {
self.overlay.emplace(key, value);
}
fn kill(&mut self, key: &H256) {
self.overlay.kill(key);
fn remove(&mut self, key: &H256) {
self.overlay.remove(key);
}
}
@ -207,7 +207,7 @@ mod tests {
jdb.commit(5, &b"1004a".sha3(), Some((3, b"1002a".sha3()))).unwrap();
jdb.commit(6, &b"1005a".sha3(), Some((4, b"1003a".sha3()))).unwrap();
assert!(jdb.exists(&x));
assert!(jdb.contains(&x));
}
#[test]
@ -216,14 +216,14 @@ mod tests {
let mut jdb = ArchiveDB::new_temp();
let h = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.remove(&h);
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(3, &b"3".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(4, &b"4".sha3(), Some((1, b"1".sha3()))).unwrap();
}
@ -235,26 +235,26 @@ mod tests {
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&baz));
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
}
@ -267,8 +267,8 @@ mod tests {
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
@ -277,12 +277,12 @@ mod tests {
jdb.remove(&bar);
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -292,16 +292,16 @@ mod tests {
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.remove(&foo);
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
jdb.insert(b"foo");
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(3, &b"2".sha3(), Some((0, b"2".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -315,10 +315,10 @@ mod tests {
jdb.insert(b"foo");
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(2, &b"2a".sha3(), Some((1, b"1a".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -344,8 +344,8 @@ mod tests {
{
let mut jdb = ArchiveDB::new(dir.to_str().unwrap(), None);
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
}
}
@ -373,7 +373,7 @@ mod tests {
let mut jdb = ArchiveDB::new(dir.to_str().unwrap(), None);
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.remove(&foo);
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
jdb.commit(5, &b"5".sha3(), Some((4, b"4".sha3()))).unwrap();
@ -402,7 +402,7 @@ mod tests {
{
let mut jdb = ArchiveDB::new(dir.to_str().unwrap(), None);
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
}

View File

@ -172,8 +172,8 @@ impl EarlyMergeDB {
trace!(target: "jdb.fine", "replay_keys: (end) refs={:?}", refs);
}
fn kill_keys(deletes: &[H256], refs: &mut HashMap<H256, RefInfo>, batch: &DBTransaction, from: RemoveFrom, trace: bool) {
// with a kill on {queue_refs: 1, in_archive: true}, we have two options:
fn remove_keys(deletes: &[H256], refs: &mut HashMap<H256, RefInfo>, batch: &DBTransaction, from: RemoveFrom, trace: bool) {
// with a remove on {queue_refs: 1, in_archive: true}, we have two options:
// - convert to {queue_refs: 1, in_archive: false} (i.e. remove it from the conceptual archive)
// - convert to {queue_refs: 0, in_archive: true} (i.e. remove it from the conceptual queue)
// (the latter option would then mean removing the RefInfo, since it would no longer be counted in the queue.)
@ -186,13 +186,13 @@ impl EarlyMergeDB {
c.in_archive = false;
Self::reset_already_in(batch, h);
if trace {
trace!(target: "jdb.fine", " kill({}): In archive, 1 in queue: Reducing to queue only and recording", h);
trace!(target: "jdb.fine", " remove({}): In archive, 1 in queue: Reducing to queue only and recording", h);
}
continue;
} else if c.queue_refs > 1 {
c.queue_refs -= 1;
if trace {
trace!(target: "jdb.fine", " kill({}): In queue > 1 refs: Decrementing ref count to {}", h, c.queue_refs);
trace!(target: "jdb.fine", " remove({}): In queue > 1 refs: Decrementing ref count to {}", h, c.queue_refs);
}
continue;
} else {
@ -204,14 +204,14 @@ impl EarlyMergeDB {
refs.remove(h);
Self::reset_already_in(batch, h);
if trace {
trace!(target: "jdb.fine", " kill({}): In archive, 1 in queue: Removing from queue and leaving in archive", h);
trace!(target: "jdb.fine", " remove({}): In archive, 1 in queue: Removing from queue and leaving in archive", h);
}
}
Some(RefInfo{queue_refs: 1, in_archive: false}) => {
refs.remove(h);
batch.delete(&h.bytes()).expect("Low-level database error. Some issue with your hard disk?");
if trace {
trace!(target: "jdb.fine", " kill({}): Not in archive, only 1 ref in queue: Removing from queue and DB", h);
trace!(target: "jdb.fine", " remove({}): Not in archive, only 1 ref in queue: Removing from queue and DB", h);
}
}
None => {
@ -219,7 +219,7 @@ impl EarlyMergeDB {
//assert!(!Self::is_already_in(db, &h));
batch.delete(&h.bytes()).expect("Low-level database error. Some issue with your hard disk?");
if trace {
trace!(target: "jdb.fine", " kill({}): Not in queue - MUST BE IN ARCHIVE: Removing from DB", h);
trace!(target: "jdb.fine", " remove({}): Not in queue - MUST BE IN ARCHIVE: Removing from DB", h);
}
}
_ => panic!("Invalid value in refs: {:?}", n),
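
`remove_keys` (formerly `kill_keys`) decides, per hash, whether a removal only adjusts the journal bookkeeping or actually erases data, driven by the `{queue_refs, in_archive}` record described in the comments above. The sketch below mirrors the branch structure of the `RemoveFrom::Archive` path; the write batch and trace logging are collapsed into a returned enum, and the `u64` keys are placeholders.

```
use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq)]
struct RefInfo { queue_refs: usize, in_archive: bool }

#[derive(Debug, PartialEq)]
enum Outcome {
    /// Data stays put; only the bookkeeping in `refs` changed.
    Kept,
    /// Last reference gone: the value is deleted from the backing database.
    DeleteFromDb,
}

/// The `RemoveFrom::Archive` branch structure of `remove_keys`, with the
/// write batch replaced by a returned `Outcome`.
fn remove(refs: &mut HashMap<u64, RefInfo>, h: u64) -> Outcome {
    match refs.get(&h).cloned() {
        // In archive and once in the queue: demote to queue-only and keep.
        Some(RefInfo { queue_refs: 1, in_archive: true }) => {
            refs.insert(h, RefInfo { queue_refs: 1, in_archive: false });
            Outcome::Kept
        }
        // More than one queue reference: just decrement.
        Some(ref c) if c.queue_refs > 1 => {
            refs.insert(h, RefInfo { queue_refs: c.queue_refs - 1, in_archive: c.in_archive });
            Outcome::Kept
        }
        // Only one queue reference and not archived: drop it entirely.
        Some(RefInfo { queue_refs: 1, in_archive: false }) => {
            refs.remove(&h);
            Outcome::DeleteFromDb
        }
        // Not in the queue at all: it must live only in the archive.
        None => Outcome::DeleteFromDb,
        n => panic!("Invalid value in refs: {:?}", n),
    }
}

fn main() {
    let mut refs = HashMap::new();
    refs.insert(1, RefInfo { queue_refs: 2, in_archive: true });
    assert_eq!(remove(&mut refs, 1), Outcome::Kept);   // 2 -> 1 queue refs
    assert_eq!(remove(&mut refs, 1), Outcome::Kept);   // archived -> queue-only
    assert_eq!(remove(&mut refs, 1), Outcome::DeleteFromDb);
    assert_eq!(remove(&mut refs, 2), Outcome::DeleteFromDb);
}
```
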
@ -290,7 +290,7 @@ impl HashDB for EarlyMergeDB {
ret
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<&[u8]> {
let k = self.overlay.raw(key);
match k {
Some(&(ref d, rc)) if rc > 0 => Some(d),
@ -305,8 +305,8 @@ impl HashDB for EarlyMergeDB {
}
}
fn exists(&self, key: &H256) -> bool {
self.lookup(key).is_some()
fn contains(&self, key: &H256) -> bool {
self.get(key).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
@ -315,8 +315,8 @@ impl HashDB for EarlyMergeDB {
fn emplace(&mut self, key: H256, value: Bytes) {
self.overlay.emplace(key, value);
}
fn kill(&mut self, key: &H256) {
self.overlay.kill(key);
fn remove(&mut self, key: &H256) {
self.overlay.remove(key);
}
}
@ -472,7 +472,7 @@ impl JournalDB for EarlyMergeDB {
if trace {
trace!(target: "jdb.ops", " Expunging: {:?}", deletes);
}
Self::kill_keys(&deletes, &mut refs, &batch, RemoveFrom::Archive, trace);
Self::remove_keys(&deletes, &mut refs, &batch, RemoveFrom::Archive, trace);
if trace {
trace!(target: "jdb.ops", " Finalising: {:?}", inserts);
@ -504,7 +504,7 @@ impl JournalDB for EarlyMergeDB {
if trace {
trace!(target: "jdb.ops", " Reverting: {:?}", inserts);
}
Self::kill_keys(&inserts, &mut refs, &batch, RemoveFrom::Queue, trace);
Self::remove_keys(&inserts, &mut refs, &batch, RemoveFrom::Queue, trace);
}
try!(batch.delete(&last));
@ -565,7 +565,7 @@ mod tests {
jdb.commit(6, &b"1005a".sha3(), Some((4, b"1003a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&x));
assert!(jdb.contains(&x));
}
#[test]
@ -584,8 +584,8 @@ mod tests {
assert!(jdb.can_reconstruct_refs());
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
}
#[test]
@ -595,20 +595,20 @@ mod tests {
let h = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.remove(&h);
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(3, &b"3".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(4, &b"4".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&h));
assert!(!jdb.contains(&h));
}
#[test]
@ -620,38 +620,38 @@ mod tests {
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&baz));
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
}
#[test]
@ -663,8 +663,8 @@ mod tests {
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
@ -675,15 +675,15 @@ mod tests {
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
}
#[test]
@ -694,19 +694,19 @@ mod tests {
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.remove(&foo);
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(3, &b"2".sha3(), Some((0, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -730,11 +730,11 @@ mod tests {
jdb.commit(1, &b"1c".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(2, &b"2a".sha3(), Some((1, b"1a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -758,11 +758,11 @@ mod tests {
jdb.commit(1, &b"1c".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -826,11 +826,11 @@ mod tests {
{
let mut jdb = EarlyMergeDB::new(dir.to_str().unwrap(), None);
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.contains(&foo));
}
}
@ -933,7 +933,7 @@ mod tests {
jdb.insert(b"foo");
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap(); // BROKEN
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.remove(&foo);
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
@ -941,7 +941,7 @@ mod tests {
jdb.commit(5, &b"5".sha3(), Some((4, b"4".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.contains(&foo));
}
#[test]
@ -1002,12 +1002,12 @@ mod tests {
jdb.remove(&foo);
jdb.commit(2, &b"2".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.insert(b"foo");
jdb.commit(3, &b"3".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
// incantation to reopen the db
}; { let mut jdb = EarlyMergeDB::new(dir.to_str().unwrap(), None);
@ -1015,21 +1015,21 @@ mod tests {
jdb.remove(&foo);
jdb.commit(4, &b"4".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
// incantation to reopen the db
}; { let mut jdb = EarlyMergeDB::new(dir.to_str().unwrap(), None);
jdb.commit(5, &b"5".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
// incantation to reopen the db
}; { let mut jdb = EarlyMergeDB::new(dir.to_str().unwrap(), None);
jdb.commit(6, &b"6".sha3(), Some((4, b"4".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.contains(&foo));
}
}
@ -1059,9 +1059,9 @@ mod tests {
let mut jdb = EarlyMergeDB::new(dir.to_str().unwrap(), None);
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
}
}
}
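For orientation, the rename running through these hunks (lookup → get, exists → contains, kill → remove, kill_keys → remove_keys) leaves the HashDB surface looking roughly as sketched below. This is a minimal, self-contained sketch with toy stand-ins for H256, Bytes and the hashing step, not the real ethcore-util trait; only the method names and signatures are taken from the hunks themselves.

use std::collections::HashMap;

// Toy stand-ins for the real ethcore-util types (assumption, illustration only).
type H256 = u64;
type Bytes = Vec<u8>;

// Post-rename HashDB surface: get/contains/remove replace lookup/exists/kill.
trait HashDB {
    fn get(&self, key: &H256) -> Option<&[u8]>;
    fn contains(&self, key: &H256) -> bool { self.get(key).is_some() }
    fn insert(&mut self, value: &[u8]) -> H256;
    fn emplace(&mut self, key: H256, value: Bytes);
    fn remove(&mut self, key: &H256);
}

#[derive(Default)]
struct ToyDB { data: HashMap<H256, Bytes> }

// Toy content hash, standing in for SHA3 just to keep the sketch runnable.
fn toy_hash(value: &[u8]) -> H256 {
    value.iter().fold(0xcbf29ce484222325, |h, b| (h ^ *b as u64).wrapping_mul(0x100000001b3))
}

impl HashDB for ToyDB {
    fn get(&self, key: &H256) -> Option<&[u8]> { self.data.get(key).map(|v| &v[..]) }
    fn insert(&mut self, value: &[u8]) -> H256 {
        let key = toy_hash(value);
        self.data.insert(key, value.to_vec());
        key
    }
    fn emplace(&mut self, key: H256, value: Bytes) { self.data.insert(key, value); }
    fn remove(&mut self, key: &H256) { self.data.remove(key); }
}

fn main() {
    let mut db = ToyDB::default();
    let key = db.insert(b"foo");
    assert!(db.contains(&key));      // was exists()
    assert!(db.get(&key).is_some()); // was lookup()
    db.remove(&key);                 // was kill()
    assert!(!db.contains(&key));
}

The default contains here is exactly the get(key).is_some() body that each implementation in the hunks above supplies.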


@ -288,11 +288,11 @@ impl JournalDB for OverlayRecentDB {
}
// update the overlay
for k in overlay_deletions {
journal_overlay.backing_overlay.kill(&k);
journal_overlay.backing_overlay.remove(&k);
}
// apply canon deletions
for k in canon_deletions {
if !journal_overlay.backing_overlay.exists(&k) {
if !journal_overlay.backing_overlay.contains(&k) {
try!(batch.delete(&k));
}
}
@ -321,12 +321,12 @@ impl HashDB for OverlayRecentDB {
ret
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<&[u8]> {
let k = self.transaction_overlay.raw(key);
match k {
Some(&(ref d, rc)) if rc > 0 => Some(d),
_ => {
let v = self.journal_overlay.read().unwrap().backing_overlay.lookup(key).map(|v| v.to_vec());
let v = self.journal_overlay.read().unwrap().backing_overlay.get(key).map(|v| v.to_vec());
match v {
Some(x) => {
Some(&self.transaction_overlay.denote(key, x).0)
@ -344,8 +344,8 @@ impl HashDB for OverlayRecentDB {
}
}
fn exists(&self, key: &H256) -> bool {
self.lookup(key).is_some()
fn contains(&self, key: &H256) -> bool {
self.get(key).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
@ -354,8 +354,8 @@ impl HashDB for OverlayRecentDB {
fn emplace(&mut self, key: H256, value: Bytes) {
self.transaction_overlay.emplace(key, value);
}
fn kill(&mut self, key: &H256) {
self.transaction_overlay.kill(key);
fn remove(&mut self, key: &H256) {
self.transaction_overlay.remove(key);
}
}
@ -397,7 +397,7 @@ mod tests {
jdb.commit(6, &b"1005a".sha3(), Some((4, b"1003a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&x));
assert!(jdb.contains(&x));
}
#[test]
@ -407,20 +407,20 @@ mod tests {
let h = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.remove(&h);
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(3, &b"3".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(4, &b"4".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&h));
assert!(!jdb.contains(&h));
}
#[test]
@ -432,38 +432,38 @@ mod tests {
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&baz));
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
}
#[test]
@ -475,8 +475,8 @@ mod tests {
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
@ -487,15 +487,15 @@ mod tests {
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
}
#[test]
@ -506,19 +506,19 @@ mod tests {
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.remove(&foo);
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(3, &b"2".sha3(), Some((0, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -542,11 +542,11 @@ mod tests {
jdb.commit(1, &b"1c".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(2, &b"2a".sha3(), Some((1, b"1a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -570,11 +570,11 @@ mod tests {
jdb.commit(1, &b"1c".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
}
#[test]
@ -638,11 +638,11 @@ mod tests {
{
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap(), None);
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.contains(&foo));
}
}
@ -745,7 +745,7 @@ mod tests {
jdb.insert(b"foo");
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap(); // BROKEN
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.remove(&foo);
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
@ -753,7 +753,7 @@ mod tests {
jdb.commit(5, &b"5".sha3(), Some((4, b"4".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.contains(&foo));
}
#[test]
@ -814,12 +814,12 @@ mod tests {
jdb.remove(&foo);
jdb.commit(2, &b"2".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
jdb.insert(b"foo");
jdb.commit(3, &b"3".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
// incantation to reopen the db
}; { let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap(), None);
@ -827,21 +827,21 @@ mod tests {
jdb.remove(&foo);
jdb.commit(4, &b"4".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
// incantation to reopen the db
}; { let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap(), None);
jdb.commit(5, &b"5".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.contains(&foo));
// incantation to reopen the db
}; { let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap(), None);
jdb.commit(6, &b"6".sha3(), Some((4, b"4".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.contains(&foo));
}
}
@ -871,9 +871,9 @@ mod tests {
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap(), None);
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
}
}
@ -893,7 +893,7 @@ mod tests {
assert!(jdb.can_reconstruct_refs());
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
}
}


@ -88,11 +88,11 @@ impl RefCountedDB {
impl HashDB for RefCountedDB {
fn keys(&self) -> HashMap<H256, i32> { self.forward.keys() }
fn lookup(&self, key: &H256) -> Option<&[u8]> { self.forward.lookup(key) }
fn exists(&self, key: &H256) -> bool { self.forward.exists(key) }
fn get(&self, key: &H256) -> Option<&[u8]> { self.forward.get(key) }
fn contains(&self, key: &H256) -> bool { self.forward.contains(key) }
fn insert(&mut self, value: &[u8]) -> H256 { let r = self.forward.insert(value); self.inserts.push(r.clone()); r }
fn emplace(&mut self, key: H256, value: Bytes) { self.inserts.push(key.clone()); self.forward.emplace(key, value); }
fn kill(&mut self, key: &H256) { self.removes.push(key.clone()); }
fn remove(&mut self, key: &H256) { self.removes.push(key.clone()); }
}
impl JournalDB for RefCountedDB {
@ -212,16 +212,16 @@ mod tests {
let mut jdb = RefCountedDB::new_temp();
let h = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.remove(&h);
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(3, &b"3".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&h));
assert!(jdb.contains(&h));
jdb.commit(4, &b"4".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(!jdb.exists(&h));
assert!(!jdb.contains(&h));
}
#[test]
@ -251,34 +251,34 @@ mod tests {
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&baz));
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(!jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
}
#[test]
@ -289,8 +289,8 @@ mod tests {
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
@ -299,13 +299,13 @@ mod tests {
jdb.remove(&bar);
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
}
}


@ -20,8 +20,10 @@ use std::default::Default;
use rocksdb::{DB, Writable, WriteBatch, IteratorMode, DBVector, DBIterator,
IndexType, Options, DBCompactionStyle, BlockBasedOptions, Direction};
const DB_FILE_SIZE_BASE: u64 = 10 * 1024 * 1024;
const DB_FILE_SIZE_MULTIPLIER: i32 = 5;
const DB_FILE_SIZE_BASE: u64 = 128 * 1024 * 1024;
const DB_FILE_SIZE_MULTIPLIER: i32 = 1;
const DB_BACKGROUND_FLUSHES: i32 = 4;
const DB_BACKGROUND_COMPACTIONS: i32 = 4;
/// Write transaction. Batches a sequence of put/delete operations for efficiency.
pub struct DBTransaction {
@ -116,6 +118,8 @@ impl Database {
opts.set_compaction_style(DBCompactionStyle::DBUniversalCompaction);
opts.set_target_file_size_base(DB_FILE_SIZE_BASE);
opts.set_target_file_size_multiplier(DB_FILE_SIZE_MULTIPLIER);
opts.set_max_background_flushes(DB_BACKGROUND_FLUSHES);
opts.set_max_background_compactions(DB_BACKGROUND_COMPACTIONS);
if let Some(cache_size) = config.cache_size {
// half goes to read cache
opts.set_block_cache_size_mb(cache_size as u64 / 2);
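Taken together, the retuned file-size constants and the two new background-job settings amount to an option setup along the following lines. This is a sketch that reuses only the setter calls visible in this hunk; the Options constructor and the surrounding Database::open plumbing are assumptions, not part of the diff.

use rocksdb::{Options, DBCompactionStyle};

const DB_FILE_SIZE_BASE: u64 = 128 * 1024 * 1024; // raised from 10 MiB
const DB_FILE_SIZE_MULTIPLIER: i32 = 1;           // lowered from 5
const DB_BACKGROUND_FLUSHES: i32 = 4;
const DB_BACKGROUND_COMPACTIONS: i32 = 4;

fn tuned_options(cache_size_mb: Option<u64>) -> Options {
    let mut opts = Options::new(); // constructor assumed; only the set_* calls appear in the hunk
    opts.set_compaction_style(DBCompactionStyle::DBUniversalCompaction);
    opts.set_target_file_size_base(DB_FILE_SIZE_BASE);
    opts.set_target_file_size_multiplier(DB_FILE_SIZE_MULTIPLIER);
    opts.set_max_background_flushes(DB_BACKGROUND_FLUSHES);
    opts.set_max_background_compactions(DB_BACKGROUND_COMPACTIONS);
    if let Some(cache_size) = cache_size_mb {
        // half goes to read cache, mirroring the original comment
        opts.set_block_cache_size_mb(cache_size / 2);
    }
    opts
}

As far as the constants suggest, the intent is fewer, larger SST files and more parallel background flush/compaction work.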


@ -162,7 +162,7 @@ impl MemoryDB {
static NULL_RLP_STATIC: [u8; 1] = [0x80; 1];
impl HashDB for MemoryDB {
fn lookup(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<&[u8]> {
if key == &SHA3_NULL_RLP {
return Some(&NULL_RLP_STATIC);
}
@ -176,7 +176,7 @@ impl HashDB for MemoryDB {
self.data.iter().filter_map(|(k, v)| if v.1 != 0 {Some((k.clone(), v.1))} else {None}).collect()
}
fn exists(&self, key: &H256) -> bool {
fn contains(&self, key: &H256) -> bool {
if key == &SHA3_NULL_RLP {
return true;
}
@ -222,7 +222,7 @@ impl HashDB for MemoryDB {
self.data.insert(key, (value, 1));
}
fn kill(&mut self, key: &H256) {
fn remove(&mut self, key: &H256) {
if key == &SHA3_NULL_RLP {
return;
}
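The rc > 0 guard that the get implementations above pattern-match on reflects MemoryDB's (value, reference count) entries: a key only counts as present while its count is positive. A compact, self-contained sketch of that bookkeeping with toy types follows (the real MemoryDB additionally special-cases SHA3_NULL_RLP, as the hunk above shows).

use std::collections::HashMap;

// Toy key type; the real MemoryDB keys by H256.
type Key = u64;

#[derive(Default)]
struct ToyMemoryDB {
    // value plus reference count; an entry only "exists" while its count is positive
    data: HashMap<Key, (Vec<u8>, i32)>,
}

impl ToyMemoryDB {
    fn emplace(&mut self, key: Key, value: Vec<u8>) {
        let entry = self.data.entry(key).or_insert((value, 0));
        entry.1 += 1;
    }
    fn get(&self, key: &Key) -> Option<&[u8]> {
        match self.data.get(key) {
            Some(&(ref d, rc)) if rc > 0 => Some(&d[..]),
            _ => None,
        }
    }
    fn contains(&self, key: &Key) -> bool { self.get(key).is_some() }
    fn remove(&mut self, key: &Key) {
        // decrement only; the slot stays so a later re-insert can cancel the removal
        if let Some(entry) = self.data.get_mut(key) { entry.1 -= 1; }
    }
}

fn main() {
    let mut db = ToyMemoryDB::default();
    db.emplace(1, b"foo".to_vec());
    db.emplace(1, b"foo".to_vec()); // second reference to the same key
    db.remove(&1);
    assert!(db.contains(&1));       // one reference still outstanding
    db.remove(&1);
    assert!(!db.contains(&1));      // count hit zero, treated as absent
}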


@ -735,6 +735,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
self.kill_connection(token, io, true);
}
#[cfg_attr(feature="dev", allow(collapsible_if))]
fn session_readable(&self, token: StreamToken, io: &IoContext<NetworkIoMessage<Message>>) {
let mut ready_data: Vec<ProtocolId> = Vec::new();
let mut packet_data: Vec<(ProtocolId, PacketId, Vec<u8>)> = Vec::new();


@ -92,7 +92,7 @@ impl OverlayDB {
///
/// Returns either an error or the number of items changed in the backing database.
///
/// Will return an error if the number of `kill()`s ever exceeds the number of
/// Will return an error if the number of `remove()`s ever exceeds the number of
/// `insert()`s for any key. This will leave the database in an indeterminate
/// state. Don't ever let it happen.
///
@ -104,15 +104,15 @@ impl OverlayDB {
/// fn main() {
/// let mut m = OverlayDB::new_temp();
/// let key = m.insert(b"foo"); // insert item.
/// assert!(m.exists(&key)); // key exists (in memory).
/// assert!(m.contains(&key)); // key exists (in memory).
/// assert_eq!(m.commit().unwrap(), 1); // 1 item changed.
/// assert!(m.exists(&key)); // key still exists (in backing).
/// m.kill(&key); // delete item.
/// assert!(!m.exists(&key)); // key "doesn't exist" (though still does in backing).
/// m.kill(&key); // oh dear... more kills than inserts for the key...
/// assert!(m.contains(&key)); // key still exists (in backing).
/// m.remove(&key); // delete item.
/// assert!(!m.contains(&key)); // key "doesn't exist" (though still does in backing).
/// m.remove(&key); // oh dear... more removes than inserts for the key...
/// //m.commit().unwrap(); // this commit/unwrap would cause a panic.
/// m.revert(); // revert both kills.
/// assert!(m.exists(&key)); // key now still exists.
/// m.revert(); // revert both removes.
/// assert!(m.contains(&key)); // key now still exists.
/// }
/// ```
pub fn commit(&mut self) -> Result<u32, UtilError> {
@ -224,7 +224,7 @@ impl HashDB for OverlayDB {
}
ret
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
fn get(&self, key: &H256) -> Option<&[u8]> {
// return ok if positive; if negative, check backing - might be enough references there to make
// it positive again.
let k = self.overlay.raw(key);
@ -249,7 +249,7 @@ impl HashDB for OverlayDB {
}
}
}
fn exists(&self, key: &H256) -> bool {
fn contains(&self, key: &H256) -> bool {
// return ok if positive; if negative, check backing - might be enough references there to make
// it positive again.
let k = self.overlay.raw(key);
@ -271,7 +271,7 @@ impl HashDB for OverlayDB {
}
fn insert(&mut self, value: &[u8]) -> H256 { self.overlay.insert(value) }
fn emplace(&mut self, key: H256, value: Bytes) { self.overlay.emplace(key, value); }
fn kill(&mut self, key: &H256) { self.overlay.kill(key); }
fn remove(&mut self, key: &H256) { self.overlay.remove(key); }
}
#[test]


@ -133,7 +133,7 @@ impl<'db> TrieDB<'db> {
/// Get the data of the root node.
fn root_data(&self) -> &[u8] {
self.db.lookup(&self.root).expect("Trie root not found!")
self.db.get(&self.root).expect("Trie root not found!")
}
/// Get the root node as a `Node`.
@ -184,7 +184,7 @@ impl<'db> TrieDB<'db> {
/// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists.
fn do_lookup<'a, 'key>(&'a self, key: &NibbleSlice<'key>) -> Option<&'a [u8]> where 'a: 'key {
let root_rlp = self.db.lookup(&self.root).expect("Trie root not found!");
let root_rlp = self.db.get(&self.root).expect("Trie root not found!");
self.get_from_node(&root_rlp, key)
}
@ -213,7 +213,7 @@ impl<'db> TrieDB<'db> {
// check if it's sha3 + len
let r = Rlp::new(node);
match r.is_data() && r.size() == 32 {
true => self.db.lookup(&r.as_val::<H256>()).unwrap_or_else(|| panic!("Not found! {:?}", r.as_val::<H256>())),
true => self.db.get(&r.as_val::<H256>()).unwrap_or_else(|| panic!("Not found! {:?}", r.as_val::<H256>())),
false => node
}
}
@ -349,7 +349,7 @@ impl<'db> Trie for TrieDB<'db> {
impl<'db> fmt::Debug for TrieDB<'db> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(writeln!(f, "c={:?} [", self.hash_count));
let root_rlp = self.db.lookup(&self.root).expect("Trie root not found!");
let root_rlp = self.db.get(&self.root).expect("Trie root not found!");
try!(self.fmt_all(Node::decoded(root_rlp), f, 0));
writeln!(f, "]")
}


@ -87,7 +87,7 @@ impl<'db> TrieDBMut<'db> {
/// Create a new trie with the backing database `db` and `root`.
/// Returns an error if `root` does not exist.
pub fn from_existing(db: &'db mut HashDB, root: &'db mut H256) -> Result<Self, TrieError> {
if !db.exists(root) {
if !db.contains(root) {
Err(TrieError::InvalidStateRoot)
} else {
Ok(TrieDBMut {
@ -143,7 +143,7 @@ impl<'db> TrieDBMut<'db> {
/// Set the trie to a new root node's RLP, inserting the new RLP into the backing database
/// and removing the old.
fn set_root_rlp(&mut self, root_data: &[u8]) {
self.db.kill(&self.root);
self.db.remove(&self.root);
*self.root = self.db.insert(root_data);
self.hash_count += 1;
trace!("set_root_rlp {:?} {:?}", root_data.pretty(), self.root);
@ -174,7 +174,7 @@ impl<'db> TrieDBMut<'db> {
/// Get the root node's RLP.
fn root_node(&self) -> Node {
Node::decoded(self.db.lookup(&self.root).expect("Trie root not found!"))
Node::decoded(self.db.get(&self.root).expect("Trie root not found!"))
}
/// Get the root node as a `Node`.
@ -225,7 +225,7 @@ impl<'db> TrieDBMut<'db> {
/// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists.
fn do_lookup<'a, 'key>(&'a self, key: &NibbleSlice<'key>) -> Option<&'a [u8]> where 'a: 'key {
let root_rlp = self.db.lookup(&self.root).expect("Trie root not found!");
let root_rlp = self.db.get(&self.root).expect("Trie root not found!");
self.get_from_node(&root_rlp, key)
}
@ -254,7 +254,7 @@ impl<'db> TrieDBMut<'db> {
// check if it's sha3 + len
let r = Rlp::new(node);
match r.is_data() && r.size() == 32 {
true => self.db.lookup(&r.as_val::<H256>()).expect("Not found!"),
true => self.db.get(&r.as_val::<H256>()).expect("Not found!"),
false => node
}
}
@ -266,7 +266,7 @@ impl<'db> TrieDBMut<'db> {
trace!("ADD: {:?} {:?}", key, value.pretty());
// determine what the new root is, insert new nodes and remove old as necessary.
let mut todo: Journal = Journal::new();
let root_rlp = self.augmented(self.db.lookup(&self.root).expect("Trie root not found!"), key, value, &mut todo);
let root_rlp = self.augmented(self.db.get(&self.root).expect("Trie root not found!"), key, value, &mut todo);
self.apply(todo);
self.set_root_rlp(&root_rlp);
trace!("/");
@ -279,7 +279,7 @@ impl<'db> TrieDBMut<'db> {
trace!("DELETE: {:?}", key);
// determine what the new root is, insert new nodes and remove old as necessary.
let mut todo: Journal = Journal::new();
match self.cleared_from_slice(self.db.lookup(&self.root).expect("Trie root not found!"), key, &mut todo) {
match self.cleared_from_slice(self.db.get(&self.root).expect("Trie root not found!"), key, &mut todo) {
Some(root_rlp) => {
self.apply(todo);
self.set_root_rlp(&root_rlp);
@ -335,7 +335,7 @@ impl<'db> TrieDBMut<'db> {
}
else if rlp.is_data() && rlp.size() == 32 {
let h = rlp.as_val();
let r = self.db.lookup(&h).unwrap_or_else(||{
let r = self.db.get(&h).unwrap_or_else(||{
println!("Node not found! rlp={:?}, node_hash={:?}", rlp.as_raw().pretty(), h);
println!("Journal: {:?}", journal);
panic!();
@ -670,7 +670,7 @@ impl<'db> TrieMut for TrieDBMut<'db> {
impl<'db> fmt::Debug for TrieDBMut<'db> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(writeln!(f, "c={:?} [", self.hash_count));
let root_rlp = self.db.lookup(&self.root).expect("Trie root not found!");
let root_rlp = self.db.get(&self.root).expect("Trie root not found!");
try!(self.fmt_all(Node::decoded(root_rlp), f, 0));
writeln!(f, "]")
}
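The repeated r.is_data() && r.size() == 32 checks in these trie hunks implement the usual rule that a child node's RLP is kept inline when it is short and replaced by its 32-byte SHA3 otherwise, in which case it must be fetched from the backing HashDB with get. A hedged sketch of that resolution step, using a simplified enum in place of the real Rlp type:

use std::collections::HashMap;

// Simplified stand-in for the Rlp-based check above: a node referenced from
// its parent is either embedded inline (short RLP) or named by its 32-byte hash.
enum ChildRef {
    Inline(Vec<u8>),
    Hash([u8; 32]),
}

struct ToyTrie {
    // backing HashDB, keyed by node hash
    db: HashMap<[u8; 32], Vec<u8>>,
}

impl ToyTrie {
    // Use the bytes in place, or fetch them by hash -- mirroring the
    // "is_data && size == 32" branch in the hunks above.
    fn resolve<'a>(&'a self, child: &'a ChildRef) -> &'a [u8] {
        match *child {
            ChildRef::Inline(ref node) => &node[..],
            ChildRef::Hash(ref h) => self.db.get(h)
                .unwrap_or_else(|| panic!("Not found! {:?}", h))
                .as_slice(),
        }
    }
}

fn main() {
    let mut db = HashMap::new();
    db.insert([0u8; 32], b"long branch node rlp".to_vec());
    let trie = ToyTrie { db };
    assert_eq!(trie.resolve(&ChildRef::Inline(b"tiny".to_vec())), &b"tiny"[..]);
    assert_eq!(trie.resolve(&ChildRef::Hash([0u8; 32])), &b"long branch node rlp"[..]);
}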