Merge branch 'master' into jsonrpc2
commit 8113c689fc

README.md (26 lines changed)
@@ -26,12 +26,17 @@ apt-get install -y --force-yes librocksdb-dev
# install multirust
curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sh -s -- --yes

# install beta and make it default
multirust default beta
# install beta
multirust update beta

# download and build parity
git clone https://github.com/ethcore/parity
cd parity

# parity should be build with rust beta
multirust override beta

# build in release
cargo build --release
```

@@ -49,12 +54,17 @@ cd ..
# install rust beta
curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sudo sh -s -- --yes

# install beta and make it default
sudo multirust default beta
# install rust beta
sudo multirust update beta

# download and build parity
git clone https://github.com/ethcore/parity
cd parity

# parity should be build with rust beta
sudo multirust override beta

# build in release
cargo build --release
```

@@ -66,12 +76,16 @@ brew update
brew install rocksdb
brew install multirust

# install beta and make it default
multirust default beta
# install beta
multirust update beta

# download and build parity
git clone https://github.com/ethcore/parity
cd parity

# use rust beta for building parity
multirust override beta

cargo build --release
```
@@ -24,7 +24,7 @@ pub type LogBloom = H2048;
/// Constant 2048-bit datum for 0. Often used as a default.
pub static ZERO_LOGBLOOM: LogBloom = H2048([0x00; 256]);

#[allow(enum_variant_names)]
#[cfg_attr(feature="dev", allow(enum_variant_names))]
/// Semantic boolean for when a seal/signature is included.
pub enum Seal {
/// The seal/signature is included.
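Note: the hunk above shows the pattern this merge applies across the tree: each bare `#[allow(...)]` Clippy attribute gains a `#[cfg_attr(feature="dev", allow(...))]` form, so the lint suppression only exists when the crate is built with the `dev` feature (where the clippy plugin is enabled). A minimal sketch of the gating, assuming a `dev` feature declared in Cargo.toml; the variant names are illustrative, not taken from the diff:

```rust
// With `--features dev` this expands to #[allow(enum_variant_names)];
// in a normal (stable) build the attribute disappears entirely.
#[cfg_attr(feature = "dev", allow(enum_variant_names))]
/// Semantic boolean for when a seal/signature is included.
pub enum Seal {
    /// The seal/signature is included.
    WithSeal,
    /// The seal/signature is omitted.
    WithoutSeal,
}
```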
@@ -16,7 +16,7 @@

//! Blockchain block.

#![allow(ptr_arg)] // Because of &LastHashes -> &Vec<_>
#![cfg_attr(feature="dev", allow(ptr_arg))] // Because of &LastHashes -> &Vec<_>

use common::*;
use engine::*;

@@ -87,7 +87,7 @@ struct QueueSignal {
}

impl QueueSignal {
#[allow(bool_comparison)]
#[cfg_attr(feature="dev", allow(bool_comparison))]
fn set(&self) {
if self.signalled.compare_and_swap(false, true, AtomicOrdering::Relaxed) == false {
self.message_channel.send(UserMessage(SyncMessage::BlockVerified)).expect("Error sending BlockVerified message");
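Note: `QueueSignal::set` above relies on the atomic compare-and-swap so that only the first caller to flip `signalled` from `false` to `true` sends a `BlockVerified` message; later callers see the swap fail and stay quiet. A hedged sketch of the same idea using the current `compare_exchange` API (the channel and message type here are stand-ins, not the types from the diff):

```rust
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::Sender;

struct Signal {
    signalled: AtomicBool,
    channel: Sender<&'static str>,
}

impl Signal {
    fn set(&self) {
        // Only the caller that wins the false -> true transition sends,
        // so the queue is not flooded with duplicate notifications.
        if self
            .signalled
            .compare_exchange(false, true, Ordering::Relaxed, Ordering::Relaxed)
            .is_ok()
        {
            self.channel
                .send("BlockVerified")
                .expect("Error sending BlockVerified message");
        }
    }
}
```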
@@ -206,7 +206,7 @@ impl Engine for Ethash {
}
}

#[allow(wrong_self_convention)] // to_ethash should take self
#[cfg_attr(feature="dev", allow(wrong_self_convention))] // to_ethash should take self
impl Ethash {
fn calculate_difficuty(&self, header: &Header, parent: &Header) -> U256 {
const EXP_DIFF_PERIOD: u64 = 100000;

@@ -243,7 +243,7 @@ struct CodeReader<'a> {
code: &'a Bytes
}

#[allow(len_without_is_empty)]
#[cfg_attr(feature="dev", allow(len_without_is_empty))]
impl<'a> CodeReader<'a> {
/// Get `no_of_bytes` from code and convert to U256. Move PC
fn read(&mut self, no_of_bytes: usize) -> U256 {

@@ -258,7 +258,7 @@ impl<'a> CodeReader<'a> {
}
}

#[allow(enum_variant_names)]
#[cfg_attr(feature="dev", allow(enum_variant_names))]
enum InstructionCost {
Gas(U256),
GasMem(U256, U256),

@@ -347,7 +347,7 @@ impl evm::Evm for Interpreter {
}

impl Interpreter {
#[allow(cyclomatic_complexity)]
#[cfg_attr(feature="dev", allow(cyclomatic_complexity))]
fn get_gas_cost_mem(&self,
ext: &evm::Ext,
instruction: Instruction,

@@ -188,7 +188,7 @@ impl<'a> Ext for Externalities<'a> {
self.state.code(address).unwrap_or_else(|| vec![])
}

#[allow(match_ref_pats)]
#[cfg_attr(feature="dev", allow(match_ref_pats))]
fn ret(&mut self, gas: &U256, data: &[u8]) -> Result<U256, evm::Error> {
match &mut self.output {
&mut OutputPolicy::Return(BytesRef::Fixed(ref mut slice)) => unsafe {
@@ -17,12 +17,14 @@
#![warn(missing_docs)]
#![cfg_attr(feature="dev", feature(plugin))]
#![cfg_attr(feature="dev", plugin(clippy))]

// Clippy config
// TODO [todr] not really sure
#![allow(needless_range_loop)]
#![cfg_attr(feature="dev", allow(needless_range_loop))]
// Shorter than if-else
#![allow(match_bool)]
#![cfg_attr(feautre="dev", allow(match_bool))]
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
#![allow(clone_on_copy)]
#![cfg_attr(feature="dev", allow(clone_on_copy))]

//! Ethcore library
//!
@@ -110,8 +110,8 @@ impl IoHandler<NetSyncMessage> for ClientIoHandler {
}
}

#[allow(match_ref_pats)]
#[allow(single_match)]
#[cfg_attr(feature="dev", allow(match_ref_pats))]
#[cfg_attr(feature="dev", allow(single_match))]
fn message(&self, io: &IoContext<NetSyncMessage>, net_message: &NetSyncMessage) {
if let &UserMessage(ref message) = net_message {
match message {

@@ -97,7 +97,7 @@ pub struct Spec {
genesis_state: PodState,
}

#[allow(wrong_self_convention)] // because to_engine(self) should be to_engine(&self)
#[cfg_attr(feature="dev", allow(wrong_self_convention))] // because to_engine(self) should be to_engine(&self)
impl Spec {
/// Convert this object into a boxed Engine of the right underlying type.
// TODO avoid this hard-coded nastiness - use dynamic-linked plugin framework instead.

@@ -224,7 +224,7 @@ impl State {

/// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit.
/// `accounts` is mutable because we may need to commit the code or storage and record that.
#[allow(match_ref_pats)]
#[cfg_attr(feature="dev", allow(match_ref_pats))]
pub fn commit_into(db: &mut HashDB, root: &mut H256, accounts: &mut HashMap<Address, Option<Account>>) {
// first, commit the sub trees.
// TODO: is this necessary or can we dispense with the `ref mut a` for just `a`?
@@ -55,13 +55,12 @@ impl Visitor for BlockNumberVisitor {
}

impl Into<BlockId> for BlockNumber {
#[allow(match_same_arms)]
fn into(self) -> BlockId {
match self {
BlockNumber::Num(n) => BlockId::Number(n),
BlockNumber::Earliest => BlockId::Earliest,
BlockNumber::Latest => BlockId::Latest,
BlockNumber::Pending => BlockId::Latest // TODO: change this once blockid support pending
// TODO: change this once blockid support pendingst,
BlockNumber::Pending | BlockNumber::Latest => BlockId::Latest,
}
}
}
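Note: after the change above, both `BlockNumber::Pending` and `BlockNumber::Latest` resolve to `BlockId::Latest` until a dedicated pending block id exists, which is also why the `match_same_arms` allowance could be dropped. A small hedged illustration:

```rust
// Sketch: until pending blocks get their own BlockId, "pending" is served
// from the latest block.
let id: BlockId = BlockNumber::Pending.into();
assert!(matches!(id, BlockId::Latest));
```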
@@ -251,7 +251,7 @@ impl ChainSync {
}


#[allow(for_kv_map)] // Because it's not possible to get `values_mut()`
#[cfg_attr(feature="dev", allow(for_kv_map))] // Because it's not possible to get `values_mut()`
/// Rest sync. Clear all downloaded data but keep the queue
fn reset(&mut self) {
self.downloading_headers.clear();

@@ -319,7 +319,7 @@ impl ChainSync {
Ok(())
}

#[allow(cyclomatic_complexity)]
#[cfg_attr(feature="dev", allow(cyclomatic_complexity))]
/// Called by peer once it has new block headers during sync
fn on_peer_block_headers(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
self.reset_peer_asking(peer_id, PeerAsking::BlockHeaders);

@@ -17,8 +17,9 @@
#![warn(missing_docs)]
#![cfg_attr(feature="dev", feature(plugin))]
#![cfg_attr(feature="dev", plugin(clippy))]

// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
#![allow(clone_on_copy)]
#![cfg_attr(feature="dev", allow(clone_on_copy))]

//! Blockchain sync module
//! Implements ethereum protocol version 63 as specified here:
@@ -141,7 +141,7 @@ impl<'a> Deref for BytesRef<'a> {
fn deref(&self) -> &[u8] {
match *self {
BytesRef::Flexible(ref bytes) => bytes,
BytesRef::Fixed(ref bytes) => bytes
BytesRef::Fixed(ref bytes) => bytes,
}
}
}

@@ -150,7 +150,7 @@ impl <'a> DerefMut for BytesRef<'a> {
fn deref_mut(&mut self) -> &mut [u8] {
match *self {
BytesRef::Flexible(ref mut bytes) => bytes,
BytesRef::Fixed(ref mut bytes) => bytes
BytesRef::Fixed(ref mut bytes) => bytes,
}
}
}

@@ -178,6 +178,10 @@ impl BytesConvertable for Vec<u8> {
fn bytes(&self) -> &[u8] { self }
}

impl BytesConvertable for String {
fn bytes(&self) -> &[u8] { &self.as_bytes() }
}

macro_rules! impl_bytes_convertable_for_array {
($zero: expr) => ();
($len: expr, $($idx: expr),*) => {
@@ -252,6 +256,49 @@ impl<T> Populatable for [T] where T: Sized {
}
}

#[derive(Debug)]
/// Bytes array deserialization error
pub enum FromBytesError {
/// Not enough bytes for the requested type
NotLongEnough,
/// Too many bytes for the requested type
TooLong,
}

/// Value that can be serialized from bytes array
pub trait FromRawBytes : Sized {
/// function that will instantiate and initialize object from slice
fn from_bytes(d: &[u8]) -> Result<Self, FromBytesError>;
}

impl<T> FromRawBytes for T where T: Sized + FixedHash {
fn from_bytes(bytes: &[u8]) -> Result<Self, FromBytesError> {
use std::mem;
use std::cmp::Ordering;
match bytes.len().cmp(&mem::size_of::<T>()) {
Ordering::Less => return Err(FromBytesError::NotLongEnough),
Ordering::Greater => return Err(FromBytesError::TooLong),
Ordering::Equal => ()
};

let mut res: Self = unsafe { mem::uninitialized() };
res.copy_raw(bytes);
Ok(res)
}
}

impl FromRawBytes for String {
fn from_bytes(bytes: &[u8]) -> Result<String, FromBytesError> {
Ok(::std::str::from_utf8(bytes).unwrap().to_owned())
}
}

impl FromRawBytes for Vec<u8> {
fn from_bytes(bytes: &[u8]) -> Result<Vec<u8>, FromBytesError> {
Ok(bytes.clone().to_vec())
}
}

#[test]
fn fax_raw() {
let mut x = [255u8; 4];
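Note: the new `FromBytesError`/`FromRawBytes` machinery above gives fixed-size hashes, `String` and `Vec<u8>` a single byte-deserialization entry point; the secret store added later in this diff uses it to turn decrypted bytes back into typed values. A rough usage sketch, assuming the crate's `H256` fixed-hash type:

```rust
// Exact-length input succeeds; anything shorter reports NotLongEnough.
assert!(H256::from_bytes(&[0u8; 32]).is_ok());
assert!(matches!(H256::from_bytes(&[0u8; 31]), Err(FromBytesError::NotLongEnough)));

// Strings round-trip through their UTF-8 bytes.
let s = String::from_bytes(b"parity").unwrap();
assert_eq!(s, "parity");
```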
@@ -27,7 +27,7 @@ const MAX_CACHE_USAGE_TRACK: usize = 128;
#[derive(PartialEq, Debug, Clone)]
pub enum CryptoCipherType {
/// aes-128-ctr with 128-bit initialisation vector(iv)
Aes128Ctr(U128)
Aes128Ctr(H128)
}

#[derive(PartialEq, Debug, Clone)]

@@ -168,6 +168,8 @@ pub struct KeyFileCrypto {
pub cipher_text: Bytes,
/// Password derived key generator function settings.
pub kdf: KeyFileKdf,
/// Mac
pub mac: H256
}

impl KeyFileCrypto {

@@ -182,7 +184,7 @@ impl KeyFileCrypto {
Some("aes-128-ctr") => CryptoCipherType::Aes128Ctr(
match try!(as_object.get("cipherparams").ok_or(CryptoParseError::NoCipherParameters)).as_object() {
None => { return Err(CryptoParseError::NoCipherParameters); },
Some(cipher_param) => match U128::from_str(match cipher_param["iv"].as_string() {
Some(cipher_param) => match H128::from_str(match cipher_param["iv"].as_string() {
None => { return Err(CryptoParseError::NoInitialVector); },
Some(iv_hex_string) => iv_hex_string
})

@@ -216,22 +218,31 @@ impl KeyFileCrypto {
}
};

let cipher_text = match as_object["ciphertext"].as_string() {
None => { return Err(CryptoParseError::NoCipherText); }
let cipher_text = match try!(as_object.get("ciphertext").ok_or(CryptoParseError::NoCipherText)).as_string() {
None => { return Err(CryptoParseError::InvalidCipherText); }
Some(text) => text
};

let mac: H256 = match try!(as_object.get("mac").ok_or(CryptoParseError::NoMac)).as_string() {
None => { return Err(CryptoParseError::InvalidMacFormat(None)) },
Some(salt_value) => match H256::from_str(salt_value) {
Ok(salt_hex_value) => salt_hex_value,
Err(from_hex_error) => { return Err(CryptoParseError::InvalidMacFormat(Some(from_hex_error))); },
}
};

Ok(KeyFileCrypto {
cipher_text: Bytes::from(cipher_text),
cipher_text: match FromHex::from_hex(cipher_text) { Ok(bytes) => bytes, Err(_) => { return Err(CryptoParseError::InvalidCipherText); } },
cipher_type: cipher_type,
kdf: kdf,
mac: mac,
})
}

fn to_json(&self) -> Json {
let mut map = BTreeMap::new();
match self.cipher_type {
CryptoCipherType::Aes128Ctr(iv) => {
CryptoCipherType::Aes128Ctr(ref iv) => {
map.insert("cipher".to_owned(), Json::String("aes-128-ctr".to_owned()));
let mut cipher_params = BTreeMap::new();
cipher_params.insert("iv".to_owned(), Json::String(format!("{:?}", iv)));

@@ -251,6 +262,8 @@ impl KeyFileCrypto {
KeyFileKdf::Scrypt(ref scrypt_params) => scrypt_params.to_json()
});

map.insert("mac".to_owned(), Json::String(format!("{:?}", self.mac)));

Json::Object(map)
}

@@ -260,7 +273,7 @@ impl KeyFileCrypto {
/// `c` - number of iterations for derived key.
/// `salt` - cryptographic site, random 256-bit hash (ensure it's crypto-random).
/// `iv` - initialisation vector.
pub fn new_pbkdf2(cipher_text: Bytes, iv: U128, salt: H256, c: u32, dk_len: u32) -> KeyFileCrypto {
pub fn new_pbkdf2(cipher_text: Bytes, iv: H128, salt: H256, mac: H256, c: u32, dk_len: u32) -> KeyFileCrypto {
KeyFileCrypto {
cipher_type: CryptoCipherType::Aes128Ctr(iv),
cipher_text: cipher_text,

@@ -270,6 +283,7 @@ impl KeyFileCrypto {
c: c,
prf: Pbkdf2CryptoFunction::HMacSha256
}),
mac: mac,
}
}
}
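Note: `new_pbkdf2` now takes the IV as an `H128` (instead of `U128`) and additionally carries the `mac` next to the PBKDF2 parameters, matching the updated `KeyFileCrypto` struct. A hedged construction sketch (all values are placeholders, not a real key file):

```rust
// cipher_text is the AES-128-CTR output; mac is keccak(DK[16..32] ++ cipher_text).
let crypto = KeyFileCrypto::new_pbkdf2(
    vec![0u8; 32],   // cipher_text
    H128::random(),  // iv
    H256::random(),  // salt
    H256::random(),  // mac
    262144,          // c: PBKDF2 iteration count
    32,              // dk_len
);
let key_file = KeyFileContent::new(crypto);
```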
@@ -324,14 +338,17 @@ pub struct KeyFileContent {

#[derive(Debug)]
enum CryptoParseError {
InvalidMacFormat(Option<UtilError>),
NoMac,
NoCipherText,
InvalidCipherText,
NoCipherType,
InvalidJsonFormat,
InvalidKdfType(Mismatch<String>),
InvalidCipherType(Mismatch<String>),
NoInitialVector,
NoCipherParameters,
InvalidInitialVector(FromHexError),
InvalidInitialVector(UtilError),
NoKdf,
NoKdfType,
Scrypt(ScryptParseError),

@@ -425,17 +442,17 @@ enum KeyFileLoadError {
pub struct KeyDirectory {
/// Directory path for key management.
path: String,
cache: HashMap<Uuid, KeyFileContent>,
cache_usage: VecDeque<Uuid>,
cache: RefCell<HashMap<Uuid, KeyFileContent>>,
cache_usage: RefCell<VecDeque<Uuid>>,
}

impl KeyDirectory {
/// Initializes new cache directory context with a given `path`
pub fn new(path: &Path) -> KeyDirectory {
KeyDirectory {
cache: HashMap::new(),
cache: RefCell::new(HashMap::new()),
path: path.to_str().expect("Initialized key directory with empty path").to_owned(),
cache_usage: VecDeque::new(),
cache_usage: RefCell::new(VecDeque::new()),
}
}

@@ -448,25 +465,37 @@ impl KeyDirectory {
let json_bytes = json_text.into_bytes();
try!(file.write(&json_bytes));
}
let mut cache = self.cache.borrow_mut();
let id = key_file.id.clone();
self.cache.insert(id.clone(), key_file);
cache.insert(id.clone(), key_file);
Ok(id.clone())
}

/// Returns key given by id if corresponding file exists and no load error occured.
/// Warns if any error occured during the key loading
pub fn get(&mut self, id: &Uuid) -> Option<&KeyFileContent> {
pub fn get(&self, id: &Uuid) -> Option<KeyFileContent> {
let path = self.key_path(id);
self.cache_usage.push_back(id.clone());
Some(self.cache.entry(id.to_owned()).or_insert(
{
let mut usage = self.cache_usage.borrow_mut();
usage.push_back(id.clone());
}

if !self.cache.borrow().contains_key(id) {
match KeyDirectory::load_key(&path) {
Ok(loaded_key) => loaded_key,
Ok(loaded_key) => {
self.cache.borrow_mut().insert(id.to_owned(), loaded_key);
}
Err(error) => {
warn!(target: "sstore", "error loading key {:?}: {:?}", id, error);
return None;
}
}
))
}

// todo: replace with Ref::map when it stabilized to avoid copies
Some(self.cache.borrow().get(id)
.expect("Key should be there, we have just inserted or checked it.")
.clone())
}

/// Returns current path to the directory with keys
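Note: moving `cache` and `cache_usage` into `RefCell`s is what lets `get` take `&self` and still populate the cache lazily, returning an owned clone until `Ref::map` stabilizes. A stripped-down sketch of the same read-through pattern (the `u64`/`String` types here are placeholders for `Uuid`/`KeyFileContent`):

```rust
use std::cell::RefCell;
use std::collections::HashMap;

struct Cache {
    inner: RefCell<HashMap<u64, String>>,
}

impl Cache {
    // `&self` suffices: interior mutability scopes each borrow to a single call.
    fn get(&self, id: u64) -> Option<String> {
        if !self.inner.borrow().contains_key(&id) {
            let loaded = Self::load(id)?; // stand-in for KeyDirectory::load_key
            self.inner.borrow_mut().insert(id, loaded);
        }
        // Clone out of the RefCell instead of handing out a borrowed reference.
        Some(self.inner.borrow().get(&id).expect("just inserted or checked").clone())
    }

    fn load(id: u64) -> Option<String> {
        Some(format!("key-{}", id)) // placeholder loader
    }
}
```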
@@ -476,29 +505,63 @@ impl KeyDirectory {

/// Removes keys that never been requested during last `MAX_USAGE_TRACK` times
pub fn collect_garbage(&mut self) {
let total_usages = self.cache_usage.len();
let mut cache_usage = self.cache_usage.borrow_mut();

let total_usages = cache_usage.len();
let untracked_usages = max(total_usages as i64 - MAX_CACHE_USAGE_TRACK as i64, 0) as usize;
if untracked_usages > 0 {
self.cache_usage.drain(..untracked_usages);
cache_usage.drain(..untracked_usages);
}

if self.cache.len() <= MAX_CACHE_USAGE_TRACK { return; }
if self.cache.borrow().len() <= MAX_CACHE_USAGE_TRACK { return; }

let uniqs: HashSet<&Uuid> = self.cache_usage.iter().collect();
let mut removes = HashSet::new();

for key in self.cache.keys() {
if !uniqs.contains(key) {
removes.insert(key.clone());
}
}

for removed_key in removes { self.cache.remove(&removed_key); }
let uniqs: HashSet<&Uuid> = cache_usage.iter().collect();
let removes:Vec<Uuid> = {
let cache = self.cache.borrow();
cache.keys().cloned().filter(|key| !uniqs.contains(key)).collect()
};
if removes.is_empty() { return; }
let mut cache = self.cache.borrow_mut();
for key in removes { cache.remove(&key); }
}

/// Reports how many keys are currently cached.
pub fn cache_size(&self) -> usize {
self.cache.len()
self.cache.borrow().len()
}

/// Removes key file from key directory
pub fn delete(&mut self, id: &Uuid) -> Result<(), ::std::io::Error> {
let path = self.key_path(id);

if !self.cache.borrow().contains_key(id) {
return match fs::remove_file(&path) {
Ok(_) => {
self.cache.borrow_mut().remove(&id);
Ok(())
},
Err(e) => Err(e)
};
}
Ok(())
}

/// Enumerates all keys in the directory
pub fn list(&self) -> Result<Vec<Uuid>, ::std::io::Error> {
let mut result = Vec::new();
for entry in try!(fs::read_dir(&self.path)) {
let entry = try!(entry);
if !try!(fs::metadata(entry.path())).is_dir() {
match entry.file_name().to_str() {
Some(ref name) => {
if let Ok(uuid) = uuid_from_string(name) { result.push(uuid); }
},
None => { continue; }
};

}
}
Ok(result)
}

fn key_path(&self, id: &Uuid) -> PathBuf {
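Note: with both fields behind `RefCell`s, `collect_garbage` above has to stage its work: it gathers eviction candidates under an immutable borrow of the cache and only then takes the mutable borrow, since holding both at once would panic at runtime. A self-contained sketch of that two-phase shape (plain `u32` keys stand in for `Uuid`):

```rust
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};

fn evict_stale(cache: &RefCell<HashMap<u32, String>>, recently_used: &HashSet<u32>) {
    // Phase 1: read-only scan for keys that were not used recently.
    let stale: Vec<u32> = cache
        .borrow()
        .keys()
        .cloned()
        .filter(|k| !recently_used.contains(k))
        .collect();
    if stale.is_empty() {
        return;
    }
    // Phase 2: the immutable borrow is gone, so a mutable borrow is now safe.
    let mut cache = cache.borrow_mut();
    for k in stale {
        cache.remove(&k);
    }
}
```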
@@ -820,14 +883,14 @@ mod file_tests {
#[test]
fn can_create_key_with_new_id() {
let cipher_text: Bytes = FromHex::from_hex("a0f05555").unwrap();
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text, U128::zero(), H256::random(), 32, 32));
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text, H128::zero(), H256::random(), H256::random(), 32, 32));
assert!(!uuid_to_string(&key.id).is_empty());
}

#[test]
fn can_load_json_from_itself() {
let cipher_text: Bytes = FromHex::from_hex("aaaaaaaaaaaaaaaaaaaaaaaaaaa22222222222222222222222").unwrap();
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text, U128::zero(), H256::random(), 32, 32));
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text, H128::zero(), H256::random(), H256::random(), 32, 32));
let json = key.to_json();

let loaded_key = KeyFileContent::from_json(&json).unwrap();

@@ -985,7 +1048,7 @@ mod directory_tests {
let cipher_text: Bytes = FromHex::from_hex("a0f05555").unwrap();
let temp_path = RandomTempPath::create_dir();
let mut directory = KeyDirectory::new(&temp_path.as_path());
let uuid = directory.save(KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text, U128::zero(), H256::random(), 32, 32))).unwrap();
let uuid = directory.save(KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text, H128::zero(), H256::random(), H256::random(), 32, 32))).unwrap();
let path = directory.key_path(&uuid);

let key = KeyDirectory::load_key(&path).unwrap();

@@ -1001,7 +1064,7 @@ mod directory_tests {
let cipher_text: Bytes = FromHex::from_hex("a0f05555").unwrap();
let mut keys = Vec::new();
for _ in 0..1000 {
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), U128::zero(), H256::random(), 32, 32));
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), H128::zero(), H256::random(), H256::random(), 32, 32));
keys.push(directory.save(key).unwrap());
}

@@ -1021,7 +1084,7 @@ mod directory_tests {
let cipher_text: Bytes = FromHex::from_hex("a0f05555").unwrap();
let mut keys = Vec::new();
for _ in 0..1000 {
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), U128::zero(), H256::random(), 32, 32));
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), H128::zero(), H256::random(), H256::random(), 32, 32));
keys.push(directory.save(key).unwrap());
}

@@ -1033,6 +1096,14 @@ mod directory_tests {
// since all keys are different, should be exactly MAX_CACHE_USAGE_TRACK
assert_eq!(MAX_CACHE_USAGE_TRACK, directory.cache_size())
}

#[test]
fn collects_garbage_on_empty() {
let temp_path = RandomTempPath::create_dir();
let mut directory = KeyDirectory::new(&temp_path.as_path());
directory.collect_garbage();
assert_eq!(0, directory.cache_size())
}
}

#[cfg(test)]

@@ -1054,7 +1125,7 @@ mod specs {
let temp_path = RandomTempPath::create_dir();
let mut directory = KeyDirectory::new(&temp_path.as_path());

let uuid = directory.save(KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text, U128::zero(), H256::random(), 32, 32)));
let uuid = directory.save(KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text, H128::zero(), H256::random(), H256::random(), 32, 32)));

assert!(uuid.is_ok());
}

@@ -1064,7 +1135,7 @@ mod specs {
let cipher_text: Bytes = FromHex::from_hex("a0f05555").unwrap();
let temp_path = RandomTempPath::create_dir();
let mut directory = KeyDirectory::new(&temp_path.as_path());
let uuid = directory.save(KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), U128::zero(), H256::random(), 32, 32))).unwrap();
let uuid = directory.save(KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), H128::zero(), H256::random(), H256::random(), 32, 32))).unwrap();

let key = directory.get(&uuid).unwrap();

@@ -1079,10 +1150,25 @@ mod specs {
let cipher_text: Bytes = FromHex::from_hex("a0f05555").unwrap();
let mut keys = Vec::new();
for _ in 0..10 {
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), U128::zero(), H256::random(), 32, 32));
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), H128::zero(), H256::random(), H256::random(), 32, 32));
keys.push(directory.save(key).unwrap());
}

assert_eq!(10, keys.len())
}

#[test]
fn can_list_keys() {
let temp_path = RandomTempPath::create_dir();
let mut directory = KeyDirectory::new(&temp_path.as_path());

let cipher_text: Bytes = FromHex::from_hex("a0f05555").unwrap();
let mut keys = Vec::new();
for _ in 0..33 {
let key = KeyFileContent::new(KeyFileCrypto::new_pbkdf2(cipher_text.clone(), H128::zero(), H256::random(), H256::random(), 32, 32));
keys.push(directory.save(key).unwrap());
}

assert_eq!(33, directory.list().unwrap().len());
}
}

@@ -17,3 +17,4 @@
//! Key management module

pub mod directory;
pub mod store;
util/src/keys/store.rs (new file, 297 lines)

@@ -0,0 +1,297 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Secret Store

use keys::directory::*;
use common::*;
use rcrypto::pbkdf2::*;
use rcrypto::hmac::*;
use crypto;

const KEY_LENGTH: u32 = 32;
const KEY_ITERATIONS: u32 = 4096;
const KEY_LENGTH_AES: u32 = KEY_LENGTH/2;

const KEY_LENGTH_USIZE: usize = KEY_LENGTH as usize;
const KEY_LENGTH_AES_USIZE: usize = KEY_LENGTH_AES as usize;

/// Encrypted hash-map, each request should contain password
pub trait EncryptedHashMap<Key: Hash + Eq> {
/// Returns existing value for the key, if any
fn get<Value: FromRawBytes + BytesConvertable>(&self, key: &Key, password: &str) -> Result<Value, EncryptedHashMapError>;
/// Insert new encrypted key-value and returns previous if there was any
fn insert<Value: FromRawBytes + BytesConvertable>(&mut self, key: Key, value: Value, password: &str) -> Option<Value>;
/// Removes key-value by key and returns the removed one, if any exists and password was provided
fn remove<Value: FromRawBytes + BytesConvertable> (&mut self, key: &Key, password: Option<&str>) -> Option<Value>;
/// Deletes key-value by key and returns if the key-value existed
fn delete(&mut self, key: &Key) -> bool {
self.remove::<Bytes>(key, None).is_some()
}
}

/// Error retrieving value from encrypted hashmap
#[derive(Debug)]
pub enum EncryptedHashMapError {
/// Encryption failed
InvalidPassword,
/// No key in the hashmap
UnknownIdentifier,
/// Stored value is not well formed for the requested type
InvalidValueFormat(FromBytesError),
}

/// Represent service for storing encrypted arbitrary data
pub struct SecretStore {
directory: KeyDirectory
}

impl SecretStore {
/// new instance of Secret Store
pub fn new() -> SecretStore {
let mut path = ::std::env::home_dir().expect("Failed to get home dir");
path.push(".keys");
SecretStore {
directory: KeyDirectory::new(&path)
}
}

#[cfg(test)]
fn new_test(path: &::tests::helpers::RandomTempPath) -> SecretStore {
SecretStore {
directory: KeyDirectory::new(path.as_path())
}
}
}
fn derive_key_iterations(password: &str, salt: &H256, c: u32) -> (Bytes, Bytes) {
let mut h_mac = Hmac::new(::rcrypto::sha2::Sha256::new(), password.as_bytes());
let mut derived_key = vec![0u8; KEY_LENGTH_USIZE];
pbkdf2(&mut h_mac, &salt.as_slice(), c, &mut derived_key);
let derived_right_bits = &derived_key[0..KEY_LENGTH_AES_USIZE];
let derived_left_bits = &derived_key[KEY_LENGTH_AES_USIZE..KEY_LENGTH_USIZE];
(derived_right_bits.to_vec(), derived_left_bits.to_vec())
}

fn derive_key(password: &str, salt: &H256) -> (Bytes, Bytes) {
derive_key_iterations(password, salt, KEY_ITERATIONS)
}

fn derive_mac(derived_left_bits: &[u8], cipher_text: &[u8]) -> Bytes {
let mut mac = vec![0u8; KEY_LENGTH_AES_USIZE + cipher_text.len()];
mac[0..KEY_LENGTH_AES_USIZE].clone_from_slice(derived_left_bits);
mac[KEY_LENGTH_AES_USIZE..cipher_text.len()+KEY_LENGTH_AES_USIZE].clone_from_slice(cipher_text);
mac
}

impl EncryptedHashMap<H128> for SecretStore {
fn get<Value: FromRawBytes + BytesConvertable>(&self, key: &H128, password: &str) -> Result<Value, EncryptedHashMapError> {
match self.directory.get(key) {
Some(key_file) => {
let decrypted_bytes = match key_file.crypto.kdf {
KeyFileKdf::Pbkdf2(ref params) => {
let (derived_left_bits, derived_right_bits) = derive_key_iterations(password, &params.salt, params.c);
if derive_mac(&derived_right_bits, &key_file.crypto.cipher_text)
.sha3() != key_file.crypto.mac { return Err(EncryptedHashMapError::InvalidPassword); }

let mut val = vec![0u8; key_file.crypto.cipher_text.len()];
match key_file.crypto.cipher_type {
CryptoCipherType::Aes128Ctr(ref iv) => {
crypto::aes::decrypt(&derived_left_bits, &iv.as_slice(), &key_file.crypto.cipher_text, &mut val);
}
}
val
}
_ => { unimplemented!(); }
};

match Value::from_bytes(&decrypted_bytes) {
Ok(value) => Ok(value),
Err(bytes_error) => Err(EncryptedHashMapError::InvalidValueFormat(bytes_error))
}
},
None => Err(EncryptedHashMapError::UnknownIdentifier)
}
}

fn insert<Value: FromRawBytes + BytesConvertable>(&mut self, key: H128, value: Value, password: &str) -> Option<Value> {
let previous = if let Ok(previous_value) = self.get(&key, password) { Some(previous_value) } else { None };

// crypto random initiators
let salt = H256::random();
let iv = H128::random();

// two parts of derived key
// DK = [ DK[0..15] DK[16..31] ] = [derived_left_bits, derived_right_bits]
let (derived_left_bits, derived_right_bits) = derive_key(password, &salt);

let mut cipher_text = vec![0u8; value.as_slice().len()];
// aes-128-ctr with initial vector of iv
crypto::aes::encrypt(&derived_left_bits, &iv.clone(), &value.as_slice(), &mut cipher_text);

// KECCAK(DK[16..31] ++ <ciphertext>), where DK[16..31] - derived_right_bits
let mac = derive_mac(&derived_right_bits, &cipher_text.clone()).sha3();

let mut key_file = KeyFileContent::new(
KeyFileCrypto::new_pbkdf2(
cipher_text,
iv,
salt,
mac,
KEY_ITERATIONS,
KEY_LENGTH));
key_file.id = key;
if let Err(io_error) = self.directory.save(key_file) {
warn!("Error saving key file: {:?}", io_error);
}
previous
}

fn remove<Value: FromRawBytes + BytesConvertable>(&mut self, key: &H128, password: Option<&str>) -> Option<Value> {
let previous = if let Some(pass) = password {
if let Ok(previous_value) = self.get(&key, pass) { Some(previous_value) } else { None }
}
else { None };

if let Err(io_error) = self.directory.delete(key) {
warn!("Error saving key file: {:?}", io_error);
}
previous
}

}
#[cfg(test)]
mod vector_tests {
use super::{derive_mac,derive_key_iterations};
use common::*;


#[test]
fn mac_vector() {
let password = "testpassword";
let salt = H256::from_str("ae3cd4e7013836a3df6bd7241b12db061dbe2c6785853cce422d148a624ce0bd").unwrap();
let cipher_text = FromHex::from_hex("5318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46").unwrap();
let iterations = 262144u32;

let (derived_left_bits, derived_right_bits) = derive_key_iterations(password, &salt, iterations);
assert_eq!("f06d69cdc7da0faffb1008270bca38f5", derived_left_bits.to_hex());
assert_eq!("e31891a3a773950e6d0fea48a7188551", derived_right_bits.to_hex());

let mac_body = derive_mac(&derived_right_bits, &cipher_text);
assert_eq!("e31891a3a773950e6d0fea48a71885515318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46", mac_body.to_hex());

let mac = mac_body.sha3();
assert_eq!("517ead924a9d0dc3124507e3393d175ce3ff7c1e96529c6c555ce9e51205e9b2", format!("{:?}", mac));
}
}


#[cfg(test)]
mod tests {
use super::*;
use tests::helpers::*;
use common::*;

#[test]
fn can_insert() {
let temp = RandomTempPath::create_dir();
let mut sstore = SecretStore::new_test(&temp);

let id = H128::random();
sstore.insert(id.clone(), "Cat".to_owned(), "pass");

assert!(sstore.get::<String>(&id, "pass").is_ok());
}

#[test]
fn can_get_fail() {
let temp = RandomTempPath::create_dir();
{
use keys::directory::{KeyFileContent, KeyFileCrypto};
let mut write_sstore = SecretStore::new_test(&temp);
write_sstore.directory.save(
KeyFileContent::new(
KeyFileCrypto::new_pbkdf2(
FromHex::from_hex("5318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46").unwrap(),
H128::from_str("6087dab2f9fdbbfaddc31a909735c1e6").unwrap(),
H256::from_str("ae3cd4e7013836a3df6bd7241b12db061dbe2c6785853cce422d148a624ce0bd").unwrap(),
H256::from_str("517ead924a9d0dc3124507e3393d175ce3ff7c1e96529c6c555ce9e51205e9b2").unwrap(),
262144,
32)))
.unwrap();
}
let sstore = SecretStore::new_test(&temp);
if let Ok(_) = sstore.get::<Bytes>(&H128::from_str("3198bc9c66725ab3d9954942343ae5b6").unwrap(), "testpassword") {
panic!("should be error loading key, we requested the wrong key");
}
}

fn pregenerate_keys(temp: &RandomTempPath, count: usize) -> Vec<H128> {
use keys::directory::{KeyFileContent, KeyFileCrypto};
let mut write_sstore = SecretStore::new_test(&temp);
let mut result = Vec::new();
for _ in 0..count {
result.push(write_sstore.directory.save(
KeyFileContent::new(
KeyFileCrypto::new_pbkdf2(
FromHex::from_hex("5318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46").unwrap(),
H128::from_str("6087dab2f9fdbbfaddc31a909735c1e6").unwrap(),
H256::from_str("ae3cd4e7013836a3df6bd7241b12db061dbe2c6785853cce422d148a624ce0bd").unwrap(),
H256::from_str("517ead924a9d0dc3124507e3393d175ce3ff7c1e96529c6c555ce9e51205e9b2").unwrap(),
262144,
32)))
.unwrap());
}
result
}

#[test]
fn can_get() {
let temp = RandomTempPath::create_dir();
let key_id = {
use keys::directory::{KeyFileContent, KeyFileCrypto};
let mut write_sstore = SecretStore::new_test(&temp);
write_sstore.directory.save(
KeyFileContent::new(
KeyFileCrypto::new_pbkdf2(
FromHex::from_hex("5318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46").unwrap(),
H128::from_str("6087dab2f9fdbbfaddc31a909735c1e6").unwrap(),
H256::from_str("ae3cd4e7013836a3df6bd7241b12db061dbe2c6785853cce422d148a624ce0bd").unwrap(),
H256::from_str("517ead924a9d0dc3124507e3393d175ce3ff7c1e96529c6c555ce9e51205e9b2").unwrap(),
262144,
32)))
.unwrap()
};
let sstore = SecretStore::new_test(&temp);
if let Err(e) = sstore.get::<Bytes>(&key_id, "testpassword") {
panic!("got no key: {:?}", e);
}
}

#[test]
fn can_delete() {
let temp = RandomTempPath::create_dir();
let keys = pregenerate_keys(&temp, 5);

let mut sstore = SecretStore::new_test(&temp);
sstore.delete(&keys[2]);

assert_eq!(4, sstore.directory.list().unwrap().len())
}


}
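Note: the new `util/src/keys/store.rs` above wires the pieces together: `insert` derives a key from the password with PBKDF2, encrypts the value with AES-128-CTR under a random IV and stores `keccak(DK[16..32] ++ ciphertext)` as the MAC, while `get` re-derives the key, checks the MAC and decrypts. A hedged usage sketch mirroring the `can_insert` test (as in the listing, `SecretStore::new()` persists key files under ~/.keys):

```rust
let mut store = SecretStore::new();
let id = H128::random();

// Encrypts "Cat" under the password and writes a key file identified by `id`.
store.insert(id.clone(), "Cat".to_owned(), "pass");

// The right password passes the MAC check and decrypts the value...
assert_eq!("Cat", store.get::<String>(&id, "pass").unwrap());
// ...a wrong one fails the MAC comparison with EncryptedHashMapError::InvalidPassword.
assert!(store.get::<String>(&id, "wrong").is_err());
```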
@@ -17,15 +17,16 @@
#![warn(missing_docs)]
#![cfg_attr(feature="dev", feature(plugin))]
#![cfg_attr(feature="dev", plugin(clippy))]

// Clippy settings
// TODO [todr] not really sure
#![allow(needless_range_loop)]
#![cfg_attr(feature="dev", allow(needless_range_loop))]
// Shorter than if-else
#![allow(match_bool)]
#![cfg_attr(feature="dev", allow(match_bool))]
// We use that to be more explicit about handled cases
#![allow(match_same_arms)]
#![cfg_attr(feature="dev", allow(match_same_arms))]
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
#![allow(clone_on_copy)]
#![cfg_attr(feature="dev", allow(clone_on_copy))]

//! Ethcore-util library
//!
@@ -138,7 +138,7 @@ impl Discovery {
ret
}

#[allow(cyclomatic_complexity)]
#[cfg_attr(feature="dev", allow(cyclomatic_complexity))]
fn nearest_node_entries<'b>(source: &NodeId, target: &NodeId, buckets: &'b [NodeBucket]) -> Vec<&'b NodeId>
{
// send ALPHA FindNode packets to nodes we know, closest to target

@@ -471,7 +471,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
}
}

#[allow(single_match)]
#[cfg_attr(feature="dev", allow(single_match))]
fn connect_peer(&self, id: &NodeId, io: &IoContext<NetworkIoMessage<Message>>) {
if self.have_session(id)
{

@@ -501,7 +501,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
self.create_connection(socket, Some(id), io);
}

#[allow(block_in_if_condition_stmt)]
#[cfg_attr(feature="dev", allow(block_in_if_condition_stmt))]
fn create_connection(&self, socket: TcpStream, id: Option<&NodeId>, io: &IoContext<NetworkIoMessage<Message>>) {
let nonce = self.info.write().unwrap().next_nonce();
let mut connections = self.connections.write().unwrap();

@@ -532,7 +532,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
io.update_registration(TCP_ACCEPT).expect("Error registering TCP listener");
}

#[allow(single_match)]
#[cfg_attr(feature="dev", allow(single_match))]
fn connection_writable(&self, token: StreamToken, io: &IoContext<NetworkIoMessage<Message>>) {
let mut create_session = false;
let mut kill = false;

@@ -72,7 +72,7 @@ impl PanicHandler {

/// Invoke closure and catch any possible panics.
/// In case of panic notifies all listeners about it.
#[allow(deprecated)]
#[cfg_attr(feature="dev", allow(deprecated))]
// TODO [todr] catch_panic is deprecated but panic::recover has different bounds (not allowing mutex)
pub fn catch_panic<G, R>(&self, g: G) -> thread::Result<R> where G: FnOnce() -> R + Send + 'static {
let guard = PanicGuard { handler: self };

@@ -16,7 +16,7 @@

//! Semantic version formatting and comparing.

/// A version value with strict meaning. Use `to_u32` to convert to a simple integer.
/// A version value with strict meaning. Use `as_u32` to convert to a simple integer.
///
/// # Example
/// ```

@@ -54,7 +54,7 @@ pub struct TrieDB<'db> {
pub hash_count: usize,
}

#[allow(wrong_self_convention)]
#[cfg_attr(feature="dev", allow(wrong_self_convention))]
impl<'db> TrieDB<'db> {
/// Create a new trie with the backing database `db` and `root`
/// Panics, if `root` does not exist

@@ -66,7 +66,7 @@ enum MaybeChanged<'a> {
Changed(Bytes),
}

#[allow(wrong_self_convention)]
#[cfg_attr(feature="dev", allow(wrong_self_convention))]
impl<'db> TrieDBMut<'db> {
/// Create a new trie with the backing database `db` and empty `root`
/// Initialise to the state entailed by the genesis block.

@@ -350,7 +350,7 @@ impl<'db> TrieDBMut<'db> {
}
}

#[allow(cyclomatic_complexity)]
#[cfg_attr(feature="dev", allow(cyclomatic_complexity))]
/// Determine the RLP of the node, assuming we're inserting `partial` into the
/// node currently of data `old`. This will *not* delete any hash of `old` from the database;
/// it will just return the new RLP that includes the new node.