2020-09-22 14:53:52 +02:00
|
|
|
// Copyright 2015-2020 Parity Technologies (UK) Ltd.
|
|
|
|
// This file is part of OpenEthereum.
|
2016-06-20 10:06:49 +02:00
|
|
|
|
2020-09-22 14:53:52 +02:00
|
|
|
// OpenEthereum is free software: you can redistribute it and/or modify
|
2016-06-20 10:06:49 +02:00
|
|
|
// it under the terms of the GNU General Public License as published by
|
|
|
|
// the Free Software Foundation, either version 3 of the License, or
|
|
|
|
// (at your option) any later version.
|
|
|
|
|
2020-09-22 14:53:52 +02:00
|
|
|
// OpenEthereum is distributed in the hope that it will be useful,
|
2016-06-20 10:06:49 +02:00
|
|
|
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
// GNU General Public License for more details.
|
|
|
|
|
|
|
|
// You should have received a copy of the GNU General Public License
|
2020-09-22 14:53:52 +02:00
|
|
|
// along with OpenEthereum. If not, see <http://www.gnu.org/licenses/>.
|
2016-06-20 10:06:49 +02:00
|
|
|
|
2017-01-30 11:44:09 +01:00
|
|
|
use super::{
|
2017-02-05 16:17:56 +01:00
|
|
|
vault::{VaultDiskDirectory, VAULT_FILE_NAME},
|
2017-01-30 11:44:09 +01:00
|
|
|
KeyDirectory, VaultKey, VaultKeyDirectory, VaultKeyDirectoryProvider,
|
|
|
|
};
|
2019-01-03 14:07:27 +01:00
|
|
|
use ethkey::Password;
|
2016-12-09 23:01:43 +01:00
|
|
|
use json::{self, Uuid};
|
2016-06-20 00:10:34 +02:00
|
|
|
use std::{
|
|
|
|
collections::HashMap,
|
2020-08-05 06:08:03 +02:00
|
|
|
fs, io,
|
2017-01-30 11:44:09 +01:00
|
|
|
io::Write,
|
2016-06-20 00:10:34 +02:00
|
|
|
path::{Path, PathBuf},
|
2020-08-05 06:08:03 +02:00
|
|
|
};
|
2016-07-25 10:45:45 +02:00
|
|
|
use time;
|
2016-07-25 16:09:47 +02:00
|
|
|
use Error;
|
|
|
|
use SafeAccount;
|
2016-06-20 00:10:34 +02:00
|
|
|
|
2017-01-30 10:59:46 +01:00
|
|
|
/// Non-key files that may legitimately live in the keys directory and must be
/// skipped when enumerating key files. (`'static` is implied for consts, so the
/// explicit lifetimes were redundant.)
const IGNORED_FILES: &[&str] = &[
    "thumbs.db",
    "address_book.json",
    "dapps_policy.json",
    "dapps_accounts.json",
    "dapps_history.json",
    "vault.json",
];
|
2016-08-10 16:42:15 +02:00
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
/// Find a unique filename that does not exist using four-letter random suffix.
|
|
|
|
pub fn find_unique_filename_using_random_suffix(
|
|
|
|
parent_path: &Path,
|
|
|
|
original_filename: &str,
|
|
|
|
) -> io::Result<String> {
|
|
|
|
let mut path = parent_path.join(original_filename);
|
|
|
|
let mut deduped_filename = original_filename.to_string();
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
if path.exists() {
|
|
|
|
const MAX_RETRIES: usize = 500;
|
|
|
|
let mut retries = 0;
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
while path.exists() {
|
|
|
|
if retries >= MAX_RETRIES {
|
|
|
|
return Err(io::Error::new(
|
|
|
|
io::ErrorKind::Other,
|
|
|
|
"Exceeded maximum retries when deduplicating filename.",
|
|
|
|
));
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
let suffix = ::random::random_string(4);
|
|
|
|
deduped_filename = format!("{}-{}", original_filename, suffix);
|
|
|
|
path.set_file_name(&deduped_filename);
|
|
|
|
retries += 1;
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
}
|
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
Ok(deduped_filename)
|
|
|
|
}
|
2018-06-14 13:54:12 +02:00
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
/// Create a new file and restrict permissions to owner only. It errors if the file already exists.
///
/// The mode bits `S_IWUSR | S_IRUSR` correspond to owner read+write (0600),
/// so the key file is unreadable by group/others from the moment it exists.
/// `create_new` fails with `AlreadyExists` if the path is taken, making the
/// create atomic (no window where an existing file is truncated).
#[cfg(unix)]
pub fn create_new_file_with_permissions_to_owner(file_path: &Path) -> io::Result<fs::File> {
    use std::os::unix::fs::OpenOptionsExt;

    fs::OpenOptions::new()
        .write(true)
        .create_new(true)
        .mode((libc::S_IWUSR | libc::S_IRUSR) as u32)
        .open(file_path)
}
|
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
/// Create a new file and restrict permissions to owner only. It errors if the file already exists.
|
2018-06-14 13:54:12 +02:00
|
|
|
#[cfg(not(unix))]
|
2018-06-22 13:30:48 +02:00
|
|
|
pub fn create_new_file_with_permissions_to_owner(file_path: &Path) -> io::Result<fs::File> {
    // This platform exposes no unix permission bits, so only the exclusive
    // create is performed: `create_new` errors if the file already exists.
    let mut options = fs::OpenOptions::new();
    options.write(true);
    options.create_new(true);
    options.open(file_path)
}
|
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
/// Create a new file and restrict permissions to owner only. It replaces the existing file if it already exists.
///
/// Unlike the `create_new` variant, this truncates/overwrites an existing file,
/// then tightens its permission bits to owner read+write (`S_IWUSR | S_IRUSR`,
/// i.e. 0600). NOTE(review): there is a short window between `File::create` and
/// `set_permissions` where a pre-existing looser mode could still apply — the
/// original code accepts this; confirm it is acceptable for the callers.
#[cfg(unix)]
pub fn replace_file_with_permissions_to_owner(file_path: &Path) -> io::Result<fs::File> {
    use std::os::unix::fs::PermissionsExt;

    // create/truncate first, then clamp the permissions on the open handle
    let file = fs::File::create(file_path)?;
    let mut permissions = file.metadata()?.permissions();
    permissions.set_mode((libc::S_IWUSR | libc::S_IRUSR) as u32);
    file.set_permissions(permissions)?;

    Ok(file)
}
|
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
/// Create a new file and restrict permissions to owner only. It replaces the existing file if it already exists.
///
/// On non-unix platforms there are no POSIX permission bits to restrict, so
/// this is a plain create/truncate.
#[cfg(not(unix))]
pub fn replace_file_with_permissions_to_owner(file_path: &Path) -> io::Result<fs::File> {
    fs::File::create(file_path)
}
|
|
|
|
|
2017-01-30 11:44:09 +01:00
|
|
|
/// Root keys directory implementation: a [`DiskDirectory`] driven by the
/// plain JSON key-file manager (as opposed to a vault directory).
pub type RootDiskDirectory = DiskDirectory<DiskKeyFileManager>;
|
|
|
|
|
|
|
|
/// Disk directory key file manager.
///
/// Abstracts the (de)serialization format of a single key file so that
/// `DiskDirectory` can be reused for both plain key stores and vaults.
pub trait KeyFileManager: Send + Sync {
    /// Read `SafeAccount` from given key file stream.
    /// `filename` is the on-disk name (if known) so the account can remember
    /// where it was loaded from.
    fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error>
    where
        T: io::Read;

    /// Write `SafeAccount` to given key file stream.
    fn write<T>(&self, account: SafeAccount, writer: &mut T) -> Result<(), Error>
    where
        T: io::Write;
}
|
|
|
|
|
|
|
|
/// Disk-based keys directory implementation.
///
/// Stores one key file per account under `path`; all format concerns are
/// delegated to the `key_manager`.
pub struct DiskDirectory<T>
where
    T: KeyFileManager,
{
    // root directory holding the key files
    path: PathBuf,
    // serializer/deserializer for individual key files
    key_manager: T,
}
|
|
|
|
|
2017-01-30 11:44:09 +01:00
|
|
|
/// Keys file manager for root keys directory
#[derive(Default)]
pub struct DiskKeyFileManager {
    /// Optional password used when reading key files (needed for key files
    /// without an address — see `RootDiskDirectory::with_password`).
    password: Option<Password>,
}
|
2017-01-30 11:44:09 +01:00
|
|
|
|
|
|
|
impl RootDiskDirectory {
|
2016-06-20 00:10:34 +02:00
|
|
|
pub fn create<P>(path: P) -> Result<Self, Error>
|
|
|
|
where
|
|
|
|
P: AsRef<Path>,
|
|
|
|
{
|
2016-12-27 12:53:56 +01:00
|
|
|
fs::create_dir_all(&path)?;
|
2016-06-20 00:10:34 +02:00
|
|
|
Ok(Self::at(path))
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2019-01-03 14:07:27 +01:00
|
|
|
/// allows to read keyfiles with given password (needed for keyfiles w/o address)
|
|
|
|
pub fn with_password(&self, password: Option<Password>) -> Self {
|
|
|
|
DiskDirectory::new(&self.path, DiskKeyFileManager { password })
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2016-06-20 00:10:34 +02:00
|
|
|
pub fn at<P>(path: P) -> Self
|
|
|
|
where
|
|
|
|
P: AsRef<Path>,
|
|
|
|
{
|
2019-01-03 14:07:27 +01:00
|
|
|
DiskDirectory::new(path, DiskKeyFileManager::default())
|
2017-01-30 11:44:09 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<T> DiskDirectory<T>
|
|
|
|
where
|
|
|
|
T: KeyFileManager,
|
|
|
|
{
|
|
|
|
/// Create new disk directory instance
|
|
|
|
pub fn new<P>(path: P, key_manager: T) -> Self
|
|
|
|
where
|
|
|
|
P: AsRef<Path>,
|
|
|
|
{
|
2016-06-20 00:10:34 +02:00
|
|
|
DiskDirectory {
|
|
|
|
path: path.as_ref().to_path_buf(),
|
2017-01-30 11:44:09 +01:00
|
|
|
key_manager: key_manager,
|
2016-06-20 00:10:34 +02:00
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
}
|
|
|
|
|
2017-02-16 16:20:24 +01:00
|
|
|
fn files(&self) -> Result<Vec<PathBuf>, Error> {
|
|
|
|
Ok(fs::read_dir(&self.path)?
|
2016-06-20 00:10:34 +02:00
|
|
|
.flat_map(Result::ok)
|
|
|
|
.filter(|entry| {
|
2016-10-25 22:34:52 +02:00
|
|
|
let metadata = entry.metadata().ok();
|
2016-08-10 16:42:15 +02:00
|
|
|
let file_name = entry.file_name();
|
2016-10-25 22:34:52 +02:00
|
|
|
let name = file_name.to_string_lossy();
|
2016-08-10 16:42:15 +02:00
|
|
|
// filter directories
|
2016-10-25 22:34:52 +02:00
|
|
|
metadata.map_or(false, |m| !m.is_dir()) &&
|
2017-02-16 16:20:24 +01:00
|
|
|
// hidden files
|
|
|
|
!name.starts_with(".") &&
|
|
|
|
// other ignored files
|
|
|
|
!IGNORED_FILES.contains(&&*name)
|
2016-06-20 00:10:34 +02:00
|
|
|
})
|
|
|
|
.map(|entry| entry.path())
|
2017-02-16 16:20:24 +01:00
|
|
|
.collect::<Vec<PathBuf>>())
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-02-16 20:53:58 +01:00
|
|
|
pub fn files_hash(&self) -> Result<u64, Error> {
|
|
|
|
use std::{collections::hash_map::DefaultHasher, hash::Hasher};
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-02-16 20:53:58 +01:00
|
|
|
let mut hasher = DefaultHasher::new();
|
2017-02-16 16:47:58 +01:00
|
|
|
let files = self.files()?;
|
2017-02-16 20:53:58 +01:00
|
|
|
for file in files {
|
|
|
|
hasher.write(file.to_str().unwrap_or("").as_bytes())
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-02-16 20:53:58 +01:00
|
|
|
Ok(hasher.finish())
|
2017-02-16 16:47:58 +01:00
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-10-20 20:20:41 +02:00
|
|
|
fn last_modification_date(&self) -> Result<u64, Error> {
|
|
|
|
use std::time::{Duration, UNIX_EPOCH};
|
|
|
|
let duration = fs::metadata(&self.path)?
|
|
|
|
.modified()?
|
|
|
|
.duration_since(UNIX_EPOCH)
|
|
|
|
.unwrap_or(Duration::default());
|
|
|
|
let timestamp = duration.as_secs() ^ (duration.subsec_nanos() as u64);
|
|
|
|
Ok(timestamp)
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-02-16 16:20:24 +01:00
|
|
|
/// all accounts found in keys directory
|
|
|
|
fn files_content(&self) -> Result<HashMap<PathBuf, SafeAccount>, Error> {
|
|
|
|
// it's not done using one iterator cause
|
|
|
|
// there is an issue with rustc and it takes tooo much time to compile
|
|
|
|
let paths = self.files()?;
|
2016-12-20 16:34:53 +01:00
|
|
|
Ok(paths
|
2017-01-30 11:44:09 +01:00
|
|
|
.into_iter()
|
|
|
|
.filter_map(|path| {
|
|
|
|
let filename = Some(
|
|
|
|
path.file_name()
|
|
|
|
.and_then(|n| n.to_str())
|
|
|
|
.expect("Keys have valid UTF8 names only.")
|
|
|
|
.to_owned(),
|
|
|
|
);
|
|
|
|
fs::File::open(path.clone())
|
|
|
|
.map_err(Into::into)
|
|
|
|
.and_then(|file| self.key_manager.read(filename, file))
|
|
|
|
.map_err(|err| {
|
|
|
|
warn!("Invalid key file: {:?} ({})", path, err);
|
|
|
|
err
|
|
|
|
})
|
|
|
|
.map(|account| (path, account))
|
|
|
|
.ok()
|
2016-07-28 20:26:07 +02:00
|
|
|
})
|
|
|
|
.collect())
|
2016-06-20 00:10:34 +02:00
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2018-02-14 14:21:58 +01:00
|
|
|
/// insert account with given filename. if the filename is a duplicate of any stored account and dedup is set to
|
|
|
|
/// true, a random suffix is appended to the filename.
|
|
|
|
pub fn insert_with_filename(
|
|
|
|
&self,
|
|
|
|
account: SafeAccount,
|
|
|
|
mut filename: String,
|
|
|
|
dedup: bool,
|
|
|
|
) -> Result<SafeAccount, Error> {
|
2018-06-22 13:30:48 +02:00
|
|
|
if dedup {
|
|
|
|
filename = find_unique_filename_using_random_suffix(&self.path, &filename)?;
|
2018-02-14 14:21:58 +01:00
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2018-06-22 13:30:48 +02:00
|
|
|
// path to keyfile
|
|
|
|
let keyfile_path = self.path.join(filename.as_str());
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-01-30 11:44:09 +01:00
|
|
|
// update account filename
|
|
|
|
let original_account = account.clone();
|
|
|
|
let mut account = account;
|
2018-02-14 14:21:58 +01:00
|
|
|
account.filename = Some(filename);
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-01-30 11:44:09 +01:00
|
|
|
{
|
|
|
|
// save the file
|
2018-06-14 13:54:12 +02:00
|
|
|
let mut file = if dedup {
|
|
|
|
create_new_file_with_permissions_to_owner(&keyfile_path)?
|
|
|
|
} else {
|
|
|
|
replace_file_with_permissions_to_owner(&keyfile_path)?
|
|
|
|
};
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2018-02-14 14:21:58 +01:00
|
|
|
// write key content
|
2018-02-12 18:03:37 +01:00
|
|
|
self.key_manager
|
|
|
|
.write(original_account, &mut file)
|
|
|
|
.map_err(|e| Error::Custom(format!("{:?}", e)))?;
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2018-02-12 18:03:37 +01:00
|
|
|
file.flush()?;
|
|
|
|
file.sync_all()?;
|
2017-01-30 11:44:09 +01:00
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-01-30 11:44:09 +01:00
|
|
|
Ok(account)
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-02-08 13:53:39 +01:00
|
|
|
/// Get key file manager referece
|
2017-01-30 11:44:09 +01:00
|
|
|
pub fn key_manager(&self) -> &T {
|
|
|
|
&self.key_manager
|
|
|
|
}
|
2016-06-20 00:10:34 +02:00
|
|
|
}
|
|
|
|
|
2017-01-30 11:44:09 +01:00
|
|
|
impl<T> KeyDirectory for DiskDirectory<T>
|
|
|
|
where
|
|
|
|
T: KeyFileManager,
|
|
|
|
{
|
2016-06-20 00:10:34 +02:00
|
|
|
fn load(&self) -> Result<Vec<SafeAccount>, Error> {
|
2017-02-16 16:20:24 +01:00
|
|
|
let accounts = self
|
|
|
|
.files_content()?
|
2016-06-20 00:10:34 +02:00
|
|
|
.into_iter()
|
|
|
|
.map(|(_, account)| account)
|
|
|
|
.collect();
|
|
|
|
Ok(accounts)
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2016-12-09 09:31:58 +01:00
|
|
|
fn update(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
|
|
|
|
// Disk store handles updates correctly iff filename is the same
|
2018-02-14 14:21:58 +01:00
|
|
|
let filename = account_filename(&account);
|
|
|
|
self.insert_with_filename(account, filename, false)
|
2016-12-09 09:31:58 +01:00
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2016-07-25 10:45:45 +02:00
|
|
|
fn insert(&self, account: SafeAccount) -> Result<SafeAccount, Error> {
|
2018-02-14 14:21:58 +01:00
|
|
|
let filename = account_filename(&account);
|
|
|
|
self.insert_with_filename(account, filename, true)
|
2016-06-20 00:10:34 +02:00
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2016-11-30 13:47:14 +01:00
|
|
|
fn remove(&self, account: &SafeAccount) -> Result<(), Error> {
|
2016-06-20 00:10:34 +02:00
|
|
|
// enumerate all entries in keystore
|
|
|
|
// and find entry with given address
|
2017-02-16 16:20:24 +01:00
|
|
|
let to_remove = self
|
|
|
|
.files_content()?
|
2016-06-20 00:10:34 +02:00
|
|
|
.into_iter()
|
2017-02-05 16:17:56 +01:00
|
|
|
.find(|&(_, ref acc)| acc.id == account.id && acc.address == account.address);
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2016-06-20 00:10:34 +02:00
|
|
|
// remove it
|
|
|
|
match to_remove {
|
|
|
|
None => Err(Error::InvalidAccount),
|
|
|
|
Some((path, _)) => fs::remove_file(path).map_err(From::from),
|
|
|
|
}
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2016-08-11 18:31:28 +02:00
|
|
|
fn path(&self) -> Option<&PathBuf> {
|
|
|
|
Some(&self.path)
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2020-07-29 10:36:15 +02:00
|
|
|
fn as_vault_provider(&self) -> Option<&dyn VaultKeyDirectoryProvider> {
|
2017-01-30 11:44:09 +01:00
|
|
|
Some(self)
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-04-11 10:24:56 +02:00
|
|
|
fn unique_repr(&self) -> Result<u64, Error> {
|
2017-10-20 20:20:41 +02:00
|
|
|
self.last_modification_date()
|
2017-02-16 20:10:29 +01:00
|
|
|
}
|
2017-01-30 11:44:09 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
impl<T> VaultKeyDirectoryProvider for DiskDirectory<T>
|
|
|
|
where
|
|
|
|
T: KeyFileManager,
|
|
|
|
{
|
2020-07-29 10:36:15 +02:00
|
|
|
fn create(&self, name: &str, key: VaultKey) -> Result<Box<dyn VaultKeyDirectory>, Error> {
|
2017-01-30 11:44:09 +01:00
|
|
|
let vault_dir = VaultDiskDirectory::create(&self.path, name, key)?;
|
|
|
|
Ok(Box::new(vault_dir))
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2020-07-29 10:36:15 +02:00
|
|
|
fn open(&self, name: &str, key: VaultKey) -> Result<Box<dyn VaultKeyDirectory>, Error> {
|
2017-01-30 11:44:09 +01:00
|
|
|
let vault_dir = VaultDiskDirectory::at(&self.path, name, key)?;
|
|
|
|
Ok(Box::new(vault_dir))
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-02-05 16:17:56 +01:00
|
|
|
fn list_vaults(&self) -> Result<Vec<String>, Error> {
|
|
|
|
Ok(fs::read_dir(&self.path)?
|
|
|
|
.filter_map(|e| e.ok().map(|e| e.path()))
|
|
|
|
.filter_map(|path| {
|
|
|
|
let mut vault_file_path = path.clone();
|
|
|
|
vault_file_path.push(VAULT_FILE_NAME);
|
|
|
|
if vault_file_path.is_file() {
|
|
|
|
path.file_name()
|
|
|
|
.and_then(|f| f.to_str())
|
|
|
|
.map(|f| f.to_owned())
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.collect())
|
|
|
|
}
|
2020-08-05 06:08:03 +02:00
|
|
|
|
2017-02-09 16:47:22 +01:00
|
|
|
fn vault_meta(&self, name: &str) -> Result<String, Error> {
|
|
|
|
VaultDiskDirectory::meta_at(&self.path, name)
|
|
|
|
}
|
2016-06-20 00:10:34 +02:00
|
|
|
}
|
2016-08-03 17:58:22 +02:00
|
|
|
|
2017-01-30 11:44:09 +01:00
|
|
|
impl KeyFileManager for DiskKeyFileManager {
    /// Parse a JSON key file from `reader` into a `SafeAccount`, passing the
    /// manager's optional password along (used for key files w/o an address).
    fn read<T>(&self, filename: Option<String>, reader: T) -> Result<SafeAccount, Error>
    where
        T: io::Read,
    {
        let key_file =
            json::KeyFile::load(reader).map_err(|e| Error::Custom(format!("{:?}", e)))?;
        SafeAccount::from_file(key_file, filename, &self.password)
    }

    /// Serialize `account` as a JSON key file into `writer`.
    fn write<T>(&self, mut account: SafeAccount, writer: &mut T) -> Result<(), Error>
    where
        T: io::Write,
    {
        // when account is moved back to root directory from vault
        // => remove vault field from meta
        account.meta = json::remove_vault_name_from_json_meta(&account.meta)
            .map_err(|err| Error::Custom(format!("{:?}", err)))?;

        let key_file: json::KeyFile = account.into();
        key_file
            .write(writer)
            .map_err(|e| Error::Custom(format!("{:?}", e)))
    }
}
|
2016-08-03 17:58:22 +02:00
|
|
|
|
2018-02-14 14:21:58 +01:00
|
|
|
fn account_filename(account: &SafeAccount) -> String {
|
|
|
|
// build file path
|
|
|
|
account.filename.clone().unwrap_or_else(|| {
|
|
|
|
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc())
|
|
|
|
.expect("Time-format string is valid.");
|
|
|
|
format!("UTC--{}Z--{}", timestamp, Uuid::from(account.id))
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2016-08-03 17:58:22 +02:00
|
|
|
#[cfg(test)]
mod test {
    extern crate tempdir;

    use self::tempdir::TempDir;
    use super::{KeyDirectory, RootDiskDirectory, VaultKey};
    use account::SafeAccount;
    use ethkey::{Generator, Random};
    use std::{env, fs, num::NonZeroU32};

    // Shared KDF iteration count for tests; 1024 keeps key derivation fast.
    lazy_static! {
        static ref ITERATIONS: NonZeroU32 = NonZeroU32::new(1024).expect("1024 > 0; qed");
    }

    // Inserting a fresh account must succeed and assign it a filename.
    #[test]
    fn should_create_new_account() {
        // given
        let mut dir = env::temp_dir();
        dir.push("ethstore_should_create_new_account");
        let keypair = Random.generate().unwrap();
        let password = "hello world".into();
        let directory = RootDiskDirectory::create(dir.clone()).unwrap();

        // when
        let account = SafeAccount::create(
            &keypair,
            [0u8; 16],
            &password,
            *ITERATIONS,
            "Test".to_owned(),
            "{}".to_owned(),
        );
        let res = directory.insert(account.unwrap());

        // then
        assert!(res.is_ok(), "Should save account succesfuly.");
        assert!(
            res.unwrap().filename.is_some(),
            "Filename has been assigned."
        );

        // cleanup
        let _ = fs::remove_dir_all(dir);
    }

    // With dedup enabled, the first insert keeps its name and later inserts
    // under the same name receive a "-XXXX" random suffix (5 extra chars).
    #[test]
    fn should_handle_duplicate_filenames() {
        // given
        let mut dir = env::temp_dir();
        dir.push("ethstore_should_handle_duplicate_filenames");
        let keypair = Random.generate().unwrap();
        let password = "hello world".into();
        let directory = RootDiskDirectory::create(dir.clone()).unwrap();

        // when
        let account = SafeAccount::create(
            &keypair,
            [0u8; 16],
            &password,
            *ITERATIONS,
            "Test".to_owned(),
            "{}".to_owned(),
        )
        .unwrap();
        let filename = "test".to_string();
        let dedup = true;

        directory
            .insert_with_filename(account.clone(), "foo".to_string(), dedup)
            .unwrap();
        let file1 = directory
            .insert_with_filename(account.clone(), filename.clone(), dedup)
            .unwrap()
            .filename
            .unwrap();
        let file2 = directory
            .insert_with_filename(account.clone(), filename.clone(), dedup)
            .unwrap()
            .filename
            .unwrap();
        let file3 = directory
            .insert_with_filename(account.clone(), filename.clone(), dedup)
            .unwrap()
            .filename
            .unwrap();

        // then
        // the first file should have the original names
        assert_eq!(file1, filename);

        // the following duplicate files should have a suffix appended
        assert!(file2 != file3);
        assert_eq!(file2.len(), filename.len() + 5);
        assert_eq!(file3.len(), filename.len() + 5);

        // cleanup
        let _ = fs::remove_dir_all(dir);
    }

    // Creating a vault adds an entry under the root; re-opening it does not.
    #[test]
    fn should_manage_vaults() {
        // given
        let mut dir = env::temp_dir();
        dir.push("should_create_new_vault");
        let directory = RootDiskDirectory::create(dir.clone()).unwrap();
        let vault_name = "vault";
        let password = "password".into();

        // then
        assert!(directory.as_vault_provider().is_some());

        // and when
        let before_root_items_count = fs::read_dir(&dir).unwrap().count();
        let vault = directory
            .as_vault_provider()
            .unwrap()
            .create(vault_name, VaultKey::new(&password, *ITERATIONS));

        // then
        assert!(vault.is_ok());
        let after_root_items_count = fs::read_dir(&dir).unwrap().count();
        assert!(after_root_items_count > before_root_items_count);

        // and when
        let vault = directory
            .as_vault_provider()
            .unwrap()
            .open(vault_name, VaultKey::new(&password, *ITERATIONS));

        // then
        assert!(vault.is_ok());
        let after_root_items_count2 = fs::read_dir(&dir).unwrap().count();
        assert!(after_root_items_count == after_root_items_count2);

        // cleanup
        let _ = fs::remove_dir_all(dir);
    }

    // list_vaults must report exactly the vault subdirectories created.
    #[test]
    fn should_list_vaults() {
        // given
        let temp_path = TempDir::new("").unwrap();
        let directory = RootDiskDirectory::create(&temp_path).unwrap();
        let vault_provider = directory.as_vault_provider().unwrap();
        let iter = NonZeroU32::new(1).expect("1 > 0; qed");
        vault_provider
            .create("vault1", VaultKey::new(&"password1".into(), iter))
            .unwrap();
        vault_provider
            .create("vault2", VaultKey::new(&"password2".into(), iter))
            .unwrap();

        // then
        let vaults = vault_provider.list_vaults().unwrap();
        assert_eq!(vaults.len(), 2);
        assert!(vaults.iter().any(|v| &*v == "vault1"));
        assert!(vaults.iter().any(|v| &*v == "vault2"));
    }

    // files_hash: empty directory hashes to a fixed value (DefaultHasher over
    // no input) and must change once a key file is added.
    #[test]
    fn hash_of_files() {
        let temp_path = TempDir::new("").unwrap();
        let directory = RootDiskDirectory::create(&temp_path).unwrap();

        let hash = directory
            .files_hash()
            .expect("Files hash should be calculated ok");
        assert_eq!(hash, 15130871412783076140);

        let keypair = Random.generate().unwrap();
        let password = "test pass".into();
        let account = SafeAccount::create(
            &keypair,
            [0u8; 16],
            &password,
            *ITERATIONS,
            "Test".to_owned(),
            "{}".to_owned(),
        );
        directory
            .insert(account.unwrap())
            .expect("Account should be inserted ok");

        let new_hash = directory
            .files_hash()
            .expect("New files hash should be calculated ok");

        assert!(
            new_hash != hash,
            "hash of the file list should change once directory content changed"
        );
    }
}
|