Persistent tracking of dapps (#4302)
* Tests for RPC
* Extracting dapp_id from Origin and x-parity-origin
* Separate type for DappId
* Persistent tracking of recent dapps
* Fixing tests
* Exposing dapp timestamps
* Fixing import to work on stable
* Fixing test again
parent 47e1c5e2f1
commit cf348dae60
@@ -57,12 +57,19 @@ impl Endpoint for RpcEndpoint {

struct MetadataExtractor;
impl HttpMetaExtractor<Metadata> for MetadataExtractor {
	fn read_metadata(&self, request: &hyper::server::Request<hyper::net::HttpStream>) -> Metadata {
		let dapp_id = request.headers().get::<hyper::header::Referer>()
			.and_then(|referer| hyper::Url::parse(referer).ok())
			.and_then(|url| {
				url.path_segments()
					.and_then(|mut split| split.next())
					.map(|app_id| app_id.to_owned())
		let dapp_id = request.headers().get::<hyper::header::Origin>()
			.map(|origin| format!("{}://{}", origin.scheme, origin.host))
			.or_else(|| {
				// fallback to custom header, but only if origin is null
				request.headers().get_raw("origin")
					.and_then(|raw| raw.one())
					.and_then(|raw| if raw == "null".as_bytes() {
						request.headers().get_raw("x-parity-origin")
							.and_then(|raw| raw.one())
							.map(|raw| String::from_utf8_lossy(raw).into_owned())
					} else {
						None
					})
			});
		Metadata {
			dapp_id: dapp_id,
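For clarity, a minimal standalone sketch (not part of the diff) of the origin-resolution rule the extractor above implements: prefer the Origin header, and consult X-Parity-Origin only when the browser sent the literal string "null" as the origin. The function name and plain string inputs are assumptions for illustration; the real code operates on hyper header types.

/// Sketch only: resolve a dapp id from raw `Origin` / `X-Parity-Origin` values.
fn resolve_dapp_id(origin: Option<&str>, x_parity_origin: Option<&str>) -> Option<String> {
	match origin {
		// A real origin such as "https://parity.io" wins outright.
		Some(o) if o != "null" => Some(o.to_owned()),
		// Sandboxed pages send the literal "null"; only then use the custom header.
		Some(_) => x_parity_origin.map(|o| o.to_owned()),
		// No Origin header at all: nothing to fall back to.
		None => None,
	}
}

#[test]
fn resolves_origin_with_fallback() {
	assert_eq!(resolve_dapp_id(Some("https://parity.io"), Some("ignored")), Some("https://parity.io".into()));
	assert_eq!(resolve_dapp_id(Some("null"), Some("https://parity.io")), Some("https://parity.io".into()));
	assert_eq!(resolve_dapp_id(None, Some("https://parity.io")), None);
}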
@@ -19,6 +19,7 @@ use std::str;
use std::ops::Deref;
use std::sync::Arc;
use env_logger::LogBuilder;
use ethcore_rpc::Metadata;
use jsonrpc_core::MetaIoHandler;
use jsonrpc_core::reactor::RpcEventLoop;

@@ -58,7 +59,7 @@ impl Deref for ServerLoop {
	}
}

pub fn init_server<F, B>(process: F, remote: Remote) -> (ServerLoop, Arc<FakeRegistrar>) where
pub fn init_server<F, B>(process: F, io: MetaIoHandler<Metadata>, remote: Remote) -> (ServerLoop, Arc<FakeRegistrar>) where
	F: FnOnce(ServerBuilder) -> ServerBuilder<B>,
	B: Fetch,
{

@@ -70,7 +71,7 @@ pub fn init_server<F, B>(process: F, remote: Remote) -> (ServerLoop, Arc<FakeReg
	// TODO [ToDr] When https://github.com/ethcore/jsonrpc/issues/26 is resolved
	// this additional EventLoop wouldn't be needed, we should be able to re-use remote.
	let event_loop = RpcEventLoop::spawn();
	let handler = event_loop.handler(Arc::new(MetaIoHandler::default()));
	let handler = event_loop.handler(Arc::new(io));
	let server = process(ServerBuilder::new(
		&dapps_path, registrar.clone(), remote,
	))

@@ -100,12 +101,16 @@ pub fn serve_with_auth(user: &str, pass: &str) -> ServerLoop {
	}
}

pub fn serve_with_rpc(io: MetaIoHandler<Metadata>) -> ServerLoop {
	init_server(|builder| builder.allowed_hosts(None), io, Remote::new_sync()).0
}

pub fn serve_hosts(hosts: Option<Vec<String>>) -> ServerLoop {
	init_server(|builder| builder.allowed_hosts(hosts), Remote::new_sync()).0
	init_server(|builder| builder.allowed_hosts(hosts), Default::default(), Remote::new_sync()).0
}

pub fn serve_with_registrar() -> (ServerLoop, Arc<FakeRegistrar>) {
	init_server(|builder| builder.allowed_hosts(None), Remote::new_sync())
	init_server(|builder| builder.allowed_hosts(None), Default::default(), Remote::new_sync())
}

pub fn serve_with_registrar_and_sync() -> (ServerLoop, Arc<FakeRegistrar>) {

@@ -113,7 +118,7 @@ pub fn serve_with_registrar_and_sync() -> (ServerLoop, Arc<FakeRegistrar>) {
		builder
			.sync_status(Arc::new(|| true))
			.allowed_hosts(None)
	}, Remote::new_sync())
	}, Default::default(), Remote::new_sync())
}

pub fn serve_with_registrar_and_fetch() -> (ServerLoop, FakeFetch, Arc<FakeRegistrar>) {

@@ -125,7 +130,7 @@ pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Serv
	let f = fetch.clone();
	let (server, reg) = init_server(move |builder| {
		builder.allowed_hosts(None).fetch(f.clone())
	}, if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() });
	}, Default::default(), if multi_threaded { Remote::new_thread_per_future() } else { Remote::new_sync() });

	(server, fetch, reg)
}

@@ -138,13 +143,13 @@ pub fn serve_with_fetch(web_token: &'static str) -> (ServerLoop, FakeFetch) {
			.allowed_hosts(None)
			.fetch(f.clone())
			.web_proxy_tokens(Arc::new(move |token| &token == web_token))
	}, Remote::new_sync());
	}, Default::default(), Remote::new_sync());

	(server, fetch)
}

pub fn serve() -> ServerLoop {
	init_server(|builder| builder.allowed_hosts(None), Remote::new_sync()).0
	init_server(|builder| builder.allowed_hosts(None), Default::default(), Remote::new_sync()).0
}

pub fn request(server: ServerLoop, request: &str) -> http_client::Response {
@@ -22,5 +22,6 @@ mod api;
mod authorization;
mod fetch;
mod redirection;
mod rpc;
mod validation;
dapps/src/tests/rpc.rs (new file, 119 lines)
@@ -0,0 +1,119 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use futures::{future, Future};
use ethcore_rpc::{Metadata, Origin};
use jsonrpc_core::{MetaIoHandler, Value};

use tests::helpers::{serve_with_rpc, request};

#[test]
fn should_serve_rpc() {
	// given
	let mut io = MetaIoHandler::new();
	io.add_method("rpc_test", |_| {
		Ok(Value::String("Hello World!".into()))
	});
	let server = serve_with_rpc(io);

	// when
	let req = r#"{"jsonrpc":"2.0","id":1,"method":"rpc_test","params":[]}"#;
	let response = request(server, &format!(
		"\
			POST /rpc/ HTTP/1.1\r\n\
			Host: 127.0.0.1:8080\r\n\
			Connection: close\r\n\
			Content-Type: application/json\r\n\
			Content-Length: {}\r\n\
			\r\n\
			{}\r\n\
		",
		req.as_bytes().len(),
		req,
	));

	// then
	response.assert_status("HTTP/1.1 200 OK");
	assert_eq!(response.body, "31\n{\"jsonrpc\":\"2.0\",\"result\":\"Hello World!\",\"id\":1}\n\n0\n\n".to_owned());
}

#[test]
fn should_extract_metadata() {
	// given
	let mut io = MetaIoHandler::new();
	io.add_method_with_meta("rpc_test", |_params, meta: Metadata| {
		assert_eq!(meta.dapp_id, Some("https://parity.io/".to_owned()));
		assert_eq!(meta.origin, Origin::Dapps);
		future::ok(Value::String("Hello World!".into())).boxed()
	});
	let server = serve_with_rpc(io);

	// when
	let req = r#"{"jsonrpc":"2.0","id":1,"method":"rpc_test","params":[]}"#;
	let response = request(server, &format!(
		"\
			POST /rpc/ HTTP/1.1\r\n\
			Host: 127.0.0.1:8080\r\n\
			Connection: close\r\n\
			Origin: https://parity.io/\r\n\
			X-Parity-Origin: https://this.should.be.ignored\r\n\
			Content-Type: application/json\r\n\
			Content-Length: {}\r\n\
			\r\n\
			{}\r\n\
		",
		req.as_bytes().len(),
		req,
	));

	// then
	response.assert_status("HTTP/1.1 200 OK");
	assert_eq!(response.body, "31\n{\"jsonrpc\":\"2.0\",\"result\":\"Hello World!\",\"id\":1}\n\n0\n\n".to_owned());
}

#[test]
fn should_extract_metadata_from_custom_header() {
	// given
	let mut io = MetaIoHandler::new();
	io.add_method_with_meta("rpc_test", |_params, meta: Metadata| {
		assert_eq!(meta.dapp_id, Some("https://parity.io/".to_owned()));
		assert_eq!(meta.origin, Origin::Dapps);
		future::ok(Value::String("Hello World!".into())).boxed()
	});
	let server = serve_with_rpc(io);

	// when
	let req = r#"{"jsonrpc":"2.0","id":1,"method":"rpc_test","params":[]}"#;
	let response = request(server, &format!(
		"\
			POST /rpc/ HTTP/1.1\r\n\
			Host: 127.0.0.1:8080\r\n\
			Connection: close\r\n\
			Origin: null\r\n\
			X-Parity-Origin: https://parity.io/\r\n\
			Content-Type: application/json\r\n\
			Content-Length: {}\r\n\
			\r\n\
			{}\r\n\
		",
		req.as_bytes().len(),
		req,
	));

	// then
	response.assert_status("HTTP/1.1 200 OK");
	assert_eq!(response.body, "31\n{\"jsonrpc\":\"2.0\",\"result\":\"Hello World!\",\"id\":1}\n\n0\n\n".to_owned());
}
@@ -74,7 +74,18 @@ impl From<SSError> for Error {
}

/// Dapp identifier
pub type DappId = String;
#[derive(Default, Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub struct DappId(String);

impl From<DappId> for String {
	fn from(id: DappId) -> String { id.0 }
}
impl From<String> for DappId {
	fn from(id: String) -> DappId { DappId(id) }
}
impl<'a> From<&'a str> for DappId {
	fn from(id: &'a str) -> DappId { DappId(id.to_owned()) }
}

fn transient_sstore() -> EthMultiStore {
	EthMultiStore::open(Box::new(MemoryDirectory::default())).expect("MemoryDirectory load always succeeds; qed")
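A small hedged sketch (assuming the DappId type and From impls above are in scope) of the conversions the newtype now supports in both directions:

#[test]
fn dapp_id_round_trip() {
	// &str and String convert into DappId through the From impls above...
	let from_str: DappId = "https://parity.io".into();
	let from_string: DappId = String::from("https://parity.io").into();
	assert_eq!(from_str, from_string);
	// ...and a DappId converts back into a plain String.
	let raw: String = from_str.into();
	assert_eq!(raw, "https://parity.io");
}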
@@ -181,7 +192,7 @@ impl AccountProvider {
	}

	/// Gets a list of dapps recently requesting accounts.
	pub fn recent_dapps(&self) -> Result<Vec<DappId>, Error> {
	pub fn recent_dapps(&self) -> Result<HashMap<DappId, u64>, Error> {
		Ok(self.dapps_settings.read().recent_dapps())
	}
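A hedged usage sketch of the new signature: recent_dapps now yields a map from dapp id to the timestamp of its last access instead of a most-recent-first Vec, so callers that need an ordering sort by the timestamp value. The helper function below is illustrative only.

fn print_recent_dapps(ap: &AccountProvider) {
	// Sketch only: iterate the dapp -> last-accessed-timestamp map.
	for (dapp, last_accessed) in ap.recent_dapps().expect("recent_dapps returns Ok above") {
		println!("{:?} last requested accounts at {}", dapp, last_accessed);
	}
}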
@@ -405,7 +416,7 @@ impl AccountProvider {

#[cfg(test)]
mod tests {
	use super::{AccountProvider, Unlock};
	use super::{AccountProvider, Unlock, DappId};
	use std::time::Instant;
	use ethstore::ethkey::{Generator, Random};

@@ -466,7 +477,7 @@ mod tests {
	fn should_set_dapps_addresses() {
		// given
		let ap = AccountProvider::transient_provider();
		let app = "app1".to_owned();
		let app = DappId("app1".into());
		// set `AllAccounts` policy
		ap.set_new_dapps_whitelist(None).unwrap();
@@ -17,11 +17,17 @@
//! Address Book and Dapps Settings Store

use std::{fs, fmt, hash, ops};
use std::collections::{HashMap, VecDeque};
use std::sync::atomic::{self, AtomicUsize};
use std::collections::HashMap;
use std::path::PathBuf;

use ethstore::ethkey::Address;
use ethjson::misc::{AccountMeta, DappsSettings as JsonSettings, NewDappsPolicy as JsonNewDappsPolicy};
use ethjson::misc::{
	AccountMeta,
	DappsSettings as JsonSettings,
	DappsHistory as JsonDappsHistory,
	NewDappsPolicy as JsonNewDappsPolicy,
};
use account_provider::DappId;

/// Disk-backed map from Address to String. Uses JSON.

@@ -35,7 +41,7 @@ impl AddressBook {
		let mut r = AddressBook {
			cache: DiskMap::new(path, "address_book.json".into())
		};
		r.cache.revert(AccountMeta::read_address_map);
		r.cache.revert(AccountMeta::read);
		r
	}

@@ -52,7 +58,7 @@ impl AddressBook {
	}

	fn save(&self) {
		self.cache.save(AccountMeta::write_address_map)
		self.cache.save(AccountMeta::write)
	}

	/// Sets new name for given address.

@@ -134,7 +140,51 @@ impl From<NewDappsPolicy> for JsonNewDappsPolicy {
	}
}
const MAX_RECENT_DAPPS: usize = 10;
/// Transient dapps data
#[derive(Default, Debug, Clone, Eq, PartialEq)]
pub struct TransientDappsData {
	/// Timestamp of last access
	pub last_accessed: u64,
}

impl From<JsonDappsHistory> for TransientDappsData {
	fn from(s: JsonDappsHistory) -> Self {
		TransientDappsData {
			last_accessed: s.last_accessed,
		}
	}
}

impl From<TransientDappsData> for JsonDappsHistory {
	fn from(s: TransientDappsData) -> Self {
		JsonDappsHistory {
			last_accessed: s.last_accessed,
		}
	}
}

enum TimeProvider {
	Clock,
	Incremenetal(AtomicUsize)
}

impl TimeProvider {
	fn get(&self) -> u64 {
		match *self {
			TimeProvider::Clock => {
				::std::time::UNIX_EPOCH.elapsed()
					.expect("Correct time is required to be set")
					.as_secs()
			},
			TimeProvider::Incremenetal(ref time) => {
				time.fetch_add(1, atomic::Ordering::SeqCst) as u64
			},
		}
	}
}

const MAX_RECENT_DAPPS: usize = 50;

/// Disk-backed map from DappId to Settings. Uses JSON.
pub struct DappsSettingsStore {
@@ -142,8 +192,10 @@ pub struct DappsSettingsStore {
	settings: DiskMap<DappId, DappsSettings>,
	/// New Dapps Policy
	policy: DiskMap<String, NewDappsPolicy>,
	/// Recently Accessed Dapps (transient)
	recent: VecDeque<DappId>,
	/// Transient Data of recently Accessed Dapps
	history: DiskMap<DappId, TransientDappsData>,
	/// Time
	time: TimeProvider,
}

impl DappsSettingsStore {

@@ -152,10 +204,12 @@ impl DappsSettingsStore {
		let mut r = DappsSettingsStore {
			settings: DiskMap::new(path.clone(), "dapps_accounts.json".into()),
			policy: DiskMap::new(path.clone(), "dapps_policy.json".into()),
			recent: VecDeque::with_capacity(MAX_RECENT_DAPPS),
			history: DiskMap::new(path.clone(), "dapps_history.json".into()),
			time: TimeProvider::Clock,
		};
		r.settings.revert(JsonSettings::read_dapps_settings);
		r.policy.revert(JsonNewDappsPolicy::read_new_dapps_policy);
		r.settings.revert(JsonSettings::read);
		r.policy.revert(JsonNewDappsPolicy::read);
		r.history.revert(JsonDappsHistory::read);
		r
	}

@@ -164,7 +218,8 @@ impl DappsSettingsStore {
		DappsSettingsStore {
			settings: DiskMap::transient(),
			policy: DiskMap::transient(),
			recent: VecDeque::with_capacity(MAX_RECENT_DAPPS),
			history: DiskMap::transient(),
			time: TimeProvider::Incremenetal(AtomicUsize::new(1)),
		}
	}
@@ -178,24 +233,36 @@ impl DappsSettingsStore {
		self.policy.get("default").cloned().unwrap_or(NewDappsPolicy::AllAccounts)
	}

	/// Returns recent dapps (in order of last request)
	pub fn recent_dapps(&self) -> Vec<DappId> {
		self.recent.iter().cloned().collect()
	/// Returns recent dapps with last accessed timestamp
	pub fn recent_dapps(&self) -> HashMap<DappId, u64> {
		self.history.iter().map(|(k, v)| (k.clone(), v.last_accessed)).collect()
	}

	/// Marks recent dapp as used
	pub fn mark_dapp_used(&mut self, dapp: DappId) {
		self.recent.retain(|id| id != &dapp);
		self.recent.push_front(dapp);
		while self.recent.len() > MAX_RECENT_DAPPS {
			self.recent.pop_back();
		{
			let mut entry = self.history.entry(dapp).or_insert_with(|| Default::default());
			entry.last_accessed = self.time.get();
		}
		// Clear extraneous entries
		while self.history.len() > MAX_RECENT_DAPPS {
			let min = self.history.iter()
				.min_by_key(|&(_, ref v)| v.last_accessed)
				.map(|(ref k, _)| k.clone())
				.cloned();

			match min {
				Some(k) => self.history.remove(&k),
				None => break,
			};
		}
		self.history.save(JsonDappsHistory::write);
	}

	/// Sets current new dapps policy
	pub fn set_policy(&mut self, policy: NewDappsPolicy) {
		self.policy.insert("default".into(), policy);
		self.policy.save(JsonNewDappsPolicy::write_new_dapps_policy);
		self.policy.save(JsonNewDappsPolicy::write);
	}

	/// Sets accounts for specific dapp.
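The pruning loop in mark_dapp_used above bounds the history by repeatedly evicting the entry with the oldest timestamp. A self-contained sketch of that idea over a plain HashMap (the function name and cap parameter are hypothetical, not from the diff):

use std::collections::HashMap;

/// Sketch only: drop oldest-by-timestamp entries until at most `cap` remain,
/// mirroring the `while self.history.len() > MAX_RECENT_DAPPS` loop above.
fn prune_history(history: &mut HashMap<String, u64>, cap: usize) {
	while history.len() > cap {
		let oldest = history.iter()
			.min_by_key(|&(_, &last_accessed)| last_accessed)
			.map(|(k, _)| k.clone());
		match oldest {
			Some(k) => { history.remove(&k); },
			None => break,
		}
	}
}

#[test]
fn prunes_to_cap() {
	let mut h: HashMap<String, u64> = vec![("a".into(), 1), ("b".into(), 2), ("c".into(), 3)].into_iter().collect();
	prune_history(&mut h, 2);
	assert_eq!(h.len(), 2);
	assert!(!h.contains_key("a"));
}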
@@ -204,7 +271,7 @@ impl DappsSettingsStore {
			let mut settings = self.settings.entry(id).or_insert_with(DappsSettings::default);
			settings.accounts = accounts;
		}
		self.settings.save(JsonSettings::write_dapps_settings);
		self.settings.save(JsonSettings::write);
	}
}

@@ -280,6 +347,7 @@ impl<K: hash::Hash + Eq, V> DiskMap<K, V> {
#[cfg(test)]
mod tests {
	use super::{AddressBook, DappsSettingsStore, DappsSettings, NewDappsPolicy};
	use account_provider::DappId;
	use std::collections::HashMap;
	use ethjson::misc::AccountMeta;
	use devtools::RandomTempPath;

@@ -333,18 +401,22 @@ mod tests {
	}

	#[test]
	fn should_maintain_a_list_of_recent_dapps() {
	fn should_maintain_a_map_of_recent_dapps() {
		let mut store = DappsSettingsStore::transient();
		assert!(store.recent_dapps().is_empty(), "Initially recent dapps should be empty.");

		store.mark_dapp_used("dapp1".into());
		assert_eq!(store.recent_dapps(), vec!["dapp1".to_owned()]);
		let dapp1: DappId = "dapp1".into();
		let dapp2: DappId = "dapp2".into();
		store.mark_dapp_used(dapp1.clone());
		let recent = store.recent_dapps();
		assert_eq!(recent.len(), 1);
		assert_eq!(recent.get(&dapp1), Some(&1));

		store.mark_dapp_used("dapp2".into());
		assert_eq!(store.recent_dapps(), vec!["dapp2".to_owned(), "dapp1".to_owned()]);

		store.mark_dapp_used("dapp1".into());
		assert_eq!(store.recent_dapps(), vec!["dapp1".to_owned(), "dapp2".to_owned()]);
		store.mark_dapp_used(dapp2.clone());
		let recent = store.recent_dapps();
		assert_eq!(recent.len(), 2);
		assert_eq!(recent.get(&dapp1), Some(&1));
		assert_eq!(recent.get(&dapp2), Some(&2));
	}

	#[test]
@@ -22,7 +22,13 @@ use {json, SafeAccount, Error};
use json::Uuid;
use super::KeyDirectory;

const IGNORED_FILES: &'static [&'static str] = &["thumbs.db", "address_book.json", "dapps_policy.json"];
const IGNORED_FILES: &'static [&'static str] = &[
	"thumbs.db",
	"address_book.json",
	"dapps_policy.json",
	"dapps_accounts.json",
	"dapps_history.json",
];

#[cfg(not(windows))]
fn restrict_permissions_to_owner(file_path: &Path) -> Result<(), i32> {
@@ -29,9 +29,9 @@ macro_rules! impl_hash {
		#[derive(Default, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Clone)]
		pub struct $name(pub $inner);

		impl Into<$inner> for $name {
			fn into(self) -> $inner {
				self.0
		impl From<$name> for $inner {
			fn from(other: $name) -> $inner {
				other.0
			}
		}
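Switching the macro from `impl Into<$inner> for $name` to `impl From<$name> for $inner` is the idiomatic direction: the standard library's blanket impl then provides the matching `Into` for free, so existing `.into()` call sites keep working while `$inner::from(...)` becomes available too. A tiny illustration with a hypothetical wrapper type:

// Sketch only: `Wrapper` stands in for the macro-generated hash newtypes.
struct Wrapper(u64);

impl From<Wrapper> for u64 {
	fn from(w: Wrapper) -> u64 { w.0 }
}

fn demo() {
	let via_into: u64 = Wrapper(7).into(); // provided by the blanket `impl Into`
	assert_eq!(via_into, u64::from(Wrapper(7)));
}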
@@ -16,14 +16,10 @@

//! Misc deserialization.

use std::io::{Read, Write};
use std::collections::HashMap;
use serde_json;
use util;
use hash;

/// Collected account metadata
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
#[derive(Default, Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct AccountMeta {
	/// The name of the account.
	pub name: String,

@@ -33,26 +29,4 @@ pub struct AccountMeta {
	pub uuid: Option<String>,
}

impl Default for AccountMeta {
	fn default() -> Self {
		AccountMeta {
			name: String::new(),
			meta: "{}".to_owned(),
			uuid: None,
		}
	}
}

impl AccountMeta {
	/// Read a hash map of Address -> AccountMeta.
	pub fn read_address_map<R>(reader: R) -> Result<HashMap<util::Address, AccountMeta>, serde_json::Error> where R: Read {
		serde_json::from_reader(reader).map(|ok: HashMap<hash::Address, AccountMeta>|
			ok.into_iter().map(|(a, m)| (a.into(), m)).collect()
		)
	}

	/// Write a hash map of Address -> AccountMeta.
	pub fn write_address_map<W>(m: &HashMap<util::Address, AccountMeta>, writer: &mut W) -> Result<(), serde_json::Error> where W: Write {
		serde_json::to_writer(writer, &m.iter().map(|(a, m)| (a.clone().into(), m)).collect::<HashMap<hash::Address, _>>())
	}
}
impl_serialization!(hash::Address => AccountMeta);
@@ -16,13 +16,8 @@

//! Dapps settings de/serialization.

use std::io;
use std::collections::HashMap;
use serde_json;
use hash;

type DappId = String;

/// Settings for specific dapp.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DappsSettings {

@@ -30,26 +25,17 @@ pub struct DappsSettings {
	pub accounts: Vec<hash::Address>,
}

impl DappsSettings {
	/// Read a hash map of DappId -> DappsSettings
	pub fn read_dapps_settings<R, S>(reader: R) -> Result<HashMap<DappId, S>, serde_json::Error> where
		R: io::Read,
		S: From<DappsSettings> + Clone,
	{
		serde_json::from_reader(reader).map(|ok: HashMap<DappId, DappsSettings>|
			ok.into_iter().map(|(a, m)| (a.into(), m.into())).collect()
		)
	}
impl_serialization!(String => DappsSettings);

	/// Write a hash map of DappId -> DappsSettings
	pub fn write_dapps_settings<W, S>(m: &HashMap<DappId, S>, writer: &mut W) -> Result<(), serde_json::Error> where
		W: io::Write,
		S: Into<DappsSettings> + Clone,
	{
		serde_json::to_writer(writer, &m.iter().map(|(a, m)| (a.clone().into(), m.clone().into())).collect::<HashMap<DappId, DappsSettings>>())
	}
/// History for specific dapp.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct DappsHistory {
	/// Last accessed timestamp
	pub last_accessed: u64,
}

impl_serialization!(String => DappsHistory);

/// Accounts policy for new dapps.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum NewDappsPolicy {

@@ -59,22 +45,4 @@ pub enum NewDappsPolicy {
	Whitelist(Vec<hash::Address>),
}

impl NewDappsPolicy {
	/// Read a hash map of `String -> NewDappsPolicy`
	pub fn read_new_dapps_policy<R, S>(reader: R) -> Result<HashMap<String, S>, serde_json::Error> where
		R: io::Read,
		S: From<NewDappsPolicy> + Clone,
	{
		serde_json::from_reader(reader).map(|ok: HashMap<String, NewDappsPolicy>|
			ok.into_iter().map(|(a, m)| (a.into(), m.into())).collect()
		)
	}

	/// Write a hash map of `String -> NewDappsPolicy`
	pub fn write_new_dapps_policy<W, S>(m: &HashMap<String, S>, writer: &mut W) -> Result<(), serde_json::Error> where
		W: io::Write,
		S: Into<NewDappsPolicy> + Clone,
	{
		serde_json::to_writer(writer, &m.iter().map(|(a, m)| (a.clone().into(), m.clone().into())).collect::<HashMap<String, NewDappsPolicy>>())
	}
}
impl_serialization!(String => NewDappsPolicy);
@@ -16,8 +16,39 @@

//! Misc deserialization.

macro_rules! impl_serialization {
	($key: ty => $name: ty) => {
		impl $name {
			/// Read a hash map of DappId -> $name
			pub fn read<R, S, D>(reader: R) -> Result<::std::collections::HashMap<D, S>, ::serde_json::Error> where
				R: ::std::io::Read,
				D: From<$key> + ::std::hash::Hash + Eq,
				S: From<$name> + Clone,
			{
				::serde_json::from_reader(reader).map(|ok: ::std::collections::HashMap<$key, $name>|
					ok.into_iter().map(|(a, m)| (a.into(), m.into())).collect()
				)
			}

			/// Write a hash map of DappId -> $name
			pub fn write<W, S, D>(m: &::std::collections::HashMap<D, S>, writer: &mut W) -> Result<(), ::serde_json::Error> where
				W: ::std::io::Write,
				D: Into<$key> + ::std::hash::Hash + Eq + Clone,
				S: Into<$name> + Clone,
			{
				::serde_json::to_writer(
					writer,
					&m.iter()
						.map(|(a, m)| (a.clone().into(), m.clone().into()))
						.collect::<::std::collections::HashMap<$key, $name>>()
				)
			}
		}
	}
}

mod account_meta;
mod dapps_settings;

pub use self::dapps_settings::{DappsSettings, NewDappsPolicy};
pub use self::dapps_settings::{DappsSettings, DappsHistory, NewDappsPolicy};
pub use self::account_meta::AccountMeta;
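A hedged sketch of what each `impl_serialization!($key => $name)` invocation generates: `read`/`write` helpers that round-trip a `HashMap` through JSON while converting keys and values into the caller's own types. Assuming the `DappId` and `TransientDappsData` conversions shown earlier are in scope, usage looks roughly like this (the helper names below are illustrative, not from the diff):

use std::collections::HashMap;
use std::fs::File;

// Sketch only: the generated `read` converts String keys into DappId and
// DappsHistory values into TransientDappsData on the way in.
fn load_history(file: File) -> HashMap<DappId, TransientDappsData> {
	DappsHistory::read(file).unwrap_or_default()
}

// Sketch only: `write` mirrors it, converting back to String / DappsHistory.
fn save_history(map: &HashMap<DappId, TransientDappsData>, out: &mut Vec<u8>) -> Result<(), serde_json::Error> {
	DappsHistory::write(map, out)
}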
@@ -196,12 +196,12 @@ impl<C: 'static> ParityAccounts for ParityAccountsClient<C> where C: MiningBlock
		.map(|accounts| accounts.map(into_vec))
	}

	fn recent_dapps(&self) -> Result<Vec<DappId>, Error> {
	fn recent_dapps(&self) -> Result<BTreeMap<DappId, u64>, Error> {
		let store = take_weak!(self.accounts);

		store.recent_dapps()
			.map_err(|e| errors::account("Couldn't get recent dapps.", e))
			.map(into_vec)
			.map(|map| map.into_iter().map(|(k, v)| (k.into(), v)).collect())
	}

	fn import_geth_accounts(&self, addresses: Vec<RpcH160>) -> Result<Vec<RpcH160>, Error> {
@@ -173,7 +173,7 @@ fn rpc_parity_recent_dapps() {

	// then
	let request = r#"{"jsonrpc": "2.0", "method": "parity_listRecentDapps","params":[], "id": 1}"#;
	let response = r#"{"jsonrpc":"2.0","result":["dapp1"],"id":1}"#;
	let response = r#"{"jsonrpc":"2.0","result":{"dapp1":1},"id":1}"#;
	assert_eq!(tester.io.handle_request_sync(request), Some(response.to_owned()));
}
@@ -92,9 +92,10 @@ build_rpc_trait! {
		#[rpc(name = "parity_getNewDappsWhitelist")]
		fn new_dapps_whitelist(&self) -> Result<Option<Vec<H160>>, Error>;

		/// Sets accounts exposed for particular dapp.
		/// Returns identified dapps that recently used RPC
		/// Includes last usage timestamp.
		#[rpc(name = "parity_listRecentDapps")]
		fn recent_dapps(&self) -> Result<Vec<DappId>, Error>;
		fn recent_dapps(&self) -> Result<BTreeMap<DappId, u64>, Error>;

		/// Imports a number of Geth accounts, with the list provided as the argument.
		#[rpc(name = "parity_importGethAccounts")]
@@ -16,8 +16,10 @@

//! Dapp Id type

use ethcore::account_provider::DappId as EthDappId;

/// Dapplication Internal Id
#[derive(Debug, Default, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
#[derive(Debug, Default, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)]
pub struct DappId(pub String);

impl Into<String> for DappId {

@@ -32,6 +34,18 @@ impl From<String> for DappId {
	}
}

impl From<EthDappId> for DappId {
	fn from(id: EthDappId) -> Self {
		DappId(id.into())
	}
}

impl Into<EthDappId> for DappId {
	fn into(self) -> EthDappId {
		Into::<String>::into(self).into()
	}
}

#[cfg(test)]
mod tests {
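A brief sketch (assuming the types above are in scope; the test module itself is truncated in this view) of the round trip these new impls enable between the core and RPC dapp-id types:

// Sketch only: converting between ethcore's DappId and the RPC DappId.
fn round_trip(rpc_id: DappId) -> DappId {
	let core: EthDappId = rpc_id.into(); // RPC -> ethcore, via the new Into<EthDappId>
	core.into()                          // ethcore -> RPC, via the new From<EthDappId>
}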