Merge branch 'master' into lightrpc

This commit is contained in:
Robert Habermeier 2017-02-08 19:20:35 +01:00
commit 0246d5c734
32 changed files with 268 additions and 43 deletions

View File

@ -422,7 +422,7 @@ windows:
- set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64 - set LIB=C:\vs2015\VC\lib;C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64
- set RUST_BACKTRACE=1 - set RUST_BACKTRACE=1
- set RUSTFLAGS=%RUSTFLAGS% - set RUSTFLAGS=%RUSTFLAGS%
- rustup default stable-x86_64-pc-windows-msvc - rustup default 1.14.0-x86_64-pc-windows-msvc
- cargo build --features final --release #%CARGOFLAGS% - cargo build --features final --release #%CARGOFLAGS%
- signtool sign /f %keyfile% /p %certpass% target\release\parity.exe - signtool sign /f %keyfile% /p %certpass% target\release\parity.exe
- target\release\parity.exe tools hash target\release\parity.exe > parity.sha3 - target\release\parity.exe tools hash target\release\parity.exe > parity.sha3

2
Cargo.lock generated
View File

@ -1575,7 +1575,7 @@ dependencies = [
[[package]] [[package]]
name = "parity-ui-precompiled" name = "parity-ui-precompiled"
version = "1.4.0" version = "1.4.0"
source = "git+https://github.com/ethcore/js-precompiled.git#a590186c6acf75e31b7cff259721793960ded4e1" source = "git+https://github.com/ethcore/js-precompiled.git#cb0dd77b70c552bb68288a94c7d5d37ecdd611c8"
dependencies = [ dependencies = [
"parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]

View File

@ -61,7 +61,7 @@ We recommend installing Rust through [rustup](https://www.rustup.rs/). If you do
$ curl https://sh.rustup.rs -sSf | sh $ curl https://sh.rustup.rs -sSf | sh
``` ```
Parity also requires `gcc`, `g++`, `libssl-dev`/`openssl` and `pkg-config` packages to be installed. Parity also requires `gcc`, `g++`, `libssl-dev`/`openssl`, `libudev-dev` and `pkg-config` packages to be installed.
- OSX: - OSX:
```bash ```bash
$ curl https://sh.rustup.rs -sSf | sh $ curl https://sh.rustup.rs -sSf | sh

View File

View File

@ -43,7 +43,7 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Option
if let Some(embeddable_on) = embeddable_on { if let Some(embeddable_on) = embeddable_on {
headers.set_raw( headers.set_raw(
"X-Frame-Options", "X-Frame-Options",
vec![format!("ALLOW-FROM http://{}", address(embeddable_on)).into_bytes()] vec![format!("ALLOW-FROM http://{}", address(&embeddable_on)).into_bytes()]
); );
} else { } else {
// TODO [ToDr] Should we be more strict here (DENY?)? // TODO [ToDr] Should we be more strict here (DENY?)?

View File

@ -253,7 +253,12 @@ impl Server {
match signer_address { match signer_address {
Some(signer_address) => vec![ Some(signer_address) => vec![
format!("http://{}{}", HOME_PAGE, DAPPS_DOMAIN), format!("http://{}{}", HOME_PAGE, DAPPS_DOMAIN),
format!("http://{}", address(signer_address)), format!("http://{}{}:{}", HOME_PAGE, DAPPS_DOMAIN, signer_address.1),
format!("http://{}", address(&signer_address)),
format!("https://{}{}", HOME_PAGE, DAPPS_DOMAIN),
format!("https://{}{}:{}", HOME_PAGE, DAPPS_DOMAIN, signer_address.1),
format!("https://{}", address(&signer_address)),
], ],
None => vec![], None => vec![],
} }
@ -377,7 +382,7 @@ fn random_filename() -> String {
rng.gen_ascii_chars().take(12).collect() rng.gen_ascii_chars().take(12).collect()
} }
fn address(address: (String, u16)) -> String { fn address(address: &(String, u16)) -> String {
format!("{}:{}", address.0, address.1) format!("{}:{}", address.0, address.1)
} }
@ -411,6 +416,14 @@ mod util_tests {
// then // then
assert_eq!(none, Vec::<String>::new()); assert_eq!(none, Vec::<String>::new());
assert_eq!(some, vec!["http://parity.web3.site".to_owned(), "http://127.0.0.1:18180".into()]); assert_eq!(some, vec![
"http://parity.web3.site".to_owned(),
"http://parity.web3.site:18180".into(),
"http://127.0.0.1:18180".into(),
"https://parity.web3.site".into(),
"https://parity.web3.site:18180".into(),
"https://127.0.0.1:18180".into()
]);
} }
} }

View File

@ -35,7 +35,8 @@ impl ProxyPac {
impl Endpoint for ProxyPac { impl Endpoint for ProxyPac {
fn to_handler(&self, path: EndpointPath) -> Box<Handler> { fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
let signer = self.signer_address.clone() let signer = self.signer_address
.as_ref()
.map(address) .map(address)
.unwrap_or_else(|| format!("{}:{}", path.host, path.port)); .unwrap_or_else(|| format!("{}:{}", path.host, path.port));

View File

@ -138,7 +138,7 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
}, },
// Redirect any other GET request to signer. // Redirect any other GET request to signer.
_ if is_get_request => { _ if is_get_request => {
if let Some(signer_address) = self.signer_address.clone() { if let Some(ref signer_address) = self.signer_address {
trace!(target: "dapps", "Redirecting to signer interface."); trace!(target: "dapps", "Redirecting to signer interface.");
Redirection::boxed(&format!("http://{}", address(signer_address))) Redirection::boxed(&format!("http://{}", address(signer_address)))
} else { } else {

View File

@ -158,3 +158,57 @@ fn should_return_signer_port_cors_headers_for_home_parity() {
response.headers response.headers
); );
} }
#[test]
fn should_return_signer_port_cors_headers_for_home_parity_with_https() {
// given
let server = serve();
// when
let response = request(server,
"\
POST /api/ping HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Origin: https://parity.web3.site\r\n\
Connection: close\r\n\
\r\n\
{}
"
);
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert!(
response.headers_raw.contains("Access-Control-Allow-Origin: https://parity.web3.site"),
"CORS header for parity.web3.site missing: {:?}",
response.headers
);
}
#[test]
fn should_return_signer_port_cors_headers_for_home_parity_with_port() {
// given
let server = serve();
// when
let response = request(server,
"\
POST /api/ping HTTP/1.1\r\n\
Host: localhost:8080\r\n\
Origin: http://parity.web3.site:18180\r\n\
Connection: close\r\n\
\r\n\
{}
"
);
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert!(
response.headers_raw.contains("Access-Control-Allow-Origin: http://parity.web3.site:18180"),
"CORS header for parity.web3.site missing: {:?}",
response.headers
);
}

View File

@ -19,6 +19,7 @@ RUN apt-get update && \
file \ file \
openssl \ openssl \
libssl-dev \ libssl-dev \
libudev-dev \
pkg-config \ pkg-config \
dpkg-dev \ dpkg-dev \
# evmjit dependencies # evmjit dependencies

View File

@ -493,6 +493,18 @@ impl AccountProvider {
.map_err(Into::into) .map_err(Into::into)
.map(|_| ()) .map(|_| ())
} }
/// Get vault metadata string.
pub fn get_vault_meta(&self, name: &str) -> Result<String, Error> {
self.sstore.get_vault_meta(name)
.map_err(Into::into)
}
/// Set vault metadata string.
pub fn set_vault_meta(&self, name: &str, meta: &str) -> Result<(), Error> {
self.sstore.set_vault_meta(name, meta)
.map_err(Into::into)
}
} }
#[cfg(test)] #[cfg(test)]

View File

@ -220,6 +220,7 @@ pub trait BlockChainClient : Sync + Send {
let block = self.block(BlockId::Hash(h)).expect("h is either the best_block_hash or an ancestor; qed"); let block = self.block(BlockId::Hash(h)).expect("h is either the best_block_hash or an ancestor; qed");
let header = block.header_view(); let header = block.header_view();
if header.number() == 0 { if header.number() == 0 {
corpus.sort();
return corpus; return corpus;
} }
block.transaction_views().iter().foreach(|t| corpus.push(t.gas_price())); block.transaction_views().iter().foreach(|t| corpus.push(t.gas_price()));

View File

@ -233,6 +233,13 @@ fn empty_gas_price_histogram() {
assert!(client.gas_price_histogram(20, 5).is_none()); assert!(client.gas_price_histogram(20, 5).is_none());
} }
#[test]
fn corpus_is_sorted() {
let client_result = generate_dummy_client_with_data(2, 1, slice_into![U256::from_str("11426908979").unwrap(), U256::from_str("50426908979").unwrap()]);
let client = client_result.reference();
let corpus = client.gas_price_corpus(20);
assert!(corpus[0] < corpus[1]);
}
#[test] #[test]
fn can_handle_long_fork() { fn can_handle_long_fork() {

View File

@ -16,7 +16,7 @@
//! Trace database. //! Trace database.
use std::ops::Deref; use std::ops::Deref;
use std::collections::HashMap; use std::collections::{HashMap, VecDeque};
use std::sync::Arc; use std::sync::Arc;
use bloomchain::{Number, Config as BloomConfig}; use bloomchain::{Number, Config as BloomConfig};
use bloomchain::group::{BloomGroupDatabase, BloomGroupChain, GroupPosition, BloomGroup}; use bloomchain::group::{BloomGroupDatabase, BloomGroupChain, GroupPosition, BloomGroup};
@ -305,7 +305,7 @@ impl<T> TraceDatabase for TraceDB<T> where T: DatabaseExtras {
} }
fn trace(&self, block_number: BlockNumber, tx_position: usize, trace_position: Vec<usize>) -> Option<LocalizedTrace> { fn trace(&self, block_number: BlockNumber, tx_position: usize, trace_position: Vec<usize>) -> Option<LocalizedTrace> {
let trace_position_deq = trace_position.into_iter().collect(); let trace_position_deq = trace_position.into_iter().collect::<VecDeque<usize>>();
self.extras.block_hash(block_number) self.extras.block_hash(block_number)
.and_then(|block_hash| self.transactions_traces(&block_hash) .and_then(|block_hash| self.transactions_traces(&block_hash)
.and_then(|traces| traces.into_iter().nth(tx_position)) .and_then(|traces| traces.into_iter().nth(tx_position))

View File

@ -158,7 +158,7 @@ impl<T> DiskDirectory<T> where T: KeyFileManager {
Ok(account) Ok(account)
} }
/// Get key file manager /// Get key file manager reference
pub fn key_manager(&self) -> &T { pub fn key_manager(&self) -> &T {
&self.key_manager &self.key_manager
} }

View File

@ -84,6 +84,10 @@ pub trait VaultKeyDirectory: KeyDirectory {
fn key(&self) -> VaultKey; fn key(&self) -> VaultKey;
/// Set new key for vault /// Set new key for vault
fn set_key(&self, key: VaultKey) -> Result<(), SetKeyError>; fn set_key(&self, key: VaultKey) -> Result<(), SetKeyError>;
/// Get vault meta
fn meta(&self) -> String;
/// Set vault meta
fn set_meta(&self, meta: &str) -> Result<(), Error>;
} }
pub use self::disk::RootDiskDirectory; pub use self::disk::RootDiskDirectory;

View File

@ -16,6 +16,7 @@
use std::{fs, io}; use std::{fs, io};
use std::path::{PathBuf, Path}; use std::path::{PathBuf, Path};
use parking_lot::Mutex;
use {json, SafeAccount, Error}; use {json, SafeAccount, Error};
use util::sha3::Hashable; use util::sha3::Hashable;
use super::super::account::Crypto; use super::super::account::Crypto;
@ -24,6 +25,8 @@ use super::disk::{DiskDirectory, KeyFileManager};
/// Name of vault metadata file /// Name of vault metadata file
pub const VAULT_FILE_NAME: &'static str = "vault.json"; pub const VAULT_FILE_NAME: &'static str = "vault.json";
/// Name of temporary vault metadata file
pub const VAULT_TEMP_FILE_NAME: &'static str = "vault_temp.json";
/// Vault directory implementation /// Vault directory implementation
pub type VaultDiskDirectory = DiskDirectory<VaultKeyFileManager>; pub type VaultDiskDirectory = DiskDirectory<VaultKeyFileManager>;
@ -32,6 +35,7 @@ pub type VaultDiskDirectory = DiskDirectory<VaultKeyFileManager>;
pub struct VaultKeyFileManager { pub struct VaultKeyFileManager {
name: String, name: String,
key: VaultKey, key: VaultKey,
meta: Mutex<String>,
} }
impl VaultDiskDirectory { impl VaultDiskDirectory {
@ -44,13 +48,14 @@ impl VaultDiskDirectory {
} }
// create vault && vault file // create vault && vault file
let vault_meta = "{}";
fs::create_dir_all(&vault_dir_path)?; fs::create_dir_all(&vault_dir_path)?;
if let Err(err) = create_vault_file(&vault_dir_path, &key) { if let Err(err) = create_vault_file(&vault_dir_path, &key, vault_meta) {
let _ = fs::remove_dir_all(&vault_dir_path); // can't do anything with this let _ = fs::remove_dir_all(&vault_dir_path); // can't do anything with this
return Err(err); return Err(err);
} }
Ok(DiskDirectory::new(vault_dir_path, VaultKeyFileManager::new(name, key))) Ok(DiskDirectory::new(vault_dir_path, VaultKeyFileManager::new(name, key, vault_meta)))
} }
/// Open existing vault directory with given key /// Open existing vault directory with given key
@ -62,9 +67,9 @@ impl VaultDiskDirectory {
} }
// check that passed key matches vault file // check that passed key matches vault file
check_vault_file(&vault_dir_path, &key)?; let meta = read_vault_file(&vault_dir_path, &key)?;
Ok(DiskDirectory::new(vault_dir_path, VaultKeyFileManager::new(name, key))) Ok(DiskDirectory::new(vault_dir_path, VaultKeyFileManager::new(name, key, &meta)))
} }
fn create_temp_vault(&self, key: VaultKey) -> Result<VaultDiskDirectory, Error> { fn create_temp_vault(&self, key: VaultKey) -> Result<VaultDiskDirectory, Error> {
@ -145,13 +150,26 @@ impl VaultKeyDirectory for VaultDiskDirectory {
temp_vault.delete().map_err(|err| SetKeyError::NonFatalNew(err)) temp_vault.delete().map_err(|err| SetKeyError::NonFatalNew(err))
} }
fn meta(&self) -> String {
self.key_manager().meta.lock().clone()
}
fn set_meta(&self, meta: &str) -> Result<(), Error> {
let key_manager = self.key_manager();
let vault_path = self.path().expect("self is instance of DiskDirectory; DiskDirectory always returns path; qed");
create_vault_file(vault_path, &key_manager.key, meta)?;
*key_manager.meta.lock() = meta.to_owned();
Ok(())
}
} }
impl VaultKeyFileManager { impl VaultKeyFileManager {
pub fn new(name: &str, key: VaultKey) -> Self { pub fn new(name: &str, key: VaultKey, meta: &str) -> Self {
VaultKeyFileManager { VaultKeyFileManager {
name: name.into(), name: name.into(),
key: key, key: key,
meta: Mutex::new(meta.to_owned()),
} }
} }
} }
@ -199,29 +217,37 @@ fn check_vault_name(name: &str) -> bool {
} }
/// Vault can be empty, but still must be pluggable => we store vault password in separate file /// Vault can be empty, but still must be pluggable => we store vault password in separate file
fn create_vault_file<P>(vault_dir_path: P, key: &VaultKey) -> Result<(), Error> where P: AsRef<Path> { fn create_vault_file<P>(vault_dir_path: P, key: &VaultKey, meta: &str) -> Result<(), Error> where P: AsRef<Path> {
let password_hash = key.password.sha3(); let password_hash = key.password.sha3();
let crypto = Crypto::with_plain(&password_hash, &key.password, key.iterations); let crypto = Crypto::with_plain(&password_hash, &key.password, key.iterations);
let mut vault_file_path: PathBuf = vault_dir_path.as_ref().into(); let mut vault_file_path: PathBuf = vault_dir_path.as_ref().into();
vault_file_path.push(VAULT_FILE_NAME); vault_file_path.push(VAULT_FILE_NAME);
let mut temp_vault_file_path: PathBuf = vault_dir_path.as_ref().into();
temp_vault_file_path.push(VAULT_TEMP_FILE_NAME);
let mut vault_file = fs::File::create(vault_file_path)?; // this method is used to rewrite existing vault file
// => write to temporary file first, then rename temporary file to vault file
let mut vault_file = fs::File::create(&temp_vault_file_path)?;
let vault_file_contents = json::VaultFile { let vault_file_contents = json::VaultFile {
crypto: crypto.into(), crypto: crypto.into(),
meta: Some(meta.to_owned()),
}; };
vault_file_contents.write(&mut vault_file).map_err(|e| Error::Custom(format!("{:?}", e)))?; vault_file_contents.write(&mut vault_file).map_err(|e| Error::Custom(format!("{:?}", e)))?;
drop(vault_file);
fs::rename(&temp_vault_file_path, &vault_file_path)?;
Ok(()) Ok(())
} }
/// When vault is opened => we must check that password matches /// When vault is opened => we must check that password matches && read metadata
fn check_vault_file<P>(vault_dir_path: P, key: &VaultKey) -> Result<(), Error> where P: AsRef<Path> { fn read_vault_file<P>(vault_dir_path: P, key: &VaultKey) -> Result<String, Error> where P: AsRef<Path> {
let mut vault_file_path: PathBuf = vault_dir_path.as_ref().into(); let mut vault_file_path: PathBuf = vault_dir_path.as_ref().into();
vault_file_path.push(VAULT_FILE_NAME); vault_file_path.push(VAULT_FILE_NAME);
let vault_file = fs::File::open(vault_file_path)?; let vault_file = fs::File::open(vault_file_path)?;
let vault_file_contents = json::VaultFile::load(vault_file).map_err(|e| Error::Custom(format!("{:?}", e)))?; let vault_file_contents = json::VaultFile::load(vault_file).map_err(|e| Error::Custom(format!("{:?}", e)))?;
let vault_file_meta = vault_file_contents.meta.unwrap_or("{}".to_owned());
let vault_file_crypto: Crypto = vault_file_contents.crypto.into(); let vault_file_crypto: Crypto = vault_file_contents.crypto.into();
let password_bytes = vault_file_crypto.decrypt(&key.password)?; let password_bytes = vault_file_crypto.decrypt(&key.password)?;
@ -230,7 +256,7 @@ fn check_vault_file<P>(vault_dir_path: P, key: &VaultKey) -> Result<(), Error> w
return Err(Error::InvalidPassword); return Err(Error::InvalidPassword);
} }
Ok(()) Ok(vault_file_meta)
} }
#[cfg(test)] #[cfg(test)]
@ -238,8 +264,8 @@ mod test {
use std::fs; use std::fs;
use std::io::Write; use std::io::Write;
use std::path::PathBuf; use std::path::PathBuf;
use dir::VaultKey; use dir::{VaultKey, VaultKeyDirectory};
use super::{VAULT_FILE_NAME, check_vault_name, make_vault_dir_path, create_vault_file, check_vault_file, VaultDiskDirectory}; use super::{VAULT_FILE_NAME, check_vault_name, make_vault_dir_path, create_vault_file, read_vault_file, VaultDiskDirectory};
use devtools::RandomTempPath; use devtools::RandomTempPath;
#[test] #[test]
@ -283,7 +309,7 @@ mod test {
fs::create_dir_all(&vault_dir).unwrap(); fs::create_dir_all(&vault_dir).unwrap();
// when // when
let result = create_vault_file(&vault_dir, &key); let result = create_vault_file(&vault_dir, &key, "{}");
// then // then
assert!(result.is_ok()); assert!(result.is_ok());
@ -293,7 +319,7 @@ mod test {
} }
#[test] #[test]
fn check_vault_file_succeeds() { fn read_vault_file_succeeds() {
// given // given
let temp_path = RandomTempPath::create_dir(); let temp_path = RandomTempPath::create_dir();
let key = VaultKey::new("password", 1024); let key = VaultKey::new("password", 1024);
@ -307,14 +333,14 @@ mod test {
} }
// when // when
let result = check_vault_file(&dir, &key); let result = read_vault_file(&dir, &key);
// then // then
assert!(result.is_ok()); assert!(result.is_ok());
} }
#[test] #[test]
fn check_vault_file_fails() { fn read_vault_file_fails() {
// given // given
let temp_path = RandomTempPath::create_dir(); let temp_path = RandomTempPath::create_dir();
let key = VaultKey::new("password1", 1024); let key = VaultKey::new("password1", 1024);
@ -323,7 +349,7 @@ mod test {
vault_file_path.push(VAULT_FILE_NAME); vault_file_path.push(VAULT_FILE_NAME);
// when // when
let result = check_vault_file(&dir, &key); let result = read_vault_file(&dir, &key);
// then // then
assert!(result.is_err()); assert!(result.is_err());
@ -336,7 +362,7 @@ mod test {
} }
// when // when
let result = check_vault_file(&dir, &key); let result = read_vault_file(&dir, &key);
// then // then
assert!(result.is_err()); assert!(result.is_err());
@ -392,4 +418,22 @@ mod test {
// then // then
assert!(vault.is_err()); assert!(vault.is_err());
} }
#[test]
fn vault_directory_can_preserve_meta() {
// given
let temp_path = RandomTempPath::new();
let key = VaultKey::new("password", 1024);
let dir: PathBuf = temp_path.as_path().into();
let vault = VaultDiskDirectory::create(&dir, "vault", key.clone()).unwrap();
// then
assert_eq!(vault.meta(), "{}".to_owned());
assert!(vault.set_meta("Hello, world!!!").is_ok());
assert_eq!(vault.meta(), "Hello, world!!!".to_owned());
// and when
let vault = VaultDiskDirectory::at(&dir, "vault", key.clone()).unwrap();
assert_eq!(vault.meta(), "Hello, world!!!".to_owned());
}
} }

View File

@ -107,6 +107,14 @@ impl SimpleSecretStore for EthStore {
fn change_account_vault(&self, vault: SecretVaultRef, account: StoreAccountRef) -> Result<StoreAccountRef, Error> { fn change_account_vault(&self, vault: SecretVaultRef, account: StoreAccountRef) -> Result<StoreAccountRef, Error> {
self.store.change_account_vault(vault, account) self.store.change_account_vault(vault, account)
} }
fn get_vault_meta(&self, name: &str) -> Result<String, Error> {
self.store.get_vault_meta(name)
}
fn set_vault_meta(&self, name: &str, meta: &str) -> Result<(), Error> {
self.store.set_vault_meta(name, meta)
}
} }
impl SecretStore for EthStore { impl SecretStore for EthStore {
@ -491,6 +499,20 @@ impl SimpleSecretStore for EthMultiStore {
self.reload_accounts()?; self.reload_accounts()?;
Ok(new_account_ref) Ok(new_account_ref)
} }
fn get_vault_meta(&self, name: &str) -> Result<String, Error> {
self.vaults.lock()
.get(name)
.ok_or(Error::VaultNotFound)
.and_then(|v| Ok(v.meta()))
}
fn set_vault_meta(&self, name: &str, meta: &str) -> Result<(), Error> {
self.vaults.lock()
.get(name)
.ok_or(Error::VaultNotFound)
.and_then(|v| v.set_meta(meta))
}
} }
#[cfg(test)] #[cfg(test)]

View File

@ -25,10 +25,13 @@ use super::Crypto;
pub struct VaultFile { pub struct VaultFile {
/// Vault password, encrypted with vault password /// Vault password, encrypted with vault password
pub crypto: Crypto, pub crypto: Crypto,
/// Vault metadata string
pub meta: Option<String>,
} }
enum VaultFileField { enum VaultFileField {
Crypto, Crypto,
Meta,
} }
impl Deserialize for VaultFileField { impl Deserialize for VaultFileField {
@ -49,6 +52,7 @@ impl Visitor for VaultFileFieldVisitor {
{ {
match value { match value {
"crypto" => Ok(VaultFileField::Crypto), "crypto" => Ok(VaultFileField::Crypto),
"meta" => Ok(VaultFileField::Meta),
_ => Err(Error::custom(format!("Unknown field: '{}'", value))), _ => Err(Error::custom(format!("Unknown field: '{}'", value))),
} }
} }
@ -58,7 +62,7 @@ impl Deserialize for VaultFile {
fn deserialize<D>(deserializer: &mut D) -> Result<VaultFile, D::Error> fn deserialize<D>(deserializer: &mut D) -> Result<VaultFile, D::Error>
where D: Deserializer where D: Deserializer
{ {
static FIELDS: &'static [&'static str] = &["crypto"]; static FIELDS: &'static [&'static str] = &["crypto", "meta"];
deserializer.deserialize_struct("VaultFile", FIELDS, VaultFileVisitor) deserializer.deserialize_struct("VaultFile", FIELDS, VaultFileVisitor)
} }
} }
@ -72,11 +76,13 @@ impl Visitor for VaultFileVisitor {
where V: MapVisitor where V: MapVisitor
{ {
let mut crypto = None; let mut crypto = None;
let mut meta = None;
loop { loop {
match visitor.visit_key()? { match visitor.visit_key()? {
Some(VaultFileField::Crypto) => { crypto = Some(visitor.visit_value()?); } Some(VaultFileField::Crypto) => { crypto = Some(visitor.visit_value()?); },
None => { break; } Some(VaultFileField::Meta) => { meta = Some(visitor.visit_value()?); }
None => { break; },
} }
} }
@ -89,6 +95,7 @@ impl Visitor for VaultFileVisitor {
let result = VaultFile { let result = VaultFile {
crypto: crypto, crypto: crypto,
meta: meta,
}; };
Ok(result) Ok(result)
@ -125,7 +132,8 @@ mod test {
salt: "b6a9338a7ccd39288a86dba73bfecd9101b4f3db9c9830e7c76afdbd4f6872e5".into(), salt: "b6a9338a7ccd39288a86dba73bfecd9101b4f3db9c9830e7c76afdbd4f6872e5".into(),
}), }),
mac: "16381463ea11c6eb2239a9f339c2e780516d29d234ce30ac5f166f9080b5a262".into(), mac: "16381463ea11c6eb2239a9f339c2e780516d29d234ce30ac5f166f9080b5a262".into(),
} },
meta: Some("{}".into()),
}; };
let serialized = serde_json::to_string(&file).unwrap(); let serialized = serde_json::to_string(&file).unwrap();

View File

@ -65,6 +65,10 @@ pub trait SimpleSecretStore: Send + Sync {
fn change_vault_password(&self, name: &str, new_password: &str) -> Result<(), Error>; fn change_vault_password(&self, name: &str, new_password: &str) -> Result<(), Error>;
/// Change account's vault /// Change account's vault
fn change_account_vault(&self, vault: SecretVaultRef, account: StoreAccountRef) -> Result<StoreAccountRef, Error>; fn change_account_vault(&self, vault: SecretVaultRef, account: StoreAccountRef) -> Result<StoreAccountRef, Error>;
/// Get vault metadata string.
fn get_vault_meta(&self, name: &str) -> Result<String, Error>;
/// Set vault metadata string.
fn set_vault_meta(&self, name: &str, meta: &str) -> Result<(), Error>;
} }
pub trait SecretStore: SimpleSecretStore { pub trait SecretStore: SimpleSecretStore {

View File

@ -1,6 +1,6 @@
{ {
"name": "parity.js", "name": "parity.js",
"version": "0.3.70", "version": "0.3.72",
"main": "release/index.js", "main": "release/index.js",
"jsnext:main": "src/index.js", "jsnext:main": "src/index.js",
"author": "Parity Team <admin@parity.io>", "author": "Parity Team <admin@parity.io>",

View File

@ -198,6 +198,9 @@ export default class CreateWalletStore {
.get() .get()
.registry .registry
.lookupAddress(walletLibraryRegKey) .lookupAddress(walletLibraryRegKey)
.catch(() => {
return null; // exception when registry is not available
})
.then((address) => { .then((address) => {
const walletLibraryAddress = (address || '').replace(/^0x/, '').toLowerCase(); const walletLibraryAddress = (address || '').replace(/^0x/, '').toLowerCase();
const code = walletLibraryAddress.length && !/^0+$/.test(walletLibraryAddress) const code = walletLibraryAddress.length && !/^0+$/.test(walletLibraryAddress)

View File

@ -217,8 +217,8 @@ export default class TypedInput extends Component {
renderEth () { renderEth () {
const { ethValue, isEth } = this.state; const { ethValue, isEth } = this.state;
const value = ethValue && typeof ethValue.toNumber === 'function' const value = ethValue && typeof ethValue.toFixed === 'function'
? ethValue.toNumber() ? ethValue.toFixed() // we need a string representation, could be >15 digits
: ethValue; : ethValue;
const input = isEth const input = isEth
@ -257,7 +257,7 @@ export default class TypedInput extends Component {
return readOnly return readOnly
? bnValue.toFormat() ? bnValue.toFormat()
: bnValue.toNumber(); : bnValue.toFixed(); // we need a string representation, could be >15 digits
} }
renderInteger (value = this.props.value, onChange = this.onChange) { renderInteger (value = this.props.value, onChange = this.onChange) {

View File

@ -180,7 +180,7 @@ export default class GasPriceEditor {
// NOTE fetching histogram may fail if there is not enough data. // NOTE fetching histogram may fail if there is not enough data.
// We fallback to empty histogram. // We fallback to empty histogram.
this._api.parity.gasPriceHistogram().catch(() => ({ this._api.parity.gasPriceHistogram().catch(() => ({
bucket_bounds: [], bucketBounds: [],
counts: [] counts: []
})), })),
this._api.eth.gasPrice(), this._api.eth.gasPrice(),

View File

@ -96,6 +96,7 @@ describe('ui/GasPriceEditor/Store', () => {
setImmediate(() => { setImmediate(() => {
expect(store.histogram).not.to.be.null; expect(store.histogram).not.to.be.null;
expect(store.histogram.bucketBounds).not.to.be.null;
done(); done();
}); });
}); });

View File

@ -249,8 +249,12 @@ pub fn execute<D: Dispatcher + 'static>(
.map(ConfirmationResponse::SignTransaction) .map(ConfirmationResponse::SignTransaction)
).boxed() ).boxed()
}, },
ConfirmationPayload::Signature(address, data) => { ConfirmationPayload::Signature(address, mut data) => {
let res = signature(accounts, address, data.sha3(), pass) let mut message_data =
format!("\x19Ethereum Signed Message:\n{}", data.len())
.into_bytes();
message_data.append(&mut data);
let res = signature(accounts, address, message_data.sha3(), pass)
.map(|result| result .map(|result| result
.map(|rsv| { .map(|rsv| {
let mut vrs = [0u8; 65]; let mut vrs = [0u8; 65];

View File

@ -248,6 +248,19 @@ impl ParityAccounts for ParityAccountsClient {
.map_err(|e| errors::account("Could not change vault.", e)) .map_err(|e| errors::account("Could not change vault.", e))
.map(|_| true) .map(|_| true)
} }
fn get_vault_meta(&self, name: String) -> Result<String, Error> {
take_weak!(self.accounts)
.get_vault_meta(&name)
.map_err(|e| errors::account("Could not get vault metadata.", e))
}
fn set_vault_meta(&self, name: String, meta: String) -> Result<bool, Error> {
take_weak!(self.accounts)
.set_vault_meta(&name, &meta)
.map_err(|e| errors::account("Could not update vault metadata.", e))
.map(|_| true)
}
} }
fn into_vec<A, B>(a: Vec<A>) -> Vec<B> where fn into_vec<A, B>(a: Vec<A>) -> Vec<B> where

View File

@ -313,7 +313,7 @@ fn rpc_eth_sign() {
], ],
"id": 1 "id": 1
}"#; }"#;
let res = r#"{"jsonrpc":"2.0","result":"0x1b5100b2be0aafd86271c8f49891262920bfbfeaeccb2ef1d0b2053aefc3ddb399483eb3c902ecf4add3156461a61f59e924a65eb5e6cdbab0a158d45db5f87cdf","id":1}"#; let res = r#"{"jsonrpc":"2.0","result":"0x1ba2870db1d0c26ef93c7b72d2a0830fa6b841e0593f7186bc6c7cc317af8cf3a42fda03bd589a49949aa05db83300cdb553116274518dbe9d90c65d0213f4af49","id":1}"#;
assert_eq!(tester.io.handle_request_sync(&req), Some(res.into())); assert_eq!(tester.io.handle_request_sync(&req), Some(res.into()));
} }

View File

@ -351,3 +351,27 @@ fn rpc_parity_list_opened_vaults() {
assert!(actual_response == Some(response1.to_owned()) assert!(actual_response == Some(response1.to_owned())
|| actual_response == Some(response2.to_owned())); || actual_response == Some(response2.to_owned()));
} }
#[test]
fn rpc_parity_get_set_vault_meta() {
let temp_path = RandomTempPath::new();
let tester = setup_with_vaults_support(temp_path.as_str());
assert!(tester.accounts.create_vault("vault1", "password1").is_ok());
assert!(tester.accounts.set_vault_meta("vault1", "vault1_meta").is_ok());
let request = r#"{"jsonrpc": "2.0", "method": "parity_getVaultMeta", "params":["vault1"], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":"vault1_meta","id":1}"#;
assert_eq!(tester.io.handle_request_sync(request), Some(response.to_owned()));
let request = r#"{"jsonrpc": "2.0", "method": "parity_setVaultMeta", "params":["vault1", "updated_vault1_meta"], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(tester.io.handle_request_sync(request), Some(response.to_owned()));
let request = r#"{"jsonrpc": "2.0", "method": "parity_getVaultMeta", "params":["vault1"], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":"updated_vault1_meta","id":1}"#;
assert_eq!(tester.io.handle_request_sync(request), Some(response.to_owned()));
}

View File

@ -212,7 +212,7 @@ fn should_sign_if_account_is_unlocked() {
], ],
"id": 1 "id": 1
}"#; }"#;
let response = r#"{"jsonrpc":"2.0","result":"0x1bb3062482b0687e9c97c7609ea60c1649959dbb334f71b3d5cacd496e0848ba8137bc765756627722389c6c39bc77700ccdc8916916a0eb03bcf5191d4f74dc65","id":1}"#; let response = r#"{"jsonrpc":"2.0","result":"0x1bdb53b32e56cf3e9735377b7664d6de5a03e125b1bf8ec55715d253668b4238503b4ac931fe6af90add73e72a585e952665376b2b9afc5b6b239b7df74c734e12","id":1}"#;
assert_eq!(tester.io.handle_request_sync(&request), Some(response.to_owned())); assert_eq!(tester.io.handle_request_sync(&request), Some(response.to_owned()));
assert_eq!(tester.signer.requests().len(), 0); assert_eq!(tester.signer.requests().len(), 0);
} }

View File

@ -133,5 +133,13 @@ build_rpc_trait! {
/// Change vault of the given address. /// Change vault of the given address.
#[rpc(name = "parity_changeVault")] #[rpc(name = "parity_changeVault")]
fn change_vault(&self, H160, String) -> Result<bool, Error>; fn change_vault(&self, H160, String) -> Result<bool, Error>;
/// Get vault metadata string.
#[rpc(name = "parity_getVaultMeta")]
fn get_vault_meta(&self, String) -> Result<String, Error>;
/// Set vault metadata string.
#[rpc(name = "parity_setVaultMeta")]
fn set_vault_meta(&self, String, String) -> Result<bool, Error>;
} }
} }

View File

@ -33,6 +33,7 @@ impl Histogram {
if corpus.len() < 1 { return None; } if corpus.len() < 1 { return None; }
let corpus_end = corpus.last().expect("there is at least 1 element; qed").clone(); let corpus_end = corpus.last().expect("there is at least 1 element; qed").clone();
let corpus_start = corpus.first().expect("there is at least 1 element; qed").clone(); let corpus_start = corpus.first().expect("there is at least 1 element; qed").clone();
trace!(target: "stats", "Computing histogram from {} to {} with {} buckets.", corpus_start, corpus_end, bucket_number);
// Bucket needs to be at least 1 wide. // Bucket needs to be at least 1 wide.
let bucket_size = { let bucket_size = {
// Round up to get the entire corpus included. // Round up to get the entire corpus included.