Merge branch 'master' into dapps-content
Conflicts:
	dapps/src/apps/fetcher.rs
	dapps/src/apps/urlhint.rs
	dapps/src/handlers/client/mod.rs
	dapps/src/handlers/fetch.rs
	dapps/src/lib.rs
	dapps/src/page/local.rs
Commit: 840b64b813
Cargo.lock (generated, 12 changed lines)
@@ -310,6 +310,7 @@ version = "1.4.0"
dependencies = [
"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
"ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0",
"ethcore-rpc 1.4.0",
"ethcore-util 1.4.0",
"https-fetch 0.1.0",
@@ -478,6 +479,7 @@ version = "1.4.0"
dependencies = [
"clippy 0.0.85 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0",
"ethcore-io 1.4.0",
"ethcore-rpc 1.4.0",
"ethcore-util 1.4.0",
@@ -1109,7 +1111,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "parity-dapps"
version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#e4dddf36e7c9fa5c6e746790119c71f67438784a"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc"
dependencies = [
"aster 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1123,7 +1125,7 @@ dependencies = [
[[package]]
name = "parity-dapps-home"
version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#e4dddf36e7c9fa5c6e746790119c71f67438784a"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc"
dependencies = [
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)",
]
@@ -1131,7 +1133,7 @@ dependencies = [
[[package]]
name = "parity-dapps-signer"
version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#e4dddf36e7c9fa5c6e746790119c71f67438784a"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc"
dependencies = [
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)",
]
@@ -1139,7 +1141,7 @@ dependencies = [
[[package]]
name = "parity-dapps-status"
version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#e4dddf36e7c9fa5c6e746790119c71f67438784a"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc"
dependencies = [
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)",
]
@@ -1147,7 +1149,7 @@ dependencies = [
[[package]]
name = "parity-dapps-wallet"
version = "1.4.0"
source = "git+https://github.com/ethcore/parity-ui.git#e4dddf36e7c9fa5c6e746790119c71f67438784a"
source = "git+https://github.com/ethcore/parity-ui.git#926b09b66c4940b09dc82c52adb4afd9e31155bc"
dependencies = [
"parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)",
]
@@ -23,6 +23,7 @@ serde_macros = { version = "0.8", optional = true }
zip = { version = "0.1", default-features = false }
ethabi = "0.2.2"
linked-hash-map = "0.3"
ethcore-devtools = { path = "../devtools" }
ethcore-rpc = { path = "../rpc" }
ethcore-util = { path = "../util" }
https-fetch = { path = "../util/https-fetch" }
@@ -60,7 +60,7 @@ impl ContentCache {
ContentStatus::Fetching(ref abort) => {
trace!(target: "dapps", "Aborting {} because of limit.", entry.0);
// Mark as aborted
abort.store(true, Ordering::Relaxed);
abort.store(true, Ordering::SeqCst);
},
ContentStatus::Ready(ref endpoint) => {
trace!(target: "dapps", "Removing {} because of limit.", entry.0);
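The Relaxed-to-SeqCst switch above concerns the shared abort flag the cache uses to cancel an in-flight fetch. A minimal, self-contained sketch of that handshake, assuming a worker that polls the flag between chunks of work; the names and the loop are illustrative, only the Arc<AtomicBool> store/load pattern comes from this diff:

use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;
use std::time::Duration;

fn main() {
	// Shared abort flag, as carried by ContentStatus::Fetching(ref abort).
	let abort = Arc::new(AtomicBool::new(false));

	let worker_abort = abort.clone();
	let worker = thread::spawn(move || {
		// A fetch loop that checks the flag between units of work.
		while !worker_abort.load(Ordering::SeqCst) {
			thread::sleep(Duration::from_millis(10));
		}
		"aborted"
	});

	// "Mark as aborted", as the cache does when evicting a fetching entry.
	abort.store(true, Ordering::SeqCst);
	assert_eq!(worker.join().unwrap(), "aborted");
}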
@@ -30,6 +30,7 @@ use hyper;
use hyper::status::StatusCode;

use random_filename;
use SyncStatus;
use util::{Mutex, H256};
use util::sha3::sha3;
use page::LocalPageEndpoint;
@@ -45,6 +46,7 @@ pub struct ContentFetcher<R: URLHint = URLHintContract> {
dapps_path: PathBuf,
resolver: R,
cache: Arc<Mutex<ContentCache>>,
sync: Arc<SyncStatus>,
}

impl<R: URLHint> Drop for ContentFetcher<R> {
@@ -56,13 +58,14 @@ impl<R: URLHint> Drop for ContentFetcher<R> {

impl<R: URLHint> ContentFetcher<R> {

pub fn new(resolver: R) -> Self {
pub fn new(resolver: R, sync_status: Arc<SyncStatus>) -> Self {
let mut dapps_path = env::temp_dir();
dapps_path.push(random_filename());

ContentFetcher {
dapps_path: dapps_path,
resolver: resolver,
sync: sync_status,
cache: Arc::new(Mutex::new(ContentCache::default())),
}
}
@@ -74,14 +77,20 @@ impl<R: URLHint> ContentFetcher<R> {

pub fn contains(&self, content_id: &str) -> bool {
let mut cache = self.cache.lock();
match cache.get(content_id) {
// Check if we already have the app
Some(_) => true,
if cache.get(content_id).is_some() {
return true;
}
// fallback to resolver
None => match content_id.from_hex() {
Ok(content_id) => self.resolver.resolve(content_id).is_some(),
_ => false,
},
if let Ok(content_id) = content_id.from_hex() {
// if app_id is valid, but we are syncing always return true.
if self.sync.is_major_syncing() {
return true;
}
// else try to resolve the app_id
self.resolver.resolve(content_id).is_some()
} else {
false
}
}

@@ -89,6 +98,15 @@ impl<R: URLHint> ContentFetcher<R> {
let mut cache = self.cache.lock();
let content_id = path.app_id.clone();

if self.sync.is_major_syncing() {
return Box::new(ContentHandler::error(
StatusCode::ServiceUnavailable,
"Sync In Progress",
"Your node is still syncing. We cannot resolve any content before it's fully synced.",
Some("<a href=\"javascript:window.location.reload()\">Refresh</a>")
));
}

let (new_status, handler) = {
let status = cache.get(&content_id);
match status {
@@ -98,23 +116,23 @@ impl<R: URLHint> ContentFetcher<R> {
},
// App is already being fetched
Some(&mut ContentStatus::Fetching(_)) => {
(None, Box::new(ContentHandler::html(
(None, Box::new(ContentHandler::error_with_refresh(
StatusCode::ServiceUnavailable,
format!(
"<html><head>{}</head><body>{}</body></html>",
"<meta http-equiv=\"refresh\" content=\"1\">",
"<h1>This dapp is already being downloaded.</h1><h2>Please wait...</h2>",
)
"Download In Progress",
"This dapp is already being downloaded. Please wait...",
None,
)) as Box<Handler>)
},
// We need to start fetching app
None => {
let app_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
let app = self.resolver.resolve(app_hex).expect("to_handler is called only when `contains` returns true.");
let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
let content = self.resolver.resolve(content_hex);
let abort = Arc::new(AtomicBool::new(false));

(Some(ContentStatus::Fetching(abort.clone())), match app {
URLHintResult::Dapp(dapp) => Box::new(ContentFetcherHandler::new(
match content {
Some(URLHintResult::Dapp(dapp)) => (
Some(ContentStatus::Fetching(abort.clone())),
Box::new(ContentFetcherHandler::new(
dapp.url(),
abort,
control,
@@ -123,9 +141,11 @@ impl<R: URLHint> ContentFetcher<R> {
id: content_id.clone(),
dapps_path: self.dapps_path.clone(),
cache: self.cache.clone(),
}
)) as Box<Handler>,
URLHintResult::Content(content) => Box::new(ContentFetcherHandler::new(
})) as Box<Handler>
),
Some(URLHintResult::Content(content)) => (
Some(ContentStatus::Fetching(abort.clone())),
Box::new(ContentFetcherHandler::new(
content.url,
abort,
control,
@@ -137,7 +157,18 @@ impl<R: URLHint> ContentFetcher<R> {
cache: self.cache.clone(),
}
)) as Box<Handler>,
})
),
None => {
// This may happen when sync status changes in between
// `contains` and `to_handler`
(None, Box::new(ContentHandler::error(
StatusCode::NotFound,
"Resource Not Found",
"Requested resource was not found.",
None
)) as Box<Handler>)
},
}
},
}
};
@@ -155,7 +186,7 @@ impl<R: URLHint> ContentFetcher<R> {
pub enum ValidationError {
Io(io::Error),
Zip(zip::result::ZipError),
InvalidDappId,
InvalidContentId,
ManifestNotFound,
ManifestSerialization(String),
HashMismatch { expected: H256, got: H256, },
@@ -166,7 +197,7 @@ impl fmt::Display for ValidationError {
match *self {
ValidationError::Io(ref io) => write!(f, "Unexpected IO error occured: {:?}", io),
ValidationError::Zip(ref zip) => write!(f, "Unable to read ZIP archive: {:?}", zip),
ValidationError::InvalidDappId => write!(f, "Dapp ID is invalid. It should be 32 bytes hash of content."),
ValidationError::InvalidContentId => write!(f, "ID is invalid. It should be 26 bits keccak hash of content."),
ValidationError::ManifestNotFound => write!(f, "Downloaded Dapp bundle did not contain valid manifest.json file."),
ValidationError::ManifestSerialization(ref err) => {
write!(f, "There was an error during Dapp Manifest serialization: {:?}", err)
@@ -277,7 +308,7 @@ impl ContentValidator for DappInstaller {
trace!(target: "dapps", "Opening dapp bundle at {:?}", app_path);
let mut file_reader = io::BufReader::new(try!(fs::File::open(app_path)));
let hash = try!(sha3(&mut file_reader));
let id = try!(self.id.as_str().parse().map_err(|_| ValidationError::InvalidDappId));
let id = try!(self.id.as_str().parse().map_err(|_| ValidationError::InvalidContentId));
if id != hash {
return Err(ValidationError::HashMismatch {
expected: id,
@@ -350,6 +381,7 @@ impl ContentValidator for DappInstaller {
#[cfg(test)]
mod tests {
use std::env;
use std::sync::Arc;
use util::Bytes;
use endpoint::EndpointInfo;
use page::LocalPageEndpoint;
@@ -368,7 +400,7 @@ mod tests {
fn should_true_if_contains_the_app() {
// given
let path = env::temp_dir();
let fetcher = ContentFetcher::new(FakeResolver);
let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false));
let handler = LocalPageEndpoint::new(path, EndpointInfo {
name: "fake".into(),
description: "".into(),
dapps/src/error_tpl.html (new file, 22 lines)
@@ -0,0 +1,22 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width">
{meta}
<title>{title}</title>
<link rel="stylesheet" href="/parity-utils/styles.css">
</head>
<body>
<div class="parity-navbar">
</div>
<div class="parity-box">
<h1>{title}</h1>
<h3>{message}</h3>
<p><code>{details}</code></p>
</div>
<div class="parity-status">
<small>{version}</small>
</div>
</body>
</html>
@@ -94,7 +94,7 @@ impl Fetch {

impl Fetch {
fn is_aborted(&self) -> bool {
self.abort.load(Ordering::Relaxed)
self.abort.load(Ordering::SeqCst)
}
fn mark_aborted(&mut self) -> Next {
self.result = Some(Err(Error::Aborted.into()));
@@ -21,6 +21,8 @@ use hyper::{header, server, Decoder, Encoder, Next};
use hyper::net::HttpStream;
use hyper::status::StatusCode;

use util::version;

pub struct ContentHandler {
code: StatusCode,
content: String,
@@ -38,15 +40,6 @@ impl ContentHandler {
}
}

pub fn forbidden(content: String, mimetype: String) -> Self {
ContentHandler {
code: StatusCode::Forbidden,
content: content,
mimetype: mimetype,
write_pos: 0
}
}

pub fn not_found(content: String, mimetype: String) -> Self {
ContentHandler {
code: StatusCode::NotFound,
@@ -60,6 +53,28 @@ impl ContentHandler {
Self::new(code, content, "text/html".into())
}

pub fn error(code: StatusCode, title: &str, message: &str, details: Option<&str>) -> Self {
Self::html(code, format!(
include_str!("../error_tpl.html"),
title=title,
meta="",
message=message,
details=details.unwrap_or_else(|| ""),
version=version(),
))
}

pub fn error_with_refresh(code: StatusCode, title: &str, message: &str, details: Option<&str>) -> Self {
Self::html(code, format!(
include_str!("../error_tpl.html"),
title=title,
meta="<meta http-equiv=\"refresh\" content=\"1\">",
message=message,
details=details.unwrap_or_else(|| ""),
version=version(),
))
}

pub fn new(code: StatusCode, content: String, mimetype: String) -> Self {
ContentHandler {
code: code,
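The error and error_with_refresh constructors above work by letting format! fill the named placeholders of error_tpl.html ({meta}, {title}, {message}, {details}, {version}) with the template pulled in at compile time via include_str!. A minimal, runnable sketch of that mechanism; the inline template string and the hard-coded version are stand-ins for the real file and util::version():

fn render_error_page(title: &str, message: &str, details: Option<&str>) -> String {
	// Stand-in template with the same named placeholders as error_tpl.html.
	format!(
		"<html><head>{meta}<title>{title}</title></head>\
		 <body><h1>{title}</h1><h3>{message}</h3><p><code>{details}</code></p>\
		 <small>{version}</small></body></html>",
		meta = "",
		title = title,
		message = message,
		details = details.unwrap_or(""),
		version = "1.4.0", // stand-in for util::version()
	)
}

fn main() {
	println!("{}", render_error_page("Sync In Progress", "Your node is still syncing.", None));
}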
@@ -16,7 +16,7 @@

//! Hyper Server Handler that fetches a file during a request (proxy).

use std::{fs, fmt};
use std::fmt;
use std::path::PathBuf;
use std::sync::{mpsc, Arc};
use std::sync::atomic::AtomicBool;
@@ -120,16 +120,20 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
deadline: Instant::now() + Duration::from_secs(FETCH_TIMEOUT),
receiver: receiver,
},
Err(e) => FetchState::Error(ContentHandler::html(
Err(e) => FetchState::Error(ContentHandler::error(
StatusCode::BadGateway,
format!("<h1>Error starting dapp download.</h1><pre>{}</pre>", e),
"Unable To Start Dapp Download",
"Could not initialize download of the dapp. It might be a problem with the remote server.",
Some(&format!("{}", e)),
)),
}
},
// or return error
_ => FetchState::Error(ContentHandler::html(
_ => FetchState::Error(ContentHandler::error(
StatusCode::MethodNotAllowed,
"<h1>Only <code>GET</code> requests are allowed.</h1>".into(),
"Method Not Allowed",
"Only <code>GET</code> requests are allowed.",
None,
)),
})
} else { None };
@@ -146,9 +150,11 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
// Request may time out
FetchState::InProgress { ref deadline, .. } if *deadline < Instant::now() => {
trace!(target: "dapps", "Fetching dapp failed because of timeout.");
let timeout = ContentHandler::html(
let timeout = ContentHandler::error(
StatusCode::GatewayTimeout,
format!("<h1>Could not fetch app bundle within {} seconds.</h1>", FETCH_TIMEOUT),
"Download Timeout",
&format!("Could not fetch content within {} seconds.", FETCH_TIMEOUT),
None
);
Self::close_client(&mut self.client);
(Some(FetchState::Error(timeout)), Next::write())
@@ -159,28 +165,33 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
match rec {
// Unpack and validate
Ok(Ok(path)) => {
trace!(target: "dapps", "Fetching dapp finished. Starting validation.");
trace!(target: "dapps", "Fetching content finished. Starting validation.");
Self::close_client(&mut self.client);
// Unpack and verify
let state = match self.installer.validate_and_install(path.clone()) {
Err(e) => {
trace!(target: "dapps", "Error while validating dapp: {:?}", e);
FetchState::Error(ContentHandler::html(
trace!(target: "dapps", "Error while validating content: {:?}", e);
FetchState::Error(ContentHandler::error(
StatusCode::BadGateway,
format!("<h1>Downloaded bundle does not contain valid app.</h1><pre>{}</pre>", e),
"Invalid Dapp",
"Downloaded bundle does not contain a valid content.",
Some(&format!("{:?}", e))
))
},
Ok(result) => FetchState::Done(result)
};
// Remove temporary zip file
let _ = fs::remove_file(path);
// TODO [todr] Uncomment me
// let _ = fs::remove_file(path);
(Some(state), Next::write())
},
Ok(Err(e)) => {
warn!(target: "dapps", "Unable to fetch content: {:?}", e);
let error = ContentHandler::html(
let error = ContentHandler::error(
StatusCode::BadGateway,
"<h1>There was an error when fetching the dapp.</h1>".into(),
"Download Error",
"There was an error when fetching the content.",
Some(&format!("{:?}", e)),
);
(Some(FetchState::Error(error)), Next::write())
},
@@ -62,6 +62,8 @@ extern crate https_fetch;
extern crate ethcore_rpc;
extern crate ethcore_util as util;
extern crate linked_hash_map;
#[cfg(test)]
extern crate ethcore_devtools as devtools;

mod endpoint;
mod apps;
@@ -87,11 +89,22 @@ use ethcore_rpc::Extendable;

static DAPPS_DOMAIN : &'static str = ".parity";

/// Indicates sync status
pub trait SyncStatus: Send + Sync {
/// Returns true if there is a major sync happening.
fn is_major_syncing(&self) -> bool;
}

impl<F> SyncStatus for F where F: Fn() -> bool + Send + Sync {
fn is_major_syncing(&self) -> bool { self() }
}

/// Webapps HTTP+RPC server build.
pub struct ServerBuilder {
dapps_path: String,
handler: Arc<IoHandler>,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
}

impl Extendable for ServerBuilder {
@@ -107,9 +120,15 @@ impl ServerBuilder {
dapps_path: dapps_path,
handler: Arc::new(IoHandler::new()),
registrar: registrar,
sync_status: Arc::new(|| false),
}
}

/// Change default sync status.
pub fn with_sync_status(&mut self, status: Arc<SyncStatus>) {
self.sync_status = status;
}

/// Asynchronously start server with no authentication,
/// returns result with `Server` handle on success or an error.
pub fn start_unsecured_http(&self, addr: &SocketAddr, hosts: Option<Vec<String>>) -> Result<Server, ServerError> {
@@ -119,7 +138,8 @@ impl ServerBuilder {
NoAuth,
self.handler.clone(),
self.dapps_path.clone(),
self.registrar.clone()
self.registrar.clone(),
self.sync_status.clone(),
)
}

@@ -132,7 +152,8 @@ impl ServerBuilder {
HttpBasicAuth::single_user(username, password),
self.handler.clone(),
self.dapps_path.clone(),
self.registrar.clone()
self.registrar.clone(),
self.sync_status.clone(),
)
}
}
@@ -166,10 +187,11 @@ impl Server {
handler: Arc<IoHandler>,
dapps_path: String,
registrar: Arc<ContractClient>,
sync_status: Arc<SyncStatus>,
) -> Result<Server, ServerError> {
let panic_handler = Arc::new(Mutex::new(None));
let authorization = Arc::new(authorization);
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(apps::urlhint::URLHintContract::new(registrar)));
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status));
let endpoints = Arc::new(apps::all_endpoints(dapps_path));
let special = Arc::new({
let mut special = HashMap::new();
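The new SyncStatus trait together with the blanket impl for Fn() -> bool means any Send + Sync closure can be handed to ServerBuilder::with_sync_status (the builder defaults to Arc::new(|| false)). A self-contained sketch of just that mechanism: the trait and blanket impl are copied from the hunk above, while the AtomicBool-backed closure is an illustrative stand-in for a real sync handle.

use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};

// Shape of the trait added in dapps/src/lib.rs.
pub trait SyncStatus: Send + Sync {
	fn is_major_syncing(&self) -> bool;
}

// Blanket impl from the diff: any Send + Sync closure returning bool qualifies.
impl<F> SyncStatus for F where F: Fn() -> bool + Send + Sync {
	fn is_major_syncing(&self) -> bool { self() }
}

fn main() {
	let syncing = Arc::new(AtomicBool::new(true));
	let flag = syncing.clone();
	// This is the kind of value ServerBuilder::with_sync_status expects.
	let status: Arc<dyn SyncStatus> = Arc::new(move || flag.load(Ordering::SeqCst));
	assert!(status.is_major_syncing());
	syncing.store(false, Ordering::SeqCst);
	assert!(!status.is_major_syncing());
}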
@@ -79,7 +79,7 @@ impl<T: WebApp> Endpoint for PageEndpoint<T> {
app: BuiltinDapp::new(self.app.clone()),
prefix: self.prefix.clone(),
path: path,
file: None,
file: Default::default(),
safe_to_embed: self.safe_to_embed,
})
}
@@ -22,6 +22,7 @@ use hyper::net::HttpStream;
use hyper::status::StatusCode;
use hyper::{Decoder, Encoder, Next};
use endpoint::EndpointPath;
use handlers::ContentHandler;

/// Represents a file that can be sent to client.
/// Implementation should keep track of bytes already sent internally.
@@ -48,6 +49,25 @@ pub trait Dapp: Send + 'static {
fn file(&self, path: &str) -> Option<Self::DappFile>;
}

/// Currently served by `PageHandler` file
pub enum ServedFile<T: Dapp> {
/// File from dapp
File(T::DappFile),
/// Error (404)
Error(ContentHandler),
}

impl<T: Dapp> Default for ServedFile<T> {
fn default() -> Self {
ServedFile::Error(ContentHandler::error(
StatusCode::NotFound,
"404 Not Found",
"Requested dapp resource was not found.",
None
))
}
}

/// A handler for a single webapp.
/// Resolves correct paths and serves as a plumbing code between
/// hyper server and dapp.
@@ -55,7 +75,7 @@ pub struct PageHandler<T: Dapp> {
/// A Dapp.
pub app: T,
/// File currently being served (or `None` if file does not exist).
pub file: Option<T::DappFile>,
pub file: ServedFile<T>,
/// Optional prefix to strip from path.
pub prefix: Option<String>,
/// Requested path.
@@ -95,7 +115,7 @@ impl<T: Dapp> server::Handler<HttpStream> for PageHandler<T> {
self.app.file(&self.extract_path(url.path()))
},
_ => None,
};
}.map_or_else(|| ServedFile::default(), |f| ServedFile::File(f));
Next::write()
}

@@ -104,24 +124,26 @@ impl<T: Dapp> server::Handler<HttpStream> for PageHandler<T> {
}

fn on_response(&mut self, res: &mut server::Response) -> Next {
if let Some(ref f) = self.file {
match self.file {
ServedFile::File(ref f) => {
res.set_status(StatusCode::Ok);
res.headers_mut().set(header::ContentType(f.content_type().parse().unwrap()));
if !self.safe_to_embed {
res.headers_mut().set_raw("X-Frame-Options", vec![b"SAMEORIGIN".to_vec()]);
}
Next::write()
} else {
res.set_status(StatusCode::NotFound);
Next::write()
},
ServedFile::Error(ref mut handler) => {
handler.on_response(res)
}
}
}

fn on_response_writable(&mut self, encoder: &mut Encoder<HttpStream>) -> Next {
match self.file {
None => Next::end(),
Some(ref f) if f.is_drained() => Next::end(),
Some(ref mut f) => match encoder.write(f.next_chunk()) {
ServedFile::Error(ref mut handler) => handler.on_response_writable(encoder),
ServedFile::File(ref f) if f.is_drained() => Next::end(),
ServedFile::File(ref mut f) => match encoder.write(f.next_chunk()) {
Ok(bytes) => {
f.bytes_written(bytes);
Next::write()
@@ -190,7 +212,7 @@ fn should_extract_path_with_appid() {
port: 8080,
using_dapps_domains: true,
},
file: None,
file: Default::default(),
safe_to_embed: true,
};
@@ -60,7 +60,7 @@ impl Endpoint for LocalPageEndpoint {
app: LocalSingleFile { path: self.path.clone(), mime: mime.clone() },
prefix: None,
path: path,
file: None,
file: Default::default(),
safe_to_embed: false,
})
} else {
@@ -68,7 +68,7 @@ impl Endpoint for LocalPageEndpoint {
app: LocalDapp { path: self.path.clone() },
prefix: None,
path: path,
file: None,
file: Default::default(),
safe_to_embed: false,
})
}
@@ -55,10 +55,11 @@ impl Authorization for HttpBasicAuth {

match auth {
Access::Denied => {
Authorized::No(Box::new(ContentHandler::new(
Authorized::No(Box::new(ContentHandler::error(
status::StatusCode::Unauthorized,
"<h1>Unauthorized</h1>".into(),
"text/html".into(),
"Unauthorized",
"You need to provide valid credentials to access this page.",
None
)))
},
Access::AuthRequired => {
@@ -16,7 +16,7 @@

use DAPPS_DOMAIN;
use hyper::{server, header};
use hyper::{server, header, StatusCode};
use hyper::net::HttpStream;

use jsonrpc_http_server::{is_host_header_valid};
@@ -38,11 +38,9 @@ pub fn is_valid(request: &server::Request<HttpStream>, allowed_hosts: &[String],
}

pub fn host_invalid_response() -> Box<server::Handler<HttpStream> + Send> {
Box::new(ContentHandler::forbidden(
r#"
<h1>Request with disallowed <code>Host</code> header has been blocked.</h1>
<p>Check the URL in your browser address bar.</p>
"#.into(),
"text/html".into()
Box::new(ContentHandler::error(StatusCode::Forbidden,
"Current Host Is Disallowed",
"You are trying to access your node using incorrect address.",
Some("Use allowed URL or specify different <code>hosts</code> CLI options.")
))
}
@@ -24,12 +24,12 @@ use DAPPS_DOMAIN;
use std::sync::Arc;
use std::collections::HashMap;
use url::{Url, Host};
use hyper::{self, server, Next, Encoder, Decoder, Control};
use hyper::{self, server, Next, Encoder, Decoder, Control, StatusCode};
use hyper::net::HttpStream;
use apps;
use apps::fetcher::ContentFetcher;
use endpoint::{Endpoint, Endpoints, EndpointPath};
use handlers::{Redirection, extract_url};
use handlers::{Redirection, extract_url, ContentHandler};
use self::auth::{Authorization, Authorized};

/// Special endpoints are accessible on every domain (every dapp)
@@ -55,9 +55,16 @@ pub struct Router<A: Authorization + 'static> {
impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {

fn on_request(&mut self, req: server::Request<HttpStream>) -> Next {

// Choose proper handler depending on path / domain
let url = extract_url(&req);
let endpoint = extract_endpoint(&url);
let is_utils = endpoint.1 == SpecialEndpoint::Utils;

// Validate Host header
if let Some(ref hosts) = self.allowed_hosts {
if !host_validation::is_valid(&req, hosts, self.endpoints.keys().cloned().collect()) {
let is_valid = is_utils || host_validation::is_valid(&req, hosts, self.endpoints.keys().cloned().collect());
if !is_valid {
self.handler = host_validation::host_invalid_response();
return self.handler.on_request(req);
}
@@ -70,11 +77,7 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
return self.handler.on_request(req);
}

// Choose proper handler depending on path / domain
let url = extract_url(&req);
let endpoint = extract_endpoint(&url);
let control = self.control.take().expect("on_request is called only once; control is always defined at start; qed");

self.handler = match endpoint {
// First check special endpoints
(ref path, ref endpoint) if self.special.contains_key(endpoint) => {
@@ -91,7 +94,12 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
// Redirection to main page (maybe 404 instead?)
(Some(ref path), _) if *req.method() == hyper::method::Method::Get => {
let address = apps::redirection_address(path.using_dapps_domains, self.main_page);
Redirection::new(address.as_str())
Box::new(ContentHandler::error(
StatusCode::NotFound,
"404 Not Found",
"Requested content was not found.",
Some(&format!("Go back to the <a href=\"{}\">Home Page</a>.", address))
))
},
// Redirect any GET request to home.
_ if *req.method() == hyper::method::Method::Get => {
@@ -57,7 +57,7 @@ fn should_serve_apps() {
// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
assert_eq!(response.headers.get(0).unwrap(), "Content-Type: application/json");
assert!(response.body.contains("Parity Home Screen"));
assert!(response.body.contains("Parity Home Screen"), response.body);
}

#[test]
@@ -54,7 +54,7 @@ fn should_reject_on_invalid_auth() {

// then
assert_eq!(response.status, "HTTP/1.1 401 Unauthorized".to_owned());
assert_eq!(response.body, "15\n<h1>Unauthorized</h1>\n0\n\n".to_owned());
assert!(response.body.contains("Unauthorized"), response.body);
assert_eq!(response.headers_raw.contains("WWW-Authenticate"), false);
}
dapps/src/tests/fetch.rs (new file, 38 lines)
@@ -0,0 +1,38 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use tests::helpers::{serve_with_registrar, request};

#[test]
fn should_resolve_dapp() {
// given
let (server, registrar) = serve_with_registrar();

// when
let response = request(server,
"\
GET / HTTP/1.1\r\n\
Host: 1472a9e190620cdf6b31f383373e45efcfe869a820c91f9ccd7eb9fb45e4985d.parity\r\n\
Connection: close\r\n\
\r\n\
"
);

// then
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert_eq!(registrar.calls.lock().len(), 2);
}
@@ -15,16 +15,15 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use std::env;
use std::io::{Read, Write};
use std::str::{self, Lines};
use std::str;
use std::sync::Arc;
use std::net::TcpStream;
use rustc_serialize::hex::{ToHex, FromHex};

use ServerBuilder;
use Server;
use apps::urlhint::ContractClient;
use util::{Bytes, Address, Mutex, ToPretty};
use devtools::http_client;

const REGISTRAR: &'static str = "8e4e9b13d4b45cb0befc93c3061b1408f67316b2";
const URLHINT: &'static str = "deadbeefcafe0000000000000000000000000000";
@@ -59,65 +58,37 @@ impl ContractClient for FakeRegistrar {
}
}

pub fn serve_hosts(hosts: Option<Vec<String>>) -> Server {
pub fn init_server(hosts: Option<Vec<String>>) -> (Server, Arc<FakeRegistrar>) {
let registrar = Arc::new(FakeRegistrar::new());
let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar);
builder.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), hosts).unwrap()
let builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar.clone());
(
builder.start_unsecured_http(&"127.0.0.1:0".parse().unwrap(), hosts).unwrap(),
registrar,
)
}

pub fn serve_with_auth(user: &str, pass: &str) -> Server {
let registrar = Arc::new(FakeRegistrar::new());
let builder = ServerBuilder::new(env::temp_dir().to_str().unwrap().into(), registrar);
let mut dapps_path = env::temp_dir();
dapps_path.push("non-existent-dir-to-prevent-fs-files-from-loading");
let builder = ServerBuilder::new(dapps_path.to_str().unwrap().into(), registrar);
builder.start_basic_auth_http(&"127.0.0.1:0".parse().unwrap(), None, user, pass).unwrap()
}

pub fn serve_hosts(hosts: Option<Vec<String>>) -> Server {
init_server(hosts).0
}

pub fn serve_with_registrar() -> (Server, Arc<FakeRegistrar>) {
init_server(None)
}

pub fn serve() -> Server {
serve_hosts(None)
init_server(None).0
}

pub struct Response {
pub status: String,
pub headers: Vec<String>,
pub headers_raw: String,
pub body: String,
pub fn request(server: Server, request: &str) -> http_client::Response {
http_client::request(server.addr(), request)
}

pub fn read_block(lines: &mut Lines, all: bool) -> String {
let mut block = String::new();
loop {
let line = lines.next();
match line {
None => break,
Some("") if !all => break,
Some(v) => {
block.push_str(v);
block.push_str("\n");
},
}
}
block
}

pub fn request(server: Server, request: &str) -> Response {
let mut req = TcpStream::connect(server.addr()).unwrap();
req.write_all(request.as_bytes()).unwrap();

let mut response = String::new();
req.read_to_string(&mut response).unwrap();

let mut lines = response.lines();
let status = lines.next().unwrap().to_owned();
let headers_raw = read_block(&mut lines, false);
let headers = headers_raw.split('\n').map(|v| v.to_owned()).collect();
let body = read_block(&mut lines, true);

Response {
status: status,
headers: headers,
headers_raw: headers_raw,
body: body,
}
}
@@ -20,6 +20,7 @@ mod helpers;

mod api;
mod authorization;
mod fetch;
mod redirection;
mod validation;
@@ -57,7 +57,7 @@ fn should_redirect_to_home_when_trailing_slash_is_missing() {
}

#[test]
fn should_redirect_to_home_on_invalid_dapp() {
fn should_display_404_on_invalid_dapp() {
// given
let server = serve();

@@ -72,12 +72,12 @@ fn should_redirect_to_home_on_invalid_dapp() {
);

// then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned());
assert_eq!(response.headers.get(0).unwrap(), "Location: /home/");
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert!(response.body.contains("href=\"/home/"));
}

#[test]
fn should_redirect_to_home_on_invalid_dapp_with_domain() {
fn should_display_404_on_invalid_dapp_with_domain() {
// given
let server = serve();

@@ -92,8 +92,8 @@ fn should_redirect_to_home_on_invalid_dapp_with_domain() {
);

// then
assert_eq!(response.status, "HTTP/1.1 302 Found".to_owned());
assert_eq!(response.headers.get(0).unwrap(), "Location: http://home.parity/");
assert_eq!(response.status, "HTTP/1.1 404 Not Found".to_owned());
assert!(response.body.contains("href=\"http://home.parity/"));
}

#[test]
@@ -34,7 +34,7 @@ fn should_reject_invalid_host() {

// then
assert_eq!(response.status, "HTTP/1.1 403 Forbidden".to_owned());
assert_eq!(response.body, "85\n\n\t\t<h1>Request with disallowed <code>Host</code> header has been blocked.</h1>\n\t\t<p>Check the URL in your browser address bar.</p>\n\t\t\n0\n\n".to_owned());
assert!(response.body.contains("Current Host Is Disallowed"), response.body);
}

#[test]
@@ -77,3 +77,24 @@ fn should_serve_dapps_domains() {
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
}

#[test]
// NOTE [todr] This is required for error pages to be styled properly.
fn should_allow_parity_utils_even_on_invalid_domain() {
// given
let server = serve_hosts(Some(vec!["localhost:8080".into()]));

// when
let response = request(server,
"\
GET /parity-utils/styles.css HTTP/1.1\r\n\
Host: 127.0.0.1:8080\r\n\
Connection: close\r\n\
\r\n\
{}
"
);

// then
assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
}
devtools/src/http_client.rs (new file, 64 lines)
@@ -0,0 +1,64 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use std::io::{Read, Write};
use std::str::{self, Lines};
use std::net::{TcpStream, SocketAddr};

pub struct Response {
pub status: String,
pub headers: Vec<String>,
pub headers_raw: String,
pub body: String,
}

pub fn read_block(lines: &mut Lines, all: bool) -> String {
let mut block = String::new();
loop {
let line = lines.next();
match line {
None => break,
Some("") if !all => break,
Some(v) => {
block.push_str(v);
block.push_str("\n");
},
}
}
block
}

pub fn request(address: &SocketAddr, request: &str) -> Response {
let mut req = TcpStream::connect(address).unwrap();
req.write_all(request.as_bytes()).unwrap();

let mut response = String::new();
req.read_to_string(&mut response).unwrap();

let mut lines = response.lines();
let status = lines.next().unwrap().to_owned();
let headers_raw = read_block(&mut lines, false);
let headers = headers_raw.split('\n').map(|v| v.to_owned()).collect();
let body = read_block(&mut lines, true);

Response {
status: status,
headers: headers,
headers_raw: headers_raw,
body: body,
}
}
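A hypothetical test-side use of the new devtools::http_client helper. The address, raw request string, and assertions are illustrative; only the request function and the Response fields come from the file above, and the function assumes a dapps server is already listening on the given address.

use devtools::http_client;
use std::net::SocketAddr;

fn assert_serves_home(addr: &SocketAddr) {
	let response = http_client::request(addr,
		"GET / HTTP/1.1\r\n\
		 Host: 127.0.0.1:8080\r\n\
		 Connection: close\r\n\
		 \r\n"
	);
	// Response exposes the parsed status line, headers and body separately.
	assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
	assert!(!response.body.is_empty());
}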
@@ -22,6 +22,7 @@ extern crate rand;
mod random_path;
mod test_socket;
mod stop_guard;
pub mod http_client;

pub use random_path::*;
pub use test_socket::*;
@@ -16,11 +16,13 @@

//! Evm interface.

use common::*;
use std::{ops, cmp, fmt};
use util::{U128, U256, U512, Uint};
use action_params::ActionParams;
use evm::Ext;

/// Evm errors.
#[derive(Debug)]
#[derive(Debug, Clone, Copy)]
pub enum Error {
/// `OutOfGas` is returned when transaction execution runs out of gas.
/// The state should be reverted to the state from before the
@@ -63,6 +65,21 @@ pub enum Error {
Internal,
}

impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::Error::*;
let message = match *self {
OutOfGas => "Out of gas",
BadJumpDestination { .. } => "Bad jump destination",
BadInstruction { .. } => "Bad instruction",
StackUnderflow { .. } => "Stack underflow",
OutOfStack { .. } => "Out of stack",
Internal => "Internal error",
};
message.fmt(f)
}
}

/// A specialized version of Result over EVM errors.
pub type Result<T> = ::std::result::Result<T, Error>;

@@ -193,10 +210,13 @@ pub trait Evm {
fn exec(&mut self, params: ActionParams, ext: &mut Ext) -> Result<GasLeft>;
}

#[test]
#[cfg(test)]
fn should_calculate_overflow_mul_shr_without_overflow() {
mod tests {
use util::{U256, Uint};
use super::CostType;

#[test]
fn should_calculate_overflow_mul_shr_without_overflow() {
// given
let num = 1048576;

@@ -209,15 +229,14 @@ fn should_calculate_overflow_mul_shr_without_overflow() {
assert!(!o1);
assert_eq!(res2, num);
assert!(!o2);
}
}

#[test]
#[cfg(test)]
fn should_calculate_overflow_mul_shr_with_overflow() {
#[test]
fn should_calculate_overflow_mul_shr_with_overflow() {
// given
let max = ::std::u64::MAX;
let max = u64::max_value();
let num1 = U256([max, max, max, max]);
let num2 = ::std::usize::MAX;
let num2 = usize::max_value();

// when
let (res1, o1) = num1.overflow_mul_shr(num1, 256);
@@ -229,17 +248,17 @@ fn should_calculate_overflow_mul_shr_with_overflow() {

assert_eq!(res1, !U256::zero() - U256::one());
assert!(o1);
}
}

#[test]
#[cfg(test)]
fn should_validate_u256_to_usize_conversion() {
#[test]
fn should_validate_u256_to_usize_conversion() {
// given
let v = U256::from(::std::usize::MAX) + U256::from(1);
let v = U256::from(usize::max_value()) + U256::from(1);

// when
let res = usize::from_u256(v);

// then
assert!(res.is_err());
}
}
@@ -286,7 +286,7 @@ impl<'a> Executive<'a> {
// just drain the whole gas
self.state.revert_snapshot();

tracer.trace_failed_call(trace_info, vec![]);
tracer.trace_failed_call(trace_info, vec![], evm::Error::OutOfGas.into());

Err(evm::Error::OutOfGas)
}
@@ -320,7 +320,7 @@ impl<'a> Executive<'a> {
trace_output,
traces
),
_ => tracer.trace_failed_call(trace_info, traces),
Err(e) => tracer.trace_failed_call(trace_info, traces, e.into()),
};

trace!(target: "executive", "substate={:?}; unconfirmed_substate={:?}\n", substate, unconfirmed_substate);
@@ -385,7 +385,7 @@ impl<'a> Executive<'a> {
created,
subtracer.traces()
),
_ => tracer.trace_failed_create(trace_info, subtracer.traces())
Err(e) => tracer.trace_failed_create(trace_info, subtracer.traces(), e.into())
};

self.enact_result(&res, substate, unconfirmed_substate);
@@ -341,3 +341,91 @@ impl SnapshotReader for LooseReader {
Ok(buf)
}
}

#[cfg(test)]
mod tests {
use devtools::RandomTempPath;
use util::sha3::Hashable;

use snapshot::ManifestData;
use super::{SnapshotWriter, SnapshotReader, PackedWriter, PackedReader, LooseWriter, LooseReader};

const STATE_CHUNKS: &'static [&'static [u8]] = &[b"dog", b"cat", b"hello world", b"hi", b"notarealchunk"];
const BLOCK_CHUNKS: &'static [&'static [u8]] = &[b"hello!", b"goodbye!", b"abcdefg", b"hijklmnop", b"qrstuvwxy", b"and", b"z"];

#[test]
fn packed_write_and_read() {
let path = RandomTempPath::new();
let mut writer = PackedWriter::new(path.as_path()).unwrap();

let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();

for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}

for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(chunk.sha3(), chunk).unwrap();
}

let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};

writer.finish(manifest.clone()).unwrap();

let reader = PackedReader::new(path.as_path()).unwrap().unwrap();
assert_eq!(reader.manifest(), &manifest);

for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}

#[test]
fn loose_write_and_read() {
let path = RandomTempPath::new();
let mut writer = LooseWriter::new(path.as_path().into()).unwrap();

let mut state_hashes = Vec::new();
let mut block_hashes = Vec::new();

for chunk in STATE_CHUNKS {
let hash = chunk.sha3();
state_hashes.push(hash.clone());
writer.write_state_chunk(hash, chunk).unwrap();
}

for chunk in BLOCK_CHUNKS {
let hash = chunk.sha3();
block_hashes.push(hash.clone());
writer.write_block_chunk(chunk.sha3(), chunk).unwrap();
}

let manifest = ManifestData {
state_hashes: state_hashes,
block_hashes: block_hashes,
state_root: b"notarealroot".sha3(),
block_number: 12345678987654321,
block_hash: b"notarealblock".sha3(),
};

writer.finish(manifest.clone()).unwrap();

let reader = LooseReader::new(path.as_path().into()).unwrap();
assert_eq!(reader.manifest(), &manifest);

for hash in manifest.state_hashes.iter().chain(&manifest.block_hashes) {
reader.chunk(hash.clone()).unwrap();
}
}
}
@@ -40,7 +40,7 @@ use util::kvdb::{Database, DatabaseConfig};
use util::snappy;

/// Statuses for restorations.
#[derive(PartialEq, Clone, Copy, Debug)]
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
pub enum RestorationStatus {
/// No restoration.
Inactive,
@@ -519,3 +519,50 @@ impl SnapshotService for Service {
.expect("snapshot service and io service are kept alive by client service; qed");
}
}

#[cfg(test)]
mod tests {
use service::ClientIoMessage;
use io::{IoService};
use devtools::RandomTempPath;
use tests::helpers::get_test_spec;
use util::journaldb::Algorithm;

use snapshot::ManifestData;
use super::*;

#[test]
fn sends_async_messages() {
let service = IoService::<ClientIoMessage>::start().unwrap();

let dir = RandomTempPath::new();
let mut dir = dir.as_path().to_owned();
dir.push("pruning");
dir.push("db");

let service = Service::new(
&get_test_spec(),
Algorithm::Archive,
dir,
service.channel()
).unwrap();

assert!(service.manifest().is_none());
assert!(service.chunk(Default::default()).is_none());
assert_eq!(service.status(), RestorationStatus::Inactive);
assert_eq!(service.chunks_done(), (0, 0));

let manifest = ManifestData {
state_hashes: vec![],
block_hashes: vec![],
state_root: Default::default(),
block_number: 0,
block_hash: Default::default(),
};

service.begin_restore(manifest);
service.abort_restore();
service.restore_state_chunk(Default::default(), vec![]);
service.restore_block_chunk(Default::default(), vec![]);
}
}
@@ -78,7 +78,7 @@ impl StateProducer {
let new_accs = rng.gen::<u32>() % 5;

for _ in 0..new_accs {
let address_hash = H256::random();
let address_hash = H256(rng.gen());
let balance: usize = rng.gen();
let nonce: usize = rng.gen();
let acc = ::state::Account::new_basic(balance.into(), nonce.into()).rlp();
@@ -20,3 +20,18 @@ mod blocks;
mod state;

pub mod helpers;

use super::ManifestData;

#[test]
fn manifest_rlp() {
let manifest = ManifestData {
block_hashes: Vec::new(),
state_hashes: Vec::new(),
block_number: 1234567,
state_root: Default::default(),
block_hash: Default::default(),
};
let raw = manifest.clone().into_rlp();
assert_eq!(ManifestData::from_rlp(&raw).unwrap(), manifest);
}
@@ -20,7 +20,7 @@ use snapshot::{chunk_state, Progress, StateRebuilder};
use snapshot::io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter};
use super::helpers::{compare_dbs, StateProducer};

use rand;
use rand::{XorShiftRng, SeedableRng};
use util::hash::H256;
use util::journaldb::{self, Algorithm};
use util::kvdb::{Database, DatabaseConfig};
@@ -33,7 +33,7 @@ use std::sync::Arc;
#[test]
fn snap_and_restore() {
let mut producer = StateProducer::new();
let mut rng = rand::thread_rng();
let mut rng = XorShiftRng::from_seed([1, 2, 3, 4]);
let mut old_db = MemoryDB::new();
let db_cfg = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
@@ -444,8 +444,7 @@ use env_info::*;
use spec::*;
use transaction::*;
use util::log::init_log;
use trace::trace;
use trace::FlatTrace;
use trace::{FlatTrace, TraceError, trace};
use types::executed::CallType;

#[test]
@@ -538,7 +537,7 @@ fn should_trace_failed_create_transaction() {
gas: 78792.into(),
init: vec![91, 96, 0, 86],
}),
result: trace::Res::FailedCreate,
result: trace::Res::FailedCreate(TraceError::OutOfGas),
subtraces: 0
}];

@@ -869,7 +868,7 @@ fn should_trace_failed_call_transaction() {
input: vec![],
call_type: CallType::Call,
}),
result: trace::Res::FailedCall,
result: trace::Res::FailedCall(TraceError::OutOfGas),
subtraces: 0,
}];

@@ -1084,7 +1083,7 @@ fn should_trace_failed_subcall_transaction() {
input: vec![],
call_type: CallType::Call,
}),
result: trace::Res::FailedCall,
result: trace::Res::FailedCall(TraceError::OutOfGas),
}];

assert_eq!(result.trace, expected_trace);
@@ -1217,7 +1216,7 @@ fn should_trace_failed_subcall_with_subcall_transaction() {
input: vec![],
call_type: CallType::Call,
}),
result: trace::Res::FailedCall,
result: trace::Res::FailedCall(TraceError::OutOfGas),
}, FlatTrace {
trace_address: vec![0, 0].into_iter().collect(),
subtraces: 0,
@@ -420,7 +420,7 @@ mod tests {
use devtools::RandomTempPath;
use header::BlockNumber;
use trace::{Config, Switch, TraceDB, Database as TraceDatabase, DatabaseExtras, ImportRequest};
use trace::{Filter, LocalizedTrace, AddressesFilter};
use trace::{Filter, LocalizedTrace, AddressesFilter, TraceError};
use trace::trace::{Call, Action, Res};
use trace::flat::{FlatTrace, FlatBlockTraces, FlatTransactionTraces};
use types::executed::CallType;
@@ -560,7 +560,7 @@ mod tests {
input: vec![],
call_type: CallType::Call,
}),
result: Res::FailedCall,
result: Res::FailedCall(TraceError::OutOfGas),
}])]),
block_hash: block_hash.clone(),
block_number: block_number,
@@ -579,7 +579,7 @@ mod tests {
input: vec![],
call_type: CallType::Call,
}),
result: Res::FailedCall,
result: Res::FailedCall(TraceError::OutOfGas),
trace_address: vec![],
subtraces: 0,
transaction_number: 0,
@@ -19,7 +19,7 @@
use util::{Bytes, Address, U256};
use action_params::ActionParams;
use trace::trace::{Call, Create, Action, Res, CreateResult, CallResult, VMTrace, VMOperation, VMExecutedOperation, MemoryDiff, StorageDiff, Suicide};
use trace::{Tracer, VMTracer, FlatTrace};
use trace::{Tracer, VMTracer, FlatTrace, TraceError};

/// Simple executive tracer. Traces all calls and creates. Ignores delegatecalls.
#[derive(Default)]
@@ -112,23 +112,23 @@ impl Tracer for ExecutiveTracer {
self.traces.extend(update_trace_address(subs));
}

fn trace_failed_call(&mut self, call: Option<Call>, subs: Vec<FlatTrace>) {
fn trace_failed_call(&mut self, call: Option<Call>, subs: Vec<FlatTrace>, error: TraceError) {
let trace = FlatTrace {
trace_address: Default::default(),
subtraces: top_level_subtraces(&subs),
action: Action::Call(call.expect("self.prepare_trace_call().is_some(): so we must be tracing: qed")),
result: Res::FailedCall,
result: Res::FailedCall(error),
};
debug!(target: "trace", "Traced failed call {:?}", trace);
self.traces.push(trace);
self.traces.extend(update_trace_address(subs));
}

fn trace_failed_create(&mut self, create: Option<Create>, subs: Vec<FlatTrace>) {
fn trace_failed_create(&mut self, create: Option<Create>, subs: Vec<FlatTrace>, error: TraceError) {
let trace = FlatTrace {
subtraces: top_level_subtraces(&subs),
action: Action::Create(create.expect("self.prepare_trace_create().is_some(): so we must be tracing: qed")),
result: Res::FailedCreate,
result: Res::FailedCreate(error),
trace_address: Default::default(),
};
debug!(target: "trace", "Traced failed create {:?}", trace);
@@ -24,7 +24,8 @@ mod executive_tracer;
mod import;
mod noop_tracer;

pub use types::trace_types::*;
pub use types::trace_types::{filter, flat, localized, trace};
pub use types::trace_types::error::Error as TraceError;
pub use self::config::{Config, Switch};
pub use self::db::TraceDB;
pub use self::error::Error;
@@ -71,10 +72,10 @@ pub trait Tracer: Send {
);

/// Stores failed call trace.
fn trace_failed_call(&mut self, call: Option<Call>, subs: Vec<FlatTrace>);
fn trace_failed_call(&mut self, call: Option<Call>, subs: Vec<FlatTrace>, error: TraceError);

/// Stores failed create trace.
fn trace_failed_create(&mut self, create: Option<Create>, subs: Vec<FlatTrace>);
fn trace_failed_create(&mut self, create: Option<Create>, subs: Vec<FlatTrace>, error: TraceError);

/// Stores suicide info.
fn trace_suicide(&mut self, address: Address, balance: U256, refund_address: Address);
@@ -18,7 +18,7 @@

use util::{Bytes, Address, U256};
use action_params::ActionParams;
use trace::{Tracer, VMTracer, FlatTrace};
use trace::{Tracer, VMTracer, FlatTrace, TraceError};
use trace::trace::{Call, Create, VMTrace};

/// Nonoperative tracer. Does not trace anything.
@@ -47,11 +47,11 @@ impl Tracer for NoopTracer {
assert!(code.is_none(), "self.prepare_trace_output().is_none(): so we can't be tracing: qed");
}

fn trace_failed_call(&mut self, call: Option<Call>, _: Vec<FlatTrace>) {
fn trace_failed_call(&mut self, call: Option<Call>, _: Vec<FlatTrace>, _: TraceError) {
assert!(call.is_none(), "self.prepare_trace_call().is_none(): so we can't be tracing: qed");
}

fn trace_failed_create(&mut self, create: Option<Create>, _: Vec<FlatTrace>) {
fn trace_failed_create(&mut self, create: Option<Create>, _: Vec<FlatTrace>, _: TraceError) {
assert!(create.is_none(), "self.prepare_trace_create().is_none(): so we can't be tracing: qed");
}
ethcore/src/types/trace_types/error.rs (new file, 99 lines)
@ -0,0 +1,99 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Trace errors.

use std::fmt;
use rlp::{Encodable, RlpStream, Decodable, Decoder, DecoderError, Stream, View};
use evm::Error as EvmError;

/// Trace evm errors.
#[derive(Debug, PartialEq, Clone, Binary)]
pub enum Error {
	/// `OutOfGas` is returned when transaction execution runs out of gas.
	OutOfGas,
	/// `BadJumpDestination` is returned when execution tried to move
	/// to position that wasn't marked with JUMPDEST instruction
	BadJumpDestination,
	/// `BadInstructions` is returned when given instruction is not supported
	BadInstruction,
	/// `StackUnderflow` when there is not enough stack elements to execute instruction
	StackUnderflow,
	/// When execution would exceed defined Stack Limit
	OutOfStack,
	/// Returned on evm internal error. Should never be ignored during development.
	/// Likely to cause consensus issues.
	Internal,
}

impl From<EvmError> for Error {
	fn from(e: EvmError) -> Self {
		match e {
			EvmError::OutOfGas => Error::OutOfGas,
			EvmError::BadJumpDestination { .. } => Error::BadJumpDestination,
			EvmError::BadInstruction { .. } => Error::BadInstruction,
			EvmError::StackUnderflow { .. } => Error::StackUnderflow,
			EvmError::OutOfStack { .. } => Error::OutOfStack,
			EvmError::Internal => Error::Internal,
		}
	}
}

impl fmt::Display for Error {
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		use self::Error::*;
		let message = match *self {
			OutOfGas => "Out of gas",
			BadJumpDestination => "Bad jump destination",
			BadInstruction => "Bad instruction",
			StackUnderflow => "Stack underflow",
			OutOfStack => "Out of stack",
			Internal => "Internal error",
		};
		message.fmt(f)
	}
}

impl Encodable for Error {
	fn rlp_append(&self, s: &mut RlpStream) {
		use self::Error::*;
		let value = match *self {
			OutOfGas => 0u8,
			BadJumpDestination => 1,
			BadInstruction => 2,
			StackUnderflow => 3,
			OutOfStack => 4,
			Internal => 5,
		};
		s.append(&value);
	}
}

impl Decodable for Error {
	fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
		use self::Error::*;
		let value: u8 = try!(decoder.as_rlp().as_val());
		match value {
			0 => Ok(OutOfGas),
			1 => Ok(BadJumpDestination),
			2 => Ok(BadInstruction),
			3 => Ok(StackUnderflow),
			4 => Ok(OutOfStack),
			5 => Ok(Internal),
			_ => Err(DecoderError::Custom("Invalid error type")),
		}
	}
}
|
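A small illustrative check, not part of the commit: the `Error` above is re-exported as `TraceError` (see the `trace/mod.rs` hunk earlier), and its `Display` text is what later ends up in the RPC "error" field.

extern crate ethcore;

use ethcore::trace::TraceError;

fn main() {
	// Display gives the human-readable message used by the RPC serializers further down.
	assert_eq!(TraceError::OutOfGas.to_string(), "Out of gas");
	assert_eq!(TraceError::BadJumpDestination.to_string(), "Bad jump destination");
}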
@ -140,7 +140,7 @@ mod tests {
|
||||
use util::bloom::Bloomable;
|
||||
use trace::trace::{Action, Call, Res, Create, CreateResult, Suicide};
|
||||
use trace::flat::FlatTrace;
|
||||
use trace::{Filter, AddressesFilter};
|
||||
use trace::{Filter, AddressesFilter, TraceError};
|
||||
use types::executed::CallType;
|
||||
|
||||
#[test]
|
||||
@ -286,7 +286,7 @@ mod tests {
|
||||
input: vec![0x5],
|
||||
call_type: CallType::Call,
|
||||
}),
|
||||
result: Res::FailedCall,
|
||||
result: Res::FailedCall(TraceError::OutOfGas),
|
||||
trace_address: vec![0].into_iter().collect(),
|
||||
subtraces: 0,
|
||||
};
|
||||
|
@ -16,6 +16,7 @@
|
||||
|
||||
//! Types used in the public api
|
||||
|
||||
pub mod error;
|
||||
pub mod filter;
|
||||
pub mod flat;
|
||||
pub mod trace;
|
||||
|
@ -24,6 +24,7 @@ use rlp::*;
|
||||
use action_params::ActionParams;
|
||||
use basic_types::LogBloom;
|
||||
use types::executed::CallType;
|
||||
use super::error::Error;
|
||||
|
||||
/// `Call` result.
|
||||
#[derive(Debug, Clone, PartialEq, Default, Binary)]
|
||||
@ -322,9 +323,9 @@ pub enum Res {
|
||||
/// Successful create action result.
|
||||
Create(CreateResult),
|
||||
/// Failed call.
|
||||
FailedCall,
|
||||
FailedCall(Error),
|
||||
/// Failed create.
|
||||
FailedCreate,
|
||||
FailedCreate(Error),
|
||||
/// None
|
||||
None,
|
||||
}
|
||||
@ -342,13 +343,15 @@ impl Encodable for Res {
|
||||
s.append(&1u8);
|
||||
s.append(create);
|
||||
},
|
||||
Res::FailedCall => {
|
||||
s.begin_list(1);
|
||||
Res::FailedCall(ref err) => {
|
||||
s.begin_list(2);
|
||||
s.append(&2u8);
|
||||
s.append(err);
|
||||
},
|
||||
Res::FailedCreate => {
|
||||
s.begin_list(1);
|
||||
Res::FailedCreate(ref err) => {
|
||||
s.begin_list(2);
|
||||
s.append(&3u8);
|
||||
s.append(err);
|
||||
},
|
||||
Res::None => {
|
||||
s.begin_list(1);
|
||||
@ -365,8 +368,8 @@ impl Decodable for Res {
|
||||
match action_type {
|
||||
0 => d.val_at(1).map(Res::Call),
|
||||
1 => d.val_at(1).map(Res::Create),
|
||||
2 => Ok(Res::FailedCall),
|
||||
3 => Ok(Res::FailedCreate),
|
||||
2 => d.val_at(1).map(Res::FailedCall),
|
||||
3 => d.val_at(1).map(Res::FailedCreate),
|
||||
4 => Ok(Res::None),
|
||||
_ => Err(DecoderError::Custom("Invalid result type.")),
|
||||
}
|
||||
@ -378,7 +381,7 @@ impl Res {
|
||||
pub fn bloom(&self) -> LogBloom {
|
||||
match *self {
|
||||
Res::Create(ref create) => create.bloom(),
|
||||
Res::Call(_) | Res::FailedCall | Res::FailedCreate | Res::None => Default::default(),
|
||||
Res::Call(_) | Res::FailedCall(_) | Res::FailedCreate(_) | Res::None => Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
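Put differently, a failed call or create now round-trips through RLP as a two-item list ([2, error] or [3, error]) rather than the old one-item list. A rough ethcore-internal sketch of the encoding side, assuming `RlpStream::new()` and `out()` from the crate's rlp module; everything else is taken from the impls above:

use rlp::{Encodable, RlpStream, Stream};
use trace::trace::Res;
use trace::TraceError;

// Sketch only: a failed call is appended as the two-item list [2, <error>].
// The decoder above reads the error back with `d.val_at(1)`.
fn encode_failed_call() -> Vec<u8> {
	let res = Res::FailedCall(TraceError::OutOfGas);
	let mut s = RlpStream::new();
	res.rlp_append(&mut s);
	s.out()
}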
@ -18,6 +18,7 @@ use std::sync::Arc;
|
||||
use io::PanicHandler;
|
||||
use rpc_apis;
|
||||
use ethcore::client::Client;
|
||||
use ethsync::SyncProvider;
|
||||
use helpers::replace_home;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
@ -49,6 +50,7 @@ pub struct Dependencies {
|
||||
pub panic_handler: Arc<PanicHandler>,
|
||||
pub apis: Arc<rpc_apis::Dependencies>,
|
||||
pub client: Arc<Client>,
|
||||
pub sync: Arc<SyncProvider>,
|
||||
}
|
||||
|
||||
pub fn new(configuration: Configuration, deps: Dependencies) -> Result<Option<WebappServer>, String> {
|
||||
@ -117,9 +119,12 @@ mod server {
|
||||
) -> Result<WebappServer, String> {
|
||||
use ethcore_dapps as dapps;
|
||||
|
||||
let server = dapps::ServerBuilder::new(dapps_path, Arc::new(Registrar {
|
||||
client: deps.client.clone(),
|
||||
}));
|
||||
let mut server = dapps::ServerBuilder::new(
|
||||
dapps_path,
|
||||
Arc::new(Registrar { client: deps.client.clone() })
|
||||
);
|
||||
let sync = deps.sync.clone();
|
||||
server.with_sync_status(Arc::new(move || sync.status().is_major_syncing()));
|
||||
let server = rpc_apis::setup_rpc(server, deps.apis.clone(), rpc_apis::ApiSet::UnsafeContext);
|
||||
let start_result = match auth {
|
||||
None => {
|
||||
|
@ -224,6 +224,7 @@ pub fn execute(cmd: RunCmd) -> Result<(), String> {
|
||||
panic_handler: panic_handler.clone(),
|
||||
apis: deps_for_rpc_apis.clone(),
|
||||
client: client.clone(),
|
||||
sync: sync_provider.clone(),
|
||||
};
|
||||
|
||||
// start dapps server
|
||||
|
@ -16,8 +16,7 @@
|
||||
|
||||
use std::collections::BTreeMap;
|
||||
use serde::{Serialize, Serializer};
|
||||
use ethcore::trace::trace;
|
||||
use ethcore::trace::{FlatTrace, LocalizedTrace as EthLocalizedTrace};
|
||||
use ethcore::trace::{FlatTrace, LocalizedTrace as EthLocalizedTrace, trace, TraceError};
|
||||
use ethcore::trace as et;
|
||||
use ethcore::state_diff;
|
||||
use ethcore::account_diff;
|
||||
@ -319,16 +318,13 @@ impl From<trace::Suicide> for Suicide {
|
||||
}
|
||||
|
||||
/// Action
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug)]
|
||||
pub enum Action {
|
||||
/// Call
|
||||
#[serde(rename="call")]
|
||||
Call(Call),
|
||||
/// Create
|
||||
#[serde(rename="create")]
|
||||
Create(Create),
|
||||
/// Suicide
|
||||
#[serde(rename="suicide")]
|
||||
Suicide(Suicide),
|
||||
}
|
||||
|
||||
@ -384,22 +380,17 @@ impl From<trace::CreateResult> for CreateResult {
|
||||
}
|
||||
|
||||
/// Response
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug)]
|
||||
pub enum Res {
|
||||
/// Call
|
||||
#[serde(rename="call")]
|
||||
Call(CallResult),
|
||||
/// Create
|
||||
#[serde(rename="create")]
|
||||
Create(CreateResult),
|
||||
/// Call failure
|
||||
#[serde(rename="failedCall")]
|
||||
FailedCall,
|
||||
FailedCall(TraceError),
|
||||
/// Creation failure
|
||||
#[serde(rename="failedCreate")]
|
||||
FailedCreate,
|
||||
FailedCreate(TraceError),
|
||||
/// None
|
||||
#[serde(rename="none")]
|
||||
None,
|
||||
}
|
||||
|
||||
@ -408,39 +399,73 @@ impl From<trace::Res> for Res {
|
||||
match t {
|
||||
trace::Res::Call(call) => Res::Call(CallResult::from(call)),
|
||||
trace::Res::Create(create) => Res::Create(CreateResult::from(create)),
|
||||
trace::Res::FailedCall => Res::FailedCall,
|
||||
trace::Res::FailedCreate => Res::FailedCreate,
|
||||
trace::Res::FailedCall(error) => Res::FailedCall(error),
|
||||
trace::Res::FailedCreate(error) => Res::FailedCreate(error),
|
||||
trace::Res::None => Res::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Trace
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug)]
|
||||
pub struct LocalizedTrace {
|
||||
/// Action
|
||||
action: Action,
|
||||
/// Result
|
||||
result: Res,
|
||||
/// Trace address
|
||||
#[serde(rename="traceAddress")]
|
||||
trace_address: Vec<U256>,
|
||||
/// Subtraces
|
||||
subtraces: U256,
|
||||
/// Transaction position
|
||||
#[serde(rename="transactionPosition")]
|
||||
transaction_position: U256,
|
||||
/// Transaction hash
|
||||
#[serde(rename="transactionHash")]
|
||||
transaction_hash: H256,
|
||||
/// Block Number
|
||||
#[serde(rename="blockNumber")]
|
||||
block_number: U256,
|
||||
/// Block Hash
|
||||
#[serde(rename="blockHash")]
|
||||
block_hash: H256,
|
||||
}
|
||||
|
||||
impl Serialize for LocalizedTrace {
|
||||
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
|
||||
where S: Serializer
|
||||
{
|
||||
let mut state = try!(serializer.serialize_struct("LocalizedTrace", 9));
|
||||
match self.action {
|
||||
Action::Call(ref call) => {
|
||||
try!(serializer.serialize_struct_elt(&mut state, "type", "call"));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "action", call));
|
||||
},
|
||||
Action::Create(ref create) => {
|
||||
try!(serializer.serialize_struct_elt(&mut state, "type", "create"));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "action", create));
|
||||
},
|
||||
Action::Suicide(ref suicide) => {
|
||||
try!(serializer.serialize_struct_elt(&mut state, "type", "suicide"));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "action", suicide));
|
||||
},
|
||||
}
|
||||
|
||||
match self.result {
|
||||
Res::Call(ref call) => try!(serializer.serialize_struct_elt(&mut state, "result", call)),
|
||||
Res::Create(ref create) => try!(serializer.serialize_struct_elt(&mut state, "result", create)),
|
||||
Res::FailedCall(ref error) => try!(serializer.serialize_struct_elt(&mut state, "error", error.to_string())),
|
||||
Res::FailedCreate(ref error) => try!(serializer.serialize_struct_elt(&mut state, "error", error.to_string())),
|
||||
Res::None => try!(serializer.serialize_struct_elt(&mut state, "result", None as Option<u8>)),
|
||||
}
|
||||
|
||||
try!(serializer.serialize_struct_elt(&mut state, "traceAddress", &self.trace_address));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "subtraces", &self.subtraces));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "transactionPosition", &self.transaction_position));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "transactionHash", &self.transaction_hash));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "blockNumber", &self.block_number));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "blockHash", &self.block_hash));
|
||||
|
||||
serializer.serialize_struct_end(state)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<EthLocalizedTrace> for LocalizedTrace {
|
||||
fn from(t: EthLocalizedTrace) -> Self {
|
||||
LocalizedTrace {
|
||||
@ -457,10 +482,9 @@ impl From<EthLocalizedTrace> for LocalizedTrace {
|
||||
}
|
||||
|
||||
/// Trace
|
||||
#[derive(Debug, Serialize)]
|
||||
#[derive(Debug)]
|
||||
pub struct Trace {
|
||||
/// Trace address
|
||||
#[serde(rename="traceAddress")]
|
||||
trace_address: Vec<U256>,
|
||||
/// Subtraces
|
||||
subtraces: U256,
|
||||
@ -470,6 +494,41 @@ pub struct Trace {
|
||||
result: Res,
|
||||
}
|
||||
|
||||
impl Serialize for Trace {
|
||||
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
|
||||
where S: Serializer
|
||||
{
|
||||
let mut state = try!(serializer.serialize_struct("Trace", 4));
|
||||
match self.action {
|
||||
Action::Call(ref call) => {
|
||||
try!(serializer.serialize_struct_elt(&mut state, "type", "call"));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "action", call));
|
||||
},
|
||||
Action::Create(ref create) => {
|
||||
try!(serializer.serialize_struct_elt(&mut state, "type", "create"));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "action", create));
|
||||
},
|
||||
Action::Suicide(ref suicide) => {
|
||||
try!(serializer.serialize_struct_elt(&mut state, "type", "suicide"));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "action", suicide));
|
||||
},
|
||||
}
|
||||
|
||||
match self.result {
|
||||
Res::Call(ref call) => try!(serializer.serialize_struct_elt(&mut state, "result", call)),
|
||||
Res::Create(ref create) => try!(serializer.serialize_struct_elt(&mut state, "result", create)),
|
||||
Res::FailedCall(ref error) => try!(serializer.serialize_struct_elt(&mut state, "error", error.to_string())),
|
||||
Res::FailedCreate(ref error) => try!(serializer.serialize_struct_elt(&mut state, "error", error.to_string())),
|
||||
Res::None => try!(serializer.serialize_struct_elt(&mut state, "result", None as Option<u8>)),
|
||||
}
|
||||
|
||||
try!(serializer.serialize_struct_elt(&mut state, "traceAddress", &self.trace_address));
|
||||
try!(serializer.serialize_struct_elt(&mut state, "subtraces", &self.subtraces));
|
||||
|
||||
serializer.serialize_struct_end(state)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FlatTrace> for Trace {
|
||||
fn from(t: FlatTrace) -> Self {
|
||||
Trace {
|
||||
@ -511,7 +570,8 @@ impl From<Executed> for TraceResults {
|
||||
mod tests {
|
||||
use serde_json;
|
||||
use std::collections::BTreeMap;
|
||||
use v1::types::{Bytes, U256, H256, H160};
|
||||
use v1::types::Bytes;
|
||||
use ethcore::trace::TraceError;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
@ -527,29 +587,118 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_trace_serialize() {
|
||||
fn test_trace_call_serialize() {
|
||||
let t = LocalizedTrace {
|
||||
action: Action::Call(Call {
|
||||
from: H160::from(4),
|
||||
to: H160::from(5),
|
||||
value: U256::from(6),
|
||||
gas: U256::from(7),
|
||||
from: 4.into(),
|
||||
to: 5.into(),
|
||||
value: 6.into(),
|
||||
gas: 7.into(),
|
||||
input: Bytes::new(vec![0x12, 0x34]),
|
||||
call_type: CallType::Call,
|
||||
}),
|
||||
result: Res::Call(CallResult {
|
||||
gas_used: U256::from(8),
|
||||
gas_used: 8.into(),
|
||||
output: vec![0x56, 0x78].into(),
|
||||
}),
|
||||
trace_address: vec![U256::from(10)],
|
||||
subtraces: U256::from(1),
|
||||
transaction_position: U256::from(11),
|
||||
transaction_hash: H256::from(12),
|
||||
block_number: U256::from(13),
|
||||
block_hash: H256::from(14),
|
||||
trace_address: vec![10.into()],
|
||||
subtraces: 1.into(),
|
||||
transaction_position: 11.into(),
|
||||
transaction_hash: 12.into(),
|
||||
block_number: 13.into(),
|
||||
block_hash: 14.into(),
|
||||
};
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"{"action":{"call":{"from":"0x0000000000000000000000000000000000000004","to":"0x0000000000000000000000000000000000000005","value":"0x6","gas":"0x7","input":"0x1234","callType":"call"}},"result":{"call":{"gasUsed":"0x8","output":"0x5678"}},"traceAddress":["0xa"],"subtraces":"0x1","transactionPosition":"0xb","transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":"0xd","blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
|
||||
assert_eq!(serialized, r#"{"type":"call","action":{"from":"0x0000000000000000000000000000000000000004","to":"0x0000000000000000000000000000000000000005","value":"0x6","gas":"0x7","input":"0x1234","callType":"call"},"result":{"gasUsed":"0x8","output":"0x5678"},"traceAddress":["0xa"],"subtraces":"0x1","transactionPosition":"0xb","transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":"0xd","blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_trace_failed_call_serialize() {
|
||||
let t = LocalizedTrace {
|
||||
action: Action::Call(Call {
|
||||
from: 4.into(),
|
||||
to: 5.into(),
|
||||
value: 6.into(),
|
||||
gas: 7.into(),
|
||||
input: Bytes::new(vec![0x12, 0x34]),
|
||||
call_type: CallType::Call,
|
||||
}),
|
||||
result: Res::FailedCall(TraceError::OutOfGas),
|
||||
trace_address: vec![10.into()],
|
||||
subtraces: 1.into(),
|
||||
transaction_position: 11.into(),
|
||||
transaction_hash: 12.into(),
|
||||
block_number: 13.into(),
|
||||
block_hash: 14.into(),
|
||||
};
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"{"type":"call","action":{"from":"0x0000000000000000000000000000000000000004","to":"0x0000000000000000000000000000000000000005","value":"0x6","gas":"0x7","input":"0x1234","callType":"call"},"error":"Out of gas","traceAddress":["0xa"],"subtraces":"0x1","transactionPosition":"0xb","transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":"0xd","blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_trace_create_serialize() {
|
||||
let t = LocalizedTrace {
|
||||
action: Action::Create(Create {
|
||||
from: 4.into(),
|
||||
value: 6.into(),
|
||||
gas: 7.into(),
|
||||
init: Bytes::new(vec![0x12, 0x34]),
|
||||
}),
|
||||
result: Res::Create(CreateResult {
|
||||
gas_used: 8.into(),
|
||||
code: vec![0x56, 0x78].into(),
|
||||
address: 0xff.into(),
|
||||
}),
|
||||
trace_address: vec![10.into()],
|
||||
subtraces: 1.into(),
|
||||
transaction_position: 11.into(),
|
||||
transaction_hash: 12.into(),
|
||||
block_number: 13.into(),
|
||||
block_hash: 14.into(),
|
||||
};
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"{"type":"create","action":{"from":"0x0000000000000000000000000000000000000004","value":"0x6","gas":"0x7","init":"0x1234"},"result":{"gasUsed":"0x8","code":"0x5678","address":"0x00000000000000000000000000000000000000ff"},"traceAddress":["0xa"],"subtraces":"0x1","transactionPosition":"0xb","transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":"0xd","blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_trace_failed_create_serialize() {
|
||||
let t = LocalizedTrace {
|
||||
action: Action::Create(Create {
|
||||
from: 4.into(),
|
||||
value: 6.into(),
|
||||
gas: 7.into(),
|
||||
init: Bytes::new(vec![0x12, 0x34]),
|
||||
}),
|
||||
result: Res::FailedCreate(TraceError::OutOfGas),
|
||||
trace_address: vec![10.into()],
|
||||
subtraces: 1.into(),
|
||||
transaction_position: 11.into(),
|
||||
transaction_hash: 12.into(),
|
||||
block_number: 13.into(),
|
||||
block_hash: 14.into(),
|
||||
};
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"{"type":"create","action":{"from":"0x0000000000000000000000000000000000000004","value":"0x6","gas":"0x7","init":"0x1234"},"error":"Out of gas","traceAddress":["0xa"],"subtraces":"0x1","transactionPosition":"0xb","transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":"0xd","blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_trace_suicide_serialize() {
|
||||
let t = LocalizedTrace {
|
||||
action: Action::Suicide(Suicide {
|
||||
address: 4.into(),
|
||||
refund_address: 6.into(),
|
||||
balance: 7.into(),
|
||||
}),
|
||||
result: Res::None,
|
||||
trace_address: vec![10.into()],
|
||||
subtraces: 1.into(),
|
||||
transaction_position: 11.into(),
|
||||
transaction_hash: 12.into(),
|
||||
block_number: 13.into(),
|
||||
block_hash: 14.into(),
|
||||
};
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"{"type":"suicide","action":{"address":"0x0000000000000000000000000000000000000004","refundAddress":"0x0000000000000000000000000000000000000006","balance":"0x7"},"result":null,"traceAddress":["0xa"],"subtraces":"0x1","transactionPosition":"0xb","transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":"0xd","blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -616,44 +765,4 @@ mod tests {
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"{"0x000000000000000000000000000000000000002a":{"balance":"=","nonce":{"+":"0x1"},"code":"=","storage":{"0x000000000000000000000000000000000000000000000000000000000000002a":"="}},"0x0000000000000000000000000000000000000045":{"balance":"=","nonce":{"*":{"from":"0x1","to":"0x0"}},"code":{"-":"0x60"},"storage":{}}}"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_action_serialize() {
|
||||
let actions = vec![Action::Call(Call {
|
||||
from: H160::from(1),
|
||||
to: H160::from(2),
|
||||
value: U256::from(3),
|
||||
gas: U256::from(4),
|
||||
input: vec![0x12, 0x34].into(),
|
||||
call_type: CallType::Call,
|
||||
}), Action::Create(Create {
|
||||
from: H160::from(5),
|
||||
value: U256::from(6),
|
||||
gas: U256::from(7),
|
||||
init: vec![0x56, 0x78].into(),
|
||||
})];
|
||||
|
||||
let serialized = serde_json::to_string(&actions).unwrap();
|
||||
assert_eq!(serialized, r#"[{"call":{"from":"0x0000000000000000000000000000000000000001","to":"0x0000000000000000000000000000000000000002","value":"0x3","gas":"0x4","input":"0x1234","callType":"call"}},{"create":{"from":"0x0000000000000000000000000000000000000005","value":"0x6","gas":"0x7","init":"0x5678"}}]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_result_serialize() {
|
||||
let results = vec![
|
||||
Res::Call(CallResult {
|
||||
gas_used: U256::from(1),
|
||||
output: vec![0x12, 0x34].into(),
|
||||
}),
|
||||
Res::Create(CreateResult {
|
||||
gas_used: U256::from(2),
|
||||
code: vec![0x45, 0x56].into(),
|
||||
address: H160::from(3),
|
||||
}),
|
||||
Res::FailedCall,
|
||||
Res::FailedCreate,
|
||||
];
|
||||
|
||||
let serialized = serde_json::to_string(&results).unwrap();
|
||||
assert_eq!(serialized, r#"[{"call":{"gasUsed":"0x1","output":"0x1234"}},{"create":{"gasUsed":"0x2","code":"0x4556","address":"0x0000000000000000000000000000000000000003"}},"failedCall","failedCreate"]"#);
|
||||
}
|
||||
}
|
||||
|
@ -19,6 +19,7 @@ ws = { git = "https://github.com/ethcore/ws-rs.git", branch = "mio-upstream-stab
|
||||
ethcore-util = { path = "../util" }
|
||||
ethcore-io = { path = "../util/io" }
|
||||
ethcore-rpc = { path = "../rpc" }
|
||||
ethcore-devtools = { path = "../devtools" }
|
||||
parity-dapps-signer = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4", optional = true}
|
||||
|
||||
clippy = { version = "0.0.85", optional = true}
|
||||
|
@ -54,15 +54,13 @@ extern crate jsonrpc_core;
|
||||
extern crate ws;
|
||||
#[cfg(feature = "ui")]
|
||||
extern crate parity_dapps_signer as signer;
|
||||
#[cfg(test)]
|
||||
extern crate ethcore_devtools as devtools;
|
||||
|
||||
mod authcode_store;
|
||||
mod ws_server;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
pub use authcode_store::*;
|
||||
pub use ws_server::*;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
#[test]
|
||||
fn should_work() {}
|
||||
}
|
||||
|
signer/src/tests/mod.rs (new file, 81 lines)
@ -0,0 +1,81 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use std::env;
use std::thread;
use std::time::Duration;
use std::sync::Arc;
use devtools::http_client;
use rpc::ConfirmationsQueue;
use rand;

use ServerBuilder;
use Server;

pub fn serve() -> Server {
	let queue = Arc::new(ConfirmationsQueue::default());
	let builder = ServerBuilder::new(queue, env::temp_dir());
	let port = 35000 + rand::random::<usize>() % 10000;
	let res = builder.start(format!("127.0.0.1:{}", port).parse().unwrap()).unwrap();
	thread::sleep(Duration::from_millis(25));
	res
}

pub fn request(server: Server, request: &str) -> http_client::Response {
	http_client::request(server.addr(), request)
}

#[test]
fn should_reject_invalid_host() {
	// given
	let server = serve();

	// when
	let response = request(server,
		"\
			GET / HTTP/1.1\r\n\
			Host: test:8180\r\n\
			Connection: close\r\n\
			\r\n\
			{}
		"
	);

	// then
	assert_eq!(response.status, "HTTP/1.1 403 FORBIDDEN".to_owned());
	assert!(response.body.contains("URL Blocked"));
}

#[test]
fn should_serve_styles_even_on_disallowed_domain() {
	// given
	let server = serve();

	// when
	let response = request(server,
		"\
			GET /styles.css HTTP/1.1\r\n\
			Host: test:8180\r\n\
			Connection: close\r\n\
			\r\n\
			{}
		"
	);

	// then
	assert_eq!(response.status, "HTTP/1.1 200 OK".to_owned());
}
signer/src/ws_server/error_tpl.html (new file, 21 lines)
@ -0,0 +1,21 @@
<!DOCTYPE html>
<html>
<head>
	<meta charset="utf-8">
	<meta name="viewport" content="width=device-width">
	{meta}
	<title>{title}</title>
	<link rel="stylesheet" href="/styles.css">
</head>
<body>
	<div class="parity-navbar"></div>
	<div class="parity-box">
		<h1>{title}</h1>
		<h3>{message}</h3>
		<p><code>{details}</code></p>
	</div>
	<div class="parity-status">
		<small>{version}</small>
	</div>
</body>
</html>
@ -93,9 +93,15 @@ pub struct Server {
|
||||
broadcaster_handle: Option<thread::JoinHandle<()>>,
|
||||
queue: Arc<ConfirmationsQueue>,
|
||||
panic_handler: Arc<PanicHandler>,
|
||||
addr: SocketAddr,
|
||||
}
|
||||
|
||||
impl Server {
|
||||
/// Returns the address this server is listening on
|
||||
pub fn addr(&self) -> &SocketAddr {
|
||||
&self.addr
|
||||
}
|
||||
|
||||
/// Starts a new `WebSocket` server in separate thread.
|
||||
/// Returns a `Server` handle which closes the server when dropped.
|
||||
fn start(addr: SocketAddr, handler: Arc<IoHandler>, queue: Arc<ConfirmationsQueue>, authcodes_path: PathBuf, skip_origin_validation: bool) -> Result<Server, ServerError> {
|
||||
@ -121,7 +127,7 @@ impl Server {
|
||||
// Spawn a thread with event loop
|
||||
let handle = thread::spawn(move || {
|
||||
ph.catch_panic(move || {
|
||||
match ws.listen(addr).map_err(ServerError::from) {
|
||||
match ws.listen(addr.clone()).map_err(ServerError::from) {
|
||||
Err(ServerError::IoError(io)) => die(format!(
|
||||
"Signer: Could not start listening on specified address. Make sure that no other instance is running on Signer's port. Details: {:?}",
|
||||
io
|
||||
@ -158,6 +164,7 @@ impl Server {
|
||||
broadcaster_handle: Some(broadcaster_handle),
|
||||
queue: queue,
|
||||
panic_handler: panic_handler,
|
||||
addr: addr,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -22,7 +22,7 @@ use std::path::{PathBuf, Path};
|
||||
use std::sync::Arc;
|
||||
use std::str::FromStr;
|
||||
use jsonrpc_core::IoHandler;
|
||||
use util::{H256, Mutex};
|
||||
use util::{H256, Mutex, version};
|
||||
|
||||
#[cfg(feature = "ui")]
|
||||
mod signer {
|
||||
@ -107,21 +107,32 @@ impl ws::Handler for Session {
|
||||
fn on_request(&mut self, req: &ws::Request) -> ws::Result<(ws::Response)> {
|
||||
let origin = req.header("origin").or_else(|| req.header("Origin")).map(|x| &x[..]);
|
||||
let host = req.header("host").or_else(|| req.header("Host")).map(|x| &x[..]);
|
||||
// Styles file is allowed for error pages to display nicely.
|
||||
let is_styles_file = req.resource() == "/styles.css";
|
||||
|
||||
// Check request origin and host header.
|
||||
if !self.skip_origin_validation {
|
||||
if !origin_is_allowed(&self.self_origin, origin) && !(origin.is_none() && origin_is_allowed(&self.self_origin, host)) {
|
||||
let is_valid = origin_is_allowed(&self.self_origin, origin) || (origin.is_none() && origin_is_allowed(&self.self_origin, host));
|
||||
let is_valid = is_styles_file || is_valid;
|
||||
|
||||
if !is_valid {
|
||||
warn!(target: "signer", "Blocked connection to Signer API from untrusted origin.");
|
||||
return Ok(ws::Response::forbidden(format!("You are not allowed to access system ui. Use: http://{}", self.self_origin)));
|
||||
return Ok(error(
|
||||
ErrorType::Forbidden,
|
||||
"URL Blocked",
|
||||
"You are not allowed to access Trusted Signer using this URL.",
|
||||
Some(&format!("Use: http://{}", self.self_origin)),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Detect if it's a websocket request.
|
||||
if req.header("sec-websocket-key").is_some() {
|
||||
// Detect if it's a websocket request
|
||||
// (styles file skips origin validation, so make sure to prevent WS connections on this resource)
|
||||
if req.header("sec-websocket-key").is_some() && !is_styles_file {
|
||||
// Check authorization
|
||||
if !auth_is_valid(&self.authcodes_path, req.protocols()) {
|
||||
info!(target: "signer", "Unauthorized connection to Signer API blocked.");
|
||||
return Ok(ws::Response::forbidden("You are not authorized.".into()));
|
||||
return Ok(error(ErrorType::Forbidden, "Not Authorized", "Request to this API was not authorized.", None));
|
||||
}
|
||||
|
||||
let protocols = req.protocols().expect("Existence checked by authorization.");
|
||||
@ -137,7 +148,7 @@ impl ws::Handler for Session {
|
||||
Ok(signer::handle(req.resource())
|
||||
.map_or_else(
|
||||
// return 404 not found
|
||||
|| add_headers(ws::Response::not_found("Not found".into()), "text/plain"),
|
||||
|| error(ErrorType::NotFound, "Not found", "Requested file was not found.", None),
|
||||
// or serve the file
|
||||
|f| add_headers(ws::Response::ok(f.content.into()), &f.mime)
|
||||
))
|
||||
@ -189,3 +200,24 @@ impl ws::Factory for Factory {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum ErrorType {
|
||||
NotFound,
|
||||
Forbidden,
|
||||
}
|
||||
|
||||
fn error(error: ErrorType, title: &str, message: &str, details: Option<&str>) -> ws::Response {
|
||||
let content = format!(
|
||||
include_str!("./error_tpl.html"),
|
||||
title=title,
|
||||
meta="",
|
||||
message=message,
|
||||
details=details.unwrap_or(""),
|
||||
version=version(),
|
||||
);
|
||||
let res = match error {
|
||||
ErrorType::NotFound => ws::Response::not_found(content),
|
||||
ErrorType::Forbidden => ws::Response::forbidden(content),
|
||||
};
|
||||
add_headers(res, "text/html")
|
||||
}
|
||||
|
@ -1505,11 +1505,13 @@ impl ChainSync {
|
||||
|
||||
// Send RLPs
|
||||
let sent = lucky_peers.len();
|
||||
if sent > 0 {
|
||||
for (peer_id, rlp) in lucky_peers.into_iter() {
|
||||
self.send_packet(io, peer_id, TRANSACTIONS_PACKET, rlp);
|
||||
}
|
||||
|
||||
trace!(target: "sync", "Sent up to {} transactions to {} peers.", transactions.len(), sent);
|
||||
}
|
||||
sent
|
||||
}
|
||||
|
||||
|