Merge pull request #2050 from ethcore/dapps-content

Fetching any content-addressed content
Robert Habermeier 2016-09-08 17:38:37 +02:00 committed by GitHub
commit 8b971966b3
9 changed files with 332 additions and 146 deletions

View File

@@ -39,7 +39,7 @@ clippy = { version = "0.0.85", optional = true}
 serde_codegen = { version = "0.8", optional = true }
 [features]
-default = ["serde_codegen", "extra-dapps", "https-fetch/ca-github-only"]
+default = ["serde_codegen", "extra-dapps"]
 extra-dapps = ["parity-dapps-wallet"]
 nightly = ["serde_macros"]
 dev = ["clippy", "ethcore-rpc/dev", "ethcore-util/dev"]

View File

@@ -38,65 +38,65 @@ use handlers::{ContentHandler, ContentFetcherHandler, ContentValidator};
 use endpoint::{Endpoint, EndpointPath, Handler};
 use apps::cache::{ContentCache, ContentStatus};
 use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest, serialize_manifest, Manifest};
-use apps::urlhint::{URLHintContract, URLHint};
+use apps::urlhint::{URLHintContract, URLHint, URLHintResult};
 const MAX_CACHED_DAPPS: usize = 10;
-pub struct AppFetcher<R: URLHint = URLHintContract> {
+pub struct ContentFetcher<R: URLHint = URLHintContract> {
 	dapps_path: PathBuf,
 	resolver: R,
+	cache: Arc<Mutex<ContentCache>>,
 	sync: Arc<SyncStatus>,
-	dapps: Arc<Mutex<ContentCache>>,
 }
-impl<R: URLHint> Drop for AppFetcher<R> {
+impl<R: URLHint> Drop for ContentFetcher<R> {
 	fn drop(&mut self) {
 		// Clear cache path
 		let _ = fs::remove_dir_all(&self.dapps_path);
 	}
 }
-impl<R: URLHint> AppFetcher<R> {
+impl<R: URLHint> ContentFetcher<R> {
 	pub fn new(resolver: R, sync_status: Arc<SyncStatus>) -> Self {
 		let mut dapps_path = env::temp_dir();
 		dapps_path.push(random_filename());
-		AppFetcher {
+		ContentFetcher {
 			dapps_path: dapps_path,
 			resolver: resolver,
 			sync: sync_status,
-			dapps: Arc::new(Mutex::new(ContentCache::default())),
+			cache: Arc::new(Mutex::new(ContentCache::default())),
 		}
 	}
 	#[cfg(test)]
-	fn set_status(&self, app_id: &str, status: ContentStatus) {
-		self.dapps.lock().insert(app_id.to_owned(), status);
+	fn set_status(&self, content_id: &str, status: ContentStatus) {
+		self.cache.lock().insert(content_id.to_owned(), status);
 	}
-	pub fn contains(&self, app_id: &str) -> bool {
-		let mut dapps = self.dapps.lock();
+	pub fn contains(&self, content_id: &str) -> bool {
+		let mut cache = self.cache.lock();
 		// Check if we already have the app
-		if dapps.get(app_id).is_some() {
+		if cache.get(content_id).is_some() {
 			return true;
 		}
 		// fallback to resolver
-		if let Ok(app_id) = app_id.from_hex() {
+		if let Ok(content_id) = content_id.from_hex() {
 			// if app_id is valid, but we are syncing always return true.
 			if self.sync.is_major_syncing() {
 				return true;
 			}
 			// else try to resolve the app_id
-			self.resolver.resolve(app_id).is_some()
+			self.resolver.resolve(content_id).is_some()
 		} else {
 			false
 		}
 	}
 	pub fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
-		let mut dapps = self.dapps.lock();
-		let app_id = path.app_id.clone();
+		let mut cache = self.cache.lock();
+		let content_id = path.app_id.clone();
 		if self.sync.is_major_syncing() {
 			return Box::new(ContentHandler::error(
@@ -108,7 +108,7 @@ impl<R: URLHint> AppFetcher<R> {
 		}
 		let (new_status, handler) = {
-			let status = dapps.get(&app_id);
+			let status = cache.get(&content_id);
 			match status {
 				// Just server dapp
 				Some(&mut ContentStatus::Ready(ref endpoint)) => {
@@ -125,24 +125,40 @@ impl<R: URLHint> AppFetcher<R> {
 				},
 				// We need to start fetching app
 				None => {
-					let app_hex = app_id.from_hex().expect("to_handler is called only when `contains` returns true.");
-					let app = self.resolver.resolve(app_hex);
-					if let Some(app) = app {
-						let abort = Arc::new(AtomicBool::new(false));
-						(Some(ContentStatus::Fetching(abort.clone())), Box::new(ContentFetcherHandler::new(
-							app,
-							abort,
-							control,
-							path.using_dapps_domains,
-							DappInstaller {
-								dapp_id: app_id.clone(),
-								dapps_path: self.dapps_path.clone(),
-								dapps: self.dapps.clone(),
-							}
-						)) as Box<Handler>)
-					} else {
+					let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
+					let content = self.resolver.resolve(content_hex);
+					let abort = Arc::new(AtomicBool::new(false));
+					match content {
+						Some(URLHintResult::Dapp(dapp)) => (
+							Some(ContentStatus::Fetching(abort.clone())),
+							Box::new(ContentFetcherHandler::new(
+								dapp.url(),
+								abort,
+								control,
+								path.using_dapps_domains,
+								DappInstaller {
+									id: content_id.clone(),
+									dapps_path: self.dapps_path.clone(),
+									cache: self.cache.clone(),
+								})) as Box<Handler>
+						),
+						Some(URLHintResult::Content(content)) => (
+							Some(ContentStatus::Fetching(abort.clone())),
+							Box::new(ContentFetcherHandler::new(
+								content.url,
+								abort,
+								control,
+								path.using_dapps_domains,
+								ContentInstaller {
+									id: content_id.clone(),
+									mime: content.mime,
+									content_path: self.dapps_path.clone(),
+									cache: self.cache.clone(),
+								}
+							)) as Box<Handler>,
+						),
+						None => {
 							// This may happen when sync status changes in between
 							// `contains` and `to_handler`
 							(None, Box::new(ContentHandler::error(
@@ -151,14 +167,15 @@ impl<R: URLHint> AppFetcher<R> {
 								"Requested resource was not found.",
 								None
 							)) as Box<Handler>)
+						},
 					}
 				},
 			}
 		};
 		if let Some(status) = new_status {
-			dapps.clear_garbage(MAX_CACHED_DAPPS);
-			dapps.insert(app_id, status);
+			cache.clear_garbage(MAX_CACHED_DAPPS);
+			cache.insert(content_id, status);
 		}
 		handler
@@ -169,7 +186,7 @@ impl<R: URLHint> AppFetcher<R> {
 pub enum ValidationError {
 	Io(io::Error),
 	Zip(zip::result::ZipError),
-	InvalidDappId,
+	InvalidContentId,
 	ManifestNotFound,
 	ManifestSerialization(String),
 	HashMismatch { expected: H256, got: H256, },
@@ -180,7 +197,7 @@ impl fmt::Display for ValidationError {
 		match *self {
 			ValidationError::Io(ref io) => write!(f, "Unexpected IO error occured: {:?}", io),
 			ValidationError::Zip(ref zip) => write!(f, "Unable to read ZIP archive: {:?}", zip),
-			ValidationError::InvalidDappId => write!(f, "Dapp ID is invalid. It should be 32 bytes hash of content."),
+			ValidationError::InvalidContentId => write!(f, "ID is invalid. It should be 256 bits keccak hash of content."),
			ValidationError::ManifestNotFound => write!(f, "Downloaded Dapp bundle did not contain valid manifest.json file."),
 			ValidationError::ManifestSerialization(ref err) => {
 				write!(f, "There was an error during Dapp Manifest serialization: {:?}", err)
@@ -204,10 +221,55 @@ impl From<zip::result::ZipError> for ValidationError {
 	}
 }
+struct ContentInstaller {
+	id: String,
+	mime: String,
+	content_path: PathBuf,
+	cache: Arc<Mutex<ContentCache>>,
+}
+impl ContentValidator for ContentInstaller {
+	type Error = ValidationError;
+	type Result = PathBuf;
+	fn validate_and_install(&self, path: PathBuf) -> Result<(String, PathBuf), ValidationError> {
+		// Create dir
+		try!(fs::create_dir_all(&self.content_path));
+		// And prepare path for a file
+		let filename = path.file_name().expect("We always fetch a file.");
+		let mut content_path = self.content_path.clone();
+		content_path.push(&filename);
+		if content_path.exists() {
+			try!(fs::remove_dir_all(&content_path))
+		}
+		try!(fs::copy(&path, &content_path));
+		Ok((self.id.clone(), content_path))
+	}
+	fn done(&self, result: Option<&PathBuf>) {
+		let mut cache = self.cache.lock();
+		match result {
+			Some(result) => {
+				let page = LocalPageEndpoint::single_file(result.clone(), self.mime.clone());
+				cache.insert(self.id.clone(), ContentStatus::Ready(page));
+			},
+			// In case of error
+			None => {
+				cache.remove(&self.id);
+			},
+		}
+	}
+}
 struct DappInstaller {
-	dapp_id: String,
+	id: String,
 	dapps_path: PathBuf,
-	dapps: Arc<Mutex<ContentCache>>,
+	cache: Arc<Mutex<ContentCache>>,
 }
 impl DappInstaller {
@@ -244,15 +306,16 @@ impl DappInstaller {
 impl ContentValidator for DappInstaller {
 	type Error = ValidationError;
+	type Result = Manifest;
-	fn validate_and_install(&self, app_path: PathBuf) -> Result<Manifest, ValidationError> {
+	fn validate_and_install(&self, app_path: PathBuf) -> Result<(String, Manifest), ValidationError> {
 		trace!(target: "dapps", "Opening dapp bundle at {:?}", app_path);
 		let mut file_reader = io::BufReader::new(try!(fs::File::open(app_path)));
 		let hash = try!(sha3(&mut file_reader));
-		let dapp_id = try!(self.dapp_id.as_str().parse().map_err(|_| ValidationError::InvalidDappId));
-		if dapp_id != hash {
+		let id = try!(self.id.as_str().parse().map_err(|_| ValidationError::InvalidContentId));
+		if id != hash {
 			return Err(ValidationError::HashMismatch {
-				expected: dapp_id,
+				expected: id,
 				got: hash,
 			});
 		}
@@ -262,7 +325,7 @@ impl ContentValidator for DappInstaller {
 		// First find manifest file
 		let (mut manifest, manifest_dir) = try!(Self::find_manifest(&mut zip));
 		// Overwrite id to match hash
-		manifest.id = self.dapp_id.clone();
+		manifest.id = self.id.clone();
 		let target = self.dapp_target_path(&manifest);
@@ -300,20 +363,20 @@ impl ContentValidator for DappInstaller {
 		try!(manifest_file.write_all(manifest_str.as_bytes()));
 		// Return modified app manifest
-		Ok(manifest)
+		Ok((manifest.id.clone(), manifest))
 	}
 	fn done(&self, manifest: Option<&Manifest>) {
-		let mut dapps = self.dapps.lock();
+		let mut cache = self.cache.lock();
 		match manifest {
 			Some(manifest) => {
 				let path = self.dapp_target_path(manifest);
 				let app = LocalPageEndpoint::new(path, manifest.clone().into());
-				dapps.insert(self.dapp_id.clone(), ContentStatus::Ready(app));
+				cache.insert(self.id.clone(), ContentStatus::Ready(app));
 			},
 			// In case of error
 			None => {
-				dapps.remove(&self.dapp_id);
+				cache.remove(&self.id);
 			},
 		}
 	}
@@ -327,12 +390,12 @@ mod tests {
 	use endpoint::EndpointInfo;
 	use page::LocalPageEndpoint;
 	use apps::cache::ContentStatus;
-	use apps::urlhint::{GithubApp, URLHint};
-	use super::AppFetcher;
+	use apps::urlhint::{URLHint, URLHintResult};
+	use super::ContentFetcher;
 	struct FakeResolver;
 	impl URLHint for FakeResolver {
-		fn resolve(&self, _app_id: Bytes) -> Option<GithubApp> {
+		fn resolve(&self, _id: Bytes) -> Option<URLHintResult> {
 			None
 		}
 	}
@@ -341,7 +404,7 @@ mod tests {
 	fn should_true_if_contains_the_app() {
 		// given
 		let path = env::temp_dir();
-		let fetcher = AppFetcher::new(FakeResolver, Arc::new(|| false));
+		let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false));
 		let handler = LocalPageEndpoint::new(path, EndpointInfo {
 			name: "fake".into(),
 			description: "".into(),
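The hunk above rewires ContentFetcher::to_async_handler to dispatch on what the URLHint resolver returns: dapp bundles go through DappInstaller, raw content through ContentInstaller. As a rough, standalone illustration of that decision (the names UrlHintEntry, Installer and pick_installer below are hypothetical, not code from this PR):

// Illustrative sketch only; mirrors the dispatch shape, not the real Parity types.
#[derive(Debug)]
enum UrlHintEntry {
	// GitHub-hosted dapp bundle: unzip, verify the hash, serve as a dapp.
	Dapp { zip_url: String },
	// Arbitrary content-addressed file: store as-is, serve with this mime type.
	Content { url: String, mime: String },
}

#[derive(Debug)]
enum Installer {
	DappInstaller { id: String },
	ContentInstaller { id: String, mime: String },
}

// The resolver result decides which installer the fetch handler is built with.
fn pick_installer(id: &str, entry: UrlHintEntry) -> (String, Installer) {
	match entry {
		UrlHintEntry::Dapp { zip_url } => (zip_url, Installer::DappInstaller { id: id.to_owned() }),
		UrlHintEntry::Content { url, mime } => (url, Installer::ContentInstaller { id: id.to_owned(), mime }),
	}
}

fn main() {
	let entry = UrlHintEntry::Content {
		url: "https://example.org/logo.png".into(),
		mime: "image/png".into(),
	};
	let (url, installer) = pick_installer("deadbeef", entry);
	println!("fetching {} with {:?}", url, installer);
}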

View File

@@ -17,6 +17,7 @@
 use std::fmt;
 use std::sync::Arc;
 use rustc_serialize::hex::ToHex;
+use mime_guess;
 use ethabi::{Interface, Contract, Token};
 use util::{Address, Bytes, Hashable};
@@ -52,6 +53,13 @@ impl GithubApp {
 	}
 }
+#[derive(Debug, PartialEq)]
+pub struct Content {
+	pub url: String,
+	pub mime: String,
+	pub owner: Address,
+}
 /// RAW Contract interface.
 /// Should execute transaction using current blockchain state.
 pub trait ContractClient: Send + Sync {
@@ -61,10 +69,19 @@ pub trait ContractClient: Send + Sync {
 	fn call(&self, address: Address, data: Bytes) -> Result<Bytes, String>;
 }
+/// Result of resolving id to URL
+#[derive(Debug, PartialEq)]
+pub enum URLHintResult {
+	/// Dapp
+	Dapp(GithubApp),
+	/// Content
+	Content(Content),
+}
 /// URLHint Contract interface
 pub trait URLHint {
 	/// Resolves given id to registrar entry.
-	fn resolve(&self, app_id: Bytes) -> Option<GithubApp>;
+	fn resolve(&self, id: Bytes) -> Option<URLHintResult>;
 }
 pub struct URLHintContract {
@@ -110,10 +127,10 @@ impl URLHintContract {
 		}
 	}
-	fn encode_urlhint_call(&self, app_id: Bytes) -> Option<Bytes> {
+	fn encode_urlhint_call(&self, id: Bytes) -> Option<Bytes> {
 		let call = self.urlhint
 			.function("entries".into())
-			.and_then(|f| f.encode_call(vec![Token::FixedBytes(app_id)]));
+			.and_then(|f| f.encode_call(vec![Token::FixedBytes(id)]));
 		match call {
 			Ok(res) => {
@@ -126,7 +143,7 @@ impl URLHintContract {
 		}
 	}
-	fn decode_urlhint_output(&self, output: Bytes) -> Option<GithubApp> {
+	fn decode_urlhint_output(&self, output: Bytes) -> Option<URLHintResult> {
 		trace!(target: "dapps", "Output: {:?}", output.to_hex());
 		let output = self.urlhint
 			.function("entries".into())
@@ -149,6 +166,17 @@ impl URLHintContract {
 				if owner == Address::default() {
 					return None;
 				}
+				let commit = GithubApp::commit(&commit);
+				if commit == Some(Default::default()) {
+					let mime = guess_mime_type(&account_slash_repo).unwrap_or("application/octet-stream".into());
+					return Some(URLHintResult::Content(Content {
+						url: account_slash_repo,
+						mime: mime,
+						owner: owner,
+					}));
+				}
 				let (account, repo) = {
 					let mut it = account_slash_repo.split('/');
 					match (it.next(), it.next()) {
@@ -157,12 +185,12 @@ impl URLHintContract {
 					}
 				};
-				GithubApp::commit(&commit).map(|commit| GithubApp {
+				commit.map(|commit| URLHintResult::Dapp(GithubApp {
 					account: account,
 					repo: repo,
 					commit: commit,
 					owner: owner,
-				})
+				}))
 			},
 			e => {
 				warn!(target: "dapps", "Invalid contract output parameters: {:?}", e);
@@ -177,10 +205,10 @@ impl URLHintContract {
 }
 impl URLHint for URLHintContract {
-	fn resolve(&self, app_id: Bytes) -> Option<GithubApp> {
+	fn resolve(&self, id: Bytes) -> Option<URLHintResult> {
 		self.urlhint_address().and_then(|address| {
 			// Prepare contract call
-			self.encode_urlhint_call(app_id)
+			self.encode_urlhint_call(id)
 				.and_then(|data| {
 					let call = self.client.call(address, data);
 					if let Err(ref e) = call {
@@ -193,6 +221,34 @@ impl URLHint for URLHintContract {
 	}
 }
+fn guess_mime_type(url: &str) -> Option<String> {
+	const CONTENT_TYPE: &'static str = "content-type=";
+	let mut it = url.split('#');
+	// skip url
+	let url = it.next();
+	// get meta headers
+	let metas = it.next();
+	if let Some(metas) = metas {
+		for meta in metas.split('&') {
+			let meta = meta.to_lowercase();
+			if meta.starts_with(CONTENT_TYPE) {
+				return Some(meta[CONTENT_TYPE.len()..].to_owned());
+			}
+		}
+	}
+	url.and_then(|url| {
+		url.split('.').last()
+	}).and_then(|extension| {
+		mime_guess::get_mime_type_str(extension).map(Into::into)
+	})
+}
+#[cfg(test)]
+pub fn test_guess_mime_type(url: &str) -> Option<String> {
+	guess_mime_type(url)
+}
 fn as_string<T: fmt::Debug>(e: T) -> String {
 	format!("{:?}", e)
 }
@@ -201,7 +257,7 @@ fn as_string<T: fmt::Debug>(e: T) -> String {
 mod tests {
 	use std::sync::Arc;
 	use std::str::FromStr;
-	use rustc_serialize::hex::{ToHex, FromHex};
+	use rustc_serialize::hex::FromHex;
 	use super::*;
 	use util::{Bytes, Address, Mutex, ToPretty};
@@ -279,12 +335,33 @@ mod tests {
 		let res = urlhint.resolve("test".bytes().collect());
 		// then
-		assert_eq!(res, Some(GithubApp {
+		assert_eq!(res, Some(URLHintResult::Dapp(GithubApp {
 			account: "ethcore".into(),
 			repo: "dao.claim".into(),
 			commit: GithubApp::commit(&"ec4c1fe06c808fe3739858c347109b1f5f1ed4b5".from_hex().unwrap()).unwrap(),
 			owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
-		}))
+		})))
+	}
+	#[test]
+	fn should_decode_urlhint_content_output() {
+		// given
+		let mut registrar = FakeRegistrar::new();
+		registrar.responses = Mutex::new(vec![
+			Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
+			Ok("00000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff000000000000000000000000000000000000000000000000000000000000003d68747470733a2f2f657468636f72652e696f2f6173736574732f696d616765732f657468636f72652d626c61636b2d686f72697a6f6e74616c2e706e67000000".from_hex().unwrap()),
+		]);
+		let urlhint = URLHintContract::new(Arc::new(registrar));
+		// when
+		let res = urlhint.resolve("test".bytes().collect());
+		// then
+		assert_eq!(res, Some(URLHintResult::Content(Content {
+			url: "https://ethcore.io/assets/images/ethcore-black-horizontal.png".into(),
+			mime: "image/png".into(),
+			owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
+		})))
 	}
 	#[test]
@@ -303,4 +380,20 @@ mod tests {
 		// then
 		assert_eq!(url, "https://codeload.github.com/test/xyz/zip/000102030405060708090a0b0c0d0e0f10111213".to_owned());
 	}
+	#[test]
+	fn should_guess_mime_type_from_url() {
+		let url1 = "https://ethcore.io/parity";
+		let url2 = "https://ethcore.io/parity#content-type=image/png";
+		let url3 = "https://ethcore.io/parity#something&content-type=image/png";
+		let url4 = "https://ethcore.io/parity.png#content-type=image/jpeg";
+		let url5 = "https://ethcore.io/parity.png";
+		assert_eq!(test_guess_mime_type(url1), None);
+		assert_eq!(test_guess_mime_type(url2), Some("image/png".into()));
+		assert_eq!(test_guess_mime_type(url3), Some("image/png".into()));
+		assert_eq!(test_guess_mime_type(url4), Some("image/jpeg".into()));
+		assert_eq!(test_guess_mime_type(url5), Some("image/png".into()));
+	}
 }
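The new guess_mime_type helper above follows a simple convention: a "#content-type=..." fragment in the registered URL overrides extension-based guessing, and only if no such meta is present does the file extension decide. A rough standalone sketch of that convention, where the small extension table merely stands in for the mime_guess crate lookup:

// Illustrative sketch of the "#content-type=" override, not the real implementation.
fn guess_mime(url: &str) -> Option<String> {
	const CONTENT_TYPE: &str = "content-type=";
	let mut parts = url.split('#');
	let path = parts.next();
	// A "content-type=..." meta in the fragment (possibly among other "&"-separated metas) wins.
	if let Some(metas) = parts.next() {
		for meta in metas.split('&') {
			let meta = meta.to_lowercase();
			if meta.starts_with(CONTENT_TYPE) {
				return Some(meta[CONTENT_TYPE.len()..].to_owned());
			}
		}
	}
	// Otherwise fall back to the file extension.
	path.and_then(|p| p.split('.').last()).and_then(|ext| match ext {
		"png" => Some("image/png".to_owned()),
		"html" => Some("text/html".to_owned()),
		_ => None,
	})
}

fn main() {
	assert_eq!(guess_mime("https://ethcore.io/parity.png"), Some("image/png".into()));
	assert_eq!(guess_mime("https://ethcore.io/parity#content-type=image/jpeg"), Some("image/jpeg".into()));
	assert_eq!(guess_mime("https://ethcore.io/parity"), None);
}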

View File

@@ -63,7 +63,7 @@ impl Client {
 		self.https_client.close();
 	}
-	pub fn request(&mut self, url: String, abort: Arc<AtomicBool>, on_done: Box<Fn() + Send>) -> Result<mpsc::Receiver<FetchResult>, FetchError> {
+	pub fn request(&mut self, url: &str, abort: Arc<AtomicBool>, on_done: Box<Fn() + Send>) -> Result<mpsc::Receiver<FetchResult>, FetchError> {
 		let is_https = url.starts_with("https://");
 		let url = try!(url.parse().map_err(|_| FetchError::InvalidUrl));
 		trace!(target: "dapps", "Fetching from: {:?}", url);

View File

@@ -16,7 +16,7 @@
 //! Hyper Server Handler that fetches a file during a request (proxy).
-use std::fmt;
+use std::{fs, fmt};
 use std::path::PathBuf;
 use std::sync::{mpsc, Arc};
 use std::sync::atomic::AtomicBool;
@@ -29,51 +29,50 @@ use hyper::status::StatusCode;
 use handlers::ContentHandler;
 use handlers::client::{Client, FetchResult};
 use apps::redirection_address;
-use apps::urlhint::GithubApp;
-use apps::manifest::Manifest;
 const FETCH_TIMEOUT: u64 = 30;
-enum FetchState {
-	NotStarted(GithubApp),
+enum FetchState<T: fmt::Debug> {
+	NotStarted(String),
 	Error(ContentHandler),
 	InProgress {
 		deadline: Instant,
 		receiver: mpsc::Receiver<FetchResult>,
 	},
-	Done(Manifest),
+	Done((String, T)),
 }
 pub trait ContentValidator {
 	type Error: fmt::Debug + fmt::Display;
+	type Result: fmt::Debug;
-	fn validate_and_install(&self, app: PathBuf) -> Result<Manifest, Self::Error>;
-	fn done(&self, Option<&Manifest>);
+	fn validate_and_install(&self, app: PathBuf) -> Result<(String, Self::Result), Self::Error>;
+	fn done(&self, Option<&Self::Result>);
 }
 pub struct ContentFetcherHandler<H: ContentValidator> {
 	abort: Arc<AtomicBool>,
 	control: Option<Control>,
-	status: FetchState,
+	status: FetchState<H::Result>,
 	client: Option<Client>,
 	using_dapps_domains: bool,
-	dapp: H,
+	installer: H,
 }
 impl<H: ContentValidator> Drop for ContentFetcherHandler<H> {
 	fn drop(&mut self) {
-		let manifest = match self.status {
-			FetchState::Done(ref manifest) => Some(manifest),
+		let result = match self.status {
+			FetchState::Done((_, ref result)) => Some(result),
 			_ => None,
 		};
-		self.dapp.done(manifest);
+		self.installer.done(result);
 	}
 }
 impl<H: ContentValidator> ContentFetcherHandler<H> {
 	pub fn new(
-		app: GithubApp,
+		url: String,
 		abort: Arc<AtomicBool>,
 		control: Control,
 		using_dapps_domains: bool,
@@ -84,9 +83,9 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
 			abort: abort,
 			control: Some(control),
 			client: Some(client),
-			status: FetchState::NotStarted(app),
+			status: FetchState::NotStarted(url),
 			using_dapps_domains: using_dapps_domains,
-			dapp: handler,
+			installer: handler,
 		}
 	}
@@ -97,8 +96,8 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
 	}
-	fn fetch_app(client: &mut Client, app: &GithubApp, abort: Arc<AtomicBool>, control: Control) -> Result<mpsc::Receiver<FetchResult>, String> {
-		client.request(app.url(), abort, Box::new(move || {
+	fn fetch_content(client: &mut Client, url: &str, abort: Arc<AtomicBool>, control: Control) -> Result<mpsc::Receiver<FetchResult>, String> {
+		client.request(url, abort, Box::new(move || {
 			trace!(target: "dapps", "Fetching finished.");
 			// Ignoring control errors
 			let _ = control.ready(Next::read());
@@ -108,14 +107,14 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
 impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<H> {
 	fn on_request(&mut self, request: server::Request<HttpStream>) -> Next {
-		let status = if let FetchState::NotStarted(ref app) = self.status {
+		let status = if let FetchState::NotStarted(ref url) = self.status {
 			Some(match *request.method() {
 				// Start fetching content
 				Method::Get => {
-					trace!(target: "dapps", "Fetching dapp: {:?}", app);
+					trace!(target: "dapps", "Fetching content from: {:?}", url);
 					let control = self.control.take().expect("on_request is called only once, thus control is always Some");
 					let client = self.client.as_mut().expect("on_request is called before client is closed.");
-					let fetch = Self::fetch_app(client, app, self.abort.clone(), control);
+					let fetch = Self::fetch_content(client, url, self.abort.clone(), control);
 					match fetch {
 						Ok(receiver) => FetchState::InProgress {
 							deadline: Instant::now() + Duration::from_secs(FETCH_TIMEOUT),
@@ -154,7 +153,7 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
 				let timeout = ContentHandler::error(
 					StatusCode::GatewayTimeout,
 					"Download Timeout",
-					&format!("Could not fetch dapp bundle within {} seconds.", FETCH_TIMEOUT),
+					&format!("Could not fetch content within {} seconds.", FETCH_TIMEOUT),
 					None
 				);
 				Self::close_client(&mut self.client);
@@ -166,32 +165,31 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
 				match rec {
 					// Unpack and validate
 					Ok(Ok(path)) => {
-						trace!(target: "dapps", "Fetching dapp finished. Starting validation.");
+						trace!(target: "dapps", "Fetching content finished. Starting validation ({:?})", path);
 						Self::close_client(&mut self.client);
 						// Unpack and verify
-						let state = match self.dapp.validate_and_install(path.clone()) {
+						let state = match self.installer.validate_and_install(path.clone()) {
 							Err(e) => {
-								trace!(target: "dapps", "Error while validating dapp: {:?}", e);
+								trace!(target: "dapps", "Error while validating content: {:?}", e);
 								FetchState::Error(ContentHandler::error(
 									StatusCode::BadGateway,
 									"Invalid Dapp",
-									"Downloaded bundle does not contain a valid dapp.",
+									"Downloaded bundle does not contain a valid content.",
 									Some(&format!("{:?}", e))
 								))
 							},
-							Ok(manifest) => FetchState::Done(manifest)
+							Ok(result) => FetchState::Done(result)
 						};
 						// Remove temporary zip file
-						// TODO [todr] Uncomment me
-						// let _ = fs::remove_file(path);
+						let _ = fs::remove_file(path);
 						(Some(state), Next::write())
 					},
 					Ok(Err(e)) => {
-						warn!(target: "dapps", "Unable to fetch new dapp: {:?}", e);
+						warn!(target: "dapps", "Unable to fetch content: {:?}", e);
 						let error = ContentHandler::error(
 							StatusCode::BadGateway,
 							"Download Error",
-							"There was an error when fetching the dapp.",
+							"There was an error when fetching the content.",
 							Some(&format!("{:?}", e)),
 						);
 						(Some(FetchState::Error(error)), Next::write())
@@ -213,10 +211,10 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
 	fn on_response(&mut self, res: &mut server::Response) -> Next {
 		match self.status {
-			FetchState::Done(ref manifest) => {
-				trace!(target: "dapps", "Fetching dapp finished. Redirecting to {}", manifest.id);
+			FetchState::Done((ref id, _)) => {
+				trace!(target: "dapps", "Fetching content finished. Redirecting to {}", id);
 				res.set_status(StatusCode::Found);
-				res.headers_mut().set(header::Location(redirection_address(self.using_dapps_domains, &manifest.id)));
+				res.headers_mut().set(header::Location(redirection_address(self.using_dapps_domains, id)));
 				Next::write()
 			},
 			FetchState::Error(ref mut handler) => handler.on_response(res),
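With this change the handler is generic over its installer: ContentValidator gains an associated Result type, validate_and_install returns the id to redirect to together with that result, and FetchState::Done carries the pair. A rough, self-contained sketch of an installer written against the new shape (CopyInstaller and its details are illustrative, not code from this PR):

// Illustrative sketch of the reshaped ContentValidator contract.
use std::fs;
use std::path::PathBuf;

trait ContentValidator {
	type Error: std::fmt::Debug;
	type Result: std::fmt::Debug;
	// Returns the id the handler should redirect to, plus an installer-specific result.
	fn validate_and_install(&self, downloaded: PathBuf) -> Result<(String, Self::Result), Self::Error>;
	fn done(&self, result: Option<&Self::Result>);
}

struct CopyInstaller {
	id: String,
	target_dir: PathBuf,
}

impl ContentValidator for CopyInstaller {
	type Error = std::io::Error;
	type Result = PathBuf;

	fn validate_and_install(&self, downloaded: PathBuf) -> Result<(String, PathBuf), Self::Error> {
		// Copy the fetched file into the cache directory under its id.
		fs::create_dir_all(&self.target_dir)?;
		let target = self.target_dir.join(&self.id);
		fs::copy(&downloaded, &target)?;
		Ok((self.id.clone(), target))
	}

	fn done(&self, result: Option<&PathBuf>) {
		// The real installers update the shared content cache here; the sketch just logs.
		println!("installed: {:?}", result);
	}
}

fn main() -> std::io::Result<()> {
	let installer = CopyInstaller {
		id: "deadbeef".into(),
		target_dir: std::env::temp_dir().join("content-cache"),
	};
	let source = std::env::temp_dir().join("downloaded.bin");
	fs::write(&source, b"hello")?;
	let (id, path) = installer.validate_and_install(source)?;
	installer.done(Some(&path));
	println!("redirect to /{}", id);
	Ok(())
}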

View File

@@ -191,7 +191,7 @@ impl Server {
 	) -> Result<Server, ServerError> {
 		let panic_handler = Arc::new(Mutex::new(None));
 		let authorization = Arc::new(authorization);
-		let apps_fetcher = Arc::new(apps::fetcher::AppFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status));
+		let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status));
 		let endpoints = Arc::new(apps::all_endpoints(dapps_path));
 		let special = Arc::new({
 			let mut special = HashMap::new();
@@ -206,7 +206,7 @@ impl Server {
 			.handle(move |ctrl| router::Router::new(
 				ctrl,
 				apps::main_page(),
-				apps_fetcher.clone(),
+				content_fetcher.clone(),
 				endpoints.clone(),
 				special.clone(),
 				authorization.clone(),

View File

@@ -17,20 +17,30 @@
 use mime_guess;
 use std::io::{Seek, Read, SeekFrom};
 use std::fs;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 use page::handler;
 use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler};
 pub struct LocalPageEndpoint {
 	path: PathBuf,
-	info: EndpointInfo,
+	mime: Option<String>,
+	info: Option<EndpointInfo>,
 }
 impl LocalPageEndpoint {
 	pub fn new(path: PathBuf, info: EndpointInfo) -> Self {
 		LocalPageEndpoint {
 			path: path,
-			info: info,
+			mime: None,
+			info: Some(info),
+		}
+	}
+	pub fn single_file(path: PathBuf, mime: String) -> Self {
+		LocalPageEndpoint {
+			path: path,
+			mime: Some(mime),
+			info: None,
 		}
 	}
@@ -41,32 +51,47 @@ impl LocalPageEndpoint {
 impl Endpoint for LocalPageEndpoint {
 	fn info(&self) -> Option<&EndpointInfo> {
-		Some(&self.info)
+		self.info.as_ref()
 	}
 	fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
+		if let Some(ref mime) = self.mime {
 			Box::new(handler::PageHandler {
-			app: LocalDapp::new(self.path.clone()),
+				app: LocalSingleFile { path: self.path.clone(), mime: mime.clone() },
+				prefix: None,
+				path: path,
+				file: Default::default(),
+				safe_to_embed: false,
+			})
+		} else {
+			Box::new(handler::PageHandler {
+				app: LocalDapp { path: self.path.clone() },
 				prefix: None,
 				path: path,
 				file: Default::default(),
 				safe_to_embed: false,
 			})
+		}
 	}
 }
+struct LocalSingleFile {
+	path: PathBuf,
+	mime: String,
+}
+impl handler::Dapp for LocalSingleFile {
+	type DappFile = LocalFile;
+	fn file(&self, _path: &str) -> Option<Self::DappFile> {
+		LocalFile::from_path(&self.path, Some(&self.mime))
+	}
+}
 struct LocalDapp {
 	path: PathBuf,
 }
-impl LocalDapp {
-	fn new(path: PathBuf) -> Self {
-		LocalDapp {
-			path: path
-		}
-	}
-}
 impl handler::Dapp for LocalDapp {
 	type DappFile = LocalFile;
@@ -75,18 +100,7 @@ impl handler::Dapp for LocalDapp {
 		for part in file_path.split('/') {
 			path.push(part);
 		}
-		// Check if file exists
-		fs::File::open(path.clone()).ok().map(|file| {
-			let content_type = mime_guess::guess_mime_type(path);
-			let len = file.metadata().ok().map_or(0, |meta| meta.len());
-			LocalFile {
-				content_type: content_type.to_string(),
-				buffer: [0; 4096],
-				file: file,
-				pos: 0,
-				len: len,
-			}
-		})
+		LocalFile::from_path(&path, None)
 	}
 }
@@ -98,6 +112,24 @@ struct LocalFile {
 	pos: u64,
 }
+impl LocalFile {
+	fn from_path<P: AsRef<Path>>(path: P, mime: Option<&str>) -> Option<Self> {
+		// Check if file exists
+		fs::File::open(&path).ok().map(|file| {
+			let content_type = mime.map(|mime| mime.to_owned())
+				.unwrap_or_else(|| mime_guess::guess_mime_type(path).to_string());
+			let len = file.metadata().ok().map_or(0, |meta| meta.len());
+			LocalFile {
+				content_type: content_type,
+				buffer: [0; 4096],
+				file: file,
+				pos: 0,
+				len: len,
+			}
+		})
+	}
+}
 impl handler::DappFile for LocalFile {
 	fn content_type(&self) -> &str {
 		&self.content_type
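LocalPageEndpoint can now be constructed two ways: LocalPageEndpoint::new for a dapp directory (mime guessed per file) and LocalPageEndpoint::single_file for one fetched file served with the mime recorded at install time. A rough standalone sketch of the two serving modes (ServeMode and content_type_for are illustrative names, not the real API):

// Illustrative sketch only; shows the single-file vs. directory distinction.
use std::path::PathBuf;

enum ServeMode {
	Directory { root: PathBuf },
	SingleFile { file: PathBuf, mime: String },
}

fn content_type_for(mode: &ServeMode, request_path: &str) -> (PathBuf, String) {
	match mode {
		// Every request maps to the same file and the fixed mime type.
		ServeMode::SingleFile { file, mime } => (file.clone(), mime.clone()),
		// Requests map into the directory; the mime is guessed from the extension.
		ServeMode::Directory { root } => {
			let path = root.join(request_path);
			let mime = match path.extension().and_then(|e| e.to_str()) {
				Some("html") => "text/html",
				Some("png") => "image/png",
				_ => "application/octet-stream",
			};
			(path, mime.to_owned())
		}
	}
}

fn main() {
	let single = ServeMode::SingleFile { file: "/tmp/logo.png".into(), mime: "image/png".into() };
	let (path, mime) = content_type_for(&single, "ignored/index.html");
	println!("{:?} -> {}", path, mime);
}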

View File

@@ -27,7 +27,7 @@ use url::{Url, Host};
 use hyper::{self, server, Next, Encoder, Decoder, Control, StatusCode};
 use hyper::net::HttpStream;
 use apps;
-use apps::fetcher::AppFetcher;
+use apps::fetcher::ContentFetcher;
 use endpoint::{Endpoint, Endpoints, EndpointPath};
 use handlers::{Redirection, extract_url, ContentHandler};
 use self::auth::{Authorization, Authorized};
@@ -45,7 +45,7 @@ pub struct Router<A: Authorization + 'static> {
 	control: Option<Control>,
 	main_page: &'static str,
 	endpoints: Arc<Endpoints>,
-	fetch: Arc<AppFetcher>,
+	fetch: Arc<ContentFetcher>,
 	special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
 	authorization: Arc<A>,
 	allowed_hosts: Option<Vec<String>>,
@@ -136,7 +136,7 @@ impl<A: Authorization> Router<A> {
 	pub fn new(
 		control: Control,
 		main_page: &'static str,
-		app_fetcher: Arc<AppFetcher>,
+		content_fetcher: Arc<ContentFetcher>,
 		endpoints: Arc<Endpoints>,
 		special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
 		authorization: Arc<A>,
@@ -148,7 +148,7 @@ impl<A: Authorization> Router<A> {
 			control: Some(control),
 			main_page: main_page,
 			endpoints: endpoints,
-			fetch: app_fetcher,
+			fetch: content_fetcher,
 			special: special,
 			authorization: authorization,
 			allowed_hosts: allowed_hosts,

View File

@@ -17,7 +17,7 @@
 use std::env;
 use std::str;
 use std::sync::Arc;
-use rustc_serialize::hex::{ToHex, FromHex};
+use rustc_serialize::hex::FromHex;
 use ServerBuilder;
 use Server;