diff --git a/dapps/Cargo.toml b/dapps/Cargo.toml
index 531f7da1b..742d59bf2 100644
--- a/dapps/Cargo.toml
+++ b/dapps/Cargo.toml
@@ -39,7 +39,7 @@ clippy = { version = "0.0.85", optional = true}
 serde_codegen = { version = "0.8", optional = true }
 
 [features]
-default = ["serde_codegen", "extra-dapps", "https-fetch/ca-github-only"]
+default = ["serde_codegen", "extra-dapps"]
 extra-dapps = ["parity-dapps-wallet"]
 nightly = ["serde_macros"]
 dev = ["clippy", "ethcore-rpc/dev", "ethcore-util/dev"]
diff --git a/dapps/src/apps/fetcher.rs b/dapps/src/apps/fetcher.rs
index 502fbe4aa..8c0e7c421 100644
--- a/dapps/src/apps/fetcher.rs
+++ b/dapps/src/apps/fetcher.rs
@@ -38,65 +38,65 @@ use handlers::{ContentHandler, ContentFetcherHandler, ContentValidator};
 use endpoint::{Endpoint, EndpointPath, Handler};
 use apps::cache::{ContentCache, ContentStatus};
 use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest, serialize_manifest, Manifest};
-use apps::urlhint::{URLHintContract, URLHint};
+use apps::urlhint::{URLHintContract, URLHint, URLHintResult};
 
 const MAX_CACHED_DAPPS: usize = 10;
 
-pub struct AppFetcher<R: URLHint = URLHintContract> {
+pub struct ContentFetcher<R: URLHint = URLHintContract> {
 	dapps_path: PathBuf,
 	resolver: R,
+	cache: Arc<Mutex<ContentCache>>,
 	sync: Arc<SyncStatus>,
-	dapps: Arc<Mutex<ContentCache>>,
 }
 
-impl<R: URLHint> Drop for AppFetcher<R> {
+impl<R: URLHint> Drop for ContentFetcher<R> {
 	fn drop(&mut self) {
 		// Clear cache path
 		let _ = fs::remove_dir_all(&self.dapps_path);
 	}
 }
 
-impl<R: URLHint> AppFetcher<R> {
+impl<R: URLHint> ContentFetcher<R> {
 	pub fn new(resolver: R, sync_status: Arc<SyncStatus>) -> Self {
 		let mut dapps_path = env::temp_dir();
 		dapps_path.push(random_filename());
 
-		AppFetcher {
+		ContentFetcher {
 			dapps_path: dapps_path,
 			resolver: resolver,
 			sync: sync_status,
-			dapps: Arc::new(Mutex::new(ContentCache::default())),
+			cache: Arc::new(Mutex::new(ContentCache::default())),
 		}
 	}
 
 	#[cfg(test)]
-	fn set_status(&self, app_id: &str, status: ContentStatus) {
-		self.dapps.lock().insert(app_id.to_owned(), status);
+	fn set_status(&self, content_id: &str, status: ContentStatus) {
+		self.cache.lock().insert(content_id.to_owned(), status);
 	}
 
-	pub fn contains(&self, app_id: &str) -> bool {
-		let mut dapps = self.dapps.lock();
+	pub fn contains(&self, content_id: &str) -> bool {
+		let mut cache = self.cache.lock();
 		// Check if we already have the app
-		if dapps.get(app_id).is_some() {
+		if cache.get(content_id).is_some() {
 			return true;
 		}
 		// fallback to resolver
-		if let Ok(app_id) = app_id.from_hex() {
+		if let Ok(content_id) = content_id.from_hex() {
 			// if app_id is valid, but we are syncing always return true.
			if self.sync.is_major_syncing() {
				return true;
			}
			// else try to resolve the app_id
-			self.resolver.resolve(app_id).is_some()
+			self.resolver.resolve(content_id).is_some()
		} else {
			false
		}
	}
 
	pub fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
-		let mut dapps = self.dapps.lock();
-		let app_id = path.app_id.clone();
+		let mut cache = self.cache.lock();
+		let content_id = path.app_id.clone();
 
		if self.sync.is_major_syncing() {
			return Box::new(ContentHandler::error(
@@ -108,7 +108,7 @@ impl AppFetcher {
		}
 
		let (new_status, handler) = {
-			let status = dapps.get(&app_id);
+			let status = cache.get(&content_id);
			match status {
				// Just server dapp
				Some(&mut ContentStatus::Ready(ref endpoint)) => {
@@ -125,40 +125,57 @@ impl AppFetcher {
				},
				// We need to start fetching app
				None => {
-					let app_hex = app_id.from_hex().expect("to_handler is called only when `contains` returns true.");
-					let app = self.resolver.resolve(app_hex);
+					let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
+					let content = self.resolver.resolve(content_hex);
+					let abort = Arc::new(AtomicBool::new(false));
 
-					if let Some(app) = app {
-						let abort = Arc::new(AtomicBool::new(false));
-
-						(Some(ContentStatus::Fetching(abort.clone())), Box::new(ContentFetcherHandler::new(
-							app,
-							abort,
-							control,
-							path.using_dapps_domains,
-							DappInstaller {
-								dapp_id: app_id.clone(),
-								dapps_path: self.dapps_path.clone(),
-								dapps: self.dapps.clone(),
-							}
-						)) as Box<Handler>)
-					} else {
-						// This may happen when sync status changes in between
-						// `contains` and `to_handler`
-						(None, Box::new(ContentHandler::error(
-							StatusCode::NotFound,
-							"Resource Not Found",
-							"Requested resource was not found.",
-							None
-						)) as Box<Handler>)
+					match content {
+						Some(URLHintResult::Dapp(dapp)) => (
+							Some(ContentStatus::Fetching(abort.clone())),
+							Box::new(ContentFetcherHandler::new(
+								dapp.url(),
+								abort,
+								control,
+								path.using_dapps_domains,
+								DappInstaller {
+									id: content_id.clone(),
+									dapps_path: self.dapps_path.clone(),
+									cache: self.cache.clone(),
+								})) as Box<Handler>
+						),
+						Some(URLHintResult::Content(content)) => (
+							Some(ContentStatus::Fetching(abort.clone())),
+							Box::new(ContentFetcherHandler::new(
+								content.url,
+								abort,
+								control,
+								path.using_dapps_domains,
+								ContentInstaller {
+									id: content_id.clone(),
+									mime: content.mime,
+									content_path: self.dapps_path.clone(),
+									cache: self.cache.clone(),
+								}
+							)) as Box<Handler>,
+						),
+						None => {
+							// This may happen when sync status changes in between
+							// `contains` and `to_handler`
+							(None, Box::new(ContentHandler::error(
+								StatusCode::NotFound,
+								"Resource Not Found",
+								"Requested resource was not found.",
+								None
+							)) as Box<Handler>)
+						},
					}
				},
			}
		};
 
		if let Some(status) = new_status {
-			dapps.clear_garbage(MAX_CACHED_DAPPS);
-			dapps.insert(app_id, status);
+			cache.clear_garbage(MAX_CACHED_DAPPS);
+			cache.insert(content_id, status);
		}
 
		handler
@@ -169,7 +186,7 @@ impl AppFetcher {
 pub enum ValidationError {
	Io(io::Error),
	Zip(zip::result::ZipError),
-	InvalidDappId,
+	InvalidContentId,
	ManifestNotFound,
	ManifestSerialization(String),
	HashMismatch { expected: H256, got: H256, },
@@ -180,7 +197,7 @@ impl fmt::Display for ValidationError {
		match *self {
			ValidationError::Io(ref io) => write!(f, "Unexpected IO error occured: {:?}", io),
			ValidationError::Zip(ref zip) => write!(f, "Unable to read ZIP archive: {:?}", zip),
-			ValidationError::InvalidDappId => write!(f, "Dapp ID is invalid. It should be 32 bytes hash of content."),
+			ValidationError::InvalidContentId => write!(f, "ID is invalid. It should be 256 bits keccak hash of content."),
			ValidationError::ManifestNotFound => write!(f, "Downloaded Dapp bundle did not contain valid manifest.json file."),
			ValidationError::ManifestSerialization(ref err) => {
				write!(f, "There was an error during Dapp Manifest serialization: {:?}", err)
@@ -204,10 +221,55 @@ impl From for ValidationError {
	}
 }
 
+struct ContentInstaller {
+	id: String,
+	mime: String,
+	content_path: PathBuf,
+	cache: Arc<Mutex<ContentCache>>,
+}
+
+impl ContentValidator for ContentInstaller {
+	type Error = ValidationError;
+	type Result = PathBuf;
+
+	fn validate_and_install(&self, path: PathBuf) -> Result<(String, PathBuf), ValidationError> {
+		// Create dir
+		try!(fs::create_dir_all(&self.content_path));
+
+		// And prepare path for a file
+		let filename = path.file_name().expect("We always fetch a file.");
+		let mut content_path = self.content_path.clone();
+		content_path.push(&filename);
+
+		if content_path.exists() {
+			try!(fs::remove_dir_all(&content_path))
+		}
+
+		try!(fs::copy(&path, &content_path));
+
+		Ok((self.id.clone(), content_path))
+	}
+
+	fn done(&self, result: Option<&PathBuf>) {
+		let mut cache = self.cache.lock();
+		match result {
+			Some(result) => {
+				let page = LocalPageEndpoint::single_file(result.clone(), self.mime.clone());
+				cache.insert(self.id.clone(), ContentStatus::Ready(page));
+			},
+			// In case of error
+			None => {
+				cache.remove(&self.id);
+			},
+		}
+	}
+}
+
+
 struct DappInstaller {
-	dapp_id: String,
+	id: String,
	dapps_path: PathBuf,
-	dapps: Arc<Mutex<ContentCache>>,
+	cache: Arc<Mutex<ContentCache>>,
 }
 
 impl DappInstaller {
@@ -244,15 +306,16 @@ impl DappInstaller {
 
 impl ContentValidator for DappInstaller {
	type Error = ValidationError;
+	type Result = Manifest;
 
-	fn validate_and_install(&self, app_path: PathBuf) -> Result<Manifest, ValidationError> {
+	fn validate_and_install(&self, app_path: PathBuf) -> Result<(String, Manifest), ValidationError> {
		trace!(target: "dapps", "Opening dapp bundle at {:?}", app_path);
		let mut file_reader = io::BufReader::new(try!(fs::File::open(app_path)));
		let hash = try!(sha3(&mut file_reader));
-		let dapp_id = try!(self.dapp_id.as_str().parse().map_err(|_| ValidationError::InvalidDappId));
-		if dapp_id != hash {
+		let id = try!(self.id.as_str().parse().map_err(|_| ValidationError::InvalidContentId));
+		if id != hash {
			return Err(ValidationError::HashMismatch {
-				expected: dapp_id,
+				expected: id,
				got: hash,
			});
		}
@@ -262,7 +325,7 @@ impl ContentValidator for DappInstaller {
		// First find manifest file
		let (mut manifest, manifest_dir) = try!(Self::find_manifest(&mut zip));
		// Overwrite id to match hash
-		manifest.id = self.dapp_id.clone();
+		manifest.id = self.id.clone();
 
		let target = self.dapp_target_path(&manifest);
 
@@ -300,20 +363,20 @@ impl ContentValidator for DappInstaller {
		try!(manifest_file.write_all(manifest_str.as_bytes()));
 
		// Return modified app manifest
-		Ok(manifest)
+		Ok((manifest.id.clone(), manifest))
	}
 
	fn done(&self, manifest: Option<&Manifest>) {
-		let mut dapps = self.dapps.lock();
+		let mut cache = self.cache.lock();
		match manifest {
			Some(manifest) => {
				let path = self.dapp_target_path(manifest);
				let app = LocalPageEndpoint::new(path, manifest.clone().into());
-				dapps.insert(self.dapp_id.clone(), ContentStatus::Ready(app));
+				cache.insert(self.id.clone(), ContentStatus::Ready(app));
			},
			// In case of error
			None => {
-				dapps.remove(&self.dapp_id);
+				cache.remove(&self.id);
			},
		}
	}
@@ -327,12 +390,12 @@ mod tests {
	use endpoint::EndpointInfo;
	use page::LocalPageEndpoint;
	use apps::cache::ContentStatus;
-	use apps::urlhint::{GithubApp, URLHint};
-	use super::AppFetcher;
+	use apps::urlhint::{URLHint, URLHintResult};
+	use super::ContentFetcher;
 
	struct FakeResolver;
 
	impl URLHint for FakeResolver {
-		fn resolve(&self, _app_id: Bytes) -> Option<GithubApp> {
+		fn resolve(&self, _id: Bytes) -> Option<URLHintResult> {
			None
		}
	}
@@ -341,7 +404,7 @@ mod tests {
	fn should_true_if_contains_the_app() {
		// given
		let path = env::temp_dir();
-		let fetcher = AppFetcher::new(FakeResolver, Arc::new(|| false));
+		let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false));
		let handler = LocalPageEndpoint::new(path, EndpointInfo {
			name: "fake".into(),
			description: "".into(),
diff --git a/dapps/src/apps/urlhint.rs b/dapps/src/apps/urlhint.rs
index f57e5e0d7..2b86c0777 100644
--- a/dapps/src/apps/urlhint.rs
+++ b/dapps/src/apps/urlhint.rs
@@ -17,6 +17,7 @@
 use std::fmt;
 use std::sync::Arc;
 use rustc_serialize::hex::ToHex;
+use mime_guess;
 
 use ethabi::{Interface, Contract, Token};
 use util::{Address, Bytes, Hashable};
@@ -52,6 +53,13 @@ impl GithubApp {
	}
 }
 
+#[derive(Debug, PartialEq)]
+pub struct Content {
+	pub url: String,
+	pub mime: String,
+	pub owner: Address,
+}
+
 /// RAW Contract interface.
 /// Should execute transaction using current blockchain state.
 pub trait ContractClient: Send + Sync {
@@ -61,10 +69,19 @@ pub trait ContractClient: Send + Sync {
	fn call(&self, address: Address, data: Bytes) -> Result;
 }
 
+/// Result of resolving id to URL
+#[derive(Debug, PartialEq)]
+pub enum URLHintResult {
+	/// Dapp
+	Dapp(GithubApp),
+	/// Content
+	Content(Content),
+}
+
 /// URLHint Contract interface
 pub trait URLHint {
	/// Resolves given id to registrar entry.
-	fn resolve(&self, app_id: Bytes) -> Option<GithubApp>;
+	fn resolve(&self, id: Bytes) -> Option<URLHintResult>;
 }
 
 pub struct URLHintContract {
@@ -110,10 +127,10 @@ impl URLHintContract {
		}
	}
 
-	fn encode_urlhint_call(&self, app_id: Bytes) -> Option<Bytes> {
+	fn encode_urlhint_call(&self, id: Bytes) -> Option<Bytes> {
		let call = self.urlhint
			.function("entries".into())
-			.and_then(|f| f.encode_call(vec![Token::FixedBytes(app_id)]));
+			.and_then(|f| f.encode_call(vec![Token::FixedBytes(id)]));
 
		match call {
			Ok(res) => {
@@ -126,7 +143,7 @@ impl URLHintContract {
		}
	}
 
-	fn decode_urlhint_output(&self, output: Bytes) -> Option<GithubApp> {
+	fn decode_urlhint_output(&self, output: Bytes) -> Option<URLHintResult> {
		trace!(target: "dapps", "Output: {:?}", output.to_hex());
		let output = self.urlhint
			.function("entries".into())
@@ -149,6 +166,17 @@ impl URLHintContract {
				if owner == Address::default() {
					return None;
				}
+
+				let commit = GithubApp::commit(&commit);
+				if commit == Some(Default::default()) {
+					let mime = guess_mime_type(&account_slash_repo).unwrap_or("application/octet-stream".into());
+					return Some(URLHintResult::Content(Content {
+						url: account_slash_repo,
+						mime: mime,
+						owner: owner,
+					}));
+				}
+
				let (account, repo) = {
					let mut it = account_slash_repo.split('/');
					match (it.next(), it.next()) {
@@ -157,12 +185,12 @@ impl URLHintContract {
					}
				};
 
-				GithubApp::commit(&commit).map(|commit| GithubApp {
+				commit.map(|commit| URLHintResult::Dapp(GithubApp {
					account: account,
					repo: repo,
					commit: commit,
					owner: owner,
-				})
+				}))
			},
			e => {
				warn!(target: "dapps", "Invalid contract output parameters: {:?}", e);
@@ -177,10 +205,10 @@ impl URLHintContract {
 }
 
 impl URLHint for URLHintContract {
-	fn resolve(&self, app_id: Bytes) -> Option<GithubApp> {
+	fn resolve(&self, id: Bytes) -> Option<URLHintResult> {
		self.urlhint_address().and_then(|address| {
			// Prepare contract call
-			self.encode_urlhint_call(app_id)
+			self.encode_urlhint_call(id)
				.and_then(|data| {
					let call = self.client.call(address, data);
					if let Err(ref e) = call {
@@ -193,6 +221,34 @@ impl URLHint for URLHintContract {
	}
 }
 
+fn guess_mime_type(url: &str) -> Option<String> {
+	const CONTENT_TYPE: &'static str = "content-type=";
+
+	let mut it = url.split('#');
+	// skip url
+	let url = it.next();
+	// get meta headers
+	let metas = it.next();
+	if let Some(metas) = metas {
+		for meta in metas.split('&') {
+			let meta = meta.to_lowercase();
+			if meta.starts_with(CONTENT_TYPE) {
+				return Some(meta[CONTENT_TYPE.len()..].to_owned());
+			}
+		}
+	}
+	url.and_then(|url| {
+		url.split('.').last()
+	}).and_then(|extension| {
+		mime_guess::get_mime_type_str(extension).map(Into::into)
+	})
+}
+
+#[cfg(test)]
+pub fn test_guess_mime_type(url: &str) -> Option<String> {
+	guess_mime_type(url)
+}
+
 fn as_string(e: T) -> String {
	format!("{:?}", e)
 }
@@ -201,7 +257,7 @@ fn as_string(e: T) -> String {
 mod tests {
	use std::sync::Arc;
	use std::str::FromStr;
-	use rustc_serialize::hex::{ToHex, FromHex};
+	use rustc_serialize::hex::FromHex;
 
	use super::*;
	use util::{Bytes, Address, Mutex, ToPretty};
@@ -279,12 +335,33 @@ mod tests {
		let res = urlhint.resolve("test".bytes().collect());
 
		// then
-		assert_eq!(res, Some(GithubApp {
+		assert_eq!(res, Some(URLHintResult::Dapp(GithubApp {
			account: "ethcore".into(),
			repo: "dao.claim".into(),
			commit: GithubApp::commit(&"ec4c1fe06c808fe3739858c347109b1f5f1ed4b5".from_hex().unwrap()).unwrap(),
			owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
-		}))
+		})))
	}
+
+	#[test]
+	fn should_decode_urlhint_content_output() {
+		// given
+		let mut registrar = FakeRegistrar::new();
+		registrar.responses = Mutex::new(vec![
+			Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
+			Ok("00000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff000000000000000000000000000000000000000000000000000000000000003d68747470733a2f2f657468636f72652e696f2f6173736574732f696d616765732f657468636f72652d626c61636b2d686f72697a6f6e74616c2e706e67000000".from_hex().unwrap()),
+		]);
+		let urlhint = URLHintContract::new(Arc::new(registrar));
+
+		// when
+		let res = urlhint.resolve("test".bytes().collect());
+
+		// then
+		assert_eq!(res, Some(URLHintResult::Content(Content {
+			url: "https://ethcore.io/assets/images/ethcore-black-horizontal.png".into(),
+			mime: "image/png".into(),
+			owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
+		})))
+	}
 
	#[test]
@@ -303,4 +380,20 @@ mod tests {
 
		// then
		assert_eq!(url, "https://codeload.github.com/test/xyz/zip/000102030405060708090a0b0c0d0e0f10111213".to_owned());
	}
+
+	#[test]
+	fn should_guess_mime_type_from_url() {
+		let url1 = "https://ethcore.io/parity";
+		let url2 = "https://ethcore.io/parity#content-type=image/png";
+		let url3 = "https://ethcore.io/parity#something&content-type=image/png";
+		let url4 = "https://ethcore.io/parity.png#content-type=image/jpeg";
+		let url5 = "https://ethcore.io/parity.png";
+
+
+		assert_eq!(test_guess_mime_type(url1), None);
+		assert_eq!(test_guess_mime_type(url2), Some("image/png".into()));
+		assert_eq!(test_guess_mime_type(url3), Some("image/png".into()));
+		assert_eq!(test_guess_mime_type(url4), Some("image/jpeg".into()));
+		assert_eq!(test_guess_mime_type(url5), Some("image/png".into()));
+	}
 }
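Note on the API introduced above: `URLHint::resolve` now yields a `URLHintResult`, which is either a GitHub dapp bundle or a raw `Content` entry. As a minimal illustrative sketch (the `StaticResolver` type and its hard-coded entry are hypothetical, not part of this change), a custom resolver against the new signature looks like this:

use util::{Address, Bytes};
use apps::urlhint::{URLHint, URLHintResult, Content};

// Always answers with a single content entry, regardless of the requested id.
struct StaticResolver;

impl URLHint for StaticResolver {
	fn resolve(&self, _id: Bytes) -> Option<URLHintResult> {
		Some(URLHintResult::Content(Content {
			url: "https://ethcore.io/assets/images/ethcore-black-horizontal.png".into(),
			mime: "image/png".into(),
			owner: Address::default(),
		}))
	}
}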
diff --git a/dapps/src/handlers/client/mod.rs b/dapps/src/handlers/client/mod.rs
index 181f60001..3d8551e8a 100644
--- a/dapps/src/handlers/client/mod.rs
+++ b/dapps/src/handlers/client/mod.rs
@@ -63,7 +63,7 @@ impl Client {
		self.https_client.close();
	}
 
-	pub fn request(&mut self, url: String, abort: Arc<AtomicBool>, on_done: Box<Fn() + Send>) -> Result<mpsc::Receiver<FetchResult>, FetchError> {
+	pub fn request(&mut self, url: &str, abort: Arc<AtomicBool>, on_done: Box<Fn() + Send>) -> Result<mpsc::Receiver<FetchResult>, FetchError> {
		let is_https = url.starts_with("https://");
		let url = try!(url.parse().map_err(|_| FetchError::InvalidUrl));
		trace!(target: "dapps", "Fetching from: {:?}", url);
diff --git a/dapps/src/handlers/fetch.rs b/dapps/src/handlers/fetch.rs
index 98242f2b3..790bf4710 100644
--- a/dapps/src/handlers/fetch.rs
+++ b/dapps/src/handlers/fetch.rs
@@ -16,7 +16,7 @@
 
 //! Hyper Server Handler that fetches a file during a request (proxy).
 
-use std::fmt;
+use std::{fs, fmt};
 use std::path::PathBuf;
 use std::sync::{mpsc, Arc};
 use std::sync::atomic::AtomicBool;
@@ -29,51 +29,50 @@ use hyper::status::StatusCode;
 use handlers::ContentHandler;
 use handlers::client::{Client, FetchResult};
 use apps::redirection_address;
-use apps::urlhint::GithubApp;
-use apps::manifest::Manifest;
 
 const FETCH_TIMEOUT: u64 = 30;
 
-enum FetchState {
-	NotStarted(GithubApp),
+enum FetchState<T> {
+	NotStarted(String),
	Error(ContentHandler),
	InProgress {
		deadline: Instant,
		receiver: mpsc::Receiver<FetchResult>,
	},
-	Done(Manifest),
+	Done((String, T)),
 }
 
 pub trait ContentValidator {
	type Error: fmt::Debug + fmt::Display;
+	type Result: fmt::Debug;
 
-	fn validate_and_install(&self, app: PathBuf) -> Result<Manifest, Self::Error>;
-	fn done(&self, Option<&Manifest>);
+	fn validate_and_install(&self, app: PathBuf) -> Result<(String, Self::Result), Self::Error>;
+	fn done(&self, Option<&Self::Result>);
 }
 
 pub struct ContentFetcherHandler<H: ContentValidator> {
	abort: Arc<AtomicBool>,
	control: Option<Control>,
-	status: FetchState,
+	status: FetchState<H::Result>,
	client: Option<Client>,
	using_dapps_domains: bool,
-	dapp: H,
+	installer: H,
 }
 
 impl<H: ContentValidator> Drop for ContentFetcherHandler<H> {
	fn drop(&mut self) {
-		let manifest = match self.status {
-			FetchState::Done(ref manifest) => Some(manifest),
+		let result = match self.status {
+			FetchState::Done((_, ref result)) => Some(result),
			_ => None,
		};
-		self.dapp.done(manifest);
+		self.installer.done(result);
	}
 }
 
 impl<H: ContentValidator> ContentFetcherHandler<H> {
	pub fn new(
-		app: GithubApp,
+		url: String,
		abort: Arc<AtomicBool>,
		control: Control,
		using_dapps_domains: bool,
@@ -84,9 +83,9 @@ impl ContentFetcherHandler {
			abort: abort,
			control: Some(control),
			client: Some(client),
-			status: FetchState::NotStarted(app),
+			status: FetchState::NotStarted(url),
			using_dapps_domains: using_dapps_domains,
-			dapp: handler,
+			installer: handler,
		}
	}
 
@@ -97,8 +96,8 @@ impl ContentFetcherHandler {
	}
 
-	fn fetch_app(client: &mut Client, app: &GithubApp, abort: Arc<AtomicBool>, control: Control) -> Result<mpsc::Receiver<FetchResult>, String> {
-		client.request(app.url(), abort, Box::new(move || {
+	fn fetch_content(client: &mut Client, url: &str, abort: Arc<AtomicBool>, control: Control) -> Result<mpsc::Receiver<FetchResult>, String> {
+		client.request(url, abort, Box::new(move || {
			trace!(target: "dapps", "Fetching finished.");
			// Ignoring control errors
			let _ = control.ready(Next::read());
@@ -108,14 +107,14 @@ impl ContentFetcherHandler {
 
 impl server::Handler for ContentFetcherHandler {
	fn on_request(&mut self, request: server::Request) -> Next {
-		let status = if let FetchState::NotStarted(ref app) = self.status {
+		let status = if let FetchState::NotStarted(ref url) = self.status {
			Some(match *request.method() {
				// Start fetching content
				Method::Get => {
-					trace!(target: "dapps", "Fetching dapp: {:?}", app);
+					trace!(target: "dapps", "Fetching content from: {:?}", url);
					let control = self.control.take().expect("on_request is called only once, thus control is always Some");
					let client = self.client.as_mut().expect("on_request is called before client is closed.");
-					let fetch = Self::fetch_app(client, app, self.abort.clone(), control);
+					let fetch = Self::fetch_content(client, url, self.abort.clone(), control);
					match fetch {
						Ok(receiver) => FetchState::InProgress {
							deadline: Instant::now() + Duration::from_secs(FETCH_TIMEOUT),
@@ -154,7 +153,7 @@ impl server::Handler for ContentFetcherHandler<
				let timeout = ContentHandler::error(
					StatusCode::GatewayTimeout,
					"Download Timeout",
-					&format!("Could not fetch dapp bundle within {} seconds.", FETCH_TIMEOUT),
+					&format!("Could not fetch content within {} seconds.", FETCH_TIMEOUT),
					None
				);
				Self::close_client(&mut self.client);
@@ -166,32 +165,31 @@ impl server::Handler for ContentFetcherHandler<
				match rec {
					// Unpack and validate
					Ok(Ok(path)) => {
-						trace!(target: "dapps", "Fetching dapp finished. Starting validation.");
+						trace!(target: "dapps", "Fetching content finished. Starting validation ({:?})", path);
						Self::close_client(&mut self.client);
						// Unpack and verify
-						let state = match self.dapp.validate_and_install(path.clone()) {
+						let state = match self.installer.validate_and_install(path.clone()) {
							Err(e) => {
-								trace!(target: "dapps", "Error while validating dapp: {:?}", e);
+								trace!(target: "dapps", "Error while validating content: {:?}", e);
								FetchState::Error(ContentHandler::error(
									StatusCode::BadGateway,
									"Invalid Dapp",
-									"Downloaded bundle does not contain a valid dapp.",
+									"Downloaded bundle does not contain a valid content.",
									Some(&format!("{:?}", e))
								))
							},
-							Ok(manifest) => FetchState::Done(manifest)
+							Ok(result) => FetchState::Done(result)
						};
						// Remove temporary zip file
-						// TODO [todr] Uncomment me
-						// let _ = fs::remove_file(path);
+						let _ = fs::remove_file(path);
						(Some(state), Next::write())
					},
					Ok(Err(e)) => {
-						warn!(target: "dapps", "Unable to fetch new dapp: {:?}", e);
+						warn!(target: "dapps", "Unable to fetch content: {:?}", e);
						let error = ContentHandler::error(
							StatusCode::BadGateway,
							"Download Error",
-							"There was an error when fetching the dapp.",
+							"There was an error when fetching the content.",
							Some(&format!("{:?}", e)),
						);
						(Some(FetchState::Error(error)), Next::write())
@@ -213,10 +211,10 @@ impl server::Handler for ContentFetcherHandler<
 
	fn on_response(&mut self, res: &mut server::Response) -> Next {
		match self.status {
-			FetchState::Done(ref manifest) => {
-				trace!(target: "dapps", "Fetching dapp finished. Redirecting to {}", manifest.id);
+			FetchState::Done((ref id, _)) => {
+				trace!(target: "dapps", "Fetching content finished. Redirecting to {}", id);
				res.set_status(StatusCode::Found);
-				res.headers_mut().set(header::Location(redirection_address(self.using_dapps_domains, &manifest.id)));
+				res.headers_mut().set(header::Location(redirection_address(self.using_dapps_domains, id)));
				Next::write()
			},
			FetchState::Error(ref mut handler) => handler.on_response(res),
diff --git a/dapps/src/lib.rs b/dapps/src/lib.rs
index 87563a3ae..4dcf53a44 100644
--- a/dapps/src/lib.rs
+++ b/dapps/src/lib.rs
@@ -191,7 +191,7 @@ impl Server {
	) -> Result {
		let panic_handler = Arc::new(Mutex::new(None));
		let authorization = Arc::new(authorization);
-		let apps_fetcher = Arc::new(apps::fetcher::AppFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status));
+		let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status));
		let endpoints = Arc::new(apps::all_endpoints(dapps_path));
		let special = Arc::new({
			let mut special = HashMap::new();
@@ -206,7 +206,7 @@ impl Server {
			.handle(move |ctrl| router::Router::new(
				ctrl,
				apps::main_page(),
-				apps_fetcher.clone(),
+				content_fetcher.clone(),
				endpoints.clone(),
				special.clone(),
				authorization.clone(),
diff --git a/dapps/src/page/local.rs b/dapps/src/page/local.rs
index 86d4273d5..10b6f08b1 100644
--- a/dapps/src/page/local.rs
+++ b/dapps/src/page/local.rs
@@ -17,20 +17,30 @@
 use mime_guess;
 use std::io::{Seek, Read, SeekFrom};
 use std::fs;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 use page::handler;
 use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler};
 
 pub struct LocalPageEndpoint {
	path: PathBuf,
-	info: EndpointInfo,
+	mime: Option<String>,
+	info: Option<EndpointInfo>,
 }
 
 impl LocalPageEndpoint {
	pub fn new(path: PathBuf, info: EndpointInfo) -> Self {
		LocalPageEndpoint {
			path: path,
-			info: info,
+			mime: None,
+			info: Some(info),
+		}
+	}
+
+	pub fn single_file(path: PathBuf, mime: String) -> Self {
+		LocalPageEndpoint {
+			path: path,
+			mime: Some(mime),
+			info: None,
		}
	}
 
@@ -41,17 +51,40 @@ impl LocalPageEndpoint {
 
 impl Endpoint for LocalPageEndpoint {
	fn info(&self) -> Option<&EndpointInfo> {
-		Some(&self.info)
+		self.info.as_ref()
	}
 
	fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
-		Box::new(handler::PageHandler {
-			app: LocalDapp::new(self.path.clone()),
-			prefix: None,
-			path: path,
-			file: Default::default(),
-			safe_to_embed: false,
-		})
+		if let Some(ref mime) = self.mime {
+			Box::new(handler::PageHandler {
+				app: LocalSingleFile { path: self.path.clone(), mime: mime.clone() },
+				prefix: None,
+				path: path,
+				file: Default::default(),
+				safe_to_embed: false,
+			})
+		} else {
+			Box::new(handler::PageHandler {
+				app: LocalDapp { path: self.path.clone() },
+				prefix: None,
+				path: path,
+				file: Default::default(),
+				safe_to_embed: false,
+			})
+		}
+	}
+}
+
+struct LocalSingleFile {
+	path: PathBuf,
+	mime: String,
+}
+
+impl handler::Dapp for LocalSingleFile {
+	type DappFile = LocalFile;
+
+	fn file(&self, _path: &str) -> Option<LocalFile> {
+		LocalFile::from_path(&self.path, Some(&self.mime))
	}
 }
 
@@ -59,14 +92,6 @@ struct LocalDapp {
	path: PathBuf,
 }
 
-impl LocalDapp {
-	fn new(path: PathBuf) -> Self {
-		LocalDapp {
-			path: path
-		}
-	}
-}
-
 impl handler::Dapp for LocalDapp {
	type DappFile = LocalFile;
 
@@ -75,18 +100,7 @@ impl handler::Dapp for LocalDapp {
		for part in file_path.split('/') {
			path.push(part);
		}
-		// Check if file exists
-		fs::File::open(path.clone()).ok().map(|file| {
-			let content_type = mime_guess::guess_mime_type(path);
-			let len = file.metadata().ok().map_or(0, |meta| meta.len());
-			LocalFile {
-				content_type: content_type.to_string(),
-				buffer: [0; 4096],
-				file: file,
-				pos: 0,
-				len: len,
-			}
-		})
+		LocalFile::from_path(&path, None)
	}
 }
 
@@ -98,6 +112,24 @@ struct LocalFile {
	pos: u64,
 }
 
+impl LocalFile {
+	fn from_path<P: AsRef<Path>>(path: P, mime: Option<&str>) -> Option<LocalFile> {
+		// Check if file exists
+		fs::File::open(&path).ok().map(|file| {
+			let content_type = mime.map(|mime| mime.to_owned())
+				.unwrap_or_else(|| mime_guess::guess_mime_type(path).to_string());
+			let len = file.metadata().ok().map_or(0, |meta| meta.len());
+			LocalFile {
+				content_type: content_type,
+				buffer: [0; 4096],
+				file: file,
+				pos: 0,
+				len: len,
+			}
+		})
+	}
+}
+
 impl handler::DappFile for LocalFile {
	fn content_type(&self) -> &str {
		&self.content_type
diff --git a/dapps/src/router/mod.rs b/dapps/src/router/mod.rs
index c93456d71..b908203d6 100644
--- a/dapps/src/router/mod.rs
+++ b/dapps/src/router/mod.rs
@@ -27,7 +27,7 @@ use url::{Url, Host};
 use hyper::{self, server, Next, Encoder, Decoder, Control, StatusCode};
 use hyper::net::HttpStream;
 use apps;
-use apps::fetcher::AppFetcher;
+use apps::fetcher::ContentFetcher;
 use endpoint::{Endpoint, Endpoints, EndpointPath};
 use handlers::{Redirection, extract_url, ContentHandler};
 use self::auth::{Authorization, Authorized};
@@ -45,7 +45,7 @@ pub struct Router {
	control: Option,
	main_page: &'static str,
	endpoints: Arc,
-	fetch: Arc<AppFetcher>,
+	fetch: Arc<ContentFetcher>,
	special: Arc>>,
	authorization: Arc,
	allowed_hosts: Option>,
@@ -136,7 +136,7 @@ impl Router {
	pub fn new(
		control: Control,
		main_page: &'static str,
-		app_fetcher: Arc<AppFetcher>,
+		content_fetcher: Arc<ContentFetcher>,
		endpoints: Arc,
		special: Arc>>,
		authorization: Arc,
@@ -148,7 +148,7 @@ impl Router {
			control: Some(control),
			main_page: main_page,
			endpoints: endpoints,
-			fetch: app_fetcher,
+			fetch: content_fetcher,
			special: special,
			authorization: authorization,
			allowed_hosts: allowed_hosts,
diff --git a/dapps/src/tests/helpers.rs b/dapps/src/tests/helpers.rs
index 4cd21520c..efbd24a8d 100644
--- a/dapps/src/tests/helpers.rs
+++ b/dapps/src/tests/helpers.rs
@@ -17,7 +17,7 @@
 use std::env;
 use std::str;
 use std::sync::Arc;
-use rustc_serialize::hex::{ToHex, FromHex};
+use rustc_serialize::hex::FromHex;
 
 use ServerBuilder;
 use Server;
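For orientation, the reworked `ContentValidator` trait in `handlers/fetch.rs` now carries an associated `Result` type and returns the content id alongside it, and `done` is invoked from the handler's `Drop` impl. A minimal sketch of an installer against that shape — `PassThrough` is a hypothetical example, not part of this change:

use std::path::PathBuf;

// Hypothetical installer that accepts any downloaded file unchanged.
struct PassThrough {
	id: String,
}

impl ContentValidator for PassThrough {
	type Error = String;
	type Result = PathBuf;

	fn validate_and_install(&self, path: PathBuf) -> Result<(String, PathBuf), String> {
		// Accept the downloaded file as-is and key it by our id.
		Ok((self.id.clone(), path))
	}

	fn done(&self, _result: Option<&PathBuf>) {
		// Called when the fetch handler is dropped; a real installer
		// would update its content cache here (None signals failure).
	}
}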