Merge pull request #2050 from ethcore/dapps-content
Fetching any content-addressed content
Commit 8b971966b3
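In short: URLHint::resolve used to return only a GithubApp (a dapp bundle hosted on GitHub); it now returns a URLHintResult that is either a dapp bundle or a plain content entry carrying a URL and a MIME type, and AppFetcher becomes ContentFetcher with a separate installer for each kind (DappInstaller / ContentInstaller). The sketch below is a simplified, self-contained illustration of that split; the trimmed-down structs and the describe() helper are invented stand-ins, not the real apps::urlhint types.

// Simplified sketch, not the real module: mirrors the shape of the new
// URLHintResult so the dapp/content split is easy to see.
#[derive(Debug)]
struct GithubApp { account: String, repo: String }

#[derive(Debug)]
struct Content { url: String, mime: String }

#[derive(Debug)]
enum URLHintResult {
	// A zip bundle that gets unpacked and validated against its manifest.
	Dapp(GithubApp),
	// A single file that is copied as-is and served with the given MIME type.
	Content(Content),
}

// The fetcher now branches on the resolved entry instead of assuming a dapp.
fn describe(entry: &URLHintResult) -> String {
	match *entry {
		URLHintResult::Dapp(ref app) => format!("dapp bundle github.com/{}/{}", app.account, app.repo),
		URLHintResult::Content(ref content) => format!("raw content at {} ({})", content.url, content.mime),
	}
}

fn main() {
	let entry = URLHintResult::Content(Content {
		url: "https://example.com/logo.png".into(), // hypothetical registrar entry
		mime: "image/png".into(),
	});
	println!("{}", describe(&entry));
}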
@ -39,7 +39,7 @@ clippy = { version = "0.0.85", optional = true}
serde_codegen = { version = "0.8", optional = true }

[features]
default = ["serde_codegen", "extra-dapps", "https-fetch/ca-github-only"]
default = ["serde_codegen", "extra-dapps"]
extra-dapps = ["parity-dapps-wallet"]
nightly = ["serde_macros"]
dev = ["clippy", "ethcore-rpc/dev", "ethcore-util/dev"]
@ -38,65 +38,65 @@ use handlers::{ContentHandler, ContentFetcherHandler, ContentValidator};
|
||||
use endpoint::{Endpoint, EndpointPath, Handler};
|
||||
use apps::cache::{ContentCache, ContentStatus};
|
||||
use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest, serialize_manifest, Manifest};
|
||||
use apps::urlhint::{URLHintContract, URLHint};
|
||||
use apps::urlhint::{URLHintContract, URLHint, URLHintResult};
|
||||
|
||||
const MAX_CACHED_DAPPS: usize = 10;
|
||||
|
||||
pub struct AppFetcher<R: URLHint = URLHintContract> {
|
||||
pub struct ContentFetcher<R: URLHint = URLHintContract> {
|
||||
dapps_path: PathBuf,
|
||||
resolver: R,
|
||||
cache: Arc<Mutex<ContentCache>>,
|
||||
sync: Arc<SyncStatus>,
|
||||
dapps: Arc<Mutex<ContentCache>>,
|
||||
}
|
||||
|
||||
impl<R: URLHint> Drop for AppFetcher<R> {
|
||||
impl<R: URLHint> Drop for ContentFetcher<R> {
|
||||
fn drop(&mut self) {
|
||||
// Clear cache path
|
||||
let _ = fs::remove_dir_all(&self.dapps_path);
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: URLHint> AppFetcher<R> {
|
||||
impl<R: URLHint> ContentFetcher<R> {
|
||||
|
||||
pub fn new(resolver: R, sync_status: Arc<SyncStatus>) -> Self {
|
||||
let mut dapps_path = env::temp_dir();
|
||||
dapps_path.push(random_filename());
|
||||
|
||||
AppFetcher {
|
||||
ContentFetcher {
|
||||
dapps_path: dapps_path,
|
||||
resolver: resolver,
|
||||
sync: sync_status,
|
||||
dapps: Arc::new(Mutex::new(ContentCache::default())),
|
||||
cache: Arc::new(Mutex::new(ContentCache::default())),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn set_status(&self, app_id: &str, status: ContentStatus) {
|
||||
self.dapps.lock().insert(app_id.to_owned(), status);
|
||||
fn set_status(&self, content_id: &str, status: ContentStatus) {
|
||||
self.cache.lock().insert(content_id.to_owned(), status);
|
||||
}
|
||||
|
||||
pub fn contains(&self, app_id: &str) -> bool {
|
||||
let mut dapps = self.dapps.lock();
|
||||
pub fn contains(&self, content_id: &str) -> bool {
|
||||
let mut cache = self.cache.lock();
|
||||
// Check if we already have the app
|
||||
if dapps.get(app_id).is_some() {
|
||||
if cache.get(content_id).is_some() {
|
||||
return true;
|
||||
}
|
||||
// fallback to resolver
|
||||
if let Ok(app_id) = app_id.from_hex() {
|
||||
if let Ok(content_id) = content_id.from_hex() {
|
||||
// if app_id is valid, but we are syncing always return true.
|
||||
if self.sync.is_major_syncing() {
|
||||
return true;
|
||||
}
|
||||
// else try to resolve the app_id
|
||||
self.resolver.resolve(app_id).is_some()
|
||||
self.resolver.resolve(content_id).is_some()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_async_handler(&self, path: EndpointPath, control: hyper::Control) -> Box<Handler> {
|
||||
let mut dapps = self.dapps.lock();
|
||||
let app_id = path.app_id.clone();
|
||||
let mut cache = self.cache.lock();
|
||||
let content_id = path.app_id.clone();
|
||||
|
||||
if self.sync.is_major_syncing() {
|
||||
return Box::new(ContentHandler::error(
|
||||
@ -108,7 +108,7 @@ impl<R: URLHint> AppFetcher<R> {
|
||||
}
|
||||
|
||||
let (new_status, handler) = {
|
||||
let status = dapps.get(&app_id);
|
||||
let status = cache.get(&content_id);
|
||||
match status {
|
||||
// Just server dapp
|
||||
Some(&mut ContentStatus::Ready(ref endpoint)) => {
|
||||
@ -125,24 +125,40 @@ impl<R: URLHint> AppFetcher<R> {
|
||||
},
|
||||
// We need to start fetching app
|
||||
None => {
|
||||
let app_hex = app_id.from_hex().expect("to_handler is called only when `contains` returns true.");
|
||||
let app = self.resolver.resolve(app_hex);
|
||||
|
||||
if let Some(app) = app {
|
||||
let content_hex = content_id.from_hex().expect("to_handler is called only when `contains` returns true.");
|
||||
let content = self.resolver.resolve(content_hex);
|
||||
let abort = Arc::new(AtomicBool::new(false));
|
||||
|
||||
(Some(ContentStatus::Fetching(abort.clone())), Box::new(ContentFetcherHandler::new(
|
||||
app,
|
||||
match content {
|
||||
Some(URLHintResult::Dapp(dapp)) => (
|
||||
Some(ContentStatus::Fetching(abort.clone())),
|
||||
Box::new(ContentFetcherHandler::new(
|
||||
dapp.url(),
|
||||
abort,
|
||||
control,
|
||||
path.using_dapps_domains,
|
||||
DappInstaller {
|
||||
dapp_id: app_id.clone(),
|
||||
id: content_id.clone(),
|
||||
dapps_path: self.dapps_path.clone(),
|
||||
dapps: self.dapps.clone(),
|
||||
cache: self.cache.clone(),
|
||||
})) as Box<Handler>
|
||||
),
|
||||
Some(URLHintResult::Content(content)) => (
|
||||
Some(ContentStatus::Fetching(abort.clone())),
|
||||
Box::new(ContentFetcherHandler::new(
|
||||
content.url,
|
||||
abort,
|
||||
control,
|
||||
path.using_dapps_domains,
|
||||
ContentInstaller {
|
||||
id: content_id.clone(),
|
||||
mime: content.mime,
|
||||
content_path: self.dapps_path.clone(),
|
||||
cache: self.cache.clone(),
|
||||
}
|
||||
)) as Box<Handler>)
|
||||
} else {
|
||||
)) as Box<Handler>,
|
||||
),
|
||||
None => {
|
||||
// This may happen when sync status changes in between
|
||||
// `contains` and `to_handler`
|
||||
(None, Box::new(ContentHandler::error(
|
||||
@ -151,14 +167,15 @@ impl<R: URLHint> AppFetcher<R> {
|
||||
"Requested resource was not found.",
|
||||
None
|
||||
)) as Box<Handler>)
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(status) = new_status {
|
||||
dapps.clear_garbage(MAX_CACHED_DAPPS);
|
||||
dapps.insert(app_id, status);
|
||||
cache.clear_garbage(MAX_CACHED_DAPPS);
|
||||
cache.insert(content_id, status);
|
||||
}
|
||||
|
||||
handler
|
||||
@ -169,7 +186,7 @@ impl<R: URLHint> AppFetcher<R> {
|
||||
pub enum ValidationError {
|
||||
Io(io::Error),
|
||||
Zip(zip::result::ZipError),
|
||||
InvalidDappId,
|
||||
InvalidContentId,
|
||||
ManifestNotFound,
|
||||
ManifestSerialization(String),
|
||||
HashMismatch { expected: H256, got: H256, },
|
||||
@ -180,7 +197,7 @@ impl fmt::Display for ValidationError {
|
||||
match *self {
|
||||
ValidationError::Io(ref io) => write!(f, "Unexpected IO error occured: {:?}", io),
|
||||
ValidationError::Zip(ref zip) => write!(f, "Unable to read ZIP archive: {:?}", zip),
|
||||
ValidationError::InvalidDappId => write!(f, "Dapp ID is invalid. It should be 32 bytes hash of content."),
|
||||
ValidationError::InvalidContentId => write!(f, "ID is invalid. It should be 256 bits keccak hash of content."),
|
||||
ValidationError::ManifestNotFound => write!(f, "Downloaded Dapp bundle did not contain valid manifest.json file."),
|
||||
ValidationError::ManifestSerialization(ref err) => {
|
||||
write!(f, "There was an error during Dapp Manifest serialization: {:?}", err)
|
||||
@ -204,10 +221,55 @@ impl From<zip::result::ZipError> for ValidationError {
|
||||
}
|
||||
}
|
||||
|
||||
struct ContentInstaller {
|
||||
id: String,
|
||||
mime: String,
|
||||
content_path: PathBuf,
|
||||
cache: Arc<Mutex<ContentCache>>,
|
||||
}
|
||||
|
||||
impl ContentValidator for ContentInstaller {
|
||||
type Error = ValidationError;
|
||||
type Result = PathBuf;
|
||||
|
||||
fn validate_and_install(&self, path: PathBuf) -> Result<(String, PathBuf), ValidationError> {
|
||||
// Create dir
|
||||
try!(fs::create_dir_all(&self.content_path));
|
||||
|
||||
// And prepare path for a file
|
||||
let filename = path.file_name().expect("We always fetch a file.");
|
||||
let mut content_path = self.content_path.clone();
|
||||
content_path.push(&filename);
|
||||
|
||||
if content_path.exists() {
|
||||
try!(fs::remove_dir_all(&content_path))
|
||||
}
|
||||
|
||||
try!(fs::copy(&path, &content_path));
|
||||
|
||||
Ok((self.id.clone(), content_path))
|
||||
}
|
||||
|
||||
fn done(&self, result: Option<&PathBuf>) {
|
||||
let mut cache = self.cache.lock();
|
||||
match result {
|
||||
Some(result) => {
|
||||
let page = LocalPageEndpoint::single_file(result.clone(), self.mime.clone());
|
||||
cache.insert(self.id.clone(), ContentStatus::Ready(page));
|
||||
},
|
||||
// In case of error
|
||||
None => {
|
||||
cache.remove(&self.id);
|
||||
},
|
||||
}
|
||||
}
|
||||
}
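For raw content the installer's work is deliberately small: copy the fetched temporary file into the content directory, then in done() either register a single-file LocalPageEndpoint under the content id or drop the cache entry on failure (both shown above). A minimal sketch of the copy step using std::fs only; the function name and error handling are invented for illustration.

use std::fs;
use std::path::{Path, PathBuf};

// Illustrative sketch of the copy step in ContentInstaller::validate_and_install:
// put the fetched temp file into the per-content directory and report its final path.
fn install_single_file(fetched: &Path, content_dir: &Path) -> std::io::Result<PathBuf> {
	// Make sure the content directory exists.
	fs::create_dir_all(content_dir)?;
	let filename = fetched.file_name().expect("the fetcher always produces a file");
	let target = content_dir.join(filename);
	// fs::copy overwrites an existing target; the temp file is removed later by the handler.
	fs::copy(fetched, &target)?;
	Ok(target)
}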
|
||||
|
||||
|
||||
struct DappInstaller {
|
||||
dapp_id: String,
|
||||
id: String,
|
||||
dapps_path: PathBuf,
|
||||
dapps: Arc<Mutex<ContentCache>>,
|
||||
cache: Arc<Mutex<ContentCache>>,
|
||||
}
|
||||
|
||||
impl DappInstaller {
|
||||
@ -244,15 +306,16 @@ impl DappInstaller {
|
||||
|
||||
impl ContentValidator for DappInstaller {
|
||||
type Error = ValidationError;
|
||||
type Result = Manifest;
|
||||
|
||||
fn validate_and_install(&self, app_path: PathBuf) -> Result<Manifest, ValidationError> {
|
||||
fn validate_and_install(&self, app_path: PathBuf) -> Result<(String, Manifest), ValidationError> {
|
||||
trace!(target: "dapps", "Opening dapp bundle at {:?}", app_path);
|
||||
let mut file_reader = io::BufReader::new(try!(fs::File::open(app_path)));
|
||||
let hash = try!(sha3(&mut file_reader));
|
||||
let dapp_id = try!(self.dapp_id.as_str().parse().map_err(|_| ValidationError::InvalidDappId));
|
||||
if dapp_id != hash {
|
||||
let id = try!(self.id.as_str().parse().map_err(|_| ValidationError::InvalidContentId));
|
||||
if id != hash {
|
||||
return Err(ValidationError::HashMismatch {
|
||||
expected: dapp_id,
|
||||
expected: id,
|
||||
got: hash,
|
||||
});
|
||||
}
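Content addressing is what makes the fetch trustworthy: the requested id must equal the keccak-256 (sha3) digest of the downloaded bytes, so InvalidContentId covers a malformed id and HashMismatch a download that does not match it. A rough sketch of that comparison using only the standard library; the real code parses the id into an H256 and hashes through util::sha3, and verify_content_id below is a hypothetical helper.

// Hypothetical helper, standard library only: check a hex-encoded content id
// against an already-computed 32-byte keccak-256 digest of the downloaded file.
fn verify_content_id(id_hex: &str, digest: &[u8; 32]) -> Result<(), String> {
	if id_hex.len() != 64 || !id_hex.bytes().all(|b| b.is_ascii_hexdigit()) {
		return Err("id should be the 256-bit keccak hash of the content, hex encoded".into());
	}
	let mut expected = [0u8; 32];
	for (i, byte) in expected.iter_mut().enumerate() {
		*byte = u8::from_str_radix(&id_hex[2 * i..2 * i + 2], 16).expect("validated as hex above");
	}
	if &expected != digest {
		return Err(format!("hash mismatch: expected {:?}, got {:?}", &expected[..], &digest[..]));
	}
	Ok(())
}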
|
||||
@ -262,7 +325,7 @@ impl ContentValidator for DappInstaller {
|
||||
// First find manifest file
|
||||
let (mut manifest, manifest_dir) = try!(Self::find_manifest(&mut zip));
|
||||
// Overwrite id to match hash
|
||||
manifest.id = self.dapp_id.clone();
|
||||
manifest.id = self.id.clone();
|
||||
|
||||
let target = self.dapp_target_path(&manifest);
|
||||
|
||||
@ -300,20 +363,20 @@ impl ContentValidator for DappInstaller {
|
||||
try!(manifest_file.write_all(manifest_str.as_bytes()));
|
||||
|
||||
// Return modified app manifest
|
||||
Ok(manifest)
|
||||
Ok((manifest.id.clone(), manifest))
|
||||
}
|
||||
|
||||
fn done(&self, manifest: Option<&Manifest>) {
|
||||
let mut dapps = self.dapps.lock();
|
||||
let mut cache = self.cache.lock();
|
||||
match manifest {
|
||||
Some(manifest) => {
|
||||
let path = self.dapp_target_path(manifest);
|
||||
let app = LocalPageEndpoint::new(path, manifest.clone().into());
|
||||
dapps.insert(self.dapp_id.clone(), ContentStatus::Ready(app));
|
||||
cache.insert(self.id.clone(), ContentStatus::Ready(app));
|
||||
},
|
||||
// In case of error
|
||||
None => {
|
||||
dapps.remove(&self.dapp_id);
|
||||
cache.remove(&self.id);
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -327,12 +390,12 @@ mod tests {
|
||||
use endpoint::EndpointInfo;
|
||||
use page::LocalPageEndpoint;
|
||||
use apps::cache::ContentStatus;
|
||||
use apps::urlhint::{GithubApp, URLHint};
|
||||
use super::AppFetcher;
|
||||
use apps::urlhint::{URLHint, URLHintResult};
|
||||
use super::ContentFetcher;
|
||||
|
||||
struct FakeResolver;
|
||||
impl URLHint for FakeResolver {
|
||||
fn resolve(&self, _app_id: Bytes) -> Option<GithubApp> {
|
||||
fn resolve(&self, _id: Bytes) -> Option<URLHintResult> {
|
||||
None
|
||||
}
|
||||
}
|
||||
@ -341,7 +404,7 @@ mod tests {
|
||||
fn should_true_if_contains_the_app() {
|
||||
// given
|
||||
let path = env::temp_dir();
|
||||
let fetcher = AppFetcher::new(FakeResolver, Arc::new(|| false));
|
||||
let fetcher = ContentFetcher::new(FakeResolver, Arc::new(|| false));
|
||||
let handler = LocalPageEndpoint::new(path, EndpointInfo {
|
||||
name: "fake".into(),
|
||||
description: "".into(),
|
||||
|
@ -17,6 +17,7 @@
|
||||
use std::fmt;
|
||||
use std::sync::Arc;
|
||||
use rustc_serialize::hex::ToHex;
|
||||
use mime_guess;
|
||||
|
||||
use ethabi::{Interface, Contract, Token};
|
||||
use util::{Address, Bytes, Hashable};
|
||||
@ -52,6 +53,13 @@ impl GithubApp {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct Content {
|
||||
pub url: String,
|
||||
pub mime: String,
|
||||
pub owner: Address,
|
||||
}
|
||||
|
||||
/// RAW Contract interface.
|
||||
/// Should execute transaction using current blockchain state.
|
||||
pub trait ContractClient: Send + Sync {
|
||||
@ -61,10 +69,19 @@ pub trait ContractClient: Send + Sync {
|
||||
fn call(&self, address: Address, data: Bytes) -> Result<Bytes, String>;
|
||||
}
|
||||
|
||||
/// Result of resolving id to URL
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum URLHintResult {
|
||||
/// Dapp
|
||||
Dapp(GithubApp),
|
||||
/// Content
|
||||
Content(Content),
|
||||
}
|
||||
|
||||
/// URLHint Contract interface
|
||||
pub trait URLHint {
|
||||
/// Resolves given id to registrar entry.
|
||||
fn resolve(&self, app_id: Bytes) -> Option<GithubApp>;
|
||||
fn resolve(&self, id: Bytes) -> Option<URLHintResult>;
|
||||
}
|
||||
|
||||
pub struct URLHintContract {
|
||||
@ -110,10 +127,10 @@ impl URLHintContract {
|
||||
}
|
||||
}
|
||||
|
||||
fn encode_urlhint_call(&self, app_id: Bytes) -> Option<Bytes> {
|
||||
fn encode_urlhint_call(&self, id: Bytes) -> Option<Bytes> {
|
||||
let call = self.urlhint
|
||||
.function("entries".into())
|
||||
.and_then(|f| f.encode_call(vec![Token::FixedBytes(app_id)]));
|
||||
.and_then(|f| f.encode_call(vec![Token::FixedBytes(id)]));
|
||||
|
||||
match call {
|
||||
Ok(res) => {
|
||||
@ -126,7 +143,7 @@ impl URLHintContract {
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_urlhint_output(&self, output: Bytes) -> Option<GithubApp> {
|
||||
fn decode_urlhint_output(&self, output: Bytes) -> Option<URLHintResult> {
|
||||
trace!(target: "dapps", "Output: {:?}", output.to_hex());
|
||||
let output = self.urlhint
|
||||
.function("entries".into())
|
||||
@ -149,6 +166,17 @@ impl URLHintContract {
|
||||
if owner == Address::default() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let commit = GithubApp::commit(&commit);
|
||||
if commit == Some(Default::default()) {
|
||||
let mime = guess_mime_type(&account_slash_repo).unwrap_or("application/octet-stream".into());
|
||||
return Some(URLHintResult::Content(Content {
|
||||
url: account_slash_repo,
|
||||
mime: mime,
|
||||
owner: owner,
|
||||
}));
|
||||
}
|
||||
|
||||
let (account, repo) = {
|
||||
let mut it = account_slash_repo.split('/');
|
||||
match (it.next(), it.next()) {
|
||||
@ -157,12 +185,12 @@ impl URLHintContract {
|
||||
}
|
||||
};
|
||||
|
||||
GithubApp::commit(&commit).map(|commit| GithubApp {
|
||||
commit.map(|commit| URLHintResult::Dapp(GithubApp {
|
||||
account: account,
|
||||
repo: repo,
|
||||
commit: commit,
|
||||
owner: owner,
|
||||
})
|
||||
}))
|
||||
},
|
||||
e => {
|
||||
warn!(target: "dapps", "Invalid contract output parameters: {:?}", e);
|
||||
@ -177,10 +205,10 @@ impl URLHintContract {
|
||||
}
|
||||
|
||||
impl URLHint for URLHintContract {
|
||||
fn resolve(&self, app_id: Bytes) -> Option<GithubApp> {
|
||||
fn resolve(&self, id: Bytes) -> Option<URLHintResult> {
|
||||
self.urlhint_address().and_then(|address| {
|
||||
// Prepare contract call
|
||||
self.encode_urlhint_call(app_id)
|
||||
self.encode_urlhint_call(id)
|
||||
.and_then(|data| {
|
||||
let call = self.client.call(address, data);
|
||||
if let Err(ref e) = call {
|
||||
@ -193,6 +221,34 @@ impl URLHint for URLHintContract {
|
||||
}
|
||||
}
|
||||
|
||||
fn guess_mime_type(url: &str) -> Option<String> {
|
||||
const CONTENT_TYPE: &'static str = "content-type=";
|
||||
|
||||
let mut it = url.split('#');
|
||||
// skip url
|
||||
let url = it.next();
|
||||
// get meta headers
|
||||
let metas = it.next();
|
||||
if let Some(metas) = metas {
|
||||
for meta in metas.split('&') {
|
||||
let meta = meta.to_lowercase();
|
||||
if meta.starts_with(CONTENT_TYPE) {
|
||||
return Some(meta[CONTENT_TYPE.len()..].to_owned());
|
||||
}
|
||||
}
|
||||
}
|
||||
url.and_then(|url| {
|
||||
url.split('.').last()
|
||||
}).and_then(|extension| {
|
||||
mime_guess::get_mime_type_str(extension).map(Into::into)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn test_guess_mime_type(url: &str) -> Option<String> {
|
||||
guess_mime_type(url)
|
||||
}
|
||||
|
||||
fn as_string<T: fmt::Debug>(e: T) -> String {
|
||||
format!("{:?}", e)
|
||||
}
|
||||
@ -201,7 +257,7 @@ fn as_string<T: fmt::Debug>(e: T) -> String {
|
||||
mod tests {
|
||||
use std::sync::Arc;
|
||||
use std::str::FromStr;
|
||||
use rustc_serialize::hex::{ToHex, FromHex};
|
||||
use rustc_serialize::hex::FromHex;
|
||||
|
||||
use super::*;
|
||||
use util::{Bytes, Address, Mutex, ToPretty};
|
||||
@ -279,12 +335,33 @@ mod tests {
|
||||
let res = urlhint.resolve("test".bytes().collect());
|
||||
|
||||
// then
|
||||
assert_eq!(res, Some(GithubApp {
|
||||
assert_eq!(res, Some(URLHintResult::Dapp(GithubApp {
|
||||
account: "ethcore".into(),
|
||||
repo: "dao.claim".into(),
|
||||
commit: GithubApp::commit(&"ec4c1fe06c808fe3739858c347109b1f5f1ed4b5".from_hex().unwrap()).unwrap(),
|
||||
owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
|
||||
}))
|
||||
})))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_decode_urlhint_content_output() {
|
||||
// given
|
||||
let mut registrar = FakeRegistrar::new();
|
||||
registrar.responses = Mutex::new(vec![
|
||||
Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
|
||||
Ok("00000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff000000000000000000000000000000000000000000000000000000000000003d68747470733a2f2f657468636f72652e696f2f6173736574732f696d616765732f657468636f72652d626c61636b2d686f72697a6f6e74616c2e706e67000000".from_hex().unwrap()),
|
||||
]);
|
||||
let urlhint = URLHintContract::new(Arc::new(registrar));
|
||||
|
||||
// when
|
||||
let res = urlhint.resolve("test".bytes().collect());
|
||||
|
||||
// then
|
||||
assert_eq!(res, Some(URLHintResult::Content(Content {
|
||||
url: "https://ethcore.io/assets/images/ethcore-black-horizontal.png".into(),
|
||||
mime: "image/png".into(),
|
||||
owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
|
||||
})))
|
||||
}
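Both kinds of entry share one registrar record (account_slash_repo, commit, owner). As decode_urlhint_output above shows, an all-zero commit field marks the record as raw content, in which case account_slash_repo is really a URL, optionally carrying a #content-type=... hint for guess_mime_type; a non-zero commit still means a GitHub-hosted dapp bundle. A tiny hypothetical helper that states the convention explicitly:

// Hypothetical helper, for illustration only: in a URLHint record an all-zero
// commit hash means "raw content entry", anything else means "GitHub dapp bundle".
fn is_content_entry(commit: &[u8]) -> bool {
	commit.iter().all(|byte| *byte == 0)
}

fn main() {
	assert!(is_content_entry(&[0u8; 20]));
	assert!(!is_content_entry(&[0xec, 0x4c, 0x1f, 0xe0]));
}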
|
||||
|
||||
#[test]
|
||||
@ -303,4 +380,20 @@ mod tests {
|
||||
// then
|
||||
assert_eq!(url, "https://codeload.github.com/test/xyz/zip/000102030405060708090a0b0c0d0e0f10111213".to_owned());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_guess_mime_type_from_url() {
|
||||
let url1 = "https://ethcore.io/parity";
|
||||
let url2 = "https://ethcore.io/parity#content-type=image/png";
|
||||
let url3 = "https://ethcore.io/parity#something&content-type=image/png";
|
||||
let url4 = "https://ethcore.io/parity.png#content-type=image/jpeg";
|
||||
let url5 = "https://ethcore.io/parity.png";
|
||||
|
||||
|
||||
assert_eq!(test_guess_mime_type(url1), None);
|
||||
assert_eq!(test_guess_mime_type(url2), Some("image/png".into()));
|
||||
assert_eq!(test_guess_mime_type(url3), Some("image/png".into()));
|
||||
assert_eq!(test_guess_mime_type(url4), Some("image/jpeg".into()));
|
||||
assert_eq!(test_guess_mime_type(url5), Some("image/png".into()));
|
||||
}
|
||||
}
@ -63,7 +63,7 @@ impl Client {
self.https_client.close();
}

pub fn request(&mut self, url: String, abort: Arc<AtomicBool>, on_done: Box<Fn() + Send>) -> Result<mpsc::Receiver<FetchResult>, FetchError> {
pub fn request(&mut self, url: &str, abort: Arc<AtomicBool>, on_done: Box<Fn() + Send>) -> Result<mpsc::Receiver<FetchResult>, FetchError> {
let is_https = url.starts_with("https://");
let url = try!(url.parse().map_err(|_| FetchError::InvalidUrl));
trace!(target: "dapps", "Fetching from: {:?}", url);
@ -16,7 +16,7 @@

//! Hyper Server Handler that fetches a file during a request (proxy).

use std::fmt;
use std::{fs, fmt};
use std::path::PathBuf;
use std::sync::{mpsc, Arc};
use std::sync::atomic::AtomicBool;
@ -29,51 +29,50 @@ use hyper::status::StatusCode;
use handlers::ContentHandler;
use handlers::client::{Client, FetchResult};
use apps::redirection_address;
use apps::urlhint::GithubApp;
use apps::manifest::Manifest;

const FETCH_TIMEOUT: u64 = 30;

enum FetchState {
NotStarted(GithubApp),
enum FetchState<T: fmt::Debug> {
NotStarted(String),
Error(ContentHandler),
InProgress {
deadline: Instant,
receiver: mpsc::Receiver<FetchResult>,
},
Done(Manifest),
Done((String, T)),
}

pub trait ContentValidator {
type Error: fmt::Debug + fmt::Display;
type Result: fmt::Debug;

fn validate_and_install(&self, app: PathBuf) -> Result<Manifest, Self::Error>;
fn done(&self, Option<&Manifest>);
fn validate_and_install(&self, app: PathBuf) -> Result<(String, Self::Result), Self::Error>;
fn done(&self, Option<&Self::Result>);
}
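ContentValidator is now generic over what installation produces: validate_and_install hands back the id the handler should redirect to plus an installer-specific result (a Manifest for dapp bundles, the final PathBuf for raw content), and done() receives that result when the handler is dropped. A stripped-down, self-contained sketch of the contract with an invented installer; the real trait also carries an associated Error type and is driven by ContentFetcherHandler.

use std::fmt;
use std::path::PathBuf;

// Simplified mirror of the trait, for illustration only.
trait ContentValidator {
	type Result: fmt::Debug;

	// Returns the id to redirect to, plus whatever the installer produced.
	fn validate_and_install(&self, fetched: PathBuf) -> Result<(String, Self::Result), String>;
	// Called once, when the handler is dropped; None signals a failed fetch or validation.
	fn done(&self, result: Option<&Self::Result>);
}

// Invented example: an installer that accepts everything and keeps the fetched path.
struct AcceptAll { id: String }

impl ContentValidator for AcceptAll {
	type Result = PathBuf;

	fn validate_and_install(&self, fetched: PathBuf) -> Result<(String, PathBuf), String> {
		Ok((self.id.clone(), fetched))
	}

	fn done(&self, result: Option<&PathBuf>) {
		println!("installed: {:?}", result);
	}
}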
pub struct ContentFetcherHandler<H: ContentValidator> {
|
||||
abort: Arc<AtomicBool>,
|
||||
control: Option<Control>,
|
||||
status: FetchState,
|
||||
status: FetchState<H::Result>,
|
||||
client: Option<Client>,
|
||||
using_dapps_domains: bool,
|
||||
dapp: H,
|
||||
installer: H,
|
||||
}
|
||||
|
||||
impl<H: ContentValidator> Drop for ContentFetcherHandler<H> {
|
||||
fn drop(&mut self) {
|
||||
let manifest = match self.status {
|
||||
FetchState::Done(ref manifest) => Some(manifest),
|
||||
let result = match self.status {
|
||||
FetchState::Done((_, ref result)) => Some(result),
|
||||
_ => None,
|
||||
};
|
||||
self.dapp.done(manifest);
|
||||
self.installer.done(result);
|
||||
}
|
||||
}
|
||||
|
||||
impl<H: ContentValidator> ContentFetcherHandler<H> {
|
||||
|
||||
pub fn new(
|
||||
app: GithubApp,
|
||||
url: String,
|
||||
abort: Arc<AtomicBool>,
|
||||
control: Control,
|
||||
using_dapps_domains: bool,
|
||||
@ -84,9 +83,9 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
|
||||
abort: abort,
|
||||
control: Some(control),
|
||||
client: Some(client),
|
||||
status: FetchState::NotStarted(app),
|
||||
status: FetchState::NotStarted(url),
|
||||
using_dapps_domains: using_dapps_domains,
|
||||
dapp: handler,
|
||||
installer: handler,
|
||||
}
|
||||
}
|
||||
|
||||
@ -97,8 +96,8 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
|
||||
}
|
||||
|
||||
|
||||
fn fetch_app(client: &mut Client, app: &GithubApp, abort: Arc<AtomicBool>, control: Control) -> Result<mpsc::Receiver<FetchResult>, String> {
|
||||
client.request(app.url(), abort, Box::new(move || {
|
||||
fn fetch_content(client: &mut Client, url: &str, abort: Arc<AtomicBool>, control: Control) -> Result<mpsc::Receiver<FetchResult>, String> {
|
||||
client.request(url, abort, Box::new(move || {
|
||||
trace!(target: "dapps", "Fetching finished.");
|
||||
// Ignoring control errors
|
||||
let _ = control.ready(Next::read());
|
||||
@ -108,14 +107,14 @@ impl<H: ContentValidator> ContentFetcherHandler<H> {
|
||||
|
||||
impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<H> {
|
||||
fn on_request(&mut self, request: server::Request<HttpStream>) -> Next {
|
||||
let status = if let FetchState::NotStarted(ref app) = self.status {
|
||||
let status = if let FetchState::NotStarted(ref url) = self.status {
|
||||
Some(match *request.method() {
|
||||
// Start fetching content
|
||||
Method::Get => {
|
||||
trace!(target: "dapps", "Fetching dapp: {:?}", app);
|
||||
trace!(target: "dapps", "Fetching content from: {:?}", url);
|
||||
let control = self.control.take().expect("on_request is called only once, thus control is always Some");
|
||||
let client = self.client.as_mut().expect("on_request is called before client is closed.");
|
||||
let fetch = Self::fetch_app(client, app, self.abort.clone(), control);
|
||||
let fetch = Self::fetch_content(client, url, self.abort.clone(), control);
|
||||
match fetch {
|
||||
Ok(receiver) => FetchState::InProgress {
|
||||
deadline: Instant::now() + Duration::from_secs(FETCH_TIMEOUT),
|
||||
@ -154,7 +153,7 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
|
||||
let timeout = ContentHandler::error(
|
||||
StatusCode::GatewayTimeout,
|
||||
"Download Timeout",
|
||||
&format!("Could not fetch dapp bundle within {} seconds.", FETCH_TIMEOUT),
|
||||
&format!("Could not fetch content within {} seconds.", FETCH_TIMEOUT),
|
||||
None
|
||||
);
|
||||
Self::close_client(&mut self.client);
|
||||
@ -166,32 +165,31 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
|
||||
match rec {
|
||||
// Unpack and validate
|
||||
Ok(Ok(path)) => {
|
||||
trace!(target: "dapps", "Fetching dapp finished. Starting validation.");
|
||||
trace!(target: "dapps", "Fetching content finished. Starting validation ({:?})", path);
|
||||
Self::close_client(&mut self.client);
|
||||
// Unpack and verify
|
||||
let state = match self.dapp.validate_and_install(path.clone()) {
|
||||
let state = match self.installer.validate_and_install(path.clone()) {
|
||||
Err(e) => {
|
||||
trace!(target: "dapps", "Error while validating dapp: {:?}", e);
|
||||
trace!(target: "dapps", "Error while validating content: {:?}", e);
|
||||
FetchState::Error(ContentHandler::error(
|
||||
StatusCode::BadGateway,
|
||||
"Invalid Dapp",
|
||||
"Downloaded bundle does not contain a valid dapp.",
|
||||
"Downloaded bundle does not contain a valid content.",
|
||||
Some(&format!("{:?}", e))
|
||||
))
|
||||
},
|
||||
Ok(manifest) => FetchState::Done(manifest)
|
||||
Ok(result) => FetchState::Done(result)
|
||||
};
|
||||
// Remove temporary zip file
|
||||
// TODO [todr] Uncomment me
|
||||
// let _ = fs::remove_file(path);
|
||||
let _ = fs::remove_file(path);
|
||||
(Some(state), Next::write())
|
||||
},
|
||||
Ok(Err(e)) => {
|
||||
warn!(target: "dapps", "Unable to fetch new dapp: {:?}", e);
|
||||
warn!(target: "dapps", "Unable to fetch content: {:?}", e);
|
||||
let error = ContentHandler::error(
|
||||
StatusCode::BadGateway,
|
||||
"Download Error",
|
||||
"There was an error when fetching the dapp.",
|
||||
"There was an error when fetching the content.",
|
||||
Some(&format!("{:?}", e)),
|
||||
);
|
||||
(Some(FetchState::Error(error)), Next::write())
|
||||
@ -213,10 +211,10 @@ impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<
|
||||
|
||||
fn on_response(&mut self, res: &mut server::Response) -> Next {
|
||||
match self.status {
|
||||
FetchState::Done(ref manifest) => {
|
||||
trace!(target: "dapps", "Fetching dapp finished. Redirecting to {}", manifest.id);
|
||||
FetchState::Done((ref id, _)) => {
|
||||
trace!(target: "dapps", "Fetching content finished. Redirecting to {}", id);
|
||||
res.set_status(StatusCode::Found);
|
||||
res.headers_mut().set(header::Location(redirection_address(self.using_dapps_domains, &manifest.id)));
|
||||
res.headers_mut().set(header::Location(redirection_address(self.using_dapps_domains, id)));
|
||||
Next::write()
|
||||
},
|
||||
FetchState::Error(ref mut handler) => handler.on_response(res),
@ -191,7 +191,7 @@ impl Server {
) -> Result<Server, ServerError> {
let panic_handler = Arc::new(Mutex::new(None));
let authorization = Arc::new(authorization);
let apps_fetcher = Arc::new(apps::fetcher::AppFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status));
let content_fetcher = Arc::new(apps::fetcher::ContentFetcher::new(apps::urlhint::URLHintContract::new(registrar), sync_status));
let endpoints = Arc::new(apps::all_endpoints(dapps_path));
let special = Arc::new({
let mut special = HashMap::new();
@ -206,7 +206,7 @@ impl Server {
.handle(move |ctrl| router::Router::new(
ctrl,
apps::main_page(),
apps_fetcher.clone(),
content_fetcher.clone(),
endpoints.clone(),
special.clone(),
authorization.clone(),
@ -17,20 +17,30 @@
|
||||
use mime_guess;
|
||||
use std::io::{Seek, Read, SeekFrom};
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::path::{Path, PathBuf};
|
||||
use page::handler;
|
||||
use endpoint::{Endpoint, EndpointInfo, EndpointPath, Handler};
|
||||
|
||||
pub struct LocalPageEndpoint {
|
||||
path: PathBuf,
|
||||
info: EndpointInfo,
|
||||
mime: Option<String>,
|
||||
info: Option<EndpointInfo>,
|
||||
}
|
||||
|
||||
impl LocalPageEndpoint {
|
||||
pub fn new(path: PathBuf, info: EndpointInfo) -> Self {
|
||||
LocalPageEndpoint {
|
||||
path: path,
|
||||
info: info,
|
||||
mime: None,
|
||||
info: Some(info),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn single_file(path: PathBuf, mime: String) -> Self {
|
||||
LocalPageEndpoint {
|
||||
path: path,
|
||||
mime: Some(mime),
|
||||
info: None,
|
||||
}
|
||||
}
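LocalPageEndpoint now covers two cases: new() keeps the old behaviour (a dapp directory with an EndpointInfo), while single_file() wraps exactly one file with an explicit MIME type and its info() accessor returns None. A compressed sketch of that Option-based split; EndpointInfo is reduced to a plain String here purely for illustration.

use std::path::PathBuf;

// Simplified mirror of the two constructors, for illustration only.
struct LocalPageEndpoint {
	path: PathBuf,
	mime: Option<String>,  // Some(..) only for single-file content
	info: Option<String>,  // Some(..) only for full dapps (stands in for EndpointInfo)
}

impl LocalPageEndpoint {
	fn new(path: PathBuf, info: String) -> Self {
		LocalPageEndpoint { path: path, mime: None, info: Some(info) }
	}

	fn single_file(path: PathBuf, mime: String) -> Self {
		LocalPageEndpoint { path: path, mime: Some(mime), info: None }
	}

	// to_handler() (not shown) picks a single-file or directory backend based on `mime`.
	fn is_single_file(&self) -> bool {
		self.mime.is_some()
	}
}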
|
||||
|
||||
@ -41,12 +51,21 @@ impl LocalPageEndpoint {
|
||||
|
||||
impl Endpoint for LocalPageEndpoint {
|
||||
fn info(&self) -> Option<&EndpointInfo> {
|
||||
Some(&self.info)
|
||||
self.info.as_ref()
|
||||
}
|
||||
|
||||
fn to_handler(&self, path: EndpointPath) -> Box<Handler> {
|
||||
if let Some(ref mime) = self.mime {
|
||||
Box::new(handler::PageHandler {
|
||||
app: LocalDapp::new(self.path.clone()),
|
||||
app: LocalSingleFile { path: self.path.clone(), mime: mime.clone() },
|
||||
prefix: None,
|
||||
path: path,
|
||||
file: Default::default(),
|
||||
safe_to_embed: false,
|
||||
})
|
||||
} else {
|
||||
Box::new(handler::PageHandler {
|
||||
app: LocalDapp { path: self.path.clone() },
|
||||
prefix: None,
|
||||
path: path,
|
||||
file: Default::default(),
|
||||
@ -54,19 +73,25 @@ impl Endpoint for LocalPageEndpoint {
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct LocalSingleFile {
|
||||
path: PathBuf,
|
||||
mime: String,
|
||||
}
|
||||
|
||||
impl handler::Dapp for LocalSingleFile {
|
||||
type DappFile = LocalFile;
|
||||
|
||||
fn file(&self, _path: &str) -> Option<Self::DappFile> {
|
||||
LocalFile::from_path(&self.path, Some(&self.mime))
|
||||
}
|
||||
}
|
||||
|
||||
struct LocalDapp {
|
||||
path: PathBuf,
|
||||
}
|
||||
|
||||
impl LocalDapp {
|
||||
fn new(path: PathBuf) -> Self {
|
||||
LocalDapp {
|
||||
path: path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl handler::Dapp for LocalDapp {
|
||||
type DappFile = LocalFile;
|
||||
|
||||
@ -75,18 +100,7 @@ impl handler::Dapp for LocalDapp {
|
||||
for part in file_path.split('/') {
|
||||
path.push(part);
|
||||
}
|
||||
// Check if file exists
|
||||
fs::File::open(path.clone()).ok().map(|file| {
|
||||
let content_type = mime_guess::guess_mime_type(path);
|
||||
let len = file.metadata().ok().map_or(0, |meta| meta.len());
|
||||
LocalFile {
|
||||
content_type: content_type.to_string(),
|
||||
buffer: [0; 4096],
|
||||
file: file,
|
||||
pos: 0,
|
||||
len: len,
|
||||
}
|
||||
})
|
||||
LocalFile::from_path(&path, None)
|
||||
}
|
||||
}
|
||||
|
||||
@ -98,6 +112,24 @@ struct LocalFile {
|
||||
pos: u64,
|
||||
}
|
||||
|
||||
impl LocalFile {
|
||||
fn from_path<P: AsRef<Path>>(path: P, mime: Option<&str>) -> Option<Self> {
|
||||
// Check if file exists
|
||||
fs::File::open(&path).ok().map(|file| {
|
||||
let content_type = mime.map(|mime| mime.to_owned())
|
||||
.unwrap_or_else(|| mime_guess::guess_mime_type(path).to_string());
|
||||
let len = file.metadata().ok().map_or(0, |meta| meta.len());
|
||||
LocalFile {
|
||||
content_type: content_type,
|
||||
buffer: [0; 4096],
|
||||
file: file,
|
||||
pos: 0,
|
||||
len: len,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl handler::DappFile for LocalFile {
|
||||
fn content_type(&self) -> &str {
|
||||
&self.content_type
|
||||
|
@ -27,7 +27,7 @@ use url::{Url, Host};
|
||||
use hyper::{self, server, Next, Encoder, Decoder, Control, StatusCode};
|
||||
use hyper::net::HttpStream;
|
||||
use apps;
|
||||
use apps::fetcher::AppFetcher;
|
||||
use apps::fetcher::ContentFetcher;
|
||||
use endpoint::{Endpoint, Endpoints, EndpointPath};
|
||||
use handlers::{Redirection, extract_url, ContentHandler};
|
||||
use self::auth::{Authorization, Authorized};
|
||||
@ -45,7 +45,7 @@ pub struct Router<A: Authorization + 'static> {
|
||||
control: Option<Control>,
|
||||
main_page: &'static str,
|
||||
endpoints: Arc<Endpoints>,
|
||||
fetch: Arc<AppFetcher>,
|
||||
fetch: Arc<ContentFetcher>,
|
||||
special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
|
||||
authorization: Arc<A>,
|
||||
allowed_hosts: Option<Vec<String>>,
|
||||
@ -136,7 +136,7 @@ impl<A: Authorization> Router<A> {
|
||||
pub fn new(
|
||||
control: Control,
|
||||
main_page: &'static str,
|
||||
app_fetcher: Arc<AppFetcher>,
|
||||
content_fetcher: Arc<ContentFetcher>,
|
||||
endpoints: Arc<Endpoints>,
|
||||
special: Arc<HashMap<SpecialEndpoint, Box<Endpoint>>>,
|
||||
authorization: Arc<A>,
|
||||
@ -148,7 +148,7 @@ impl<A: Authorization> Router<A> {
|
||||
control: Some(control),
|
||||
main_page: main_page,
|
||||
endpoints: endpoints,
|
||||
fetch: app_fetcher,
|
||||
fetch: content_fetcher,
|
||||
special: special,
|
||||
authorization: authorization,
|
||||
allowed_hosts: allowed_hosts,
@ -17,7 +17,7 @@
use std::env;
use std::str;
use std::sync::Arc;
use rustc_serialize::hex::{ToHex, FromHex};
use rustc_serialize::hex::FromHex;

use ServerBuilder;
use Server;