LRU cache for dapps (#2006)
Conflicts:
	dapps/Cargo.toml
	dapps/src/lib.rs
parent 6da60afaba
commit 6f321d9849
Cargo.lock (generated, 7 additions)
@@ -295,6 +295,7 @@ dependencies = [
  "hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
  "jsonrpc-core 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "jsonrpc-http-server 6.1.0 (git+https://github.com/ethcore/jsonrpc-http-server.git)",
+ "linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "mime_guess 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "parity-dapps 1.4.0 (git+https://github.com/ethcore/parity-ui.git)",
@@ -793,6 +794,11 @@ name = "libc"
 version = "0.2.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "linked-hash-map"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "log"
 version = "0.3.6"
@@ -1699,6 +1705,7 @@ dependencies = [
 "checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a"
 "checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
 "checksum libc 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "97def9dc7ce1d8e153e693e3a33020bc69972181adb2f871e87e888876feae49"
+"checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd"
 "checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
 "checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e"
 "checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
dapps/Cargo.toml
@@ -22,6 +22,7 @@ serde_json = "0.7.0"
 serde_macros = { version = "0.7.0", optional = true }
 zip = { version = "0.1", default-features = false }
 ethabi = "0.2.1"
+linked-hash-map = "0.3"
 ethcore-rpc = { path = "../rpc" }
 ethcore-util = { path = "../util" }
 parity-dapps = { git = "https://github.com/ethcore/parity-ui.git", version = "1.4" }
dapps/src/apps/cache.rs (new file, 128 lines)
@@ -0,0 +1,128 @@
+// Copyright 2015, 2016 Ethcore (UK) Ltd.
+// This file is part of Parity.
+
+// Parity is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// Parity is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with Parity. If not, see <http://www.gnu.org/licenses/>.
+
+//! Fetchable Dapps support.
+
+use std::fs;
+use std::sync::{Arc};
+use std::sync::atomic::{AtomicBool, Ordering};
+
+use linked_hash_map::LinkedHashMap;
+use page::LocalPageEndpoint;
+
+pub enum ContentStatus {
+	Fetching(Arc<AtomicBool>),
+	Ready(LocalPageEndpoint),
+}
+
+#[derive(Default)]
+pub struct ContentCache {
+	cache: LinkedHashMap<String, ContentStatus>,
+}
+
+impl ContentCache {
+	pub fn insert(&mut self, content_id: String, status: ContentStatus) -> Option<ContentStatus> {
+		self.cache.insert(content_id, status)
+	}
+
+	pub fn remove(&mut self, content_id: &str) -> Option<ContentStatus> {
+		self.cache.remove(content_id)
+	}
+
+	pub fn get(&mut self, content_id: &str) -> Option<&mut ContentStatus> {
+		self.cache.get_refresh(content_id)
+	}
+
+	pub fn clear_garbage(&mut self, expected_size: usize) -> Vec<(String, ContentStatus)> {
+		let mut len = self.cache.len();
+
+		if len <= expected_size {
+			return Vec::new();
+		}
+
+		let mut removed = Vec::with_capacity(len - expected_size);
+		while len > expected_size {
+			let entry = self.cache.pop_front().unwrap();
+			match entry.1 {
+				ContentStatus::Fetching(ref abort) => {
+					trace!(target: "dapps", "Aborting {} because of limit.", entry.0);
+					// Mark as aborted
+					abort.store(true, Ordering::Relaxed);
+				},
+				ContentStatus::Ready(ref endpoint) => {
+					trace!(target: "dapps", "Removing {} because of limit.", entry.0);
+					// Remove path
+					let res = fs::remove_dir_all(&endpoint.path());
+					if let Err(e) = res {
+						warn!(target: "dapps", "Unable to remove dapp: {:?}", e);
+					}
+				}
+			}
+
+			removed.push(entry);
+			len -= 1;
+		}
+		removed
+	}
+
+	#[cfg(test)]
+	pub fn len(&self) -> usize {
+		self.cache.len()
+	}
+}
+
+#[cfg(test)]
+mod tests {
+	use super::*;
+
+	fn only_keys(data: Vec<(String, ContentStatus)>) -> Vec<String> {
+		data.into_iter().map(|x| x.0).collect()
+	}
+
+	#[test]
+	fn should_remove_least_recently_used() {
+		// given
+		let mut cache = ContentCache::default();
+		cache.insert("a".into(), ContentStatus::Fetching(Default::default()));
+		cache.insert("b".into(), ContentStatus::Fetching(Default::default()));
+		cache.insert("c".into(), ContentStatus::Fetching(Default::default()));
+
+		// when
+		let res = cache.clear_garbage(2);
+
+		// then
+		assert_eq!(cache.len(), 2);
+		assert_eq!(only_keys(res), vec!["a"]);
+	}
+
+	#[test]
+	fn should_update_lru_if_accessed() {
+		// given
+		let mut cache = ContentCache::default();
+		cache.insert("a".into(), ContentStatus::Fetching(Default::default()));
+		cache.insert("b".into(), ContentStatus::Fetching(Default::default()));
+		cache.insert("c".into(), ContentStatus::Fetching(Default::default()));
+
+		// when
+		cache.get("a");
+		let res = cache.clear_garbage(2);
+
+		// then
+		assert_eq!(cache.len(), 2);
+		assert_eq!(only_keys(res), vec!["b"]);
+	}
+
+}
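For reviewers unfamiliar with the linked-hash-map crate: ContentCache gets its LRU behaviour from LinkedHashMap keeping entries in order, get_refresh moving the accessed entry to the back, and pop_front therefore always yielding the least recently used entry. A minimal standalone sketch of just that mechanism (illustrative only, not part of this diff):

// Sketch: the LRU property ContentCache relies on.
extern crate linked_hash_map;

use linked_hash_map::LinkedHashMap;

fn main() {
	let mut map = LinkedHashMap::new();
	map.insert("a".to_string(), 1);
	map.insert("b".to_string(), 2);
	map.insert("c".to_string(), 3);

	// Touching "a" moves it to the back of the order, which is what ContentCache::get does.
	map.get_refresh("a");

	// The front is now the least recently used entry, so eviction removes "b" first.
	assert_eq!(map.pop_front(), Some(("b".to_string(), 2)));
	assert_eq!(map.pop_front(), Some(("c".to_string(), 3)));
	assert_eq!(map.pop_front(), Some(("a".to_string(), 1)));
}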
dapps/src/apps/fetcher.rs
@@ -23,7 +23,7 @@ use std::{fs, env};
 use std::io::{self, Read, Write};
 use std::path::PathBuf;
 use std::sync::Arc;
-use std::collections::HashMap;
+use std::sync::atomic::{AtomicBool};
 use rustc_serialize::hex::FromHex;
 
 use hyper::Control;
@@ -33,20 +33,18 @@ use random_filename;
 use util::{Mutex, H256};
 use util::sha3::sha3;
 use page::LocalPageEndpoint;
-use handlers::{ContentHandler, AppFetcherHandler, DappHandler};
+use handlers::{ContentHandler, ContentFetcherHandler, ContentValidator};
 use endpoint::{Endpoint, EndpointPath, Handler};
+use apps::cache::{ContentCache, ContentStatus};
 use apps::manifest::{MANIFEST_FILENAME, deserialize_manifest, serialize_manifest, Manifest};
 use apps::urlhint::{URLHintContract, URLHint};
 
-enum AppStatus {
-	Fetching,
-	Ready(LocalPageEndpoint),
-}
+const MAX_CACHED_DAPPS: usize = 10;
 
 pub struct AppFetcher<R: URLHint = URLHintContract> {
 	dapps_path: PathBuf,
 	resolver: R,
-	dapps: Arc<Mutex<HashMap<String, AppStatus>>>,
+	dapps: Arc<Mutex<ContentCache>>,
 }
 
 impl<R: URLHint> Drop for AppFetcher<R> {
@@ -65,17 +63,17 @@ impl<R: URLHint> AppFetcher<R> {
 		AppFetcher {
 			dapps_path: dapps_path,
 			resolver: resolver,
-			dapps: Arc::new(Mutex::new(HashMap::new())),
+			dapps: Arc::new(Mutex::new(ContentCache::default())),
 		}
 	}
 
 	#[cfg(test)]
-	fn set_status(&self, app_id: &str, status: AppStatus) {
+	fn set_status(&self, app_id: &str, status: ContentStatus) {
 		self.dapps.lock().insert(app_id.to_owned(), status);
 	}
 
 	pub fn contains(&self, app_id: &str) -> bool {
-		let dapps = self.dapps.lock();
+		let mut dapps = self.dapps.lock();
 		match dapps.get(app_id) {
 			// Check if we already have the app
 			Some(_) => true,
@@ -95,11 +93,11 @@ impl<R: URLHint> AppFetcher<R> {
 		let status = dapps.get(&app_id);
 		match status {
 			// Just server dapp
-			Some(&AppStatus::Ready(ref endpoint)) => {
+			Some(&mut ContentStatus::Ready(ref endpoint)) => {
 				(None, endpoint.to_handler(path))
 			},
 			// App is already being fetched
-			Some(&AppStatus::Fetching) => {
+			Some(&mut ContentStatus::Fetching(_)) => {
 				(None, Box::new(ContentHandler::html(
 					StatusCode::ServiceUnavailable,
 					format!(
@@ -111,11 +109,13 @@ impl<R: URLHint> AppFetcher<R> {
 			},
 			// We need to start fetching app
 			None => {
-				// TODO [todr] Keep only last N dapps available!
 				let app_hex = app_id.from_hex().expect("to_handler is called only when `contains` returns true.");
 				let app = self.resolver.resolve(app_hex).expect("to_handler is called only when `contains` returns true.");
-				(Some(AppStatus::Fetching), Box::new(AppFetcherHandler::new(
+				let abort = Arc::new(AtomicBool::new(false));
+
+				(Some(ContentStatus::Fetching(abort.clone())), Box::new(ContentFetcherHandler::new(
 					app,
+					abort,
 					control,
 					path.using_dapps_domains,
 					DappInstaller {
@@ -129,6 +129,7 @@ impl<R: URLHint> AppFetcher<R> {
 		};
 
 		if let Some(status) = new_status {
+			dapps.clear_garbage(MAX_CACHED_DAPPS);
 			dapps.insert(app_id, status);
 		}
 
@@ -161,7 +162,7 @@ impl From<zip::result::ZipError> for ValidationError {
 struct DappInstaller {
 	dapp_id: String,
 	dapps_path: PathBuf,
-	dapps: Arc<Mutex<HashMap<String, AppStatus>>>,
+	dapps: Arc<Mutex<ContentCache>>,
 }
 
 impl DappInstaller {
@@ -196,7 +197,7 @@ impl DappInstaller {
 	}
 }
 
-impl DappHandler for DappInstaller {
+impl ContentValidator for DappInstaller {
 	type Error = ValidationError;
 
 	fn validate_and_install(&self, app_path: PathBuf) -> Result<Manifest, ValidationError> {
@@ -262,7 +263,7 @@ impl DappHandler for DappInstaller {
 			Some(manifest) => {
 				let path = self.dapp_target_path(manifest);
 				let app = LocalPageEndpoint::new(path, manifest.clone().into());
-				dapps.insert(self.dapp_id.clone(), AppStatus::Ready(app));
+				dapps.insert(self.dapp_id.clone(), ContentStatus::Ready(app));
 			},
 			// In case of error
 			None => {
@@ -274,12 +275,13 @@ impl DappHandler for DappInstaller {
 
 #[cfg(test)]
 mod tests {
-	use std::path::PathBuf;
-	use super::{AppFetcher, AppStatus};
-	use apps::urlhint::{GithubApp, URLHint};
+	use std::env;
+	use util::Bytes;
 	use endpoint::EndpointInfo;
 	use page::LocalPageEndpoint;
-	use util::Bytes;
+	use apps::cache::ContentStatus;
+	use apps::urlhint::{GithubApp, URLHint};
+	use super::AppFetcher;
 
 	struct FakeResolver;
 	impl URLHint for FakeResolver {
@@ -291,8 +293,9 @@ mod tests {
 	#[test]
 	fn should_true_if_contains_the_app() {
 		// given
+		let path = env::temp_dir();
 		let fetcher = AppFetcher::new(FakeResolver);
-		let handler = LocalPageEndpoint::new(PathBuf::from("/tmp/test"), EndpointInfo {
+		let handler = LocalPageEndpoint::new(path, EndpointInfo {
 			name: "fake".into(),
 			description: "".into(),
 			version: "".into(),
@@ -301,8 +304,8 @@ mod tests {
 		});
 
 		// when
-		fetcher.set_status("test", AppStatus::Ready(handler));
-		fetcher.set_status("test2", AppStatus::Fetching);
+		fetcher.set_status("test", ContentStatus::Ready(handler));
+		fetcher.set_status("test2", ContentStatus::Fetching(Default::default()));
 
 		// then
 		assert_eq!(fetcher.contains("test"), true);
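Worth noting about the insertion path in AppFetcher::to_handler above: the cache is trimmed to MAX_CACHED_DAPPS before the new entry goes in, so immediately after an insert the cache can briefly hold one entry above the limit until the next insertion trims it again. A condensed sketch of that pattern (insert_bounded is a hypothetical helper, not in the PR; it assumes the ContentCache and ContentStatus types from dapps/src/apps/cache.rs above):

// Hypothetical helper, equivalent to what to_handler does inline.
fn insert_bounded(cache: &mut ContentCache, id: String, status: ContentStatus, limit: usize) {
	// Evict least recently used entries down to `limit` first...
	cache.clear_garbage(limit);
	// ...then insert, so the cache momentarily holds up to `limit` + 1 entries.
	cache.insert(id, status);
}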
dapps/src/apps/mod.rs
@@ -19,6 +19,7 @@ use page::PageEndpoint;
 use proxypac::ProxyPac;
 use parity_dapps::WebApp;
 
+mod cache;
 mod fs;
 pub mod urlhint;
 pub mod fetcher;
@@ -18,7 +18,8 @@
 
 use std::{env, io, fs, fmt};
 use std::path::PathBuf;
-use std::sync::mpsc;
+use std::sync::{mpsc, Arc};
+use std::sync::atomic::{AtomicBool, Ordering};
 use std::time::Duration;
 use random_filename;
 
@@ -29,6 +30,7 @@ use hyper::{self, Decoder, Encoder, Next};
 
 #[derive(Debug)]
 pub enum Error {
+	Aborted,
 	NotStarted,
 	UnexpectedStatus(StatusCode),
 	IoError(io::Error),
@@ -40,6 +42,7 @@ pub type OnDone = Box<Fn() + Send>;
 
 pub struct Fetch {
 	path: PathBuf,
+	abort: Arc<AtomicBool>,
 	file: Option<fs::File>,
 	result: Option<FetchResult>,
 	sender: mpsc::Sender<FetchResult>,
@@ -56,7 +59,7 @@ impl Drop for Fetch {
 	fn drop(&mut self) {
 		let res = self.result.take().unwrap_or(Err(Error::NotStarted));
 		// Remove file if there was an error
-		if res.is_err() {
+		if res.is_err() || self.is_aborted() {
 			if let Some(file) = self.file.take() {
 				drop(file);
 				// Remove file
@@ -72,12 +75,13 @@ impl Drop for Fetch {
 }
 
 impl Fetch {
-	pub fn new(sender: mpsc::Sender<FetchResult>, on_done: OnDone) -> Self {
+	pub fn new(sender: mpsc::Sender<FetchResult>, abort: Arc<AtomicBool>, on_done: OnDone) -> Self {
 		let mut dir = env::temp_dir();
 		dir.push(random_filename());
 
 		Fetch {
 			path: dir,
+			abort: abort,
 			file: None,
 			result: None,
 			sender: sender,
@@ -86,17 +90,36 @@ impl Fetch {
 	}
 }
 
+impl Fetch {
+	fn is_aborted(&self) -> bool {
+		self.abort.load(Ordering::Relaxed)
+	}
+	fn mark_aborted(&mut self) -> Next {
+		self.result = Some(Err(Error::Aborted));
+		Next::end()
+	}
+}
+
 impl hyper::client::Handler<HttpStream> for Fetch {
 	fn on_request(&mut self, req: &mut Request) -> Next {
+		if self.is_aborted() {
+			return self.mark_aborted();
+		}
 		req.headers_mut().set(Connection::close());
 		read()
 	}
 
 	fn on_request_writable(&mut self, _encoder: &mut Encoder<HttpStream>) -> Next {
+		if self.is_aborted() {
+			return self.mark_aborted();
+		}
 		read()
 	}
 
 	fn on_response(&mut self, res: Response) -> Next {
+		if self.is_aborted() {
+			return self.mark_aborted();
+		}
 		if *res.status() != StatusCode::Ok {
 			self.result = Some(Err(Error::UnexpectedStatus(*res.status())));
 			return Next::end();
@@ -117,6 +140,9 @@ impl hyper::client::Handler<HttpStream> for Fetch {
 	}
 
 	fn on_response_readable(&mut self, decoder: &mut Decoder<HttpStream>) -> Next {
+		if self.is_aborted() {
+			return self.mark_aborted();
+		}
 		match io::copy(decoder, self.file.as_mut().expect("File is there because on_response has created it.")) {
 			Ok(0) => Next::end(),
 			Ok(_) => read(),
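The Arc<AtomicBool> threaded through Fetch above is the only link between the cache and an in-flight download: ContentCache::clear_garbage stores true into it when a Fetching entry is evicted, and every hyper callback checks the flag and finishes early with Error::Aborted. A self-contained sketch of this cooperative-abort pattern (the Download type and its poll method are illustrative, not from the PR):

use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};

struct Download {
	abort: Arc<AtomicBool>,
}

impl Download {
	// Mirrors Fetch::is_aborted: every I/O callback consults the shared flag first.
	fn poll(&self) -> Result<(), &'static str> {
		if self.abort.load(Ordering::Relaxed) {
			return Err("aborted");
		}
		Ok(())
	}
}

fn main() {
	let abort = Arc::new(AtomicBool::new(false));
	let download = Download { abort: abort.clone() };
	assert!(download.poll().is_ok());

	// What the cache does when it evicts a ContentStatus::Fetching entry:
	abort.store(true, Ordering::Relaxed);
	assert!(download.poll().is_err());
}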
dapps/src/handlers/fetch.rs
@@ -18,7 +18,8 @@
 
 use std::{fs, fmt};
 use std::path::PathBuf;
-use std::sync::mpsc;
+use std::sync::{mpsc, Arc};
+use std::sync::atomic::AtomicBool;
 use std::time::{Instant, Duration};
 
 use hyper::{header, server, Decoder, Encoder, Next, Method, Control, Client};
@@ -38,19 +39,20 @@ enum FetchState {
 	Error(ContentHandler),
 	InProgress {
 		deadline: Instant,
-		receiver: mpsc::Receiver<FetchResult>
+		receiver: mpsc::Receiver<FetchResult>,
 	},
 	Done(Manifest),
 }
 
-pub trait DappHandler {
+pub trait ContentValidator {
 	type Error: fmt::Debug;
 
 	fn validate_and_install(&self, app: PathBuf) -> Result<Manifest, Self::Error>;
 	fn done(&self, Option<&Manifest>);
 }
 
-pub struct AppFetcherHandler<H: DappHandler> {
+pub struct ContentFetcherHandler<H: ContentValidator> {
+	abort: Arc<AtomicBool>,
 	control: Option<Control>,
 	status: FetchState,
 	client: Option<Client<Fetch>>,
@@ -58,7 +60,7 @@ pub struct AppFetcherHandler<H: DappHandler> {
 	dapp: H,
 }
 
-impl<H: DappHandler> Drop for AppFetcherHandler<H> {
+impl<H: ContentValidator> Drop for ContentFetcherHandler<H> {
 	fn drop(&mut self) {
 		let manifest = match self.status {
 			FetchState::Done(ref manifest) => Some(manifest),
@@ -68,16 +70,18 @@ impl<H: DappHandler> Drop for AppFetcherHandler<H> {
 	}
 }
 
-impl<H: DappHandler> AppFetcherHandler<H> {
+impl<H: ContentValidator> ContentFetcherHandler<H> {
 
 	pub fn new(
 		app: GithubApp,
+		abort: Arc<AtomicBool>,
 		control: Control,
 		using_dapps_domains: bool,
 		handler: H) -> Self {
 
 		let client = Client::new().expect("Failed to create a Client");
-		AppFetcherHandler {
+		ContentFetcherHandler {
+			abort: abort,
 			control: Some(control),
 			client: Some(client),
 			status: FetchState::NotStarted(app),
@@ -94,12 +98,12 @@ impl<H: DappHandler> AppFetcherHandler<H> {
 
 
 	// TODO [todr] https support
-	fn fetch_app(client: &mut Client<Fetch>, app: &GithubApp, control: Control) -> Result<mpsc::Receiver<FetchResult>, String> {
+	fn fetch_app(client: &mut Client<Fetch>, app: &GithubApp, abort: Arc<AtomicBool>, control: Control) -> Result<mpsc::Receiver<FetchResult>, String> {
 		let url = try!(app.url().parse().map_err(|e| format!("{:?}", e)));
 		trace!(target: "dapps", "Fetching from: {:?}", url);
 
 		let (tx, rx) = mpsc::channel();
-		let res = client.request(url, Fetch::new(tx, Box::new(move || {
+		let res = client.request(url, Fetch::new(tx, abort, Box::new(move || {
 			trace!(target: "dapps", "Fetching finished.");
 			// Ignoring control errors
 			let _ = control.ready(Next::read());
@@ -111,7 +115,7 @@ impl<H: DappHandler> AppFetcherHandler<H> {
 	}
 }
 
-impl<H: DappHandler> server::Handler<HttpStream> for AppFetcherHandler<H> {
+impl<H: ContentValidator> server::Handler<HttpStream> for ContentFetcherHandler<H> {
 	fn on_request(&mut self, request: server::Request<HttpStream>) -> Next {
 		let status = if let FetchState::NotStarted(ref app) = self.status {
 			Some(match *request.method() {
@@ -120,7 +124,7 @@ impl<H: DappHandler> server::Handler<HttpStream> for AppFetcherHandler<H> {
 				trace!(target: "dapps", "Fetching dapp: {:?}", app);
 				let control = self.control.take().expect("on_request is called only once, thus control is always Some");
 				let client = self.client.as_mut().expect("on_request is called before client is closed.");
-				let fetch = Self::fetch_app(client, app, control);
+				let fetch = Self::fetch_app(client, app, self.abort.clone(), control);
 				match fetch {
 					Ok(receiver) => FetchState::InProgress {
 						deadline: Instant::now() + Duration::from_secs(FETCH_TIMEOUT),
dapps/src/handlers/mod.rs
@@ -27,7 +27,7 @@ pub use self::auth::AuthRequiredHandler;
 pub use self::echo::EchoHandler;
 pub use self::content::ContentHandler;
 pub use self::redirect::Redirection;
-pub use self::fetch::{AppFetcherHandler, DappHandler};
+pub use self::fetch::{ContentFetcherHandler, ContentValidator};
 
 use url::Url;
 use hyper::{server, header, net, uri};
dapps/src/lib.rs
@@ -60,6 +60,7 @@ extern crate rustc_serialize;
 extern crate parity_dapps;
 extern crate ethcore_rpc;
 extern crate ethcore_util as util;
+extern crate linked_hash_map;
 
 mod endpoint;
 mod apps;
@@ -33,6 +33,10 @@ impl LocalPageEndpoint {
 			info: info,
 		}
 	}
+
+	pub fn path(&self) -> PathBuf {
+		self.path.clone()
+	}
 }
 
 impl Endpoint for LocalPageEndpoint {