// Copyright 2015-2018 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Serving web-based content (proxying)
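//!
//! The first URL parameter is expected to be the Crockford base32 encoding of
//! `"<token>+<target-url>"`; dots are stripped and the value is uppercased before
//! decoding. The token has to be a known web proxy access token, the target URL has
//! to start with `http://` or `https://`, and it has to stay within the domain
//! associated with the token. The remaining path segments and the query string are
//! appended to the target URL. A purely illustrative sketch of how a client could
//! build that parameter (token and URL are made up):
//!
//! ```ignore
//! let encoded = base32::encode(base32::Alphabet::Crockford, b"mytoken+https://example.com");
//! // Requesting `<encoded>/logo.png` would then proxy `https://example.com/logo.png`.
//! ```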
use std::sync::Arc;

use base32;
use fetch::{self, Fetch};
use hyper::{mime, StatusCode};

use apps;
use endpoint::{Endpoint, EndpointPath, Request, Response};
use futures::future;
use futures_cpupool::CpuPool;
use handlers::{
	ContentFetcherHandler, ContentHandler, ContentValidator, ValidatorResponse,
	StreamingHandler,
};
use WebProxyTokens;
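/// Web content proxying endpoint: fetches a remote URL on behalf of a dapp and
/// streams the response back, provided a valid web proxy access token is supplied.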
pub struct Web<F> {
	web_proxy_tokens: Arc<WebProxyTokens>,
	fetch: F,
	pool: CpuPool,
}

impl<F: Fetch> Web<F> {
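	/// Creates a new `Web` endpoint wrapped in a boxed `Endpoint` trait object.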
	pub fn boxed(
		web_proxy_tokens: Arc<WebProxyTokens>,
		fetch: F,
		pool: CpuPool,
	) -> Box<Endpoint> {
		Box::new(Web {
			web_proxy_tokens,
			fetch,
			pool,
		})
	}
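	/// Decodes the first URL parameter into an access token and a target URL,
	/// validating the token, the protocol and the target domain. Returns the full
	/// URL to proxy (including the remaining path and query) on success, or a
	/// ready-made error response on failure.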
	fn extract_target_url(&self, path: &EndpointPath) -> Result<String, ContentHandler> {
		let token_and_url = path.app_params.get(0)
			.map(|encoded| encoded.replace('.', ""))
			.and_then(|encoded| base32::decode(base32::Alphabet::Crockford, &encoded.to_uppercase()))
			.and_then(|data| String::from_utf8(data).ok())
			.ok_or_else(|| ContentHandler::error(
				StatusCode::BadRequest,
				"Invalid parameter",
				"Couldn't parse given parameter:",
				path.app_params.get(0).map(String::as_str),
			))?;

		let mut token_it = token_and_url.split('+');
		let token = token_it.next();
		let target_url = token_it.next();

		// Check if token supplied in URL is correct.
		let domain = match token.and_then(|token| self.web_proxy_tokens.domain(token)) {
			Some(domain) => domain,
			_ => {
				return Err(ContentHandler::error(
					StatusCode::BadRequest, "Invalid Access Token", "Invalid or old web proxy access token supplied.", Some("Try refreshing the page."),
				));
			}
		};

		// Validate protocol
		let mut target_url = match target_url {
			Some(url) if url.starts_with("http://") || url.starts_with("https://") => url.to_owned(),
			_ => {
				return Err(ContentHandler::error(
					StatusCode::BadRequest, "Invalid Protocol", "Invalid protocol used.", None,
				));
			}
		};

		if !target_url.starts_with(&*domain) {
			return Err(ContentHandler::error(
				StatusCode::BadRequest, "Invalid Domain", "Dapp attempted to access invalid domain.", Some(&target_url),
			));
		}

		if !target_url.ends_with("/") {
			target_url = format!("{}/", target_url);
		}

		// Skip the token
		let query = path.query.as_ref().map_or_else(String::new, |query| format!("?{}", query));
		let path = path.app_params[1..].join("/");

		Ok(format!("{}{}{}", target_url, path, query))
	}
}
impl<F: Fetch> Endpoint for Web<F> {
	fn respond(&self, path: EndpointPath, req: Request) -> Response {
		// First extract the URL (reject invalid URLs)
		let target_url = match self.extract_target_url(&path) {
			Ok(url) => url,
			Err(response) => {
				return Box::new(future::ok(response.into()));
			}
		};

		let token = path.app_params.get(0)
			.expect("`target_url` is valid; app_params is not empty;qed")
			.to_owned();
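		// Fetch the target URL and stream the response back, letting `WebInstaller`
		// validate it and inject the history-rewriting script for HTML documents.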
		Box::new(ContentFetcherHandler::new(
			req.method(),
			&target_url,
			path,
			WebInstaller {
				token,
			},
			self.fetch.clone(),
			self.pool.clone(),
		))
	}
}
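/// Turns a fetched response into a streaming handler, injecting a small
/// history-rewriting script into HTML documents.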
struct WebInstaller {
	token: String,
}

impl ContentValidator for WebInstaller {
	type Error = String;

	fn validate_and_install(self, response: fetch::Response) -> Result<ValidatorResponse, String> {
		let status = response.status();
		let is_html = response.is_html();
		let mime = response.content_type().unwrap_or(mime::TEXT_HTML);
		let mut handler = StreamingHandler::new(
			fetch::BodyReader::new(response),
			status,
			mime,
		);
		if is_html {
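			// Rewrite the visible browser URL to the referer form
			// (`/?<URL_REFERER><WEB_PATH>/<token>`) before the proxied HTML is streamed.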
			handler.set_initial_content(&format!(
				r#"<script>history.replaceState({{}}, "", "/?{}{}/{}")</script>"#,
				apps::URL_REFERER,
				apps::WEB_PATH,
				&self.token,
			));
		}
		Ok(ValidatorResponse::Streaming(handler))
	}
}