Add renamed files.
Parent: 10b0898bdf
Commit: 35b037e055
hash-fetch/Cargo.toml (new file, 15 lines)
@@ -0,0 +1,15 @@
[package]
description = "Fetching hash-addressed content."
homepage = "https://ethcore.io"
license = "GPL-3.0"
name = "parity-hash-fetch"
version = "1.5.0"
authors = ["Parity Technologies <admin@parity.io>"]

[dependencies]
log = "0.3"
rustc-serialize = "0.3"
ethabi = "0.2.2"
mime_guess = "1.6.1"
fetch = { path = "../util/fetch" }
ethcore-util = { path = "../util" }
hash-fetch/res/registrar.json (new file, 21 lines)
@@ -0,0 +1,21 @@
[
{"constant":false,"inputs":[{"name":"_new","type":"address"}],"name":"setOwner","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"string"}],"name":"confirmReverse","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"}],"name":"reserve","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"},{"name":"_value","type":"bytes32"}],"name":"set","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"}],"name":"drop","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":true,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"}],"name":"getAddress","outputs":[{"name":"","type":"address"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_amount","type":"uint256"}],"name":"setFee","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_to","type":"address"}],"name":"transfer","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":true,"inputs":[],"name":"owner","outputs":[{"name":"","type":"address"}],"type":"function"},
{"constant":true,"inputs":[{"name":"_name","type":"bytes32"}],"name":"reserved","outputs":[{"name":"reserved","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[],"name":"drain","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"string"},{"name":"_who","type":"address"}],"name":"proposeReverse","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":true,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"}],"name":"getUint","outputs":[{"name":"","type":"uint256"}],"type":"function"},
{"constant":true,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"}],"name":"get","outputs":[{"name":"","type":"bytes32"}],"type":"function"},
{"constant":true,"inputs":[],"name":"fee","outputs":[{"name":"","type":"uint256"}],"type":"function"},
{"constant":true,"inputs":[{"name":"","type":"address"}],"name":"reverse","outputs":[{"name":"","type":"string"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"},{"name":"_value","type":"uint256"}],"name":"setUint","outputs":[{"name":"success","type":"bool"}],"type":"function"},
{"constant":false,"inputs":[],"name":"removeReverse","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_name","type":"bytes32"},{"name":"_key","type":"string"},{"name":"_value","type":"address"}],"name":"setAddress","outputs":[{"name":"success","type":"bool"}],"type":"function"},{"anonymous":false,"inputs":[{"indexed":false,"name":"amount","type":"uint256"}],"name":"Drained","type":"event"},{"anonymous":false,"inputs":[{"indexed":false,"name":"amount","type":"uint256"}],"name":"FeeChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"owner","type":"address"}],"name":"Reserved","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"oldOwner","type":"address"},{"indexed":true,"name":"newOwner","type":"address"}],"name":"Transferred","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"owner","type":"address"}],"name":"Dropped","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"bytes32"},{"indexed":true,"name":"owner","type":"address"},{"indexed":true,"name":"key","type":"string"}],"name":"DataChanged","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"string"},{"indexed":true,"name":"reverse","type":"address"}],"name":"ReverseProposed","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"string"},{"indexed":true,"name":"reverse","type":"address"}],"name":"ReverseConfirmed","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"name","type":"string"},{"indexed":true,"name":"reverse","type":"address"}],"name":"ReverseRemoved","type":"event"},{"anonymous":false,"inputs":[{"indexed":true,"name":"old","type":"address"},{"indexed":true,"name":"current","type":"address"}],"name":"NewOwner","type":"event"}
]
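Note: this ABI is consumed by hash-fetch/src/urlhint.rs below through ethabi 0.2. A minimal sketch of the same pattern used there to encode a getAddress(bytes32, string) call; the zeroed name hash is a placeholder and errors are collapsed into strings:

	extern crate ethabi;

	use ethabi::{Interface, Contract, Token};

	fn encode_get_address() -> Result<Vec<u8>, String> {
		// Load the ABI shipped with the crate and wrap it in a Contract.
		let interface = Interface::load(include_bytes!("../res/registrar.json"))
			.map_err(|e| format!("{:?}", e))?;
		let registrar = Contract::new(interface);
		// Encode a call to getAddress(bytes32 _name, string _key).
		let get_address = registrar.function("getAddress".into()).map_err(|e| format!("{:?}", e))?;
		get_address.encode_call(vec![
			Token::FixedBytes(vec![0u8; 32]), // placeholder for sha3("githubhint")
			Token::String("A".into()),        // "A" selects the address entry, as in urlhint.rs
		]).map_err(|e| format!("{:?}", e))
	}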
hash-fetch/res/urlhint.json (new file, 6 lines)
@@ -0,0 +1,6 @@
[
{"constant":false,"inputs":[{"name":"_content","type":"bytes32"},{"name":"_url","type":"string"}],"name":"hintURL","outputs":[],"type":"function"},
{"constant":false,"inputs":[{"name":"_content","type":"bytes32"},{"name":"_accountSlashRepo","type":"string"},{"name":"_commit","type":"bytes20"}],"name":"hint","outputs":[],"type":"function"},
{"constant":true,"inputs":[{"name":"","type":"bytes32"}],"name":"entries","outputs":[{"name":"accountSlashRepo","type":"string"},{"name":"commit","type":"bytes20"},{"name":"owner","type":"address"}],"type":"function"},
{"constant":false,"inputs":[{"name":"_content","type":"bytes32"}],"name":"unhint","outputs":[],"type":"function"}
]
hash-fetch/src/client.rs (new file, 119 lines)
@@ -0,0 +1,119 @@
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Hash-addressed content resolver & fetcher.

use std::{io, fs};
use std::sync::Arc;
use std::path::PathBuf;

use util::{Mutex, H256, sha3};
use fetch::{Fetch, FetchError, Client as FetchClient};

use urlhint::{ContractClient, URLHintContract, URLHint, URLHintResult};

/// API for fetching by hash.
pub trait HashFetch: Send + Sync + 'static {
	/// Fetch hash-addressed content.
	/// Parameters:
	/// 1. `hash` - content hash
	/// 2. `on_done` - callback function invoked when the content is ready (or an error occurred during fetch)
	///
	/// This function may fail immediately when fetch cannot be initialized or content cannot be resolved.
	fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error>;
}

/// Hash-fetching error.
#[derive(Debug)]
pub enum Error {
	/// Hash could not be resolved to a valid content address.
	NoResolution,
	/// Downloaded content hash does not match.
	HashMismatch {
		/// Expected hash
		expected: H256,
		/// Computed hash
		got: H256,
	},
	/// IO Error while validating hash.
	IO(io::Error),
	/// Error during fetch.
	Fetch(FetchError),
}

impl From<FetchError> for Error {
	fn from(error: FetchError) -> Self {
		Error::Fetch(error)
	}
}

impl From<io::Error> for Error {
	fn from(error: io::Error) -> Self {
		Error::IO(error)
	}
}

/// Default Hash-fetching client using on-chain contract to resolve hashes to URLs.
pub struct Client {
	contract: URLHintContract,
	fetch: Mutex<FetchClient>,
}

impl Client {
	/// Creates new instance of the `Client` given on-chain contract client.
	pub fn new(contract: Arc<ContractClient>) -> Self {
		Client {
			contract: URLHintContract::new(contract),
			fetch: Mutex::new(FetchClient::default()),
		}
	}
}

impl HashFetch for Client {
	fn fetch(&self, hash: H256, on_done: Box<Fn(Result<PathBuf, Error>) + Send>) -> Result<(), Error> {
		debug!(target: "fetch", "Fetching: {:?}", hash);

		let url = try!(
			self.contract.resolve(hash.to_vec()).map(|content| match content {
				URLHintResult::Dapp(dapp) => {
					dapp.url()
				},
				URLHintResult::Content(content) => {
					content.url
				},
			}).ok_or_else(|| Error::NoResolution)
		);

		debug!(target: "fetch", "Resolved {:?} to {:?}. Fetching...", hash, url);

		self.fetch.lock().request_async(&url, Default::default(), Box::new(move |result| {
			fn validate_hash(hash: H256, result: Result<PathBuf, FetchError>) -> Result<PathBuf, Error> {
				let path = try!(result);
				let mut file_reader = io::BufReader::new(try!(fs::File::open(&path)));
				let content_hash = try!(sha3(&mut file_reader));

				if content_hash != hash {
					Err(Error::HashMismatch{ got: content_hash, expected: hash })
				} else {
					Ok(path)
				}
			}

			debug!(target: "fetch", "Content fetched, validating hash ({:?})", hash);
			on_done(validate_hash(hash, result))
		})).map_err(Into::into)
	}
}
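Note: a minimal usage sketch for the API above. The caller supplies any `urlhint::ContractClient` implementation (the trait is defined in urlhint.rs below); the zero hash is a placeholder:

	extern crate ethcore_util as util;
	extern crate parity_hash_fetch;

	use std::sync::Arc;
	use util::H256;
	use parity_hash_fetch::{HashFetch, Client};
	use parity_hash_fetch::urlhint::ContractClient;

	fn fetch_content(registrar: Arc<ContractClient>) {
		// Resolves the hash on-chain, downloads the content and verifies its sha3.
		let fetcher = Client::new(registrar);
		let hash = H256::default(); // placeholder content hash
		let started = fetcher.fetch(hash, Box::new(|result| {
			match result {
				Ok(path) => println!("content saved to {:?}", path),
				Err(err) => println!("fetch failed: {:?}", err),
			}
		}));
		if let Err(err) = started {
			println!("could not start fetch: {:?}", err);
		}
	}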
hash-fetch/src/lib.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Hash-addressed content resolver & fetcher.

#![warn(missing_docs)]

#[macro_use]
extern crate log;
extern crate rustc_serialize;
extern crate mime_guess;
extern crate ethabi;
extern crate ethcore_util as util;
extern crate fetch;

mod client;

pub mod urlhint;

pub use client::{HashFetch, Client, Error};
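Note: a short sketch of the crate surface above as seen from a consumer; the updater crate later in this commit imports it under the `hash_fetch` alias:

	extern crate parity_hash_fetch as hash_fetch;

	// Re-exported from the crate root.
	use hash_fetch::{HashFetch, Client, Error};
	// The urlhint module stays public for direct access to the contract wrapper.
	use hash_fetch::urlhint::{URLHint, URLHintContract, ContractClient};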
hash-fetch/src/urlhint.rs (new file, 409 lines)
@@ -0,0 +1,409 @@
|
||||
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! URLHint Contract
|
||||
|
||||
use std::fmt;
|
||||
use std::sync::Arc;
|
||||
use rustc_serialize::hex::ToHex;
|
||||
use mime_guess;
|
||||
|
||||
use ethabi::{Interface, Contract, Token};
|
||||
use util::{Address, Bytes, Hashable};
|
||||
|
||||
const COMMIT_LEN: usize = 20;
|
||||
|
||||
/// RAW Contract interface.
|
||||
/// Should execute transaction using current blockchain state.
|
||||
pub trait ContractClient: Send + Sync {
|
||||
/// Get registrar address
|
||||
fn registrar(&self) -> Result<Address, String>;
|
||||
/// Call Contract
|
||||
fn call(&self, address: Address, data: Bytes) -> Result<Bytes, String>;
|
||||
}
|
||||
|
||||
/// Github-hosted dapp.
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct GithubApp {
|
||||
/// Github Account
|
||||
pub account: String,
|
||||
/// Github Repository
|
||||
pub repo: String,
|
||||
/// Commit on Github
|
||||
pub commit: [u8;COMMIT_LEN],
|
||||
/// Dapp owner address
|
||||
pub owner: Address,
|
||||
}
|
||||
|
||||
impl GithubApp {
|
||||
/// Returns URL of this Github-hosted dapp package.
|
||||
pub fn url(&self) -> String {
|
||||
// Since the https fetcher doesn't support redirections, we use a direct link
|
||||
// format!("https://github.com/{}/{}/archive/{}.zip", self.account, self.repo, self.commit.to_hex())
|
||||
format!("https://codeload.github.com/{}/{}/zip/{}", self.account, self.repo, self.commit.to_hex())
|
||||
}
|
||||
|
||||
fn commit(bytes: &[u8]) -> Option<[u8;COMMIT_LEN]> {
|
||||
if bytes.len() < COMMIT_LEN {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut commit = [0; COMMIT_LEN];
|
||||
for i in 0..COMMIT_LEN {
|
||||
commit[i] = bytes[i];
|
||||
}
|
||||
|
||||
Some(commit)
|
||||
}
|
||||
}
|
||||
|
||||
/// Hash-Addressed Content
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct Content {
|
||||
/// URL of the content
|
||||
pub url: String,
|
||||
/// MIME type of the content
|
||||
pub mime: String,
|
||||
/// Content owner address
|
||||
pub owner: Address,
|
||||
}
|
||||
|
||||
/// Result of resolving id to URL
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum URLHintResult {
|
||||
/// Dapp
|
||||
Dapp(GithubApp),
|
||||
/// Content
|
||||
Content(Content),
|
||||
}
|
||||
|
||||
/// URLHint Contract interface
|
||||
pub trait URLHint {
|
||||
/// Resolves given id to registrar entry.
|
||||
fn resolve(&self, id: Bytes) -> Option<URLHintResult>;
|
||||
}
|
||||
|
||||
/// `URLHintContract` API
|
||||
pub struct URLHintContract {
|
||||
urlhint: Contract,
|
||||
registrar: Contract,
|
||||
client: Arc<ContractClient>,
|
||||
}
|
||||
|
||||
impl URLHintContract {
|
||||
/// Creates new `URLHintContract`
|
||||
pub fn new(client: Arc<ContractClient>) -> Self {
|
||||
let urlhint = Interface::load(include_bytes!("../res/urlhint.json")).expect("urlhint.json is valid ABI");
|
||||
let registrar = Interface::load(include_bytes!("../res/registrar.json")).expect("registrar.json is valid ABI");
|
||||
|
||||
URLHintContract {
|
||||
urlhint: Contract::new(urlhint),
|
||||
registrar: Contract::new(registrar),
|
||||
client: client,
|
||||
}
|
||||
}
|
||||
|
||||
fn urlhint_address(&self) -> Option<Address> {
|
||||
let res = || {
|
||||
let get_address = try!(self.registrar.function("getAddress".into()).map_err(as_string));
|
||||
let params = try!(get_address.encode_call(
|
||||
vec![Token::FixedBytes((*"githubhint".sha3()).to_vec()), Token::String("A".into())]
|
||||
).map_err(as_string));
|
||||
let output = try!(self.client.call(try!(self.client.registrar()), params));
|
||||
let result = try!(get_address.decode_output(output).map_err(as_string));
|
||||
|
||||
match result.get(0) {
|
||||
Some(&Token::Address(address)) if address != *Address::default() => Ok(address.into()),
|
||||
Some(&Token::Address(_)) => Err(format!("Contract not found.")),
|
||||
e => Err(format!("Invalid result: {:?}", e)),
|
||||
}
|
||||
};
|
||||
|
||||
match res() {
|
||||
Ok(res) => Some(res),
|
||||
Err(e) => {
|
||||
warn!(target: "dapps", "Error while calling registrar: {:?}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn encode_urlhint_call(&self, id: Bytes) -> Option<Bytes> {
|
||||
let call = self.urlhint
|
||||
.function("entries".into())
|
||||
.and_then(|f| f.encode_call(vec![Token::FixedBytes(id)]));
|
||||
|
||||
match call {
|
||||
Ok(res) => {
|
||||
Some(res)
|
||||
},
|
||||
Err(e) => {
|
||||
warn!(target: "dapps", "Error while encoding urlhint call: {:?}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_urlhint_output(&self, output: Bytes) -> Option<URLHintResult> {
|
||||
trace!(target: "dapps", "Output: {:?}", output.to_hex());
|
||||
let output = self.urlhint
|
||||
.function("entries".into())
|
||||
.and_then(|f| f.decode_output(output));
|
||||
|
||||
if let Ok(vec) = output {
|
||||
if vec.len() != 3 {
|
||||
warn!(target: "dapps", "Invalid contract output: {:?}", vec);
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut it = vec.into_iter();
|
||||
let account_slash_repo = it.next().expect("element 0 of 3-len vector known to exist; qed");
|
||||
let commit = it.next().expect("element 1 of 3-len vector known to exist; qed");
|
||||
let owner = it.next().expect("element 2 of 3-len vector known to exist; qed");
|
||||
|
||||
match (account_slash_repo, commit, owner) {
|
||||
(Token::String(account_slash_repo), Token::FixedBytes(commit), Token::Address(owner)) => {
|
||||
let owner = owner.into();
|
||||
if owner == Address::default() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let commit = GithubApp::commit(&commit);
|
||||
if commit == Some(Default::default()) {
|
||||
let mime = guess_mime_type(&account_slash_repo).unwrap_or("application/octet-stream".into());
|
||||
return Some(URLHintResult::Content(Content {
|
||||
url: account_slash_repo,
|
||||
mime: mime,
|
||||
owner: owner,
|
||||
}));
|
||||
}
|
||||
|
||||
let (account, repo) = {
|
||||
let mut it = account_slash_repo.split('/');
|
||||
match (it.next(), it.next()) {
|
||||
(Some(account), Some(repo)) => (account.into(), repo.into()),
|
||||
_ => return None,
|
||||
}
|
||||
};
|
||||
|
||||
commit.map(|commit| URLHintResult::Dapp(GithubApp {
|
||||
account: account,
|
||||
repo: repo,
|
||||
commit: commit,
|
||||
owner: owner,
|
||||
}))
|
||||
},
|
||||
e => {
|
||||
warn!(target: "dapps", "Invalid contract output parameters: {:?}", e);
|
||||
None
|
||||
},
|
||||
}
|
||||
} else {
|
||||
warn!(target: "dapps", "Invalid contract output: {:?}", output);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl URLHint for URLHintContract {
|
||||
fn resolve(&self, id: Bytes) -> Option<URLHintResult> {
|
||||
self.urlhint_address().and_then(|address| {
|
||||
// Prepare contract call
|
||||
self.encode_urlhint_call(id)
|
||||
.and_then(|data| {
|
||||
let call = self.client.call(address, data);
|
||||
if let Err(ref e) = call {
|
||||
warn!(target: "dapps", "Error while calling urlhint: {:?}", e);
|
||||
}
|
||||
call.ok()
|
||||
})
|
||||
.and_then(|output| self.decode_urlhint_output(output))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn guess_mime_type(url: &str) -> Option<String> {
|
||||
const CONTENT_TYPE: &'static str = "content-type=";
|
||||
|
||||
let mut it = url.split('#');
|
||||
// skip url
|
||||
let url = it.next();
|
||||
// get meta headers
|
||||
let metas = it.next();
|
||||
if let Some(metas) = metas {
|
||||
for meta in metas.split('&') {
|
||||
let meta = meta.to_lowercase();
|
||||
if meta.starts_with(CONTENT_TYPE) {
|
||||
return Some(meta[CONTENT_TYPE.len()..].to_owned());
|
||||
}
|
||||
}
|
||||
}
|
||||
url.and_then(|url| {
|
||||
url.split('.').last()
|
||||
}).and_then(|extension| {
|
||||
mime_guess::get_mime_type_str(extension).map(Into::into)
|
||||
})
|
||||
}
|
||||
|
||||
fn as_string<T: fmt::Debug>(e: T) -> String {
|
||||
format!("{:?}", e)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::sync::Arc;
|
||||
use std::str::FromStr;
|
||||
use rustc_serialize::hex::FromHex;
|
||||
|
||||
use super::*;
|
||||
use super::guess_mime_type;
|
||||
use util::{Bytes, Address, Mutex, ToPretty};
|
||||
|
||||
struct FakeRegistrar {
|
||||
pub calls: Arc<Mutex<Vec<(String, String)>>>,
|
||||
pub responses: Mutex<Vec<Result<Bytes, String>>>,
|
||||
}
|
||||
|
||||
const REGISTRAR: &'static str = "8e4e9b13d4b45cb0befc93c3061b1408f67316b2";
|
||||
const URLHINT: &'static str = "deadbeefcafe0000000000000000000000000000";
|
||||
|
||||
impl FakeRegistrar {
|
||||
fn new() -> Self {
|
||||
FakeRegistrar {
|
||||
calls: Arc::new(Mutex::new(Vec::new())),
|
||||
responses: Mutex::new(
|
||||
vec![
|
||||
Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
|
||||
Ok(Vec::new())
|
||||
]
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ContractClient for FakeRegistrar {
|
||||
|
||||
fn registrar(&self) -> Result<Address, String> {
|
||||
Ok(REGISTRAR.parse().unwrap())
|
||||
}
|
||||
|
||||
fn call(&self, address: Address, data: Bytes) -> Result<Bytes, String> {
|
||||
self.calls.lock().push((address.to_hex(), data.to_hex()));
|
||||
self.responses.lock().remove(0)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_call_registrar_and_urlhint_contracts() {
|
||||
// given
|
||||
let registrar = FakeRegistrar::new();
|
||||
let calls = registrar.calls.clone();
|
||||
let urlhint = URLHintContract::new(Arc::new(registrar));
|
||||
|
||||
// when
|
||||
let res = urlhint.resolve("test".bytes().collect());
|
||||
let calls = calls.lock();
|
||||
let call0 = calls.get(0).expect("Registrar resolve called");
|
||||
let call1 = calls.get(1).expect("URLHint Resolve called");
|
||||
|
||||
// then
|
||||
assert!(res.is_none());
|
||||
assert_eq!(call0.0, REGISTRAR);
|
||||
assert_eq!(call0.1,
|
||||
"6795dbcd058740ee9a5a3fb9f1cfa10752baec87e09cc45cd7027fd54708271aca300c75000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000014100000000000000000000000000000000000000000000000000000000000000".to_owned()
|
||||
);
|
||||
assert_eq!(call1.0, URLHINT);
|
||||
assert_eq!(call1.1,
|
||||
"267b69227465737400000000000000000000000000000000000000000000000000000000".to_owned()
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_decode_urlhint_output() {
|
||||
// given
|
||||
let mut registrar = FakeRegistrar::new();
|
||||
registrar.responses = Mutex::new(vec![
|
||||
Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
|
||||
Ok("0000000000000000000000000000000000000000000000000000000000000060ec4c1fe06c808fe3739858c347109b1f5f1ed4b5000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff0000000000000000000000000000000000000000000000000000000000000011657468636f72652f64616f2e636c61696d000000000000000000000000000000".from_hex().unwrap()),
|
||||
]);
|
||||
let urlhint = URLHintContract::new(Arc::new(registrar));
|
||||
|
||||
// when
|
||||
let res = urlhint.resolve("test".bytes().collect());
|
||||
|
||||
// then
|
||||
assert_eq!(res, Some(URLHintResult::Dapp(GithubApp {
|
||||
account: "ethcore".into(),
|
||||
repo: "dao.claim".into(),
|
||||
commit: GithubApp::commit(&"ec4c1fe06c808fe3739858c347109b1f5f1ed4b5".from_hex().unwrap()).unwrap(),
|
||||
owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
|
||||
})))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_decode_urlhint_content_output() {
|
||||
// given
|
||||
let mut registrar = FakeRegistrar::new();
|
||||
registrar.responses = Mutex::new(vec![
|
||||
Ok(format!("000000000000000000000000{}", URLHINT).from_hex().unwrap()),
|
||||
Ok("00000000000000000000000000000000000000000000000000000000000000600000000000000000000000000000000000000000000000000000000000000000000000000000000000000000deadcafebeefbeefcafedeaddeedfeedffffffff000000000000000000000000000000000000000000000000000000000000003d68747470733a2f2f657468636f72652e696f2f6173736574732f696d616765732f657468636f72652d626c61636b2d686f72697a6f6e74616c2e706e67000000".from_hex().unwrap()),
|
||||
]);
|
||||
let urlhint = URLHintContract::new(Arc::new(registrar));
|
||||
|
||||
// when
|
||||
let res = urlhint.resolve("test".bytes().collect());
|
||||
|
||||
// then
|
||||
assert_eq!(res, Some(URLHintResult::Content(Content {
|
||||
url: "https://ethcore.io/assets/images/ethcore-black-horizontal.png".into(),
|
||||
mime: "image/png".into(),
|
||||
owner: Address::from_str("deadcafebeefbeefcafedeaddeedfeedffffffff").unwrap(),
|
||||
})))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_return_valid_url() {
|
||||
// given
|
||||
let app = GithubApp {
|
||||
account: "test".into(),
|
||||
repo: "xyz".into(),
|
||||
commit: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
|
||||
owner: Address::default(),
|
||||
};
|
||||
|
||||
// when
|
||||
let url = app.url();
|
||||
|
||||
// then
|
||||
assert_eq!(url, "https://codeload.github.com/test/xyz/zip/000102030405060708090a0b0c0d0e0f10111213".to_owned());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn should_guess_mime_type_from_url() {
|
||||
let url1 = "https://ethcore.io/parity";
|
||||
let url2 = "https://ethcore.io/parity#content-type=image/png";
|
||||
let url3 = "https://ethcore.io/parity#something&content-type=image/png";
|
||||
let url4 = "https://ethcore.io/parity.png#content-type=image/jpeg";
|
||||
let url5 = "https://ethcore.io/parity.png";
|
||||
|
||||
|
||||
assert_eq!(guess_mime_type(url1), None);
|
||||
assert_eq!(guess_mime_type(url2), Some("image/png".into()));
|
||||
assert_eq!(guess_mime_type(url3), Some("image/png".into()));
|
||||
assert_eq!(guess_mime_type(url4), Some("image/jpeg".into()));
|
||||
assert_eq!(guess_mime_type(url5), Some("image/png".into()));
|
||||
}
|
||||
}
|
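Note: a short sketch tying the pieces above together, based on the decoding logic and tests: an entry whose commit field is all zeroes is treated as plain hash-addressed content (its URL may carry a `#content-type=` fragment that guess_mime_type picks up), anything else as a GitHub-hosted dapp package. The registrar argument is any `ContractClient` implementation:

	extern crate parity_hash_fetch as hash_fetch;

	use std::sync::Arc;
	use hash_fetch::urlhint::{URLHint, URLHintContract, URLHintResult, ContractClient};

	fn describe(registrar: Arc<ContractClient>, id: Vec<u8>) -> Option<String> {
		let urlhint = URLHintContract::new(registrar);
		urlhint.resolve(id).map(|entry| match entry {
			// Zero commit: direct URL plus MIME type guessed from the URL.
			URLHintResult::Content(content) => format!("content at {} ({})", content.url, content.mime),
			// Non-zero commit: zip archive of the given GitHub commit.
			URLHintResult::Dapp(dapp) => format!("dapp package at {}", dapp.url()),
		})
	}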
updater/Cargo.toml (new file, 17 lines)
@@ -0,0 +1,17 @@
[package]
description = "Parity Updater Service."
name = "parity-updater"
version = "1.5.0"
license = "GPL-3.0"
authors = ["Parity Technologies <admin@parity.io>"]

[dependencies]
log = "0.3"
ethabi = "0.2.2"
ethcore = { path = "../ethcore" }
ethcore-util = { path = "../util" }
parity-hash-fetch = { path = "../hash-fetch" }

[profile.release]
debug = true
lto = false
updater/src/lib.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Updater for Parity executables

#[macro_use] extern crate log;
extern crate ethcore_util as util;
extern crate parity_hash_fetch as hash_fetch;
extern crate ethcore;
extern crate ethabi;

mod updater;
mod operations;

pub use updater::{Updater, UpdateFilter, UpdatePolicy, ReleaseInfo, OperationsInfo, CapState};
updater/src/operations.rs (new file, 359 lines)
File diff suppressed because one or more lines are too long
updater/src/updater.rs (new file, 393 lines)
@@ -0,0 +1,393 @@
|
||||
// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use std::sync::{Arc, Weak};
|
||||
use std::{fs, env};
|
||||
use std::io::Write;
|
||||
use std::path::{PathBuf};
|
||||
use util::misc::{VersionInfo, ReleaseTrack/*, platform*/};
|
||||
use util::{Address, H160, H256, FixedHash, Mutex, Bytes};
|
||||
use ethcore::client::{BlockId, BlockChainClient, ChainNotify};
|
||||
use hash_fetch::{self as fetch, HashFetch};
|
||||
use operations::Operations;
|
||||
|
||||
/// Filter for releases.
|
||||
#[derive(Debug, Eq, PartialEq, Clone)]
|
||||
pub enum UpdateFilter {
|
||||
/// All releases following the same track.
|
||||
All,
|
||||
/// As with `All`, but only those which are known to be critical.
|
||||
Critical,
|
||||
/// None.
|
||||
None,
|
||||
}
|
||||
|
||||
/// The policy for auto-updating.
|
||||
#[derive(Debug, Eq, PartialEq, Clone)]
|
||||
pub struct UpdatePolicy {
|
||||
/// Download potential updates.
|
||||
pub enable_downloading: bool,
|
||||
/// Disable client if we know we're incapable of syncing.
|
||||
pub require_consensus: bool,
|
||||
/// Which of those downloaded should be automatically installed.
|
||||
pub filter: UpdateFilter,
|
||||
}
|
||||
|
||||
impl Default for UpdatePolicy {
|
||||
fn default() -> Self {
|
||||
UpdatePolicy {
|
||||
enable_downloading: false,
|
||||
require_consensus: true,
|
||||
filter: UpdateFilter::None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Information regarding a particular release of Parity
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct ReleaseInfo {
|
||||
/// Information on the version.
|
||||
pub version: VersionInfo,
|
||||
/// Does this release contain critical security updates?
|
||||
pub is_critical: bool,
|
||||
/// The latest fork that this release can handle.
|
||||
pub fork: u64,
|
||||
/// Our platform's binary, if known.
|
||||
pub binary: Option<H256>,
|
||||
}
|
||||
|
||||
/// Information on our operations environment.
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct OperationsInfo {
|
||||
/// Our blockchain's latest fork.
|
||||
pub fork: u64,
|
||||
|
||||
/// Last fork our client supports, if known.
|
||||
pub this_fork: Option<u64>,
|
||||
|
||||
/// Information on our track's latest release.
|
||||
pub track: ReleaseInfo,
|
||||
/// Information on our minor version's latest release.
|
||||
pub minor: Option<ReleaseInfo>,
|
||||
}
|
||||
|
||||
/// Information on the current version's consensus capability.
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum CapState {
|
||||
/// Unknown.
|
||||
Unknown,
|
||||
/// Capable of consensus indefinitely.
|
||||
Capable,
|
||||
/// Capable of consensus up until a definite block.
|
||||
CapableUntil(u64),
|
||||
/// Incapable of consensus since a particular block.
|
||||
IncapableSince(u64),
|
||||
}
|
||||
|
||||
impl Default for CapState {
|
||||
fn default() -> Self { CapState::Unknown }
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct UpdaterState {
|
||||
latest: Option<OperationsInfo>,
|
||||
|
||||
fetching: Option<ReleaseInfo>,
|
||||
ready: Option<ReleaseInfo>,
|
||||
installed: Option<ReleaseInfo>,
|
||||
|
||||
capability: CapState,
|
||||
}
|
||||
|
||||
/// Service for checking for updates and determining whether we can achieve consensus.
|
||||
pub struct Updater {
|
||||
// Useful environmental stuff.
|
||||
update_policy: UpdatePolicy,
|
||||
weak_self: Mutex<Weak<Updater>>,
|
||||
client: Weak<BlockChainClient>,
|
||||
fetcher: Mutex<Option<fetch::Client>>,
|
||||
operations: Mutex<Option<Operations>>,
|
||||
exit_handler: Mutex<Option<Box<Fn() + 'static + Send>>>,
|
||||
|
||||
// Our version info (static)
|
||||
this: VersionInfo,
|
||||
|
||||
// All the other info - this changes so leave it behind a Mutex.
|
||||
state: Mutex<UpdaterState>,
|
||||
}
|
||||
|
||||
const CLIENT_ID: &'static str = "parity";
|
||||
|
||||
// TODO!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! REMOVE!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
fn platform() -> String {
|
||||
"test".to_owned()
|
||||
}
|
||||
|
||||
impl Updater {
|
||||
pub fn new(client: Weak<BlockChainClient>, update_policy: UpdatePolicy) -> Arc<Self> {
|
||||
let mut u = Updater {
|
||||
update_policy: update_policy,
|
||||
weak_self: Mutex::new(Default::default()),
|
||||
client: client.clone(),
|
||||
fetcher: Mutex::new(None),
|
||||
operations: Mutex::new(None),
|
||||
exit_handler: Mutex::new(None),
|
||||
this: VersionInfo::this(),
|
||||
state: Mutex::new(Default::default()),
|
||||
};
|
||||
|
||||
// TODO!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! REMOVE!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
if u.this.track == ReleaseTrack::Unknown {
|
||||
u.this.track = ReleaseTrack::Nightly;
|
||||
}
|
||||
|
||||
let r = Arc::new(u);
|
||||
*r.fetcher.lock() = Some(fetch::Client::new(r.clone()));
|
||||
*r.weak_self.lock() = Arc::downgrade(&r);
|
||||
|
||||
r.poll();
|
||||
|
||||
r
|
||||
}
|
||||
|
||||
/// Is the currently running client capable of supporting the current chain?
|
||||
/// We default to true if there's no clear information.
|
||||
pub fn capability(&self) -> CapState {
|
||||
self.state.lock().capability
|
||||
}
|
||||
|
||||
/// The release which is ready to be upgraded to, if any. If this returns `Some`, then
|
||||
/// `execute_upgrade` may be called.
|
||||
pub fn upgrade_ready(&self) -> Option<ReleaseInfo> {
|
||||
self.state.lock().ready.clone()
|
||||
}
|
||||
|
||||
/// Actually upgrades the client. Assumes that the binary has been downloaded.
|
||||
/// @returns `true` on success.
|
||||
pub fn execute_upgrade(&self) -> bool {
|
||||
(|| -> Result<bool, String> {
|
||||
let mut s = self.state.lock();
|
||||
if let Some(r) = s.ready.take() {
|
||||
let p = Self::update_file_name(&r.version);
|
||||
let n = Self::updates_path("latest");
|
||||
// TODO: creating then writing is a bit fragile. would be nice to make it atomic.
|
||||
match fs::File::create(&n).and_then(|mut f| f.write_all(p.as_bytes())) {
|
||||
Ok(_) => {
|
||||
info!("Completed upgrade to {}", &r.version);
|
||||
s.installed = Some(r);
|
||||
if let Some(ref h) = *self.exit_handler.lock() {
|
||||
(*h)();
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
Err(e) => {
|
||||
s.ready = Some(r);
|
||||
Err(format!("Unable to create soft-link for update {:?}", e))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
warn!("Execute upgrade called when no upgrade ready.");
|
||||
Ok(false)
|
||||
}
|
||||
})().unwrap_or_else(|e| { warn!("{}", e); false })
|
||||
}
|
||||
|
||||
/// Our version info.
|
||||
pub fn version_info(&self) -> &VersionInfo { &self.this }
|
||||
|
||||
/// Information gathered concerning the release.
|
||||
pub fn info(&self) -> Option<OperationsInfo> { self.state.lock().latest.clone() }
|
||||
|
||||
/// Set a closure to call when we want to restart the client
|
||||
pub fn set_exit_handler<F>(&self, f: F) where F: Fn() + 'static + Send {
|
||||
*self.exit_handler.lock() = Some(Box::new(f));
|
||||
}
|
||||
|
||||
fn collect_release_info(operations: &Operations, release_id: &H256) -> Result<ReleaseInfo, String> {
|
||||
let (fork, track, semver, is_critical) = operations.release(CLIENT_ID, release_id)?;
|
||||
let latest_binary = operations.checksum(CLIENT_ID, release_id, &platform())?;
|
||||
Ok(ReleaseInfo {
|
||||
version: VersionInfo::from_raw(semver, track, release_id.clone().into()),
|
||||
is_critical: is_critical,
|
||||
fork: fork as u64,
|
||||
binary: if latest_binary.is_zero() { None } else { Some(latest_binary) },
|
||||
})
|
||||
}
|
||||
|
||||
fn collect_latest(&self) -> Result<OperationsInfo, String> {
|
||||
if let Some(ref operations) = *self.operations.lock() {
|
||||
let this_fork = operations.release(CLIENT_ID, &self.this.hash.into()).ok()
|
||||
.and_then(|(fork, track, _, _)| if track > 0 {Some(fork as u64)} else {None});
|
||||
|
||||
if self.this.track == ReleaseTrack::Unknown {
|
||||
return Err(format!("Current executable ({}) is unreleased.", H160::from(self.this.hash)));
|
||||
}
|
||||
|
||||
let latest_in_track = operations.latest_in_track(CLIENT_ID, self.this.track.into())?;
|
||||
let in_track = Self::collect_release_info(operations, &latest_in_track)?;
|
||||
let mut in_minor = Some(in_track.clone());
|
||||
const PROOF: &'static str = "in_minor initialised and assigned with Some; loop breaks if None assigned; qed";
|
||||
while in_minor.as_ref().expect(PROOF).version.track != self.this.track {
|
||||
let track = match in_minor.as_ref().expect(PROOF).version.track {
|
||||
ReleaseTrack::Beta => ReleaseTrack::Stable,
|
||||
ReleaseTrack::Nightly => ReleaseTrack::Beta,
|
||||
_ => { in_minor = None; break; }
|
||||
};
|
||||
in_minor = Some(Self::collect_release_info(operations, &operations.latest_in_track(CLIENT_ID, track.into())?)?);
|
||||
}
|
||||
|
||||
Ok(OperationsInfo {
|
||||
fork: operations.latest_fork()? as u64,
|
||||
this_fork: this_fork,
|
||||
track: in_track,
|
||||
minor: in_minor,
|
||||
})
|
||||
} else {
|
||||
Err("Operations not available".into())
|
||||
}
|
||||
}
|
||||
|
||||
fn update_file_name(v: &VersionInfo) -> String {
|
||||
format!("parity-{}.{}.{}-{:?}", v.version.major, v.version.minor, v.version.patch, v.hash)
|
||||
}
|
||||
|
||||
fn updates_path(name: &str) -> PathBuf {
|
||||
let mut dest = PathBuf::from(env::home_dir().unwrap().to_str().expect("env filesystem paths really should be valid; qed"));
|
||||
dest.push(".parity-updates");
|
||||
dest.push(name);
|
||||
dest
|
||||
}
|
||||
|
||||
fn fetch_done(&self, result: Result<PathBuf, fetch::Error>) {
|
||||
(|| -> Result<(), String> {
|
||||
let auto = {
|
||||
let mut s = self.state.lock();
|
||||
let fetched = s.fetching.take().unwrap();
|
||||
let b = result.map_err(|e| format!("Unable to fetch update ({}): {:?}", fetched.version, e))?;
|
||||
info!("Fetched latest version ({}) OK to {}", fetched.version, b.display());
|
||||
let dest = Self::updates_path(&Self::update_file_name(&fetched.version));
|
||||
fs::create_dir_all(dest.parent().expect("at least one thing pushed; qed")).map_err(|e| format!("Unable to create updates path: {:?}", e))?;
|
||||
fs::copy(&b, &dest).map_err(|e| format!("Unable to copy update: {:?}", e))?;
|
||||
info!("Copied file to {}", dest.display());
|
||||
let auto = match self.update_policy.filter {
|
||||
UpdateFilter::All => true,
|
||||
UpdateFilter::Critical if fetched.is_critical /* TODO: or is on a bad fork */ => true,
|
||||
_ => false,
|
||||
};
|
||||
s.ready = Some(fetched);
|
||||
auto
|
||||
};
|
||||
if auto {
|
||||
// will lock self.state, so ensure it's outside of previous block.
|
||||
self.execute_upgrade();
|
||||
}
|
||||
Ok(())
|
||||
})().unwrap_or_else(|e| warn!("{}", e));
|
||||
}
|
||||
|
||||
fn poll(&self) {
|
||||
info!(target: "updater", "Current release is {}", self.this);
|
||||
|
||||
if self.operations.lock().is_none() {
|
||||
if let Some(ops_addr) = self.client.upgrade().and_then(|c| c.registry_address("operations".into())) {
|
||||
trace!(target: "client", "Found operations at {}", ops_addr);
|
||||
let client = self.client.clone();
|
||||
*self.operations.lock() = Some(Operations::new(ops_addr, move |a, d| client.upgrade().ok_or("No client!".into()).and_then(|c| c.call_contract(a, d))));
|
||||
} else {
|
||||
// No Operations contract - bail.
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let current_number = self.client.upgrade().map_or(0, |c| c.block_number(BlockId::Latest).unwrap_or(0));
|
||||
|
||||
let mut capability = CapState::Unknown;
|
||||
let latest = self.collect_latest().ok();
|
||||
if let Some(ref latest) = latest {
|
||||
info!(target: "updater", "Latest release in our track is v{} it is {}critical ({} binary is {})",
|
||||
latest.track.version,
|
||||
if latest.track.is_critical {""} else {"non-"},
|
||||
platform(),
|
||||
if let Some(ref b) = latest.track.binary {
|
||||
format!("{}", b)
|
||||
} else {
|
||||
"unreleased".into()
|
||||
}
|
||||
);
|
||||
let mut s = self.state.lock();
|
||||
let running_latest = latest.track.version.hash == self.version_info().hash;
|
||||
let already_have_latest = s.installed.as_ref().or(s.ready.as_ref()).map_or(false, |t| *t == latest.track);
|
||||
if self.update_policy.enable_downloading && !running_latest && !already_have_latest {
|
||||
if let Some(b) = latest.track.binary {
|
||||
if s.fetching.is_none() {
|
||||
info!("Attempting to get parity binary {}", b);
|
||||
s.fetching = Some(latest.track.clone());
|
||||
let weak_self = self.weak_self.lock().clone();
|
||||
let f = move |r: Result<PathBuf, fetch::Error>| if let Some(this) = weak_self.upgrade() { this.fetch_done(r) };
|
||||
self.fetcher.lock().as_ref().expect("Created on `new`; qed").fetch(b, Box::new(f)).ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
info!(target: "updater", "Fork: this/current/latest/latest-known: {}/#{}/#{}/#{}", match latest.this_fork { Some(f) => format!("#{}", f), None => "unknown".into(), }, current_number, latest.track.fork, latest.fork);
|
||||
|
||||
if let Some(this_fork) = latest.this_fork {
|
||||
if this_fork < latest.fork {
|
||||
// We're behind the latest fork. Now is the time to be upgrading; perhaps we're too late...
|
||||
if let Some(c) = self.client.upgrade() {
|
||||
let current_number = c.block_number(BlockId::Latest).unwrap_or(0);
|
||||
if current_number >= latest.fork - 1 {
|
||||
// We're at (or past) the last block we can import. Disable the client.
|
||||
if self.update_policy.require_consensus {
|
||||
c.disable();
|
||||
}
|
||||
capability = CapState::IncapableSince(latest.fork);
|
||||
} else {
|
||||
capability = CapState::CapableUntil(latest.fork);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
capability = CapState::Capable;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut s = self.state.lock();
|
||||
s.latest = latest;
|
||||
s.capability = capability;
|
||||
}
|
||||
}
|
||||
|
||||
impl ChainNotify for Updater {
|
||||
fn new_blocks(&self, _imported: Vec<H256>, _invalid: Vec<H256>, _enacted: Vec<H256>, _retracted: Vec<H256>, _sealed: Vec<H256>, _duration: u64) {
|
||||
// TODO: something like this
|
||||
// if !self.client.upgrade().map_or(true, |c| c.is_major_syncing()) {
|
||||
self.poll();
|
||||
// }
|
||||
}
|
||||
}
|
||||
|
||||
impl fetch::urlhint::ContractClient for Updater {
|
||||
fn registrar(&self) -> Result<Address, String> {
|
||||
self.client.upgrade().ok_or_else(|| "Client not available".to_owned())?
|
||||
.registrar_address()
|
||||
.ok_or_else(|| "Registrar not available".into())
|
||||
}
|
||||
|
||||
fn call(&self, address: Address, data: Bytes) -> Result<Bytes, String> {
|
||||
self.client.upgrade().ok_or_else(|| "Client not available".to_owned())?
|
||||
.call_contract(address, data)
|
||||
}
|
||||
}
|
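Note: a minimal wiring sketch for the service above (hypothetical glue, not part of this commit). It assumes the caller already has an `Arc<BlockChainClient>` and registers the updater for chain notifications itself so that poll() keeps running:

	extern crate ethcore;
	extern crate parity_updater as updater;

	use std::sync::Arc;
	use ethcore::client::BlockChainClient;
	use updater::{Updater, UpdatePolicy, UpdateFilter};

	fn start_updater(client: Arc<BlockChainClient>) -> Arc<Updater> {
		let policy = UpdatePolicy {
			enable_downloading: true,       // fetch binaries for new releases
			require_consensus: true,        // disable the client past an unsupported fork
			filter: UpdateFilter::Critical, // only auto-install critical releases
		};
		let updater = Updater::new(Arc::downgrade(&client), policy);
		updater.set_exit_handler(|| println!("restart requested by updater"));
		// The caller should also register `updater` as a ChainNotify listener
		// so new_blocks() triggers poll() as the chain advances.
		updater
	}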