Node table persistency

arkpar 2016-02-15 11:54:38 +01:00
parent 986448ce7a
commit 186c7585d2
6 changed files with 124 additions and 7 deletions
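
In short: NodeTable now remembers the directory it was constructed with, loads nodes.json from that directory on creation, and writes the table back when it is dropped. A rough usage sketch against the API in this diff (the directory path is illustrative, and add_node is assumed to already exist on NodeTable since it is not touched by this change):

fn node_table_roundtrip_sketch() {
	// loads <path>/nodes.json if present
	let mut table = NodeTable::new(Some("/tmp/ethcore-net".to_string()));
	if let Ok(node) = Node::from_str("enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770") {
		let id = node.id.clone();
		table.add_node(node);    // assumed existing API, not shown in this diff
		table.note_failure(&id); // failure counts are persisted alongside the url
	}
	drop(table); // the new Drop impl calls save(), writing <path>/nodes.json
}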

View File

@ -170,8 +170,8 @@ pub trait BytesConvertable {
	fn to_bytes(&self) -> Bytes { self.as_slice().to_vec() }
}

-impl<T> BytesConvertable for T where T: Deref<Target = [u8]> {
-	fn bytes(&self) -> &[u8] { self.deref() }
+impl<T> BytesConvertable for T where T: AsRef<[u8]> {
+	fn bytes(&self) -> &[u8] { self.as_ref() }
}

#[test]

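The relaxed bound matters because AsRef<[u8]> covers more types than Deref<Target = [u8]>: fixed-size byte arrays, and the hash types once they gain the AsRef impl in the next file, expose their bytes via as_ref() but do not deref to a slice. A minimal standalone sketch of the difference (not part of the commit):

fn bytes_of<T: AsRef<[u8]>>(t: &T) -> Vec<u8> { t.as_ref().to_vec() }

#[test]
fn asref_is_broader_than_deref() {
	let arr = [1u8, 2, 3];   // [u8; 3]: implements AsRef<[u8]>, but not Deref<Target = [u8]>
	let vec = vec![4u8, 5];  // Vec<u8>: implements both
	assert_eq!(bytes_of(&arr), vec![1, 2, 3]);
	assert_eq!(bytes_of(&vec), vec![4, 5]);
}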
View File

@ -86,6 +86,13 @@ macro_rules! impl_hash {
		}
	}

	impl AsRef<[u8]> for $from {
		#[inline]
		fn as_ref(&self) -> &[u8] {
			&self.0
		}
	}

	impl DerefMut for $from {
		#[inline]
		fn deref_mut(&mut self) -> &mut [u8] {

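With AsRef<[u8]> generated for every impl_hash! type, a hash value can be handed directly to slice-based helpers, which is what keeps H256/H512 satisfying the BytesConvertable blanket impl above. A small sketch, reusing the H512 hex id from the tests further down and assuming FromStr is in scope:

#[test]
fn hash_as_ref_sketch() {
	let id = H512::from_str("a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
	assert_eq!(id.as_ref().len(), 64); // H512 is 64 bytes
	assert_eq!(id.as_ref()[0], 0xa9);  // first byte of the id above
}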
View File

@ -256,6 +256,11 @@ impl<Message> Handler for IoManager<Message> where Message: Send + Clone + Sync
			IoMessage::DeregisterStream { handler_id, token } => {
				let handler = self.handlers.get(handler_id).expect("Unknown handler id").clone();
				handler.deregister_stream(token, event_loop);
				// unregister a timer associated with the token (if any)
				let timer_id = token + handler_id * TOKENS_PER_HANDLER;
				if let Some(timer) = self.timers.write().unwrap().remove(&timer_id) {
					event_loop.clear_timeout(timer.timeout);
				}
			},
			IoMessage::UpdateStreamRegistration { handler_id, token } => {
				let handler = self.handlers.get(handler_id).expect("Unknown handler id").clone();

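The timer lookup key mirrors how IO tokens are allocated: each handler owns a block of TOKENS_PER_HANDLER tokens, so token + handler_id * TOKENS_PER_HANDLER is unique across handlers and can index the shared timers map. A sketch of the arithmetic (the constant's value here is assumed for illustration, not taken from this diff):

const TOKENS_PER_HANDLER: usize = 16384; // assumed value; the real constant lives in the io module

fn timer_key(handler_id: usize, token: usize) -> usize {
	token + handler_id * TOKENS_PER_HANDLER
}

#[test]
fn timer_key_is_unique_per_handler() {
	assert_eq!(timer_key(0, 5), 5);
	assert_eq!(timer_key(2, 5), 2 * 16384 + 5);
}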
View File

@ -209,7 +209,7 @@ impl Discovery {
		rlp.append(&timestamp);
		let bytes = rlp.drain();
-		let hash = bytes.sha3();
+		let hash = bytes.as_ref().sha3();
		let signature = match ec::sign(&self.secret, &hash) {
			Ok(s) => s,
			Err(_) => {

View File

@ -20,11 +20,17 @@ use std::net::{SocketAddr, ToSocketAddrs, SocketAddrV4, SocketAddrV6, Ipv4Addr,
use std::hash::{Hash, Hasher};
use std::str::{FromStr};
use std::collections::HashMap;
use std::fmt::{Display, Formatter};
use std::path::{PathBuf, Path};
use std::fmt;
use std::fs;
use std::io::{Read, Write};
use hash::*;
use rlp::*;
use time::Tm;
use error::*;
use network::discovery::TableUpdates;
pub use rustc_serialize::json::Json;

/// Node public key
pub type NodeId = H512;
@ -135,6 +141,17 @@ impl Node {
	}
}

impl Display for Node {
	fn fmt(&self, f: &mut Formatter) -> fmt::Result {
		if self.endpoint.udp_port != self.endpoint.address.port() {
			write!(f, "enode://{}@{}+{}", self.id.hex(), self.endpoint.address, self.endpoint.udp_port)
		} else {
			write!(f, "enode://{}@{}", self.id.hex(), self.endpoint.address)
		}
	}
}

impl FromStr for Node {
	type Err = UtilError;
	fn from_str(s: &str) -> Result<Self, Self::Err> {
@ -170,13 +187,15 @@ impl Hash for Node {
/// Node table backed by disk file.
pub struct NodeTable {
-	nodes: HashMap<NodeId, Node>
+	nodes: HashMap<NodeId, Node>,
+	path: Option<String>,
}

impl NodeTable {
-	pub fn new(_path: Option<String>) -> NodeTable {
+	pub fn new(path: Option<String>) -> NodeTable {
		NodeTable {
-			nodes: HashMap::new()
+			path: path.clone(),
+			nodes: NodeTable::load(path),
		}
	}
@ -208,11 +227,86 @@ impl NodeTable {
		}
	}

	/// Increase failure counter for a node
	pub fn note_failure(&mut self, id: &NodeId) {
		if let Some(node) = self.nodes.get_mut(id) {
			node.failures += 1;
		}
	}

	fn save(&self) {
		if let Some(ref path) = self.path {
			let mut path_buf = PathBuf::from(path);
			path_buf.push("nodes.json");
			let mut json = String::new();
			json.push_str("{\n");
			json.push_str("\"nodes\": [\n");
			let node_ids = self.nodes();
			for i in 0 .. node_ids.len() {
				let node = self.nodes.get(&node_ids[i]).unwrap();
				json.push_str(&format!("\t{{ \"url\": \"{}\", \"failures\": {} }}{}\n", node, node.failures, if i == node_ids.len() - 1 {""} else {","}));
			}
			json.push_str("]\n");
			json.push_str("}");
			let mut file = match fs::File::create(path_buf.as_path()) {
				Ok(file) => file,
				Err(e) => {
					warn!("Error creating node table file: {:?}", e);
					return;
				}
			};
			if let Err(e) = file.write_all(&json.into_bytes()) {
				warn!("Error writing node table file: {:?}", e);
			}
		}
	}

	fn load(path: Option<String>) -> HashMap<NodeId, Node> {
		let mut nodes: HashMap<NodeId, Node> = HashMap::new();
		if let Some(path) = path {
			let mut path_buf = PathBuf::from(path);
			path_buf.push("nodes.json");
			let mut file = match fs::File::open(path_buf.as_path()) {
				Ok(file) => file,
				Err(e) => {
					warn!("Error opening node table file: {:?}", e);
					return nodes;
				}
			};
			let mut buf = String::new();
			match file.read_to_string(&mut buf) {
				Ok(_) => {},
				Err(e) => {
					warn!("Error reading node table file: {:?}", e);
					return nodes;
				}
			}
			let json = match Json::from_str(&buf) {
				Ok(json) => json,
				Err(e) => {
					warn!("Error parsing node table file: {:?}", e);
					return nodes;
				}
			};
			if let Some(list) = json.as_object().and_then(|o| o.get("nodes")).and_then(|n| n.as_array()) {
				for n in list.iter().filter_map(|n| n.as_object()) {
					if let Some(url) = n.get("url").and_then(|u| u.as_string()) {
						if let Ok(mut node) = Node::from_str(url) {
							if let Some(failures) = n.get("failures").and_then(|f| f.as_u64()) {
								node.failures = failures as u32;
							}
							nodes.insert(node.id.clone(), node);
						}
					}
				}
			}
		}
		nodes
	}
}

impl Drop for NodeTable {
	fn drop(&mut self) {
		self.save();
	}
}

#[cfg(test)]
@ -247,4 +341,15 @@ mod tests {
H512::from_str("a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap(), H512::from_str("a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap(),
node.id); node.id);
} }
#[test]
fn table_failure_order() {
let node1 = Node::from_str("enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770");
let node2 = Node::from_str("enode://b979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770");
let node3 = Node::from_str("enode://c979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770");
let id1 = H512::from_str("a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
let id2 = H512::from_str("b979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
let id3 = H512::from_str("3979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
let mut table = NodeTable::new(None);
}
} }

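For reference, with the quoted keys used by save() above, a table holding the single node from the tests would be written to nodes.json roughly as follows (the failure count is illustrative):

{
"nodes": [
	{ "url": "enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770", "failures": 1 }
]
}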
View File

@ -66,7 +66,7 @@ impl<T> Hashable for T where T: BytesConvertable {
#[test]
fn sha3_empty() {
-	assert_eq!([0u8; 0].sha3(), SHA3_EMPTY);
+	assert_eq!((&[0u8; 0]).sha3(), SHA3_EMPTY);
}

#[test]
fn sha3_as() {