Merge branch 'jsonrpc2' into rpc_poll_ids

commit 4ce9aad749

Cargo.lock (generated)
@@ -15,6 +15,7 @@ dependencies = [
  "fdlimit 0.1.0",
  "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
+ "time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -10,6 +10,7 @@ log = "0.3"
 env_logger = "0.3"
 rustc-serialize = "0.3"
 docopt = "0.6"
+time = "0.1"
 ctrlc = { git = "https://github.com/tomusdrw/rust-ctrlc.git" }
 clippy = { version = "0.0.44", optional = true }
 ethcore-util = { path = "util" }
@@ -110,9 +110,8 @@ impl<'a, D> ChainFilter<'a, D> where D: FilterDataSource
 			// map them to offsets
 			.map(|li| li.index * level_size)
 			// get all blocks that may contain our bloom
-			.map(|off| self.blocks(bloom, from_block, to_block, level - 1, off))
 			// filter existing ones
-			.filter_map(|x| x)
+			.filter_map(|off| self.blocks(bloom, from_block, to_block, level - 1, off))
 			// flatten nested structures
 			.flat_map(|v| v)
 			.collect();
@@ -161,9 +160,8 @@ impl<'a, D> ChainFilter<'a, D> where D: FilterDataSource
 			self.indexer.lower_level_bloom_indexes(&index)
 				.into_iter()
 				// get blooms
-				.map(bloom_at)
 				// filter existing ones
-				.filter_map(|b| b)
+				.filter_map(bloom_at)
 				// BitOr all of them
 				.fold(H2048::new(), |acc, bloom| acc | bloom)
 		};
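
The two ChainFilter hunks above fold .map(f) followed by .filter_map(|x| x) into a single .filter_map(f) for an Option-returning f. A minimal standalone sketch of that equivalence; the lookup helper below is hypothetical, not from the codebase:

// Sketch: .map(f).filter_map(|x| x) gives the same items as .filter_map(f)
// when f returns an Option.
fn lookup(n: u32) -> Option<u32> {
	// stand-in for a data-source query that can miss
	if n % 2 == 0 { Some(n * 10) } else { None }
}

fn main() {
	let input = vec![1u32, 2, 3, 4];

	// old style: map to Option, then strip the Nones
	let old: Vec<u32> = input.iter().map(|&n| lookup(n)).filter_map(|x| x).collect();
	// new style: one combinator does both steps
	let new: Vec<u32> = input.iter().filter_map(|&n| lookup(n)).collect();

	assert_eq!(old, new);
	assert_eq!(new, vec![20, 40]);
}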
@@ -482,8 +482,7 @@ impl BlockChainClient for Client {

 	fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry> {
 		let mut blocks = filter.bloom_possibilities().iter()
-			.map(|bloom| self.blocks_with_bloom(bloom, filter.from_block.clone(), filter.to_block.clone()))
-			.filter_map(|m| m)
+			.filter_map(|bloom| self.blocks_with_bloom(bloom, filter.from_block.clone(), filter.to_block.clone()))
 			.flat_map(|m| m)
 			// remove duplicate elements
 			.collect::<HashSet<u64>>()
@@ -493,17 +492,14 @@ impl BlockChainClient for Client {
 		blocks.sort();

 		blocks.into_iter()
-			.map(|number| self.chain.read().unwrap().block_hash(number).map(|hash| (number, hash)))
-			.filter_map(|m| m)
-			.map(|(number, hash)| self.chain.read().unwrap().block_receipts(&hash).map(|r| (number, hash, r.receipts)))
-			.filter_map(|m| m)
-			.map(|(number, hash, receipts)| self.chain.read().unwrap().block(&hash).map(|ref b| (number, hash, receipts, BlockView::new(b).transaction_hashes())))
-			.filter_map(|m| m)
-			.map(|(number, hash, receipts, hashes)| {
+			.filter_map(|number| self.chain.read().unwrap().block_hash(number).map(|hash| (number, hash)))
+			.filter_map(|(number, hash)| self.chain.read().unwrap().block_receipts(&hash).map(|r| (number, hash, r.receipts)))
+			.filter_map(|(number, hash, receipts)| self.chain.read().unwrap().block(&hash).map(|ref b| (number, hash, receipts, BlockView::new(b).transaction_hashes())))
+			.flat_map(|(number, hash, receipts, hashes)| {
 				let mut log_index = 0;
 				receipts.into_iter()
 					.enumerate()
-					.map(|(index, receipt)| {
+					.flat_map(|(index, receipt)| {
 						log_index += receipt.logs.len();
 						receipt.logs.into_iter()
 							.enumerate()
@@ -518,11 +514,9 @@ impl BlockChainClient for Client {
 						})
 						.collect::<Vec<LocalizedLogEntry>>()
 					})
-					.flat_map(|m| m)
 					.collect::<Vec<LocalizedLogEntry>>()

 			})
-			.flat_map(|m| m)
 			.collect()
 	}
 }
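
The Client::logs hunks apply the same idea to flattening: .map(f) followed by .flat_map(|m| m) collapses into .flat_map(f) when f returns a collection. A small sketch under that assumption; receipts_for is an illustrative stand-in, not a real API:

// Sketch: .map(f).flat_map(|m| m) yields the same sequence as .flat_map(f)
// when f returns a Vec (or any IntoIterator).
fn receipts_for(block: u32) -> Vec<String> {
	(0..block).map(|i| format!("block {} receipt {}", block, i)).collect()
}

fn main() {
	let blocks = vec![1u32, 2, 3];

	let old: Vec<String> = blocks.iter().map(|&b| receipts_for(b)).flat_map(|m| m).collect();
	let new: Vec<String> = blocks.iter().flat_map(|&b| receipts_for(b)).collect();

	assert_eq!(old, new);
}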
@@ -73,30 +73,26 @@ impl Filter {

 		self.topics.iter().fold(blooms, | bs, topic | match *topic {
 			None => bs,
-			Some(ref topics) => bs.into_iter().map(|bloom| {
+			Some(ref topics) => bs.into_iter().flat_map(|bloom| {
 				topics.into_iter().map(|topic| {
 					let mut b = bloom.clone();
 					b.shift_bloomed(&topic.sha3());
 					b
 				}).collect::<Vec<H2048>>()
-			}).flat_map(|m| m).collect()
+			}).collect()
 		})
 	}

 	/// Returns true if given log entry matches filter.
 	pub fn matches(&self, log: &LogEntry) -> bool {
 		let matches = match self.address {
-			Some(ref addresses) if !addresses.is_empty() => addresses.iter().fold(false, |res, address| {
-				res || &log.address == address
-			}),
+			Some(ref addresses) if !addresses.is_empty() => addresses.iter().any(|address| &log.address == address),
 			_ => true
 		};

-		matches && self.topics.iter().enumerate().fold(true, |res, (i, topic)| match *topic {
-			Some(ref topics) if !topics.is_empty() => res && topics.iter().fold(false, | acc, topic | {
-				acc || log.topics.get(i) == Some(topic)
-			}),
-			_ => res,
+		matches && self.topics.iter().enumerate().all(|(i, topic)| match *topic {
+			Some(ref topics) if !topics.is_empty() => topics.iter().any(|topic| log.topics.get(i) == Some(topic)),
+			_ => true
 		})
 	}
 }
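
The Filter::matches hunk swaps boolean folds for any and all, which express the same predicate checks and short-circuit as soon as the answer is known. A quick sketch of the equivalence with made-up values:

// Sketch: fold(false, |res, x| res || p(x)) == any(p),
//         fold(true,  |res, x| res && p(x)) == all(p).
fn main() {
	let addresses = vec![1u8, 2, 3];
	let target = 2u8;

	let found_fold = addresses.iter().fold(false, |res, a| res || *a == target);
	let found_any = addresses.iter().any(|a| *a == target);
	assert_eq!(found_fold, found_any);

	let topics = vec![Some(1u8), Some(2), Some(3)];
	let all_fold = topics.iter().fold(true, |res, t| res && t.is_some());
	let all_all = topics.iter().all(|t| t.is_some());
	assert_eq!(all_fold, all_all);
}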
@@ -30,6 +30,7 @@ extern crate env_logger;
 extern crate ctrlc;
 extern crate fdlimit;
 extern crate daemonize;
+extern crate time;

 #[cfg(feature = "rpc")]
 extern crate ethcore_rpc as rpc;
@@ -38,7 +39,6 @@ use std::net::{SocketAddr};
 use std::env;
 use std::process::exit;
 use std::path::PathBuf;
-use rlog::{LogLevelFilter};
 use env_logger::LogBuilder;
 use ctrlc::CtrlC;
 use util::*;
@@ -112,6 +112,8 @@ struct Args {
 }

 fn setup_log(init: &Option<String>) {
+	use rlog::*;
+
 	let mut builder = LogBuilder::new();
 	builder.filter(None, LogLevelFilter::Info);

@@ -123,6 +125,15 @@ fn setup_log(init: &Option<String>) {
 		builder.parse(s);
 	}

+	let format = |record: &LogRecord| {
+		let timestamp = time::strftime("%Y-%m-%d %H:%M:%S %Z", &time::now()).unwrap();
+		if max_log_level() <= LogLevelFilter::Info {
+			format!("{}{}", timestamp, record.args())
+		} else {
+			format!("{}{}:{}: {}", timestamp, record.level(), record.target(), record.args())
+		}
+	};
+	builder.format(format);
 	builder.init().unwrap();
 }

@@ -500,7 +500,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
 		match TcpStream::connect(&address) {
 			Ok(socket) => socket,
 			Err(e) => {
-				warn!("Can't connect to node: {:?}", e);
+				warn!("Can't connect to address {:?}: {:?}", address, e);
 				return;
 			}
 		}