Merge branch 'jsonrpc2' into rpc_poll_ids
commit 4ce9aad749

Cargo.lock (generated)
@@ -15,6 +15,7 @@ dependencies = [
 "fdlimit 0.1.0",
 "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
+"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -10,6 +10,7 @@ log = "0.3"
 env_logger = "0.3"
 rustc-serialize = "0.3"
 docopt = "0.6"
+time = "0.1"
 ctrlc = { git = "https://github.com/tomusdrw/rust-ctrlc.git" }
 clippy = { version = "0.0.44", optional = true }
 ethcore-util = { path = "util" }
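The new time dependency (mirrored in the Cargo.lock hunk above) is pulled in for the timestamped log format added further down in this merge. A minimal sketch of the call it enables, assuming the time 0.1 API that the later hunks use:

extern crate time;

fn main() {
    // time 0.1: now() returns the current local time as a Tm,
    // strftime renders it with a format string
    let timestamp = time::strftime("%Y-%m-%d %H:%M:%S %Z", &time::now()).unwrap();
    println!("{} parity starting", timestamp);
}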
@@ -110,9 +110,8 @@ impl<'a, D> ChainFilter<'a, D> where D: FilterDataSource
 // map them to offsets
 .map(|li| li.index * level_size)
 // get all blocks that may contain our bloom
-.map(|off| self.blocks(bloom, from_block, to_block, level - 1, off))
 // filter existing ones
-.filter_map(|x| x)
+.filter_map(|off| self.blocks(bloom, from_block, to_block, level - 1, off))
 // flatten nested structures
 .flat_map(|v| v)
 .collect();
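The pattern in this hunk and several below is collapsing a map over an Option-returning call followed by .filter_map(|x| x) into a single .filter_map. A hedged, self-contained illustration; halve is a made-up stand-in for self.blocks:

fn halve(n: u32) -> Option<u32> {
    if n % 2 == 0 { Some(n / 2) } else { None }
}

fn main() {
    // two-step form: map to Option<u32>, then strip the Nones
    let verbose: Vec<u32> = (1..7).map(halve).filter_map(|x| x).collect();
    // collapsed form used by the new code
    let direct: Vec<u32> = (1..7).filter_map(halve).collect();

    assert_eq!(verbose, direct);
    assert_eq!(direct, vec![1, 2, 3]);
}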
@@ -161,9 +160,8 @@ impl<'a, D> ChainFilter<'a, D> where D: FilterDataSource
 self.indexer.lower_level_bloom_indexes(&index)
 .into_iter()
 // get blooms
-.map(bloom_at)
 // filter existing ones
-.filter_map(|b| b)
+.filter_map(bloom_at)
 // BitOr all of them
 .fold(H2048::new(), |acc, bloom| acc | bloom)
 };
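Same collapse, but here the Option-returning lookup (bloom_at) is handed to filter_map directly before the BitOr fold. A hedged sketch of that shape, with u8 standing in for the H2048 bloom type:

fn main() {
    let stored: Vec<Option<u8>> = vec![Some(0b0001), None, Some(0b0100)];
    // bloom_at: an Option-returning lookup, passed to filter_map as-is
    let bloom_at = |idx: usize| -> Option<u8> { stored.get(idx).and_then(|b| *b) };

    let combined = (0..stored.len())
        .filter_map(bloom_at)
        // BitOr all of them, as the unchanged fold in the hunk does
        .fold(0u8, |acc, bloom| acc | bloom);

    assert_eq!(combined, 0b0101);
}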
@@ -482,8 +482,7 @@ impl BlockChainClient for Client {
 
 fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry> {
 let mut blocks = filter.bloom_possibilities().iter()
-.map(|bloom| self.blocks_with_bloom(bloom, filter.from_block.clone(), filter.to_block.clone()))
-.filter_map(|m| m)
+.filter_map(|bloom| self.blocks_with_bloom(bloom, filter.from_block.clone(), filter.to_block.clone()))
 .flat_map(|m| m)
 // remove duplicate elements
 .collect::<HashSet<u64>>()
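Besides the filter_map collapse, the hunk keeps the dedupe step: collecting block numbers into a HashSet drops duplicates before the list is sorted in the next hunk. A hedged sketch of that step in isolation:

use std::collections::HashSet;

fn main() {
    let numbers = vec![7u64, 3, 7, 3, 9];
    // collect into a HashSet to remove duplicate elements...
    let deduped: HashSet<u64> = numbers.into_iter().collect();
    // ...then back into a Vec so it can be sorted
    let mut blocks: Vec<u64> = deduped.into_iter().collect();
    blocks.sort();

    assert_eq!(blocks, vec![3, 7, 9]);
}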
@@ -493,17 +492,14 @@ impl BlockChainClient for Client {
 blocks.sort();
 
 blocks.into_iter()
-.map(|number| self.chain.read().unwrap().block_hash(number).map(|hash| (number, hash)))
-.filter_map(|m| m)
-.map(|(number, hash)| self.chain.read().unwrap().block_receipts(&hash).map(|r| (number, hash, r.receipts)))
-.filter_map(|m| m)
-.map(|(number, hash, receipts)| self.chain.read().unwrap().block(&hash).map(|ref b| (number, hash, receipts, BlockView::new(b).transaction_hashes())))
-.filter_map(|m| m)
-.map(|(number, hash, receipts, hashes)| {
+.filter_map(|number| self.chain.read().unwrap().block_hash(number).map(|hash| (number, hash)))
+.filter_map(|(number, hash)| self.chain.read().unwrap().block_receipts(&hash).map(|r| (number, hash, r.receipts)))
+.filter_map(|(number, hash, receipts)| self.chain.read().unwrap().block(&hash).map(|ref b| (number, hash, receipts, BlockView::new(b).transaction_hashes())))
+.flat_map(|(number, hash, receipts, hashes)| {
 let mut log_index = 0;
 receipts.into_iter()
 .enumerate()
-.map(|(index, receipt)| {
+.flat_map(|(index, receipt)| {
 log_index += receipt.logs.len();
 receipt.logs.into_iter()
 .enumerate()
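The chain of lookups now threads a growing tuple through successive filter_map calls and ends in a flat_map that expands each block into its per-receipt entries. A hedged sketch of that shape; the HashMaps are illustrative stand-ins for the chain's block_hash and block_receipts lookups:

use std::collections::HashMap;

fn main() {
    let hashes: HashMap<u64, &str> = vec![(1, "0xaa"), (2, "0xbb")].into_iter().collect();
    let receipts: HashMap<&str, Vec<u32>> = vec![("0xaa", vec![10, 11]), ("0xbb", vec![20])]
        .into_iter()
        .collect();

    let logs: Vec<(u64, &str, u32)> = vec![1u64, 2, 3]
        .into_iter()
        // number -> (number, hash), dropping unknown blocks
        .filter_map(|number| hashes.get(&number).map(|hash| (number, *hash)))
        // (number, hash) -> (number, hash, receipts), dropping blocks without receipts
        .filter_map(|(number, hash)| receipts.get(hash).map(|r| (number, hash, r.clone())))
        // expand each block into one entry per receipt
        .flat_map(|(number, hash, rs)| rs.into_iter().map(move |r| (number, hash, r)))
        .collect();

    assert_eq!(logs, vec![(1, "0xaa", 10), (1, "0xaa", 11), (2, "0xbb", 20)]);
}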
@@ -518,11 +514,9 @@ impl BlockChainClient for Client {
 })
 .collect::<Vec<LocalizedLogEntry>>()
 })
-.flat_map(|m| m)
 .collect::<Vec<LocalizedLogEntry>>()
 
 })
-.flat_map(|m| m)
 .collect()
 }
 }
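The trailing .flat_map(|m| m) calls become redundant once the outer closures (rewritten to flat_map in the previous hunk) already flatten their output. A hedged illustration of that equivalence, with repeat as a made-up stand-in for a closure returning a collection:

fn repeat(n: u32) -> Vec<u32> {
    vec![n; n as usize]
}

fn main() {
    // old shape: map to a Vec per item, then flatten with .flat_map(|m| m)
    let via_map: Vec<u32> = (1..4).map(repeat).flat_map(|m| m).collect();
    // new shape: flat_map does both steps at once
    let direct: Vec<u32> = (1..4).flat_map(repeat).collect();

    assert_eq!(via_map, direct);
    assert_eq!(direct, vec![1, 2, 2, 3, 3, 3]);
}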
@@ -73,30 +73,26 @@ impl Filter {
 
 self.topics.iter().fold(blooms, | bs, topic | match *topic {
 None => bs,
-Some(ref topics) => bs.into_iter().map(|bloom| {
+Some(ref topics) => bs.into_iter().flat_map(|bloom| {
 topics.into_iter().map(|topic| {
 let mut b = bloom.clone();
 b.shift_bloomed(&topic.sha3());
 b
 }).collect::<Vec<H2048>>()
-}).flat_map(|m| m).collect()
+}).collect()
 })
 }
 
 /// Returns true if given log entry matches filter.
 pub fn matches(&self, log: &LogEntry) -> bool {
 let matches = match self.address {
-Some(ref addresses) if !addresses.is_empty() => addresses.iter().fold(false, |res, address| {
-res || &log.address == address
-}),
+Some(ref addresses) if !addresses.is_empty() => addresses.iter().any(|address| &log.address == address),
 _ => true
 };
 
-matches && self.topics.iter().enumerate().fold(true, |res, (i, topic)| match *topic {
-Some(ref topics) if !topics.is_empty() => res && topics.iter().fold(false, | acc, topic | {
-acc || log.topics.get(i) == Some(topic)
-}),
-_ => res,
+matches && self.topics.iter().enumerate().all(|(i, topic)| match *topic {
+Some(ref topics) if !topics.is_empty() => topics.iter().any(|topic| log.topics.get(i) == Some(topic)),
+_ => true
 })
 }
 }
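Here the hand-rolled boolean folds become Iterator::any and Iterator::all, which also short-circuit instead of scanning every element. A hedged illustration of the two rewrites, with plain integers standing in for addresses and topics:

fn main() {
    let addresses = vec![1u32, 5, 9];
    let target = 5u32;
    // fold(false, |res, x| res || p(x)) is any(p)
    let folded = addresses.iter().fold(false, |res, address| res || *address == target);
    let idiomatic = addresses.iter().any(|address| *address == target);
    assert_eq!(folded, idiomatic);

    let topics = vec![2u32, 4, 6];
    // fold(true, |res, x| res && p(x)) is all(p)
    let folded_all = topics.iter().fold(true, |res, topic| res && topic % 2 == 0);
    let idiomatic_all = topics.iter().all(|topic| topic % 2 == 0);
    assert_eq!(folded_all, idiomatic_all);
}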
@@ -30,6 +30,7 @@ extern crate env_logger;
 extern crate ctrlc;
 extern crate fdlimit;
 extern crate daemonize;
+extern crate time;
 
 #[cfg(feature = "rpc")]
 extern crate ethcore_rpc as rpc;
@@ -38,7 +39,6 @@ use std::net::{SocketAddr};
 use std::env;
 use std::process::exit;
 use std::path::PathBuf;
-use rlog::{LogLevelFilter};
 use env_logger::LogBuilder;
 use ctrlc::CtrlC;
 use util::*;
@@ -112,6 +112,8 @@ struct Args {
 }
 
 fn setup_log(init: &Option<String>) {
+use rlog::*;
+
 let mut builder = LogBuilder::new();
 builder.filter(None, LogLevelFilter::Info);
 
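With the crate-level use rlog::{LogLevelFilter}; removed above, the items the new format closure needs (LogRecord, max_log_level, the filter levels) are brought in with a glob import scoped to setup_log only. A hedged sketch of that scoping, with a stub module standing in for the real external rlog crate:

mod rlog {
    #[derive(PartialEq, PartialOrd)]
    pub enum LogLevelFilter { Info, Debug }
    pub fn max_log_level() -> LogLevelFilter { LogLevelFilter::Info }
}

fn setup_log() {
    // the glob import is visible only inside this function body
    use self::rlog::*;

    if max_log_level() <= LogLevelFilter::Info {
        println!("concise log format selected");
    }
}

fn main() {
    setup_log();
}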
@@ -123,6 +125,15 @@ fn setup_log(init: &Option<String>) {
 builder.parse(s);
 }
 
+let format = |record: &LogRecord| {
+let timestamp = time::strftime("%Y-%m-%d %H:%M:%S %Z", &time::now()).unwrap();
+if max_log_level() <= LogLevelFilter::Info {
+format!("{}{}", timestamp, record.args())
+} else {
+format!("{}{}:{}: {}", timestamp, record.level(), record.target(), record.args())
+}
+};
+builder.format(format);
 builder.init().unwrap();
 }
 
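The new formatter prepends a strftime timestamp and only includes level and target when the log level is more verbose than Info. A hedged, self-contained sketch of the same branching, with plain strings standing in for log's LogRecord and level types (time 0.1 assumed as a dependency):

extern crate time;

fn format_line(verbose: bool, level: &str, target: &str, msg: &str) -> String {
    let timestamp = time::strftime("%Y-%m-%d %H:%M:%S %Z", &time::now()).unwrap();
    if !verbose {
        // concise form used at Info and below
        format!("{}{}", timestamp, msg)
    } else {
        // verbose form includes level and target
        format!("{}{}:{}: {}", timestamp, level, target, msg)
    }
}

fn main() {
    println!("{}", format_line(false, "INFO", "parity", "Starting up"));
    println!("{}", format_line(true, "DEBUG", "ethcore::client", "Importing block"));
}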
@@ -500,7 +500,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
 match TcpStream::connect(&address) {
 Ok(socket) => socket,
 Err(e) => {
-warn!("Can't connect to node: {:?}", e);
+warn!("Can't connect to address {:?}: {:?}", address, e);
 return;
 }
 }
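Including the target address in the warning makes a failed dial attributable to a specific peer. A hedged sketch of the same idea using std::net and println! (the diff itself runs inside the Host event loop with mio's TcpStream and the warn! macro):

use std::net::{SocketAddr, TcpStream};

fn connect_or_warn(address: &SocketAddr) -> Option<TcpStream> {
    match TcpStream::connect(address) {
        Ok(socket) => Some(socket),
        Err(e) => {
            // the address is part of the message, not just the error
            println!("WARN: Can't connect to address {:?}: {:?}", address, e);
            None
        }
    }
}

fn main() {
    // illustrative address only
    let address: SocketAddr = "127.0.0.1:30303".parse().unwrap();
    if connect_or_warn(&address).is_none() {
        println!("giving up on {}", address);
    }
}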