// Copyright 2015-2019 Parity Technologies (UK) Ltd.
// This file is part of Parity Ethereum.

// Parity Ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity Ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
extern crate ansi_term;
use self::ansi_term::{
    Colour,
    Colour::{Blue, Cyan, Green, White, Yellow},
    Style,
};

use std::{
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering as AtomicOrdering},
        Arc,
    },
    time::{Duration, Instant},
};

use atty;
use ethcore::{
    client::{
        BlockChainClient, BlockChainInfo, BlockId, BlockInfo, BlockQueueInfo, ChainInfo,
        ChainNotify, Client, ClientIoMessage, ClientReport, NewBlocks,
    },
    snapshot::{service::Service as SnapshotService, RestorationStatus, SnapshotService as SS},
};
use ethereum_types::H256;
use io::{IoContext, IoHandler, TimerToken};
use light::{
    client::{LightChainClient, LightChainNotify},
    Cache as LightDataCache,
};
use number_prefix::{binary_prefix, Prefixed, Standalone};
use parity_rpc::{informant::RpcStats, is_major_importing_or_waiting};
use parking_lot::{Mutex, RwLock};
use sync::{LightSync, LightSyncProvider, ManageNetwork, SyncProvider};
use types::BlockNumber;

/// Format byte counts to standard denominations.
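/// Values under 1024 are shown as plain bytes; larger values are given a
/// binary prefix by the `number_prefix` crate, e.g. roughly "1 MiB" for
/// 1_048_576.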
pub fn format_bytes(b: usize) -> String {
    match binary_prefix(b as f64) {
        Standalone(bytes) => format!("{} bytes", bytes),
        Prefixed(prefix, n) => format!("{:.0} {}B", n, prefix),
    }
}

/// Something that can be converted to milliseconds.
pub trait MillisecondDuration {
    /// Get the value in milliseconds.
    fn as_milliseconds(&self) -> u64;
}

impl MillisecondDuration for Duration {
    fn as_milliseconds(&self) -> u64 {
        self.as_secs() * 1000 + self.subsec_nanos() as u64 / 1_000_000
    }
}
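
/// Sizes of the node's in-memory caches, keyed by a short label used in the
/// status line.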
#[derive(Default)]
struct CacheSizes {
    sizes: ::std::collections::BTreeMap<&'static str, usize>,
}

impl CacheSizes {
    fn insert(&mut self, key: &'static str, bytes: usize) {
        self.sizes.insert(key, bytes);
    }

    fn display<F>(&self, style: Style, paint: F) -> String
    where
        F: Fn(Style, String) -> String,
    {
        use std::fmt::Write;

        let mut buf = String::new();
        for (name, &size) in &self.sizes {
            write!(buf, " {:>8} {}", paint(style, format_bytes(size)), name)
                .expect("writing to string won't fail unless OOM; qed")
        }

        buf
    }
}
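
/// Sync status reported by the network layer, shown in the status line.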
pub struct SyncInfo {
    last_imported_block_number: BlockNumber,
    last_imported_old_block_number: Option<BlockNumber>,
    num_peers: usize,
    max_peers: u32,
    snapshot_sync: bool,
}
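
/// A snapshot of node state gathered for one informant tick.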
pub struct Report {
    importing: bool,
    chain_info: BlockChainInfo,
    client_report: ClientReport,
    queue_info: BlockQueueInfo,
    cache_sizes: CacheSizes,
    sync_info: Option<SyncInfo>,
}

/// Something which can provide data to the informant.
pub trait InformantData: Send + Sync {
    /// Whether it executes transactions
    fn executes_transactions(&self) -> bool;

    /// Whether it is currently importing (also included in `Report`)
    fn is_major_importing(&self) -> bool;

    /// Generate a report of blockchain status, memory usage, and sync info.
    fn report(&self) -> Report;
}

/// Informant data for a full node.
pub struct FullNodeInformantData {
    pub client: Arc<Client>,
    pub sync: Option<Arc<dyn SyncProvider>>,
    pub net: Option<Arc<dyn ManageNetwork>>,
}

impl InformantData for FullNodeInformantData {
    fn executes_transactions(&self) -> bool {
        true
    }

    fn is_major_importing(&self) -> bool {
        let state = self.sync.as_ref().map(|sync| sync.status().state);
        is_major_importing_or_waiting(state, self.client.queue_info(), false)
    }

    fn report(&self) -> Report {
        let (client_report, queue_info, blockchain_cache_info) = (
            self.client.report(),
            self.client.queue_info(),
            self.client.blockchain_cache_info(),
        );

        let chain_info = self.client.chain_info();

        let mut cache_sizes = CacheSizes::default();
        cache_sizes.insert("db", client_report.state_db_mem);
        cache_sizes.insert("queue", queue_info.mem_used);
        cache_sizes.insert("chain", blockchain_cache_info.total());

        let importing = self.is_major_importing();
        let sync_info = match (self.sync.as_ref(), self.net.as_ref()) {
            (Some(sync), Some(net)) => {
                let status = sync.status();
                let num_peers_range = net.num_peers_range();
                debug_assert!(num_peers_range.end() >= num_peers_range.start());

                cache_sizes.insert("sync", status.mem_used);

                Some(SyncInfo {
                    last_imported_block_number: status
                        .last_imported_block_number
                        .unwrap_or(chain_info.best_block_number),
                    last_imported_old_block_number: status.last_imported_old_block_number,
                    num_peers: status.num_peers,
                    max_peers: status
                        .current_max_peers(*num_peers_range.start(), *num_peers_range.end()),
                    snapshot_sync: status.is_snapshot_syncing(),
                })
            }
            _ => None,
        };

        Report {
            importing,
            chain_info,
            client_report,
            queue_info,
            cache_sizes,
            sync_info,
        }
    }
}

/// Informant data for a light node -- note that the network is required.
pub struct LightNodeInformantData {
    pub client: Arc<dyn LightChainClient>,
    pub sync: Arc<LightSync>,
    pub cache: Arc<Mutex<LightDataCache>>,
}

impl InformantData for LightNodeInformantData {
    fn executes_transactions(&self) -> bool {
        false
    }

    fn is_major_importing(&self) -> bool {
        self.sync.is_major_importing()
    }

    fn report(&self) -> Report {
        let (client_report, queue_info, chain_info) = (
            self.client.report(),
            self.client.queue_info(),
            self.client.chain_info(),
        );

        let mut cache_sizes = CacheSizes::default();
        cache_sizes.insert("queue", queue_info.mem_used);
        cache_sizes.insert("cache", self.cache.lock().mem_used());

        let peer_numbers = self.sync.peer_numbers();
        let sync_info = Some(SyncInfo {
            last_imported_block_number: chain_info.best_block_number,
            last_imported_old_block_number: None,
            num_peers: peer_numbers.connected,
            max_peers: peer_numbers.max as u32,
            snapshot_sync: false,
        });

        Report {
            importing: self.sync.is_major_importing(),
            chain_info,
            client_report,
            queue_info,
            cache_sizes,
            sync_info,
        }
    }
}
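
/// The informant: periodically writes a one-line summary of node status to the
/// log, using `T` as the source of that data.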
pub struct Informant<T> {
    last_tick: RwLock<Instant>,
    with_color: bool,
    target: T,
    snapshot: Option<Arc<SnapshotService>>,
    rpc_stats: Option<Arc<RpcStats>>,
    last_import: Mutex<Instant>,
    skipped: AtomicUsize,
    skipped_txs: AtomicUsize,
    in_shutdown: AtomicBool,
    last_report: Mutex<ClientReport>,
}

impl<T: InformantData> Informant<T> {
    /// Make a new instance, potentially with colored (`with_color`) output.
    pub fn new(
        target: T,
        snapshot: Option<Arc<SnapshotService>>,
        rpc_stats: Option<Arc<RpcStats>>,
        with_color: bool,
    ) -> Self {
        Informant {
            last_tick: RwLock::new(Instant::now()),
            with_color,
            target,
            snapshot,
            rpc_stats,
            last_import: Mutex::new(Instant::now()),
            skipped: AtomicUsize::new(0),
            skipped_txs: AtomicUsize::new(0),
            in_shutdown: AtomicBool::new(false),
            last_report: Mutex::new(Default::default()),
        }
    }

    /// Signal that we're shutting down; no more output necessary.
    pub fn shutdown(&self) {
        self.in_shutdown
            .store(true, ::std::sync::atomic::Ordering::SeqCst);
    }
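
    /// Print a status line if the node is busy importing or restoring a
    /// snapshot, or if at least 30 seconds have passed since the last line.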
    pub fn tick(&self) {
        let now = Instant::now();
        let elapsed = now.duration_since(*self.last_tick.read());

        let (client_report, full_report) = {
            let last_report = self.last_report.lock();
            let full_report = self.target.report();
            let diffed = full_report.client_report.clone() - &*last_report;
            (diffed, full_report)
        };

        let Report {
            importing,
            chain_info,
            queue_info,
            cache_sizes,
            sync_info,
            ..
        } = full_report;

        let rpc_stats = self.rpc_stats.as_ref();
        let snapshot_sync = sync_info.as_ref().map_or(false, |s| s.snapshot_sync)
            && self.snapshot.as_ref().map_or(false, |s| match s.status() {
                RestorationStatus::Ongoing { .. } | RestorationStatus::Initializing { .. } => true,
                _ => false,
            });
        if !importing && !snapshot_sync && elapsed < Duration::from_secs(30) {
            return;
        }

        *self.last_tick.write() = now;
        *self.last_report.lock() = full_report.client_report.clone();

        let paint = |c: Style, t: String| match self.with_color && atty::is(atty::Stream::Stdout) {
            true => format!("{}", c.paint(t)),
            false => t,
        };

        info!(target: "import", "{} {} {} {}",
            match importing {
                true => match snapshot_sync {
                    false => format!("Syncing {} {} {} {}+{} Qed",
                        paint(White.bold(), format!("{:>8}", format!("#{}", chain_info.best_block_number))),
                        paint(White.bold(), format!("{}", chain_info.best_block_hash)),
                        if self.target.executes_transactions() {
                            format!("{} blk/s {} tx/s {} Mgas/s",
                                paint(Yellow.bold(), format!("{:7.2}", (client_report.blocks_imported * 1000) as f64 / elapsed.as_milliseconds() as f64)),
                                paint(Yellow.bold(), format!("{:6.1}", (client_report.transactions_applied * 1000) as f64 / elapsed.as_milliseconds() as f64)),
                                paint(Yellow.bold(), format!("{:6.1}", (client_report.gas_processed / 1000).low_u64() as f64 / elapsed.as_milliseconds() as f64))
                            )
                        } else {
                            format!("{} hdr/s",
                                paint(Yellow.bold(), format!("{:6.1}", (client_report.blocks_imported * 1000) as f64 / elapsed.as_milliseconds() as f64))
                            )
                        },
                        paint(Green.bold(), format!("{:5}", queue_info.unverified_queue_size)),
                        paint(Green.bold(), format!("{:5}", queue_info.verified_queue_size))
                    ),
                    true => {
                        self.snapshot.as_ref().map_or(String::new(), |s|
                            match s.status() {
                                RestorationStatus::Ongoing { state_chunks, block_chunks, state_chunks_done, block_chunks_done } => {
                                    format!("Syncing snapshot {}/{}", state_chunks_done + block_chunks_done, state_chunks + block_chunks)
                                },
                                RestorationStatus::Initializing { chunks_done } => {
                                    format!("Snapshot initializing ({} chunks restored)", chunks_done)
                                },
                                _ => String::new(),
                            }
                        )
                    },
                },
                false => String::new(),
            },
            match sync_info.as_ref() {
                Some(ref sync_info) => format!("{}{}/{} peers",
                    match importing {
                        true => format!("{}",
                            if self.target.executes_transactions() {
                                paint(Green.bold(), format!("{:>8} ", format!("#{}", sync_info.last_imported_block_number)))
                            } else {
                                String::new()
                            }
                        ),
                        false => match sync_info.last_imported_old_block_number {
                            Some(number) => format!("{} ", paint(Yellow.bold(), format!("{:>8}", format!("#{}", number)))),
                            None => String::new(),
                        }
                    },
                    paint(Cyan.bold(), format!("{:2}", sync_info.num_peers)),
                    paint(Cyan.bold(), format!("{:2}", sync_info.max_peers)),
                ),
                _ => String::new(),
            },
            cache_sizes.display(Blue.bold(), &paint),
            match rpc_stats {
                Some(ref rpc_stats) => format!(
                    "RPC: {} conn, {} req/s, {} µs",
                    paint(Blue.bold(), format!("{:2}", rpc_stats.sessions())),
                    paint(Blue.bold(), format!("{:4}", rpc_stats.requests_rate())),
                    paint(Blue.bold(), format!("{:4}", rpc_stats.approximated_roundtrip())),
                ),
                _ => String::new(),
            },
        );
    }
}
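
// Notification path for a full node: an "Imported ..." line is printed at most
// once per second (and only outside of a major sync); blocks that arrive in
// between are tallied in `skipped`/`skipped_txs` and summarised on the next
// printed line.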
impl ChainNotify for Informant<FullNodeInformantData> {
    fn new_blocks(&self, new_blocks: NewBlocks) {
        if new_blocks.has_more_blocks_to_import {
            return;
        }
        let mut last_import = self.last_import.lock();
        let client = &self.target.client;

        let importing = self.target.is_major_importing();
        let ripe = Instant::now() > *last_import + Duration::from_secs(1) && !importing;
        let txs_imported = new_blocks
            .imported
            .iter()
            .take(
                new_blocks
                    .imported
                    .len()
                    .saturating_sub(if ripe { 1 } else { 0 }),
            )
            .filter_map(|h| client.block(BlockId::Hash(*h)))
            .map(|b| b.transactions_count())
            .sum();

        if ripe {
            if let Some(block) = new_blocks
                .imported
                .last()
                .and_then(|h| client.block(BlockId::Hash(*h)))
            {
                let header_view = block.header_view();
                let size = block.rlp().as_raw().len();
                let (skipped, skipped_txs) = (
                    self.skipped.load(AtomicOrdering::Relaxed) + new_blocks.imported.len() - 1,
                    self.skipped_txs.load(AtomicOrdering::Relaxed) + txs_imported,
                );
                info!(target: "import", "Imported {} {} ({} txs, {} Mgas, {} ms, {} KiB){}",
                    Colour::White.bold().paint(format!("#{}", header_view.number())),
                    Colour::White.bold().paint(format!("{}", header_view.hash())),
                    Colour::Yellow.bold().paint(format!("{}", block.transactions_count())),
                    Colour::Yellow.bold().paint(format!("{:.2}", header_view.gas_used().low_u64() as f32 / 1000000f32)),
                    Colour::Purple.bold().paint(format!("{}", new_blocks.duration.as_milliseconds())),
                    Colour::Blue.bold().paint(format!("{:.2}", size as f32 / 1024f32)),
                    if skipped > 0 {
                        format!(" + another {} block(s) containing {} tx(s)",
                            Colour::Red.bold().paint(format!("{}", skipped)),
                            Colour::Red.bold().paint(format!("{}", skipped_txs))
                        )
                    } else {
                        String::new()
                    }
                );
                self.skipped.store(0, AtomicOrdering::Relaxed);
                self.skipped_txs.store(0, AtomicOrdering::Relaxed);
                *last_import = Instant::now();
            }
        } else {
            self.skipped
                .fetch_add(new_blocks.imported.len(), AtomicOrdering::Relaxed);
            self.skipped_txs
                .fetch_add(txs_imported, AtomicOrdering::Relaxed);
        }
    }
}
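
// Notification path for a light node: headers are announced instead of full
// blocks, with the same once-per-second rate limit on log output.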
impl LightChainNotify for Informant<LightNodeInformantData> {
    fn new_headers(&self, good: &[H256]) {
        let mut last_import = self.last_import.lock();
        let client = &self.target.client;

        let importing = self.target.is_major_importing();
        let ripe = Instant::now() > *last_import + Duration::from_secs(1) && !importing;

        if ripe {
            if let Some(header) = good
                .last()
                .and_then(|h| client.block_header(BlockId::Hash(*h)))
            {
                info!(target: "import", "Imported {} {} ({} Mgas){}",
                    Colour::White.bold().paint(format!("#{}", header.number())),
                    Colour::White.bold().paint(format!("{}", header.hash())),
                    Colour::Yellow.bold().paint(format!("{:.2}", header.gas_used().low_u64() as f32 / 1000000f32)),
                    if good.len() > 1 {
                        format!(" + another {} header(s)",
                            Colour::Red.bold().paint(format!("{}", good.len() - 1)))
                    } else {
                        String::new()
                    }
                );
                *last_import = Instant::now();
            }
        }
    }
}
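
/// IO timer token used to schedule the periodic informant tick.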
const INFO_TIMER: TimerToken = 0;
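
// The informant is driven by the client IO service: a timer registered at
// start-up fires every five seconds, and each firing calls `tick()` unless the
// node is shutting down.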
impl<T: InformantData> IoHandler<ClientIoMessage> for Informant<T> {
    fn initialize(&self, io: &IoContext<ClientIoMessage>) {
        io.register_timer(INFO_TIMER, Duration::from_secs(5))
            .expect("Error registering timer");
    }

    fn timeout(&self, _io: &IoContext<ClientIoMessage>, timer: TimerToken) {
        if timer == INFO_TIMER && !self.in_shutdown.load(AtomicOrdering::SeqCst) {
            self.tick();
        }
    }
}
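
// Minimal sanity checks for the small helpers above; the exact prefixed
// rendering of `format_bytes` depends on the `number_prefix` crate, so only
// the plain-bytes case is asserted exactly.
#[cfg(test)]
mod tests {
    use super::{format_bytes, MillisecondDuration};
    use std::time::Duration;

    #[test]
    fn formats_small_counts_as_plain_bytes() {
        // Values below 1024 stay un-prefixed.
        assert_eq!(format_bytes(100), "100 bytes");
    }

    #[test]
    fn formats_large_counts_with_a_binary_prefix() {
        // 2048 bytes should pick up a binary prefix ending in "B" (e.g. "2 KiB").
        assert!(format_bytes(2048).ends_with("B"));
    }

    #[test]
    fn converts_durations_to_milliseconds() {
        // 2 seconds + 250 ms == 2250 ms.
        assert_eq!(Duration::new(2, 250_000_000).as_milliseconds(), 2250);
    }
}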