Merge branch 'master' into client-ipc-refact

NikVolf 2016-07-06 20:20:44 +03:00
commit 2abf1df667
13 changed files with 133 additions and 129 deletions

View File

@@ -37,10 +37,7 @@ use util::rlp::{RlpStream, Rlp, UntrustedRlp};
use util::journaldb;
use util::journaldb::JournalDB;
use util::kvdb::*;
use util::Itertools;
use util::PerfTimer;
use util::View;
use util::Stream;
use util::{Applyable, Stream, View, PerfTimer, Itertools, Colour};
// other
use views::BlockView;
@@ -63,8 +60,7 @@ use block_queue::{BlockQueue, BlockQueueInfo};
use blockchain::{BlockChain, BlockProvider, TreeRoute, ImportRoute};
use client::{BlockID, TransactionID, UncleID, TraceId, ClientConfig,
DatabaseCompactionProfile, BlockChainClient, MiningBlockChainClient,
TraceFilter, CallAnalytics, BlockImportError, TransactionImportError,
TransactionImportResult, Mode};
TraceFilter, CallAnalytics, BlockImportError, Mode};
use client::Error as ClientError;
use env_info::EnvInfo;
use executive::{Executive, Executed, TransactOptions, contract_address};
@@ -72,7 +68,7 @@ use receipt::LocalizedReceipt;
use trace::{TraceDB, ImportRequest as TraceImportRequest, LocalizedTrace, Database as TraceDatabase};
use trace;
use evm::Factory as EvmFactory;
use miner::{Miner, MinerService, AccountDetails};
use miner::{Miner, MinerService};
use util::TrieFactory;
use ipc::IpcConfig;
use ipc::binary::{BinaryConvertError};
@@ -147,6 +143,7 @@ pub struct Client {
liveness: AtomicBool,
io_channel: IoChannel<NetSyncMessage>,
queue_transactions: AtomicUsize,
previous_enode: Mutex<Option<String>>,
}
const HISTORY: u64 = 1200;
@@ -232,6 +229,7 @@ impl Client {
miner: miner,
io_channel: message_channel,
queue_transactions: AtomicUsize::new(0),
previous_enode: Mutex::new(None),
};
Ok(Arc::new(client))
}
@@ -437,12 +435,8 @@ impl Client {
pub fn import_queued_transactions(&self, transactions: &[Bytes]) -> usize {
let _timer = PerfTimer::new("import_queued_transactions");
self.queue_transactions.fetch_sub(transactions.len(), AtomicOrdering::SeqCst);
let fetch_account = |a: &Address| AccountDetails {
nonce: self.latest_nonce(a),
balance: self.latest_balance(a),
};
let tx = transactions.iter().filter_map(|bytes| UntrustedRlp::new(&bytes).as_val().ok()).collect();
let results = self.miner.import_transactions(self, tx, fetch_account);
let txs = transactions.iter().filter_map(|bytes| UntrustedRlp::new(&bytes).as_val().ok()).collect();
let results = self.miner.import_external_transactions(self, txs);
results.len()
}
@@ -591,6 +585,18 @@ impl Client {
}
}
}
/// Notify us that the network has been started.
pub fn network_started(&self, url: &String) {
let mut previous_enode = self.previous_enode.lock().unwrap();
if let Some(ref u) = *previous_enode {
if u == url {
return;
}
}
*previous_enode = Some(url.clone());
info!(target: "mode", "Public node URL: {}", url.apply(Colour::White.bold()));
}
}
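The new network_started hook only reports the public node URL when it changes: the last value is cached in previous_enode and compared before logging. A minimal illustrative call sequence follows; the client binding and the enode URL are made-up placeholders, only network_started itself comes from this diff.

    let url = "enode://0123abcd@127.0.0.1:30303".to_string();
    client.network_started(&url); // first call: caches the URL and logs the public node URL
    client.network_started(&url); // same URL again: previous_enode matches, nothing is logged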
#[derive(Ipc)]
@@ -891,18 +897,6 @@ impl BlockChainClient for Client {
self.build_last_hashes(self.chain.best_block_hash())
}
fn import_transactions(&self, transactions: Vec<SignedTransaction>) -> Vec<Result<TransactionImportResult, TransactionImportError>> {
let fetch_account = |a: &Address| AccountDetails {
nonce: self.latest_nonce(a),
balance: self.latest_balance(a),
};
self.miner.import_transactions(self, transactions, &fetch_account)
.into_iter()
.map(|res| res.map_err(|e| e.into()))
.collect()
}
fn queue_transactions(&self, transactions: Vec<Bytes>) {
if self.queue_transactions.load(AtomicOrdering::Relaxed) > MAX_TX_QUEUE_SIZE {
debug!("Ignoring {} transactions: queue is full", transactions.len());

View File

@@ -48,7 +48,8 @@ use trace::LocalizedTrace;
use evm::Factory as EvmFactory;
pub use types::call_analytics::CallAnalytics;
pub use block_import_error::BlockImportError;
pub use transaction_import::{TransactionImportResult, TransactionImportError};
pub use transaction_import::TransactionImportResult;
pub use transaction_import::TransactionImportError;
mod client {
//! Blockchain database client.
@@ -188,9 +189,6 @@ pub trait BlockChainClient : Sync + Send {
/// Get last hashes starting from best block.
fn last_hashes(&self) -> LastHashes;
/// import transactions from network/other 3rd party
fn import_transactions(&self, transactions: Vec<SignedTransaction>) -> Vec<Result<TransactionImportResult, TransactionImportError>>;
/// Queue transactions for importing.
fn queue_transactions(&self, transactions: Vec<Bytes>);

View File

@@ -22,7 +22,7 @@ use transaction::{Transaction, LocalizedTransaction, SignedTransaction, Action};
use blockchain::TreeRoute;
use client::{BlockChainClient, MiningBlockChainClient, BlockChainInfo, BlockStatus, BlockID,
TransactionID, UncleID, TraceId, TraceFilter, LastHashes, CallAnalytics,
TransactionImportError, BlockImportError};
BlockImportError};
use header::{Header as BlockHeader, BlockNumber};
use filter::Filter;
use log_entry::LocalizedLogEntry;
@@ -39,8 +39,6 @@ use executive::Executed;
use error::ExecutionError;
use trace::LocalizedTrace;
use miner::{TransactionImportResult, AccountDetails};
/// Test client.
pub struct TestBlockChainClient {
/// Blocks.
@@ -275,6 +273,10 @@ impl BlockChainClient for TestBlockChainClient {
}
}
fn latest_nonce(&self, address: &Address) -> U256 {
self.nonce(address, BlockID::Latest).unwrap()
}
fn code(&self, address: &Address) -> Option<Bytes> {
self.code.read().unwrap().get(address).cloned()
}
@@ -287,6 +289,10 @@ impl BlockChainClient for TestBlockChainClient {
}
}
fn latest_balance(&self, address: &Address) -> U256 {
self.balance(address, BlockID::Latest).unwrap()
}
fn storage_at(&self, address: &Address, position: &H256, id: BlockID) -> Option<H256> {
if let BlockID::Latest = id {
Some(self.storage.read().unwrap().get(&(address.clone(), position.clone())).cloned().unwrap_or_else(H256::new))
@@ -488,24 +494,10 @@ impl BlockChainClient for TestBlockChainClient {
unimplemented!();
}
fn import_transactions(&self, transactions: Vec<SignedTransaction>) -> Vec<Result<TransactionImportResult, TransactionImportError>> {
let nonces = self.nonces.read().unwrap();
let balances = self.balances.read().unwrap();
let fetch_account = |a: &Address| AccountDetails {
nonce: nonces[a],
balance: balances[a],
};
self.miner.import_transactions(self, transactions, &fetch_account)
.into_iter()
.map(|res| res.map_err(|e| e.into()))
.collect()
}
fn queue_transactions(&self, transactions: Vec<Bytes>) {
// import right here
let tx = transactions.into_iter().filter_map(|bytes| UntrustedRlp::new(&bytes).as_val().ok()).collect();
self.import_transactions(tx);
let txs = transactions.into_iter().filter_map(|bytes| UntrustedRlp::new(&bytes).as_val().ok()).collect();
self.miner.import_external_transactions(self, txs);
}
fn pending_transactions(&self) -> Vec<SignedTransaction> {

View File

@@ -20,7 +20,6 @@ use std::time::{Instant, Duration};
use util::*;
use util::using_queue::{UsingQueue, GetAction};
use util::Colour::White;
use account_provider::AccountProvider;
use views::{BlockView, HeaderView};
use client::{MiningBlockChainClient, Executive, Executed, EnvInfo, TransactOptions, BlockID, CallAnalytics};
@@ -316,6 +315,19 @@ impl Miner {
!have_work
}
fn add_transactions_to_queue(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, origin: TransactionOrigin, transaction_queue: &mut TransactionQueue) ->
Vec<Result<TransactionImportResult, Error>> {
let fetch_account = |a: &Address| AccountDetails {
nonce: chain.latest_nonce(a),
balance: chain.latest_balance(a),
};
transactions.into_iter()
.map(|tx| transaction_queue.add(tx, &fetch_account, origin))
.collect()
}
/// Are we allowed to do a non-mandatory reseal?
fn tx_reseal_allowed(&self) -> bool { Instant::now() > *self.next_allowed_reseal.lock().unwrap() }
}
@@ -478,27 +490,24 @@ impl MinerService for Miner {
self.gas_range_target.read().unwrap().1
}
fn import_transactions<T>(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, fetch_account: T) ->
Vec<Result<TransactionImportResult, Error>>
where T: Fn(&Address) -> AccountDetails {
let results: Vec<Result<TransactionImportResult, Error>> = {
let mut transaction_queue = self.transaction_queue.lock().unwrap();
transactions.into_iter()
.map(|tx| transaction_queue.add(tx, &fetch_account, TransactionOrigin::External))
.collect()
};
if !results.is_empty() && self.options.reseal_on_external_tx && self.tx_reseal_allowed() {
fn import_external_transactions(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>) ->
Vec<Result<TransactionImportResult, Error>> {
let mut transaction_queue = self.transaction_queue.lock().unwrap();
let results = self.add_transactions_to_queue(chain, transactions, TransactionOrigin::External,
&mut transaction_queue);
if !results.is_empty() && self.options.reseal_on_external_tx && self.tx_reseal_allowed() {
self.update_sealing(chain);
}
results
}
fn import_own_transaction<T>(
fn import_own_transaction(
&self,
chain: &MiningBlockChainClient,
transaction: SignedTransaction,
fetch_account: T
) -> Result<TransactionImportResult, Error> where T: Fn(&Address) -> AccountDetails {
) -> Result<TransactionImportResult, Error> {
let hash = transaction.hash();
trace!(target: "own_tx", "Importing transaction: {:?}", transaction);
@@ -506,7 +515,7 @@ impl MinerService for Miner {
let imported = {
// Be sure to release the lock before we call enable_and_prepare_sealing
let mut transaction_queue = self.transaction_queue.lock().unwrap();
let import = transaction_queue.add(transaction, &fetch_account, TransactionOrigin::Local);
let import = self.add_transactions_to_queue(chain, vec![transaction], TransactionOrigin::Local, &mut transaction_queue).pop().unwrap();
match import {
Ok(ref res) => {
@@ -645,7 +654,7 @@ impl MinerService for Miner {
let n = sealed.header().number();
let h = sealed.header().hash();
try!(chain.import_sealed_block(sealed));
info!(target: "miner", "Mined block imported OK. #{}: {}", paint(White.bold(), format!("{}", n)), paint(White.bold(), h.hex()));
info!(target: "miner", "Mined block imported OK. #{}: {}", format!("{}", n).apply(Colour::White.bold()), h.hex().apply(Colour::White.bold()));
Ok(())
})
}
@@ -657,7 +666,12 @@ impl MinerService for Miner {
// Client should send message after commit to db and inserting to chain.
.expect("Expected in-chain blocks.");
let block = BlockView::new(&block);
block.transactions()
let txs = block.transactions();
// populate sender
for tx in &txs {
let _sender = tx.sender();
}
txs
}
// 1. We ignore blocks that were `imported` (because it means that they are not in canon-chain, and transactions
@@ -674,14 +688,10 @@ impl MinerService for Miner {
.par_iter()
.map(|h| fetch_transactions(chain, h));
out_of_chain.for_each(|txs| {
// populate sender
for tx in &txs {
let _sender = tx.sender();
}
let _ = self.import_transactions(chain, txs, |a| AccountDetails {
nonce: chain.latest_nonce(a),
balance: chain.latest_balance(a),
});
let mut transaction_queue = self.transaction_queue.lock().unwrap();
let _ = self.add_transactions_to_queue(
chain, txs, TransactionOrigin::External, &mut transaction_queue
);
});
}
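Taken together with the new add_transactions_to_queue helper above, the import path no longer threads a fetch_account closure through every caller; the miner looks up nonce and balance itself via MiningBlockChainClient::latest_nonce and latest_balance. Below is a rough call-site sketch under that assumption; client, miner, decoded_txs and signed_tx are placeholders, not identifiers from the diff.

    // transactions decoded from a network packet enter the queue as External
    let results = miner.import_external_transactions(&*client, decoded_txs);
    // a locally signed transaction enters as Local and may trigger sealing
    let result = miner.import_own_transaction(&*client, signed_tx);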

View File

@@ -107,14 +107,12 @@ pub trait MinerService : Send + Sync {
fn set_tx_gas_limit(&self, limit: U256);
/// Imports transactions to transaction queue.
fn import_transactions<T>(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, fetch_account: T) ->
Vec<Result<TransactionImportResult, Error>>
where T: Fn(&Address) -> AccountDetails, Self: Sized;
fn import_external_transactions(&self, chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>) ->
Vec<Result<TransactionImportResult, Error>>;
/// Imports own (node owner) transaction to queue.
fn import_own_transaction<T>(&self, chain: &MiningBlockChainClient, transaction: SignedTransaction, fetch_account: T) ->
Result<TransactionImportResult, Error>
where T: Fn(&Address) -> AccountDetails, Self: Sized;
fn import_own_transaction(&self, chain: &MiningBlockChainClient, transaction: SignedTransaction) ->
Result<TransactionImportResult, Error>;
/// Returns hashes of transactions currently in pending
fn pending_transactions_hashes(&self) -> Vec<H256>;

View File

@@ -17,7 +17,6 @@
//! Creates and registers client and network services.
use util::*;
use util::Colour::{Yellow, White};
use util::panics::*;
use spec::Spec;
use error::*;
@@ -72,7 +71,7 @@ impl ClientService {
try!(net_service.start());
}
info!("Configured for {} using {} engine", paint(White.bold(), spec.name.clone()), paint(Yellow.bold(), spec.engine.name().to_owned()));
info!("Configured for {} using {} engine", spec.name.clone().apply(Colour::White.bold()), spec.engine.name().apply(Colour::Yellow.bold()));
let client = try!(Client::new(config, spec, db_path, miner, net_service.io().channel()));
panic_handler.forward_from(client.deref());
let client_io = Arc::new(ClientIoHandler {
@@ -135,16 +134,14 @@ impl IoHandler<NetSyncMessage> for ClientIoHandler {
#[cfg_attr(feature="dev", allow(single_match))]
fn message(&self, io: &IoContext<NetSyncMessage>, net_message: &NetSyncMessage) {
if let UserMessage(ref message) = *net_message {
match *message {
SyncMessage::BlockVerified => {
self.client.import_verified_blocks(&io.channel());
},
SyncMessage::NewTransactions(ref transactions) => {
self.client.import_queued_transactions(&transactions);
},
_ => {}, // ignore other messages
}
match *net_message {
UserMessage(ref message) => match *message {
SyncMessage::BlockVerified => { self.client.import_verified_blocks(&io.channel()); }
SyncMessage::NewTransactions(ref transactions) => { self.client.import_queued_transactions(&transactions); }
_ => {} // ignore other messages
},
NetworkIoMessage::NetworkStarted(ref url) => { self.client.network_started(url); }
_ => {} // ignore other messages
}
}
}

View File

@@ -181,7 +181,7 @@ impl Configuration {
let wei_per_usd: f32 = 1.0e18 / usd_per_eth;
let gas_per_tx: f32 = 21000.0;
let wei_per_gas: f32 = wei_per_usd * usd_per_tx / gas_per_tx;
info!("Using a conversion rate of Ξ1 = {} ({} wei/gas)", paint(White.bold(), format!("US${}", usd_per_eth)), paint(Yellow.bold(), format!("{}", wei_per_gas)));
info!("Using a conversion rate of Ξ1 = {} ({} wei/gas)", format!("US${}", usd_per_eth).apply(White.bold()), format!("{}", wei_per_gas).apply(Yellow.bold()));
U256::from_dec_str(&format!("{:.0}", wei_per_gas)).unwrap()
}
}
@@ -338,7 +338,7 @@ impl Configuration {
if let journaldb::Algorithm::Archive = client_config.pruning {
client_config.trie_spec = TrieSpec::Fat;
} else {
die!("Fatdb is not supported. Please rerun with --pruning=archive")
die!("Fatdb is not supported. Please re-run with --pruning=archive")
}
}
@@ -353,7 +353,7 @@ impl Configuration {
};
if self.args.flag_jitvm {
client_config.vm_type = VMType::jit().unwrap_or_else(|| die!("Parity built without jit vm."))
client_config.vm_type = VMType::jit().unwrap_or_else(|| die!("Parity is built without the JIT EVM."))
}
trace!(target: "parity", "Using pruning strategy of {}", client_config.pruning);

View File

@@ -80,7 +80,7 @@ use std::thread::sleep;
use std::time::Duration;
use rustc_serialize::hex::FromHex;
use ctrlc::CtrlC;
use util::{H256, ToPretty, NetworkConfiguration, PayloadInfo, Bytes, UtilError, paint, Colour, version};
use util::{H256, ToPretty, NetworkConfiguration, PayloadInfo, Bytes, UtilError, Colour, Applyable, version, journaldb};
use util::panics::{MayPanic, ForwardPanic, PanicHandler};
use ethcore::client::{Mode, BlockID, BlockChainClient, ClientConfig, get_db_path, BlockImportError};
use ethcore::error::{ImportError};
@@ -188,10 +188,21 @@ fn execute_client(conf: Configuration, spec: Spec, client_config: ClientConfig)
// Raise fdlimit
unsafe { ::fdlimit::raise_fd_limit(); }
info!("Starting {}", paint(Colour::White.bold(), format!("{}", version())));
info!("Starting {}", format!("{}", version()).apply(Colour::White.bold()));
info!("Using state DB journalling strategy {}", match client_config.pruning {
journaldb::Algorithm::Archive => "archive",
journaldb::Algorithm::EarlyMerge => "light",
journaldb::Algorithm::OverlayRecent => "fast",
journaldb::Algorithm::RefCounted => "basic",
}.apply(Colour::White.bold()));
let net_settings = conf.net_settings(&spec);
let sync_config = conf.sync_config(&spec);
// Display warning about using experimental journaldb types
match client_config.pruning {
journaldb::Algorithm::EarlyMerge | journaldb::Algorithm::RefCounted => {
warn!("Your chosen strategy is {}! You can re-run with --pruning to change.", "unstable".apply(Colour::Red.bold()));
}
_ => {}
}
// Display warning about using unlock with signer
if conf.signer_enabled() && conf.args.flag_unlock.is_some() {
@@ -204,6 +215,9 @@ fn execute_client(conf: Configuration, spec: Spec, client_config: ClientConfig)
warn!("Value given for --policy, yet no proposed forks exist. Ignoring.");
}
let net_settings = conf.net_settings(&spec);
let sync_config = conf.sync_config(&spec);
// Secret Store
let account_service = Arc::new(conf.account_service());

View File

@@ -14,11 +14,10 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
extern crate ansi_term;
use self::ansi_term::Colour::White;
use std::io;
use std::path::PathBuf;
use std::sync::Arc;
use util::{Colour, Applyable};
use util::panics::{PanicHandler, ForwardPanic};
use util::path::restrict_permissions_owner;
use die::*;
@@ -67,7 +66,7 @@ pub fn new_token(path: String) -> io::Result<()> {
let mut codes = try!(signer::AuthCodes::from_file(&path));
let code = try!(codes.generate_new());
try!(codes.to_file(&path));
println!("This key code will authorise your System Signer UI: {}", White.bold().paint(code));
println!("This key code will authorise your System Signer UI: {}", code.apply(Colour::White.bold()));
Ok(())
}

View File

@@ -56,7 +56,7 @@ pub use self::rpc::RpcClient;
use v1::helpers::TransactionRequest;
use v1::types::H256 as NH256;
use ethcore::error::Error as EthcoreError;
use ethcore::miner::{AccountDetails, MinerService};
use ethcore::miner::MinerService;
use ethcore::client::MiningBlockChainClient;
use ethcore::transaction::{Action, SignedTransaction, Transaction};
use ethcore::account_provider::{AccountProvider, Error as AccountError};
@@ -80,12 +80,7 @@ fn dispatch_transaction<C, M>(client: &C, miner: &M, signed_transaction: SignedT
where C: MiningBlockChainClient, M: MinerService {
let hash = NH256::from(signed_transaction.hash());
let import = miner.import_own_transaction(client, signed_transaction, |a: &Address| {
AccountDetails {
nonce: client.latest_nonce(&a),
balance: client.latest_balance(&a),
}
});
let import = miner.import_own_transaction(client, signed_transaction);
import
.map_err(transaction_error)

View File

@@ -23,7 +23,7 @@ use ethcore::client::{MiningBlockChainClient, Executed, CallAnalytics};
use ethcore::block::{ClosedBlock, IsBlock};
use ethcore::transaction::SignedTransaction;
use ethcore::receipt::Receipt;
use ethcore::miner::{MinerService, MinerStatus, AccountDetails, TransactionImportResult};
use ethcore::miner::{MinerService, MinerStatus, TransactionImportResult};
/// Test miner service.
pub struct TestMinerService {
@@ -130,14 +130,13 @@ impl MinerService for TestMinerService {
}
/// Imports transactions to transaction queue.
fn import_transactions<T>(&self, _chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>, fetch_account: T) ->
Vec<Result<TransactionImportResult, Error>>
where T: Fn(&Address) -> AccountDetails {
fn import_external_transactions(&self, _chain: &MiningBlockChainClient, transactions: Vec<SignedTransaction>) ->
Vec<Result<TransactionImportResult, Error>> {
// lets assume that all txs are valid
self.imported_transactions.lock().unwrap().extend_from_slice(&transactions);
for sender in transactions.iter().filter_map(|t| t.sender().ok()) {
let nonce = self.last_nonce(&sender).unwrap_or(fetch_account(&sender).nonce);
let nonce = self.last_nonce(&sender).expect("last_nonce must be populated in tests");
self.last_nonces.write().unwrap().insert(sender, nonce + U256::from(1));
}
transactions
@@ -147,9 +146,8 @@ impl MinerService for TestMinerService {
}
/// Imports transactions to transaction queue.
fn import_own_transaction<T>(&self, chain: &MiningBlockChainClient, transaction: SignedTransaction, _fetch_account: T) ->
Result<TransactionImportResult, Error>
where T: Fn(&Address) -> AccountDetails {
fn import_own_transaction(&self, chain: &MiningBlockChainClient, transaction: SignedTransaction) ->
Result<TransactionImportResult, Error> {
// keep the pending nonces up to date
if let Ok(ref sender) = transaction.sender() {

View File

@@ -17,6 +17,7 @@
//! Common log helper functions
use std::env;
use std::borrow::Cow;
use rlog::{LogLevelFilter};
use env_logger::LogBuilder;
use std::sync::{RwLock, RwLockReadGuard};
@@ -28,12 +29,20 @@ lazy_static! {
static ref USE_COLOR: AtomicBool = AtomicBool::new(false);
}
/// Paint, using colour if desired.
pub fn paint(c: Style, t: String) -> String {
match USE_COLOR.load(Ordering::Relaxed) {
true => format!("{}", c.paint(t)),
false => t,
}
/// Something which can be apply()ed.
pub trait Applyable: AsRef<str> {
/// Apply the style `c` to ourself, returning us styled in that manner.
fn apply(&self, c: Style) -> Cow<str>;
}
impl<T: AsRef<str>> Applyable for T {
fn apply(&self, c: Style) -> Cow<str> {
let s = self.as_ref();
match USE_COLOR.load(Ordering::Relaxed) {
true => Cow::Owned(format!("{}", c.paint(s))),
false => Cow::Borrowed(s),
}
}
}
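Applyable replaces the old free function paint(Style, String): any AsRef<str> value can be styled in place, and ANSI codes are only emitted when USE_COLOR is set, otherwise the borrowed string is returned untouched. A minimal usage sketch, assuming the trait and Colour are imported the way other files in this diff do (use util::{Applyable, Colour};); the string literal is illustrative.

    use util::{Applyable, Colour};

    let engine = "Ethash";
    // `apply` yields a Cow<str>: borrowed when colour is off, owned ANSI-wrapped text when it is on.
    println!("using the {} engine", engine.apply(Colour::Yellow.bold()));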
lazy_static! {

View File

@@ -32,8 +32,6 @@ use misc::version;
use crypto::*;
use sha3::Hashable;
use rlp::*;
use log::Colour::White;
use log::paint;
use network::session::{Session, SessionData};
use error::*;
use io::*;
@@ -162,6 +160,8 @@ pub enum NetworkIoMessage<Message> where Message: Send + Sync + Clone {
Disconnect(PeerId),
/// Disconnect and temporary disable peer.
DisablePeer(PeerId),
/// Network has been started with the host as the given enode.
NetworkStarted(String),
/// User message
User(Message),
}
@@ -345,12 +345,13 @@ pub struct Host<Message> where Message: Send + Sync + Clone {
reserved_nodes: RwLock<HashSet<NodeId>>,
num_sessions: AtomicUsize,
stopping: AtomicBool,
first_time: AtomicBool,
}
impl<Message> Host<Message> where Message: Send + Sync + Clone {
/// Create a new instance
pub fn new(config: NetworkConfiguration, stats: Arc<NetworkStats>) -> Result<Host<Message>, UtilError> {
trace!(target: "host", "Creating new Host object");
let mut listen_address = match config.listen_address {
None => SocketAddr::from_str("0.0.0.0:30304").unwrap(),
Some(addr) => addr,
@@ -401,7 +402,6 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
reserved_nodes: RwLock::new(HashSet::new()),
num_sessions: AtomicUsize::new(0),
stopping: AtomicBool::new(false),
first_time: AtomicBool::new(true),
};
for n in boot_nodes {
@@ -538,9 +538,8 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
self.info.write().unwrap().public_endpoint = Some(public_endpoint.clone());
if self.first_time.load(AtomicOrdering::Relaxed) {
info!("Public node URL: {}", paint(White.bold(), self.external_url().unwrap()));
self.first_time.store(false, AtomicOrdering::Relaxed);
if let Some(url) = self.external_url() {
io.message(NetworkIoMessage::NetworkStarted(url)).unwrap_or_else(|e| warn!("Error sending IO notification: {:?}", e));
}
// Initialize discovery.
@@ -1038,6 +1037,7 @@ impl<Message> IoHandler<NetworkIoMessage<Message>> for Host<Message> where Messa
h.message(&NetworkContext::new(io, p, None, self.sessions.clone(), &reserved), &message);
}
}
_ => {} // ignore others.
}
}