commit 1f8e0f86ac

Cargo.lock (generated)
@@ -2,7 +2,7 @@
 name = "parity"
 version = "0.9.99"
 dependencies = [
- "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clippy 0.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
  "ctrlc 1.1.1 (git+https://github.com/tomusdrw/rust-ctrlc.git)",
  "daemonize 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "docopt 0.6.78 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -94,7 +94,7 @@ dependencies = [
 
 [[package]]
 name = "clippy"
-version = "0.0.49"
+version = "0.0.50"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "regex-syntax 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -207,7 +207,7 @@ dependencies = [
 name = "ethcore"
 version = "0.9.99"
 dependencies = [
- "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clippy 0.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
  "crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "ethash 0.9.99",
@@ -233,7 +233,7 @@ dependencies = [
 name = "ethcore-rpc"
 version = "0.9.99"
 dependencies = [
- "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clippy 0.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
  "ethash 0.9.99",
  "ethcore 0.9.99",
  "ethcore-util 0.9.99",
@@ -256,7 +256,7 @@ dependencies = [
  "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
  "bigint 0.1.0",
  "chrono 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
- "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clippy 0.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
  "crossbeam 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
  "elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -288,7 +288,7 @@ dependencies = [
 name = "ethsync"
 version = "0.9.99"
 dependencies = [
- "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
+ "clippy 0.0.50 (registry+https://github.com/rust-lang/crates.io-index)",
  "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "ethcore 0.9.99",
  "ethcore-util 0.9.99",

@@ -19,7 +19,7 @@ ctrlc = { git = "https://github.com/tomusdrw/rust-ctrlc.git" }
 fdlimit = { path = "util/fdlimit" }
 daemonize = "0.2"
 number_prefix = "0.2"
-clippy = { version = "0.0.49", optional = true }
+clippy = { version = "0.0.50", optional = true }
 ethcore = { path = "ethcore" }
 ethcore-util = { path = "util" }
 ethsync = { path = "sync" }

@@ -17,7 +17,7 @@ ethcore-util = { path = "../util" }
 evmjit = { path = "../evmjit", optional = true }
 ethash = { path = "../ethash" }
 num_cpus = "0.2"
-clippy = { version = "0.0.49", optional = true }
+clippy = { version = "0.0.50", optional = true }
 crossbeam = "0.1.5"
 lazy_static = "0.1"
 ethcore-devtools = { path = "../devtools" }

@@ -523,7 +523,7 @@ mod tests {
         let engine = spec.to_engine().unwrap();
         let mut config = BlockQueueConfig::default();
         config.max_mem_use = super::MIN_MEM_LIMIT; // empty queue uses about 15000
-        let mut queue = BlockQueue::new(config, Arc::new(engine), IoChannel::disconnected());
+        let queue = BlockQueue::new(config, Arc::new(engine), IoChannel::disconnected());
         assert!(!queue.queue_info().is_full());
         let mut blocks = get_good_dummy_block_seq(50);
         for b in blocks.drain(..) {

@@ -28,9 +28,15 @@ pub struct MemoryCache {
     blooms: HashMap<BloomIndex, H2048>,
 }
 
+impl Default for MemoryCache {
+    fn default() -> Self {
+        MemoryCache::new()
+    }
+}
+
 impl MemoryCache {
     /// Default constructor for MemoryCache
-    pub fn new() -> MemoryCache {
+    pub fn new() -> Self {
         MemoryCache { blooms: HashMap::new() }
     }
 

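Note: the hunk above, and several below, add a `Default` implementation that simply delegates to the type's existing `new()` constructor, presumably to satisfy clippy's new_without_default lint (it fires on argument-less public `new()` constructors when the type has no `Default` impl). A minimal, self-contained sketch of the same shape follows; the `Cache` type and its field are hypothetical, not part of this commit:

use std::collections::HashMap;

pub struct Cache {
    entries: HashMap<u64, String>,
}

// `Default` just forwards to the hand-written constructor, so
// `Cache::default()` and `Cache::new()` stay interchangeable.
impl Default for Cache {
    fn default() -> Self {
        Cache::new()
    }
}

impl Cache {
    pub fn new() -> Self {
        Cache { entries: HashMap::new() }
    }
}

fn main() {
    let cache = Cache::default();
    assert!(cache.entries.is_empty());
}

If every field itself implemented `Default`, `#[derive(Default)]` would be shorter; a manual impl like the ones in this commit keeps `new()` as the single place where construction logic lives.
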
@@ -57,9 +57,15 @@ pub enum EachBlockWith {
     UncleAndTransaction
 }
 
+impl Default for TestBlockChainClient {
+    fn default() -> Self {
+        TestBlockChainClient::new()
+    }
+}
+
 impl TestBlockChainClient {
     /// Creates new test client.
-    pub fn new() -> TestBlockChainClient {
+    pub fn new() -> Self {
 
         let mut client = TestBlockChainClient {
             blocks: RwLock::new(HashMap::new()),

@@ -301,8 +301,14 @@ mod tests {
     env_info: EnvInfo
 }
 
+impl Default for TestSetup {
+    fn default() -> Self {
+        TestSetup::new()
+    }
+}
+
 impl TestSetup {
-    fn new() -> TestSetup {
+    fn new() -> Self {
         TestSetup {
             state: get_temp_state(),
             engine: get_test_spec().to_engine().unwrap(),

@@ -20,6 +20,7 @@ use error::Error;
 use header::Header;
 use super::Verifier;
 
+#[allow(dead_code)]
 pub struct NoopVerifier;
 
 impl Verifier for NoopVerifier {

@@ -255,8 +255,14 @@ mod tests {
     numbers: HashMap<BlockNumber, H256>,
 }
 
+impl Default for TestBlockChain {
+    fn default() -> Self {
+        TestBlockChain::new()
+    }
+}
+
 impl TestBlockChain {
-    pub fn new() -> TestBlockChain {
+    pub fn new() -> Self {
         TestBlockChain {
             blocks: HashMap::new(),
             numbers: HashMap::new(),

@@ -314,7 +314,7 @@ impl Configuration {
     fn init_nodes(&self, spec: &Spec) -> Vec<String> {
         let mut r = if self.args.flag_no_bootstrap { Vec::new() } else { spec.nodes().clone() };
         if let Some(ref x) = self.args.flag_bootnodes {
-            r.extend(x.split(",").map(|s| Self::normalize_enode(s).unwrap_or_else(|| die!("{}: Invalid node address format given for a boot node.", s))));
+            r.extend(x.split(',').map(|s| Self::normalize_enode(s).unwrap_or_else(|| die!("{}: Invalid node address format given for a boot node.", s))));
         }
         r
     }
@@ -327,7 +327,7 @@ impl Configuration {
             let host = IpAddr::from_str(host).unwrap_or_else(|_| die!("Invalid host given with `--nat extip:{}`", host));
             Some(SocketAddr::new(host, self.args.flag_port))
         } else {
-            listen_address.clone()
+            listen_address
         };
         (listen_address, public_address)
     }
@@ -388,12 +388,13 @@ impl Configuration {
         }
         if self.args.cmd_list {
             println!("Known addresses:");
-            for &(addr, _) in secret_store.accounts().unwrap().iter() {
+            for &(addr, _) in &secret_store.accounts().unwrap() {
                 println!("{:?}", addr);
             }
         }
     }
 
+    #[cfg_attr(feature="dev", allow(useless_format))]
     fn execute_client(&self) {
         // Setup panic handler
         let panic_handler = PanicHandler::new_in_arc();
@@ -406,7 +407,11 @@ impl Configuration {
         let spec = self.spec();
         let net_settings = self.net_settings(&spec);
         let mut sync_config = SyncConfig::default();
-        sync_config.network_id = self.args.flag_networkid.as_ref().map(|id| U256::from_str(id).unwrap_or_else(|_| die!("{}: Invalid index given with --networkid", id))).unwrap_or(spec.network_id());
+        sync_config.network_id = self.args.flag_networkid.as_ref().map_or(spec.network_id(), |id| {
+            U256::from_str(id).unwrap_or_else(|_| {
+                die!("{}: Invalid index given with --networkid", id)
+            })
+        });
 
         // Build client
         let mut client_config = ClientConfig::default();
@@ -421,8 +426,7 @@ impl Configuration {
             }
         }
         client_config.pruning = match self.args.flag_pruning.as_str() {
-            "" => journaldb::Algorithm::Archive,
-            "archive" => journaldb::Algorithm::Archive,
+            "" | "archive" => journaldb::Algorithm::Archive,
             "pruned" => journaldb::Algorithm::EarlyMerge,
             "fast" => journaldb::Algorithm::OverlayRecent,
             // "slow" => journaldb::Algorithm::RefCounted, // TODO: @gavofyork uncomment this once ref-count algo is merged.
|
|||||||
let cors = self.args.flag_rpccorsdomain.as_ref().unwrap_or(&self.args.flag_jsonrpc_cors);
|
let cors = self.args.flag_rpccorsdomain.as_ref().unwrap_or(&self.args.flag_jsonrpc_cors);
|
||||||
// TODO: use this as the API list.
|
// TODO: use this as the API list.
|
||||||
let apis = self.args.flag_rpcapi.as_ref().unwrap_or(&self.args.flag_jsonrpc_apis);
|
let apis = self.args.flag_rpcapi.as_ref().unwrap_or(&self.args.flag_jsonrpc_apis);
|
||||||
let server_handler = setup_rpc_server(service.client(), sync.clone(), account_service.clone(), &url, cors, apis.split(",").collect());
|
let server_handler = setup_rpc_server(service.client(), sync.clone(), account_service.clone(), &url, cors, apis.split(',').collect());
|
||||||
if let Some(handler) = server_handler {
|
if let Some(handler) = server_handler {
|
||||||
panic_handler.forward_from(handler.deref());
|
panic_handler.forward_from(handler.deref());
|
||||||
}
|
}
|
||||||
|
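Note: the `impl Configuration` hunks above apply two further clippy-style rewrites: `Option::map(..).unwrap_or(..)` becomes a single `map_or(default, f)` call, and one-character string patterns such as split(",") become char patterns such as split(',') (the lints are, as far as I remember, option_map_unwrap_or and single_char_pattern). A self-contained sketch of both, using hypothetical helper names rather than the actual parity functions:

// map_or: supply the default first, then the closure that handles Some(..).
fn network_id(flag: Option<&str>, default_id: u64) -> u64 {
    flag.map_or(default_id, |id| id.parse().unwrap_or(0))
}

// A char pattern avoids constructing a one-byte string pattern.
fn split_apis(list: &str) -> Vec<&str> {
    list.split(',').collect()
}

fn main() {
    assert_eq!(network_id(None, 1), 1);
    assert_eq!(network_id(Some("42"), 1), 42);
    assert_eq!(split_apis("web3,eth,net"), vec!["web3", "eth", "net"]);
}
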
@@ -18,7 +18,7 @@ ethcore-util = { path = "../util" }
 ethcore = { path = "../ethcore" }
 ethash = { path = "../ethash" }
 ethsync = { path = "../sync" }
-clippy = { version = "0.0.49", optional = true }
+clippy = { version = "0.0.50", optional = true }
 rustc-serialize = "0.3"
 transient-hashmap = "0.1"
 serde_macros = { version = "0.7.0", optional = true }

@@ -17,7 +17,6 @@
 use util::numbers::*;
 use ethcore::transaction::{LocalizedTransaction, Action};
 use v1::types::{Bytes, OptionalValue};
-use serde::Error;
 
 #[derive(Debug, Default, Serialize)]
 pub struct Transaction {

@@ -10,7 +10,7 @@ authors = ["Ethcore <admin@ethcore.io"]
 [dependencies]
 ethcore-util = { path = "../util" }
 ethcore = { path = "../ethcore" }
-clippy = { version = "0.0.49", optional = true }
+clippy = { version = "0.0.50", optional = true }
 log = "0.3"
 env_logger = "0.3"
 time = "0.1.34"

@@ -27,7 +27,7 @@ crossbeam = "0.2"
 slab = "0.1"
 sha3 = { path = "sha3" }
 serde = "0.7.0"
-clippy = { version = "0.0.49", optional = true }
+clippy = { version = "0.0.50", optional = true }
 json-tests = { path = "json-tests" }
 rustc_version = "0.1.0"
 igd = "0.4.2"

@@ -33,14 +33,14 @@ use super::JournalDB;
 /// immediately. Rather some age (based on a linear but arbitrary metric) must pass before
 /// the removals actually take effect.
 ///
 /// There are two memory overlays:
 /// - Transaction overlay contains current transaction data. It is merged with with history
 /// overlay on each `commit()`
 /// - History overlay contains all data inserted during the history period. When the node
 /// in the overlay becomes ancient it is written to disk on `commit()`
 ///
 /// There is also a journal maintained in memory and on the disk as well which lists insertions
 /// and removals for each commit during the history period. This is used to track
 /// data nodes that go out of history scope and must be written to disk.
 ///
 /// Commit workflow:
@@ -50,12 +50,12 @@ use super::JournalDB;
 /// 3. Clear the transaction overlay.
 /// 4. For a canonical journal record that becomes ancient inserts its insertions into the disk DB
 /// 5. For each journal record that goes out of the history scope (becomes ancient) remove its
 /// insertions from the history overlay, decreasing the reference counter and removing entry if
 /// if reaches zero.
 /// 6. For a canonical journal record that becomes ancient delete its removals from the disk only if
 /// the removed key is not present in the history overlay.
 /// 7. Delete ancient record from memory and disk.
-///
 pub struct OverlayRecentDB {
     transaction_overlay: MemoryDB,
     backing: Arc<Database>,
@@ -223,7 +223,7 @@ impl JournalDB for OverlayRecentDB {
         let mut tx = self.transaction_overlay.drain();
         let inserted_keys: Vec<_> = tx.iter().filter_map(|(k, &(_, c))| if c > 0 { Some(k.clone()) } else { None }).collect();
         let removed_keys: Vec<_> = tx.iter().filter_map(|(k, &(_, c))| if c < 0 { Some(k.clone()) } else { None }).collect();
         // Increase counter for each inserted key no matter if the block is canonical or not.
         let insertions = tx.drain().filter_map(|(k, (v, c))| if c > 0 { Some((k, v)) } else { None });
         r.append(id);
         r.begin_list(inserted_keys.len());
@@ -236,7 +236,7 @@ impl JournalDB for OverlayRecentDB {
         r.append(&removed_keys);
 
         let mut k = RlpStream::new_list(3);
-        let index = journal_overlay.journal.get(&now).map(|j| j.len()).unwrap_or(0);
+        let index = journal_overlay.journal.get(&now).map_or(0, |j| j.len());
         k.append(&now);
         k.append(&index);
         k.append(&&PADDING[..]);
@@ -345,14 +345,14 @@ impl HashDB for OverlayRecentDB {
         self.lookup(key).is_some()
     }
 
     fn insert(&mut self, value: &[u8]) -> H256 {
         self.transaction_overlay.insert(value)
     }
     fn emplace(&mut self, key: H256, value: Bytes) {
         self.transaction_overlay.emplace(key, value);
     }
     fn kill(&mut self, key: &H256) {
         self.transaction_overlay.kill(key);
     }
 }
 
@@ -749,7 +749,7 @@ mod tests {
         assert!(jdb.can_reconstruct_refs());
         assert!(!jdb.exists(&foo));
     }
 
     #[test]
     fn reopen_test() {
         let mut dir = ::std::env::temp_dir();
@@ -784,7 +784,7 @@ mod tests {
         jdb.commit(7, &b"7".sha3(), Some((3, b"3".sha3()))).unwrap();
         assert!(jdb.can_reconstruct_refs());
     }
 
     #[test]
     fn reopen_remove_three() {
         init_log();
@@ -838,7 +838,7 @@ mod tests {
             assert!(!jdb.exists(&foo));
         }
     }
 
     #[test]
     fn reopen_fork() {
         let mut dir = ::std::env::temp_dir();

@@ -120,9 +120,15 @@ impl AccountProvider for AccountService {
     }
 }
 
+impl Default for AccountService {
+    fn default() -> Self {
+        AccountService::new()
+    }
+}
+
 impl AccountService {
     /// New account service with the default location
-    pub fn new() -> AccountService {
+    pub fn new() -> Self {
         let secret_store = RwLock::new(SecretStore::new());
         secret_store.write().unwrap().try_import_existing();
         AccountService {
@@ -568,7 +574,7 @@ mod tests {
         let temp = RandomTempPath::create_dir();
         let mut sstore = SecretStore::new_test(&temp);
         let addr = sstore.new_account("test").unwrap();
-        let _ok = sstore.unlock_account(&addr, "test").unwrap();
+        sstore.unlock_account(&addr, "test").unwrap();
         let secret = sstore.account_secret(&addr).unwrap();
         let kp = KeyPair::from_secret(secret).unwrap();
         assert_eq!(Address::from(kp.public().sha3()), addr);

@@ -160,12 +160,12 @@ impl Connection {
         }
     }
 
     /// Get socket token
     pub fn token(&self) -> StreamToken {
         self.token
     }
 
     /// Replace socket token
     pub fn set_token(&mut self, token: StreamToken) {
         self.token = token;
     }
@@ -261,13 +261,13 @@ pub struct EncryptedConnection {
 }
 
 impl EncryptedConnection {
 
     /// Get socket token
     pub fn token(&self) -> StreamToken {
         self.connection.token
     }
 
     /// Replace socket token
     pub fn set_token(&mut self, token: StreamToken) {
         self.connection.set_token(token);
     }
@@ -513,8 +513,14 @@ mod tests {
     buf_size: usize,
 }
 
+impl Default for TestSocket {
+    fn default() -> Self {
+        TestSocket::new()
+    }
+}
+
 impl TestSocket {
-    fn new() -> TestSocket {
+    fn new() -> Self {
         TestSocket {
             read_buffer: vec![],
             write_buffer: vec![],
@@ -593,8 +599,14 @@ mod tests {
 
 type TestConnection = GenericConnection<TestSocket>;
 
+impl Default for TestConnection {
+    fn default() -> Self {
+        TestConnection::new()
+    }
+}
+
 impl TestConnection {
-    pub fn new() -> TestConnection {
+    pub fn new() -> Self {
         TestConnection {
             token: 999998888usize,
             socket: TestSocket::new(),
@@ -609,8 +621,14 @@ mod tests {
 
 type TestBrokenConnection = GenericConnection<TestBrokenSocket>;
 
+impl Default for TestBrokenConnection {
+    fn default() -> Self {
+        TestBrokenConnection::new()
+    }
+}
+
 impl TestBrokenConnection {
-    pub fn new() -> TestBrokenConnection {
+    pub fn new() -> Self {
         TestBrokenConnection {
             token: 999998888usize,
             socket: TestBrokenSocket { error: "test broken socket".to_owned() },