Merge branch 'master' into ipc-hypervisor

NikVolf 2016-04-14 21:57:24 +03:00
commit dc7e105ef8
36 changed files with 391 additions and 147 deletions

Cargo.lock (generated)

@ -3,7 +3,7 @@ name = "parity"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"bincode 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "bincode 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.61 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.63 (registry+https://github.com/rust-lang/crates.io-index)",
"ctrlc 1.1.1 (git+https://github.com/tomusdrw/rust-ctrlc.git)", "ctrlc 1.1.1 (git+https://github.com/tomusdrw/rust-ctrlc.git)",
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
@ -21,7 +21,6 @@ dependencies = [
"fdlimit 0.1.0", "fdlimit 0.1.0",
"hyper 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"nanomsg 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"number_prefix 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "number_prefix 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rpassword 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "rpassword 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -123,7 +122,7 @@ dependencies = [
[[package]] [[package]]
name = "clippy" name = "clippy"
version = "0.0.61" version = "0.0.63"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"quine-mc_cluskey 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "quine-mc_cluskey 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -255,7 +254,7 @@ dependencies = [
name = "ethcore" name = "ethcore"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"clippy 0.0.61 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.63 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.1.0", "ethash 1.1.0",
@ -283,7 +282,7 @@ name = "ethcore-ipc"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"ethcore-devtools 1.1.0", "ethcore-devtools 1.1.0",
"nanomsg 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "nanomsg 0.5.0 (git+https://github.com/ethcore/nanomsg.rs.git)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -305,14 +304,14 @@ version = "1.1.0"
dependencies = [ dependencies = [
"ethcore-ipc 1.1.0", "ethcore-ipc 1.1.0",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"nanomsg 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "nanomsg 0.5.0 (git+https://github.com/ethcore/nanomsg.rs.git)",
] ]
[[package]] [[package]]
name = "ethcore-rpc" name = "ethcore-rpc"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"clippy 0.0.61 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.63 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 1.1.0", "ethash 1.1.0",
"ethcore 1.1.0", "ethcore 1.1.0",
"ethcore-util 1.1.0", "ethcore-util 1.1.0",
@ -336,7 +335,7 @@ dependencies = [
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"bigint 0.1.0", "bigint 0.1.0",
"chrono 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", "chrono 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.61 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.63 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -368,7 +367,7 @@ dependencies = [
name = "ethcore-webapp" name = "ethcore-webapp"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"clippy 0.0.61 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.63 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-rpc 1.1.0", "ethcore-rpc 1.1.0",
"ethcore-util 1.1.0", "ethcore-util 1.1.0",
"hyper 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -397,7 +396,7 @@ dependencies = [
name = "ethminer" name = "ethminer"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"clippy 0.0.61 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.63 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.1.0", "ethcore 1.1.0",
"ethcore-util 1.1.0", "ethcore-util 1.1.0",
@ -411,7 +410,7 @@ dependencies = [
name = "ethsync" name = "ethsync"
version = "1.1.0" version = "1.1.0"
dependencies = [ dependencies = [
"clippy 0.0.61 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.63 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 1.1.0", "ethcore 1.1.0",
"ethcore-util 1.1.0", "ethcore-util 1.1.0",
@ -716,17 +715,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "nanomsg" name = "nanomsg"
version = "0.5.0" version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/ethcore/nanomsg.rs.git#bcb1615462da2cf99a68a4e6107b9e19794bd699"
dependencies = [ dependencies = [
"libc 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
"nanomsg-sys 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "nanomsg-sys 0.5.0 (git+https://github.com/ethcore/nanomsg.rs.git)",
] ]
[[package]] [[package]]
name = "nanomsg-sys" name = "nanomsg-sys"
version = "0.5.0" version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "git+https://github.com/ethcore/nanomsg.rs.git#bcb1615462da2cf99a68a4e6107b9e19794bd699"
dependencies = [ dependencies = [
"gcc 0.3.26 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
] ]


@ -24,7 +24,7 @@ daemonize = "0.2"
num_cpus = "0.2" num_cpus = "0.2"
number_prefix = "0.2" number_prefix = "0.2"
rpassword = "0.1" rpassword = "0.1"
clippy = { version = "0.0.61", optional = true} clippy = { version = "0.0.63", optional = true}
ethcore = { path = "ethcore" } ethcore = { path = "ethcore" }
ethcore-util = { path = "util" } ethcore-util = { path = "util" }
ethsync = { path = "sync" } ethsync = { path = "sync" }
@ -33,7 +33,6 @@ ethcore-devtools = { path = "devtools" }
ethcore-rpc = { path = "rpc", optional = true } ethcore-rpc = { path = "rpc", optional = true }
ethcore-webapp = { path = "webapp", optional = true } ethcore-webapp = { path = "webapp", optional = true }
semver = "0.2" semver = "0.2"
nanomsg = "0.5.0"
ethcore-ipc-nano = { path = "ipc/nano" } ethcore-ipc-nano = { path = "ipc/nano" }
"ethcore-ipc" = { path = "ipc/rpc" } "ethcore-ipc" = { path = "ipc/rpc" }
bincode = "*" bincode = "*"


@ -19,7 +19,7 @@ First (if you don't already have it) get multirust:
- Linux: - Linux:
```bash ```bash
curl -sf https://raw.githubusercontent.com/brson/multirust/master/quick-install.sh | sudo sh -s -- --yes curl -sf https://raw.githubusercontent.com/brson/multirust/master/quick-install.sh | sh
``` ```
- OSX with Homebrew: - OSX with Homebrew:


@ -27,7 +27,7 @@ use std::ptr;
use sha3; use sha3;
use std::slice; use std::slice;
use std::path::PathBuf; use std::path::PathBuf;
use std::io::{Read, Write, self}; use std::io::{self, Read, Write};
use std::fs::{self, File}; use std::fs::{self, File};
pub const ETHASH_EPOCH_LENGTH: u64 = 30000; pub const ETHASH_EPOCH_LENGTH: u64 = 30000;
@ -44,14 +44,14 @@ const NODE_WORDS: usize = 64 / 4;
const NODE_BYTES: usize = 64; const NODE_BYTES: usize = 64;
const MIX_WORDS: usize = ETHASH_MIX_BYTES / 4; const MIX_WORDS: usize = ETHASH_MIX_BYTES / 4;
const MIX_NODES: usize = MIX_WORDS / NODE_WORDS; const MIX_NODES: usize = MIX_WORDS / NODE_WORDS;
const FNV_PRIME: u32 = 0x01000193; const FNV_PRIME: u32 = 0x01000193;
/// Computation result /// Computation result
pub struct ProofOfWork { pub struct ProofOfWork {
/// Difficulty boundary /// Difficulty boundary
pub value: H256, pub value: H256,
/// Mix /// Mix
pub mix_hash: H256 pub mix_hash: H256,
} }
struct Node { struct Node {
@ -148,14 +148,16 @@ impl Light {
pub struct SeedHashCompute { pub struct SeedHashCompute {
prev_epoch: Cell<u64>, prev_epoch: Cell<u64>,
prev_seedhash: Cell<H256> prev_seedhash: Cell<H256>,
} }
impl SeedHashCompute { impl SeedHashCompute {
#[inline] #[inline]
pub fn new() -> SeedHashCompute { pub fn new() -> SeedHashCompute {
SeedHashCompute { prev_epoch: Cell::new(0), prev_seedhash: Cell::new([0u8; 32]) } SeedHashCompute {
prev_epoch: Cell::new(0),
prev_seedhash: Cell::new([0u8; 32]),
}
} }
#[inline] #[inline]
@ -181,7 +183,7 @@ impl SeedHashCompute {
#[inline] #[inline]
pub fn resume_compute_seedhash(mut hash: H256, start_epoch: u64, end_epoch: u64) -> H256 { pub fn resume_compute_seedhash(mut hash: H256, start_epoch: u64, end_epoch: u64) -> H256 {
for _ in start_epoch .. end_epoch { for _ in start_epoch..end_epoch {
unsafe { sha3::sha3_256(hash[..].as_mut_ptr(), 32, hash[..].as_ptr(), 32) }; unsafe { sha3::sha3_256(hash[..].as_mut_ptr(), 32, hash[..].as_ptr(), 32) };
} }
hash hash
@ -201,22 +203,22 @@ fn sha3_512(input: &[u8], output: &mut [u8]) {
#[inline] #[inline]
fn get_cache_size(block_number: u64) -> usize { fn get_cache_size(block_number: u64) -> usize {
let mut sz: u64 = CACHE_BYTES_INIT + CACHE_BYTES_GROWTH * (block_number / ETHASH_EPOCH_LENGTH); let mut sz: u64 = CACHE_BYTES_INIT + CACHE_BYTES_GROWTH * (block_number / ETHASH_EPOCH_LENGTH);
sz = sz - NODE_BYTES as u64; sz = sz - NODE_BYTES as u64;
while !is_prime(sz / NODE_BYTES as u64) { while !is_prime(sz / NODE_BYTES as u64) {
sz = sz - 2 * NODE_BYTES as u64; sz = sz - 2 * NODE_BYTES as u64;
} }
sz as usize sz as usize
} }
#[inline] #[inline]
fn get_data_size(block_number: u64) -> usize { fn get_data_size(block_number: u64) -> usize {
let mut sz: u64 = DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * (block_number / ETHASH_EPOCH_LENGTH); let mut sz: u64 = DATASET_BYTES_INIT + DATASET_BYTES_GROWTH * (block_number / ETHASH_EPOCH_LENGTH);
sz = sz - ETHASH_MIX_BYTES as u64; sz = sz - ETHASH_MIX_BYTES as u64;
while !is_prime(sz / ETHASH_MIX_BYTES as u64) { while !is_prime(sz / ETHASH_MIX_BYTES as u64) {
sz = sz - 2 * ETHASH_MIX_BYTES as u64; sz = sz - 2 * ETHASH_MIX_BYTES as u64;
} }
sz as usize sz as usize
} }
@ -249,12 +251,12 @@ pub fn light_compute(light: &Light, header_hash: &H256, nonce: u64) -> ProofOfWo
hash_compute(light, full_size, header_hash, nonce) hash_compute(light, full_size, header_hash, nonce)
} }
fn hash_compute(light: &Light, full_size: usize, header_hash: &H256, nonce: u64) -> ProofOfWork { fn hash_compute(light: &Light, full_size: usize, header_hash: &H256, nonce: u64) -> ProofOfWork {
if full_size % MIX_WORDS != 0 { if full_size % MIX_WORDS != 0 {
panic!("Unaligned full size"); panic!("Unaligned full size");
} }
// pack hash and nonce together into first 40 bytes of s_mix // pack hash and nonce together into first 40 bytes of s_mix
let mut s_mix: [Node; MIX_NODES + 1] = [ Node::default(), Node::default(), Node::default() ]; let mut s_mix: [Node; MIX_NODES + 1] = [Node::default(), Node::default(), Node::default()];
unsafe { ptr::copy_nonoverlapping(header_hash.as_ptr(), s_mix.get_unchecked_mut(0).bytes.as_mut_ptr(), 32) }; unsafe { ptr::copy_nonoverlapping(header_hash.as_ptr(), s_mix.get_unchecked_mut(0).bytes.as_mut_ptr(), 32) };
unsafe { ptr::copy_nonoverlapping(mem::transmute(&nonce), s_mix.get_unchecked_mut(0).bytes[32..].as_mut_ptr(), 8) }; unsafe { ptr::copy_nonoverlapping(mem::transmute(&nonce), s_mix.get_unchecked_mut(0).bytes[32..].as_mut_ptr(), 8) };
@ -295,7 +297,7 @@ fn hash_compute(light: &Light, full_size: usize, header_hash: &H256, nonce: u64
ptr::copy_nonoverlapping(mix.get_unchecked_mut(0).bytes.as_ptr(), buf[64..].as_mut_ptr(), 32); ptr::copy_nonoverlapping(mix.get_unchecked_mut(0).bytes.as_ptr(), buf[64..].as_mut_ptr(), 32);
ptr::copy_nonoverlapping(mix.get_unchecked_mut(0).bytes.as_ptr(), mix_hash.as_mut_ptr(), 32); ptr::copy_nonoverlapping(mix.get_unchecked_mut(0).bytes.as_ptr(), mix_hash.as_mut_ptr(), 32);
let mut value: H256 = [0u8; 32]; let mut value: H256 = [0u8; 32];
sha3::sha3_256(value.as_mut_ptr(), value.len(), buf.as_ptr(), buf.len()); sha3::sha3_256(value.as_mut_ptr(), value.len(), buf.as_ptr(), buf.len());
ProofOfWork { ProofOfWork {
mix_hash: mix_hash, mix_hash: mix_hash,
value: value, value: value,
@ -348,7 +350,7 @@ fn light_new(block_number: u64) -> Light {
let idx = *nodes.get_unchecked_mut(i).as_words().get_unchecked(0) as usize % num_nodes; let idx = *nodes.get_unchecked_mut(i).as_words().get_unchecked(0) as usize % num_nodes;
let mut data = nodes.get_unchecked((num_nodes - 1 + i) % num_nodes).clone(); let mut data = nodes.get_unchecked((num_nodes - 1 + i) % num_nodes).clone();
for w in 0..NODE_WORDS { for w in 0..NODE_WORDS {
*data.as_words_mut().get_unchecked_mut(w) ^= *nodes.get_unchecked(idx).as_words().get_unchecked(w) ; *data.as_words_mut().get_unchecked_mut(w) ^= *nodes.get_unchecked(idx).as_words().get_unchecked(w);
} }
sha3_512(&data.bytes, &mut nodes.get_unchecked_mut(i).bytes); sha3_512(&data.bytes, &mut nodes.get_unchecked_mut(i).bytes);
} }
@ -362,7 +364,7 @@ fn light_new(block_number: u64) -> Light {
} }
} }
static CHARS: &'static[u8] = b"0123456789abcdef"; static CHARS: &'static [u8] = b"0123456789abcdef";
fn to_hex(bytes: &[u8]) -> String { fn to_hex(bytes: &[u8]) -> String {
let mut v = Vec::with_capacity(bytes.len() * 2); let mut v = Vec::with_capacity(bytes.len() * 2);
for &byte in bytes.iter() { for &byte in bytes.iter() {
@ -370,9 +372,7 @@ fn to_hex(bytes: &[u8]) -> String {
v.push(CHARS[(byte & 0xf) as usize]); v.push(CHARS[(byte & 0xf) as usize]);
} }
unsafe { unsafe { String::from_utf8_unchecked(v) }
String::from_utf8_unchecked(v)
}
} }
#[test] #[test]
@ -402,8 +402,8 @@ fn test_get_data_size() {
#[test] #[test]
fn test_difficulty_test() { fn test_difficulty_test() {
let hash = [0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3, 0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94, 0x05, 0x52, 0x7d, 0x72]; let hash = [0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3, 0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94, 0x05, 0x52, 0x7d, 0x72];
let mix_hash = [0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce, 0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a, 0x64, 0x31, 0xab, 0x6d ]; let mix_hash = [0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce, 0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a, 0x64, 0x31, 0xab, 0x6d];
let nonce = 0xd7b3ac70a301a249; let nonce = 0xd7b3ac70a301a249;
let boundary_good = [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2, 0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a, 0xe9, 0x7e, 0x53, 0x84]; let boundary_good = [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2, 0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a, 0xe9, 0x7e, 0x53, 0x84];
assert_eq!(quick_get_difficulty(&hash, nonce, &mix_hash)[..], boundary_good[..]); assert_eq!(quick_get_difficulty(&hash, nonce, &mix_hash)[..], boundary_good[..]);
@ -413,8 +413,8 @@ fn test_difficulty_test() {
#[test] #[test]
fn test_light_compute() { fn test_light_compute() {
let hash = [0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3, 0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94, 0x05, 0x52, 0x7d, 0x72]; let hash = [0xf5, 0x7e, 0x6f, 0x3a, 0xcf, 0xc0, 0xdd, 0x4b, 0x5b, 0xf2, 0xbe, 0xe4, 0x0a, 0xb3, 0x35, 0x8a, 0xa6, 0x87, 0x73, 0xa8, 0xd0, 0x9f, 0x5e, 0x59, 0x5e, 0xab, 0x55, 0x94, 0x05, 0x52, 0x7d, 0x72];
let mix_hash = [0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce, 0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a, 0x64, 0x31, 0xab, 0x6d ]; let mix_hash = [0x1f, 0xff, 0x04, 0xce, 0xc9, 0x41, 0x73, 0xfd, 0x59, 0x1e, 0x3d, 0x89, 0x60, 0xce, 0x6b, 0xdf, 0x8b, 0x19, 0x71, 0x04, 0x8c, 0x71, 0xff, 0x93, 0x7b, 0xb2, 0xd3, 0x2a, 0x64, 0x31, 0xab, 0x6d];
let boundary = [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2, 0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a, 0xe9, 0x7e, 0x53, 0x84]; let boundary = [0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x3e, 0x9b, 0x6c, 0x69, 0xbc, 0x2c, 0xe2, 0xa2, 0x4a, 0x8e, 0x95, 0x69, 0xef, 0xc7, 0xd7, 0x1b, 0x33, 0x35, 0xdf, 0x36, 0x8c, 0x9a, 0xe9, 0x7e, 0x53, 0x84];
let nonce = 0xd7b3ac70a301a249; let nonce = 0xd7b3ac70a301a249;
// difficulty = 0x085657254bd9u64; // difficulty = 0x085657254bd9u64;
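For context on the SeedHashCompute changes above: the per-epoch seed is produced by hashing iteratively, and resume_compute_seedhash lets a cached seed from an earlier epoch be advanced instead of recomputed from scratch. A minimal, self-contained sketch of that resume idea, with an arbitrary stand-in step instead of sha3_256 (all names and constants below are illustrative, except ETHASH_EPOCH_LENGTH):

```rust
const ETHASH_EPOCH_LENGTH: u64 = 30_000;

// Placeholder one-way step standing in for sha3_256 over a 32-byte buffer.
fn step(h: u64) -> u64 {
	h.wrapping_mul(6364136223846793005).wrapping_add(1442695040888963407)
}

// Same shape as resume_compute_seedhash: advance the seed from one epoch to a later one.
fn resume_seed(mut seed: u64, start_epoch: u64, end_epoch: u64) -> u64 {
	for _ in start_epoch..end_epoch {
		seed = step(seed);
	}
	seed
}

fn main() {
	let epoch_a = 100_000 / ETHASH_EPOCH_LENGTH; // epoch 3
	let epoch_b = 200_000 / ETHASH_EPOCH_LENGTH; // epoch 6
	let cached = resume_seed(0, 0, epoch_a);
	// Resuming from the cached epoch-3 seed performs only three more steps...
	let resumed = resume_seed(cached, epoch_a, epoch_b);
	// ...and matches a full recomputation from scratch.
	assert_eq!(resumed, resume_seed(0, 0, epoch_b));
	println!("seed stand-in for epoch {}: {:#x}", epoch_b, resumed);
}
```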


@ -24,7 +24,7 @@ mod compute;
use std::mem; use std::mem;
use compute::Light; use compute::Light;
pub use compute::{SeedHashCompute, quick_get_difficulty, H256, ProofOfWork, ETHASH_EPOCH_LENGTH}; pub use compute::{ETHASH_EPOCH_LENGTH, H256, ProofOfWork, SeedHashCompute, quick_get_difficulty};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
@ -76,7 +76,7 @@ impl EthashManager {
lights.recent.clone() lights.recent.clone()
} }
_ => None, _ => None,
} },
}; };
match light { match light {
None => { None => {
@ -95,7 +95,7 @@ impl EthashManager {
lights.prev = mem::replace(&mut lights.recent, Some(light.clone())); lights.prev = mem::replace(&mut lights.recent, Some(light.clone()));
light light
} }
Some(light) => light Some(light) => light,
} }
}; };
light.compute(header_hash, nonce) light.compute(header_hash, nonce)


@ -17,7 +17,7 @@ ethcore-util = { path = "../util" }
evmjit = { path = "../evmjit", optional = true } evmjit = { path = "../evmjit", optional = true }
ethash = { path = "../ethash" } ethash = { path = "../ethash" }
num_cpus = "0.2" num_cpus = "0.2"
clippy = { version = "0.0.61", optional = true} clippy = { version = "0.0.63", optional = true}
crossbeam = "0.1.5" crossbeam = "0.1.5"
lazy_static = "0.1" lazy_static = "0.1"
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }


@ -17,6 +17,7 @@
//! Blockchain database client. //! Blockchain database client.
use std::marker::PhantomData; use std::marker::PhantomData;
use std::path::PathBuf;
use util::*; use util::*;
use util::panics::*; use util::panics::*;
use views::BlockView; use views::BlockView;
@ -126,22 +127,31 @@ impl Client<CanonVerifier> {
} }
} }
/// Get the path for the databases given the root path and information on the databases.
pub fn get_db_path(path: &Path, pruning: journaldb::Algorithm, genesis_hash: H256) -> PathBuf {
let mut dir = path.to_path_buf();
dir.push(H64::from(genesis_hash).hex());
//TODO: sec/fat: pruned/full versioning
// version here is a bit useless now, since it's controlled only be the pruning algo.
dir.push(format!("v{}-sec-{}", CLIENT_DB_VER_STR, pruning));
dir
}
/// Append a path element to the given path and return the string.
pub fn append_path(path: &Path, item: &str) -> String {
let mut p = path.to_path_buf();
p.push(item);
p.to_str().unwrap().to_owned()
}
impl<V> Client<V> where V: Verifier { impl<V> Client<V> where V: Verifier {
/// Create a new client with given spec and DB path and custom verifier. /// Create a new client with given spec and DB path and custom verifier.
pub fn new_with_verifier(config: ClientConfig, spec: Spec, path: &Path, message_channel: IoChannel<NetSyncMessage> ) -> Result<Arc<Client<V>>, Error> { pub fn new_with_verifier(config: ClientConfig, spec: Spec, path: &Path, message_channel: IoChannel<NetSyncMessage> ) -> Result<Arc<Client<V>>, Error> {
let mut dir = path.to_path_buf(); let path = get_db_path(path, config.pruning, spec.genesis_header().hash());
dir.push(H64::from(spec.genesis_header().hash()).hex());
//TODO: sec/fat: pruned/full versioning
// version here is a bit useless now, since it's controlled only be the pruning algo.
dir.push(format!("v{}-sec-{}", CLIENT_DB_VER_STR, config.pruning));
let path = dir.as_path();
let gb = spec.genesis_block(); let gb = spec.genesis_block();
let chain = Arc::new(BlockChain::new(config.blockchain, &gb, path)); let chain = Arc::new(BlockChain::new(config.blockchain, &gb, &path));
let mut state_path = path.to_path_buf();
state_path.push("state");
let state_path_str = state_path.to_str().unwrap(); let mut state_db = journaldb::new(&append_path(&path, "state"), config.pruning);
let mut state_db = journaldb::new(state_path_str, config.pruning);
if state_db.is_empty() && spec.ensure_db_good(state_db.as_hashdb_mut()) { if state_db.is_empty() && spec.ensure_db_good(state_db.as_hashdb_mut()) {
state_db.commit(0, &spec.genesis_header().hash(), None).expect("Error commiting genesis state to state DB"); state_db.commit(0, &spec.genesis_header().hash(), None).expect("Error commiting genesis state to state DB");
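The new get_db_path/append_path helpers above only compose a directory of the form root/&lt;hex of the genesis-hash prefix&gt;/v&lt;CLIENT_DB_VER_STR&gt;-sec-&lt;pruning&gt;, plus a "state" subdirectory for the journal DB. A standalone sketch of that layout; the hash prefix and version string used in main are illustrative placeholders, not values from the codebase:

```rust
use std::path::{Path, PathBuf};

// Mirrors the layout built by get_db_path: root, then the hex of the genesis-hash
// prefix (H64::from(genesis_hash).hex() in the real code), then a versioned,
// pruning-specific directory name.
fn db_path(root: &Path, genesis_prefix_hex: &str, client_db_ver: &str, pruning: &str) -> PathBuf {
	let mut dir = root.to_path_buf();
	dir.push(genesis_prefix_hex);
	dir.push(format!("v{}-sec-{}", client_db_ver, pruning));
	dir
}

// Same behaviour as append_path: push one component and return the result as a String.
fn append_path(path: &Path, item: &str) -> String {
	let mut p = path.to_path_buf();
	p.push(item);
	p.to_str().unwrap().to_owned()
}

fn main() {
	// Illustrative values only; the real prefix and version depend on the chain and build.
	let dir = db_path(Path::new("/home/user/.parity"), "d4e56740f876aef8", "5.3", "archive");
	println!("{}", append_path(&dir, "state"));
	// -> /home/user/.parity/d4e56740f876aef8/v5.3-sec-archive/state
}
```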


@ -140,10 +140,8 @@ impl<'a> Executive<'a> {
let init_gas = t.gas - base_gas_required; let init_gas = t.gas - base_gas_required;
// validate transaction nonce // validate transaction nonce
if check_nonce { if check_nonce && t.nonce != nonce {
if t.nonce != nonce { return Err(From::from(ExecutionError::InvalidNonce { expected: nonce, got: t.nonce }));
return Err(From::from(ExecutionError::InvalidNonce { expected: nonce, got: t.nonce }));
}
} }
// validate if transaction fits into given block // validate if transaction fits into given block


@ -67,6 +67,8 @@ pub struct Externalities<'a, T> where T: 'a + Tracer {
} }
impl<'a, T> Externalities<'a, T> where T: 'a + Tracer { impl<'a, T> Externalities<'a, T> where T: 'a + Tracer {
#[cfg_attr(feature="dev", allow(too_many_arguments))]
/// Basic `Externalities` constructor. /// Basic `Externalities` constructor.
pub fn new(state: &'a mut State, pub fn new(state: &'a mut State,
env_info: &'a EnvInfo, env_info: &'a EnvInfo,


@ -19,7 +19,7 @@ use common::*;
/// State changes which should be applied in finalize, /// State changes which should be applied in finalize,
/// after transaction is fully executed. /// after transaction is fully executed.
#[derive(Debug)] #[derive(Debug, Default)]
pub struct Substate { pub struct Substate {
/// Any accounts that have suicided. /// Any accounts that have suicided.
pub suicides: HashSet<Address>, pub suicides: HashSet<Address>,


@ -17,7 +17,7 @@
//! Tracing datatypes. //! Tracing datatypes.
use common::*; use common::*;
/// TraceCall result. /// `TraceCall` result.
#[derive(Debug, Clone, PartialEq, Default)] #[derive(Debug, Clone, PartialEq, Default)]
pub struct TraceCallResult { pub struct TraceCallResult {
/// Gas used by call. /// Gas used by call.
@ -26,7 +26,7 @@ pub struct TraceCallResult {
pub output: Bytes, pub output: Bytes,
} }
/// TraceCreate result. /// `TraceCreate` result.
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub struct TraceCreateResult { pub struct TraceCreateResult {
/// Gas used by create. /// Gas used by create.


@ -8,5 +8,5 @@ license = "GPL-3.0"
[dependencies] [dependencies]
"ethcore-ipc" = { path = "../rpc" } "ethcore-ipc" = { path = "../rpc" }
nanomsg = "0.5.0" nanomsg = { git = "https://github.com/ethcore/nanomsg.rs.git" }
log = "0.3" log = "0.3"


@ -9,4 +9,4 @@ license = "GPL-3.0"
[dependencies] [dependencies]
ethcore-devtools = { path = "../../devtools" } ethcore-devtools = { path = "../../devtools" }
semver = "0.2.0" semver = "0.2.0"
nanomsg = "0.5.0" nanomsg = { git = "https://github.com/ethcore/nanomsg.rs.git" }


@ -13,7 +13,7 @@ bincode = "*"
serde = "0.7.0" serde = "0.7.0"
ethcore-devtools = { path = "../../devtools" } ethcore-devtools = { path = "../../devtools" }
semver = "0.2.0" semver = "0.2.0"
nanomsg = "0.5.0" nanomsg = { git = "https://github.com/ethcore/nanomsg.rs.git" }
ethcore-ipc-nano = { path = "../nano" } ethcore-ipc-nano = { path = "../nano" }
ethcore-util = { path = "../../util" } ethcore-util = { path = "../../util" }


@ -10,7 +10,7 @@ rustc-serialize = "0.3"
serde = "0.7.0" serde = "0.7.0"
serde_json = "0.7.0" serde_json = "0.7.0"
serde_macros = { version = "0.7.0", optional = true } serde_macros = { version = "0.7.0", optional = true }
clippy = { version = "0.0.61", optional = true} clippy = { version = "0.0.63", optional = true}
[build-dependencies] [build-dependencies]
serde_codegen = { version = "0.7.0", optional = true } serde_codegen = { version = "0.7.0", optional = true }


@ -17,7 +17,7 @@ log = "0.3"
env_logger = "0.3" env_logger = "0.3"
rustc-serialize = "0.3" rustc-serialize = "0.3"
rayon = "0.3.1" rayon = "0.3.1"
clippy = { version = "0.0.61", optional = true} clippy = { version = "0.0.63", optional = true}
[features] [features]
default = [] default = []


@ -64,7 +64,7 @@ mod transaction_queue;
pub use transaction_queue::{TransactionQueue, AccountDetails}; pub use transaction_queue::{TransactionQueue, AccountDetails};
pub use miner::{Miner}; pub use miner::{Miner};
use util::{H256, U256, Address, FixedHash, Bytes}; use util::{H256, U256, Address, Bytes};
use ethcore::client::{BlockChainClient}; use ethcore::client::{BlockChainClient};
use ethcore::block::{ClosedBlock}; use ethcore::block::{ClosedBlock};
use ethcore::error::{Error}; use ethcore::error::{Error};
@ -77,14 +77,29 @@ pub trait MinerService : Send + Sync {
fn status(&self) -> MinerStatus; fn status(&self) -> MinerStatus;
/// Get the author that we will seal blocks as. /// Get the author that we will seal blocks as.
fn author(&self) -> Address { Address::zero() } fn author(&self) -> Address;
/// Get the extra_data that we will seal blocks wuth. /// Set the author that we will seal blocks as.
fn extra_data(&self) -> Bytes { vec![] } fn set_author(&self, author: Address);
/// Get the extra_data that we will seal blocks with.
fn extra_data(&self) -> Bytes;
/// Set the extra_data that we will seal blocks with.
fn set_extra_data(&self, extra_data: Bytes);
/// Get current minimal gas price for transactions accepted to queue.
fn minimal_gas_price(&self) -> U256;
/// Set minimal gas price of transaction to be accepted for mining.
fn set_minimal_gas_price(&self, min_gas_price: U256);
/// Get the gas limit we wish to target when sealing a new block. /// Get the gas limit we wish to target when sealing a new block.
fn gas_floor_target(&self) -> U256; fn gas_floor_target(&self) -> U256;
/// Set the gas limit we wish to target when sealing a new block.
fn set_gas_floor_target(&self, target: U256);
/// Imports transactions to transaction queue. /// Imports transactions to transaction queue.
fn import_transactions<T>(&self, transactions: Vec<SignedTransaction>, fetch_account: T) -> Vec<Result<(), Error>> fn import_transactions<T>(&self, transactions: Vec<SignedTransaction>, fetch_account: T) -> Vec<Result<(), Error>>
where T: Fn(&Address) -> AccountDetails; where T: Fn(&Address) -> AccountDetails;
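The reworked MinerService trait above pairs each setting (author, extra data, minimal gas price, gas floor target) with a getter and a setter; the implementations in this diff (Miner, TestMinerService) back them with RwLock-guarded fields. A compact, self-contained sketch of that pattern, with plain integers standing in for U256 and Address:

```rust
use std::sync::RwLock;

// Hypothetical settings holder following the RwLock getter/setter pattern from this diff.
struct MinerSettings {
	author: RwLock<u64>,
	extra_data: RwLock<Vec<u8>>,
	min_gas_price: RwLock<u64>,
	gas_floor_target: RwLock<u64>,
}

impl MinerSettings {
	fn set_author(&self, author: u64) { *self.author.write().unwrap() = author; }
	fn author(&self) -> u64 { *self.author.read().unwrap() }

	fn set_extra_data(&self, extra_data: Vec<u8>) { *self.extra_data.write().unwrap() = extra_data; }
	fn extra_data(&self) -> Vec<u8> { self.extra_data.read().unwrap().clone() }

	fn set_minimal_gas_price(&self, p: u64) { *self.min_gas_price.write().unwrap() = p; }
	fn minimal_gas_price(&self) -> u64 { *self.min_gas_price.read().unwrap() }

	fn set_gas_floor_target(&self, t: u64) { *self.gas_floor_target.write().unwrap() = t; }
	fn gas_floor_target(&self) -> u64 { *self.gas_floor_target.read().unwrap() }
}

fn main() {
	let s = MinerSettings {
		author: RwLock::new(0),
		extra_data: RwLock::new(vec![1, 2, 3, 4]),
		min_gas_price: RwLock::new(20_000_000),
		gas_floor_target: RwLock::new(12_345),
	};
	s.set_author(0xA11CE);
	s.set_extra_data(b"parity".to_vec());
	s.set_gas_floor_target(4_700_000);
	s.set_minimal_gas_price(25_000_000);
	assert_eq!(s.minimal_gas_price(), 25_000_000);
	println!("author={} extra_data={:?} gas_floor_target={}", s.author(), s.extra_data(), s.gas_floor_target());
}
```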


@ -69,26 +69,6 @@ impl Miner {
}) })
} }
/// Set the author that we will seal blocks as.
pub fn set_author(&self, author: Address) {
*self.author.write().unwrap() = author;
}
/// Set the extra_data that we will seal blocks with.
pub fn set_extra_data(&self, extra_data: Bytes) {
*self.extra_data.write().unwrap() = extra_data;
}
/// Set the gas limit we wish to target when sealing a new block.
pub fn set_gas_floor_target(&self, target: U256) {
*self.gas_floor_target.write().unwrap() = target;
}
/// Set minimal gas price of transaction to be accepted for mining.
pub fn set_minimal_gas_price(&self, min_gas_price: U256) {
self.transaction_queue.lock().unwrap().set_minimal_gas_price(min_gas_price);
}
/// Prepares new block for sealing including top transactions from queue. /// Prepares new block for sealing including top transactions from queue.
#[cfg_attr(feature="dev", allow(match_same_arms))] #[cfg_attr(feature="dev", allow(match_same_arms))]
fn prepare_sealing(&self, chain: &BlockChainClient) { fn prepare_sealing(&self, chain: &BlockChainClient) {
@ -195,6 +175,27 @@ impl MinerService for Miner {
} }
} }
fn set_author(&self, author: Address) {
*self.author.write().unwrap() = author;
}
fn set_extra_data(&self, extra_data: Bytes) {
*self.extra_data.write().unwrap() = extra_data;
}
/// Set the gas limit we wish to target when sealing a new block.
fn set_gas_floor_target(&self, target: U256) {
*self.gas_floor_target.write().unwrap() = target;
}
fn set_minimal_gas_price(&self, min_gas_price: U256) {
self.transaction_queue.lock().unwrap().set_minimal_gas_price(min_gas_price);
}
fn minimal_gas_price(&self) -> U256 {
*self.transaction_queue.lock().unwrap().minimal_gas_price()
}
fn sensible_gas_price(&self) -> U256 { fn sensible_gas_price(&self) -> U256 {
// 10% above our minimum. // 10% above our minimum.
*self.transaction_queue.lock().unwrap().minimal_gas_price() * x!(110) / x!(100) *self.transaction_queue.lock().unwrap().minimal_gas_price() * x!(110) / x!(100)


@ -133,11 +133,11 @@ Networking Options:
API and Console Options: API and Console Options:
-j --jsonrpc Enable the JSON-RPC API server. -j --jsonrpc Enable the JSON-RPC API server.
--jsonrpc-port PORT Specify the port portion of the JSONRPC API server
[default: 8545].
--jsonrpc-interface IP Specify the hostname portion of the JSONRPC API --jsonrpc-interface IP Specify the hostname portion of the JSONRPC API
server, IP should be an interface's IP address, or server, IP should be an interface's IP address, or
all (all interfaces) or local [default: local]. all (all interfaces) or local [default: local].
--jsonrpc-port PORT Specify the port portion of the JSONRPC API server
[default: 8545].
--jsonrpc-cors URL Specify CORS header for JSON-RPC API responses --jsonrpc-cors URL Specify CORS header for JSON-RPC API responses
[default: null]. [default: null].
--jsonrpc-apis APIS Specify the APIs available through the JSONRPC --jsonrpc-apis APIS Specify the APIs available through the JSONRPC
@ -176,8 +176,14 @@ Sealing/Mining Options:
Footprint Options: Footprint Options:
--pruning METHOD Configure pruning of the state/storage trie. METHOD --pruning METHOD Configure pruning of the state/storage trie. METHOD
may be one of: archive, basic (experimental), fast may be one of auto, archive, basic, fast, light:
(experimental) [default: archive]. archive - keep all state trie data. No pruning.
basic - reference count in disk DB. Slow but light.
fast - maintain journal overlay. Fast but 50MB used.
light - early merges with partial tracking. Fast
and light. Experimental!
auto - use the method most recently synced or
default to archive if none synced [default: auto].
--cache-pref-size BYTES Specify the prefered size of the blockchain cache in --cache-pref-size BYTES Specify the prefered size of the blockchain cache in
bytes [default: 16384]. bytes [default: 16384].
--cache-max-size BYTES Specify the maximum size of the blockchain cache in --cache-max-size BYTES Specify the maximum size of the blockchain cache in
@ -543,7 +549,26 @@ impl Configuration {
ret ret
} }
fn client_config(&self) -> ClientConfig { fn find_best_db(&self, spec: &Spec) -> Option<journaldb::Algorithm> {
let mut ret = None;
let mut latest_era = None;
let jdb_types = [journaldb::Algorithm::Archive, journaldb::Algorithm::EarlyMerge, journaldb::Algorithm::OverlayRecent, journaldb::Algorithm::RefCounted];
for i in jdb_types.into_iter() {
let db = journaldb::new(&append_path(&get_db_path(&Path::new(&self.path()), *i, spec.genesis_header().hash()), "state"), *i);
trace!(target: "parity", "Looking for best DB: {} at {:?}", i, db.latest_era());
match (latest_era, db.latest_era()) {
(Some(best), Some(this)) if best >= this => {}
(_, None) => {}
(_, Some(this)) => {
latest_era = Some(this);
ret = Some(*i);
}
}
}
ret
}
fn client_config(&self, spec: &Spec) -> ClientConfig {
let mut client_config = ClientConfig::default(); let mut client_config = ClientConfig::default();
match self.args.flag_cache { match self.args.flag_cache {
Some(mb) => { Some(mb) => {
@ -560,8 +585,10 @@ impl Configuration {
"light" => journaldb::Algorithm::EarlyMerge, "light" => journaldb::Algorithm::EarlyMerge,
"fast" => journaldb::Algorithm::OverlayRecent, "fast" => journaldb::Algorithm::OverlayRecent,
"basic" => journaldb::Algorithm::RefCounted, "basic" => journaldb::Algorithm::RefCounted,
"auto" => self.find_best_db(spec).unwrap_or(journaldb::Algorithm::OverlayRecent),
_ => { die!("Invalid pruning method given."); } _ => { die!("Invalid pruning method given."); }
}; };
trace!(target: "parity", "Using pruning strategy of {}", client_config.pruning);
client_config.name = self.args.flag_identity.clone(); client_config.name = self.args.flag_identity.clone();
client_config.queue.max_mem_use = self.args.flag_queue_max_size; client_config.queue.max_mem_use = self.args.flag_queue_max_size;
client_config client_config
@ -656,13 +683,14 @@ impl Configuration {
let spec = self.spec(); let spec = self.spec();
let net_settings = self.net_settings(&spec); let net_settings = self.net_settings(&spec);
let sync_config = self.sync_config(&spec); let sync_config = self.sync_config(&spec);
let client_config = self.client_config(&spec);
// Secret Store // Secret Store
let account_service = Arc::new(self.account_service()); let account_service = Arc::new(self.account_service());
// Build client // Build client
let mut service = ClientService::start( let mut service = ClientService::start(
self.client_config(), spec, net_settings, &Path::new(&self.path()) client_config, spec, net_settings, &Path::new(&self.path())
).unwrap_or_else(|e| die_with_error(e)); ).unwrap_or_else(|e| die_with_error(e));
panic_handler.forward_from(&service); panic_handler.forward_from(&service);
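find_best_db above implements the new --pruning auto option by opening the state DB of each candidate journal algorithm and keeping the one whose latest_era is highest, falling back to a default when nothing has synced. The same selection logic reduced to plain data; the algorithm names and era numbers below are made up for illustration:

```rust
// Pick the candidate with the highest known era, if any; mirrors the match in
// find_best_db: an existing best that is >= the current one wins, a missing era
// is skipped, otherwise the current candidate becomes the best so far.
fn find_best(candidates: &[(&'static str, Option<u64>)]) -> Option<&'static str> {
	let mut ret = None;
	let mut latest_era = None;
	for &(name, era) in candidates {
		match (latest_era, era) {
			(Some(best), Some(this)) if best >= this => {}
			(_, None) => {}
			(_, Some(this)) => {
				latest_era = Some(this);
				ret = Some(name);
			}
		}
	}
	ret
}

fn main() {
	// Hypothetical eras as read from each algorithm's state DB.
	let dbs = [("archive", Some(1_200_000)), ("light", None), ("fast", Some(1_450_000)), ("basic", None)];
	let chosen = find_best(&dbs).unwrap_or("fast"); // default mirrors the OverlayRecent fallback
	println!("--pruning auto resolved to: {}", chosen);
}
```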


@ -18,13 +18,15 @@
use semver::Version; use semver::Version;
use std::collections::*; use std::collections::*;
use std::fs::File; use std::fs::{File, create_dir_all};
use std::env; use std::env;
use std::io::{Read, Write}; use std::io::{Read, Write};
#[cfg_attr(feature="dev", allow(enum_variant_names))]
#[derive(Debug)] #[derive(Debug)]
pub enum Error { pub enum Error {
CannotLockVersionFile, CannotCreateConfigPath,
CannotWriteVersionFile,
CannotUpdateVersionFile, CannotUpdateVersionFile,
} }
@ -65,7 +67,7 @@ fn dummy_upgrade() -> Result<(), Error> {
Ok(()) Ok(())
} }
fn push_updrades(upgrades: &mut UpgradeList) fn push_upgrades(upgrades: &mut UpgradeList)
{ {
// dummy upgrade (remove when the first one is in) // dummy upgrade (remove when the first one is in)
upgrades.insert( upgrades.insert(
@ -75,7 +77,7 @@ fn push_updrades(upgrades: &mut UpgradeList)
fn upgrade_from_version(previous_version: &Version) -> Result<usize, Error> { fn upgrade_from_version(previous_version: &Version) -> Result<usize, Error> {
let mut upgrades = HashMap::new(); let mut upgrades = HashMap::new();
push_updrades(&mut upgrades); push_upgrades(&mut upgrades);
let current_version = Version::parse(CURRENT_VERSION).unwrap(); let current_version = Version::parse(CURRENT_VERSION).unwrap();
@ -95,6 +97,7 @@ fn with_locked_version<F>(script: F) -> Result<usize, Error>
{ {
let mut path = env::home_dir().expect("Applications should have a home dir"); let mut path = env::home_dir().expect("Applications should have a home dir");
path.push(".parity"); path.push(".parity");
try!(create_dir_all(&path).map_err(|_| Error::CannotCreateConfigPath));
path.push("ver.lock"); path.push("ver.lock");
let version = let version =
@ -107,16 +110,12 @@ fn with_locked_version<F>(script: F) -> Result<usize, Error>
}) })
.unwrap_or_else(|| Version::parse("0.9.0").unwrap()); .unwrap_or_else(|| Version::parse("0.9.0").unwrap());
let script_result = { let mut lock = try!(File::create(&path).map_err(|_| Error::CannotWriteVersionFile));
let mut lock = try!(File::create(&path).map_err(|_| Error::CannotLockVersionFile)); let result = script(&version);
let result = script(&version);
let written_version = Version::parse(CURRENT_VERSION).unwrap(); let written_version = Version::parse(CURRENT_VERSION).unwrap();
try!(lock.write_all(written_version.to_string().as_bytes()).map_err(|_| Error::CannotUpdateVersionFile)); try!(lock.write_all(written_version.to_string().as_bytes()).map_err(|_| Error::CannotUpdateVersionFile));
result result
};
script_result
} }
pub fn upgrade() -> Result<usize, Error> { pub fn upgrade() -> Result<usize, Error> {
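The reworked with_locked_version above now creates the config directory, reads the previously recorded version from ver.lock (defaulting to 0.9.0 when the file is missing or unreadable), runs the upgrade scripts, and rewrites the file with the current version. A small self-contained sketch of that read-run-rewrite flow, using a temp directory and plain strings instead of semver; paths, names, and versions here are illustrative:

```rust
use std::fs::{create_dir_all, File};
use std::io::{Read, Write};

const CURRENT_VERSION: &'static str = "1.1.0";

fn with_recorded_version<F: Fn(&str)>(script: F) -> std::io::Result<()> {
	let mut path = std::env::temp_dir();
	path.push("parity-upgrade-sketch");
	create_dir_all(&path)?; // counterpart of the new create_dir_all on the config dir
	path.push("ver.lock");

	// Read the previously recorded version, defaulting like the real code does.
	let previous = File::open(&path)
		.ok()
		.and_then(|mut f| {
			let mut s = String::new();
			f.read_to_string(&mut s).ok().map(|_| s)
		})
		.unwrap_or_else(|| "0.9.0".to_owned());

	script(&previous); // run upgrades from `previous` up to CURRENT_VERSION

	// Record the current version for the next run.
	let mut lock = File::create(&path)?;
	lock.write_all(CURRENT_VERSION.as_bytes())?;
	Ok(())
}

fn main() {
	with_recorded_version(|prev| println!("upgrading from {} to {}", prev, CURRENT_VERSION)).unwrap();
}
```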


@ -22,7 +22,7 @@ ethminer = { path = "../miner" }
rustc-serialize = "0.3" rustc-serialize = "0.3"
transient-hashmap = "0.1" transient-hashmap = "0.1"
serde_macros = { version = "0.7.0", optional = true } serde_macros = { version = "0.7.0", optional = true }
clippy = { version = "0.0.61", optional = true} clippy = { version = "0.0.63", optional = true}
[build-dependencies] [build-dependencies]
serde_codegen = { version = "0.7.0", optional = true } serde_codegen = { version = "0.7.0", optional = true }


@ -15,6 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Ethcore-specific rpc implementation. //! Ethcore-specific rpc implementation.
use util::{U256, Address};
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use jsonrpc_core::*; use jsonrpc_core::*;
use ethminer::{MinerService}; use ethminer::{MinerService};
@ -37,6 +38,39 @@ impl<M> EthcoreClient<M> where M: MinerService {
} }
impl<M> Ethcore for EthcoreClient<M> where M: MinerService + 'static { impl<M> Ethcore for EthcoreClient<M> where M: MinerService + 'static {
fn set_min_gas_price(&self, params: Params) -> Result<Value, Error> {
from_params::<(U256,)>(params).and_then(|(gas_price,)| {
take_weak!(self.miner).set_minimal_gas_price(gas_price);
to_value(&true)
})
}
fn set_gas_floor_target(&self, params: Params) -> Result<Value, Error> {
from_params::<(U256,)>(params).and_then(|(gas_floor_target,)| {
take_weak!(self.miner).set_gas_floor_target(gas_floor_target);
to_value(&true)
})
}
fn set_extra_data(&self, params: Params) -> Result<Value, Error> {
from_params::<(Bytes,)>(params).and_then(|(extra_data,)| {
take_weak!(self.miner).set_extra_data(extra_data.to_vec());
to_value(&true)
})
}
fn set_author(&self, params: Params) -> Result<Value, Error> {
from_params::<(Address,)>(params).and_then(|(author,)| {
take_weak!(self.miner).set_author(author);
to_value(&true)
})
}
fn min_gas_price(&self, _: Params) -> Result<Value, Error> {
to_value(&take_weak!(self.miner).minimal_gas_price())
}
fn extra_data(&self, _: Params) -> Result<Value, Error> { fn extra_data(&self, _: Params) -> Result<Value, Error> {
to_value(&Bytes::new(take_weak!(self.miner).extra_data())) to_value(&Bytes::new(take_weak!(self.miner).extra_data()))
} }


@ -15,10 +15,13 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc; use std::sync::Arc;
use std::str::FromStr;
use jsonrpc_core::IoHandler; use jsonrpc_core::IoHandler;
use v1::{Ethcore, EthcoreClient}; use v1::{Ethcore, EthcoreClient};
use v1::tests::helpers::{TestMinerService}; use ethminer::MinerService;
use v1::tests::helpers::TestMinerService;
use util::numbers::*; use util::numbers::*;
use rustc_serialize::hex::FromHex;
fn miner_service() -> Arc<TestMinerService> { fn miner_service() -> Arc<TestMinerService> {
@ -52,3 +55,71 @@ fn rpc_ethcore_gas_floor_target() {
assert_eq!(io.handle_request(request), Some(response.to_owned())); assert_eq!(io.handle_request(request), Some(response.to_owned()));
} }
#[test]
fn rpc_ethcore_min_gas_price() {
let miner = miner_service();
let ethcore = EthcoreClient::new(&miner).to_delegate();
let io = IoHandler::new();
io.add_delegate(ethcore);
let request = r#"{"jsonrpc": "2.0", "method": "ethcore_minGasPrice", "params": [], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":"0x01312d00","id":1}"#;
assert_eq!(io.handle_request(request), Some(response.to_owned()));
}
#[test]
fn rpc_ethcore_set_min_gas_price() {
let miner = miner_service();
let ethcore = EthcoreClient::new(&miner).to_delegate();
let io = IoHandler::new();
io.add_delegate(ethcore);
let request = r#"{"jsonrpc": "2.0", "method": "ethcore_setMinGasPrice", "params":["0xcd1722f3947def4cf144679da39c4c32bdc35681"], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(io.handle_request(request), Some(response.to_owned()));
assert_eq!(miner.minimal_gas_price(), U256::from_str("cd1722f3947def4cf144679da39c4c32bdc35681").unwrap());
}
#[test]
fn rpc_ethcore_set_gas_floor_target() {
let miner = miner_service();
let ethcore = EthcoreClient::new(&miner).to_delegate();
let io = IoHandler::new();
io.add_delegate(ethcore);
let request = r#"{"jsonrpc": "2.0", "method": "ethcore_setGasFloorTarget", "params":["0xcd1722f3947def4cf144679da39c4c32bdc35681"], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(io.handle_request(request), Some(response.to_owned()));
assert_eq!(miner.gas_floor_target(), U256::from_str("cd1722f3947def4cf144679da39c4c32bdc35681").unwrap());
}
#[test]
fn rpc_ethcore_set_extra_data() {
let miner = miner_service();
let ethcore = EthcoreClient::new(&miner).to_delegate();
let io = IoHandler::new();
io.add_delegate(ethcore);
let request = r#"{"jsonrpc": "2.0", "method": "ethcore_setExtraData", "params":["0xcd1722f3947def4cf144679da39c4c32bdc35681"], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(io.handle_request(request), Some(response.to_owned()));
assert_eq!(miner.extra_data(), "cd1722f3947def4cf144679da39c4c32bdc35681".from_hex().unwrap());
}
#[test]
fn rpc_ethcore_set_author() {
let miner = miner_service();
let ethcore = EthcoreClient::new(&miner).to_delegate();
let io = IoHandler::new();
io.add_delegate(ethcore);
let request = r#"{"jsonrpc": "2.0", "method": "ethcore_setAuthor", "params":["0xcd1722f3947def4cf144679da39c4c32bdc35681"], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(io.handle_request(request), Some(response.to_owned()));
assert_eq!(miner.author(), Address::from_str("cd1722f3947def4cf144679da39c4c32bdc35681").unwrap());
}


@ -16,7 +16,7 @@
//! Test implementation of miner service. //! Test implementation of miner service.
use util::{Address, H256, Bytes, U256}; use util::{Address, H256, Bytes, U256, FixedHash};
use util::standard::*; use util::standard::*;
use ethcore::error::Error; use ethcore::error::Error;
use ethcore::client::BlockChainClient; use ethcore::client::BlockChainClient;
@ -34,6 +34,11 @@ pub struct TestMinerService {
pub pending_transactions: Mutex<HashMap<H256, SignedTransaction>>, pub pending_transactions: Mutex<HashMap<H256, SignedTransaction>>,
/// Last nonces. /// Last nonces.
pub last_nonces: RwLock<HashMap<Address, U256>>, pub last_nonces: RwLock<HashMap<Address, U256>>,
min_gas_price: RwLock<U256>,
gas_floor_target: RwLock<U256>,
author: RwLock<Address>,
extra_data: RwLock<Bytes>,
} }
impl Default for TestMinerService { impl Default for TestMinerService {
@ -43,6 +48,10 @@ impl Default for TestMinerService {
latest_closed_block: Mutex::new(None), latest_closed_block: Mutex::new(None),
pending_transactions: Mutex::new(HashMap::new()), pending_transactions: Mutex::new(HashMap::new()),
last_nonces: RwLock::new(HashMap::new()), last_nonces: RwLock::new(HashMap::new()),
min_gas_price: RwLock::new(U256::from(20_000_000)),
gas_floor_target: RwLock::new(U256::from(12345)),
author: RwLock::new(Address::zero()),
extra_data: RwLock::new(vec![1, 2, 3, 4]),
} }
} }
} }
@ -58,6 +67,39 @@ impl MinerService for TestMinerService {
} }
} }
fn set_author(&self, author: Address) {
*self.author.write().unwrap() = author;
}
fn set_extra_data(&self, extra_data: Bytes) {
*self.extra_data.write().unwrap() = extra_data;
}
/// Set the gas limit we wish to target when sealing a new block.
fn set_gas_floor_target(&self, target: U256) {
*self.gas_floor_target.write().unwrap() = target;
}
fn set_minimal_gas_price(&self, min_gas_price: U256) {
*self.min_gas_price.write().unwrap() = min_gas_price;
}
fn author(&self) -> Address {
*self.author.read().unwrap()
}
fn minimal_gas_price(&self) -> U256 {
*self.min_gas_price.read().unwrap()
}
fn extra_data(&self) -> Bytes {
self.extra_data.read().unwrap().clone()
}
fn gas_floor_target(&self) -> U256 {
*self.gas_floor_target.read().unwrap()
}
/// Imports transactions to transaction queue. /// Imports transactions to transaction queue.
fn import_transactions<T>(&self, transactions: Vec<SignedTransaction>, _fetch_account: T) -> Vec<Result<(), Error>> fn import_transactions<T>(&self, transactions: Vec<SignedTransaction>, _fetch_account: T) -> Vec<Result<(), Error>>
where T: Fn(&Address) -> AccountDetails { where T: Fn(&Address) -> AccountDetails {
@ -111,12 +153,4 @@ impl MinerService for TestMinerService {
fn submit_seal(&self, _chain: &BlockChainClient, _pow_hash: H256, _seal: Vec<Bytes>) -> Result<(), Error> { fn submit_seal(&self, _chain: &BlockChainClient, _pow_hash: H256, _seal: Vec<Bytes>) -> Result<(), Error> {
unimplemented!(); unimplemented!();
} }
fn extra_data(&self) -> Bytes {
vec![1, 2, 3, 4]
}
fn gas_floor_target(&self) -> U256 {
U256::from(12345)
}
} }


@ -20,17 +20,39 @@ use jsonrpc_core::*;
/// Ethcore-specific rpc interface. /// Ethcore-specific rpc interface.
pub trait Ethcore: Sized + Send + Sync + 'static { pub trait Ethcore: Sized + Send + Sync + 'static {
/// Sets new minimal gas price for mined blocks.
fn set_min_gas_price(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
/// Sets new gas floor target for mined blocks.
fn set_gas_floor_target(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
/// Sets new extra data for mined blocks.
fn set_extra_data(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
/// Sets new author for mined block.
fn set_author(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
/// Returns mining extra data. /// Returns mining extra data.
fn extra_data(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() } fn extra_data(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
/// Returns mining gas floor target. /// Returns mining gas floor target.
fn gas_floor_target(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() } fn gas_floor_target(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
/// Returns minimal gas price for transaction to be included in queue.
fn min_gas_price(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
/// Should be used to convert object to io delegate. /// Should be used to convert object to io delegate.
fn to_delegate(self) -> IoDelegate<Self> { fn to_delegate(self) -> IoDelegate<Self> {
let mut delegate = IoDelegate::new(Arc::new(self)); let mut delegate = IoDelegate::new(Arc::new(self));
delegate.add_method("ethcore_setMinGasPrice", Ethcore::set_min_gas_price);
delegate.add_method("ethcore_setGasFloorTarget", Ethcore::set_gas_floor_target);
delegate.add_method("ethcore_setExtraData", Ethcore::set_extra_data);
delegate.add_method("ethcore_setAuthor", Ethcore::set_author);
delegate.add_method("ethcore_extraData", Ethcore::extra_data); delegate.add_method("ethcore_extraData", Ethcore::extra_data);
delegate.add_method("ethcore_gasFloorTarget", Ethcore::gas_floor_target); delegate.add_method("ethcore_gasFloorTarget", Ethcore::gas_floor_target);
delegate.add_method("ethcore_minGasPrice", Ethcore::min_gas_price);
delegate delegate
} }
} }


@ -61,7 +61,7 @@ pub struct Filter {
impl Into<EthFilter> for Filter { impl Into<EthFilter> for Filter {
fn into(self) -> EthFilter { fn into(self) -> EthFilter {
EthFilter { EthFilter {
from_block: self.from_block.map_or_else(|| BlockId::Earliest, Into::into), from_block: self.from_block.map_or_else(|| BlockId::Latest, Into::into),
to_block: self.to_block.map_or_else(|| BlockId::Latest, Into::into), to_block: self.to_block.map_or_else(|| BlockId::Latest, Into::into),
address: self.address.and_then(|address| match address { address: self.address.and_then(|address| match address {
VariadicValue::Null => None, VariadicValue::Null => None,


@ -1,15 +1,17 @@
verbose=false verbose=false
max_width=150 max_width=1000
ideal_width=120 ideal_width=1000
tabs_spaces=4 tabs_spaces=4
fn_call_width=100 fn_call_width=1000
struct_lit_width=32
fn_arg_indent="Tabbed" fn_arg_indent="Tabbed"
single_line_if_else=true single_line_if_else=true
where_indent="Visual" where_indent="Visual"
where_trailing_comma=true where_trailing_comma=true
chain_base_indent="Inherit" chain_base_indent="Inherit"
chain_indent="Tabbed" chain_indent="Inherit"
reorder_imports=true reorder_imports=true
format_strings=false format_strings=false
chain_overflow_last=false
hard_tabs=true hard_tabs=true
wrap_match_arms=false wrap_match_arms=false


@ -10,7 +10,7 @@ authors = ["Ethcore <admin@ethcore.io"]
[dependencies] [dependencies]
ethcore-util = { path = "../util" } ethcore-util = { path = "../util" }
ethcore = { path = "../ethcore" } ethcore = { path = "../ethcore" }
clippy = { version = "0.0.61", optional = true} clippy = { version = "0.0.63", optional = true}
ethminer = { path = "../miner" } ethminer = { path = "../miner" }
log = "0.3" log = "0.3"
env_logger = "0.3" env_logger = "0.3"


@ -27,7 +27,7 @@ crossbeam = "0.2"
slab = "0.1" slab = "0.1"
sha3 = { path = "sha3" } sha3 = { path = "sha3" }
serde = "0.7.0" serde = "0.7.0"
clippy = { version = "0.0.61", optional = true} clippy = { version = "0.0.63", optional = true}
json-tests = { path = "json-tests" } json-tests = { path = "json-tests" }
igd = "0.4.2" igd = "0.4.2"
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }


@ -41,7 +41,7 @@ pub struct ArchiveDB {
// all keys must be at least 12 bytes // all keys must be at least 12 bytes
const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ]; const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ];
const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ]; const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ];
const DB_VERSION : u32 = 259; const DB_VERSION : u32 = 0x103;
impl ArchiveDB { impl ArchiveDB {
/// Create a new instance from file /// Create a new instance from file
@ -55,7 +55,7 @@ impl ArchiveDB {
if !backing.is_empty() { if !backing.is_empty() {
match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) { match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) {
Ok(Some(DB_VERSION)) => {}, Ok(Some(DB_VERSION)) => {},
v => panic!("Incompatible DB version, expected {}, got {:?}", DB_VERSION, v) v => panic!("Incompatible DB version, expected {}, got {:?}; to resolve, remove {} and restart.", DB_VERSION, v, path)
} }
} else { } else {
backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database"); backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database");
@ -168,6 +168,8 @@ impl JournalDB for ArchiveDB {
Ok((inserts + deletes) as u32) Ok((inserts + deletes) as u32)
} }
fn latest_era(&self) -> Option<u64> { self.latest_era }
fn state(&self, id: &H256) -> Option<Bytes> { fn state(&self, id: &H256) -> Option<Bytes> {
self.backing.get_by_prefix(&id.bytes()[0..12]).and_then(|b| Some(b.to_vec())) self.backing.get_by_prefix(&id.bytes()[0..12]).and_then(|b| Some(b.to_vec()))
} }


@ -70,7 +70,7 @@ pub struct EarlyMergeDB {
// all keys must be at least 12 bytes // all keys must be at least 12 bytes
const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ]; const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ];
const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ]; const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ];
const DB_VERSION : u32 = 3; const DB_VERSION : u32 = 0x003;
const PADDING : [u8; 10] = [ 0u8; 10 ]; const PADDING : [u8; 10] = [ 0u8; 10 ];
impl EarlyMergeDB { impl EarlyMergeDB {
@ -85,7 +85,7 @@ impl EarlyMergeDB {
if !backing.is_empty() { if !backing.is_empty() {
match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) { match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) {
Ok(Some(DB_VERSION)) => {}, Ok(Some(DB_VERSION)) => {},
v => panic!("Incompatible DB version, expected {}, got {:?}", DB_VERSION, v) v => panic!("Incompatible DB version, expected {}, got {:?}; to resolve, remove {} and restart.", DB_VERSION, v, path)
} }
} else { } else {
backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database"); backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database");
@ -333,6 +333,8 @@ impl JournalDB for EarlyMergeDB {
self.backing.get(&LATEST_ERA_KEY).expect("Low level database error").is_none() self.backing.get(&LATEST_ERA_KEY).expect("Low level database error").is_none()
} }
fn latest_era(&self) -> Option<u64> { self.latest_era }
fn mem_used(&self) -> usize { fn mem_used(&self) -> usize {
self.overlay.mem_used() + match self.refs { self.overlay.mem_used() + match self.refs {
Some(ref c) => c.read().unwrap().heap_size_of_children(), Some(ref c) => c.read().unwrap().heap_size_of_children(),
@ -340,7 +342,6 @@ impl JournalDB for EarlyMergeDB {
} }
} }
#[cfg_attr(feature="dev", allow(cyclomatic_complexity))] #[cfg_attr(feature="dev", allow(cyclomatic_complexity))]
fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> { fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> {
// journal format: // journal format:


@ -29,7 +29,7 @@ mod refcounteddb;
pub use self::traits::JournalDB; pub use self::traits::JournalDB;
/// A journal database algorithm. /// A journal database algorithm.
#[derive(Debug)] #[derive(Debug, Clone, Copy)]
pub enum Algorithm { pub enum Algorithm {
/// Keep all keys forever. /// Keep all keys forever.
Archive, Archive,


@ -95,7 +95,7 @@ impl Clone for OverlayRecentDB {
// all keys must be at least 12 bytes // all keys must be at least 12 bytes
const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ]; const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ];
const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ]; const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ];
const DB_VERSION : u32 = 0x200 + 3; const DB_VERSION : u32 = 0x203;
const PADDING : [u8; 10] = [ 0u8; 10 ]; const PADDING : [u8; 10] = [ 0u8; 10 ];
impl OverlayRecentDB { impl OverlayRecentDB {
@ -115,7 +115,7 @@ impl OverlayRecentDB {
if !backing.is_empty() { if !backing.is_empty() {
match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) { match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) {
Ok(Some(DB_VERSION)) => {} Ok(Some(DB_VERSION)) => {}
v => panic!("Incompatible DB version, expected {}, got {:?}", DB_VERSION, v) v => panic!("Incompatible DB version, expected {}, got {:?}; to resolve, remove {} and restart.", DB_VERSION, v, path)
} }
} else { } else {
backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database"); backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database");
@ -213,6 +213,8 @@ impl JournalDB for OverlayRecentDB {
self.backing.get(&LATEST_ERA_KEY).expect("Low level database error").is_none() self.backing.get(&LATEST_ERA_KEY).expect("Low level database error").is_none()
} }
fn latest_era(&self) -> Option<u64> { self.journal_overlay.read().unwrap().latest_era }
fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> { fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> {
// record new commit's details. // record new commit's details.
trace!("commit: #{} ({}), end era: {:?}", now, id, end); trace!("commit: #{} ({}), end era: {:?}", now, id, end);


@ -42,7 +42,7 @@ pub struct RefCountedDB {
const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ]; const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ];
const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ]; const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ];
const DB_VERSION : u32 = 512; const DB_VERSION : u32 = 0x200;
const PADDING : [u8; 10] = [ 0u8; 10 ]; const PADDING : [u8; 10] = [ 0u8; 10 ];
impl RefCountedDB { impl RefCountedDB {
@ -57,7 +57,7 @@ impl RefCountedDB {
if !backing.is_empty() { if !backing.is_empty() {
match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) { match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) {
Ok(Some(DB_VERSION)) => {}, Ok(Some(DB_VERSION)) => {},
v => panic!("Incompatible DB version, expected {}, got {:?}", DB_VERSION, v) v => panic!("Incompatible DB version, expected {}, got {:?}; to resolve, remove {} and restart.", DB_VERSION, v, path)
} }
} else { } else {
backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database"); backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database");
@ -112,6 +112,8 @@ impl JournalDB for RefCountedDB {
self.latest_era.is_none() self.latest_era.is_none()
} }
fn latest_era(&self) -> Option<u64> { self.latest_era }
fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> { fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> {
// journal format: // journal format:
// [era, 0] => [ id, [insert_0, ...], [remove_0, ...] ] // [era, 0] => [ id, [insert_0, ...], [remove_0, ...] ]
@ -220,6 +222,25 @@ mod tests {
assert!(!jdb.exists(&h)); assert!(!jdb.exists(&h));
} }
#[test]
fn latest_era_should_work() {
// history is 3
let mut jdb = RefCountedDB::new_temp();
assert_eq!(jdb.latest_era(), None);
let h = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert_eq!(jdb.latest_era(), Some(0));
jdb.remove(&h);
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert_eq!(jdb.latest_era(), Some(1));
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert_eq!(jdb.latest_era(), Some(2));
jdb.commit(3, &b"3".sha3(), Some((0, b"0".sha3()))).unwrap();
assert_eq!(jdb.latest_era(), Some(3));
jdb.commit(4, &b"4".sha3(), Some((1, b"1".sha3()))).unwrap();
assert_eq!(jdb.latest_era(), Some(4));
}
#[test] #[test]
fn complex() { fn complex() {
// history is 1 // history is 1


@ -31,6 +31,9 @@ pub trait JournalDB : HashDB + Send + Sync {
/// Check if this database has any commits /// Check if this database has any commits
fn is_empty(&self) -> bool; fn is_empty(&self) -> bool;
/// Get the latest era in the DB. None if there isn't yet any data in there.
fn latest_era(&self) -> Option<u64>;
/// Commit all recent insert operations and canonical historical commits' removals from the /// Commit all recent insert operations and canonical historical commits' removals from the
/// old era to the backing database, reverting any non-canonical historical commit's inserts. /// old era to the backing database, reverting any non-canonical historical commit's inserts.
fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError>; fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError>;


@ -19,7 +19,7 @@ parity-webapp = { git = "https://github.com/tomusdrw/parity-webapp.git" }
# List of apps # List of apps
parity-status = { git = "https://github.com/tomusdrw/parity-status.git", version = "0.1.5" } parity-status = { git = "https://github.com/tomusdrw/parity-status.git", version = "0.1.5" }
parity-wallet = { git = "https://github.com/tomusdrw/parity-wallet.git", optional = true } parity-wallet = { git = "https://github.com/tomusdrw/parity-wallet.git", optional = true }
clippy = { version = "0.0.61", optional = true} clippy = { version = "0.0.63", optional = true}
[features] [features]
default = ["parity-wallet"] default = ["parity-wallet"]