Merge branch 'master' into plain_hasher

This commit is contained in:
debris
2017-08-28 12:30:05 +02:00
94 changed files with 930 additions and 1030 deletions

View File

@@ -19,7 +19,6 @@ rust-crypto = "0.2.34"
elastic-array = "0.9"
rlp = { path = "rlp" }
heapsize = "0.4"
itertools = "0.5"
sha3 = { path = "sha3" }
clippy = { version = "0.0.103", optional = true}
ethcore-devtools = { path = "../devtools" }

View File

@@ -8,7 +8,7 @@ version = "0.1.3"
authors = ["Parity Technologies <admin@parity.io>"]
[dependencies]
bigint = "3.0"
bigint = "4.0"
rustc-hex = "1.0"
rand = "0.3.12"
libc = "0.2"

View File

@@ -37,9 +37,9 @@ struct BitVecJournal {
impl BitVecJournal {
pub fn new(size: usize) -> BitVecJournal {
let extra = if size % 8 > 0 { 1 } else { 0 };
let extra = if size % 64 > 0 { 1 } else { 0 };
BitVecJournal {
elems: vec![0u64; size / 8 + extra],
elems: vec![0u64; size / 64 + extra],
journal: HashSet::new(),
}
}

View File

@@ -15,7 +15,7 @@
#![feature(test)]
extern crate test;
extern crate bigint;
extern crate ethcore_bigint as bigint;
extern crate rlp;
use test::Bencher;

View File

@@ -0,0 +1,15 @@
[package]
name = "rlp_derive"
version = "0.1.0"
authors = ["debris <marek.kotewicz@gmail.com>"]
[lib]
name = "rlp_derive"
proc-macro = true
[dependencies]
syn = "0.11.11"
quote = "0.3.15"
[dev-dependencies]
rlp = { path = "../rlp" }

139
util/rlp_derive/src/de.rs Normal file
View File

@@ -0,0 +1,139 @@
use {syn, quote};
/// Token fragments used when generating field-decoding code, plus a flag
/// saying whether the generated call expects a positional index argument.
struct ParseQuotes {
    /// Tokens for decoding a single value (e.g. `rlp.val_at`).
    single: quote::Tokens,
    /// Tokens for decoding a list of values (e.g. `rlp.list_at`).
    list: quote::Tokens,
    /// `true` when the generated call takes a field index argument.
    takes_index: bool,
}

/// Quotes used by `RlpDecodable`: every field is read at its own list index.
fn decodable_parse_quotes() -> ParseQuotes {
    let single = quote! { rlp.val_at };
    let list = quote! { rlp.list_at };
    ParseQuotes { single: single, list: list, takes_index: true }
}

/// Quotes used by `RlpDecodableWrapper`: the whole rlp value is the field,
/// so no index is passed.
fn decodable_wrapper_parse_quotes() -> ParseQuotes {
    let single = quote! { rlp.as_val };
    let list = quote! { rlp.as_list };
    ParseQuotes { single: single, list: list, takes_index: false }
}
/// Generates an `rlp::Decodable` implementation for a struct, decoding each
/// field from its own index within the rlp list.
///
/// Panics when the derive target is not a struct, or is a unit struct.
pub fn impl_decodable(ast: &syn::DeriveInput) -> quote::Tokens {
    let variant_data = if let syn::Body::Struct(ref s) = ast.body {
        s
    } else {
        panic!("#[derive(RlpDecodable)] is only defined for structs.");
    };

    let field_stmts: Vec<_> = match *variant_data {
        syn::VariantData::Struct(ref fields) | syn::VariantData::Tuple(ref fields) => {
            fields.iter().enumerate().map(decodable_field_map).collect()
        },
        syn::VariantData::Unit => panic!("#[derive(RlpDecodable)] is not defined for Unit structs."),
    };

    let name = &ast.ident;
    let dummy_const = syn::Ident::new(format!("_IMPL_RLP_DECODABLE_FOR_{}", name));

    let impl_block = quote! {
        impl rlp::Decodable for #name {
            fn decode(rlp: &rlp::UntrustedRlp) -> Result<Self, rlp::DecoderError> {
                let result = #name {
                    #(#field_stmts)*
                };

                Ok(result)
            }
        }
    };

    // Emit inside a dummy const so the local `extern crate rlp` does not
    // leak into the namespace of the deriving crate.
    quote! {
        #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
        const #dummy_const: () = {
            extern crate rlp;
            #impl_block
        };
    }
}
/// Generates an `rlp::Decodable` implementation for a newtype-style struct
/// with exactly one field: the whole rlp value is decoded into that field,
/// with no enclosing list.
///
/// Panics when the derive target is not a struct with exactly one field.
pub fn impl_decodable_wrapper(ast: &syn::DeriveInput) -> quote::Tokens {
    let variant_data = if let syn::Body::Struct(ref s) = ast.body {
        s
    } else {
        panic!("#[derive(RlpDecodableWrapper)] is only defined for structs.");
    };

    let stmt = match *variant_data {
        syn::VariantData::Struct(ref fields) | syn::VariantData::Tuple(ref fields) => {
            if fields.len() != 1 {
                panic!("#[derive(RlpDecodableWrapper)] is only defined for structs with one field.")
            }
            let field = fields.first().expect("fields.len() == 1; qed");
            decodable_field(0, field, decodable_wrapper_parse_quotes())
        },
        syn::VariantData::Unit => panic!("#[derive(RlpDecodableWrapper)] is not defined for Unit structs."),
    };

    let name = &ast.ident;
    let dummy_const = syn::Ident::new(format!("_IMPL_RLP_DECODABLE_FOR_{}", name));

    let impl_block = quote! {
        impl rlp::Decodable for #name {
            fn decode(rlp: &rlp::UntrustedRlp) -> Result<Self, rlp::DecoderError> {
                let result = #name {
                    #stmt
                };

                Ok(result)
            }
        }
    };

    // Emit inside a dummy const so the local `extern crate rlp` does not
    // leak into the namespace of the deriving crate.
    quote! {
        #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
        const #dummy_const: () = {
            extern crate rlp;
            #impl_block
        };
    }
}
/// Adapter so `enumerate()` output maps directly to field-decoding tokens.
fn decodable_field_map(tuple: (usize, &syn::Field)) -> quote::Tokens {
    let (index, field) = tuple;
    decodable_field(index, field, decodable_parse_quotes())
}
fn decodable_field(index: usize, field: &syn::Field, quotes: ParseQuotes) -> quote::Tokens {
let ident = match field.ident {
Some(ref ident) => ident.to_string(),
None => index.to_string(),
};
let id = syn::Ident::new(ident);
let index = syn::Ident::new(index.to_string());
let single = quotes.single;
let list = quotes.list;
match field.ty {
syn::Ty::Path(_, ref path) => {
let ident = &path.segments.first().expect("there must be at least 1 segment").ident;
if &ident.to_string() == "Vec" {
if quotes.takes_index {
quote! { #id: #list(#index)?, }
} else {
quote! { #id: #list()?, }
}
} else {
if quotes.takes_index {
quote! { #id: #single(#index)?, }
} else {
quote! { #id: #single()?, }
}
}
},
_ => panic!("rlp_derive not supported"),
}
}

110
util/rlp_derive/src/en.rs Normal file
View File

@@ -0,0 +1,110 @@
use {syn, quote};
/// Generates an `rlp::Encodable` implementation for a struct: opens a list
/// of the struct's arity, then appends every field in declaration order.
///
/// Panics when the derive target is not a struct, or is a unit struct.
pub fn impl_encodable(ast: &syn::DeriveInput) -> quote::Tokens {
    let variant_data = if let syn::Body::Struct(ref s) = ast.body {
        s
    } else {
        panic!("#[derive(RlpEncodable)] is only defined for structs.");
    };

    let field_stmts: Vec<_> = match *variant_data {
        syn::VariantData::Struct(ref fields) | syn::VariantData::Tuple(ref fields) => {
            fields.iter().enumerate().map(encodable_field_map).collect()
        },
        syn::VariantData::Unit => panic!("#[derive(RlpEncodable)] is not defined for Unit structs."),
    };

    let name = &ast.ident;
    // Arity is known at derive time, so it is baked in as a literal.
    let stmts_len = syn::Ident::new(field_stmts.len().to_string());
    let dummy_const = syn::Ident::new(format!("_IMPL_RLP_ENCODABLE_FOR_{}", name));

    let impl_block = quote! {
        impl rlp::Encodable for #name {
            fn rlp_append(&self, stream: &mut rlp::RlpStream) {
                stream.begin_list(#stmts_len);
                #(#field_stmts)*
            }
        }
    };

    // Emit inside a dummy const so the local `extern crate rlp` does not
    // leak into the namespace of the deriving crate.
    quote! {
        #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
        const #dummy_const: () = {
            extern crate rlp;
            #impl_block
        };
    }
}
/// Generates an `rlp::Encodable` implementation for a newtype-style struct
/// with exactly one field: the field is appended directly, with no
/// enclosing list.
///
/// Panics when the derive target is not a struct with exactly one field.
pub fn impl_encodable_wrapper(ast: &syn::DeriveInput) -> quote::Tokens {
    let variant_data = if let syn::Body::Struct(ref s) = ast.body {
        s
    } else {
        panic!("#[derive(RlpEncodableWrapper)] is only defined for structs.");
    };

    let stmt = match *variant_data {
        syn::VariantData::Struct(ref fields) | syn::VariantData::Tuple(ref fields) => {
            if fields.len() != 1 {
                panic!("#[derive(RlpEncodableWrapper)] is only defined for structs with one field.")
            }
            let field = fields.first().expect("fields.len() == 1; qed");
            encodable_field(0, field)
        },
        syn::VariantData::Unit => panic!("#[derive(RlpEncodableWrapper)] is not defined for Unit structs."),
    };

    let name = &ast.ident;
    let dummy_const = syn::Ident::new(format!("_IMPL_RLP_ENCODABLE_FOR_{}", name));

    let impl_block = quote! {
        impl rlp::Encodable for #name {
            fn rlp_append(&self, stream: &mut rlp::RlpStream) {
                #stmt
            }
        }
    };

    // Emit inside a dummy const so the local `extern crate rlp` does not
    // leak into the namespace of the deriving crate.
    quote! {
        #[allow(non_upper_case_globals, unused_attributes, unused_qualifications)]
        const #dummy_const: () = {
            extern crate rlp;
            #impl_block
        };
    }
}
/// Adapter so `enumerate()` output maps directly to field-encoding tokens.
fn encodable_field_map(tuple: (usize, &syn::Field)) -> quote::Tokens {
    let (index, field) = tuple;
    encodable_field(index, field)
}
fn encodable_field(index: usize, field: &syn::Field) -> quote::Tokens {
let ident = match field.ident {
Some(ref ident) => ident.to_string(),
None => index.to_string(),
};
let id = syn::Ident::new(format!("self.{}", ident));
match field.ty {
syn::Ty::Path(_, ref path) => {
let top_segment = path.segments.first().expect("there must be at least 1 segment");
let ident = &top_segment.ident;
if &ident.to_string() == "Vec" {
let inner_ident = match top_segment.parameters {
syn::PathParameters::AngleBracketed(ref angle) => {
let ty = angle.types.first().expect("Vec has only one angle bracketed type; qed");
match *ty {
syn::Ty::Path(_, ref path) => &path.segments.first().expect("there must be at least 1 segment").ident,
_ => panic!("rlp_derive not supported"),
}
},
_ => unreachable!("Vec has only one angle bracketed type; qed"),
};
quote! { stream.append_list::<#inner_ident, _>(&#id); }
} else {
quote! { stream.append(&#id); }
}
},
_ => panic!("rlp_derive not supported"),
}
}

View File

@@ -0,0 +1,43 @@
extern crate proc_macro;
extern crate syn;
#[macro_use]
extern crate quote;
mod en;
mod de;
use proc_macro::TokenStream;
use en::{impl_encodable, impl_encodable_wrapper};
use de::{impl_decodable, impl_decodable_wrapper};
/// Derives `rlp::Encodable`: the struct is encoded as an rlp list of its
/// fields, in declaration order.
#[proc_macro_derive(RlpEncodable)]
pub fn encodable(input: TokenStream) -> TokenStream {
    let source = input.to_string();
    let ast = syn::parse_derive_input(&source).unwrap();
    impl_encodable(&ast).parse().unwrap()
}
/// Derives `rlp::Encodable` for a single-field wrapper struct: the field is
/// encoded directly, without an enclosing list.
#[proc_macro_derive(RlpEncodableWrapper)]
pub fn encodable_wrapper(input: TokenStream) -> TokenStream {
    let source = input.to_string();
    let ast = syn::parse_derive_input(&source).unwrap();
    impl_encodable_wrapper(&ast).parse().unwrap()
}
/// Derives `rlp::Decodable`: each field is decoded from its own index in
/// the rlp list.
#[proc_macro_derive(RlpDecodable)]
pub fn decodable(input: TokenStream) -> TokenStream {
    let source = input.to_string();
    let ast = syn::parse_derive_input(&source).unwrap();
    impl_decodable(&ast).parse().unwrap()
}
/// Derives `rlp::Decodable` for a single-field wrapper struct: the whole
/// rlp value is decoded into the field.
#[proc_macro_derive(RlpDecodableWrapper)]
pub fn decodable_wrapper(input: TokenStream) -> TokenStream {
    let source = input.to_string();
    let ast = syn::parse_derive_input(&source).unwrap();
    impl_decodable_wrapper(&ast).parse().unwrap()
}

View File

@@ -0,0 +1,44 @@
extern crate rlp;
#[macro_use]
extern crate rlp_derive;
use rlp::{encode, decode};
/// Named-field struct exercising the plain `RlpEncodable`/`RlpDecodable`
/// derives: encoded as an rlp list containing each field.
#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable)]
struct Foo {
    a: String,
}
/// Single-field struct exercising the `*Wrapper` derives: encoded as the
/// bare field value, with no enclosing list.
#[derive(Debug, PartialEq, RlpEncodableWrapper, RlpDecodableWrapper)]
struct FooWrapper {
    a: String,
}
#[test]
fn test_encode_foo() {
    // Round-trip a named-field struct through rlp.
    let foo = Foo { a: "cat".into() };

    // 0xc4 = list header with 4-byte payload, 0x83 = 3-byte string.
    let expected = vec![0xc4, 0x83, b'c', b'a', b't'];
    assert_eq!(encode(&foo).into_vec(), expected);

    let decoded = decode(&expected);
    assert_eq!(foo, decoded);
}
#[test]
fn test_encode_foo_wrapper() {
    // Round-trip a wrapper struct through rlp; no list header is emitted.
    let foo = FooWrapper { a: "cat".into() };

    // 0x83 = 3-byte string, encoded bare (no enclosing list).
    let expected = vec![0x83, b'c', b'a', b't'];
    assert_eq!(encode(&foo).into_vec(), expected);

    let decoded = decode(&expected);
    assert_eq!(foo, decoded);
}

View File

@@ -21,7 +21,6 @@ use std::collections::HashMap;
use std::sync::Arc;
use parking_lot::RwLock;
use heapsize::HeapSizeOf;
use itertools::Itertools;
use rlp::*;
use hashdb::*;
use memorydb::*;
@@ -432,7 +431,9 @@ impl JournalDB for EarlyMergeDB {
// - we write the key into our journal for this block;
r.begin_list(inserts.len());
inserts.iter().foreach(|&(k, _)| {r.append(&k);});
for &(k, _) in &inserts {
r.append(&k);
}
r.append_list(&removes);
Self::insert_keys(&inserts, &*self.backing, self.column, &mut refs, batch, trace);

View File

@@ -493,6 +493,7 @@ impl Database {
}
opts.set_parsed_options(&format!("max_total_wal_size={}", 64 * 1024 * 1024))?;
opts.set_parsed_options("verify_checksums_in_compaction=0")?;
opts.set_parsed_options("keep_log_file_num=1")?;
opts.set_max_open_files(config.max_open_files);
opts.create_if_missing(true);
opts.set_use_fsync(false);

View File

@@ -106,7 +106,6 @@ extern crate rlp;
extern crate regex;
extern crate lru_cache;
extern crate heapsize;
extern crate itertools;
extern crate ethcore_logger;
#[macro_use]
@@ -153,7 +152,6 @@ pub use bigint::hash;
pub use ansi_term::{Colour, Style};
pub use heapsize::HeapSizeOf;
pub use itertools::Itertools;
pub use parking_lot::{Condvar, Mutex, MutexGuard, RwLock, RwLockReadGuard, RwLockWriteGuard};
/// 160-bit integer representing account address

View File

@@ -15,7 +15,6 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::fmt;
use itertools::Itertools;
use hashdb::*;
use nibbleslice::*;
use rlp::*;
@@ -276,9 +275,15 @@ impl<'a> TrieDBIterator<'a> {
/// Descend into a payload.
fn descend(&mut self, d: &[u8]) -> super::Result<()> {
let node = Node::decoded(&self.db.get_raw_or_lookup(d)?).into();
Ok(self.descend_into_node(node))
}
/// Descend into a payload.
fn descend_into_node(&mut self, node: OwnedNode) {
self.trail.push(Crumb {
status: Status::Entering,
node: Node::decoded(&self.db.get_raw_or_lookup(d)?).into(),
node: node,
});
match &self.trail.last().expect("just pushed item; qed").node {
&OwnedNode::Leaf(ref n, _) | &OwnedNode::Extension(ref n, _) => {
@@ -286,14 +291,20 @@ impl<'a> TrieDBIterator<'a> {
},
_ => {}
}
Ok(())
}
/// The present key.
fn key(&self) -> Bytes {
// collapse the key_nibbles down to bytes.
self.key_nibbles.iter().step(2).zip(self.key_nibbles.iter().skip(1).step(2)).map(|(h, l)| h * 16 + l).collect()
let nibbles = &self.key_nibbles;
let mut i = 1;
let mut result = Bytes::with_capacity(nibbles.len() / 2);
let len = nibbles.len();
while i < len {
result.push(nibbles[i - 1] * 16 + nibbles[i]);
i += 2;
}
result
}
}
@@ -311,52 +322,67 @@ impl<'a> Iterator for TrieDBIterator<'a> {
type Item = TrieItem<'a>;
fn next(&mut self) -> Option<Self::Item> {
enum IterStep {
Continue,
PopTrail,
Descend(super::Result<DBValue>),
}
loop {
let b = match self.trail.last_mut() {
Some(mut b) => { b.increment(); b.clone() },
None => return None,
let iter_step = {
match self.trail.last_mut() {
Some(b) => { b.increment(); },
None => return None,
}
let b = self.trail.last().expect("trail.last_mut().is_some(); qed");
match (b.status.clone(), &b.node) {
(Status::Exiting, n) => {
match *n {
OwnedNode::Leaf(ref n, _) | OwnedNode::Extension(ref n, _) => {
let l = self.key_nibbles.len();
self.key_nibbles.truncate(l - n.len());
},
OwnedNode::Branch(_, _) => { self.key_nibbles.pop(); },
_ => {}
}
IterStep::PopTrail
},
(Status::At, &OwnedNode::Leaf(_, ref v)) | (Status::At, &OwnedNode::Branch(_, Some(ref v))) => {
return Some(Ok((self.key(), v.clone())));
},
(Status::At, &OwnedNode::Extension(_, ref d)) => IterStep::Descend(self.db.get_raw_or_lookup(&*d)),
(Status::At, &OwnedNode::Branch(_, _)) => IterStep::Continue,
(Status::AtChild(i), &OwnedNode::Branch(ref children, _)) if children[i].len() > 0 => {
match i {
0 => self.key_nibbles.push(0),
i => *self.key_nibbles.last_mut()
.expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8,
}
IterStep::Descend(self.db.get_raw_or_lookup(&*children[i]))
},
(Status::AtChild(i), &OwnedNode::Branch(_, _)) => {
if i == 0 {
self.key_nibbles.push(0);
}
IterStep::Continue
},
_ => panic!() // Should never see Entering or AtChild without a Branch here.
}
};
match (b.status, b.node) {
(Status::Exiting, n) => {
match n {
OwnedNode::Leaf(n, _) | OwnedNode::Extension(n, _) => {
let l = self.key_nibbles.len();
self.key_nibbles.truncate(l - n.len());
},
OwnedNode::Branch(_, _) => { self.key_nibbles.pop(); },
_ => {}
}
match iter_step {
IterStep::PopTrail => {
self.trail.pop();
// continue
},
(Status::At, OwnedNode::Leaf(_, v)) | (Status::At, OwnedNode::Branch(_, Some(v))) => {
return Some(Ok((self.key(), v)));
IterStep::Descend(Ok(d)) => {
self.descend_into_node(Node::decoded(&d).into())
},
(Status::At, OwnedNode::Extension(_, d)) => {
if let Err(e) = self.descend(&*d) {
return Some(Err(e));
}
// continue
},
(Status::At, OwnedNode::Branch(_, _)) => {},
(Status::AtChild(i), OwnedNode::Branch(ref children, _)) if children[i].len() > 0 => {
match i {
0 => self.key_nibbles.push(0),
i => *self.key_nibbles.last_mut()
.expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8,
}
if let Err(e) = self.descend(&*children[i]) {
return Some(Err(e));
}
// continue
},
(Status::AtChild(i), OwnedNode::Branch(_, _)) => {
if i == 0 {
self.key_nibbles.push(0);
}
// continue
},
_ => panic!() // Should never see Entering or AtChild without a Branch here.
IterStep::Descend(Err(e)) => {
return Some(Err(e))
}
IterStep::Continue => {},
}
}
}