Merge branch 'pv63-state' into pv63-receipts

commit bd9cfb4ee5
Author: Nikolay Volf
Date: 2016-03-12 18:52:37 +01:00
34 changed files with 2156 additions and 733 deletions

Cargo.lock

@ -2,7 +2,7 @@
name = "parity" name = "parity"
version = "0.9.99" version = "0.9.99"
dependencies = [ dependencies = [
"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
"ctrlc 1.1.1 (git+https://github.com/tomusdrw/rust-ctrlc.git)", "ctrlc 1.1.1 (git+https://github.com/tomusdrw/rust-ctrlc.git)",
"daemonize 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "daemonize 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"docopt 0.6.78 (registry+https://github.com/rust-lang/crates.io-index)", "docopt 0.6.78 (registry+https://github.com/rust-lang/crates.io-index)",
@ -31,7 +31,7 @@ dependencies = [
[[package]] [[package]]
name = "arrayvec" name = "arrayvec"
version = "0.3.15" version = "0.3.16"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"nodrop 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "nodrop 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -43,14 +43,14 @@ name = "aster"
version = "0.13.1" version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"syntex_syntax 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_syntax 0.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "bigint" name = "bigint"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"arrayvec 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
@ -65,7 +65,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "bitflags" name = "bitflags"
version = "0.4.0" version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -85,7 +85,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "chrono" name = "chrono"
version = "0.2.19" version = "0.2.20"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"num 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", "num 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)",
@ -94,7 +94,7 @@ dependencies = [
[[package]] [[package]]
name = "clippy" name = "clippy"
version = "0.0.44" version = "0.0.49"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"regex-syntax 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "regex-syntax 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
@ -117,7 +117,7 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)", "time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
"url 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", "url 0.5.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -136,7 +136,7 @@ version = "1.1.1"
source = "git+https://github.com/tomusdrw/rust-ctrlc.git#f4927770f89eca80ec250911eea3adcbf579ac48" source = "git+https://github.com/tomusdrw/rust-ctrlc.git#f4927770f89eca80ec250911eea3adcbf579ac48"
dependencies = [ dependencies = [
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -145,7 +145,7 @@ name = "daemonize"
version = "0.2.1" version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -161,7 +161,7 @@ name = "docopt"
version = "0.6.78" version = "0.6.78"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"regex 0.1.54 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.55 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
"strsim 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "strsim 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -177,7 +177,7 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.54 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.55 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -185,7 +185,7 @@ name = "eth-secp256k1"
version = "0.5.4" version = "0.5.4"
source = "git+https://github.com/ethcore/rust-secp256k1#283a0677d8327536be58a87e0494d7e0e7b1d1d8" source = "git+https://github.com/ethcore/rust-secp256k1#283a0677d8327536be58a87e0494d7e0e7b1d1d8"
dependencies = [ dependencies = [
"arrayvec 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"gcc 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
@ -207,7 +207,7 @@ dependencies = [
name = "ethcore" name = "ethcore"
version = "0.9.99" version = "0.9.99"
dependencies = [ dependencies = [
"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 0.9.99", "ethash 0.9.99",
@ -233,7 +233,7 @@ dependencies = [
name = "ethcore-rpc" name = "ethcore-rpc"
version = "0.9.99" version = "0.9.99"
dependencies = [ dependencies = [
"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
"ethash 0.9.99", "ethash 0.9.99",
"ethcore 0.9.99", "ethcore 0.9.99",
"ethcore-util 0.9.99", "ethcore-util 0.9.99",
@ -253,10 +253,10 @@ dependencies = [
name = "ethcore-util" name = "ethcore-util"
version = "0.9.99" version = "0.9.99"
dependencies = [ dependencies = [
"arrayvec 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)", "arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
"bigint 0.1.0", "bigint 0.1.0",
"chrono 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", "chrono 0.2.20 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "crossbeam 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -264,14 +264,14 @@ dependencies = [
"ethcore-devtools 0.9.99", "ethcore-devtools 0.9.99",
"heapsize 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"igd 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "igd 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.4.10 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)",
"json-tests 0.1.0", "json-tests 0.1.0",
"lazy_static 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"mio 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rocksdb 0.4.1 (git+https://github.com/arkpar/rust-rocksdb.git)", "rocksdb 0.4.2 (git+https://github.com/arkpar/rust-rocksdb.git)",
"rust-crypto 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
@ -288,7 +288,7 @@ dependencies = [
name = "ethsync" name = "ethsync"
version = "0.9.99" version = "0.9.99"
dependencies = [ dependencies = [
"clippy 0.0.44 (registry+https://github.com/rust-lang/crates.io-index)", "clippy 0.0.49 (registry+https://github.com/rust-lang/crates.io-index)",
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore 0.9.99", "ethcore 0.9.99",
"ethcore-util 0.9.99", "ethcore-util 0.9.99",
@ -304,7 +304,7 @@ dependencies = [
name = "fdlimit" name = "fdlimit"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -314,7 +314,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "glob" name = "glob"
version = "0.2.10" version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -328,7 +328,7 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.54 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.55 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -381,7 +381,7 @@ dependencies = [
"traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"unicase 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicase 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"url 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)", "url 0.5.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -391,21 +391,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"hyper 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 0.1.54 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.1.55 (registry+https://github.com/rust-lang/crates.io-index)",
"xml-rs 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)", "xml-rs 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)",
"xmltree 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "xmltree 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "itertools" name = "itertools"
version = "0.4.10" version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "json-tests" name = "json-tests"
version = "0.1.0" version = "0.1.0"
dependencies = [ dependencies = [
"glob 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", "glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -461,16 +461,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "libc" name = "libc"
version = "0.2.7" version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "librocksdb-sys" name = "librocksdb-sys"
version = "0.2.1" version = "0.2.2"
source = "git+https://github.com/arkpar/rust-rocksdb.git#2156621f583bda95c1c07e89e79e4019f75158ee" source = "git+https://github.com/arkpar/rust-rocksdb.git#a4f89fea20ee3ae92b692df65d56426a5c0b6fd5"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"pkg-config 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -478,7 +477,7 @@ name = "log"
version = "0.3.5" version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -491,7 +490,7 @@ name = "memchr"
version = "0.1.10" version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -537,7 +536,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -561,7 +560,7 @@ dependencies = [
[[package]] [[package]]
name = "nom" name = "nom"
version = "1.2.1" version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
@ -578,7 +577,7 @@ name = "num_cpus"
version = "0.2.11" version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -594,11 +593,6 @@ name = "odds"
version = "0.2.12" version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "pkg-config"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "primal" name = "primal"
version = "0.2.3" version = "0.2.3"
@ -645,7 +639,7 @@ name = "quasi"
version = "0.7.0" version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"syntex_syntax 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_syntax 0.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -655,7 +649,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"aster 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", "aster 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)",
"syntex 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)",
"syntex_syntax 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_syntax 0.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -663,7 +657,7 @@ name = "rand"
version = "0.3.14" version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -678,7 +672,7 @@ dependencies = [
[[package]] [[package]]
name = "regex" name = "regex"
version = "0.1.54" version = "0.1.55"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"aho-corasick 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", "aho-corasick 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -694,11 +688,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "rocksdb" name = "rocksdb"
version = "0.4.1" version = "0.4.2"
source = "git+https://github.com/arkpar/rust-rocksdb.git#2156621f583bda95c1c07e89e79e4019f75158ee" source = "git+https://github.com/arkpar/rust-rocksdb.git#a4f89fea20ee3ae92b692df65d56426a5c0b6fd5"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"librocksdb-sys 0.2.1 (git+https://github.com/arkpar/rust-rocksdb.git)", "librocksdb-sys 0.2.2 (git+https://github.com/arkpar/rust-rocksdb.git)",
] ]
[[package]] [[package]]
@ -707,7 +701,7 @@ version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"termios 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "termios 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -747,7 +741,7 @@ name = "semver"
version = "0.2.3" version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"nom 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "nom 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -772,7 +766,7 @@ dependencies = [
"quasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "quasi 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"quasi_codegen 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "quasi_codegen 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"syntex 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)",
"syntex_syntax 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_syntax 0.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -815,16 +809,16 @@ name = "syntex"
version = "0.29.0" version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"syntex_syntax 0.29.0 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_syntax 0.29.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "syntex_syntax" name = "syntex_syntax"
version = "0.29.0" version = "0.29.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
"term 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
@ -850,7 +844,7 @@ name = "termios"
version = "0.2.2" version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -859,7 +853,7 @@ version = "0.1.34"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -924,7 +918,7 @@ dependencies = [
[[package]] [[package]]
name = "url" name = "url"
version = "0.5.5" version = "0.5.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -981,7 +975,7 @@ name = "xml-rs"
version = "0.1.26" version = "0.1.26"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]


@ -19,7 +19,7 @@ ctrlc = { git = "https://github.com/tomusdrw/rust-ctrlc.git" }
fdlimit = { path = "util/fdlimit" } fdlimit = { path = "util/fdlimit" }
daemonize = "0.2" daemonize = "0.2"
number_prefix = "0.2" number_prefix = "0.2"
clippy = { version = "0.0.44", optional = true } clippy = { version = "0.0.49", optional = true }
ethcore = { path = "ethcore" } ethcore = { path = "ethcore" }
ethcore-util = { path = "util" } ethcore-util = { path = "util" }
ethsync = { path = "sync" } ethsync = { path = "sync" }


@ -17,7 +17,7 @@ ethcore-util = { path = "../util" }
evmjit = { path = "../evmjit", optional = true } evmjit = { path = "../evmjit", optional = true }
ethash = { path = "../ethash" } ethash = { path = "../ethash" }
num_cpus = "0.2" num_cpus = "0.2"
clippy = { version = "0.0.44", optional = true } clippy = { version = "0.0.49", optional = true }
crossbeam = "0.1.5" crossbeam = "0.1.5"
lazy_static = "0.1" lazy_static = "0.1"
ethcore-devtools = { path = "../devtools" } ethcore-devtools = { path = "../devtools" }


@ -13,17 +13,14 @@ pub struct AccountDB<'db> {
#[inline] #[inline]
fn combine_key<'a>(address: &'a H256, key: &'a H256) -> H256 { fn combine_key<'a>(address: &'a H256, key: &'a H256) -> H256 {
let mut addr_hash = address.sha3(); address ^ key
// preserve 96 bits of original key for db lookup
addr_hash[0..12].clone_from_slice(&[0u8; 12]);
&addr_hash ^ key
} }
impl<'db> AccountDB<'db> { impl<'db> AccountDB<'db> {
pub fn new(db: &'db HashDB, address: &Address) -> AccountDB<'db> { pub fn new(db: &'db HashDB, address: &Address) -> AccountDB<'db> {
AccountDB { AccountDB {
db: db, db: db,
address: x!(address.clone()), address: x!(address),
} }
} }
} }
@ -70,7 +67,7 @@ impl<'db> AccountDBMut<'db> {
pub fn new(db: &'db mut HashDB, address: &Address) -> AccountDBMut<'db> { pub fn new(db: &'db mut HashDB, address: &Address) -> AccountDBMut<'db> {
AccountDBMut { AccountDBMut {
db: db, db: db,
address: x!(address.clone()), address: x!(address),
} }
} }
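
The AccountDB hunk above replaces the old `combine_key` (which re-hashed the address and preserved 96 bits of the original key) with a plain XOR of the address value against the storage key. A minimal sketch of that XOR namespacing, using plain 32-byte arrays in place of ethcore-util's `H256` purely to keep the example self-contained:

```rust
/// Sketch only: `[u8; 32]` stands in for H256; the real code XORs H256 values directly.
fn combine_key(address_hash: &[u8; 32], key: &[u8; 32]) -> [u8; 32] {
    let mut out = [0u8; 32];
    for i in 0..32 {
        // XOR gives every account its own keyspace inside the shared backing DB
        // without an extra hashing step.
        out[i] = address_hash[i] ^ key[i];
    }
    out
}

fn main() {
    let addr = [0xabu8; 32];
    let key = [0x11u8; 32];
    let combined = combine_key(&addr, &key);
    // XOR is its own inverse, so the original key byte is recoverable.
    assert_eq!(key[0], combined[0] ^ addr[0]);
    println!("first combined bytes: {:?}", &combined[..4]);
}
```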


@ -116,7 +116,7 @@ pub struct Client<V = CanonVerifier> where V: Verifier {
} }
const HISTORY: u64 = 1200; const HISTORY: u64 = 1200;
const CLIENT_DB_VER_STR: &'static str = "5.1"; const CLIENT_DB_VER_STR: &'static str = "5.3";
impl Client<CanonVerifier> { impl Client<CanonVerifier> {
/// Create a new client with given spec and DB path. /// Create a new client with given spec and DB path.


@ -215,6 +215,12 @@ pub struct BlocksBlooms {
pub blooms: [H2048; 16], pub blooms: [H2048; 16],
} }
impl Default for BlocksBlooms {
fn default() -> Self {
BlocksBlooms::new()
}
}
impl BlocksBlooms { impl BlocksBlooms {
pub fn new() -> Self { pub fn new() -> Self {
BlocksBlooms { blooms: unsafe { ::std::mem::zeroed() }} BlocksBlooms { blooms: unsafe { ::std::mem::zeroed() }}
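
This commit adds `Default` implementations that simply forward to the existing `new()` constructors — here for `BlocksBlooms`, and further down for `Substate` and `TransactionQueue`. The motivation is presumably the newer clippy's `new_without_default` lint (my assumption; the diff itself only shows the impls). A minimal sketch of the pattern:

```rust
// Hypothetical type, shown only to illustrate the Default-delegates-to-new pattern.
#[derive(Debug, PartialEq)]
struct Counter {
    value: u64,
}

impl Counter {
    pub fn new() -> Self {
        Counter { value: 0 }
    }
}

impl Default for Counter {
    fn default() -> Self {
        // Delegating keeps a single source of truth for the initial state.
        Counter::new()
    }
}

fn main() {
    assert_eq!(Counter::new(), Counter::default());
}
```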


@ -25,6 +25,8 @@
#![cfg_attr(feature="dev", allow(match_bool))] #![cfg_attr(feature="dev", allow(match_bool))]
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref. // Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
#![cfg_attr(feature="dev", allow(clone_on_copy))] #![cfg_attr(feature="dev", allow(clone_on_copy))]
// In most cases it expresses function flow better
#![cfg_attr(feature="dev", allow(if_not_else))]
//! Ethcore library //! Ethcore library
//! //!


@ -31,6 +31,12 @@ pub struct Substate {
pub contracts_created: Vec<Address> pub contracts_created: Vec<Address>
} }
impl Default for Substate {
fn default() -> Self {
Substate::new()
}
}
impl Substate { impl Substate {
/// Creates new substate. /// Creates new substate.
pub fn new() -> Self { pub fn new() -> Self {


@ -81,10 +81,9 @@ Protocol Options:
--testnet Equivalent to --chain testnet (geth-compatible). --testnet Equivalent to --chain testnet (geth-compatible).
--networkid INDEX Override the network identifier from the chain we are on. --networkid INDEX Override the network identifier from the chain we are on.
--pruning METHOD Configure pruning of the state/storage trie. METHOD may be one of: archive, --pruning METHOD Configure pruning of the state/storage trie. METHOD may be one of: archive,
light (experimental) [default: archive]. light (experimental), fast (experimental) [default: archive].
-d --datadir PATH Specify the database & configuration directory path [default: $HOME/.parity] -d --datadir PATH Specify the database & configuration directory path [default: $HOME/.parity]
--db-path PATH Specify the database & configuration directory path [default: $HOME/.parity] --db-path PATH Specify the database & configuration directory path [default: $HOME/.parity]
--pruning Client should prune the state/storage trie.
--keys-path PATH Specify the path for JSON key files to be found [default: $HOME/.web3/keys] --keys-path PATH Specify the path for JSON key files to be found [default: $HOME/.web3/keys]
--identity NAME Specify your node's name. --identity NAME Specify your node's name.
@ -104,7 +103,8 @@ API and Console Options:
--jsonrpc-port PORT Specify the port portion of the JSONRPC API server [default: 8545]. --jsonrpc-port PORT Specify the port portion of the JSONRPC API server [default: 8545].
--jsonrpc-cors URL Specify CORS header for JSON-RPC API responses [default: null]. --jsonrpc-cors URL Specify CORS header for JSON-RPC API responses [default: null].
--jsonrpc-apis APIS Specify the APIs available through the JSONRPC interface. APIS is a comma-delimited --jsonrpc-apis APIS Specify the APIs available through the JSONRPC interface. APIS is a comma-delimited
list of API name. Possible names are web3, eth and net. [default: web3,eth,net]. list of API name. Possible name are web3, eth and net. [default: web3,eth,net,personal].
--rpc Equivalent to --jsonrpc (geth-compatible). --rpc Equivalent to --jsonrpc (geth-compatible).
--rpcaddr HOST Equivalent to --jsonrpc-addr HOST (geth-compatible). --rpcaddr HOST Equivalent to --jsonrpc-addr HOST (geth-compatible).
--rpcport PORT Equivalent to --jsonrpc-port PORT (geth-compatible). --rpcport PORT Equivalent to --jsonrpc-port PORT (geth-compatible).
@ -231,6 +231,7 @@ fn setup_rpc_server(client: Arc<Client>, sync: Arc<EthSync>, secret_store: Arc<A
server.add_delegate(EthClient::new(&client, &sync, &secret_store).to_delegate()); server.add_delegate(EthClient::new(&client, &sync, &secret_store).to_delegate());
server.add_delegate(EthFilterClient::new(&client).to_delegate()); server.add_delegate(EthFilterClient::new(&client).to_delegate());
} }
"personal" => server.add_delegate(PersonalClient::new(&secret_store).to_delegate()),
_ => { _ => {
die!("{}: Invalid API name to be enabled.", api); die!("{}: Invalid API name to be enabled.", api);
} }
@ -423,7 +424,7 @@ impl Configuration {
"" => journaldb::Algorithm::Archive, "" => journaldb::Algorithm::Archive,
"archive" => journaldb::Algorithm::Archive, "archive" => journaldb::Algorithm::Archive,
"pruned" => journaldb::Algorithm::EarlyMerge, "pruned" => journaldb::Algorithm::EarlyMerge,
// "fast" => journaldb::Algorithm::OverlayRecent, // TODO: @arkpar uncomment this once option 2 is merged. "fast" => journaldb::Algorithm::OverlayRecent,
// "slow" => journaldb::Algorithm::RefCounted, // TODO: @gavofyork uncomment this once ref-count algo is merged. // "slow" => journaldb::Algorithm::RefCounted, // TODO: @gavofyork uncomment this once ref-count algo is merged.
_ => { die!("Invalid pruning method given."); } _ => { die!("Invalid pruning method given."); }
}; };


@ -18,7 +18,7 @@ ethcore-util = { path = "../util" }
ethcore = { path = "../ethcore" } ethcore = { path = "../ethcore" }
ethash = { path = "../ethash" } ethash = { path = "../ethash" }
ethsync = { path = "../sync" } ethsync = { path = "../sync" }
clippy = { version = "0.0.44", optional = true } clippy = { version = "0.0.49", optional = true }
rustc-serialize = "0.3" rustc-serialize = "0.3"
transient-hashmap = "0.1" transient-hashmap = "0.1"
serde_macros = { version = "0.7.0", optional = true } serde_macros = { version = "0.7.0", optional = true }


@ -22,20 +22,20 @@ use util::keys::store::*;
use util::Address; use util::Address;
/// Account management (personal) rpc implementation. /// Account management (personal) rpc implementation.
pub struct PersonalClient { pub struct PersonalClient<A> where A: AccountProvider {
accounts: Weak<AccountProvider>, accounts: Weak<A>,
} }
impl PersonalClient { impl<A> PersonalClient<A> where A: AccountProvider {
/// Creates new PersonalClient /// Creates new PersonalClient
pub fn new(store: &Arc<AccountProvider>) -> Self { pub fn new(store: &Arc<A>) -> Self {
PersonalClient { PersonalClient {
accounts: Arc::downgrade(store), accounts: Arc::downgrade(store),
} }
} }
} }
impl Personal for PersonalClient { impl<A> Personal for PersonalClient<A> where A: AccountProvider + 'static {
fn accounts(&self, _: Params) -> Result<Value, Error> { fn accounts(&self, _: Params) -> Result<Value, Error> {
let store = take_weak!(self.accounts); let store = take_weak!(self.accounts);
match store.accounts() { match store.accounts() {
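
The hunk above makes `PersonalClient` generic over its `AccountProvider` and keeps only a `Weak` reference to the store. A hedged sketch of why that shape is useful — the trait body, `MockAccounts`, and method names below are hypothetical stand-ins, not the real `util::keys::store` API:

```rust
use std::sync::{Arc, Weak};

// Toy trait standing in for AccountProvider; the generic parameter lets tests
// inject a mock store instead of the concrete secret store.
trait AccountProvider {
    fn accounts(&self) -> Vec<String>;
}

struct MockAccounts;

impl AccountProvider for MockAccounts {
    fn accounts(&self) -> Vec<String> {
        vec!["0xdeadbeef".to_owned()]
    }
}

struct PersonalClient<A: AccountProvider> {
    // A Weak reference avoids keeping the store alive after the rest of the
    // node has dropped it, mirroring the field in the diff above.
    accounts: Weak<A>,
}

impl<A: AccountProvider> PersonalClient<A> {
    fn new(store: &Arc<A>) -> Self {
        PersonalClient { accounts: Arc::downgrade(store) }
    }

    fn list_accounts(&self) -> Option<Vec<String>> {
        self.accounts.upgrade().map(|store| store.accounts())
    }
}

fn main() {
    let store = Arc::new(MockAccounts);
    let client = PersonalClient::new(&store);
    assert_eq!(client.list_accounts().unwrap().len(), 1);
}
```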


@ -17,4 +17,5 @@
//!TODO: load custom blockchain state and test //!TODO: load custom blockchain state and test
mod net; mod net;
mod web3;
mod helpers; mod helpers;


@ -36,7 +36,7 @@ fn rpc_net_version() {
let request = r#"{"jsonrpc": "2.0", "method": "net_version", "params": [], "id": 1}"#; let request = r#"{"jsonrpc": "2.0", "method": "net_version", "params": [], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":"65","id":1}"#; let response = r#"{"jsonrpc":"2.0","result":"65","id":1}"#;
assert_eq!(io.handle_request(request), Some(response.to_string())); assert_eq!(io.handle_request(request), Some(response.to_owned()));
} }
#[test] #[test]
@ -49,7 +49,7 @@ fn rpc_net_peer_count() {
let request = r#"{"jsonrpc": "2.0", "method": "net_peerCount", "params": [], "id": 1}"#; let request = r#"{"jsonrpc": "2.0", "method": "net_peerCount", "params": [], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":"0x78","id":1}"#; let response = r#"{"jsonrpc":"2.0","result":"0x78","id":1}"#;
assert_eq!(io.handle_request(request), Some(response.to_string())); assert_eq!(io.handle_request(request), Some(response.to_owned()));
} }
#[test] #[test]
@ -62,5 +62,5 @@ fn rpc_net_listening() {
let request = r#"{"jsonrpc": "2.0", "method": "net_listening", "params": [], "id": 1}"#; let request = r#"{"jsonrpc": "2.0", "method": "net_listening", "params": [], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#; let response = r#"{"jsonrpc":"2.0","result":true,"id":1}"#;
assert_eq!(io.handle_request(request), Some(response.to_string())); assert_eq!(io.handle_request(request), Some(response.to_owned()));
} }

rpc/src/v1/tests/web3.rs

@ -0,0 +1,33 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use jsonrpc_core::IoHandler;
use util::version;
use v1::{Web3, Web3Client};
#[test]
fn rpc_web3_version() {
let web3 = Web3Client::new().to_delegate();
let io = IoHandler::new();
io.add_delegate(web3);
let v = version().to_owned().replace("Parity/", "Parity//");
let request = r#"{"jsonrpc": "2.0", "method": "web3_clientVersion", "params": [], "id": 1}"#;
let response = r#"{"jsonrpc":"2.0","result":"VER","id":1}"#.to_owned().replace("VER", v.as_ref());
assert_eq!(io.handle_request(request), Some(response));
}


@ -10,7 +10,7 @@ authors = ["Ethcore <admin@ethcore.io"]
[dependencies] [dependencies]
ethcore-util = { path = "../util" } ethcore-util = { path = "../util" }
ethcore = { path = "../ethcore" } ethcore = { path = "../ethcore" }
clippy = { version = "0.0.44", optional = true } clippy = { version = "0.0.49", optional = true }
log = "0.3" log = "0.3"
env_logger = "0.3" env_logger = "0.3"
time = "0.1.34" time = "0.1.34"


@ -17,9 +17,10 @@
#![warn(missing_docs)] #![warn(missing_docs)]
#![cfg_attr(feature="dev", feature(plugin))] #![cfg_attr(feature="dev", feature(plugin))]
#![cfg_attr(feature="dev", plugin(clippy))] #![cfg_attr(feature="dev", plugin(clippy))]
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref. // Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
#![cfg_attr(feature="dev", allow(clone_on_copy))] #![cfg_attr(feature="dev", allow(clone_on_copy))]
// In most cases it expresses function flow better
#![cfg_attr(feature="dev", allow(if_not_else))]
//! Blockchain sync module //! Blockchain sync module
//! Implements ethereum protocol version 63 as specified here: //! Implements ethereum protocol version 63 as specified here:
@ -173,6 +174,7 @@ impl NetworkProtocolHandler<SyncMessage> for EthSync {
self.sync.write().unwrap().maintain_sync(&mut NetSyncIo::new(io, self.chain.deref())); self.sync.write().unwrap().maintain_sync(&mut NetSyncIo::new(io, self.chain.deref()));
} }
#[allow(single_match)]
fn message(&self, io: &NetworkContext<SyncMessage>, message: &SyncMessage) { fn message(&self, io: &NetworkContext<SyncMessage>, message: &SyncMessage) {
match *message { match *message {
SyncMessage::NewChainBlocks { ref good, ref bad, ref retracted } => { SyncMessage::NewChainBlocks { ref good, ref bad, ref retracted } => {


@ -79,6 +79,7 @@
//! - When it's removed from `current` - all transactions from this sender (`current` & `future`) are recalculated. //! - When it's removed from `current` - all transactions from this sender (`current` & `future`) are recalculated.
//! //!
use std::default::Default;
use std::cmp::{Ordering}; use std::cmp::{Ordering};
use std::collections::{HashMap, BTreeSet}; use std::collections::{HashMap, BTreeSet};
use util::numbers::{Uint, U256}; use util::numbers::{Uint, U256};
@ -102,6 +103,7 @@ struct TransactionOrder {
hash: H256, hash: H256,
} }
impl TransactionOrder { impl TransactionOrder {
fn for_transaction(tx: &VerifiedTransaction, base_nonce: U256) -> Self { fn for_transaction(tx: &VerifiedTransaction, base_nonce: U256) -> Self {
TransactionOrder { TransactionOrder {
@ -253,6 +255,12 @@ pub struct TransactionQueue {
last_nonces: HashMap<Address, U256>, last_nonces: HashMap<Address, U256>,
} }
impl Default for TransactionQueue {
fn default() -> Self {
TransactionQueue::new()
}
}
impl TransactionQueue { impl TransactionQueue {
/// Creates new instance of this Queue /// Creates new instance of this Queue
pub fn new() -> Self { pub fn new() -> Self {


@ -27,7 +27,7 @@ crossbeam = "0.2"
slab = "0.1" slab = "0.1"
sha3 = { path = "sha3" } sha3 = { path = "sha3" }
serde = "0.7.0" serde = "0.7.0"
clippy = { version = "0.0.44", optional = true } clippy = { version = "0.0.49", optional = true }
json-tests = { path = "json-tests" } json-tests = { path = "json-tests" }
rustc_version = "0.1.0" rustc_version = "0.1.0"
igd = "0.4.2" igd = "0.4.2"


@ -15,7 +15,6 @@ rustc-serialize = "0.3"
arrayvec = "0.3" arrayvec = "0.3"
rand = "0.3.12" rand = "0.3.12"
serde = "0.7.0" serde = "0.7.0"
clippy = { version = "0.0.44", optional = true }
heapsize = "0.3" heapsize = "0.3"
[features] [features]


@ -305,7 +305,6 @@ mod tests {
assert!(jdb.exists(&foo)); assert!(jdb.exists(&foo));
} }
#[test] #[test]
fn reopen() { fn reopen() {
let mut dir = ::std::env::temp_dir(); let mut dir = ::std::env::temp_dir();
@ -364,11 +363,12 @@ mod tests {
jdb.commit(5, &b"5".sha3(), Some((4, b"4".sha3()))).unwrap(); jdb.commit(5, &b"5".sha3(), Some((4, b"4".sha3()))).unwrap();
} }
} }
#[test] #[test]
fn reopen_fork() { fn reopen_fork() {
let mut dir = ::std::env::temp_dir(); let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex()); dir.push(H32::random().hex());
let (foo, bar, baz) = { let (foo, _, _) = {
let mut jdb = ArchiveDB::new(dir.to_str().unwrap()); let mut jdb = ArchiveDB::new(dir.to_str().unwrap());
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(b"foo");
@ -406,6 +406,5 @@ mod tests {
let state = jdb.state(&key); let state = jdb.state(&key);
assert!(state.is_some()); assert!(state.is_some());
} }
} }
} }

File diff suppressed because it is too large.

@ -21,7 +21,8 @@ use common::*;
/// Export the journaldb module. /// Export the journaldb module.
pub mod traits; pub mod traits;
mod archivedb; mod archivedb;
mod optiononedb; mod earlymergedb;
mod overlayrecentdb;
/// Export the JournalDB trait. /// Export the JournalDB trait.
pub use self::traits::JournalDB; pub use self::traits::JournalDB;
@ -72,7 +73,8 @@ impl fmt::Display for Algorithm {
pub fn new(path: &str, algorithm: Algorithm) -> Box<JournalDB> { pub fn new(path: &str, algorithm: Algorithm) -> Box<JournalDB> {
match algorithm { match algorithm {
Algorithm::Archive => Box::new(archivedb::ArchiveDB::new(path)), Algorithm::Archive => Box::new(archivedb::ArchiveDB::new(path)),
Algorithm::EarlyMerge => Box::new(optiononedb::OptionOneDB::new(path)), Algorithm::EarlyMerge => Box::new(earlymergedb::EarlyMergeDB::new(path)),
Algorithm::OverlayRecent => Box::new(overlayrecentdb::OverlayRecentDB::new(path)),
_ => unimplemented!(), _ => unimplemented!(),
} }
} }
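
`journaldb::new` above now dispatches to three concrete backends (archivedb, earlymergedb, and the new overlayrecentdb, which the CLI hunk earlier exposes as the experimental "fast" pruning mode). A minimal sketch of the same enum-plus-boxed-trait-object dispatch, with simplified stand-in types rather than the real implementations:

```rust
// Sketch of the dispatch shape used by journaldb::new; the trait and the three
// stub types are simplified stand-ins, not the real ArchiveDB / EarlyMergeDB /
// OverlayRecentDB implementations.
trait JournalDB {
    fn name(&self) -> &'static str;
}

enum Algorithm {
    Archive,
    EarlyMerge,
    OverlayRecent,
}

struct Archive;
struct EarlyMerge;
struct OverlayRecent;

impl JournalDB for Archive { fn name(&self) -> &'static str { "archive" } }
impl JournalDB for EarlyMerge { fn name(&self) -> &'static str { "earlymerge" } }
impl JournalDB for OverlayRecent { fn name(&self) -> &'static str { "overlayrecent" } }

// One constructor hides the concrete type behind a boxed trait object, so callers
// (e.g. the --pruning handling) only choose an Algorithm value.
fn new(_path: &str, algorithm: Algorithm) -> Box<dyn JournalDB> {
    match algorithm {
        Algorithm::Archive => Box::new(Archive),
        Algorithm::EarlyMerge => Box::new(EarlyMerge),
        Algorithm::OverlayRecent => Box::new(OverlayRecent),
    }
}

fn main() {
    // "fast" pruning maps to OverlayRecent in the CLI hunk earlier.
    let db = new("/tmp/db", Algorithm::OverlayRecent);
    println!("selected journal db: {}", db.name());
}
```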


@ -1,618 +0,0 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Disk-backed HashDB implementation.
use common::*;
use rlp::*;
use hashdb::*;
use memorydb::*;
use super::traits::JournalDB;
use kvdb::{Database, DBTransaction, DatabaseConfig};
#[cfg(test)]
use std::env;
/// Implementation of the HashDB trait for a disk-backed database with a memory overlay
/// and latent-removal semantics.
///
/// Like OverlayDB, there is a memory overlay; `commit()` must be called in order to
/// write operations out to disk. Unlike OverlayDB, `remove()` operations do not take effect
/// immediately. Rather some age (based on a linear but arbitrary metric) must pass before
/// the removals actually take effect.
pub struct OptionOneDB {
overlay: MemoryDB,
backing: Arc<Database>,
counters: Option<Arc<RwLock<HashMap<H256, i32>>>>,
}
// all keys must be at least 12 bytes
const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ];
const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ];
const DB_VERSION : u32 = 3;
const PADDING : [u8; 10] = [ 0u8; 10 ];
impl OptionOneDB {
/// Create a new instance from file
pub fn new(path: &str) -> OptionOneDB {
let opts = DatabaseConfig {
prefix_size: Some(12) //use 12 bytes as prefix, this must match account_db prefix
};
let backing = Database::open(&opts, path).unwrap_or_else(|e| {
panic!("Error opening state db: {}", e);
});
if !backing.is_empty() {
match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) {
Ok(Some(DB_VERSION)) => {},
v => panic!("Incompatible DB version, expected {}, got {:?}", DB_VERSION, v)
}
} else {
backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database");
}
let counters = Some(Arc::new(RwLock::new(OptionOneDB::read_counters(&backing))));
OptionOneDB {
overlay: MemoryDB::new(),
backing: Arc::new(backing),
counters: counters,
}
}
/// Create a new instance with an anonymous temporary database.
#[cfg(test)]
fn new_temp() -> OptionOneDB {
let mut dir = env::temp_dir();
dir.push(H32::random().hex());
Self::new(dir.to_str().unwrap())
}
fn morph_key(key: &H256, index: u8) -> Bytes {
let mut ret = key.bytes().to_owned();
ret.push(index);
ret
}
// The next three are valid only as long as there is an insert operation of `key` in the journal.
fn set_already_in(batch: &DBTransaction, key: &H256) { batch.put(&Self::morph_key(key, 0), &[1u8]).expect("Low-level database error. Some issue with your hard disk?"); }
fn reset_already_in(batch: &DBTransaction, key: &H256) { batch.delete(&Self::morph_key(key, 0)).expect("Low-level database error. Some issue with your hard disk?"); }
fn is_already_in(backing: &Database, key: &H256) -> bool {
backing.get(&Self::morph_key(key, 0)).expect("Low-level database error. Some issue with your hard disk?").is_some()
}
fn insert_keys(inserts: &[(H256, Bytes)], backing: &Database, counters: &mut HashMap<H256, i32>, batch: &DBTransaction) {
for &(ref h, ref d) in inserts {
if let Some(c) = counters.get_mut(h) {
// already counting. increment.
*c += 1;
continue;
}
// this is the first entry for this node in the journal.
if backing.get(&h.bytes()).expect("Low-level database error. Some issue with your hard disk?").is_some() {
// already in the backing DB. start counting, and remember it was already in.
Self::set_already_in(batch, &h);
counters.insert(h.clone(), 1);
continue;
}
// Gets removed when a key leaves the journal, so should never be set when we're placing a new key.
//Self::reset_already_in(&h);
assert!(!Self::is_already_in(backing, &h));
batch.put(&h.bytes(), d).expect("Low-level database error. Some issue with your hard disk?");
}
}
fn replay_keys(inserts: &[H256], backing: &Database, counters: &mut HashMap<H256, i32>) {
trace!("replay_keys: inserts={:?}, counters={:?}", inserts, counters);
for h in inserts {
if let Some(c) = counters.get_mut(h) {
// already counting. increment.
*c += 1;
continue;
}
// this is the first entry for this node in the journal.
// it is initialised to 1 if it was already in.
if Self::is_already_in(backing, h) {
trace!("replace_keys: Key {} was already in!", h);
counters.insert(h.clone(), 1);
}
}
trace!("replay_keys: (end) counters={:?}", counters);
}
fn kill_keys(deletes: Vec<H256>, counters: &mut HashMap<H256, i32>, batch: &DBTransaction) {
for h in deletes.into_iter() {
let mut n: Option<i32> = None;
if let Some(c) = counters.get_mut(&h) {
if *c > 1 {
*c -= 1;
continue;
} else {
n = Some(*c);
}
}
match n {
Some(i) if i == 1 => {
counters.remove(&h);
Self::reset_already_in(batch, &h);
}
None => {
// Gets removed when moving from 1 to 0 additional refs. Should never be here at 0 additional refs.
//assert!(!Self::is_already_in(db, &h));
batch.delete(&h.bytes()).expect("Low-level database error. Some issue with your hard disk?");
}
_ => panic!("Invalid value in counters: {:?}", n),
}
}
}
fn payload(&self, key: &H256) -> Option<Bytes> {
self.backing.get(&key.bytes()).expect("Low-level database error. Some issue with your hard disk?").map(|v| v.to_vec())
}
fn read_counters(db: &Database) -> HashMap<H256, i32> {
let mut counters = HashMap::new();
if let Some(val) = db.get(&LATEST_ERA_KEY).expect("Low-level database error.") {
let mut era = decode::<u64>(&val);
loop {
let mut index = 0usize;
while let Some(rlp_data) = db.get({
let mut r = RlpStream::new_list(3);
r.append(&era);
r.append(&index);
r.append(&&PADDING[..]);
&r.drain()
}).expect("Low-level database error.") {
trace!("read_counters: era={}, index={}", era, index);
let rlp = Rlp::new(&rlp_data);
let inserts: Vec<H256> = rlp.val_at(1);
Self::replay_keys(&inserts, db, &mut counters);
index += 1;
};
if index == 0 || era == 0 {
break;
}
era -= 1;
}
}
trace!("Recovered {} counters", counters.len());
counters
}
}
impl HashDB for OptionOneDB {
fn keys(&self) -> HashMap<H256, i32> {
let mut ret: HashMap<H256, i32> = HashMap::new();
for (key, _) in self.backing.iter() {
let h = H256::from_slice(key.deref());
ret.insert(h, 1);
}
for (key, refs) in self.overlay.keys().into_iter() {
let refs = *ret.get(&key).unwrap_or(&0) + refs;
ret.insert(key, refs);
}
ret
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
let k = self.overlay.raw(key);
match k {
Some(&(ref d, rc)) if rc > 0 => Some(d),
_ => {
if let Some(x) = self.payload(key) {
Some(&self.overlay.denote(key, x).0)
}
else {
None
}
}
}
}
fn exists(&self, key: &H256) -> bool {
self.lookup(key).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
self.overlay.insert(value)
}
fn emplace(&mut self, key: H256, value: Bytes) {
self.overlay.emplace(key, value);
}
fn kill(&mut self, key: &H256) {
self.overlay.kill(key);
}
}
impl JournalDB for OptionOneDB {
fn spawn(&self) -> Box<JournalDB> {
Box::new(OptionOneDB {
overlay: MemoryDB::new(),
backing: self.backing.clone(),
counters: self.counters.clone(),
})
}
fn mem_used(&self) -> usize {
self.overlay.mem_used() + match self.counters {
Some(ref c) => c.read().unwrap().heap_size_of_children(),
None => 0
}
}
fn is_empty(&self) -> bool {
self.backing.get(&LATEST_ERA_KEY).expect("Low level database error").is_none()
}
fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> {
// journal format:
// [era, 0] => [ id, [insert_0, ...], [remove_0, ...] ]
// [era, 1] => [ id, [insert_0, ...], [remove_0, ...] ]
// [era, n] => [ ... ]
// TODO: store reclaim_period.
// When we make a new commit, we make a journal of all blocks in the recent history and record
// all keys that were inserted and deleted. The journal is ordered by era; multiple commits can
// share the same era. This forms a data structure similar to a queue but whose items are tuples.
// By the time comes to remove a tuple from the queue (i.e. then the era passes from recent history
// into ancient history) then only one commit from the tuple is considered canonical. This commit
// is kept in the main backing database, whereas any others from the same era are reverted.
//
// It is possible that a key, properly available in the backing database be deleted and re-inserted
// in the recent history queue, yet have both operations in commits that are eventually non-canonical.
// To avoid the original, and still required, key from being deleted, we maintain a reference count
// which includes an original key, if any.
//
// The semantics of the `counter` are:
// insert key k:
// counter already contains k: count += 1
// counter doesn't contain k:
// backing db contains k: count = 1
// backing db doesn't contain k: insert into backing db, count = 0
// delete key k:
// counter contains k (count is asserted to be non-zero):
// count > 1: counter -= 1
// count == 1: remove counter
// count == 0: remove key from backing db
// counter doesn't contain k: remove key from backing db
//
// Practically, this means that for each commit block turning from recent to ancient we do the
// following:
// is_canonical:
// inserts: Ignored (left alone in the backing database).
// deletes: Enacted; however, recent history queue is checked for ongoing references. This is
// reduced as a preference to deletion from the backing database.
// !is_canonical:
// inserts: Reverted; however, recent history queue is checked for ongoing references. This is
// reduced as a preference to deletion from the backing database.
// deletes: Ignored (they were never inserted).
//
// record new commit's details.
trace!("commit: #{} ({}), end era: {:?}", now, id, end);
let mut counters = self.counters.as_ref().unwrap().write().unwrap();
let batch = DBTransaction::new();
{
let mut index = 0usize;
let mut last;
while try!(self.backing.get({
let mut r = RlpStream::new_list(3);
r.append(&now);
r.append(&index);
r.append(&&PADDING[..]);
last = r.drain();
&last
})).is_some() {
index += 1;
}
let drained = self.overlay.drain();
let removes: Vec<H256> = drained
.iter()
.filter_map(|(k, &(_, c))| if c < 0 {Some(k.clone())} else {None})
.collect();
let inserts: Vec<(H256, Bytes)> = drained
.into_iter()
.filter_map(|(k, (v, r))| if r > 0 { assert!(r == 1); Some((k, v)) } else { assert!(r >= -1); None })
.collect();
let mut r = RlpStream::new_list(3);
r.append(id);
// Process the new inserts.
// We use the inserts for three things. For each:
// - we place into the backing DB or increment the counter if already in;
// - we note in the backing db that it was already in;
// - we write the key into our journal for this block;
r.begin_list(inserts.len());
inserts.iter().foreach(|&(k, _)| {r.append(&k);});
r.append(&removes);
Self::insert_keys(&inserts, &self.backing, &mut counters, &batch);
try!(batch.put(&last, r.as_raw()));
try!(batch.put(&LATEST_ERA_KEY, &encode(&now)));
}
// apply old commits' details
if let Some((end_era, canon_id)) = end {
let mut index = 0usize;
let mut last;
while let Some(rlp_data) = try!(self.backing.get({
let mut r = RlpStream::new_list(3);
r.append(&end_era);
r.append(&index);
r.append(&&PADDING[..]);
last = r.drain();
&last
})) {
let rlp = Rlp::new(&rlp_data);
let inserts: Vec<H256> = rlp.val_at(1);
let deletes: Vec<H256> = rlp.val_at(2);
// Collect keys to be removed. These are removed keys for canonical block, inserted for non-canonical
Self::kill_keys(if canon_id == rlp.val_at(0) {deletes} else {inserts}, &mut counters, &batch);
try!(batch.delete(&last));
index += 1;
}
trace!("OptionOneDB: delete journal for time #{}.{}, (canon was {})", end_era, index, canon_id);
}
try!(self.backing.write(batch));
// trace!("OptionOneDB::commit() deleted {} nodes", deletes);
Ok(0)
}
}
#[cfg(test)]
mod tests {
use common::*;
use super::*;
use hashdb::*;
use journaldb::traits::JournalDB;
#[test]
fn insert_same_in_fork() {
// history is 1
let mut jdb = OptionOneDB::new_temp();
let x = jdb.insert(b"X");
jdb.commit(1, &b"1".sha3(), None).unwrap();
jdb.commit(2, &b"2".sha3(), None).unwrap();
jdb.commit(3, &b"1002a".sha3(), Some((1, b"1".sha3()))).unwrap();
jdb.commit(4, &b"1003a".sha3(), Some((2, b"2".sha3()))).unwrap();
jdb.remove(&x);
jdb.commit(3, &b"1002b".sha3(), Some((1, b"1".sha3()))).unwrap();
let x = jdb.insert(b"X");
jdb.commit(4, &b"1003b".sha3(), Some((2, b"2".sha3()))).unwrap();
jdb.commit(5, &b"1004a".sha3(), Some((3, b"1002a".sha3()))).unwrap();
jdb.commit(6, &b"1005a".sha3(), Some((4, b"1003a".sha3()))).unwrap();
assert!(jdb.exists(&x));
}
#[test]
fn long_history() {
// history is 3
let mut jdb = OptionOneDB::new_temp();
let h = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&h));
jdb.remove(&h);
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.exists(&h));
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.exists(&h));
jdb.commit(3, &b"3".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&h));
jdb.commit(4, &b"4".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(!jdb.exists(&h));
}
#[test]
fn complex() {
// history is 1
let mut jdb = OptionOneDB::new_temp();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(jdb.exists(&baz));
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(!jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
}
#[test]
fn fork() {
// history is 1
let mut jdb = OptionOneDB::new_temp();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1a".sha3(), Some((0, b"0".sha3()))).unwrap();
jdb.remove(&bar);
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
}
#[test]
fn overwrite() {
// history is 1
let mut jdb = OptionOneDB::new_temp();
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.exists(&foo));
jdb.remove(&foo);
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
jdb.insert(b"foo");
assert!(jdb.exists(&foo));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.exists(&foo));
jdb.commit(3, &b"2".sha3(), Some((0, b"2".sha3()))).unwrap();
assert!(jdb.exists(&foo));
}
#[test]
fn fork_same_key() {
// history is 1
let mut jdb = OptionOneDB::new_temp();
jdb.commit(0, &b"0".sha3(), None).unwrap();
let foo = jdb.insert(b"foo");
jdb.commit(1, &b"1a".sha3(), Some((0, b"0".sha3()))).unwrap();
jdb.insert(b"foo");
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.exists(&foo));
jdb.commit(2, &b"2a".sha3(), Some((1, b"1a".sha3()))).unwrap();
assert!(jdb.exists(&foo));
}
#[test]
fn reopen() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let bar = H256::random();
let foo = {
let mut jdb = OptionOneDB::new(dir.to_str().unwrap());
// history is 1
let foo = jdb.insert(b"foo");
jdb.emplace(bar.clone(), b"bar".to_vec());
jdb.commit(0, &b"0".sha3(), None).unwrap();
foo
};
{
let mut jdb = OptionOneDB::new(dir.to_str().unwrap());
jdb.remove(&foo);
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
}
{
let mut jdb = OptionOneDB::new(dir.to_str().unwrap());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(!jdb.exists(&foo));
}
}
#[test]
fn reopen_remove() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let foo = {
let mut jdb = OptionOneDB::new(dir.to_str().unwrap());
// history is 1
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
// foo is ancient history.
jdb.insert(b"foo");
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
foo
};
{
let mut jdb = OptionOneDB::new(dir.to_str().unwrap());
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.exists(&foo));
jdb.remove(&foo);
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
jdb.commit(5, &b"5".sha3(), Some((4, b"4".sha3()))).unwrap();
assert!(!jdb.exists(&foo));
}
}
#[test]
fn reopen_fork() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let (foo, bar, baz) = {
let mut jdb = OptionOneDB::new(dir.to_str().unwrap());
// history is 1
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1a".sha3(), Some((0, b"0".sha3()))).unwrap();
jdb.remove(&bar);
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
(foo, bar, baz)
};
{
let mut jdb = OptionOneDB::new(dir.to_str().unwrap());
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
}
}
}

View File

@ -0,0 +1,873 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! JournalDB over in-memory overlay
use common::*;
use rlp::*;
use hashdb::*;
use memorydb::*;
use kvdb::{Database, DBTransaction, DatabaseConfig};
#[cfg(test)]
use std::env;
use super::JournalDB;
/// Implementation of the JournalDB trait for a disk-backed database with a memory overlay
/// and, possibly, latent-removal semantics.
///
/// Like OverlayDB, there is a memory overlay; `commit()` must be called in order to
/// write operations out to disk. Unlike OverlayDB, `remove()` operations do not take effect
/// immediately. Rather, some age (based on a linear but arbitrary metric) must pass before
/// the removals actually take effect.
///
/// There are two memory overlays:
/// - Transaction overlay contains current transaction data. It is merged with the history
/// overlay on each `commit()`.
/// - History overlay contains all data inserted during the history period. When a node
/// in the overlay becomes ancient it is written to disk on `commit()`.
///
/// There is also a journal, maintained both in memory and on disk, which lists the insertions
/// and removals for each commit during the history period. This is used to track
/// data nodes that go out of history scope and must be written to disk.
///
/// Commit workflow:
/// 1. Create a new journal record from the transaction overlay.
/// 2. Insert each node from the transaction overlay into the history overlay, increasing the
/// reference count if it is already there. Note that the reference counting is managed by `MemoryDB`.
/// 3. Clear the transaction overlay.
/// 4. For a canonical journal record that becomes ancient, insert its insertions into the disk DB.
/// 5. For each journal record that goes out of the history scope (becomes ancient), remove its
/// insertions from the history overlay, decreasing the reference counter and removing the entry
/// if it reaches zero.
/// 6. For a canonical journal record that becomes ancient, delete its removals from the disk only if
/// the removed key is not present in the history overlay.
/// 7. Delete the ancient record from memory and disk.
///
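/// A minimal usage sketch (the path, data and block ids below are illustrative only; the calls
/// mirror the tests at the bottom of this file):
///
/// ```ignore
/// let mut jdb = OverlayRecentDB::new("/tmp/overlay_recent_db");
/// let key = jdb.insert(b"node data");                            // goes into the transaction overlay
/// jdb.commit(0, &b"0".sha3(), None).unwrap();                    // journalled under era 0
/// jdb.remove(&key);                                              // latent removal
/// jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();  // era 0 becomes ancient
/// ```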
pub struct OverlayRecentDB {
transaction_overlay: MemoryDB,
backing: Arc<Database>,
journal_overlay: Arc<RwLock<JournalOverlay>>,
}
#[derive(PartialEq)]
struct JournalOverlay {
backing_overlay: MemoryDB,
journal: HashMap<u64, Vec<JournalEntry>>,
latest_era: u64,
}
#[derive(PartialEq)]
struct JournalEntry {
id: H256,
insertions: Vec<H256>,
deletions: Vec<H256>,
}
impl HeapSizeOf for JournalEntry {
fn heap_size_of_children(&self) -> usize {
self.insertions.heap_size_of_children() + self.deletions.heap_size_of_children()
}
}
impl Clone for OverlayRecentDB {
fn clone(&self) -> OverlayRecentDB {
OverlayRecentDB {
transaction_overlay: MemoryDB::new(),
backing: self.backing.clone(),
journal_overlay: self.journal_overlay.clone(),
}
}
}
// all keys must be at least 12 bytes
const LATEST_ERA_KEY : [u8; 12] = [ b'l', b'a', b's', b't', 0, 0, 0, 0, 0, 0, 0, 0 ];
const VERSION_KEY : [u8; 12] = [ b'j', b'v', b'e', b'r', 0, 0, 0, 0, 0, 0, 0, 0 ];
const DB_VERSION : u32 = 0x200 + 3;
const PADDING : [u8; 10] = [ 0u8; 10 ];
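// Journal records are stored under the key `rlp([era, index, PADDING])`, one record per (era, index)
// pair; PADDING only brings the key up to the 12-byte minimum noted above. An illustrative sketch of
// how such a key is built (mirroring the code in `commit()` and `read_overlay()` below):
//
//   let mut k = RlpStream::new_list(3);
//   k.append(&3u64);         // era
//   k.append(&0usize);       // index of the journal record within that era
//   k.append(&&PADDING[..]);
//   let key = k.drain();     // key of the first journal record for era 3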
impl OverlayRecentDB {
/// Create a new instance from file
pub fn new(path: &str) -> OverlayRecentDB {
Self::from_prefs(path)
}
/// Create a new instance from file
pub fn from_prefs(path: &str) -> OverlayRecentDB {
let opts = DatabaseConfig {
prefix_size: Some(12) //use 12 bytes as prefix, this must match account_db prefix
};
let backing = Database::open(&opts, path).unwrap_or_else(|e| {
panic!("Error opening state db: {}", e);
});
if !backing.is_empty() {
match backing.get(&VERSION_KEY).map(|d| d.map(|v| decode::<u32>(&v))) {
Ok(Some(DB_VERSION)) => {}
v => panic!("Incompatible DB version, expected {}, got {:?}", DB_VERSION, v)
}
} else {
backing.put(&VERSION_KEY, &encode(&DB_VERSION)).expect("Error writing version to database");
}
let journal_overlay = Arc::new(RwLock::new(OverlayRecentDB::read_overlay(&backing)));
OverlayRecentDB {
transaction_overlay: MemoryDB::new(),
backing: Arc::new(backing),
journal_overlay: journal_overlay,
}
}
/// Create a new instance with an anonymous temporary database.
#[cfg(test)]
pub fn new_temp() -> OverlayRecentDB {
let mut dir = env::temp_dir();
dir.push(H32::random().hex());
Self::new(dir.to_str().unwrap())
}
#[cfg(test)]
fn can_reconstruct_refs(&self) -> bool {
let reconstructed = Self::read_overlay(&self.backing);
let journal_overlay = self.journal_overlay.read().unwrap();
*journal_overlay == reconstructed
}
fn payload(&self, key: &H256) -> Option<Bytes> {
self.backing.get(&key.bytes()).expect("Low-level database error. Some issue with your hard disk?").map(|v| v.to_vec())
}
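/// Rebuild the in-memory journal overlay from the backing database: starting from the era stored
/// under `LATEST_ERA_KEY`, walk journal records backwards era by era, re-inserting each record's
/// insertions into the backing overlay and collecting its id, insertions and deletions into the
/// journal map.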
fn read_overlay(db: &Database) -> JournalOverlay {
let mut journal = HashMap::new();
let mut overlay = MemoryDB::new();
let mut count = 0;
let mut latest_era = 0;
if let Some(val) = db.get(&LATEST_ERA_KEY).expect("Low-level database error.") {
latest_era = decode::<u64>(&val);
let mut era = latest_era;
loop {
let mut index = 0usize;
while let Some(rlp_data) = db.get({
let mut r = RlpStream::new_list(3);
r.append(&era);
r.append(&index);
r.append(&&PADDING[..]);
&r.drain()
}).expect("Low-level database error.") {
trace!("read_overlay: era={}, index={}", era, index);
let rlp = Rlp::new(&rlp_data);
let id: H256 = rlp.val_at(0);
let insertions = rlp.at(1);
let deletions: Vec<H256> = rlp.val_at(2);
let mut inserted_keys = Vec::new();
for r in insertions.iter() {
let k: H256 = r.val_at(0);
let v: Bytes = r.val_at(1);
overlay.emplace(k.clone(), v);
inserted_keys.push(k);
count += 1;
}
journal.entry(era).or_insert_with(Vec::new).push(JournalEntry {
id: id,
insertions: inserted_keys,
deletions: deletions,
});
index += 1;
};
if index == 0 || era == 0 {
break;
}
era -= 1;
}
}
trace!("Recovered {} overlay entries, {} journal entries", count, journal.len());
JournalOverlay { backing_overlay: overlay, journal: journal, latest_era: latest_era }
}
}
impl JournalDB for OverlayRecentDB {
fn spawn(&self) -> Box<JournalDB> {
Box::new(self.clone())
}
fn mem_used(&self) -> usize {
let mut mem = self.transaction_overlay.mem_used();
let overlay = self.journal_overlay.read().unwrap();
mem += overlay.backing_overlay.mem_used();
mem += overlay.journal.heap_size_of_children();
mem
}
fn is_empty(&self) -> bool {
self.backing.get(&LATEST_ERA_KEY).expect("Low level database error").is_none()
}
fn commit(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> {
// record new commit's details.
trace!("commit: #{} ({}), end era: {:?}", now, id, end);
let mut journal_overlay = self.journal_overlay.write().unwrap();
let batch = DBTransaction::new();
{
let mut r = RlpStream::new_list(3);
let mut tx = self.transaction_overlay.drain();
let inserted_keys: Vec<_> = tx.iter().filter_map(|(k, &(_, c))| if c > 0 { Some(k.clone()) } else { None }).collect();
let removed_keys: Vec<_> = tx.iter().filter_map(|(k, &(_, c))| if c < 0 { Some(k.clone()) } else { None }).collect();
// Increase the counter for each inserted key, whether or not the block is canonical.
let insertions = tx.drain().filter_map(|(k, (v, c))| if c > 0 { Some((k, v)) } else { None });
r.append(id);
r.begin_list(inserted_keys.len());
for (k, v) in insertions {
r.begin_list(2);
r.append(&k);
r.append(&v);
journal_overlay.backing_overlay.emplace(k, v);
}
r.append(&removed_keys);
let mut k = RlpStream::new_list(3);
let index = journal_overlay.journal.get(&now).map(|j| j.len()).unwrap_or(0);
k.append(&now);
k.append(&index);
k.append(&&PADDING[..]);
try!(batch.put(&k.drain(), r.as_raw()));
if now >= journal_overlay.latest_era {
try!(batch.put(&LATEST_ERA_KEY, &encode(&now)));
journal_overlay.latest_era = now;
}
journal_overlay.journal.entry(now).or_insert_with(Vec::new).push(JournalEntry { id: id.clone(), insertions: inserted_keys, deletions: removed_keys });
}
let journal_overlay = journal_overlay.deref_mut();
// apply old commits' details
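// When `end` names an era that has just left the history window, flush that era: write the
// canonical record's insertions (those still referenced in the overlay) to disk, drop every
// record's insertions from the history overlay, and enact the canonical record's deletions
// unless the key is still present in the overlay.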
if let Some((end_era, canon_id)) = end {
if let Some(ref mut records) = journal_overlay.journal.get_mut(&end_era) {
let mut canon_insertions: Vec<(H256, Bytes)> = Vec::new();
let mut canon_deletions: Vec<H256> = Vec::new();
let mut overlay_deletions: Vec<H256> = Vec::new();
let mut index = 0usize;
for mut journal in records.drain(..) {
//delete the record from the db
let mut r = RlpStream::new_list(3);
r.append(&end_era);
r.append(&index);
r.append(&&PADDING[..]);
try!(batch.delete(&r.drain()));
trace!("commit: Delete journal for time #{}.{}: {}, (canon was {}): +{} -{} entries", end_era, index, journal.id, canon_id, journal.insertions.len(), journal.deletions.len());
{
if canon_id == journal.id {
for h in &journal.insertions {
if let Some(&(ref d, rc)) = journal_overlay.backing_overlay.raw(h) {
if rc > 0 {
canon_insertions.push((h.clone(), d.clone())); //TODO: optimize this to avoid data copy
}
}
}
canon_deletions = journal.deletions;
}
overlay_deletions.append(&mut journal.insertions);
}
index += 1;
}
// apply canon inserts first
for (k, v) in canon_insertions {
try!(batch.put(&k, &v));
}
// update the overlay
for k in overlay_deletions {
journal_overlay.backing_overlay.kill(&k);
}
// apply canon deletions
for k in canon_deletions {
if !journal_overlay.backing_overlay.exists(&k) {
try!(batch.delete(&k));
}
}
journal_overlay.backing_overlay.purge();
}
journal_overlay.journal.remove(&end_era);
}
try!(self.backing.write(batch));
Ok(0)
}
}
impl HashDB for OverlayRecentDB {
fn keys(&self) -> HashMap<H256, i32> {
let mut ret: HashMap<H256, i32> = HashMap::new();
for (key, _) in self.backing.iter() {
let h = H256::from_slice(key.deref());
ret.insert(h, 1);
}
for (key, refs) in self.transaction_overlay.keys().into_iter() {
let refs = *ret.get(&key).unwrap_or(&0) + refs;
ret.insert(key, refs);
}
ret
}
fn lookup(&self, key: &H256) -> Option<&[u8]> {
let k = self.transaction_overlay.raw(key);
match k {
Some(&(ref d, rc)) if rc > 0 => Some(d),
_ => {
let v = self.journal_overlay.read().unwrap().backing_overlay.lookup(key).map(|v| v.to_vec());
match v {
Some(x) => {
Some(&self.transaction_overlay.denote(key, x).0)
}
_ => {
if let Some(x) = self.payload(key) {
Some(&self.transaction_overlay.denote(key, x).0)
}
else {
None
}
}
}
}
}
}
fn exists(&self, key: &H256) -> bool {
self.lookup(key).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
self.transaction_overlay.insert(value)
}
fn emplace(&mut self, key: H256, value: Bytes) {
self.transaction_overlay.emplace(key, value);
}
fn kill(&mut self, key: &H256) {
self.transaction_overlay.kill(key);
}
}
#[cfg(test)]
mod tests {
use common::*;
use super::*;
use hashdb::*;
use log::init_log;
use journaldb::JournalDB;
#[test]
fn insert_same_in_fork() {
// history is 1
let mut jdb = OverlayRecentDB::new_temp();
let x = jdb.insert(b"X");
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(3, &b"1002a".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(4, &b"1003a".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&x);
jdb.commit(3, &b"1002b".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
let x = jdb.insert(b"X");
jdb.commit(4, &b"1003b".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(5, &b"1004a".sha3(), Some((3, b"1002a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(6, &b"1005a".sha3(), Some((4, b"1003a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&x));
}
#[test]
fn long_history() {
// history is 3
let mut jdb = OverlayRecentDB::new_temp();
let h = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
jdb.remove(&h);
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
jdb.commit(3, &b"3".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&h));
jdb.commit(4, &b"4".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&h));
}
#[test]
fn complex() {
// history is 1
let mut jdb = OverlayRecentDB::new_temp();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(jdb.exists(&baz));
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
assert!(!jdb.exists(&bar));
assert!(!jdb.exists(&baz));
}
#[test]
fn fork() {
// history is 1
let mut jdb = OverlayRecentDB::new_temp();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1a".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar);
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
assert!(jdb.exists(&baz));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
}
#[test]
fn overwrite() {
// history is 1
let mut jdb = OverlayRecentDB::new_temp();
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
jdb.remove(&foo);
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
assert!(jdb.exists(&foo));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
jdb.commit(3, &b"2".sha3(), Some((0, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
}
#[test]
fn fork_same_key_one() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
jdb.commit(1, &b"1a".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(1, &b"1c".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
jdb.commit(2, &b"2a".sha3(), Some((1, b"1a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
}
#[test]
fn fork_same_key_other() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
jdb.commit(1, &b"1a".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(1, &b"1c".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
}
#[test]
fn fork_ins_del_ins() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.commit(2, &b"2a".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.commit(2, &b"2b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(3, &b"3a".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(3, &b"3b".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(4, &b"4a".sha3(), Some((2, b"2a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(5, &b"5a".sha3(), Some((3, b"3a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
#[test]
fn reopen() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let bar = H256::random();
let foo = {
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
// history is 1
let foo = jdb.insert(b"foo");
jdb.emplace(bar.clone(), b"bar".to_vec());
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
foo
};
{
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
jdb.remove(&foo);
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
{
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
assert!(jdb.exists(&foo));
assert!(jdb.exists(&bar));
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
}
}
#[test]
fn insert_delete_insert_delete_insert_expunge() {
init_log();
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
// history is 4
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.commit(3, &b"3".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(4, &b"4".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
// expunge foo
jdb.commit(5, &b"5".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
#[test]
fn forked_insert_delete_insert_delete_insert_expunge() {
init_log();
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
// history is 4
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.commit(1, &b"1a".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.commit(1, &b"1b".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(2, &b"2a".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(2, &b"2b".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.commit(3, &b"3a".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.commit(3, &b"3b".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(4, &b"4a".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(4, &b"4b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
// expunge foo
jdb.commit(5, &b"5".sha3(), Some((1, b"1a".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
#[test]
fn broken_assert() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
// history is 1
let foo = jdb.insert(b"foo");
jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
// foo is ancient history.
jdb.remove(&foo);
jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.commit(3, &b"3".sha3(), Some((2, b"2".sha3()))).unwrap(); // BROKEN
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
jdb.remove(&foo);
jdb.commit(4, &b"4".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(5, &b"5".sha3(), Some((4, b"4".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
}
#[test]
fn reopen_test() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
// history is 4
let foo = jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(2, &b"2".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(3, &b"3".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(4, &b"4".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
// foo is ancient history.
jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(5, &b"5".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&bar);
jdb.commit(6, &b"6".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(b"bar");
jdb.commit(7, &b"7".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
#[test]
fn reopen_remove_three() {
init_log();
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let foo = b"foo".sha3();
{
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
// history is 1
jdb.insert(b"foo");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit(1, &b"1".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
// foo is ancient history.
jdb.remove(&foo);
jdb.commit(2, &b"2".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
jdb.insert(b"foo");
jdb.commit(3, &b"3".sha3(), Some((1, b"1".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
// incantation to reopen the db
}; { let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
jdb.remove(&foo);
jdb.commit(4, &b"4".sha3(), Some((2, b"2".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
// incantation to reopen the db
}; { let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
jdb.commit(5, &b"5".sha3(), Some((3, b"3".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
// incantation to reopen the db
}; { let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
jdb.commit(6, &b"6".sha3(), Some((4, b"4".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.exists(&foo));
}
}
#[test]
fn reopen_fork() {
let mut dir = ::std::env::temp_dir();
dir.push(H32::random().hex());
let (foo, bar, baz) = {
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
// history is 1
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.commit(0, &b"0".sha3(), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.commit(1, &b"1a".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar);
jdb.commit(1, &b"1b".sha3(), Some((0, b"0".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
(foo, bar, baz)
};
{
let mut jdb = OverlayRecentDB::new(dir.to_str().unwrap());
jdb.commit(2, &b"2b".sha3(), Some((1, b"1b".sha3()))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.exists(&foo));
assert!(!jdb.exists(&baz));
assert!(!jdb.exists(&bar));
}
}
}

View File

@ -131,9 +131,15 @@ impl AccountService {
	}
}

+impl Default for SecretStore {
+	fn default() -> Self {
+		SecretStore::new()
+	}
+}
+
impl SecretStore {
	/// new instance of Secret Store in default home directory
-	pub fn new() -> SecretStore {
+	pub fn new() -> Self {
		let mut path = ::std::env::home_dir().expect("Failed to get home dir");
		path.push(".parity");
		path.push("keys");
@ -142,7 +148,7 @@ impl SecretStore {
	}

	/// new instance of Secret Store in specific directory
-	pub fn new_in(path: &Path) -> SecretStore {
+	pub fn new_in(path: &Path) -> Self {
		SecretStore {
			directory: KeyDirectory::new(path),
			unlocks: RwLock::new(HashMap::new()),
@ -214,12 +220,12 @@ impl SecretStore {
	/// Creates new account
	pub fn new_account(&mut self, pass: &str) -> Result<Address, ::std::io::Error> {
-		let secret = H256::random();
+		let key_pair = crypto::KeyPair::create().expect("Error creating key-pair. Something wrong with crypto libraries?");
+		let address = Address::from(key_pair.public().sha3());
		let key_id = H128::random();
-		self.insert(key_id.clone(), secret, pass);
+		self.insert(key_id.clone(), key_pair.secret().clone(), pass);
		let mut key_file = self.directory.get(&key_id).expect("the key was just inserted");
-		let address = Address::random();
		key_file.account = Some(address);
		try!(self.directory.save(key_file));
		Ok(address)
@ -381,6 +387,7 @@ mod tests {
	use super::*;
	use devtools::*;
	use common::*;
+	use crypto::KeyPair;

	#[test]
	fn can_insert() {
@ -555,4 +562,15 @@ mod tests {
		let accounts = sstore.accounts().unwrap();
		assert_eq!(30, accounts.len());
	}
+
+	#[test]
+	fn validate_generated_addresses() {
+		let temp = RandomTempPath::create_dir();
+		let mut sstore = SecretStore::new_test(&temp);
+		let addr = sstore.new_account("test").unwrap();
+		let _ok = sstore.unlock_account(&addr, "test").unwrap();
+		let secret = sstore.account_secret(&addr).unwrap();
+		let kp = KeyPair::from_secret(secret).unwrap();
+		assert_eq!(Address::from(kp.public().sha3()), addr);
+	}
}

View File

@ -16,6 +16,7 @@
//! Key-Value store abstraction with RocksDB backend.

+use std::default::Default;
use rocksdb::{DB, Writable, WriteBatch, IteratorMode, DBVector, DBIterator,
	IndexType, Options, DBCompactionStyle, BlockBasedOptions, Direction};
@ -24,6 +25,12 @@ pub struct DBTransaction {
	batch: WriteBatch,
}

+impl Default for DBTransaction {
+	fn default() -> Self {
+		DBTransaction::new()
+	}
+}
+
impl DBTransaction {
	/// Create new transaction.
	pub fn new() -> DBTransaction {

View File

@ -27,6 +27,8 @@
#![cfg_attr(feature="dev", allow(match_same_arms))]
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
#![cfg_attr(feature="dev", allow(clone_on_copy))]
+// In most cases it expresses function flow better
+#![cfg_attr(feature="dev", allow(if_not_else))]

//! Ethcore-util library
//!

View File

@ -24,6 +24,7 @@ use hashdb::*;
use heapsize::*;
use std::mem;
use std::collections::HashMap;
+use std::default::Default;

#[derive(Debug,Clone)]
/// Reference-counted memory-based HashDB implementation.
@ -69,11 +70,18 @@ use std::collections::HashMap;
/// assert!(!m.exists(&k));
/// }
/// ```
+#[derive(PartialEq)]
pub struct MemoryDB {
	data: HashMap<H256, (Bytes, i32)>,
	static_null_rlp: (Bytes, i32),
}

+impl Default for MemoryDB {
+	fn default() -> Self {
+		MemoryDB::new()
+	}
+}
+
impl MemoryDB {
	/// Create a new instance of the memory DB.
	pub fn new() -> MemoryDB {

View File

@ -19,6 +19,7 @@ use std::net::SocketAddr;
use std::collections::{HashSet, HashMap, BTreeMap, VecDeque};
use std::mem;
use std::cmp;
+use std::default::Default;
use mio::*;
use mio::udp::*;
use sha3::*;
@ -62,8 +63,14 @@ struct NodeBucket {
	nodes: VecDeque<BucketEntry>, //sorted by last active
}

+impl Default for NodeBucket {
+	fn default() -> Self {
+		NodeBucket::new()
+	}
+}
+
impl NodeBucket {
-	fn new() -> NodeBucket {
+	fn new() -> Self {
		NodeBucket {
			nodes: VecDeque::new()
		}

View File

@ -23,6 +23,7 @@ use std::ops::*;
use std::cmp::min;
use std::path::{Path, PathBuf};
use std::io::{Read, Write};
+use std::default::Default;
use std::fs;
use mio::*;
use mio::tcp::*;
@ -75,9 +76,15 @@ pub struct NetworkConfiguration {
	pub ideal_peers: u32,
}

+impl Default for NetworkConfiguration {
+	fn default() -> Self {
+		NetworkConfiguration::new()
+	}
+}
+
impl NetworkConfiguration {
	/// Create a new instance of default settings.
-	pub fn new() -> NetworkConfiguration {
+	pub fn new() -> Self {
		NetworkConfiguration {
			config_path: None,
			listen_address: None,

View File

@ -19,6 +19,7 @@
use std::thread;
use std::ops::DerefMut;
use std::sync::{Arc, Mutex};
+use std::default::Default;

/// Thread-safe closure for handling possible panics
pub trait OnPanicListener: Send + Sync + 'static {
@ -56,14 +57,20 @@ pub struct PanicHandler {
	listeners: Mutex<Vec<Box<OnPanicListener>>>
}

+impl Default for PanicHandler {
+	fn default() -> Self {
+		PanicHandler::new()
+	}
+}
+
impl PanicHandler {
	/// Creates new `PanicHandler` wrapped in `Arc`
-	pub fn new_in_arc() -> Arc<PanicHandler> {
+	pub fn new_in_arc() -> Arc<Self> {
		Arc::new(Self::new())
	}

	/// Creates new `PanicHandler`
-	pub fn new() -> PanicHandler {
+	pub fn new() -> Self {
		PanicHandler {
			listeners: Mutex::new(vec![])
		}

View File

@ -15,6 +15,7 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use std::ops::Deref;
+use std::default::Default;
use elastic_array::*;
use rlp::bytes::{ToBytes, VecLike};
use rlp::{Stream, Encoder, Encodable};
@ -44,6 +45,12 @@ pub struct RlpStream {
	finished_list: bool,
}

+impl Default for RlpStream {
+	fn default() -> Self {
+		RlpStream::new()
+	}
+}
+
impl Stream for RlpStream {
	fn new() -> Self {
		RlpStream {
@ -190,8 +197,14 @@ struct BasicEncoder {
	bytes: ElasticArray1024<u8>,
}

+impl Default for BasicEncoder {
+	fn default() -> Self {
+		BasicEncoder::new()
+	}
+}
+
impl BasicEncoder {
-	fn new() -> BasicEncoder {
+	fn new() -> Self {
		BasicEncoder { bytes: ElasticArray1024::new() }
	}

View File

@ -16,6 +16,7 @@
//! A collection associating pair of keys (row and column) with a single value.

+use std::default::Default;
use std::hash::Hash;
use std::collections::HashMap;
@ -30,11 +31,21 @@ pub struct Table<Row, Col, Val>
	map: HashMap<Row, HashMap<Col, Val>>,
}

+impl<Row, Col, Val> Default for Table<Row, Col, Val>
+	where Row: Eq + Hash + Clone,
+	      Col: Eq + Hash {
+	fn default() -> Self {
+		Table::new()
+	}
+}
+
+// There is default but clippy does not detect it?
+#[allow(new_without_default)]
impl<Row, Col, Val> Table<Row, Col, Val>
	where Row: Eq + Hash + Clone,
	      Col: Eq + Hash {
	/// Creates new Table
-	pub fn new() -> Table<Row, Col, Val> {
+	pub fn new() -> Self {
		Table {
			map: HashMap::new(),
		}

View File

@ -14,6 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

+use std::default::Default;
use sha3::*;
use hash::H256;
use bytes::*;
@ -39,6 +40,12 @@ pub struct Score {
#[derive(Debug)]
pub struct Journal (Vec<Operation>);

+impl Default for Journal {
+	fn default() -> Self {
+		Journal::new()
+	}
+}
+
impl Journal {
	/// Create a new, empty, object.
	pub fn new() -> Journal { Journal(vec![]) }