Remove calls to heapsize (#10432)

* update memorydb trait
* use malloc_size_of instead of heapsize_of
* use jemalloc as the default allocator for the parity client (see the sketch below).
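In practice the second and third bullets amount to two mechanical changes in client code: types that account for their own memory implement the malloc_size_of-style trait from parity-util-mem instead of heapsize's HeapSizeOf, and the binary ends up running on jemalloc. The following is a minimal sketch, not code from this commit: it assumes parity-util-mem 0.1.0 re-exports MallocSizeOf, MallocSizeOfExt and a #[derive(MallocSizeOf)] macro (via the newly added malloc_size_of_derive crate) the way later releases do, and CachedBlock is a made-up example type.

// Sketch only (not code from this commit): illustrates the shape of the
// migration described above, under the assumptions stated in the lead-in.

use jemallocator::Jemalloc;
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

// jemalloc becomes the global allocator for the whole client binary.
#[global_allocator]
static ALLOC: Jemalloc = Jemalloc;

// Previously this would have been an `impl HeapSizeOf` returning
// heap_size_of_children(); now the trait is derived instead.
#[derive(MallocSizeOf)]
struct CachedBlock {
    // hypothetical type, for illustration only
    rlp_bytes: Vec<u8>,
    receipts: Vec<Vec<u8>>,
}

fn main() {
    let block = CachedBlock {
        rlp_bytes: vec![0u8; 1024],
        receipts: vec![vec![0u8; 128]; 4],
    };
    // malloc_size_of() replaces the old heapsize heap_size_of_children() call sites.
    println!("cached block heap usage: ~{} bytes", block.malloc_size_of());
}

Judging by the lockfile changes below, the jemalloc hookup is wired through parity-util-mem's own jemallocator dependency rather than directly in the client crate, so the #[global_allocator] line above stands in for whatever feature flag the real build uses.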
Authored by cheme on 2019-06-19 13:54:05 +02:00; committed by GitHub
Commit 6fc5014b4d (parent 859a41308c)
84 changed files with 926 additions and 1074 deletions

Cargo.lock (generated)

@@ -121,7 +121,7 @@ version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-demangle 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -312,7 +312,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "cfg-if"
-version = "0.1.5"
+version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
@@ -357,6 +357,14 @@ dependencies = [
 "vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "clear_on_drop"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "cli-signer"
 version = "1.4.0"
@@ -403,9 +411,9 @@ dependencies = [
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethjson 0.1.0",
 "ethkey 0.3.0",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp_derive 0.1.0",
 "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -520,7 +528,7 @@ version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -534,7 +542,7 @@ version = "0.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -547,7 +555,7 @@ version = "0.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -559,7 +567,7 @@ name = "crossbeam-utils"
 version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -572,7 +580,7 @@ name = "crossbeam-utils"
 version = "0.6.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -736,10 +744,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "elastic-array"
-version = "0.10.0"
+version = "0.10.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
+"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -777,7 +785,7 @@ source = "git+https://github.com/paritytech/rust-secp256k1#9791e79f21a5309dcb6e0
 dependencies = [
 "arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)",
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -873,8 +881,7 @@ dependencies = [
 "evm 0.1.0",
 "fetch 0.1.0",
 "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "journaldb 0.2.0",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -888,13 +895,14 @@ dependencies = [
 "lru-cache 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "macros 0.1.0",
 "memory-cache 0.1.0",
-"memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
 "num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-crypto 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-runtime 0.1.0",
 "parity-snappy 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "patricia-trie-ethereum 0.1.0",
 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -910,8 +918,8 @@ dependencies = [
 "tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
 "time-utils 0.1.0",
 "trace-time 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"trie-standardmap 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"trie-standardmap 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "triehash-ethereum 0.2.0",
 "unexpected 0.1.0",
 "using_queue 0.1.0",
@@ -945,13 +953,13 @@ dependencies = [
 "ethcore-db 0.1.0",
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethkey 0.3.0",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "kvdb-memorydb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -983,8 +991,8 @@ version = "0.1.0"
 dependencies = [
 "common-types 0.1.0",
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp_derive 0.1.0",
@@ -1025,8 +1033,7 @@ dependencies = [
 "failsafe 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "fastmap 0.1.0",
 "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "journaldb 0.2.0",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1035,8 +1042,9 @@ dependencies = [
 "kvdb-memorydb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "memory-cache 0.1.0",
-"memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "patricia-trie-ethereum 0.1.0",
 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1047,7 +1055,7 @@ dependencies = [
 "smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "stats 0.1.0",
 "tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
-"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "triehash-ethereum 0.2.0",
 "vm 0.1.0",
 ]
@@ -1084,12 +1092,12 @@ dependencies = [
 "ethkey 0.3.0",
 "fetch 0.1.0",
 "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "hyper 0.12.19 (registry+https://github.com/rust-lang/crates.io-index)",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-runtime 0.1.0",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "price-info 1.12.0",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1176,11 +1184,11 @@ dependencies = [
 "ethkey 0.3.0",
 "fetch 0.1.0",
 "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-crypto 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "patricia-trie-ethereum 0.1.0",
 "rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1193,7 +1201,7 @@ dependencies = [
 "time-utils 0.1.0",
 "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "transaction-pool 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1290,8 +1298,7 @@ dependencies = [
 "ethstore 0.2.1",
 "fastmap 0.1.0",
 "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "keccak-hasher 0.1.1",
 "kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1300,6 +1307,7 @@ dependencies = [
 "macros 0.1.0",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-runtime 0.1.0",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1343,8 +1351,8 @@ dependencies = [
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"memzero 0.1.0",
 "parity-crypto 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-wordlist 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1418,12 +1426,12 @@ dependencies = [
 "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "criterion 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "memory-cache 0.1.0",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "vm 0.1.0",
@@ -1531,7 +1539,7 @@ version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
+"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-hex 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1680,28 +1688,27 @@ dependencies = [
 [[package]]
 name = "hash-db"
-version = "0.11.0"
+version = "0.12.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "hash-db"
-version = "0.12.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "hash256-std-hasher"
-version = "0.12.2"
+version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "crunchy 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "hashmap_core"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "heapsize"
 version = "0.4.2"
-source = "git+https://github.com/cheme/heapsize.git?branch=ec-macfix#421df390a930cb523a09e5528e6fe57b534b3b26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"jemallocator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1985,15 +1992,15 @@ dependencies = [
 "env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "fastmap 0.1.0",
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "keccak-hasher 0.1.1",
 "kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "kvdb-memorydb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2141,18 +2148,18 @@ name = "keccak-hasher"
 version = "0.1.1"
 dependencies = [
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "plain_hasher 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
 name = "keccak-hasher"
-version = "0.12.2"
+version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"hash-db 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"hash256-std-hasher 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash256-std-hasher 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2170,7 +2177,7 @@ name = "kvdb"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2188,7 +2195,7 @@ name = "kvdb-rocksdb"
 version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "fs-swap 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "interleaved-ordered 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2272,7 +2279,7 @@ name = "log"
 version = "0.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -2295,6 +2302,16 @@ dependencies = [
 name = "macros"
 version = "0.1.0"
+[[package]]
+name = "malloc_size_of_derive"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
+"syn 0.15.26 (registry+https://github.com/rust-lang/crates.io-index)",
+"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "matches"
 version = "0.1.8"
@@ -2305,7 +2322,7 @@ name = "memchr"
 version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2328,17 +2345,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "memory-cache"
 version = "0.1.0"
 dependencies = [
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
 "lru-cache 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
 name = "memory-db"
-version = "0.11.0"
+version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
+"hashmap_core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -2346,10 +2364,6 @@ name = "memory_units"
 version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-[[package]]
-name = "memzero"
-version = "0.1.0"
 [[package]]
 name = "memzero"
 version = "0.1.0"
@@ -2483,7 +2497,7 @@ name = "net2"
 version = "0.2.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -2739,6 +2753,7 @@ dependencies = [
 "parity-rpc 1.12.0",
 "parity-runtime 0.1.0",
 "parity-updater 1.12.0",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-version 2.6.0",
 "parity-whisper 0.1.0",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2989,6 +3004,21 @@ dependencies = [
 "tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
+[[package]]
+name = "parity-util-mem"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"clear_on_drop 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"jemallocator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+"malloc_size_of_derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
+]
 [[package]]
 name = "parity-version"
 version = "2.6.0"
@@ -3023,8 +3053,8 @@ dependencies = [
 "jsonrpc-derive 10.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "jsonrpc-pubsub 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
-"memzero 0.1.0",
 "ordered-float 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -3115,16 +3145,16 @@ dependencies = [
 name = "patricia-trie-ethereum"
 version = "0.1.0"
 dependencies = [
-"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "journaldb 0.2.0",
 "keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "keccak-hasher 0.1.1",
-"memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -3627,7 +3657,7 @@ dependencies = [
 name = "rlp_compress"
 version = "0.1.0"
 dependencies = [
-"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -3893,7 +3923,7 @@ name = "socket2"
 version = "0.3.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
 "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -4380,30 +4410,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "trie-db"
-version = "0.11.0"
+version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
+"hashmap_core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
 name = "trie-standardmap"
-version = "0.12.3"
+version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"hash-db 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
-"keccak-hasher 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"keccak-hasher 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
 name = "triehash"
-version = "0.5.0"
+version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
-"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -4413,7 +4444,7 @@ version = "0.2.0"
 dependencies = [
 "ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "keccak-hasher 0.1.1",
-"triehash 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"triehash 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -4443,7 +4474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "crunchy 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
-"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
+"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc-hex 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -4591,7 +4622,7 @@ dependencies = [
 "parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "patricia-trie-ethereum 0.1.0",
 "rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
-"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -4809,10 +4840,11 @@ dependencies = [
 "checksum cast 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "926013f2860c46252efceabb19f4a6b308197505082c609025aa6706c011d427"
 "checksum cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4a8b715cb4597106ea87c7c84b2f1d452c7492033765df7f32651e66fcf749"
 "checksum cesu8 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c"
-"checksum cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4e7bb64a8ebb0d856483e1e682ea3422f883c5f5615a90d51a2c82fe87fdd3"
+"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33"
 "checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878"
 "checksum cid 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0e37fba0087d9f3f4e269827a55dc511abf3e440cc097a0c154ff4e6584f988"
 "checksum clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b957d88f4b6a63b9d70d5f454ac8011819c6efa7727858f458ab71c756ce2d3e"
+"checksum clear_on_drop 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "97276801e127ffb46b66ce23f35cc96bd454fa311294bced4bbace7baa8b1d17"
 "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
 "checksum cmake 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "6ec65ee4f9c9d16f335091d23693457ed4928657ba4982289d7fafee03bc614a"
 "checksum combine 3.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fc1d011beeed29187b8db2ac3925c8dd4d3e87db463dc9d2d2833985388fc5bc"
@@ -4847,7 +4879,7 @@ dependencies = [
 "checksum docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "db2906c2579b5b7207fc1e328796a9a8835dc44e22dbe8e460b1d636f9a7b225"
 "checksum edit-distance 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3bd26878c3d921f89797a4e1a1711919f999a9f6946bb6f5a4ffda126d297b7e"
 "checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
-"checksum elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "88d4851b005ef16de812ea9acdb7bece2f0a40dd86c07b85631d7dafa54537bb"
+"checksum elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "073be79b6538296faf81c631872676600616073817dd9a440c477ad09b408983"
 "checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
 "checksum env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)" = "15b0a4d2e39f8420210be8b27eeda28029729e2fd4291019455016c348240c38"
 "checksum error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "07e791d3be96241c77c43846b665ef1384606da2cd2a48730abe606a12906e02"
@@ -4880,10 +4912,10 @@ dependencies = [
 "checksum h2 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "a27e7ed946e8335bdf9a191bc1b9b14a03ba822d013d2f58437f4fabcbd7fc2c"
 "checksum hamming 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65043da274378d68241eb9a8f8f8aa54e349136f7b8e12f63e3ef44043cc30e1"
 "checksum handlebars 0.32.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d89ec99d1594f285d4590fc32bac5f75cdab383f1123d504d27862c644a807dd"
-"checksum hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1b03501f6e1a2a97f1618879aba3156f14ca2847faa530c4e28859638bd11483"
-"checksum hash-db 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ba7fb417e5c470acdd61068c79767d0e65962e70836cf6c9dfd2409f06345ce0"
-"checksum hash256-std-hasher 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f8b2027c19ec91eb304999abae7307d225cf93be42af53b0039f76e98ed5af86"
-"checksum heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)" = "<none>"
+"checksum hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3c95a428c86ed4633d83e07ef9e0a147a906da01e931f07e74a85bedce5a43"
+"checksum hash256-std-hasher 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "663ce20dae36902c16d12c6aaae400ca40d922407a8cf2b4caf8cae9b39b4f03"
+"checksum hashmap_core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "8e04cb7a5051270ef3fa79f8c7604d581ecfa73d520e74f554e45541c4b5881a"
+"checksum heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1679e6ea370dee694f91f1dc469bf94cf8f52051d147aec3e1f9497c6fc22461"
 "checksum heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ea04fa3ead4e05e51a7c806fc07271fdbde4e246a6c6d1efd52e72230b771b82"
 "checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa"
 "checksum hmac 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f127a908633569f208325f86f71255d3363c79721d7f9fe31cd5569908819771"
@@ -4924,7 +4956,7 @@ dependencies = [
 "checksum jsonrpc-tcp-server 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c873dac37a601fb88d40ba49eeac3f1aa60953c06b2e99ddbf0569b6f8028478"
 "checksum jsonrpc-ws-server 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20b8333a5a6e6ccbcf5c90f90919de557cba4929efa164e9bd0e8e497eb20e46"
 "checksum keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "69e8ee697b9aa6dcc34d7657565fa5052763a1627a5b59e4c3c0ae3ed0d70a65"
-"checksum keccak-hasher 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)" = "af672553b2abac1c86c29fd62c79880638b6abc91d96db4aa42a5baab2bc1ca9"
+"checksum keccak-hasher 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6c936c737d79690593c34275faf583151a0e8c0abf34eaecad10399eed0beb7d"
 "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
 "checksum kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "72ae89206cea31c32014b39d5a454b96135894221610dbfd19cf4d2d044fa546"
 "checksum kvdb-memorydb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "45bcdf5eb083602cff61a6f8438dce2a7900d714e893fc48781c39fb119d37aa"
@@ -4941,11 +4973,12 @@ dependencies = [
 "checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"
 "checksum lru-cache 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c"
 "checksum lunarity-lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8a1670671f305792567116d4660e6e5bd785d6fa973e817c3445c0a7a54cecb6"
+"checksum malloc_size_of_derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "35adee9ed962cf7d07d62cb58bc45029f3227f5b5b86246caa8632f06c187bc3"
 "checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
 "checksum memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4b3629fe9fdbff6daa6c33b90f7c08355c1aca05a3d01fa8063b822fcf185f3b"
 "checksum memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e2ffa2c986de11a9df78620c01eeaaf27d94d3ff02bf81bfcca953102dd0c6ff"
 "checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3"
-"checksum memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94da53143d45f6bad3753f532e56ad57a6a26c0ca6881794583310c7cb4c885f"
+"checksum memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1eeeeab44c01c7da4409e68ec5b5db74c92305386efab3615e495b1dacaec196"
 "checksum memory_units 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71d96e3f3c0b6325d8ccd83c33b28acb183edcb6c67938ba104ec546854b0882"
 "checksum memzero 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "93c0d11ac30a033511ae414355d80f70d9f29a44a49140face477117a1ee90db"
 "checksum mime 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)" = "0a907b83e7b9e987032439a387e187119cddafc92d5c2aaeb1d92580a793f630"
@@ -4985,6 +5018,7 @@ dependencies = [
 "checksum parity-snappy 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2c5f9d149b13134b8b354d93a92830efcbee6fe5b73a2e6e540fe70d4dd8a63"
 "checksum parity-snappy-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1a413d51e5e1927320c9de992998e4a279dffb8c8a7363570198bd8383e66f1b"
 "checksum parity-tokio-ipc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eb002c2d3539ccd3b82bd915ec060028d4ab350ad203dbffa20028c1e483af5b"
"checksum parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "89e80f22052161e0cb55cb5a8a75890420c525031f95c9d262dbb0434aa85dc1"
"checksum parity-wasm 0.31.3 (registry+https://github.com/rust-lang/crates.io-index)" = "511379a8194230c2395d2f5fa627a5a7e108a9f976656ce723ae68fca4097bfc" "checksum parity-wasm 0.31.3 (registry+https://github.com/rust-lang/crates.io-index)" = "511379a8194230c2395d2f5fa627a5a7e108a9f976656ce723ae68fca4097bfc"
"checksum parity-wordlist 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf13102febd98f4ad416a526b42deb82daf482626ba6ab10d0ebf8f45327514c" "checksum parity-wordlist 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf13102febd98f4ad416a526b42deb82daf482626ba6ab10d0ebf8f45327514c"
"checksum parity-ws 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2fec5048fba72a2e01baeb0d08089db79aead4b57e2443df172fb1840075a233" "checksum parity-ws 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2fec5048fba72a2e01baeb0d08089db79aead4b57e2443df172fb1840075a233"
@ -5128,9 +5162,9 @@ dependencies = [
"checksum trace-time 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe82f2f0bf1991e163e757baf044282823155dd326e70f44ce2186c3c320cc9" "checksum trace-time 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe82f2f0bf1991e163e757baf044282823155dd326e70f44ce2186c3c320cc9"
"checksum transaction-pool 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8d8bd3123931aa6e49dd03bc8a2400490e14701d779458d1f1fff1f04c6f666" "checksum transaction-pool 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8d8bd3123931aa6e49dd03bc8a2400490e14701d779458d1f1fff1f04c6f666"
"checksum transient-hashmap 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aeb4b191d033a35edfce392a38cdcf9790b6cebcb30fa690c312c29da4dc433e" "checksum transient-hashmap 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aeb4b191d033a35edfce392a38cdcf9790b6cebcb30fa690c312c29da4dc433e"
"checksum trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c7319e28ca295f27359d944a682f7f65b419158bf1590c92cadc0000258d788" "checksum trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ae063390324bfcf36c7e8e4fb1f85f6f0fb5dd04e1cd282581eb7b8b34b32de7"
"checksum trie-standardmap 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ebaa4b340046196efad8872b2dffe585b5ea330230dc44ee14e399f77da29f51" "checksum trie-standardmap 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "40787fb1a63a97ed56d12bc303937ea274e09d1afa2e20e4f074eff2074b24d3"
"checksum triehash 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "92148b4d8d55eff71bc8c9e3c5f714e266c2a05e724dce5405a10deabbf449a8" "checksum triehash 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b645ad3fc9871596897fb64a57c9c29adc9f5ece87c2d78766e3fc5a5da56b56"
"checksum try-lock 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee2aa4715743892880f70885373966c83d73ef1b0838a664ef0c76fffd35e7c2" "checksum try-lock 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee2aa4715743892880f70885373966c83d73ef1b0838a664ef0c76fffd35e7c2"
"checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382" "checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382"
"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169" "checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169"
View File
@ -73,6 +73,8 @@ ethcore-secretstore = { path = "secret-store", optional = true }
registrar = { path = "util/registrar" } registrar = { path = "util/registrar" }
parity-util-mem = { version = "0.1", features = ["jemalloc-global"] }
[build-dependencies] [build-dependencies]
rustc_version = "0.2" rustc_version = "0.2"
@ -139,6 +141,3 @@ members = [
"util/fastmap", "util/fastmap",
"util/time-utils" "util/time-utils"
] ]
[patch.crates-io]
heapsize = { git = "https://github.com/cheme/heapsize.git", branch = "ec-macfix" }
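The new `parity-util-mem` dependency with the `jemalloc-global` feature is what switches the parity client to jemalloc: enabling the feature is expected to register jemalloc as the global allocator from inside parity-util-mem, so no call sites change here. A minimal sketch of the mechanism (a hypothetical standalone program, assuming the feature simply wires up the `jemallocator` crate; the real wiring lives inside parity-util-mem):

    use jemallocator::Jemalloc;

    // Registering jemalloc as the process-wide allocator; parity-util-mem is
    // assumed to do the equivalent of this internally when the
    // `jemalloc-global` feature is enabled.
    #[global_allocator]
    static GLOBAL: Jemalloc = Jemalloc;

    fn main() {
        // Every heap allocation in the binary now goes through jemalloc.
        let buffer = vec![0u8; 4096];
        println!("allocated {} bytes via jemalloc", buffer.len());
    }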
View File
@ -11,7 +11,7 @@ eth-secp256k1 = { git = "https://github.com/paritytech/rust-secp256k1" }
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
lazy_static = "1.0" lazy_static = "1.0"
log = "0.4" log = "0.4"
memzero = { path = "../../util/memzero" } parity-util-mem = "0.1"
parity-wordlist = "1.2" parity-wordlist = "1.2"
quick-error = "1.2.2" quick-error = "1.2.2"
rand = "0.6" rand = "0.6"
View File
@ -20,7 +20,7 @@ extern crate byteorder;
extern crate edit_distance; extern crate edit_distance;
extern crate parity_crypto; extern crate parity_crypto;
extern crate ethereum_types; extern crate ethereum_types;
extern crate memzero; extern crate parity_util_mem;
extern crate parity_wordlist; extern crate parity_wordlist;
#[macro_use] #[macro_use]
extern crate quick_error; extern crate quick_error;
View File
@ -21,7 +21,7 @@ use rustc_hex::ToHex;
use secp256k1::constants::{SECRET_KEY_SIZE as SECP256K1_SECRET_KEY_SIZE}; use secp256k1::constants::{SECRET_KEY_SIZE as SECP256K1_SECRET_KEY_SIZE};
use secp256k1::key; use secp256k1::key;
use ethereum_types::H256; use ethereum_types::H256;
use memzero::Memzero; use parity_util_mem::Memzero;
use {Error, SECP256K1}; use {Error, SECP256K1};
#[derive(Clone, PartialEq, Eq)] #[derive(Clone, PartialEq, Eq)]
View File
@ -31,8 +31,8 @@ ethjson = { path = "../json" }
ethkey = { path = "../accounts/ethkey" } ethkey = { path = "../accounts/ethkey" }
evm = { path = "evm" } evm = { path = "evm" }
futures = "0.1" futures = "0.1"
hash-db = "0.11.0" hash-db = "0.12.4"
heapsize = "0.4" parity-util-mem = "0.1"
itertools = "0.5" itertools = "0.5"
journaldb = { path = "../util/journaldb" } journaldb = { path = "../util/journaldb" }
keccak-hash = "0.2.0" keccak-hash = "0.2.0"
@ -46,14 +46,14 @@ log = "0.4"
lru-cache = "0.1" lru-cache = "0.1"
macros = { path = "../util/macros" } macros = { path = "../util/macros" }
memory-cache = { path = "../util/memory-cache" } memory-cache = { path = "../util/memory-cache" }
memory-db = "0.11.0" memory-db = "0.12.4"
num = { version = "0.1", default-features = false, features = ["bigint"] } num = { version = "0.1", default-features = false, features = ["bigint"] }
num_cpus = "1.2" num_cpus = "1.2"
parity-bytes = "0.1" parity-bytes = "0.1"
parity-crypto = "0.4.0" parity-crypto = "0.4.0"
parity-snappy = "0.1" parity-snappy = "0.1"
parking_lot = "0.7" parking_lot = "0.7"
trie-db = "0.11.0" trie-db = "0.12.4"
patricia-trie-ethereum = { path = "../util/patricia-trie-ethereum" } patricia-trie-ethereum = { path = "../util/patricia-trie-ethereum" }
rand = "0.6" rand = "0.6"
rayon = "1.0" rayon = "1.0"
@ -83,7 +83,7 @@ kvdb-rocksdb = "0.1.3"
parity-runtime = { path = "../util/runtime" } parity-runtime = { path = "../util/runtime" }
rlp_compress = { path = "../util/rlp-compress" } rlp_compress = { path = "../util/rlp-compress" }
tempdir = "0.3" tempdir = "0.3"
trie-standardmap = "0.12.3" trie-standardmap = "0.12.4"
[features] [features]
parity = ["work-notify", "price-info", "stratum"] parity = ["work-notify", "price-info", "stratum"]
View File
@ -13,7 +13,7 @@ blooms-db = { path = "../../util/blooms-db" }
common-types = { path = "../types" } common-types = { path = "../types" }
ethcore-db = { path = "../db" } ethcore-db = { path = "../db" }
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
heapsize = "0.4" parity-util-mem = "0.1"
itertools = "0.5" itertools = "0.5"
kvdb = "0.1" kvdb = "0.1"
log = "0.4" log = "0.4"
View File
@ -39,7 +39,7 @@ use ethcore_db::cache_manager::CacheManager;
use ethcore_db::keys::{BlockReceipts, BlockDetails, TransactionAddress, EPOCH_KEY_PREFIX, EpochTransitions}; use ethcore_db::keys::{BlockReceipts, BlockDetails, TransactionAddress, EPOCH_KEY_PREFIX, EpochTransitions};
use ethcore_db::{self as db, Writable, Readable, CacheUpdatePolicy}; use ethcore_db::{self as db, Writable, Readable, CacheUpdatePolicy};
use ethereum_types::{H256, Bloom, BloomRef, U256}; use ethereum_types::{H256, Bloom, BloomRef, U256};
use heapsize::HeapSizeOf; use util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
use itertools::Itertools; use itertools::Itertools;
use kvdb::{DBTransaction, KeyValueDB}; use kvdb::{DBTransaction, KeyValueDB};
use log::{trace, warn, info}; use log::{trace, warn, info};
@ -1489,11 +1489,12 @@ impl BlockChain {
/// Get current cache size. /// Get current cache size.
pub fn cache_size(&self) -> CacheSize { pub fn cache_size(&self) -> CacheSize {
let mut ops = new_malloc_size_ops();
CacheSize { CacheSize {
blocks: self.block_headers.read().heap_size_of_children() + self.block_bodies.read().heap_size_of_children(), blocks: self.block_headers.size_of(&mut ops) + self.block_bodies.size_of(&mut ops),
block_details: self.block_details.read().heap_size_of_children(), block_details: self.block_details.size_of(&mut ops),
transaction_addresses: self.transaction_addresses.read().heap_size_of_children(), transaction_addresses: self.transaction_addresses.size_of(&mut ops),
block_receipts: self.block_receipts.read().heap_size_of_children(), block_receipts: self.block_receipts.size_of(&mut ops),
} }
} }
@ -1528,12 +1529,13 @@ impl BlockChain {
transaction_addresses.shrink_to_fit(); transaction_addresses.shrink_to_fit();
block_receipts.shrink_to_fit(); block_receipts.shrink_to_fit();
block_headers.heap_size_of_children() + let mut ops = new_malloc_size_ops();
block_bodies.heap_size_of_children() + block_headers.size_of(&mut ops) +
block_details.heap_size_of_children() + block_bodies.size_of(&mut ops) +
block_hashes.heap_size_of_children() + block_details.size_of(&mut ops) +
transaction_addresses.heap_size_of_children() + block_hashes.size_of(&mut ops) +
block_receipts.heap_size_of_children() transaction_addresses.size_of(&mut ops) +
block_receipts.size_of(&mut ops)
}); });
} }
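The cache accounting above now builds one `MallocSizeOfOps` handle with `new_malloc_size_ops()` and threads it through every `size_of` call, instead of calling `heap_size_of_children()` per collection. A minimal sketch of that pattern on hypothetical caches (plain `HashMap`s rather than the chain's real types):

    use std::collections::HashMap;
    use parity_util_mem::{allocators::new_malloc_size_ops, MallocSizeOf};

    // Sum the heap used by two caches with a single ops handle, mirroring how
    // `cache_size` adds up the individual block caches above.
    fn caches_heap_bytes(
        headers: &HashMap<u64, Vec<u8>>,
        bodies: &HashMap<u64, Vec<u8>>,
    ) -> usize {
        let mut ops = new_malloc_size_ops();
        headers.size_of(&mut ops) + bodies.size_of(&mut ops)
    }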
View File
@ -18,6 +18,9 @@
#![warn(missing_docs)] #![warn(missing_docs)]
extern crate parity_util_mem as util_mem;
extern crate parity_util_mem as malloc_size_of;
mod best_block; mod best_block;
mod block_info; mod block_info;
mod blockchain; mod blockchain;
View File
@ -10,8 +10,8 @@ edition = "2018"
[dependencies] [dependencies]
common-types = { path = "../types" } common-types = { path = "../types" }
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
heapsize = "0.4"
kvdb = "0.1" kvdb = "0.1"
parity-util-mem = "0.1"
parking_lot = "0.7" parking_lot = "0.7"
rlp = "0.4.0" rlp = "0.4.0"
rlp_derive = { path = "../../util/rlp-derive" } rlp_derive = { path = "../../util/rlp-derive" }
View File
@ -23,7 +23,7 @@ use common_types::BlockNumber;
use common_types::engines::epoch::Transition as EpochTransition; use common_types::engines::epoch::Transition as EpochTransition;
use common_types::receipt::Receipt; use common_types::receipt::Receipt;
use ethereum_types::{H256, H264, U256}; use ethereum_types::{H256, H264, U256};
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use kvdb::PREFIX_LEN as DB_PREFIX_LEN; use kvdb::PREFIX_LEN as DB_PREFIX_LEN;
use rlp; use rlp;
use rlp_derive::{RlpEncodableWrapper, RlpDecodableWrapper, RlpEncodable, RlpDecodable}; use rlp_derive::{RlpEncodableWrapper, RlpDecodableWrapper, RlpEncodable, RlpDecodable};
@ -140,7 +140,7 @@ impl Key<EpochTransitions> for u64 {
} }
/// Familial details concerning a block /// Familial details concerning a block
#[derive(Debug, Clone)] #[derive(Debug, Clone, MallocSizeOf)]
pub struct BlockDetails { pub struct BlockDetails {
/// Block number /// Block number
pub number: BlockNumber, pub number: BlockNumber,
@ -195,14 +195,8 @@ impl rlp::Decodable for BlockDetails {
} }
} }
impl HeapSizeOf for BlockDetails {
fn heap_size_of_children(&self) -> usize {
self.children.heap_size_of_children()
}
}
/// Represents address of certain transaction within block /// Represents address of certain transaction within block
#[derive(Debug, PartialEq, Clone, RlpEncodable, RlpDecodable)] #[derive(Debug, PartialEq, Clone, RlpEncodable, RlpDecodable, MallocSizeOf)]
pub struct TransactionAddress { pub struct TransactionAddress {
/// Block hash /// Block hash
pub block_hash: H256, pub block_hash: H256,
@ -210,12 +204,8 @@ pub struct TransactionAddress {
pub index: usize pub index: usize
} }
impl HeapSizeOf for TransactionAddress {
fn heap_size_of_children(&self) -> usize { 0 }
}
/// Contains all block receipts. /// Contains all block receipts.
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)] #[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper, MallocSizeOf)]
pub struct BlockReceipts { pub struct BlockReceipts {
/// Block receipts /// Block receipts
pub receipts: Vec<Receipt>, pub receipts: Vec<Receipt>,
@ -230,12 +220,6 @@ impl BlockReceipts {
} }
} }
impl HeapSizeOf for BlockReceipts {
fn heap_size_of_children(&self) -> usize {
self.receipts.heap_size_of_children()
}
}
/// Candidate transitions to an epoch with specific number. /// Candidate transitions to an epoch with specific number.
#[derive(Clone, RlpEncodable, RlpDecodable)] #[derive(Clone, RlpEncodable, RlpDecodable)]
pub struct EpochTransitions { pub struct EpochTransitions {
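Most of the hand-written `HeapSizeOf` impls removed in this file are replaced by deriving `MallocSizeOf`, which measures every field rather than only the ones the manual impl counted. A short sketch of the derive on a hypothetical struct shaped like `BlockDetails` (hashes shortened to `u64` for brevity):

    use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

    #[derive(Clone, MallocSizeOf)]
    struct BlockDetailsLike {
        // Plain integers report no extra heap usage.
        number: u64,
        // The derive counts the heap buffer behind this Vec automatically.
        children: Vec<u64>,
    }

    fn main() {
        let details = BlockDetailsLike { number: 1, children: vec![2, 3, 5, 7] };
        println!("heap bytes: {}", details.malloc_size_of());
    }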
View File
@ -18,6 +18,9 @@
#![warn(missing_docs)] #![warn(missing_docs)]
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
mod db; mod db;
pub mod keys; pub mod keys;
View File
@ -7,7 +7,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
bit-set = "0.4" bit-set = "0.4"
parity-bytes = "0.1" parity-bytes = "0.1"
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
heapsize = "0.4" parity-util-mem = "0.1"
lazy_static = "1.0" lazy_static = "1.0"
log = "0.4" log = "0.4"
vm = { path = "../vm" } vm = { path = "../vm" }
View File
@ -21,7 +21,7 @@ extern crate criterion;
extern crate bit_set; extern crate bit_set;
extern crate ethereum_types; extern crate ethereum_types;
extern crate parking_lot; extern crate parking_lot;
extern crate heapsize; extern crate parity_util_mem as mem;
extern crate vm; extern crate vm;
extern crate evm; extern crate evm;
extern crate keccak_hash as hash; extern crate keccak_hash as hash;
View File
@ -16,7 +16,7 @@
use std::sync::Arc; use std::sync::Arc;
use hash::KECCAK_EMPTY; use hash::KECCAK_EMPTY;
use heapsize::HeapSizeOf; use parity_util_mem::{MallocSizeOf, MallocSizeOfOps};
use ethereum_types::H256; use ethereum_types::H256;
use parking_lot::Mutex; use parking_lot::Mutex;
use memory_cache::MemoryLruCache; use memory_cache::MemoryLruCache;
@ -25,11 +25,12 @@ use super::super::instructions::{self, Instruction};
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024; const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
// stub for a HeapSizeOf implementation. /// Stub for sharing `BitSet` data in the cache (reference counted),
/// with a `MallocSizeOf` implementation on it.
struct Bits(Arc<BitSet>); struct Bits(Arc<BitSet>);
impl HeapSizeOf for Bits { impl MallocSizeOf for Bits {
fn heap_size_of_children(&self) -> usize { fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
// dealing in bits here // dealing in bits here
self.0.capacity() * 8 self.0.capacity() * 8
} }
View File
@ -19,7 +19,7 @@
extern crate bit_set; extern crate bit_set;
extern crate ethereum_types; extern crate ethereum_types;
extern crate parking_lot; extern crate parking_lot;
extern crate heapsize; extern crate parity_util_mem;
extern crate vm; extern crate vm;
extern crate keccak_hash as hash; extern crate keccak_hash as hash;
extern crate memory_cache; extern crate memory_cache;
View File
@ -15,14 +15,14 @@ ethcore = { path = ".."}
ethcore-db = { path = "../db" } ethcore-db = { path = "../db" }
ethcore-blockchain = { path = "../blockchain" } ethcore-blockchain = { path = "../blockchain" }
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
memory-db = "0.11.0" memory-db = "0.12.4"
trie-db = "0.11.0" trie-db = "0.12.4"
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" } patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
ethcore-network = { path = "../../util/network" } ethcore-network = { path = "../../util/network" }
ethcore-miner = { path = "../../miner" } ethcore-miner = { path = "../../miner" }
ethcore-io = { path = "../../util/io" } ethcore-io = { path = "../../util/io" }
hash-db = "0.11.0" hash-db = "0.12.4"
heapsize = "0.4" parity-util-mem = "0.1"
vm = { path = "../vm" } vm = { path = "../vm" }
fastmap = { path = "../../util/fastmap" } fastmap = { path = "../../util/fastmap" }
failsafe = { version = "0.3.0", default-features = false, features = ["parking_lot_mutex"] } failsafe = { version = "0.3.0", default-features = false, features = ["parking_lot_mutex"] }
View File
@ -21,12 +21,12 @@
//! vector of all gas prices from a recent range of blocks. //! vector of all gas prices from a recent range of blocks.
use std::time::{Instant, Duration}; use std::time::{Instant, Duration};
use parity_util_mem::{MallocSizeOf, MallocSizeOfOps, MallocSizeOfExt};
use common_types::encoded; use common_types::encoded;
use common_types::BlockNumber; use common_types::BlockNumber;
use common_types::receipt::Receipt; use common_types::receipt::Receipt;
use ethereum_types::{H256, U256}; use ethereum_types::{H256, U256};
use heapsize::HeapSizeOf;
use memory_cache::MemoryLruCache; use memory_cache::MemoryLruCache;
use stats::Corpus; use stats::Corpus;
@ -157,18 +157,20 @@ impl Cache {
/// Get the memory used. /// Get the memory used.
pub fn mem_used(&self) -> usize { pub fn mem_used(&self) -> usize {
self.heap_size_of_children() self.malloc_size_of()
} }
} }
impl HeapSizeOf for Cache {
fn heap_size_of_children(&self) -> usize { // This is a fast method: a more exhaustive implementation is possible
impl MallocSizeOf for Cache {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
self.headers.current_size() self.headers.current_size()
+ self.canon_hashes.current_size() + self.canon_hashes.current_size()
+ self.bodies.current_size() + self.bodies.current_size()
+ self.receipts.current_size() + self.receipts.current_size()
+ self.chain_score.current_size() + self.chain_score.current_size()
// TODO: + corpus // `self.corpus` is skipped
} }
} }
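Not every impl becomes a derive: the gas-price `Cache` above keeps a manual `MallocSizeOf` that reuses the LRU caches' own size counters and ignores the `ops` handle. A minimal sketch of that manual-impl pattern on a hypothetical type:

    use parity_util_mem::{MallocSizeOf, MallocSizeOfOps};

    struct FixedCostCache {
        // The type already tracks how many heap bytes it holds.
        bytes_held: usize,
    }

    impl MallocSizeOf for FixedCostCache {
        fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
            // No per-allocation measurement needed; report the tracked total.
            self.bytes_held
        }
    }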
View File
@ -95,7 +95,8 @@ pub struct BlockInfo {
/// Build an in-memory CHT from a closure which provides necessary information /// Build an in-memory CHT from a closure which provides necessary information
/// about blocks. If the fetcher ever fails to provide the info, the CHT /// about blocks. If the fetcher ever fails to provide the info, the CHT
/// will not be generated. /// will not be generated.
pub fn build<F>(cht_num: u64, mut fetcher: F) -> Option<CHT<MemoryDB<KeccakHasher, DBValue>>> pub fn build<F>(cht_num: u64, mut fetcher: F)
-> Option<CHT<MemoryDB<KeccakHasher, memory_db::HashKey<KeccakHasher>, DBValue>>>
where F: FnMut(BlockId) -> Option<BlockInfo> where F: FnMut(BlockId) -> Option<BlockInfo>
{ {
let mut db = new_memory_db(); let mut db = new_memory_db();
@ -154,7 +155,7 @@ pub fn compute_root<I>(cht_num: u64, iterable: I) -> Option<H256>
pub fn check_proof(proof: &[Bytes], num: u64, root: H256) -> Option<(H256, U256)> { pub fn check_proof(proof: &[Bytes], num: u64, root: H256) -> Option<(H256, U256)> {
let mut db = new_memory_db(); let mut db = new_memory_db();
for node in proof { db.insert(&node[..]); } for node in proof { db.insert(hash_db::EMPTY_PREFIX, &node[..]); }
let res = match TrieDB::new(&db, &root) { let res = match TrieDB::new(&db, &root) {
Err(_) => return None, Err(_) => return None,
Ok(trie) => trie.get_with(&key!(num), |val: &[u8]| { Ok(trie) => trie.get_with(&key!(num), |val: &[u8]| {
View File
@ -38,7 +38,7 @@ use ethcore::engines::epoch::{Transition as EpochTransition, PendingTransition a
use ethcore::error::{Error, EthcoreResult, BlockError}; use ethcore::error::{Error, EthcoreResult, BlockError};
use ethcore::spec::{Spec, SpecHardcodedSync}; use ethcore::spec::{Spec, SpecHardcodedSync};
use ethereum_types::{H256, H264, U256}; use ethereum_types::{H256, H264, U256};
use heapsize::HeapSizeOf; use parity_util_mem::{MallocSizeOf, MallocSizeOfOps};
use kvdb::{DBTransaction, KeyValueDB}; use kvdb::{DBTransaction, KeyValueDB};
use parking_lot::{Mutex, RwLock}; use parking_lot::{Mutex, RwLock};
use fastmap::H256FastMap; use fastmap::H256FastMap;
@ -95,8 +95,8 @@ struct Entry {
canonical_hash: H256, canonical_hash: H256,
} }
impl HeapSizeOf for Entry { impl MallocSizeOf for Entry {
fn heap_size_of_children(&self) -> usize { fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
if self.candidates.spilled() { if self.candidates.spilled() {
self.candidates.capacity() * ::std::mem::size_of::<Candidate>() self.candidates.capacity() * ::std::mem::size_of::<Candidate>()
} else { } else {
@ -202,14 +202,21 @@ pub enum HardcodedSync {
Deny, Deny,
} }
#[derive(MallocSizeOf)]
/// Header chain. See module docs for more details. /// Header chain. See module docs for more details.
pub struct HeaderChain { pub struct HeaderChain {
#[ignore_malloc_size_of = "ignored for performance reason"]
genesis_header: encoded::Header, // special-case the genesis. genesis_header: encoded::Header, // special-case the genesis.
candidates: RwLock<BTreeMap<u64, Entry>>, candidates: RwLock<BTreeMap<u64, Entry>>,
#[ignore_malloc_size_of = "ignored for performance reason"]
best_block: RwLock<BlockDescriptor>, best_block: RwLock<BlockDescriptor>,
#[ignore_malloc_size_of = "ignored for performance reason"]
live_epoch_proofs: RwLock<H256FastMap<EpochTransition>>, live_epoch_proofs: RwLock<H256FastMap<EpochTransition>>,
#[ignore_malloc_size_of = "ignored for performance reason"]
db: Arc<KeyValueDB>, db: Arc<KeyValueDB>,
#[ignore_malloc_size_of = "ignored for performance reason"]
col: Option<u32>, col: Option<u32>,
#[ignore_malloc_size_of = "ignored for performance reason"]
cache: Arc<Mutex<Cache>>, cache: Arc<Mutex<Cache>>,
} }
@ -838,12 +845,6 @@ impl HeaderChain {
} }
} }
impl HeapSizeOf for HeaderChain {
fn heap_size_of_children(&self) -> usize {
self.candidates.read().heap_size_of_children()
}
}
/// Iterator over a block's ancestry. /// Iterator over a block's ancestry.
pub struct AncestryIter<'a> { pub struct AncestryIter<'a> {
next: Option<encoded::Header>, next: Option<encoded::Header>,
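Fields that are expensive or impossible to measure (shared handles, trait objects) are opted out of the derive with `#[ignore_malloc_size_of = "..."]`, as `HeaderChain` does above. A short sketch of that attribute on a hypothetical chain-like struct:

    use std::collections::BTreeMap;
    use std::sync::Arc;
    use parity_util_mem::MallocSizeOf;

    #[derive(MallocSizeOf)]
    struct ChainLike {
        // Measured: this map is where the bulk of the memory lives.
        candidates: BTreeMap<u64, Vec<u8>>,
        // Skipped: a shared handle whose contents are accounted for elsewhere.
        #[ignore_malloc_size_of = "shared handle, counted elsewhere"]
        db: Arc<Vec<u8>>,
    }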

View File

@ -362,9 +362,9 @@ impl<T: ChainDataFetcher> Client<T> {
/// Get blockchain mem usage in bytes. /// Get blockchain mem usage in bytes.
pub fn chain_mem_used(&self) -> usize { pub fn chain_mem_used(&self) -> usize {
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOfExt;
self.chain.heap_size_of_children() self.chain.malloc_size_of()
} }
/// Set a closure to call when the client wants to be restarted. /// Set a closure to call when the client wants to be restarted.
View File
@ -64,7 +64,9 @@ extern crate ethereum_types;
extern crate ethcore_miner as miner; extern crate ethcore_miner as miner;
extern crate ethcore; extern crate ethcore;
extern crate hash_db; extern crate hash_db;
extern crate heapsize; extern crate parity_util_mem;
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
extern crate failsafe; extern crate failsafe;
extern crate futures; extern crate futures;
extern crate itertools; extern crate itertools;
View File
@ -981,7 +981,7 @@ impl Account {
let state_root = header.state_root(); let state_root = header.state_root();
let mut db = journaldb::new_memory_db(); let mut db = journaldb::new_memory_db();
for node in proof { db.insert(&node[..]); } for node in proof { db.insert(hash_db::EMPTY_PREFIX, &node[..]); }
match TrieDB::new(&db, &state_root).and_then(|t| t.get(keccak(&self.address).as_bytes()))? { match TrieDB::new(&db, &state_root).and_then(|t| t.get(keccak(&self.address).as_bytes()))? {
Some(val) => { Some(val) => {

View File
ethkey = { path = "../../accounts/ethkey" } ethkey = { path = "../../accounts/ethkey" }
fetch = { path = "../../util/fetch" } fetch = { path = "../../util/fetch" }
futures = "0.1" futures = "0.1"
heapsize = "0.4" parity-util-mem = "0.1"
keccak-hash = "0.2.0" keccak-hash = "0.2.0"
log = "0.4" log = "0.4"
parity-bytes = "0.1" parity-bytes = "0.1"
parity-crypto = "0.4.0" parity-crypto = "0.4.0"
parking_lot = "0.7" parking_lot = "0.7"
trie-db = "0.11.0" trie-db = "0.12.4"
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" } patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
rand = "0.3" rand = "0.3"
rlp = "0.4.0" rlp = "0.4.0"
View File
@ -34,7 +34,7 @@ extern crate ethjson;
extern crate ethkey; extern crate ethkey;
extern crate fetch; extern crate fetch;
extern crate futures; extern crate futures;
extern crate heapsize; extern crate parity_util_mem;
extern crate keccak_hash as hash; extern crate keccak_hash as hash;
extern crate parity_bytes as bytes; extern crate parity_bytes as bytes;
extern crate parity_crypto as crypto; extern crate parity_crypto as crypto;
View File
@ -21,7 +21,7 @@ use std::collections::{HashMap, HashSet};
use bytes::Bytes; use bytes::Bytes;
use ethcore_miner::pool; use ethcore_miner::pool;
use ethereum_types::{H256, U256, Address}; use ethereum_types::{H256, U256, Address};
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOfExt;
use ethkey::Signature; use ethkey::Signature;
use messages::PrivateTransaction; use messages::PrivateTransaction;
use parking_lot::RwLock; use parking_lot::RwLock;
@ -59,7 +59,7 @@ impl txpool::VerifiedTransaction for VerifiedPrivateTransaction {
} }
fn mem_usage(&self) -> usize { fn mem_usage(&self) -> usize {
self.transaction.heap_size_of_children() self.transaction.malloc_size_of()
} }
fn sender(&self) -> &Address { fn sender(&self) -> &Address {

View File
//! DB backend wrapper for Account trie //! DB backend wrapper for Account trie
use ethereum_types::H256; use ethereum_types::H256;
use hash::{KECCAK_NULL_RLP, keccak}; use hash::{KECCAK_NULL_RLP, keccak};
use hash_db::{HashDB, AsHashDB}; use hash_db::{HashDB, AsHashDB, Prefix};
use keccak_hasher::KeccakHasher; use keccak_hasher::KeccakHasher;
use kvdb::DBValue; use kvdb::DBValue;
use rlp::NULL_RLP; use rlp::NULL_RLP;
@ -103,29 +103,29 @@ impl<'db> AsHashDB<KeccakHasher, DBValue> for AccountDB<'db> {
} }
impl<'db> HashDB<KeccakHasher, DBValue> for AccountDB<'db> { impl<'db> HashDB<KeccakHasher, DBValue> for AccountDB<'db> {
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP)); return Some(DBValue::from_slice(&NULL_RLP));
} }
self.db.get(&combine_key(&self.address_hash, key)) self.db.get(&combine_key(&self.address_hash, key), prefix)
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return true; return true;
} }
self.db.contains(&combine_key(&self.address_hash, key)) self.db.contains(&combine_key(&self.address_hash, key), prefix)
} }
fn insert(&mut self, _value: &[u8]) -> H256 { fn insert(&mut self, _prefix: Prefix, _value: &[u8]) -> H256 {
unimplemented!() unimplemented!()
} }
fn emplace(&mut self, _key: H256, _value: DBValue) { fn emplace(&mut self, _key: H256, _prefix: Prefix, _value: DBValue) {
unimplemented!() unimplemented!()
} }
fn remove(&mut self, _key: &H256) { fn remove(&mut self, _key: &H256, _prefix: Prefix) {
unimplemented!() unimplemented!()
} }
} }
@ -158,44 +158,44 @@ impl<'db> AccountDBMut<'db> {
} }
impl<'db> HashDB<KeccakHasher, DBValue> for AccountDBMut<'db>{ impl<'db> HashDB<KeccakHasher, DBValue> for AccountDBMut<'db>{
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP)); return Some(DBValue::from_slice(&NULL_RLP));
} }
self.db.get(&combine_key(&self.address_hash, key)) self.db.get(&combine_key(&self.address_hash, key), prefix)
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return true; return true;
} }
self.db.contains(&combine_key(&self.address_hash, key)) self.db.contains(&combine_key(&self.address_hash, key), prefix)
} }
fn insert(&mut self, value: &[u8]) -> H256 { fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
if value == &NULL_RLP { if value == &NULL_RLP {
return KECCAK_NULL_RLP.clone(); return KECCAK_NULL_RLP.clone();
} }
let k = keccak(value); let k = keccak(value);
let ak = combine_key(&self.address_hash, &k); let ak = combine_key(&self.address_hash, &k);
self.db.emplace(ak, DBValue::from_slice(value)); self.db.emplace(ak, prefix, DBValue::from_slice(value));
k k
} }
fn emplace(&mut self, key: H256, value: DBValue) { fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
if key == KECCAK_NULL_RLP { if key == KECCAK_NULL_RLP {
return; return;
} }
let key = combine_key(&self.address_hash, &key); let key = combine_key(&self.address_hash, &key);
self.db.emplace(key, value) self.db.emplace(key, prefix, value)
} }
fn remove(&mut self, key: &H256) { fn remove(&mut self, key: &H256, prefix: Prefix) {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return; return;
} }
let key = combine_key(&self.address_hash, key); let key = combine_key(&self.address_hash, key);
self.db.remove(&key) self.db.remove(&key, prefix)
} }
} }
@ -212,29 +212,29 @@ impl<'db> AsHashDB<KeccakHasher, DBValue> for Wrapping<'db> {
} }
impl<'db> HashDB<KeccakHasher, DBValue> for Wrapping<'db> { impl<'db> HashDB<KeccakHasher, DBValue> for Wrapping<'db> {
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP)); return Some(DBValue::from_slice(&NULL_RLP));
} }
self.0.get(key) self.0.get(key, prefix)
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return true; return true;
} }
self.0.contains(key) self.0.contains(key, prefix)
} }
fn insert(&mut self, _value: &[u8]) -> H256 { fn insert(&mut self, _prefix: Prefix, _value: &[u8]) -> H256 {
unimplemented!() unimplemented!()
} }
fn emplace(&mut self, _key: H256, _value: DBValue) { fn emplace(&mut self, _key: H256, _prefix: Prefix, _value: DBValue) {
unimplemented!() unimplemented!()
} }
fn remove(&mut self, _key: &H256) { fn remove(&mut self, _key: &H256, _prefix: Prefix) {
unimplemented!() unimplemented!()
} }
} }
@ -246,38 +246,38 @@ impl<'db> AsHashDB<KeccakHasher, DBValue> for WrappingMut<'db> {
} }
impl<'db> HashDB<KeccakHasher, DBValue> for WrappingMut<'db>{ impl<'db> HashDB<KeccakHasher, DBValue> for WrappingMut<'db>{
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP)); return Some(DBValue::from_slice(&NULL_RLP));
} }
self.0.get(key) self.0.get(key, prefix)
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return true; return true;
} }
self.0.contains(key) self.0.contains(key, prefix)
} }
fn insert(&mut self, value: &[u8]) -> H256 { fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
if value == &NULL_RLP { if value == &NULL_RLP {
return KECCAK_NULL_RLP.clone(); return KECCAK_NULL_RLP.clone();
} }
self.0.insert(value) self.0.insert(prefix, value)
} }
fn emplace(&mut self, key: H256, value: DBValue) { fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
if key == KECCAK_NULL_RLP { if key == KECCAK_NULL_RLP {
return; return;
} }
self.0.emplace(key, value) self.0.emplace(key, prefix, value)
} }
fn remove(&mut self, key: &H256) { fn remove(&mut self, key: &H256, prefix: Prefix) {
if key == &KECCAK_NULL_RLP { if key == &KECCAK_NULL_RLP {
return; return;
} }
self.0.remove(key) self.0.remove(key, prefix)
} }
} }
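The other half of the migration is the reworked `hash-db` API: every `HashDB` method now takes a `Prefix`, and call sites that don't use prefixes pass `hash_db::EMPTY_PREFIX`, as the wrappers above do. A minimal round-trip sketch against the in-memory DB helper this commit uses elsewhere:

    use hash_db::{HashDB, EMPTY_PREFIX};

    fn roundtrip() {
        // `journaldb::new_memory_db()` returns a prefix-aware MemoryDB without
        // spelling out its generic parameters.
        let mut db = journaldb::new_memory_db();
        let key = db.insert(EMPTY_PREFIX, b"some trie node");
        assert!(db.contains(&key, EMPTY_PREFIX));
        let value = db.get(&key, EMPTY_PREFIX).expect("value was just inserted");
        assert_eq!(&value[..], &b"some trie node"[..]);
        db.remove(&key, EMPTY_PREFIX);
    }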
View File
@ -566,6 +566,7 @@ mod tests {
use types::header::Header; use types::header::Header;
use types::view; use types::view;
use types::views::BlockView; use types::views::BlockView;
use hash_db::EMPTY_PREFIX;
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header /// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
fn enact_bytes( fn enact_bytes(
@ -668,7 +669,8 @@ mod tests {
let db = e.drain().state.drop().1; let db = e.drain().state.drop().1;
assert_eq!(orig_db.journal_db().keys(), db.journal_db().keys()); assert_eq!(orig_db.journal_db().keys(), db.journal_db().keys());
assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0) != db.journal_db().get(k.0)).next() == None); assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0, EMPTY_PREFIX)
!= db.journal_db().get(k.0, EMPTY_PREFIX)).next() == None);
} }
#[test] #[test]
@ -702,6 +704,7 @@ mod tests {
let db = e.drain().state.drop().1; let db = e.drain().state.drop().1;
assert_eq!(orig_db.journal_db().keys(), db.journal_db().keys()); assert_eq!(orig_db.journal_db().keys(), db.journal_db().keys());
assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0) != db.journal_db().get(k.0)).next() == None); assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0, EMPTY_PREFIX)
!= db.journal_db().get(k.0, EMPTY_PREFIX)).next() == None);
} }
} }
View File
@ -43,7 +43,7 @@ use types::log_entry::LocalizedLogEntry;
use types::receipt::{Receipt, LocalizedReceipt}; use types::receipt::{Receipt, LocalizedReceipt};
use types::{BlockNumber, header::{Header, ExtendedHeader}}; use types::{BlockNumber, header::{Header, ExtendedHeader}};
use vm::{EnvInfo, LastHashes}; use vm::{EnvInfo, LastHashes};
use hash_db::EMPTY_PREFIX;
use block::{LockedBlock, Drain, ClosedBlock, OpenBlock, enact_verified, SealedBlock}; use block::{LockedBlock, Drain, ClosedBlock, OpenBlock, enact_verified, SealedBlock};
use client::ancient_import::AncientVerifier; use client::ancient_import::AncientVerifier;
use client::{ use client::{
@ -743,7 +743,7 @@ impl Client {
config.history config.history
}; };
if !chain.block_header_data(&chain.best_block_hash()).map_or(true, |h| state_db.journal_db().contains(&h.state_root())) { if !chain.block_header_data(&chain.best_block_hash()).map_or(true, |h| state_db.journal_db().contains(&h.state_root(), EMPTY_PREFIX)) {
warn!("State root not found for block #{} ({:x})", chain.best_block_number(), chain.best_block_hash()); warn!("State root not found for block #{} ({:x})", chain.best_block_number(), chain.best_block_hash());
} }
View File
@ -16,7 +16,7 @@
/// Preconfigured validator list. /// Preconfigured validator list.
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use ethereum_types::{H256, Address}; use ethereum_types::{H256, Address};
use machine::{AuxiliaryData, Call, EthereumMachine}; use machine::{AuxiliaryData, Call, EthereumMachine};
@ -25,7 +25,7 @@ use types::header::Header;
use super::ValidatorSet; use super::ValidatorSet;
/// Validator set containing a known set of addresses. /// Validator set containing a known set of addresses.
#[derive(Clone, Debug, PartialEq, Eq, Default)] #[derive(Clone, Debug, PartialEq, Eq, Default, MallocSizeOf)]
pub struct SimpleList { pub struct SimpleList {
validators: Vec<Address>, validators: Vec<Address>,
} }
@ -58,12 +58,6 @@ impl From<Vec<Address>> for SimpleList {
} }
} }
impl HeapSizeOf for SimpleList {
fn heap_size_of_children(&self) -> usize {
self.validators.heap_size_of_children()
}
}
impl ValidatorSet for SimpleList { impl ValidatorSet for SimpleList {
fn default_caller(&self, _block_id: ::types::ids::BlockId) -> Box<Call> { fn default_caller(&self, _block_id: ::types::ids::BlockId) -> Box<Call> {
Box::new(|_, _| Err("Simple list doesn't require calls.".into())) Box::new(|_, _| Err("Simple list doesn't require calls.".into()))
View File
@ -19,10 +19,10 @@
use std::str::FromStr; use std::str::FromStr;
use std::sync::Arc; use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrdering}; use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrdering};
use parity_util_mem::MallocSizeOf;
use bytes::Bytes; use bytes::Bytes;
use ethereum_types::{H256, Address}; use ethereum_types::{H256, Address};
use heapsize::HeapSizeOf;
use types::BlockNumber; use types::BlockNumber;
use types::header::Header; use types::header::Header;
@ -30,9 +30,12 @@ use machine::{AuxiliaryData, Call, EthereumMachine};
use super::{ValidatorSet, SimpleList}; use super::{ValidatorSet, SimpleList};
/// Set used for testing with a single validator. /// Set used for testing with a single validator.
#[derive(MallocSizeOf)]
pub struct TestSet { pub struct TestSet {
validator: SimpleList, validator: SimpleList,
#[ignore_malloc_size_of = "zero sized"]
last_malicious: Arc<AtomicUsize>, last_malicious: Arc<AtomicUsize>,
#[ignore_malloc_size_of = "zero sized"]
last_benign: Arc<AtomicUsize>, last_benign: Arc<AtomicUsize>,
} }
@ -52,12 +55,6 @@ impl TestSet {
} }
} }
impl HeapSizeOf for TestSet {
fn heap_size_of_children(&self) -> usize {
self.validator.heap_size_of_children()
}
}
impl ValidatorSet for TestSet { impl ValidatorSet for TestSet {
fn default_caller(&self, _block_id: ::types::ids::BlockId) -> Box<Call> { fn default_caller(&self, _block_id: ::types::ids::BlockId) -> Box<Call> {
Box::new(|_, _| Err("Test set doesn't require calls.".into())) Box::new(|_, _| Err("Test set doesn't require calls.".into()))

View File
extern crate ethkey; extern crate ethkey;
extern crate futures; extern crate futures;
extern crate hash_db; extern crate hash_db;
extern crate heapsize;
extern crate itertools; extern crate itertools;
extern crate journaldb; extern crate journaldb;
extern crate keccak_hash as hash; extern crate keccak_hash as hash;
@ -98,6 +97,9 @@ extern crate patricia_trie_ethereum as ethtrie;
extern crate rand; extern crate rand;
extern crate rayon; extern crate rayon;
extern crate rlp; extern crate rlp;
extern crate parity_util_mem;
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
extern crate rustc_hex; extern crate rustc_hex;
extern crate serde; extern crate serde;
extern crate stats; extern crate stats;

View File
/// Place additional data into given hash DB. /// Place additional data into given hash DB.
pub fn insert_additional(&self, db: &mut dyn HashDB<KeccakHasher, DBValue>, factory: &TrieFactory<KeccakHasher, RlpCodec>) { pub fn insert_additional(&self, db: &mut dyn HashDB<KeccakHasher, DBValue>, factory: &TrieFactory<KeccakHasher, RlpCodec>) {
match self.code { match self.code {
Some(ref c) if !c.is_empty() => { db.insert(c); } Some(ref c) if !c.is_empty() => { db.insert(hash_db::EMPTY_PREFIX, c); }
_ => {} _ => {}
} }
let mut r = H256::zero(); let mut r = H256::zero();

View File
} else if used_code.contains(&acc.code_hash) { } else if used_code.contains(&acc.code_hash) {
account_stream.append(&CodeState::Hash.raw()).append(&acc.code_hash); account_stream.append(&CodeState::Hash.raw()).append(&acc.code_hash);
} else { } else {
match acct_db.get(&acc.code_hash) { match acct_db.get(&acc.code_hash, hash_db::EMPTY_PREFIX) {
Some(c) => { Some(c) => {
used_code.insert(acc.code_hash.clone()); used_code.insert(acc.code_hash.clone());
account_stream.append(&CodeState::Inline.raw()).append(&&*c); account_stream.append(&CodeState::Inline.raw()).append(&&*c);
@ -182,7 +182,7 @@ pub fn from_fat_rlp(
CodeState::Empty => (KECCAK_EMPTY, None), CodeState::Empty => (KECCAK_EMPTY, None),
CodeState::Inline => { CodeState::Inline => {
let code: Bytes = rlp.val_at(3)?; let code: Bytes = rlp.val_at(3)?;
let code_hash = acct_db.insert(&code); let code_hash = acct_db.insert(hash_db::EMPTY_PREFIX, &code);
(code_hash, Some(code)) (code_hash, Some(code))
} }
@ -228,7 +228,7 @@ mod tests {
use hash::{KECCAK_EMPTY, KECCAK_NULL_RLP, keccak}; use hash::{KECCAK_EMPTY, KECCAK_NULL_RLP, keccak};
use ethereum_types::{H256, Address}; use ethereum_types::{H256, Address};
use hash_db::HashDB; use hash_db::{HashDB, EMPTY_PREFIX};
use kvdb::DBValue; use kvdb::DBValue;
use rlp::Rlp; use rlp::Rlp;
@ -324,12 +324,12 @@ mod tests {
let code_hash = { let code_hash = {
let mut acct_db = AccountDBMut::new(db.as_hash_db_mut(), &addr1); let mut acct_db = AccountDBMut::new(db.as_hash_db_mut(), &addr1);
acct_db.insert(b"this is definitely code") acct_db.insert(EMPTY_PREFIX, b"this is definitely code")
}; };
{ {
let mut acct_db = AccountDBMut::new(db.as_hash_db_mut(), &addr2); let mut acct_db = AccountDBMut::new(db.as_hash_db_mut(), &addr2);
acct_db.emplace(code_hash.clone(), DBValue::from_slice(b"this is definitely code")); acct_db.emplace(code_hash.clone(), EMPTY_PREFIX, DBValue::from_slice(b"this is definitely code"));
} }
let account1 = BasicAccount { let account1 = BasicAccount {

View File
for (code_hash, code, first_with) in status.new_code { for (code_hash, code, first_with) in status.new_code {
for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) { for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) {
let mut db = AccountDBMut::from_hash(self.db.as_hash_db_mut(), addr_hash); let mut db = AccountDBMut::from_hash(self.db.as_hash_db_mut(), addr_hash);
db.emplace(code_hash, DBValue::from_slice(&code)); db.emplace(code_hash, hash_db::EMPTY_PREFIX, DBValue::from_slice(&code));
} }
self.known_code.insert(code_hash, first_with); self.known_code.insert(code_hash, first_with);
@ -545,11 +545,11 @@ fn rebuild_accounts(
Some(&first_with) => { Some(&first_with) => {
// if so, load it from the database. // if so, load it from the database.
let code = AccountDB::from_hash(db, first_with) let code = AccountDB::from_hash(db, first_with)
.get(&code_hash) .get(&code_hash, hash_db::EMPTY_PREFIX)
.ok_or_else(|| Error::MissingCode(vec![first_with]))?; .ok_or_else(|| Error::MissingCode(vec![first_with]))?;
// and write it again under a different mangled key // and write it again under a different mangled key
AccountDBMut::from_hash(db, hash).emplace(code_hash, code); AccountDBMut::from_hash(db, hash).emplace(code_hash, hash_db::EMPTY_PREFIX, code);
} }
// if not, queue it up to be filled later // if not, queue it up to be filled later
None => status.missing_code.push((hash, code_hash)), None => status.missing_code.push((hash, code_hash)),

View File
#[test] #[test]
fn snap_and_restore() { fn snap_and_restore() {
use hash_db::HashDB; use hash_db::{HashDB, EMPTY_PREFIX};
let mut producer = StateProducer::new(); let mut producer = StateProducer::new();
let mut rng = XorShiftRng::from_seed(RNG_SEED); let mut rng = XorShiftRng::from_seed(RNG_SEED);
let mut old_db = journaldb::new_memory_db(); let mut old_db = journaldb::new_memory_db();
@ -97,7 +97,7 @@ fn snap_and_restore() {
let keys = old_db.keys(); let keys = old_db.keys();
for key in keys.keys() { for key in keys.keys() {
assert_eq!(old_db.get(&key).unwrap(), new_db.as_hash_db().get(&key).unwrap()); assert_eq!(old_db.get(&key, EMPTY_PREFIX).unwrap(), new_db.as_hash_db().get(&key, EMPTY_PREFIX).unwrap());
} }
} }
@ -106,7 +106,7 @@ fn get_code_from_prev_chunk() {
use std::collections::HashSet; use std::collections::HashSet;
use rlp::RlpStream; use rlp::RlpStream;
use ethereum_types::{H256, U256}; use ethereum_types::{H256, U256};
use hash_db::HashDB; use hash_db::{HashDB, EMPTY_PREFIX};
use account_db::{AccountDBMut, AccountDB}; use account_db::{AccountDBMut, AccountDB};
@ -128,7 +128,7 @@ fn get_code_from_prev_chunk() {
let mut make_chunk = |acc, hash| { let mut make_chunk = |acc, hash| {
let mut db = journaldb::new_memory_db(); let mut db = journaldb::new_memory_db();
AccountDBMut::from_hash(&mut db, hash).insert(&code[..]); AccountDBMut::from_hash(&mut db, hash).insert(EMPTY_PREFIX, &code[..]);
let p = Progress::default(); let p = Progress::default();
let fat_rlp = account::to_fat_rlps(&hash, &acc, &AccountDB::from_hash(&db, hash), &mut used_code, usize::max_value(), usize::max_value(), &p).unwrap(); let fat_rlp = account::to_fat_rlps(&hash, &acc, &AccountDB::from_hash(&db, hash), &mut used_code, usize::max_value(), usize::max_value(), &p).unwrap();
let mut stream = RlpStream::new_list(1); let mut stream = RlpStream::new_list(1);

View File
/// Ensure that the given state DB has the trie nodes in for the genesis state. /// Ensure that the given state DB has the trie nodes in for the genesis state.
pub fn ensure_db_good<T: Backend>(&self, db: T, factories: &Factories) -> Result<T, Error> { pub fn ensure_db_good<T: Backend>(&self, db: T, factories: &Factories) -> Result<T, Error> {
if db.as_hash_db().contains(&self.state_root()) { if db.as_hash_db().contains(&self.state_root(), hash_db::EMPTY_PREFIX) {
return Ok(db); return Ok(db);
} }

View File
if self.is_cached() { return Some(self.code_cache.clone()); } if self.is_cached() { return Some(self.code_cache.clone()); }
match db.get(&self.code_hash) { match db.get(&self.code_hash, hash_db::EMPTY_PREFIX) {
Some(x) => { Some(x) => {
self.code_size = Some(x.len()); self.code_size = Some(x.len());
self.code_cache = Arc::new(x.into_vec()); self.code_cache = Arc::new(x.into_vec());
@ -393,7 +393,7 @@ impl Account {
trace!("Account::cache_code_size: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty()); trace!("Account::cache_code_size: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size.is_some() || self.code_size.is_some() ||
if self.code_hash != KECCAK_EMPTY { if self.code_hash != KECCAK_EMPTY {
match db.get(&self.code_hash) { match db.get(&self.code_hash, hash_db::EMPTY_PREFIX) {
Some(x) => { Some(x) => {
self.code_size = Some(x.len()); self.code_size = Some(x.len());
true true
@ -507,7 +507,7 @@ impl Account {
self.code_filth = Filth::Clean; self.code_filth = Filth::Clean;
}, },
(true, false) => { (true, false) => {
db.emplace(self.code_hash.clone(), DBValue::from_slice(&*self.code_cache)); db.emplace(self.code_hash.clone(), hash_db::EMPTY_PREFIX, DBValue::from_slice(&*self.code_cache));
self.code_size = Some(self.code_cache.len()); self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Clean; self.code_filth = Filth::Clean;
}, },

View File
use state::Account; use state::Account;
use parking_lot::Mutex; use parking_lot::Mutex;
use ethereum_types::{Address, H256}; use ethereum_types::{Address, H256};
use memory_db::MemoryDB; use memory_db::{MemoryDB, HashKey};
use hash_db::{AsHashDB, HashDB}; use hash_db::{AsHashDB, HashDB, Prefix, EMPTY_PREFIX};
use kvdb::DBValue; use kvdb::DBValue;
use keccak_hasher::KeccakHasher; use keccak_hasher::KeccakHasher;
use journaldb::AsKeyedHashDB; use journaldb::AsKeyedHashDB;
@ -78,13 +78,13 @@ pub trait Backend: Send {
// TODO: when account lookup moved into backends, this won't rely as tenuously on intended // TODO: when account lookup moved into backends, this won't rely as tenuously on intended
// usage. // usage.
#[derive(Clone, PartialEq)] #[derive(Clone, PartialEq)]
pub struct ProofCheck(MemoryDB<KeccakHasher, DBValue>); pub struct ProofCheck(MemoryDB<KeccakHasher, HashKey<KeccakHasher>, DBValue>);
impl ProofCheck { impl ProofCheck {
/// Create a new `ProofCheck` backend from the given state items. /// Create a new `ProofCheck` backend from the given state items.
pub fn new(proof: &[DBValue]) -> Self { pub fn new(proof: &[DBValue]) -> Self {
let mut db = journaldb::new_memory_db(); let mut db = journaldb::new_memory_db();
for item in proof { db.insert(item); } for item in proof { db.insert(EMPTY_PREFIX, item); }
ProofCheck(db) ProofCheck(db)
} }
} }
@ -94,23 +94,23 @@ impl journaldb::KeyedHashDB for ProofCheck {
} }
impl HashDB<KeccakHasher, DBValue> for ProofCheck { impl HashDB<KeccakHasher, DBValue> for ProofCheck {
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
self.0.get(key) self.0.get(key, prefix)
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.0.contains(key) self.0.contains(key, prefix)
} }
fn insert(&mut self, value: &[u8]) -> H256 { fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.0.insert(value) self.0.insert(prefix, value)
} }
fn emplace(&mut self, key: H256, value: DBValue) { fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.0.emplace(key, value) self.0.emplace(key, prefix, value)
} }
fn remove(&mut self, _key: &H256) { } fn remove(&mut self, _key: &H256, _prefix: Prefix) { }
} }
impl AsHashDB<KeccakHasher, DBValue> for ProofCheck { impl AsHashDB<KeccakHasher, DBValue> for ProofCheck {
@ -141,7 +141,7 @@ impl Backend for ProofCheck {
/// This doesn't cache anything or rely on the canonical state caches. /// This doesn't cache anything or rely on the canonical state caches.
pub struct Proving<H> { pub struct Proving<H> {
base: H, // state we're proving values from. base: H, // state we're proving values from.
changed: MemoryDB<KeccakHasher, DBValue>, // changed state via insertions. changed: MemoryDB<KeccakHasher, HashKey<KeccakHasher>, DBValue>, // changed state via insertions.
proof: Mutex<HashSet<DBValue>>, proof: Mutex<HashSet<DBValue>>,
} }
@ -163,32 +163,32 @@ impl<H: AsKeyedHashDB + Send + Sync> journaldb::KeyedHashDB for Proving<H> {
} }
impl<H: AsHashDB<KeccakHasher, DBValue> + Send + Sync> HashDB<KeccakHasher, DBValue> for Proving<H> { impl<H: AsHashDB<KeccakHasher, DBValue> + Send + Sync> HashDB<KeccakHasher, DBValue> for Proving<H> {
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
match self.base.as_hash_db().get(key) { match self.base.as_hash_db().get(key, prefix) {
Some(val) => { Some(val) => {
self.proof.lock().insert(val.clone()); self.proof.lock().insert(val.clone());
Some(val) Some(val)
} }
None => self.changed.get(key) None => self.changed.get(key, prefix)
} }
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.get(key).is_some() self.get(key, prefix).is_some()
} }
fn insert(&mut self, value: &[u8]) -> H256 { fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.changed.insert(value) self.changed.insert(prefix, value)
} }
fn emplace(&mut self, key: H256, value: DBValue) { fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.changed.emplace(key, value) self.changed.emplace(key, prefix, value)
} }
fn remove(&mut self, key: &H256) { fn remove(&mut self, key: &H256, prefix: Prefix) {
// only remove from `changed` // only remove from `changed`
if self.changed.contains(key) { if self.changed.contains(key, prefix) {
self.changed.remove(key) self.changed.remove(key, prefix)
} }
} }
} }
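`memory-db` 0.12 also gains a key-function type parameter, which is why `ProofCheck` and `Proving` above now spell out `MemoryDB<KeccakHasher, HashKey<KeccakHasher>, DBValue>`. A small sketch of what that parameter amounts to, reusing the same construction helper as the code above:

    use keccak_hasher::KeccakHasher;
    use kvdb::DBValue;
    use memory_db::{HashKey, MemoryDB};

    // `HashKey` keeps the old behaviour where the lookup key is simply the
    // value's hash; prefix-aware key functions are also available.
    type PlainMemoryDB = MemoryDB<KeccakHasher, HashKey<KeccakHasher>, DBValue>;

    fn new_db() -> PlainMemoryDB {
        // The commit routes construction through this helper rather than
        // repeating the three generic parameters at every call site.
        journaldb::new_memory_db()
    }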
View File
@ -381,7 +381,7 @@ impl<B: Backend> State<B> {
/// Creates new state with existing state root /// Creates new state with existing state root
pub fn from_existing(db: B, root: H256, account_start_nonce: U256, factories: Factories) -> TrieResult<State<B>> { pub fn from_existing(db: B, root: H256, account_start_nonce: U256, factories: Factories) -> TrieResult<State<B>> {
if !db.as_hash_db().contains(&root) { if !db.as_hash_db().contains(&root, hash_db::EMPTY_PREFIX) {
return Err(Box::new(TrieError::InvalidStateRoot(root))); return Err(Box::new(TrieError::InvalidStateRoot(root)));
} }

View File
//! Trace database. //! Trace database.
use std::collections::HashMap; use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
use parity_util_mem::MallocSizeOfExt;
use blockchain::BlockChainDB; use blockchain::BlockChainDB;
use db::cache_manager::CacheManager; use db::cache_manager::CacheManager;
use db::{self, Key, Writable, Readable, CacheUpdatePolicy}; use db::{self, Key, Writable, Readable, CacheUpdatePolicy};
use ethereum_types::{H256, H264}; use ethereum_types::{H256, H264};
use heapsize::HeapSizeOf;
use kvdb::{DBTransaction}; use kvdb::{DBTransaction};
use parking_lot::RwLock; use parking_lot::RwLock;
use types::BlockNumber; use types::BlockNumber;
@ -91,7 +91,7 @@ impl<T> TraceDB<T> where T: DatabaseExtras {
} }
fn cache_size(&self) -> usize { fn cache_size(&self) -> usize {
self.traces.read().heap_size_of_children() self.traces.read().malloc_size_of()
} }
/// Let the cache system know that a cacheable item has been used. /// Let the cache system know that a cacheable item has been used.
@ -113,7 +113,7 @@ impl<T> TraceDB<T> where T: DatabaseExtras {
} }
traces.shrink_to_fit(); traces.shrink_to_fit();
traces.heap_size_of_children() traces.malloc_size_of()
}); });
} }
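Reviewer note: this hunk shows the core of the whole change: MallocSizeOfExt::malloc_size_of() is the drop-in replacement for HeapSizeOf::heap_size_of_children() when sizing cached collections. A standalone sketch with hypothetical types, not from the commit:

// Standalone sketch of the replacement pattern shown above.
use std::collections::HashMap;
use parity_util_mem::MallocSizeOfExt;

fn cache_size(traces: &HashMap<u64, Vec<u8>>) -> usize {
	// measures the map's backing allocation plus the heap owned by its entries
	traces.malloc_size_of()
}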

View File

@ -17,17 +17,19 @@
//! Flat trace module //! Flat trace module
use rlp::{Rlp, RlpStream, Decodable, Encodable, DecoderError}; use rlp::{Rlp, RlpStream, Decodable, Encodable, DecoderError};
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use ethereum_types::Bloom; use ethereum_types::Bloom;
use super::trace::{Action, Res}; use super::trace::{Action, Res};
/// Trace localized in vector of traces produced by a single transaction. /// Trace localized in vector of traces produced by a single transaction.
/// ///
/// Parent and children indexes refer to positions in this vector. /// Parent and children indexes refer to positions in this vector.
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone, MallocSizeOf)]
pub struct FlatTrace { pub struct FlatTrace {
#[ignore_malloc_size_of = "ignored for performance reason"]
/// Type of action performed by a transaction. /// Type of action performed by a transaction.
pub action: Action, pub action: Action,
#[ignore_malloc_size_of = "ignored for performance reason"]
/// Result of this action. /// Result of this action.
pub result: Res, pub result: Res,
/// Number of subtraces. /// Number of subtraces.
@ -45,12 +47,6 @@ impl FlatTrace {
} }
} }
impl HeapSizeOf for FlatTrace {
fn heap_size_of_children(&self) -> usize {
self.trace_address.heap_size_of_children()
}
}
impl Encodable for FlatTrace { impl Encodable for FlatTrace {
fn rlp_append(&self, s: &mut RlpStream) { fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4); s.begin_list(4);
@ -76,7 +72,7 @@ impl Decodable for FlatTrace {
} }
/// Represents all traces produced by a single transaction. /// Represents all traces produced by a single transaction.
#[derive(Debug, PartialEq, Clone, RlpEncodableWrapper, RlpDecodableWrapper)] #[derive(Debug, PartialEq, Clone, RlpEncodableWrapper, RlpDecodableWrapper, MallocSizeOf)]
pub struct FlatTransactionTraces(Vec<FlatTrace>); pub struct FlatTransactionTraces(Vec<FlatTrace>);
impl From<Vec<FlatTrace>> for FlatTransactionTraces { impl From<Vec<FlatTrace>> for FlatTransactionTraces {
@ -85,12 +81,6 @@ impl From<Vec<FlatTrace>> for FlatTransactionTraces {
} }
} }
impl HeapSizeOf for FlatTransactionTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl FlatTransactionTraces { impl FlatTransactionTraces {
/// Returns bloom of all traces in the collection. /// Returns bloom of all traces in the collection.
pub fn bloom(&self) -> Bloom { pub fn bloom(&self) -> Bloom {
@ -105,15 +95,9 @@ impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
} }
/// Represents all traces produced by transactions in a single block. /// Represents all traces produced by transactions in a single block.
#[derive(Debug, PartialEq, Clone, Default, RlpEncodableWrapper, RlpDecodableWrapper)] #[derive(Debug, PartialEq, Clone, Default, RlpEncodableWrapper, RlpDecodableWrapper, MallocSizeOf)]
pub struct FlatBlockTraces(Vec<FlatTransactionTraces>); pub struct FlatBlockTraces(Vec<FlatTransactionTraces>);
impl HeapSizeOf for FlatBlockTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl From<Vec<FlatTransactionTraces>> for FlatBlockTraces { impl From<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn from(v: Vec<FlatTransactionTraces>) -> Self { fn from(v: Vec<FlatTransactionTraces>) -> Self {
FlatBlockTraces(v) FlatBlockTraces(v)
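Reviewer note: the manual HeapSizeOf impls removed above collapse into #[derive(MallocSizeOf)], with #[ignore_malloc_size_of = "..."] opting individual fields out. A compact sketch of the pattern with a hypothetical type; it assumes the crate-level parity-util-mem setup (the extern crate renames) added elsewhere in this diff:

// Hypothetical type illustrating the derive pattern used for FlatTrace above.
use parity_util_mem::MallocSizeOf;

#[derive(MallocSizeOf)]
struct ExampleTrace {
	/// Skipped: its heap usage is not counted at all.
	#[ignore_malloc_size_of = "ignored for performance reasons"]
	raw: Vec<u8>,
	/// Counted automatically by the derived impl.
	trace_address: Vec<usize>,
	/// Plain integers own no heap, so they contribute zero.
	subtraces: usize,
}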

View File

@ -19,7 +19,7 @@
use engines::EthEngine; use engines::EthEngine;
use error::Error; use error::Error;
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use ethereum_types::{H256, U256}; use ethereum_types::{H256, U256};
pub use self::blocks::Blocks; pub use self::blocks::Blocks;
@ -49,13 +49,13 @@ pub trait BlockLike {
/// consistent. /// consistent.
pub trait Kind: 'static + Sized + Send + Sync { pub trait Kind: 'static + Sized + Send + Sync {
/// The first stage: completely unverified. /// The first stage: completely unverified.
type Input: Sized + Send + BlockLike + HeapSizeOf; type Input: Sized + Send + BlockLike + MallocSizeOf;
/// The second stage: partially verified. /// The second stage: partially verified.
type Unverified: Sized + Send + BlockLike + HeapSizeOf; type Unverified: Sized + Send + BlockLike + MallocSizeOf;
/// The third stage: completely verified. /// The third stage: completely verified.
type Verified: Sized + Send + BlockLike + HeapSizeOf; type Verified: Sized + Send + BlockLike + MallocSizeOf;
/// Attempt to create the `Unverified` item from the input. /// Attempt to create the `Unverified` item from the input.
fn create(input: Self::Input, engine: &dyn EthEngine, check_seal: bool) -> Result<Self::Unverified, (Self::Input, Error)>; fn create(input: Self::Input, engine: &dyn EthEngine, check_seal: bool) -> Result<Self::Unverified, (Self::Input, Error)>;
@ -74,7 +74,7 @@ pub mod blocks {
use verification::{PreverifiedBlock, verify_block_basic, verify_block_unordered}; use verification::{PreverifiedBlock, verify_block_basic, verify_block_unordered};
use types::transaction::UnverifiedTransaction; use types::transaction::UnverifiedTransaction;
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use ethereum_types::{H256, U256}; use ethereum_types::{H256, U256};
use bytes::Bytes; use bytes::Bytes;
@ -113,7 +113,7 @@ pub mod blocks {
} }
/// An unverified block. /// An unverified block.
#[derive(PartialEq, Debug)] #[derive(PartialEq, Debug, MallocSizeOf)]
pub struct Unverified { pub struct Unverified {
/// Unverified block header. /// Unverified block header.
pub header: Header, pub header: Header,
@ -146,15 +146,6 @@ pub mod blocks {
} }
} }
impl HeapSizeOf for Unverified {
fn heap_size_of_children(&self) -> usize {
self.header.heap_size_of_children()
+ self.transactions.heap_size_of_children()
+ self.uncles.heap_size_of_children()
+ self.bytes.heap_size_of_children()
}
}
impl BlockLike for Unverified { impl BlockLike for Unverified {
fn hash(&self) -> H256 { fn hash(&self) -> H256 {
self.header.hash() self.header.hash()
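Reviewer note: with the Kind bounds switched from HeapSizeOf to MallocSizeOf, generic queue code can size any stage's items through the one trait. A hypothetical helper, not from the commit, showing what the bound is used for (it mirrors the drained_size fold later in this diff):

// Hypothetical helper: sums the measured size of a batch of queue items,
// relying only on the MallocSizeOf bound the Kind trait now requires.
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

fn batch_size<T: MallocSizeOf>(items: &[T]) -> usize {
	items.iter().map(|item| item.malloc_size_of()).sum()
}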

View File

@ -22,7 +22,7 @@ use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering as AtomicOrdering};
use std::sync::Arc; use std::sync::Arc;
use std::cmp; use std::cmp;
use std::collections::{VecDeque, HashSet, HashMap}; use std::collections::{VecDeque, HashSet, HashMap};
use heapsize::HeapSizeOf; use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};
use ethereum_types::{H256, U256}; use ethereum_types::{H256, U256};
use parking_lot::{Condvar, Mutex, RwLock}; use parking_lot::{Condvar, Mutex, RwLock};
use io::*; use io::*;
@ -96,17 +96,12 @@ enum State {
} }
/// An item which is in the process of being verified. /// An item which is in the process of being verified.
#[derive(MallocSizeOf)]
pub struct Verifying<K: Kind> { pub struct Verifying<K: Kind> {
hash: H256, hash: H256,
output: Option<K::Verified>, output: Option<K::Verified>,
} }
impl<K: Kind> HeapSizeOf for Verifying<K> {
fn heap_size_of_children(&self) -> usize {
self.output.heap_size_of_children()
}
}
/// Status of items in the queue. /// Status of items in the queue.
pub enum Status { pub enum Status {
/// Currently queued. /// Currently queued.
@ -353,7 +348,7 @@ impl<K: Kind> VerificationQueue<K> {
None => continue, None => continue,
}; };
verification.sizes.unverified.fetch_sub(item.heap_size_of_children(), AtomicOrdering::SeqCst); verification.sizes.unverified.fetch_sub(item.malloc_size_of(), AtomicOrdering::SeqCst);
verifying.push_back(Verifying { hash: item.hash(), output: None }); verifying.push_back(Verifying { hash: item.hash(), output: None });
item item
}; };
@ -367,7 +362,7 @@ impl<K: Kind> VerificationQueue<K> {
if e.hash == hash { if e.hash == hash {
idx = Some(i); idx = Some(i);
verification.sizes.verifying.fetch_add(verified.heap_size_of_children(), AtomicOrdering::SeqCst); verification.sizes.verifying.fetch_add(verified.malloc_size_of(), AtomicOrdering::SeqCst);
e.output = Some(verified); e.output = Some(verified);
break; break;
} }
@ -417,7 +412,7 @@ impl<K: Kind> VerificationQueue<K> {
while let Some(output) = verifying.front_mut().and_then(|x| x.output.take()) { while let Some(output) = verifying.front_mut().and_then(|x| x.output.take()) {
assert!(verifying.pop_front().is_some()); assert!(verifying.pop_front().is_some());
let size = output.heap_size_of_children(); let size = output.malloc_size_of();
removed_size += size; removed_size += size;
if bad.contains(&output.parent_hash()) { if bad.contains(&output.parent_hash()) {
@ -490,7 +485,7 @@ impl<K: Kind> VerificationQueue<K> {
match K::create(input, &*self.engine, self.verification.check_seal) { match K::create(input, &*self.engine, self.verification.check_seal) {
Ok(item) => { Ok(item) => {
self.verification.sizes.unverified.fetch_add(item.heap_size_of_children(), AtomicOrdering::SeqCst); self.verification.sizes.unverified.fetch_add(item.malloc_size_of(), AtomicOrdering::SeqCst);
self.processing.write().insert(hash, item.difficulty()); self.processing.write().insert(hash, item.difficulty());
{ {
@ -537,7 +532,7 @@ impl<K: Kind> VerificationQueue<K> {
let mut removed_size = 0; let mut removed_size = 0;
for output in verified.drain(..) { for output in verified.drain(..) {
if bad.contains(&output.parent_hash()) { if bad.contains(&output.parent_hash()) {
removed_size += output.heap_size_of_children(); removed_size += output.malloc_size_of();
bad.insert(output.hash()); bad.insert(output.hash());
if let Some(difficulty) = processing.remove(&output.hash()) { if let Some(difficulty) = processing.remove(&output.hash()) {
let mut td = self.total_difficulty.write(); let mut td = self.total_difficulty.write();
@ -574,7 +569,7 @@ impl<K: Kind> VerificationQueue<K> {
let count = cmp::min(max, verified.len()); let count = cmp::min(max, verified.len());
let result = verified.drain(..count).collect::<Vec<_>>(); let result = verified.drain(..count).collect::<Vec<_>>();
let drained_size = result.iter().map(HeapSizeOf::heap_size_of_children).fold(0, |a, c| a + c); let drained_size = result.iter().map(MallocSizeOfExt::malloc_size_of).fold(0, |a, c| a + c);
self.verification.sizes.verified.fetch_sub(drained_size, AtomicOrdering::SeqCst); self.verification.sizes.verified.fetch_sub(drained_size, AtomicOrdering::SeqCst);
self.ready_signal.reset(); self.ready_signal.reset();
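Reviewer note: every hunk in this file has the same shape: a size counter is adjusted by malloc_size_of() wherever it used to be adjusted by heap_size_of_children(). A simplified, hypothetical sketch of that bookkeeping:

// Simplified sketch of the queue's size bookkeeping after the switch.
use std::sync::atomic::{AtomicUsize, Ordering};
use parity_util_mem::MallocSizeOfExt;

fn move_between_stages<T: MallocSizeOfExt>(from: &AtomicUsize, to: &AtomicUsize, item: &T) {
	let bytes = item.malloc_size_of();
	// the item leaves one stage's accounting and enters the next
	from.fetch_sub(bytes, Ordering::SeqCst);
	to.fetch_add(bytes, Ordering::SeqCst);
}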

View File

@ -26,7 +26,7 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
use bytes::Bytes; use bytes::Bytes;
use hash::keccak; use hash::keccak;
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use rlp::Rlp; use rlp::Rlp;
use triehash::ordered_trie_root; use triehash::ordered_trie_root;
use unexpected::{Mismatch, OutOfBounds}; use unexpected::{Mismatch, OutOfBounds};
@ -44,6 +44,7 @@ use verification::queue::kind::blocks::Unverified;
use time_utils::CheckedSystemTime; use time_utils::CheckedSystemTime;
/// Preprocessed block data gathered in `verify_block_unordered` call /// Preprocessed block data gathered in `verify_block_unordered` call
#[derive(MallocSizeOf)]
pub struct PreverifiedBlock { pub struct PreverifiedBlock {
/// Populated block header /// Populated block header
pub header: Header, pub header: Header,
@ -55,14 +56,6 @@ pub struct PreverifiedBlock {
pub bytes: Bytes, pub bytes: Bytes,
} }
impl HeapSizeOf for PreverifiedBlock {
fn heap_size_of_children(&self) -> usize {
self.header.heap_size_of_children()
+ self.transactions.heap_size_of_children()
+ self.bytes.heap_size_of_children()
}
}
/// Phase 1 quick block verification. Only does checks that are cheap. Operates on a single block /// Phase 1 quick block verification. Only does checks that are cheap. Operates on a single block
pub fn verify_block_basic(block: &Unverified, engine: &dyn EthEngine, check_seal: bool) -> Result<(), Error> { pub fn verify_block_basic(block: &Unverified, engine: &dyn EthEngine, check_seal: bool) -> Result<(), Error> {
verify_header_params(&block.header, engine, true, check_seal)?; verify_header_params(&block.header, engine, true, check_seal)?;

View File

@ -19,8 +19,7 @@ ethereum-types = "0.6.0"
ethkey = { path = "../../accounts/ethkey" } ethkey = { path = "../../accounts/ethkey" }
ethstore = { path = "../../accounts/ethstore" } ethstore = { path = "../../accounts/ethstore" }
fastmap = { path = "../../util/fastmap" } fastmap = { path = "../../util/fastmap" }
hash-db = "0.11.0" hash-db = "0.12.4"
heapsize = "0.4"
keccak-hash = "0.2.0" keccak-hash = "0.2.0"
keccak-hasher = { path = "../../util/keccak-hasher" } keccak-hasher = { path = "../../util/keccak-hasher" }
kvdb = "0.1" kvdb = "0.1"
@ -28,6 +27,7 @@ log = "0.4"
macros = { path = "../../util/macros" } macros = { path = "../../util/macros" }
parity-bytes = "0.1" parity-bytes = "0.1"
parking_lot = "0.7" parking_lot = "0.7"
parity-util-mem = "0.1"
rand = "0.6" rand = "0.6"
rlp = "0.4.0" rlp = "0.4.0"
trace-time = "0.1" trace-time = "0.1"

View File

@ -65,7 +65,7 @@ pub const ETH_PROTOCOL: ProtocolId = *b"eth";
pub const LIGHT_PROTOCOL: ProtocolId = *b"pip"; pub const LIGHT_PROTOCOL: ProtocolId = *b"pip";
/// Determine warp sync status. /// Determine warp sync status.
#[derive(Debug, Clone, Copy, PartialEq, Eq)] #[derive(Debug, Clone, Copy, PartialEq, Eq, MallocSizeOf)]
pub enum WarpSync { pub enum WarpSync {
/// Warp sync is enabled. /// Warp sync is enabled.
Enabled, Enabled,

View File

@ -20,7 +20,7 @@
use std::collections::{HashSet, VecDeque}; use std::collections::{HashSet, VecDeque};
use std::cmp; use std::cmp;
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use ethereum_types::H256; use ethereum_types::H256;
use rlp::{self, Rlp}; use rlp::{self, Rlp};
use types::BlockNumber; use types::BlockNumber;
@ -60,7 +60,7 @@ macro_rules! debug_sync {
}; };
} }
#[derive(Copy, Clone, Eq, PartialEq, Debug)] #[derive(Copy, Clone, Eq, PartialEq, Debug, MallocSizeOf)]
/// Downloader state /// Downloader state
pub enum State { pub enum State {
/// No active downloads. /// No active downloads.
@ -113,6 +113,7 @@ impl From<rlp::DecoderError> for BlockDownloaderImportError {
/// Block downloader strategy. /// Block downloader strategy.
/// Manages state and block data for a block download process. /// Manages state and block data for a block download process.
#[derive(MallocSizeOf)]
pub struct BlockDownloader { pub struct BlockDownloader {
/// Which set of blocks to download /// Which set of blocks to download
block_set: BlockSet, block_set: BlockSet,
@ -223,11 +224,6 @@ impl BlockDownloader {
self.state = State::Blocks; self.state = State::Blocks;
} }
/// Returns used heap memory size.
pub fn heap_size(&self) -> usize {
self.blocks.heap_size() + self.round_parents.heap_size_of_children()
}
/// Returns best imported block number. /// Returns best imported block number.
pub fn last_imported_block_number(&self) -> BlockNumber { pub fn last_imported_block_number(&self) -> BlockNumber {
self.last_imported_block self.last_imported_block

View File

@ -16,7 +16,7 @@
use std::collections::{HashSet, HashMap, hash_map}; use std::collections::{HashSet, HashMap, hash_map};
use hash::{keccak, KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP}; use hash::{keccak, KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP};
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use ethereum_types::H256; use ethereum_types::H256;
use triehash_ethereum::ordered_trie_root; use triehash_ethereum::ordered_trie_root;
use bytes::Bytes; use bytes::Bytes;
@ -26,21 +26,15 @@ use ethcore::verification::queue::kind::blocks::Unverified;
use types::transaction::UnverifiedTransaction; use types::transaction::UnverifiedTransaction;
use types::header::Header as BlockHeader; use types::header::Header as BlockHeader;
known_heap_size!(0, HeaderId); malloc_size_of_is_0!(HeaderId);
#[derive(PartialEq, Debug, Clone)] #[derive(PartialEq, Debug, Clone)]
#[derive(MallocSizeOf)]
pub struct SyncHeader { pub struct SyncHeader {
pub bytes: Bytes, pub bytes: Bytes,
pub header: BlockHeader, pub header: BlockHeader,
} }
impl HeapSizeOf for SyncHeader {
fn heap_size_of_children(&self) -> usize {
self.bytes.heap_size_of_children()
+ self.header.heap_size_of_children()
}
}
impl SyncHeader { impl SyncHeader {
pub fn from_rlp(bytes: Bytes) -> Result<Self, DecoderError> { pub fn from_rlp(bytes: Bytes) -> Result<Self, DecoderError> {
let result = SyncHeader { let result = SyncHeader {
@ -52,6 +46,7 @@ impl SyncHeader {
} }
} }
#[derive(MallocSizeOf)]
pub struct SyncBody { pub struct SyncBody {
pub transactions_bytes: Bytes, pub transactions_bytes: Bytes,
pub transactions: Vec<UnverifiedTransaction>, pub transactions: Vec<UnverifiedTransaction>,
@ -85,16 +80,8 @@ impl SyncBody {
} }
} }
impl HeapSizeOf for SyncBody {
fn heap_size_of_children(&self) -> usize {
self.transactions_bytes.heap_size_of_children()
+ self.transactions.heap_size_of_children()
+ self.uncles_bytes.heap_size_of_children()
+ self.uncles.heap_size_of_children()
}
}
/// Block data with optional body. /// Block data with optional body.
#[derive(MallocSizeOf)]
struct SyncBlock { struct SyncBlock {
header: SyncHeader, header: SyncHeader,
body: Option<SyncBody>, body: Option<SyncBody>,
@ -102,12 +89,6 @@ struct SyncBlock {
receipts_root: H256, receipts_root: H256,
} }
impl HeapSizeOf for SyncBlock {
fn heap_size_of_children(&self) -> usize {
self.header.heap_size_of_children() + self.body.heap_size_of_children()
}
}
fn unverified_from_sync(header: SyncHeader, body: Option<SyncBody>) -> Unverified { fn unverified_from_sync(header: SyncHeader, body: Option<SyncBody>) -> Unverified {
let mut stream = RlpStream::new_list(3); let mut stream = RlpStream::new_list(3);
stream.append_raw(&header.bytes, 1); stream.append_raw(&header.bytes, 1);
@ -141,7 +122,7 @@ struct HeaderId {
/// A collection of blocks and subchain pointers being downloaded. This keeps track of /// A collection of blocks and subchain pointers being downloaded. This keeps track of
/// which headers/bodies need to be downloaded, which are being downloaded and also holds /// which headers/bodies need to be downloaded, which are being downloaded and also holds
/// the downloaded blocks. /// the downloaded blocks.
#[derive(Default)] #[derive(Default, MallocSizeOf)]
pub struct BlockCollection { pub struct BlockCollection {
/// Does this collection need block receipts. /// Does this collection need block receipts.
need_receipts: bool, need_receipts: bool,
@ -399,16 +380,6 @@ impl BlockCollection {
self.heads.len() self.heads.len()
} }
/// Return used heap size.
pub fn heap_size(&self) -> usize {
self.heads.heap_size_of_children()
+ self.blocks.heap_size_of_children()
+ self.parents.heap_size_of_children()
+ self.header_ids.heap_size_of_children()
+ self.downloading_headers.heap_size_of_children()
+ self.downloading_bodies.heap_size_of_children()
}
/// Check if given block hash is marked as being downloaded. /// Check if given block hash is marked as being downloaded.
pub fn is_downloading(&self, hash: &H256) -> bool { pub fn is_downloading(&self, hash: &H256) -> bool {
self.downloading_headers.contains(hash) || self.downloading_bodies.contains(hash) self.downloading_headers.contains(hash) || self.downloading_bodies.contains(hash)
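Reviewer note: known_heap_size!(0, HeaderId) becomes malloc_size_of_is_0!(HeaderId); both declare that a type owns no heap memory. A sketch with a hypothetical type, assuming the #[macro_use] extern crate parity_util_mem import added elsewhere in this diff:

// Hypothetical plain-old-data type: fixed-size fields only, nothing on the heap,
// so its MallocSizeOf contribution is declared to be zero.
// The macro comes from parity-util-mem via the crate-level #[macro_use] import.
use ethereum_types::H256;

struct BlockId {
	hash: H256,
	number: u64,
}

malloc_size_of_is_0!(BlockId);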

View File

@ -98,7 +98,7 @@ use std::collections::{HashSet, HashMap, BTreeMap};
use std::cmp; use std::cmp;
use std::time::{Duration, Instant}; use std::time::{Duration, Instant};
use hash::keccak; use hash::keccak;
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOfExt;
use futures::sync::mpsc as futures_mpsc; use futures::sync::mpsc as futures_mpsc;
use api::Notification; use api::Notification;
use ethereum_types::{H256, U256}; use ethereum_types::{H256, U256};
@ -132,7 +132,7 @@ use self::propagator::SyncPropagator;
use self::requester::SyncRequester; use self::requester::SyncRequester;
pub(crate) use self::supplier::SyncSupplier; pub(crate) use self::supplier::SyncSupplier;
known_heap_size!(0, PeerInfo); malloc_size_of_is_0!(PeerInfo);
pub type PacketDecodeError = DecoderError; pub type PacketDecodeError = DecoderError;
@ -179,7 +179,7 @@ const SNAPSHOT_DATA_TIMEOUT: Duration = Duration::from_secs(120);
/// (so we might send only to some part of the peers we originally intended to send to) /// (so we might send only to some part of the peers we originally intended to send to)
const PRIORITY_TASK_DEADLINE: Duration = Duration::from_millis(100); const PRIORITY_TASK_DEADLINE: Duration = Duration::from_millis(100);
#[derive(Copy, Clone, Eq, PartialEq, Debug)] #[derive(Copy, Clone, Eq, PartialEq, Debug, MallocSizeOf)]
/// Sync state /// Sync state
pub enum SyncState { pub enum SyncState {
/// Collecting enough peers to start syncing. /// Collecting enough peers to start syncing.
@ -273,7 +273,7 @@ pub enum PeerAsking {
SnapshotData, SnapshotData,
} }
#[derive(PartialEq, Eq, Debug, Clone, Copy)] #[derive(PartialEq, Eq, Debug, Clone, Copy, MallocSizeOf)]
/// Block downloader channel. /// Block downloader channel.
pub enum BlockSet { pub enum BlockSet {
/// New blocks better than our best blocks /// New blocks better than our best blocks
@ -585,6 +585,7 @@ enum PeerState {
/// Blockchain sync handler. /// Blockchain sync handler.
/// See module documentation for more details. /// See module documentation for more details.
#[derive(MallocSizeOf)]
pub struct ChainSync { pub struct ChainSync {
/// Sync state /// Sync state
state: SyncState, state: SyncState,
@ -618,10 +619,12 @@ pub struct ChainSync {
/// Enable ancient block downloading /// Enable ancient block downloading
download_old_blocks: bool, download_old_blocks: bool,
/// Shared private tx service. /// Shared private tx service.
#[ignore_malloc_size_of = "arc on dyn trait here seems tricky, ignoring"]
private_tx_handler: Option<Arc<PrivateTxHandler>>, private_tx_handler: Option<Arc<PrivateTxHandler>>,
/// Enable warp sync. /// Enable warp sync.
warp_sync: WarpSync, warp_sync: WarpSync,
#[ignore_malloc_size_of = "mpsc unmettered, ignoring"]
status_sinks: Vec<futures_mpsc::UnboundedSender<SyncState>> status_sinks: Vec<futures_mpsc::UnboundedSender<SyncState>>
} }
@ -677,10 +680,7 @@ impl ChainSync {
num_active_peers: self.peers.values().filter(|p| p.is_allowed() && p.asking != PeerAsking::Nothing).count(), num_active_peers: self.peers.values().filter(|p| p.is_allowed() && p.asking != PeerAsking::Nothing).count(),
num_snapshot_chunks: self.snapshot.total_chunks(), num_snapshot_chunks: self.snapshot.total_chunks(),
snapshot_chunks_done: self.snapshot.done_chunks(), snapshot_chunks_done: self.snapshot.done_chunks(),
mem_used: mem_used: self.malloc_size_of(),
self.new_blocks.heap_size()
+ self.old_blocks.as_ref().map_or(0, |d| d.heap_size())
+ self.peers.heap_size_of_children(),
} }
} }
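Reviewer note: the hand-written sum of heap_size() calls is gone; because ChainSync now derives MallocSizeOf (with the Arc-over-dyn-trait and channel fields explicitly ignored), the status report measures the whole object with one self.malloc_size_of() call. A reduced, hypothetical sketch of that shape, assuming the crate-level parity-util-mem setup added elsewhere in this diff:

// Reduced sketch: derive on the container, ignore what cannot (or should not)
// be measured, then report the whole thing in one call.
use std::sync::Arc;
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

trait Handler {}

#[derive(MallocSizeOf)]
struct SyncExample {
	peers: Vec<u64>,
	#[ignore_malloc_size_of = "Arc over a dyn trait is not measured"]
	handler: Option<Arc<dyn Handler>>,
}

impl SyncExample {
	fn mem_used(&self) -> usize {
		self.malloc_size_of()
	}
}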

View File

@ -53,8 +53,10 @@ extern crate enum_primitive;
extern crate macros; extern crate macros;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
extern crate parity_util_mem;
extern crate parity_util_mem as mem;
#[macro_use] #[macro_use]
extern crate heapsize; extern crate parity_util_mem as malloc_size_of;
#[macro_use] #[macro_use]
extern crate trace_time; extern crate trace_time;

View File

@ -27,6 +27,7 @@ pub enum ChunkType {
Block(H256), Block(H256),
} }
#[derive(MallocSizeOf)]
pub struct Snapshot { pub struct Snapshot {
pending_state_chunks: Vec<H256>, pending_state_chunks: Vec<H256>,
pending_block_chunks: Vec<H256>, pending_block_chunks: Vec<H256>,

View File

@ -23,7 +23,7 @@ use types::BlockNumber;
type NodeId = H512; type NodeId = H512;
#[derive(Debug, PartialEq, Clone)] #[derive(Debug, PartialEq, Clone, MallocSizeOf)]
pub struct Stats { pub struct Stats {
first_seen: BlockNumber, first_seen: BlockNumber,
propagated_to: HashMap<NodeId, usize>, propagated_to: HashMap<NodeId, usize>,
@ -50,7 +50,7 @@ impl<'a> From<&'a Stats> for TransactionStats {
} }
} }
#[derive(Debug, Default)] #[derive(Debug, Default, MallocSizeOf)]
pub struct TransactionsStats { pub struct TransactionsStats {
pending_transactions: H256FastMap<Stats>, pending_transactions: H256FastMap<Stats>,
} }

View File

@ -7,8 +7,8 @@ authors = ["Parity Technologies <admin@parity.io>"]
[dependencies] [dependencies]
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
ethjson = { path = "../../json" } ethjson = { path = "../../json" }
parity-util-mem = "0.1"
ethkey = { path = "../../accounts/ethkey" } ethkey = { path = "../../accounts/ethkey" }
heapsize = "0.4"
keccak-hash = "0.2.0" keccak-hash = "0.2.0"
parity-bytes = "0.1" parity-bytes = "0.1"
rlp = "0.4.0" rlp = "0.4.0"

View File

@ -27,20 +27,16 @@ use block::Block as FullBlock;
use ethereum_types::{H256, Bloom, U256, Address}; use ethereum_types::{H256, Bloom, U256, Address};
use hash::keccak; use hash::keccak;
use header::{Header as FullHeader}; use header::{Header as FullHeader};
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use rlp::{self, Rlp, RlpStream}; use rlp::{self, Rlp, RlpStream};
use transaction::UnverifiedTransaction; use transaction::UnverifiedTransaction;
use views::{self, BlockView, HeaderView, BodyView}; use views::{self, BlockView, HeaderView, BodyView};
use BlockNumber; use BlockNumber;
/// Owning header view. /// Owning header view.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Header(Vec<u8>); pub struct Header(Vec<u8>);
impl HeapSizeOf for Header {
fn heap_size_of_children(&self) -> usize { self.0.heap_size_of_children() }
}
impl Header { impl Header {
/// Create a new owning header view. /// Create a new owning header view.
/// Expects the data to be an RLP-encoded header -- any other case will likely lead to /// Expects the data to be an RLP-encoded header -- any other case will likely lead to
@ -113,13 +109,9 @@ impl Header {
} }
/// Owning block body view. /// Owning block body view.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Body(Vec<u8>); pub struct Body(Vec<u8>);
impl HeapSizeOf for Body {
fn heap_size_of_children(&self) -> usize { self.0.heap_size_of_children() }
}
impl Body { impl Body {
/// Create a new owning block body view. The raw bytes passed in must be an rlp-encoded block /// Create a new owning block body view. The raw bytes passed in must be an rlp-encoded block
/// body. /// body.
@ -178,13 +170,9 @@ impl Body {
} }
/// Owning block view. /// Owning block view.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Block(Vec<u8>); pub struct Block(Vec<u8>);
impl HeapSizeOf for Block {
fn heap_size_of_children(&self) -> usize { self.0.heap_size_of_children() }
}
impl Block { impl Block {
/// Create a new owning block view. The raw bytes passed in must be an rlp-encoded block. /// Create a new owning block view. The raw bytes passed in must be an rlp-encoded block.
pub fn new(raw: Vec<u8>) -> Self { Block(raw) } pub fn new(raw: Vec<u8>) -> Self { Block(raw) }
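Reviewer note: the three one-line HeapSizeOf impls for these owning views collapse into the derive; on a newtype around Vec<u8> the derived impl simply measures the inner buffer. A hypothetical sketch:

// Hypothetical newtype mirroring the owning views above: the derived impl
// reports the heap held by the inner Vec<u8>.
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

#[derive(MallocSizeOf)]
struct OwnedRlp(Vec<u8>);

fn rlp_size(view: &OwnedRlp) -> usize {
	view.malloc_size_of()
}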

View File

@ -17,7 +17,7 @@
//! Block header. //! Block header.
use hash::{KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP, keccak}; use hash::{KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP, keccak};
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use ethereum_types::{H256, U256, Address, Bloom}; use ethereum_types::{H256, U256, Address, Bloom};
use bytes::Bytes; use bytes::Bytes;
use rlp::{Rlp, RlpStream, Encodable, DecoderError, Decodable}; use rlp::{Rlp, RlpStream, Encodable, DecoderError, Decodable};
@ -49,7 +49,7 @@ pub struct ExtendedHeader {
/// which is non-specific. /// which is non-specific.
/// ///
/// Doesn't do all that much on its own. /// Doesn't do all that much on its own.
#[derive(Debug, Clone, Eq)] #[derive(Debug, Clone, Eq, MallocSizeOf)]
pub struct Header { pub struct Header {
/// Parent hash. /// Parent hash.
parent_hash: H256, parent_hash: H256,
@ -361,12 +361,6 @@ impl Encodable for Header {
} }
} }
impl HeapSizeOf for Header {
fn heap_size_of_children(&self) -> usize {
self.extra_data.heap_size_of_children() + self.seal.heap_size_of_children()
}
}
impl ExtendedHeader { impl ExtendedHeader {
/// Returns combined difficulty of all ancestors together with the difficulty of this header. /// Returns combined difficulty of all ancestors together with the difficulty of this header.
pub fn total_score(&self) -> U256 { pub fn total_score(&self) -> U256 {

View File

@ -36,7 +36,6 @@
extern crate ethereum_types; extern crate ethereum_types;
extern crate ethjson; extern crate ethjson;
extern crate ethkey; extern crate ethkey;
extern crate heapsize;
extern crate keccak_hash as hash; extern crate keccak_hash as hash;
extern crate parity_bytes as bytes; extern crate parity_bytes as bytes;
extern crate rlp; extern crate rlp;
@ -44,6 +43,9 @@ extern crate unexpected;
#[macro_use] #[macro_use]
extern crate rlp_derive; extern crate rlp_derive;
extern crate parity_util_mem;
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
#[cfg(test)] #[cfg(test)]
extern crate rustc_hex; extern crate rustc_hex;

View File

@ -17,7 +17,7 @@
//! Log entry type definition. //! Log entry type definition.
use std::ops::Deref; use std::ops::Deref;
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use bytes::Bytes; use bytes::Bytes;
use ethereum_types::{H256, Address, Bloom, BloomInput}; use ethereum_types::{H256, Address, Bloom, BloomInput};
@ -25,7 +25,7 @@ use {BlockNumber};
use ethjson; use ethjson;
/// A record of execution for a `LOG` operation. /// A record of execution for a `LOG` operation.
#[derive(Default, Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)] #[derive(Default, Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable, MallocSizeOf)]
pub struct LogEntry { pub struct LogEntry {
/// The address of the contract executing at the point of the `LOG` operation. /// The address of the contract executing at the point of the `LOG` operation.
pub address: Address, pub address: Address,
@ -35,12 +35,6 @@ pub struct LogEntry {
pub data: Bytes, pub data: Bytes,
} }
impl HeapSizeOf for LogEntry {
fn heap_size_of_children(&self) -> usize {
self.topics.heap_size_of_children() + self.data.heap_size_of_children()
}
}
impl LogEntry { impl LogEntry {
/// Calculates the bloom of this log entry. /// Calculates the bloom of this log entry.
pub fn bloom(&self) -> Bloom { pub fn bloom(&self) -> Bloom {

View File

@ -17,14 +17,14 @@
//! Receipt //! Receipt
use ethereum_types::{H160, H256, U256, Address, Bloom}; use ethereum_types::{H160, H256, U256, Address, Bloom};
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use rlp::{Rlp, RlpStream, Encodable, Decodable, DecoderError}; use rlp::{Rlp, RlpStream, Encodable, Decodable, DecoderError};
use BlockNumber; use BlockNumber;
use log_entry::{LogEntry, LocalizedLogEntry}; use log_entry::{LogEntry, LocalizedLogEntry};
/// Transaction outcome store in the receipt. /// Transaction outcome store in the receipt.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub enum TransactionOutcome { pub enum TransactionOutcome {
/// Status and state root are unknown under EIP-98 rules. /// Status and state root are unknown under EIP-98 rules.
Unknown, Unknown,
@ -35,7 +35,7 @@ pub enum TransactionOutcome {
} }
/// Information describing execution of a transaction. /// Information describing execution of a transaction.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Receipt { pub struct Receipt {
/// The total gas used in the block following execution of the transaction. /// The total gas used in the block following execution of the transaction.
pub gas_used: U256, pub gas_used: U256,
@ -110,12 +110,6 @@ impl Decodable for Receipt {
} }
} }
impl HeapSizeOf for Receipt {
fn heap_size_of_children(&self) -> usize {
self.logs.heap_size_of_children()
}
}
/// Receipt with additional info. /// Receipt with additional info.
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq)]
pub struct RichReceipt { pub struct RichReceipt {

View File

@ -22,7 +22,8 @@ use ethereum_types::{H256, H160, Address, U256, BigEndianHash};
use ethjson; use ethjson;
use ethkey::{self, Signature, Secret, Public, recover, public_to_address}; use ethkey::{self, Signature, Secret, Public, recover, public_to_address};
use hash::keccak; use hash::keccak;
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOf;
use rlp::{self, RlpStream, Rlp, DecoderError, Encodable}; use rlp::{self, RlpStream, Rlp, DecoderError, Encodable};
use transaction::error; use transaction::error;
@ -37,7 +38,7 @@ pub const UNSIGNED_SENDER: Address = H160([0xff; 20]);
pub const SYSTEM_ADDRESS: Address = H160([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xfe]); pub const SYSTEM_ADDRESS: Address = H160([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xfe]);
/// Transaction action type. /// Transaction action type.
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub enum Action { pub enum Action {
/// Create creates new contract. /// Create creates new contract.
Create, Create,
@ -99,7 +100,7 @@ pub mod signature {
/// A set of information describing an externally-originating message call /// A set of information describing an externally-originating message call
/// or contract creation operation. /// or contract creation operation.
#[derive(Default, Debug, Clone, PartialEq, Eq)] #[derive(Default, Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Transaction { pub struct Transaction {
/// Nonce. /// Nonce.
pub nonce: U256, pub nonce: U256,
@ -133,12 +134,6 @@ impl Transaction {
} }
} }
impl HeapSizeOf for Transaction {
fn heap_size_of_children(&self) -> usize {
self.data.heap_size_of_children()
}
}
impl From<ethjson::state::Transaction> for SignedTransaction { impl From<ethjson::state::Transaction> for SignedTransaction {
fn from(t: ethjson::state::Transaction) -> Self { fn from(t: ethjson::state::Transaction) -> Self {
let to: Option<ethjson::hash::Address> = t.to.into(); let to: Option<ethjson::hash::Address> = t.to.into();
@ -255,7 +250,7 @@ impl Transaction {
} }
/// Signed transaction information without verified signature. /// Signed transaction information without verified signature.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq, MallocSizeOf)]
pub struct UnverifiedTransaction { pub struct UnverifiedTransaction {
/// Plain Transaction. /// Plain Transaction.
unsigned: Transaction, unsigned: Transaction,
@ -270,12 +265,6 @@ pub struct UnverifiedTransaction {
hash: H256, hash: H256,
} }
impl HeapSizeOf for UnverifiedTransaction {
fn heap_size_of_children(&self) -> usize {
self.unsigned.heap_size_of_children()
}
}
impl Deref for UnverifiedTransaction { impl Deref for UnverifiedTransaction {
type Target = Transaction; type Target = Transaction;
@ -407,19 +396,13 @@ impl UnverifiedTransaction {
} }
/// A `UnverifiedTransaction` with successfully recovered `sender`. /// A `UnverifiedTransaction` with successfully recovered `sender`.
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq, MallocSizeOf)]
pub struct SignedTransaction { pub struct SignedTransaction {
transaction: UnverifiedTransaction, transaction: UnverifiedTransaction,
sender: Address, sender: Address,
public: Option<Public>, public: Option<Public>,
} }
impl HeapSizeOf for SignedTransaction {
fn heap_size_of_children(&self) -> usize {
self.transaction.heap_size_of_children()
}
}
impl rlp::Encodable for SignedTransaction { impl rlp::Encodable for SignedTransaction {
fn rlp_append(&self, s: &mut RlpStream) { self.transaction.rlp_append_sealed_transaction(s) } fn rlp_append(&self, s: &mut RlpStream) { self.transaction.rlp_append_sealed_transaction(s) }
} }

View File

@ -7,7 +7,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
byteorder = "1.0" byteorder = "1.0"
parity-bytes = "0.1" parity-bytes = "0.1"
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
trie-db = "0.11.0" trie-db = "0.12.4"
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" } patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
log = "0.4" log = "0.4"
ethjson = { path = "../../json" } ethjson = { path = "../../json" }

View File

@ -23,7 +23,7 @@ ethabi-contract = "8.0"
ethcore-call-contract = { path = "../ethcore/call-contract" } ethcore-call-contract = { path = "../ethcore/call-contract" }
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
futures = "0.1" futures = "0.1"
heapsize = "0.4" parity-util-mem = "0.1"
keccak-hash = "0.2.0" keccak-hash = "0.2.0"
linked-hash-map = "0.5" linked-hash-map = "0.5"
log = "0.4" log = "0.4"

View File

@ -25,7 +25,8 @@ extern crate ethabi;
extern crate ethcore_call_contract as call_contract; extern crate ethcore_call_contract as call_contract;
extern crate ethereum_types; extern crate ethereum_types;
extern crate futures; extern crate futures;
extern crate heapsize;
extern crate parity_util_mem;
extern crate keccak_hash as hash; extern crate keccak_hash as hash;
extern crate linked_hash_map; extern crate linked_hash_map;
extern crate parity_runtime; extern crate parity_runtime;

View File

@ -17,7 +17,7 @@
//! Transaction Pool //! Transaction Pool
use ethereum_types::{U256, H256, Address}; use ethereum_types::{U256, H256, Address};
use heapsize::HeapSizeOf; use parity_util_mem::MallocSizeOfExt;
use types::transaction; use types::transaction;
use txpool; use txpool;
@ -176,7 +176,7 @@ impl txpool::VerifiedTransaction for VerifiedTransaction {
} }
fn mem_usage(&self) -> usize { fn mem_usage(&self) -> usize {
self.transaction.heap_size_of_children() self.transaction.malloc_size_of()
} }
fn sender(&self) -> &Address { fn sender(&self) -> &Address {

View File

@ -8,12 +8,12 @@ license = "GPL3"
[dependencies] [dependencies]
parity-bytes = "0.1" parity-bytes = "0.1"
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
hash-db = "0.11.0" hash-db = "0.12.4"
heapsize = "0.4" parity-util-mem = "0.1"
keccak-hasher = { path = "../keccak-hasher" } keccak-hasher = { path = "../keccak-hasher" }
kvdb = "0.1" kvdb = "0.1"
log = "0.4" log = "0.4"
memory-db = "0.11.0" memory-db = "0.12.4"
parking_lot = "0.7" parking_lot = "0.7"
fastmap = { path = "../../util/fastmap" } fastmap = { path = "../../util/fastmap" }
rlp = "0.4.0" rlp = "0.4.0"

View File

@ -23,12 +23,12 @@ use std::sync::Arc;
use bytes::Bytes; use bytes::Bytes;
use ethereum_types::H256; use ethereum_types::H256;
use hash_db::{HashDB}; use parity_util_mem::MallocSizeOfExt;
use hash_db::{HashDB, Prefix};
use keccak_hasher::KeccakHasher; use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction, DBValue}; use kvdb::{KeyValueDB, DBTransaction, DBValue};
use rlp::{encode, decode}; use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY, error_key_already_exists, error_negatively_reference_hash}; use super::{DB_PREFIX_LEN, LATEST_ERA_KEY, error_key_already_exists, error_negatively_reference_hash};
use super::memory_db::*;
use traits::JournalDB; use traits::JournalDB;
/// Implementation of the `HashDB` trait for a disk-backed database with a memory overlay /// Implementation of the `HashDB` trait for a disk-backed database with a memory overlay
@ -39,7 +39,7 @@ use traits::JournalDB;
/// immediately. As this is an "archive" database, nothing is ever removed. This means /// immediately. As this is an "archive" database, nothing is ever removed. This means
/// that the states of any block the node has ever processed will be accessible. /// that the states of any block the node has ever processed will be accessible.
pub struct ArchiveDB { pub struct ArchiveDB {
overlay: MemoryDB<KeccakHasher, DBValue>, overlay: super::MemoryDB,
backing: Arc<KeyValueDB>, backing: Arc<KeyValueDB>,
latest_era: Option<u64>, latest_era: Option<u64>,
column: Option<u32>, column: Option<u32>,
@ -66,8 +66,8 @@ impl ArchiveDB {
} }
impl HashDB<KeccakHasher, DBValue> for ArchiveDB { impl HashDB<KeccakHasher, DBValue> for ArchiveDB {
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if let Some((d, rc)) = self.overlay.raw(key) { if let Some((d, rc)) = self.overlay.raw(key, prefix) {
if rc > 0 { if rc > 0 {
return Some(d.clone()); return Some(d.clone());
} }
@ -75,20 +75,20 @@ impl HashDB<KeccakHasher, DBValue> for ArchiveDB {
self.payload(key) self.payload(key)
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.get(key).is_some() self.get(key, prefix).is_some()
} }
fn insert(&mut self, value: &[u8]) -> H256 { fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.overlay.insert(value) self.overlay.insert(prefix, value)
} }
fn emplace(&mut self, key: H256, value: DBValue) { fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.overlay.emplace(key, value); self.overlay.emplace(key, prefix, value);
} }
fn remove(&mut self, key: &H256) { fn remove(&mut self, key: &H256, prefix: Prefix) {
self.overlay.remove(key); self.overlay.remove(key, prefix);
} }
} }
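Reviewer note: super::MemoryDB here is a crate-level alias; memory-db 0.12 adds a key-function type parameter, and this diff uses HashKey (plain hashed keys), as the Proving struct earlier shows. A hypothetical sketch of what such an alias looks like:

// Hypothetical alias matching the MemoryDB type used in the Proving struct
// earlier in this diff.
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;
use memory_db::{MemoryDB as BaseMemoryDB, HashKey};

pub type MemoryDB = BaseMemoryDB<KeccakHasher, HashKey<KeccakHasher>, DBValue>;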
@ -124,7 +124,7 @@ impl JournalDB for ArchiveDB {
} }
fn mem_used(&self) -> usize { fn mem_used(&self) -> usize {
self.overlay.mem_used() self.overlay.malloc_size_of()
} }
fn is_empty(&self) -> bool { fn is_empty(&self) -> bool {
@ -197,7 +197,7 @@ impl JournalDB for ArchiveDB {
&self.backing &self.backing
} }
fn consolidate(&mut self, with: MemoryDB<KeccakHasher, DBValue>) { fn consolidate(&mut self, with: super::MemoryDB) {
self.overlay.consolidate(with); self.overlay.consolidate(with);
} }
} }
@ -206,7 +206,7 @@ impl JournalDB for ArchiveDB {
mod tests { mod tests {
use keccak::keccak; use keccak::keccak;
use hash_db::HashDB; use hash_db::{HashDB, EMPTY_PREFIX};
use super::*; use super::*;
use {kvdb_memorydb, JournalDB}; use {kvdb_memorydb, JournalDB};
@ -215,50 +215,50 @@ mod tests {
// history is 1 // history is 1
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None); let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let x = jdb.insert(b"X"); let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
jdb.commit_batch(3, &keccak(b"1002a"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"1002a"), Some((1, keccak(b"1")))).unwrap();
jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap();
jdb.remove(&x); jdb.remove(&x, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap();
let x = jdb.insert(b"X"); let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap();
jdb.commit_batch(5, &keccak(b"1004a"), Some((3, keccak(b"1002a")))).unwrap(); jdb.commit_batch(5, &keccak(b"1004a"), Some((3, keccak(b"1002a")))).unwrap();
jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap(); jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap();
assert!(jdb.contains(&x)); assert!(jdb.contains(&x, EMPTY_PREFIX));
} }
#[test] #[test]
fn long_history() { fn long_history() {
// history is 3 // history is 3
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None); let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let h = jdb.insert(b"foo"); let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h); jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
} }
#[test] #[test]
#[should_panic] #[should_panic]
fn multiple_owed_removal_not_allowed() { fn multiple_owed_removal_not_allowed() {
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None); let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let h = jdb.insert(b"foo"); let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h); jdb.remove(&h, EMPTY_PREFIX);
jdb.remove(&h); jdb.remove(&h, EMPTY_PREFIX);
// commit_batch would call journal_under(), // commit_batch would call journal_under(),
// and we don't allow multiple owned removals. // and we don't allow multiple owned removals.
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -269,29 +269,29 @@ mod tests {
// history is 1 // history is 1
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None); let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.remove(&baz); jdb.remove(&baz, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
} }
@ -301,25 +301,25 @@ mod tests {
// history is 1 // history is 1
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None); let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -327,18 +327,18 @@ mod tests {
// history is 1 // history is 1
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None); let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap(); jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -347,15 +347,15 @@ mod tests {
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None); let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap(); jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -366,22 +366,22 @@ mod tests {
let foo = { let foo = {
let mut jdb = ArchiveDB::new(shared_db.clone(), None); let mut jdb = ArchiveDB::new(shared_db.clone(), None);
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.emplace(bar.clone(), DBValue::from_slice(b"bar")); jdb.emplace(bar.clone(), EMPTY_PREFIX, DBValue::from_slice(b"bar"));
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
foo foo
}; };
{ {
let mut jdb = ArchiveDB::new(shared_db.clone(), None); let mut jdb = ArchiveDB::new(shared_db.clone(), None);
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
} }
{ {
let mut jdb = ArchiveDB::new(shared_db, None); let mut jdb = ArchiveDB::new(shared_db, None);
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
} }
} }
@ -393,23 +393,23 @@ mod tests {
let foo = { let foo = {
let mut jdb = ArchiveDB::new(shared_db.clone(), None); let mut jdb = ArchiveDB::new(shared_db.clone(), None);
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
// foo is ancient history. // foo is ancient history.
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
foo foo
}; };
{ {
let mut jdb = ArchiveDB::new(shared_db, None); let mut jdb = ArchiveDB::new(shared_db, None);
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap(); jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap();
} }
@ -421,14 +421,14 @@ mod tests {
let (foo, _, _) = { let (foo, _, _) = {
let mut jdb = ArchiveDB::new(shared_db.clone(), None); let mut jdb = ArchiveDB::new(shared_db.clone(), None);
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
(foo, bar, baz) (foo, bar, baz)
}; };
@ -436,7 +436,7 @@ mod tests {
{ {
let mut jdb = ArchiveDB::new(shared_db, None); let mut jdb = ArchiveDB::new(shared_db, None);
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
} }
@ -446,7 +446,7 @@ mod tests {
let key = { let key = {
let mut jdb = ArchiveDB::new(shared_db.clone(), None); let mut jdb = ArchiveDB::new(shared_db.clone(), None);
let key = jdb.insert(b"foo"); let key = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
key key
}; };
@ -461,13 +461,13 @@ mod tests {
#[test] #[test]
fn inject() { fn inject() {
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None); let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let key = jdb.insert(b"dog"); let key = jdb.insert(EMPTY_PREFIX, b"dog");
jdb.inject_batch().unwrap(); jdb.inject_batch().unwrap();
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog")); assert_eq!(jdb.get(&key, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key); jdb.remove(&key, EMPTY_PREFIX);
jdb.inject_batch().unwrap(); jdb.inject_batch().unwrap();
assert!(jdb.get(&key).is_none()); assert!(jdb.get(&key, EMPTY_PREFIX).is_none());
} }
} }

View File

@ -23,27 +23,22 @@ use std::sync::Arc;
use bytes::Bytes; use bytes::Bytes;
use ethereum_types::H256; use ethereum_types::H256;
use hash_db::{HashDB}; use hash_db::{HashDB, Prefix};
use heapsize::HeapSizeOf; use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
use keccak_hasher::KeccakHasher; use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction, DBValue}; use kvdb::{KeyValueDB, DBTransaction, DBValue};
use memory_db::*;
use parking_lot::RwLock; use parking_lot::RwLock;
use rlp::{encode, decode}; use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY, error_negatively_reference_hash, error_key_already_exists}; use super::{DB_PREFIX_LEN, LATEST_ERA_KEY, error_negatively_reference_hash, error_key_already_exists};
use super::traits::JournalDB; use super::traits::JournalDB;
use util::{DatabaseKey, DatabaseValueView, DatabaseValueRef}; use util::{DatabaseKey, DatabaseValueView, DatabaseValueRef};
#[derive(Debug, Clone, PartialEq, Eq)] #[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
struct RefInfo { struct RefInfo {
queue_refs: usize, queue_refs: usize,
in_archive: bool, in_archive: bool,
} }
impl HeapSizeOf for RefInfo {
fn heap_size_of_children(&self) -> usize { 0 }
}
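The hand-written HeapSizeOf impl, which just reported zero heap children, is replaced by #[derive(MallocSizeOf)] from parity-util-mem; the extern crate parity_util_mem as malloc_size_of alias added to the crate root further down in this diff is presumably what lets the generated code resolve its paths. A rough sketch of the pattern on a made-up type:

    use parity_util_mem::MallocSizeOf;

    // Assumes the crate-root alias `extern crate parity_util_mem as malloc_size_of;`
    // from this diff is present, since the derive's generated code appears to rely on it.
    // Plain scalar fields contribute nothing to the measured size; heap-owning
    // fields such as Vec or HashMap are accounted for by the derived impl.
    #[derive(MallocSizeOf)]
    struct Example {
        queue_refs: usize,
        in_archive: bool,
        tags: Vec<String>,
    }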
#[derive(Clone, PartialEq, Eq)] #[derive(Clone, PartialEq, Eq)]
enum RemoveFrom { enum RemoveFrom {
Queue, Queue,
@ -107,7 +102,7 @@ enum RemoveFrom {
/// ///
/// TODO: `store_reclaim_period` /// TODO: `store_reclaim_period`
pub struct EarlyMergeDB { pub struct EarlyMergeDB {
overlay: MemoryDB<KeccakHasher, DBValue>, overlay: super::MemoryDB,
backing: Arc<KeyValueDB>, backing: Arc<KeyValueDB>,
refs: Option<Arc<RwLock<HashMap<H256, RefInfo>>>>, refs: Option<Arc<RwLock<HashMap<H256, RefInfo>>>>,
latest_era: Option<u64>, latest_era: Option<u64>,
@ -292,8 +287,8 @@ impl EarlyMergeDB {
} }
impl HashDB<KeccakHasher, DBValue> for EarlyMergeDB { impl HashDB<KeccakHasher, DBValue> for EarlyMergeDB {
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if let Some((d, rc)) = self.overlay.raw(key) { if let Some((d, rc)) = self.overlay.raw(key, prefix) {
if rc > 0 { if rc > 0 {
return Some(d.clone()) return Some(d.clone())
} }
@ -301,18 +296,18 @@ impl HashDB<KeccakHasher, DBValue> for EarlyMergeDB {
self.payload(key) self.payload(key)
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.get(key).is_some() self.get(key, prefix).is_some()
} }
fn insert(&mut self, value: &[u8]) -> H256 { fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.overlay.insert(value) self.overlay.insert(prefix, value)
} }
fn emplace(&mut self, key: H256, value: DBValue) { fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.overlay.emplace(key, value); self.overlay.emplace(key, prefix, value);
} }
fn remove(&mut self, key: &H256) { fn remove(&mut self, key: &H256, prefix: Prefix) {
self.overlay.remove(key); self.overlay.remove(key, prefix);
} }
} }
@ -358,8 +353,9 @@ impl JournalDB for EarlyMergeDB {
fn latest_era(&self) -> Option<u64> { self.latest_era } fn latest_era(&self) -> Option<u64> { self.latest_era }
fn mem_used(&self) -> usize { fn mem_used(&self) -> usize {
self.overlay.mem_used() + match self.refs { let mut ops = new_malloc_size_ops();
Some(ref c) => c.read().heap_size_of_children(), self.overlay.size_of(&mut ops) + match self.refs {
Some(ref c) => c.read().size_of(&mut ops),
None => 0 None => 0
} }
} }
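mem_used no longer calls heap_size_of_children; it builds a MallocSizeOfOps with new_malloc_size_ops() and sums size_of() over the owned structures. The same pattern in isolation, with a made-up measure helper over standard collections (both of which implement MallocSizeOf):

    use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
    use std::collections::HashMap;

    fn measure(overlay: &HashMap<u64, Vec<u8>>, refs: Option<&HashMap<u64, u64>>) -> usize {
        // One ops value can be reused across every size_of call in the sum.
        let mut ops = new_malloc_size_ops();
        overlay.size_of(&mut ops) + refs.map_or(0, |r| r.size_of(&mut ops))
    }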
@ -520,7 +516,7 @@ impl JournalDB for EarlyMergeDB {
Ok(ops) Ok(ops)
} }
fn consolidate(&mut self, with: MemoryDB<KeccakHasher, DBValue>) { fn consolidate(&mut self, with: super::MemoryDB) {
self.overlay.consolidate(with); self.overlay.consolidate(with);
} }
} }
@ -529,7 +525,7 @@ impl JournalDB for EarlyMergeDB {
mod tests { mod tests {
use keccak::keccak; use keccak::keccak;
use hash_db::HashDB; use hash_db::{HashDB, EMPTY_PREFIX};
use super::*; use super::*;
use super::super::traits::JournalDB; use super::super::traits::JournalDB;
use kvdb_memorydb; use kvdb_memorydb;
@ -539,7 +535,7 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let x = jdb.insert(b"X"); let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
@ -549,10 +545,10 @@ mod tests {
jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&x); jdb.remove(&x, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let x = jdb.insert(b"X"); let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
@ -561,50 +557,50 @@ mod tests {
jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap(); jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&x)); assert!(jdb.contains(&x, EMPTY_PREFIX));
} }
#[test] #[test]
fn insert_older_era() { fn insert_older_era() {
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0a"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0a"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0a")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0a")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(0, &keccak(b"0b"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0b"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
} }
#[test] #[test]
fn long_history() { fn long_history() {
// history is 3 // history is 3
let mut jdb = new_db(); let mut jdb = new_db();
let h = jdb.insert(b"foo"); let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h); jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&h)); assert!(!jdb.contains(&h, EMPTY_PREFIX));
} }
#[test] #[test]
@ -612,42 +608,42 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.remove(&baz); jdb.remove(&baz, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
} }
#[test] #[test]
@ -655,31 +651,31 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
} }
#[test] #[test]
@ -687,22 +683,22 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap(); jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -712,23 +708,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap(); jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -737,23 +733,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -762,23 +758,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(2, &keccak(b"2a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3a"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"3a"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3b"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"3b"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
@ -802,8 +798,8 @@ mod tests {
let foo = { let foo = {
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None); let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.emplace(bar.clone(), DBValue::from_slice(b"bar")); jdb.emplace(bar.clone(), EMPTY_PREFIX, DBValue::from_slice(b"bar"));
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
foo foo
@ -811,18 +807,18 @@ mod tests {
{ {
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None); let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
} }
{ {
let mut jdb = EarlyMergeDB::new(shared_db, None); let mut jdb = EarlyMergeDB::new(shared_db, None);
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
} }
} }
@ -833,19 +829,19 @@ mod tests {
let mut jdb = new_db(); let mut jdb = new_db();
// history is 4 // history is 4
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), None).unwrap(); jdb.commit_batch(3, &keccak(b"3"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
// expunge foo // expunge foo
@ -859,39 +855,39 @@ mod tests {
let mut jdb = new_db(); let mut jdb = new_db();
// history is 4 // history is 4
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1a"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2a"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2a"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2b"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3a"), None).unwrap(); jdb.commit_batch(3, &keccak(b"3a"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3b"), None).unwrap(); jdb.commit_batch(3, &keccak(b"3b"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(4, &keccak(b"4a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(4, &keccak(b"4b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
@ -905,28 +901,28 @@ mod tests {
let mut jdb = new_db(); let mut jdb = new_db();
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
// foo is ancient history. // foo is ancient history.
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); // BROKEN jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); // BROKEN
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap(); jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -934,7 +930,7 @@ mod tests {
let mut jdb = new_db(); let mut jdb = new_db();
// history is 4 // history is 4
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -948,16 +944,16 @@ mod tests {
// foo is ancient history. // foo is ancient history.
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(5, &keccak(b"5"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(5, &keccak(b"5"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(6, &keccak(b"6"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(6, &keccak(b"6"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.insert(b"bar"); jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(7, &keccak(b"7"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(7, &keccak(b"7"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
} }
@ -972,7 +968,7 @@ mod tests {
{ {
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None); let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
// history is 1 // history is 1
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -980,24 +976,24 @@ mod tests {
// foo is ancient history. // foo is ancient history.
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db // incantation to reopen the db
}; { }; {
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None); let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db // incantation to reopen the db
}; { }; {
@ -1005,7 +1001,7 @@ mod tests {
jdb.commit_batch(5, &keccak(b"5"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(5, &keccak(b"5"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db // incantation to reopen the db
}; { }; {
@ -1013,7 +1009,7 @@ mod tests {
jdb.commit_batch(6, &keccak(b"6"), Some((4, keccak(b"4")))).unwrap(); jdb.commit_batch(6, &keccak(b"6"), Some((4, keccak(b"4")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
} }
} }
@ -1024,16 +1020,16 @@ mod tests {
let (foo, bar, baz) = { let (foo, bar, baz) = {
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None); let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
(foo, bar, baz) (foo, bar, baz)
@ -1043,22 +1039,22 @@ mod tests {
let mut jdb = EarlyMergeDB::new(shared_db, None); let mut jdb = EarlyMergeDB::new(shared_db, None);
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
} }
} }
#[test] #[test]
fn inject() { fn inject() {
let mut jdb = new_db(); let mut jdb = new_db();
let key = jdb.insert(b"dog"); let key = jdb.insert(EMPTY_PREFIX, b"dog");
jdb.inject_batch().unwrap(); jdb.inject_batch().unwrap();
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog")); assert_eq!(jdb.get(&key, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key); jdb.remove(&key, EMPTY_PREFIX);
jdb.inject_batch().unwrap(); jdb.inject_batch().unwrap();
assert!(jdb.get(&key).is_none()); assert!(jdb.get(&key, EMPTY_PREFIX).is_none());
} }
} }


@ -16,7 +16,9 @@
//! `JournalDB` interface and implementation. //! `JournalDB` interface and implementation.
extern crate heapsize; extern crate parity_util_mem;
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
@ -59,6 +61,14 @@ pub use self::traits::KeyedHashDB;
/// Export as keyed hash trait /// Export as keyed hash trait
pub use self::traits::AsKeyedHashDB; pub use self::traits::AsKeyedHashDB;
/// Alias to ethereum MemoryDB
type MemoryDB = memory_db::MemoryDB<
keccak_hasher::KeccakHasher,
memory_db::HashKey<keccak_hasher::KeccakHasher>,
kvdb::DBValue,
>;
/// Journal database operating strategy. /// Journal database operating strategy.
#[derive(Debug, PartialEq, Clone, Copy)] #[derive(Debug, PartialEq, Clone, Copy)]
pub enum Algorithm { pub enum Algorithm {
@ -163,8 +173,8 @@ fn error_negatively_reference_hash(hash: &ethereum_types::H256) -> io::Error {
io::Error::new(io::ErrorKind::Other, format!("Entry {} removed from database more times than it was added.", hash)) io::Error::new(io::ErrorKind::Other, format!("Entry {} removed from database more times than it was added.", hash))
} }
pub fn new_memory_db() -> memory_db::MemoryDB<keccak_hasher::KeccakHasher, kvdb::DBValue> { pub fn new_memory_db() -> MemoryDB {
memory_db::MemoryDB::from_null_node(&rlp::NULL_RLP, rlp::NULL_RLP.as_ref().into()) MemoryDB::from_null_node(&rlp::NULL_RLP, rlp::NULL_RLP.as_ref().into())
} }
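memory-db now takes an extra key-function type parameter, and the alias above fixes it to HashKey<KeccakHasher>, which keeps the previous behaviour of keying entries by the bare hash. new_memory_db() hands back that alias, and it is also what JournalDB::consolidate accepts after this change. A usage sketch, not part of the diff, reusing the EarlyMergeDB and kvdb_memorydb constructors that appear elsewhere in this commit (and assuming the HashDB and JournalDB traits are in scope):

    use std::sync::Arc;
    use hash_db::{HashDB, EMPTY_PREFIX};

    fn consolidate_example() {
        // Build a standalone overlay of the aliased MemoryDB type, fill it,
        // then fold it into a journal db in one call.
        let mut overlay = new_memory_db();
        let key = overlay.insert(EMPTY_PREFIX, b"node");

        let mut jdb = EarlyMergeDB::new(Arc::new(kvdb_memorydb::create(0)), None);
        jdb.consolidate(overlay);
        assert!(jdb.contains(&key, EMPTY_PREFIX));
    }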
#[cfg(test)] #[cfg(test)]


@ -23,9 +23,8 @@ use std::sync::Arc;
use ethereum_types::H256; use ethereum_types::H256;
use rlp::{Rlp, RlpStream, Encodable, DecoderError, Decodable, encode, decode}; use rlp::{Rlp, RlpStream, Encodable, DecoderError, Decodable, encode, decode};
use hash_db::{HashDB}; use hash_db::{HashDB, Prefix, EMPTY_PREFIX};
use keccak_hasher::KeccakHasher; use keccak_hasher::KeccakHasher;
use memory_db::*;
use kvdb::{KeyValueDB, DBTransaction, DBValue}; use kvdb::{KeyValueDB, DBTransaction, DBValue};
use super::{error_negatively_reference_hash}; use super::{error_negatively_reference_hash};
@ -39,7 +38,7 @@ use super::{error_negatively_reference_hash};
/// queries have an immediate effect in terms of these functions. /// queries have an immediate effect in terms of these functions.
#[derive(Clone)] #[derive(Clone)]
pub struct OverlayDB { pub struct OverlayDB {
overlay: MemoryDB<KeccakHasher, DBValue>, overlay: super::MemoryDB,
backing: Arc<KeyValueDB>, backing: Arc<KeyValueDB>,
column: Option<u32>, column: Option<u32>,
} }
@ -134,7 +133,7 @@ impl OverlayDB {
pub fn revert(&mut self) { self.overlay.clear(); } pub fn revert(&mut self) { self.overlay.clear(); }
/// Get the number of references that would be committed. /// Get the number of references that would be committed.
pub fn commit_refs(&self, key: &H256) -> i32 { self.overlay.raw(key).map_or(0, |(_, refs)| refs) } pub fn commit_refs(&self, key: &H256) -> i32 { self.overlay.raw(key, EMPTY_PREFIX).map_or(0, |(_, refs)| refs) }
/// Get the refs and value of the given key. /// Get the refs and value of the given key.
fn payload(&self, key: &H256) -> Option<Payload> { fn payload(&self, key: &H256) -> Option<Payload> {
@ -182,10 +181,10 @@ impl crate::KeyedHashDB for OverlayDB {
} }
impl HashDB<KeccakHasher, DBValue> for OverlayDB { impl HashDB<KeccakHasher, DBValue> for OverlayDB {
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
// return ok if positive; if negative, check backing - might be enough references there to make // return ok if positive; if negative, check backing - might be enough references there to make
// it positive again. // it positive again.
let k = self.overlay.raw(key); let k = self.overlay.raw(key, prefix);
let memrc = { let memrc = {
if let Some((d, rc)) = k { if let Some((d, rc)) = k {
if rc > 0 { return Some(d.clone()); } if rc > 0 { return Some(d.clone()); }
@ -209,10 +208,10 @@ impl HashDB<KeccakHasher, DBValue> for OverlayDB {
} }
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
// return ok if positive; if negative, check backing - might be enough references there to make // return ok if positive; if negative, check backing - might be enough references there to make
// it positive again. // it positive again.
let k = self.overlay.raw(key); let k = self.overlay.raw(key, prefix);
match k { match k {
Some((_, rc)) if rc > 0 => true, Some((_, rc)) if rc > 0 => true,
_ => { _ => {
@ -229,111 +228,111 @@ impl HashDB<KeccakHasher, DBValue> for OverlayDB {
} }
} }
fn insert(&mut self, value: &[u8]) -> H256 { self.overlay.insert(value) } fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 { self.overlay.insert(prefix, value) }
fn emplace(&mut self, key: H256, value: DBValue) { self.overlay.emplace(key, value); } fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) { self.overlay.emplace(key, prefix, value); }
fn remove(&mut self, key: &H256) { self.overlay.remove(key); } fn remove(&mut self, key: &H256, prefix: Prefix) { self.overlay.remove(key, prefix); }
} }
#[test] #[test]
fn overlaydb_revert() { fn overlaydb_revert() {
let mut m = OverlayDB::new_temp(); let mut m = OverlayDB::new_temp();
let foo = m.insert(b"foo"); // insert foo. let foo = m.insert(EMPTY_PREFIX, b"foo"); // insert foo.
let mut batch = m.backing.transaction(); let mut batch = m.backing.transaction();
m.commit_to_batch(&mut batch).unwrap(); // commit - new operations begin here... m.commit_to_batch(&mut batch).unwrap(); // commit - new operations begin here...
m.backing.write(batch).unwrap(); m.backing.write(batch).unwrap();
let bar = m.insert(b"bar"); // insert bar. let bar = m.insert(EMPTY_PREFIX, b"bar"); // insert bar.
m.remove(&foo); // remove foo. m.remove(&foo, EMPTY_PREFIX); // remove foo.
assert!(!m.contains(&foo)); // foo is gone. assert!(!m.contains(&foo, EMPTY_PREFIX)); // foo is gone.
assert!(m.contains(&bar)); // bar is here. assert!(m.contains(&bar, EMPTY_PREFIX)); // bar is here.
m.revert(); // revert the last two operations. m.revert(); // revert the last two operations.
assert!(m.contains(&foo)); // foo is here. assert!(m.contains(&foo, EMPTY_PREFIX)); // foo is here.
assert!(!m.contains(&bar)); // bar is gone. assert!(!m.contains(&bar, EMPTY_PREFIX)); // bar is gone.
} }
#[test] #[test]
fn overlaydb_overlay_insert_and_remove() { fn overlaydb_overlay_insert_and_remove() {
let mut trie = OverlayDB::new_temp(); let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world"); let h = trie.insert(EMPTY_PREFIX, b"hello world");
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world")); assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
trie.remove(&h); trie.remove(&h, EMPTY_PREFIX);
assert_eq!(trie.get(&h), None); assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
} }
#[test] #[test]
fn overlaydb_backing_insert_revert() { fn overlaydb_backing_insert_revert() {
let mut trie = OverlayDB::new_temp(); let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world"); let h = trie.insert(EMPTY_PREFIX, b"hello world");
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world")); assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
trie.commit().unwrap(); trie.commit().unwrap();
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world")); assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
trie.revert(); trie.revert();
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world")); assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
} }
#[test] #[test]
fn overlaydb_backing_remove() { fn overlaydb_backing_remove() {
let mut trie = OverlayDB::new_temp(); let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world"); let h = trie.insert(EMPTY_PREFIX, b"hello world");
trie.commit().unwrap(); trie.commit().unwrap();
trie.remove(&h); trie.remove(&h, EMPTY_PREFIX);
assert_eq!(trie.get(&h), None); assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
trie.commit().unwrap(); trie.commit().unwrap();
assert_eq!(trie.get(&h), None); assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
trie.revert(); trie.revert();
assert_eq!(trie.get(&h), None); assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
} }
#[test] #[test]
fn overlaydb_backing_remove_revert() { fn overlaydb_backing_remove_revert() {
let mut trie = OverlayDB::new_temp(); let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world"); let h = trie.insert(EMPTY_PREFIX, b"hello world");
trie.commit().unwrap(); trie.commit().unwrap();
trie.remove(&h); trie.remove(&h, EMPTY_PREFIX);
assert_eq!(trie.get(&h), None); assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
trie.revert(); trie.revert();
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world")); assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
} }
#[test] #[test]
fn overlaydb_negative() { fn overlaydb_negative() {
let mut trie = OverlayDB::new_temp(); let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world"); let h = trie.insert(EMPTY_PREFIX, b"hello world");
trie.commit().unwrap(); trie.commit().unwrap();
trie.remove(&h); trie.remove(&h, EMPTY_PREFIX);
trie.remove(&h); //bad - sends us into negative refs. trie.remove(&h, EMPTY_PREFIX); //bad - sends us into negative refs.
assert_eq!(trie.get(&h), None); assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
assert!(trie.commit().is_err()); assert!(trie.commit().is_err());
} }
#[test] #[test]
fn overlaydb_complex() { fn overlaydb_complex() {
let mut trie = OverlayDB::new_temp(); let mut trie = OverlayDB::new_temp();
let hfoo = trie.insert(b"foo"); let hfoo = trie.insert(EMPTY_PREFIX, b"foo");
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo")); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
let hbar = trie.insert(b"bar"); let hbar = trie.insert(EMPTY_PREFIX, b"bar");
assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar")); assert_eq!(trie.get(&hbar, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"bar"));
trie.commit().unwrap(); trie.commit().unwrap();
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo")); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar")); assert_eq!(trie.get(&hbar, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"bar"));
trie.insert(b"foo"); // two refs trie.insert(EMPTY_PREFIX, b"foo"); // two refs
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo")); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.commit().unwrap(); trie.commit().unwrap();
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo")); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar")); assert_eq!(trie.get(&hbar, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"bar"));
trie.remove(&hbar); // zero refs - delete trie.remove(&hbar, EMPTY_PREFIX); // zero refs - delete
assert_eq!(trie.get(&hbar), None); assert_eq!(trie.get(&hbar, EMPTY_PREFIX), None);
trie.remove(&hfoo); // one ref - keep trie.remove(&hfoo, EMPTY_PREFIX); // one ref - keep
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo")); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.commit().unwrap(); trie.commit().unwrap();
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo")); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.remove(&hfoo); // zero ref - would delete, but... trie.remove(&hfoo, EMPTY_PREFIX); // zero ref - would delete, but...
assert_eq!(trie.get(&hfoo), None); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX), None);
trie.insert(b"foo"); // one ref - keep after all. trie.insert(EMPTY_PREFIX, b"foo"); // one ref - keep after all.
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo")); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.commit().unwrap(); trie.commit().unwrap();
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo")); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.remove(&hfoo); // zero ref - delete trie.remove(&hfoo, EMPTY_PREFIX); // zero ref - delete
assert_eq!(trie.get(&hfoo), None); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX), None);
trie.commit().unwrap(); // trie.commit().unwrap(); //
assert_eq!(trie.get(&hfoo), None); assert_eq!(trie.get(&hfoo, EMPTY_PREFIX), None);
} }


@ -23,11 +23,10 @@ use std::sync::Arc;
use bytes::Bytes; use bytes::Bytes;
use ethereum_types::H256; use ethereum_types::H256;
use hash_db::{HashDB}; use hash_db::{HashDB, Prefix, EMPTY_PREFIX};
use heapsize::HeapSizeOf; use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
use keccak_hasher::KeccakHasher; use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction, DBValue}; use kvdb::{KeyValueDB, DBTransaction, DBValue};
use memory_db::*;
use parking_lot::RwLock; use parking_lot::RwLock;
use fastmap::H256FastMap; use fastmap::H256FastMap;
use rlp::{Rlp, RlpStream, encode, decode, DecoderError, Decodable, Encodable}; use rlp::{Rlp, RlpStream, encode, decode, DecoderError, Decodable, Encodable};
@ -66,7 +65,7 @@ use util::DatabaseKey;
/// 7. Delete ancient record from memory and disk. /// 7. Delete ancient record from memory and disk.
pub struct OverlayRecentDB { pub struct OverlayRecentDB {
transaction_overlay: MemoryDB<KeccakHasher, DBValue>, transaction_overlay: super::MemoryDB,
backing: Arc<KeyValueDB>, backing: Arc<KeyValueDB>,
journal_overlay: Arc<RwLock<JournalOverlay>>, journal_overlay: Arc<RwLock<JournalOverlay>>,
column: Option<u32>, column: Option<u32>,
@ -120,7 +119,7 @@ impl<'a> Encodable for DatabaseValueRef<'a> {
#[derive(PartialEq)] #[derive(PartialEq)]
struct JournalOverlay { struct JournalOverlay {
backing_overlay: MemoryDB<KeccakHasher, DBValue>, // Nodes added in the history period backing_overlay: super::MemoryDB, // Nodes added in the history period
pending_overlay: H256FastMap<DBValue>, // Nodes being transfered from backing_overlay to backing db pending_overlay: H256FastMap<DBValue>, // Nodes being transfered from backing_overlay to backing db
journal: HashMap<u64, Vec<JournalEntry>>, journal: HashMap<u64, Vec<JournalEntry>>,
latest_era: Option<u64>, latest_era: Option<u64>,
@ -128,19 +127,13 @@ struct JournalOverlay {
cumulative_size: usize, // cumulative size of all entries. cumulative_size: usize, // cumulative size of all entries.
} }
#[derive(PartialEq)] #[derive(PartialEq, MallocSizeOf)]
struct JournalEntry { struct JournalEntry {
id: H256, id: H256,
insertions: Vec<H256>, insertions: Vec<H256>,
deletions: Vec<H256>, deletions: Vec<H256>,
} }
impl HeapSizeOf for JournalEntry {
fn heap_size_of_children(&self) -> usize {
self.insertions.heap_size_of_children() + self.deletions.heap_size_of_children()
}
}
impl Clone for OverlayRecentDB { impl Clone for OverlayRecentDB {
fn clone(&self) -> OverlayRecentDB { fn clone(&self) -> OverlayRecentDB {
OverlayRecentDB { OverlayRecentDB {
@ -204,11 +197,11 @@ impl OverlayRecentDB {
for (k, v) in value.inserts { for (k, v) in value.inserts {
let short_key = to_short_key(&k); let short_key = to_short_key(&k);
if !overlay.contains(&short_key) { if !overlay.contains(&short_key, EMPTY_PREFIX) {
cumulative_size += v.len(); cumulative_size += v.len();
} }
overlay.emplace(short_key, v); overlay.emplace(short_key, EMPTY_PREFIX, v);
inserted_keys.push(k); inserted_keys.push(k);
} }
journal.entry(era).or_insert_with(Vec::new).push(JournalEntry { journal.entry(era).or_insert_with(Vec::new).push(JournalEntry {
@ -272,12 +265,13 @@ impl JournalDB for OverlayRecentDB {
} }
fn mem_used(&self) -> usize { fn mem_used(&self) -> usize {
let mut mem = self.transaction_overlay.mem_used(); let mut ops = new_malloc_size_ops();
let mut mem = self.transaction_overlay.size_of(&mut ops);
let overlay = self.journal_overlay.read(); let overlay = self.journal_overlay.read();
mem += overlay.backing_overlay.mem_used(); mem += overlay.backing_overlay.size_of(&mut ops);
mem += overlay.pending_overlay.heap_size_of_children(); mem += overlay.pending_overlay.size_of(&mut ops);
mem += overlay.journal.heap_size_of_children(); mem += overlay.journal.size_of(&mut ops);
mem mem
} }
@ -302,7 +296,7 @@ impl JournalDB for OverlayRecentDB {
fn state(&self, key: &H256) -> Option<Bytes> { fn state(&self, key: &H256) -> Option<Bytes> {
let journal_overlay = self.journal_overlay.read(); let journal_overlay = self.journal_overlay.read();
let key = to_short_key(key); let key = to_short_key(key);
journal_overlay.backing_overlay.get(&key).map(|v| v.into_vec()) journal_overlay.backing_overlay.get(&key, EMPTY_PREFIX).map(|v| v.into_vec())
.or_else(|| journal_overlay.pending_overlay.get(&key).map(|d| d.clone().into_vec())) .or_else(|| journal_overlay.pending_overlay.get(&key).map(|d| d.clone().into_vec()))
.or_else(|| self.backing.get_by_prefix(self.column, &key[0..DB_PREFIX_LEN]).map(|b| b.into_vec())) .or_else(|| self.backing.get_by_prefix(self.column, &key[0..DB_PREFIX_LEN]).map(|b| b.into_vec()))
} }
@ -334,11 +328,11 @@ impl JournalDB for OverlayRecentDB {
for (k, v) in insertions { for (k, v) in insertions {
let short_key = to_short_key(&k); let short_key = to_short_key(&k);
if !journal_overlay.backing_overlay.contains(&short_key) { if !journal_overlay.backing_overlay.contains(&short_key, EMPTY_PREFIX) {
journal_overlay.cumulative_size += v.len(); journal_overlay.cumulative_size += v.len();
} }
journal_overlay.backing_overlay.emplace(short_key, v); journal_overlay.backing_overlay.emplace(short_key, EMPTY_PREFIX, v);
} }
let index = journal_overlay.journal.get(&now).map_or(0, |j| j.len()); let index = journal_overlay.journal.get(&now).map_or(0, |j| j.len());
@ -387,7 +381,7 @@ impl JournalDB for OverlayRecentDB {
{ {
if *canon_id == journal.id { if *canon_id == journal.id {
for h in &journal.insertions { for h in &journal.insertions {
if let Some((d, rc)) = journal_overlay.backing_overlay.raw(&to_short_key(h)) { if let Some((d, rc)) = journal_overlay.backing_overlay.raw(&to_short_key(h), EMPTY_PREFIX) {
if rc > 0 { if rc > 0 {
canon_insertions.push((h.clone(), d.clone())); //TODO: optimize this to avoid data copy canon_insertions.push((h.clone(), d.clone())); //TODO: optimize this to avoid data copy
} }
@ -410,13 +404,13 @@ impl JournalDB for OverlayRecentDB {
} }
// update the overlay // update the overlay
for k in overlay_deletions { for k in overlay_deletions {
if let Some(val) = journal_overlay.backing_overlay.remove_and_purge(&to_short_key(&k)) { if let Some(val) = journal_overlay.backing_overlay.remove_and_purge(&to_short_key(&k), EMPTY_PREFIX) {
journal_overlay.cumulative_size -= val.len(); journal_overlay.cumulative_size -= val.len();
} }
} }
// apply canon deletions // apply canon deletions
for k in canon_deletions { for k in canon_deletions {
if !journal_overlay.backing_overlay.contains(&to_short_key(&k)) { if !journal_overlay.backing_overlay.contains(&to_short_key(&k), EMPTY_PREFIX) {
batch.delete(self.column, k.as_bytes()); batch.delete(self.column, k.as_bytes());
} }
} }
@ -458,14 +452,14 @@ impl JournalDB for OverlayRecentDB {
Ok(ops) Ok(ops)
} }
fn consolidate(&mut self, with: MemoryDB<KeccakHasher, DBValue>) { fn consolidate(&mut self, with: super::MemoryDB) {
self.transaction_overlay.consolidate(with); self.transaction_overlay.consolidate(with);
} }
} }
impl HashDB<KeccakHasher, DBValue> for OverlayRecentDB { impl HashDB<KeccakHasher, DBValue> for OverlayRecentDB {
fn get(&self, key: &H256) -> Option<DBValue> { fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if let Some((d, rc)) = self.transaction_overlay.raw(key) { if let Some((d, rc)) = self.transaction_overlay.raw(key, prefix) {
if rc > 0 { if rc > 0 {
return Some(d.clone()) return Some(d.clone())
} }
@ -473,24 +467,24 @@ impl HashDB<KeccakHasher, DBValue> for OverlayRecentDB {
let v = { let v = {
let journal_overlay = self.journal_overlay.read(); let journal_overlay = self.journal_overlay.read();
let key = to_short_key(key); let key = to_short_key(key);
journal_overlay.backing_overlay.get(&key) journal_overlay.backing_overlay.get(&key, prefix)
.or_else(|| journal_overlay.pending_overlay.get(&key).cloned()) .or_else(|| journal_overlay.pending_overlay.get(&key).cloned())
}; };
v.or_else(|| self.payload(key)) v.or_else(|| self.payload(key))
} }
fn contains(&self, key: &H256) -> bool { fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.get(key).is_some() self.get(key, prefix).is_some()
} }
fn insert(&mut self, value: &[u8]) -> H256 { fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.transaction_overlay.insert(value) self.transaction_overlay.insert(prefix, value)
} }
fn emplace(&mut self, key: H256, value: DBValue) { fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.transaction_overlay.emplace(key, value); self.transaction_overlay.emplace(key, prefix, value);
} }
fn remove(&mut self, key: &H256) { fn remove(&mut self, key: &H256, prefix: Prefix) {
self.transaction_overlay.remove(key); self.transaction_overlay.remove(key, prefix);
} }
} }
@ -499,7 +493,7 @@ mod tests {
use keccak::keccak; use keccak::keccak;
use super::*; use super::*;
use hash_db::HashDB; use hash_db::{HashDB, EMPTY_PREFIX};
use {kvdb_memorydb, JournalDB}; use {kvdb_memorydb, JournalDB};
fn new_db() -> OverlayRecentDB { fn new_db() -> OverlayRecentDB {
@ -512,7 +506,7 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let x = jdb.insert(b"X"); let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
@ -522,10 +516,10 @@ mod tests {
jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&x); jdb.remove(&x, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let x = jdb.insert(b"X"); let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
@ -534,30 +528,30 @@ mod tests {
jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap(); jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&x)); assert!(jdb.contains(&x, EMPTY_PREFIX));
} }
#[test] #[test]
fn long_history() { fn long_history() {
// history is 3 // history is 3
let mut jdb = new_db(); let mut jdb = new_db();
let h = jdb.insert(b"foo"); let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h); jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&h)); assert!(!jdb.contains(&h, EMPTY_PREFIX));
} }
#[test] #[test]
@ -565,42 +559,42 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.remove(&baz); jdb.remove(&baz, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
} }
#[test] #[test]
@ -608,31 +602,31 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
} }
#[test] #[test]
@ -640,22 +634,22 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap(); jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -664,23 +658,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap(); jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -690,23 +684,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
@ -716,23 +710,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(2, &keccak(b"2a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3a"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"3a"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3b"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"3b"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
@ -751,8 +745,8 @@ mod tests {
let foo = { let foo = {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None); let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.emplace(bar.clone(), DBValue::from_slice(b"bar")); jdb.emplace(bar.clone(), EMPTY_PREFIX, DBValue::from_slice(b"bar"));
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
foo foo
@ -760,18 +754,18 @@ mod tests {
{ {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None); let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
} }
{ {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None); let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
} }
} }
@ -781,19 +775,19 @@ mod tests {
let mut jdb = new_db(); let mut jdb = new_db();
// history is 4 // history is 4
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), None).unwrap(); jdb.commit_batch(3, &keccak(b"3"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
// expunge foo // expunge foo
@ -807,39 +801,39 @@ mod tests {
let mut jdb = new_db(); let mut jdb = new_db();
// history is 4 // history is 4
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1a"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2a"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2a"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2b"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3a"), None).unwrap(); jdb.commit_batch(3, &keccak(b"3a"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3b"), None).unwrap(); jdb.commit_batch(3, &keccak(b"3b"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(4, &keccak(b"4a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(4, &keccak(b"4b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
@ -852,35 +846,35 @@ mod tests {
fn broken_assert() { fn broken_assert() {
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
// foo is ancient history. // foo is ancient history.
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); // BROKEN jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); // BROKEN
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap(); jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
} }
#[test] #[test]
fn reopen_test() { fn reopen_test() {
let mut jdb = new_db(); let mut jdb = new_db();
// history is 4 // history is 4
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -894,16 +888,16 @@ mod tests {
// foo is ancient history. // foo is ancient history.
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(5, &keccak(b"5"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(5, &keccak(b"5"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(6, &keccak(b"6"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(6, &keccak(b"6"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.insert(b"bar"); jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(7, &keccak(b"7"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(7, &keccak(b"7"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
} }
@ -918,7 +912,7 @@ mod tests {
{ {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None); let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
// history is 1 // history is 1
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -926,24 +920,24 @@ mod tests {
// foo is ancient history. // foo is ancient history.
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.insert(b"foo"); jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db // incantation to reopen the db
}; { }; {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None); let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db // incantation to reopen the db
}; { }; {
@ -951,7 +945,7 @@ mod tests {
jdb.commit_batch(5, &keccak(b"5"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(5, &keccak(b"5"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db // incantation to reopen the db
}; { }; {
@ -959,7 +953,7 @@ mod tests {
jdb.commit_batch(6, &keccak(b"6"), Some((4, keccak(b"4")))).unwrap(); jdb.commit_batch(6, &keccak(b"6"), Some((4, keccak(b"4")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
} }
} }
@ -970,16 +964,16 @@ mod tests {
let (foo, bar, baz) = { let (foo, bar, baz) = {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None); let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
// history is 1 // history is 1
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
(foo, bar, baz) (foo, bar, baz)
@ -989,43 +983,43 @@ mod tests {
let mut jdb = OverlayRecentDB::new(shared_db, None); let mut jdb = OverlayRecentDB::new(shared_db, None);
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
} }
} }
#[test] #[test]
fn insert_older_era() { fn insert_older_era() {
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0a"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0a"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0a")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0a")))).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(0, &keccak(b"0b"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0b"), None).unwrap();
assert!(jdb.can_reconstruct_refs()); assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
} }
#[test] #[test]
fn inject() { fn inject() {
let mut jdb = new_db(); let mut jdb = new_db();
let key = jdb.insert(b"dog"); let key = jdb.insert(EMPTY_PREFIX, b"dog");
jdb.inject_batch().unwrap(); jdb.inject_batch().unwrap();
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog")); assert_eq!(jdb.get(&key, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key); jdb.remove(&key, EMPTY_PREFIX);
jdb.inject_batch().unwrap(); jdb.inject_batch().unwrap();
assert!(jdb.get(&key).is_none()); assert!(jdb.get(&key, EMPTY_PREFIX).is_none());
} }
#[test] #[test]
@ -1037,7 +1031,7 @@ mod tests {
assert!(jdb.earliest_era().is_none()); assert!(jdb.earliest_era().is_none());
// single journalled era. // single journalled era.
let _key = jdb.insert(b"hello!"); let _key = jdb.insert(EMPTY_PREFIX, b"hello!");
let mut batch = jdb.backing().transaction(); let mut batch = jdb.backing().transaction();
jdb.journal_under(&mut batch, 0, &keccak(b"0")).unwrap(); jdb.journal_under(&mut batch, 0, &keccak(b"0")).unwrap();
jdb.backing().write_buffered(batch); jdb.backing().write_buffered(batch);


@ -22,11 +22,10 @@ use std::sync::Arc;
use bytes::Bytes; use bytes::Bytes;
use ethereum_types::H256; use ethereum_types::H256;
use hash_db::{HashDB}; use hash_db::{HashDB, Prefix, EMPTY_PREFIX};
use heapsize::HeapSizeOf; use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
use keccak_hasher::KeccakHasher; use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction, DBValue}; use kvdb::{KeyValueDB, DBTransaction, DBValue};
use memory_db::MemoryDB;
use overlaydb::OverlayDB; use overlaydb::OverlayDB;
use rlp::{encode, decode}; use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY}; use super::{DB_PREFIX_LEN, LATEST_ERA_KEY};
@ -81,11 +80,11 @@ impl RefCountedDB {
} }
impl HashDB<KeccakHasher, DBValue> for RefCountedDB { impl HashDB<KeccakHasher, DBValue> for RefCountedDB {
fn get(&self, key: &H256) -> Option<DBValue> { self.forward.get(key) } fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> { self.forward.get(key, prefix) }
fn contains(&self, key: &H256) -> bool { self.forward.contains(key) } fn contains(&self, key: &H256, prefix: Prefix) -> bool { self.forward.contains(key, prefix) }
fn insert(&mut self, value: &[u8]) -> H256 { let r = self.forward.insert(value); self.inserts.push(r.clone()); r } fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 { let r = self.forward.insert(prefix, value); self.inserts.push(r.clone()); r }
fn emplace(&mut self, key: H256, value: DBValue) { self.inserts.push(key.clone()); self.forward.emplace(key, value); } fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) { self.inserts.push(key.clone()); self.forward.emplace(key, prefix, value); }
fn remove(&mut self, key: &H256) { self.removes.push(key.clone()); } fn remove(&mut self, key: &H256, _prefix: Prefix) { self.removes.push(key.clone()); }
} }
impl ::traits::KeyedHashDB for RefCountedDB { impl ::traits::KeyedHashDB for RefCountedDB {
@ -105,7 +104,8 @@ impl JournalDB for RefCountedDB {
} }
fn mem_used(&self) -> usize { fn mem_used(&self) -> usize {
self.inserts.heap_size_of_children() + self.removes.heap_size_of_children() let mut ops = new_malloc_size_ops();
self.inserts.size_of(&mut ops) + self.removes.size_of(&mut ops)
} }
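mem_used now builds one malloc-size-of context and asks each tracked collection for its size through it, rather than summing HeapSizeOf::heap_size_of_children. A small sketch of the same pattern, using only the imports shown in this hunk (the helper name is illustrative):

use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};

// Sum the measured heap footprint of two tracked collections with a single
// measuring context, mirroring the body of mem_used() above.
fn tracked_size<A: MallocSizeOf, B: MallocSizeOf>(a: &A, b: &B) -> usize {
    let mut ops = new_malloc_size_ops();
    a.size_of(&mut ops) + b.size_of(&mut ops)
}

Calling it with the journal's inserts and removes vectors reproduces the new mem_used body.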
fn is_empty(&self) -> bool { fn is_empty(&self) -> bool {
@ -184,7 +184,7 @@ impl JournalDB for RefCountedDB {
}.expect("rlp read from db; qed"); }.expect("rlp read from db; qed");
trace!(target: "rcdb", "delete journal for time #{}.{}=>{}, (canon was {}): deleting {:?}", end_era, db_key.index, our_id, canon_id, to_remove); trace!(target: "rcdb", "delete journal for time #{}.{}=>{}, (canon was {}): deleting {:?}", end_era, db_key.index, our_id, canon_id, to_remove);
for i in &to_remove { for i in &to_remove {
self.forward.remove(i); self.forward.remove(i, EMPTY_PREFIX);
} }
batch.delete(self.column, &last); batch.delete(self.column, &last);
db_key.index += 1; db_key.index += 1;
@ -197,19 +197,19 @@ impl JournalDB for RefCountedDB {
fn inject(&mut self, batch: &mut DBTransaction) -> io::Result<u32> { fn inject(&mut self, batch: &mut DBTransaction) -> io::Result<u32> {
self.inserts.clear(); self.inserts.clear();
for remove in self.removes.drain(..) { for remove in self.removes.drain(..) {
self.forward.remove(&remove); self.forward.remove(&remove, EMPTY_PREFIX);
} }
self.forward.commit_to_batch(batch) self.forward.commit_to_batch(batch)
} }
fn consolidate(&mut self, mut with: MemoryDB<KeccakHasher, DBValue>) { fn consolidate(&mut self, mut with: super::MemoryDB) {
for (key, (value, rc)) in with.drain() { for (key, (value, rc)) in with.drain() {
for _ in 0..rc { for _ in 0..rc {
self.emplace(key, value.clone()); self.emplace(key, EMPTY_PREFIX, value.clone());
} }
for _ in rc..0 { for _ in rc..0 {
self.remove(&key); self.remove(&key, EMPTY_PREFIX);
} }
} }
} }
@ -219,7 +219,7 @@ impl JournalDB for RefCountedDB {
mod tests { mod tests {
use keccak::keccak; use keccak::keccak;
use hash_db::HashDB; use hash_db::{HashDB, EMPTY_PREFIX};
use super::*; use super::*;
use {JournalDB, kvdb_memorydb}; use {JournalDB, kvdb_memorydb};
@ -232,18 +232,18 @@ mod tests {
fn long_history() { fn long_history() {
// history is 3 // history is 3
let mut jdb = new_db(); let mut jdb = new_db();
let h = jdb.insert(b"foo"); let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h); jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&h)); assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap();
assert!(!jdb.contains(&h)); assert!(!jdb.contains(&h, EMPTY_PREFIX));
} }
#[test] #[test]
@ -251,10 +251,10 @@ mod tests {
// history is 3 // history is 3
let mut jdb = new_db(); let mut jdb = new_db();
assert_eq!(jdb.latest_era(), None); assert_eq!(jdb.latest_era(), None);
let h = jdb.insert(b"foo"); let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert_eq!(jdb.latest_era(), Some(0)); assert_eq!(jdb.latest_era(), Some(0));
jdb.remove(&h); jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap(); jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert_eq!(jdb.latest_era(), Some(1)); assert_eq!(jdb.latest_era(), Some(1));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap(); jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
@ -270,37 +270,37 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.remove(&baz); jdb.remove(&baz, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap(); jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap(); jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(!jdb.contains(&foo)); assert!(!jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
} }
#[test] #[test]
@ -308,39 +308,39 @@ mod tests {
// history is 1 // history is 1
let mut jdb = new_db(); let mut jdb = new_db();
let foo = jdb.insert(b"foo"); let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(b"bar"); let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap(); jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo); jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(b"baz"); let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
jdb.remove(&bar); jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap(); jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar)); assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz)); assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap(); jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.contains(&foo)); assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz)); assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar)); assert!(!jdb.contains(&bar, EMPTY_PREFIX));
} }
#[test] #[test]
fn inject() { fn inject() {
let mut jdb = new_db(); let mut jdb = new_db();
let key = jdb.insert(b"dog"); let key = jdb.insert(EMPTY_PREFIX, b"dog");
jdb.inject_batch().unwrap(); jdb.inject_batch().unwrap();
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog")); assert_eq!(jdb.get(&key, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key); jdb.remove(&key, EMPTY_PREFIX);
jdb.inject_batch().unwrap(); jdb.inject_batch().unwrap();
assert!(jdb.get(&key).is_none()); assert!(jdb.get(&key, EMPTY_PREFIX).is_none());
} }
} }


@ -93,7 +93,7 @@ pub trait JournalDB: KeyedHashDB {
fn flush(&self) {} fn flush(&self) {}
/// Consolidate all the insertions and deletions in the given memory overlay. /// Consolidate all the insertions and deletions in the given memory overlay.
fn consolidate(&mut self, overlay: ::memory_db::MemoryDB<KeccakHasher, DBValue>); fn consolidate(&mut self, overlay: super::MemoryDB);
/// Commit all changes in a single batch /// Commit all changes in a single batch
#[cfg(test)] #[cfg(test)]


@ -8,5 +8,5 @@ license = "GPL-3.0"
[dependencies] [dependencies]
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
tiny-keccak = "1.4.2" tiny-keccak = "1.4.2"
hash-db = "0.11.0" hash-db = "0.12.4"
plain_hasher = "0.2" plain_hasher = "0.2"


@ -6,5 +6,5 @@ description = "An LRU-cache which operates on memory used"
license = "GPL3" license = "GPL3"
[dependencies] [dependencies]
heapsize = "0.4" parity-util-mem = "0.1"
lru-cache = "0.1" lru-cache = "0.1"


@ -18,10 +18,10 @@
//! crate. //! crate.
// TODO: push changes upstream in a clean way. // TODO: push changes upstream in a clean way.
extern crate heapsize; extern crate parity_util_mem;
extern crate lru_cache; extern crate lru_cache;
use heapsize::HeapSizeOf; use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};
use lru_cache::LruCache; use lru_cache::LruCache;
use std::hash::Hash; use std::hash::Hash;
@ -29,18 +29,18 @@ use std::hash::Hash;
const INITIAL_CAPACITY: usize = 4; const INITIAL_CAPACITY: usize = 4;
/// An LRU-cache which operates on memory used. /// An LRU-cache which operates on memory used.
pub struct MemoryLruCache<K: Eq + Hash, V: HeapSizeOf> { pub struct MemoryLruCache<K: Eq + Hash, V> {
inner: LruCache<K, V>, inner: LruCache<K, V>,
cur_size: usize, cur_size: usize,
max_size: usize, max_size: usize,
} }
// amount of memory used when the item will be put on the heap. // amount of memory used when the item will be put on the heap.
fn heap_size_of<T: HeapSizeOf>(val: &T) -> usize { fn heap_size_of<T: MallocSizeOf>(val: &T) -> usize {
::std::mem::size_of::<T>() + val.heap_size_of_children() ::std::mem::size_of::<T>() + val.malloc_size_of()
} }
impl<K: Eq + Hash, V: HeapSizeOf> MemoryLruCache<K, V> { impl<K: Eq + Hash, V: MallocSizeOf> MemoryLruCache<K, V> {
/// Create a new cache with a maximum size in bytes. /// Create a new cache with a maximum size in bytes.
pub fn new(max_size: usize) -> Self { pub fn new(max_size: usize) -> Self {
MemoryLruCache { MemoryLruCache {
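With the HeapSizeOf bound dropped, cached values only need parity-util-mem's MallocSizeOf, and each entry is charged its in-place size plus its measured heap allocations, exactly as the rewritten heap_size_of helper above computes. A short sketch of that accounting rule; the example value type is an assumption:

use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

// Same charging rule as heap_size_of() above: stack size of the value
// plus whatever it owns on the heap.
fn charged_size<T: MallocSizeOf>(val: &T) -> usize {
    ::std::mem::size_of::<T>() + val.malloc_size_of()
}

// The cache itself is still sized by a byte budget, e.g. (assumed value type):
// let cache: MemoryLruCache<u64, Vec<u8>> = MemoryLruCache::new(1024 * 1024);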


@ -1,10 +0,0 @@
[package]
name = "memzero"
version = "0.1.0"
description = "A wrapper for zero-ing out memory when dropped"
license = "GPL-3.0"
homepage = "https://parity.io"
repository = "https://github.com/paritytech/parity-ethereum"
documentation = "https://docs.rs/crate/memzero"
authors = ["Parity Technologies <admin@parity.io>"]
edition = "2018"


@ -1,54 +0,0 @@
// Copyright 2015-2019 Parity Technologies (UK) Ltd.
// This file is part of Parity Ethereum.
// Parity Ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity Ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
use std::ops::{Deref, DerefMut};
use std::ptr;
/// Wrapper to zero out memory when dropped.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Memzero<T: AsMut<[u8]>> {
mem: T,
}
impl<T: AsMut<[u8]>> From<T> for Memzero<T> {
fn from(mem: T) -> Memzero<T> {
Memzero { mem }
}
}
impl<T: AsMut<[u8]>> Drop for Memzero<T> {
fn drop(&mut self) {
unsafe {
for byte_ref in self.mem.as_mut() {
ptr::write_volatile(byte_ref, 0)
}
}
}
}
impl<T: AsMut<[u8]>> Deref for Memzero<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.mem
}
}
impl<T: AsMut<[u8]>> DerefMut for Memzero<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.mem
}
}


@ -6,15 +6,15 @@ description = "Merkle-Patricia Trie (Ethereum Style)"
license = "GPL-3.0" license = "GPL-3.0"
[dependencies] [dependencies]
trie-db = "0.11.0" trie-db = "0.12.4"
keccak-hasher = { version = "0.1.1", path = "../keccak-hasher" } keccak-hasher = { version = "0.1.1", path = "../keccak-hasher" }
hash-db = "0.11.0" hash-db = "0.12.4"
rlp = "0.4.0" rlp = "0.4.0"
parity-bytes = "0.1" parity-bytes = "0.1"
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
elastic-array = "0.10" elastic-array = "0.10"
[dev-dependencies] [dev-dependencies]
memory-db = "0.11.0" memory-db = "0.12.4"
keccak-hash = "0.2.0" keccak-hash = "0.2.0"
journaldb = { path = "../journaldb" } journaldb = { path = "../journaldb" }


@ -6,6 +6,6 @@ description = "Trie-root helpers, ethereum style"
license = "GPL-3.0" license = "GPL-3.0"
[dependencies] [dependencies]
triehash = "0.5.0" triehash = "0.6.0"
ethereum-types = "0.6.0" ethereum-types = "0.6.0"
keccak-hasher = { path = "../keccak-hasher" } keccak-hasher = { path = "../keccak-hasher" }


@ -13,7 +13,7 @@ ring = "0.14.6"
ethkey = { path = "../accounts/ethkey" } ethkey = { path = "../accounts/ethkey" }
hex = "0.2" hex = "0.2"
log = "0.4" log = "0.4"
memzero = { path = "../util/memzero" } parity-util-mem = "0.1"
ordered-float = "0.5" ordered-float = "0.5"
parking_lot = "0.7" parking_lot = "0.7"
rand = "0.6" rand = "0.6"


@ -24,7 +24,7 @@ extern crate ethcore_network as network;
extern crate ethereum_types; extern crate ethereum_types;
extern crate ethkey; extern crate ethkey;
extern crate hex; extern crate hex;
extern crate memzero; extern crate parity_util_mem;
extern crate ordered_float; extern crate ordered_float;
extern crate parking_lot; extern crate parking_lot;
extern crate rand; extern crate rand;


@ -20,7 +20,7 @@ use aes_gcm::{Encryptor, Decryptor};
use ethkey::crypto::ecies; use ethkey::crypto::ecies;
use ethereum_types::H256; use ethereum_types::H256;
use ethkey::{self, Public, Secret}; use ethkey::{self, Public, Secret};
use memzero::Memzero; use parity_util_mem::Memzero;
/// Length of AES key /// Length of AES key
pub const AES_KEY_LEN: usize = 32; pub const AES_KEY_LEN: usize = 32;
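Memzero keeps its old behaviour; only its home changes from the deleted util/memzero crate to parity-util-mem. Assuming the re-exported wrapper keeps the From/Deref/zero-on-drop surface shown in the removed lib.rs above, a usage sketch (the function is hypothetical):

use parity_util_mem::Memzero;

// Wrap freshly generated key material so the bytes are overwritten with
// zeroes when the value is dropped; Deref still exposes the inner buffer.
fn blank_key() -> Memzero<[u8; AES_KEY_LEN]> {
    Memzero::from([0u8; AES_KEY_LEN])
}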


@ -23,7 +23,7 @@ use std::collections::HashMap;
use ethereum_types::H256; use ethereum_types::H256;
use ethkey::{KeyPair, Public, Secret}; use ethkey::{KeyPair, Public, Secret};
use memzero::Memzero; use parity_util_mem::Memzero;
use rand::{Rng, rngs::OsRng}; use rand::{Rng, rngs::OsRng};
use rpc::crypto::{AES_KEY_LEN, EncryptionInstance, DecryptionInstance}; use rpc::crypto::{AES_KEY_LEN, EncryptionInstance, DecryptionInstance};


@ -28,7 +28,7 @@ use jsonrpc_derive::rpc;
use jsonrpc_pubsub::{Session, PubSubMetadata, SubscriptionId, typed::Subscriber}; use jsonrpc_pubsub::{Session, PubSubMetadata, SubscriptionId, typed::Subscriber};
use ethereum_types::H256; use ethereum_types::H256;
use memzero::Memzero; use parity_util_mem::Memzero;
use parking_lot::RwLock; use parking_lot::RwLock;
use self::filter::Filter; use self::filter::Filter;