Remove calls to heapsize (#10432)

* update memorydb trait
* use malloc_size_of instead of heapsize_of
* use jemalloc as default allocator for parity client.
Authored by cheme on 2019-06-19 13:54:05 +02:00, committed by GitHub
parent 859a41308c
commit 6fc5014b4d
84 changed files with 926 additions and 1074 deletions
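For context, the change this commit applies across the codebase is largely mechanical: manual `HeapSizeOf` implementations are replaced with `parity-util-mem`'s `MallocSizeOf`, either derived or written by hand. A minimal sketch of the pattern, using a hypothetical `CacheEntry` type that is not part of this diff:

```rust
// Sketch of the heapsize -> parity-util-mem migration pattern.
// `CacheEntry` is a hypothetical type, used only for illustration.
use parity_util_mem::{MallocSizeOf, MallocSizeOfOps};

struct CacheEntry {
    payload: Vec<u8>,
    children: Vec<u64>,
}

// Previously: `impl HeapSizeOf for CacheEntry { fn heap_size_of_children(&self) -> usize { ... } }`
// Now: implement (or `#[derive(MallocSizeOf)]`) the parity-util-mem trait instead.
impl MallocSizeOf for CacheEntry {
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        self.payload.size_of(ops) + self.children.size_of(ops)
    }
}
```

Types whose fields all implement `MallocSizeOf` can use the derive from `malloc_size_of_derive` instead, with `#[ignore_malloc_size_of = "..."]` on fields that should be skipped, as several structs in the diffs below do.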

Cargo.lock generated

@ -121,7 +121,7 @@ version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"backtrace-sys 0.1.24 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-demangle 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -312,7 +312,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "cfg-if"
version = "0.1.5"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
@ -357,6 +357,14 @@ dependencies = [
"vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "clear_on_drop"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "cli-signer"
version = "1.4.0"
@ -403,9 +411,9 @@ dependencies = [
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ethjson 0.1.0",
"ethkey 0.3.0",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp_derive 0.1.0",
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -520,7 +528,7 @@ version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -534,7 +542,7 @@ version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -547,7 +555,7 @@ version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -559,7 +567,7 @@ name = "crossbeam-utils"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -572,7 +580,7 @@ name = "crossbeam-utils"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -736,10 +744,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "elastic-array"
version = "0.10.0"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -777,7 +785,7 @@ source = "git+https://github.com/paritytech/rust-secp256k1#9791e79f21a5309dcb6e0
dependencies = [
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -873,8 +881,7 @@ dependencies = [
"evm 0.1.0",
"fetch 0.1.0",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
"journaldb 0.2.0",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -888,13 +895,14 @@ dependencies = [
"lru-cache 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"macros 0.1.0",
"memory-cache 0.1.0",
"memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-crypto 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-runtime 0.1.0",
"parity-snappy 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"patricia-trie-ethereum 0.1.0",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -910,8 +918,8 @@ dependencies = [
"tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"time-utils 0.1.0",
"trace-time 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-standardmap 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-standardmap 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"triehash-ethereum 0.2.0",
"unexpected 0.1.0",
"using_queue 0.1.0",
@ -945,13 +953,13 @@ dependencies = [
"ethcore-db 0.1.0",
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ethkey 0.3.0",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"kvdb-memorydb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -983,8 +991,8 @@ version = "0.1.0"
dependencies = [
"common-types 0.1.0",
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp_derive 0.1.0",
@ -1025,8 +1033,7 @@ dependencies = [
"failsafe 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fastmap 0.1.0",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
"journaldb 0.2.0",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1035,8 +1042,9 @@ dependencies = [
"kvdb-memorydb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"memory-cache 0.1.0",
"memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"patricia-trie-ethereum 0.1.0",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1047,7 +1055,7 @@ dependencies = [
"smallvec 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
"stats 0.1.0",
"tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"triehash-ethereum 0.2.0",
"vm 0.1.0",
]
@ -1084,12 +1092,12 @@ dependencies = [
"ethkey 0.3.0",
"fetch 0.1.0",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"hyper 0.12.19 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"linked-hash-map 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-runtime 0.1.0",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"price-info 1.12.0",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1176,11 +1184,11 @@ dependencies = [
"ethkey 0.3.0",
"fetch 0.1.0",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-crypto 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"patricia-trie-ethereum 0.1.0",
"rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1193,7 +1201,7 @@ dependencies = [
"time-utils 0.1.0",
"tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"transaction-pool 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1290,8 +1298,7 @@ dependencies = [
"ethstore 0.2.1",
"fastmap 0.1.0",
"futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hasher 0.1.1",
"kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1300,6 +1307,7 @@ dependencies = [
"macros 0.1.0",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-runtime 0.1.0",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1343,8 +1351,8 @@ dependencies = [
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"memzero 0.1.0",
"parity-crypto 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-wordlist 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1418,12 +1426,12 @@ dependencies = [
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"criterion 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"memory-cache 0.1.0",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
"vm 0.1.0",
@ -1531,7 +1539,7 @@ version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hex 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1680,28 +1688,27 @@ dependencies = [
[[package]]
name = "hash-db"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "hash-db"
version = "0.12.2"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "hash256-std-hasher"
version = "0.12.2"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crunchy 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "hashmap_core"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "heapsize"
version = "0.4.2"
source = "git+https://github.com/cheme/heapsize.git?branch=ec-macfix#421df390a930cb523a09e5528e6fe57b534b3b26"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"jemallocator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -1985,15 +1992,15 @@ dependencies = [
"env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)",
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"fastmap 0.1.0",
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hasher 0.1.1",
"kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"kvdb-memorydb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2141,18 +2148,18 @@ name = "keccak-hasher"
version = "0.1.1"
dependencies = [
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"plain_hasher 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "keccak-hasher"
version = "0.12.2"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"hash-db 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)",
"hash256-std-hasher 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"hash256-std-hasher 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2170,7 +2177,7 @@ name = "kvdb"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2188,7 +2195,7 @@ name = "kvdb-rocksdb"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
"fs-swap 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
"interleaved-ordered 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2272,7 +2279,7 @@ name = "log"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -2295,6 +2302,16 @@ dependencies = [
name = "macros"
version = "0.1.0"
[[package]]
name = "malloc_size_of_derive"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.26 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "matches"
version = "0.1.8"
@ -2305,7 +2322,7 @@ name = "memchr"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2328,17 +2345,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
name = "memory-cache"
version = "0.1.0"
dependencies = [
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"lru-cache 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "memory-db"
version = "0.11.0"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"hashmap_core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -2346,10 +2364,6 @@ name = "memory_units"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "memzero"
version = "0.1.0"
[[package]]
name = "memzero"
version = "0.1.0"
@ -2483,7 +2497,7 @@ name = "net2"
version = "0.2.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -2739,6 +2753,7 @@ dependencies = [
"parity-rpc 1.12.0",
"parity-runtime 0.1.0",
"parity-updater 1.12.0",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-version 2.6.0",
"parity-whisper 0.1.0",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2989,6 +3004,21 @@ dependencies = [
"tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parity-util-mem"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"clear_on_drop 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"jemallocator 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"malloc_size_of_derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "parity-version"
version = "2.6.0"
@ -3023,8 +3053,8 @@ dependencies = [
"jsonrpc-derive 10.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"jsonrpc-pubsub 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"memzero 0.1.0",
"ordered-float 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -3115,16 +3145,16 @@ dependencies = [
name = "patricia-trie-ethereum"
version = "0.1.0"
dependencies = [
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"journaldb 0.2.0",
"keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hasher 0.1.1",
"memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -3627,7 +3657,7 @@ dependencies = [
name = "rlp_compress"
version = "0.1.0"
dependencies = [
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -3893,7 +3923,7 @@ name = "socket2"
version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.48 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -4380,30 +4410,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "trie-db"
version = "0.11.0"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"hashmap_core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "trie-standardmap"
version = "0.12.3"
version = "0.12.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"hash-db 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hasher 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hasher 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "triehash"
version = "0.5.0"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -4413,7 +4444,7 @@ version = "0.2.0"
dependencies = [
"ethereum-types 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"keccak-hasher 0.1.1",
"triehash 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"triehash 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -4443,7 +4474,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"byteorder 1.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"crunchy 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)",
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hex 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -4591,7 +4622,7 @@ dependencies = [
"parity-bytes 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"patricia-trie-ethereum 0.1.0",
"rlp 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)",
"trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@ -4809,10 +4840,11 @@ dependencies = [
"checksum cast 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "926013f2860c46252efceabb19f4a6b308197505082c609025aa6706c011d427"
"checksum cc 1.0.28 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4a8b715cb4597106ea87c7c84b2f1d452c7492033765df7f32651e66fcf749"
"checksum cesu8 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c"
"checksum cfg-if 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0c4e7bb64a8ebb0d856483e1e682ea3422f883c5f5615a90d51a2c82fe87fdd3"
"checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33"
"checksum chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "45912881121cb26fad7c38c17ba7daa18764771836b34fab7d3fbd93ed633878"
"checksum cid 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0e37fba0087d9f3f4e269827a55dc511abf3e440cc097a0c154ff4e6584f988"
"checksum clap 2.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b957d88f4b6a63b9d70d5f454ac8011819c6efa7727858f458ab71c756ce2d3e"
"checksum clear_on_drop 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "97276801e127ffb46b66ce23f35cc96bd454fa311294bced4bbace7baa8b1d17"
"checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
"checksum cmake 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "6ec65ee4f9c9d16f335091d23693457ed4928657ba4982289d7fafee03bc614a"
"checksum combine 3.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fc1d011beeed29187b8db2ac3925c8dd4d3e87db463dc9d2d2833985388fc5bc"
@ -4847,7 +4879,7 @@ dependencies = [
"checksum docopt 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "db2906c2579b5b7207fc1e328796a9a8835dc44e22dbe8e460b1d636f9a7b225"
"checksum edit-distance 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3bd26878c3d921f89797a4e1a1711919f999a9f6946bb6f5a4ffda126d297b7e"
"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
"checksum elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "88d4851b005ef16de812ea9acdb7bece2f0a40dd86c07b85631d7dafa54537bb"
"checksum elastic-array 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "073be79b6538296faf81c631872676600616073817dd9a440c477ad09b408983"
"checksum enum_primitive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "be4551092f4d519593039259a9ed8daedf0da12e5109c5280338073eaeb81180"
"checksum env_logger 0.5.13 (registry+https://github.com/rust-lang/crates.io-index)" = "15b0a4d2e39f8420210be8b27eeda28029729e2fd4291019455016c348240c38"
"checksum error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "07e791d3be96241c77c43846b665ef1384606da2cd2a48730abe606a12906e02"
@ -4880,10 +4912,10 @@ dependencies = [
"checksum h2 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "a27e7ed946e8335bdf9a191bc1b9b14a03ba822d013d2f58437f4fabcbd7fc2c"
"checksum hamming 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65043da274378d68241eb9a8f8f8aa54e349136f7b8e12f63e3ef44043cc30e1"
"checksum handlebars 0.32.4 (registry+https://github.com/rust-lang/crates.io-index)" = "d89ec99d1594f285d4590fc32bac5f75cdab383f1123d504d27862c644a807dd"
"checksum hash-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1b03501f6e1a2a97f1618879aba3156f14ca2847faa530c4e28859638bd11483"
"checksum hash-db 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)" = "ba7fb417e5c470acdd61068c79767d0e65962e70836cf6c9dfd2409f06345ce0"
"checksum hash256-std-hasher 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f8b2027c19ec91eb304999abae7307d225cf93be42af53b0039f76e98ed5af86"
"checksum heapsize 0.4.2 (git+https://github.com/cheme/heapsize.git?branch=ec-macfix)" = "<none>"
"checksum hash-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "0b3c95a428c86ed4633d83e07ef9e0a147a906da01e931f07e74a85bedce5a43"
"checksum hash256-std-hasher 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "663ce20dae36902c16d12c6aaae400ca40d922407a8cf2b4caf8cae9b39b4f03"
"checksum hashmap_core 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "8e04cb7a5051270ef3fa79f8c7604d581ecfa73d520e74f554e45541c4b5881a"
"checksum heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1679e6ea370dee694f91f1dc469bf94cf8f52051d147aec3e1f9497c6fc22461"
"checksum heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ea04fa3ead4e05e51a7c806fc07271fdbde4e246a6c6d1efd52e72230b771b82"
"checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa"
"checksum hmac 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f127a908633569f208325f86f71255d3363c79721d7f9fe31cd5569908819771"
@ -4924,7 +4956,7 @@ dependencies = [
"checksum jsonrpc-tcp-server 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c873dac37a601fb88d40ba49eeac3f1aa60953c06b2e99ddbf0569b6f8028478"
"checksum jsonrpc-ws-server 10.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20b8333a5a6e6ccbcf5c90f90919de557cba4929efa164e9bd0e8e497eb20e46"
"checksum keccak-hash 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "69e8ee697b9aa6dcc34d7657565fa5052763a1627a5b59e4c3c0ae3ed0d70a65"
"checksum keccak-hasher 0.12.2 (registry+https://github.com/rust-lang/crates.io-index)" = "af672553b2abac1c86c29fd62c79880638b6abc91d96db4aa42a5baab2bc1ca9"
"checksum keccak-hasher 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "6c936c737d79690593c34275faf583151a0e8c0abf34eaecad10399eed0beb7d"
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
"checksum kvdb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "72ae89206cea31c32014b39d5a454b96135894221610dbfd19cf4d2d044fa546"
"checksum kvdb-memorydb 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "45bcdf5eb083602cff61a6f8438dce2a7900d714e893fc48781c39fb119d37aa"
@ -4941,11 +4973,12 @@ dependencies = [
"checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"
"checksum lru-cache 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c"
"checksum lunarity-lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8a1670671f305792567116d4660e6e5bd785d6fa973e817c3445c0a7a54cecb6"
"checksum malloc_size_of_derive 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "35adee9ed962cf7d07d62cb58bc45029f3227f5b5b86246caa8632f06c187bc3"
"checksum matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "7ffc5c5338469d4d3ea17d269fa8ea3512ad247247c30bd2df69e68309ed0a08"
"checksum memchr 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4b3629fe9fdbff6daa6c33b90f7c08355c1aca05a3d01fa8063b822fcf185f3b"
"checksum memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e2ffa2c986de11a9df78620c01eeaaf27d94d3ff02bf81bfcca953102dd0c6ff"
"checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3"
"checksum memory-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "94da53143d45f6bad3753f532e56ad57a6a26c0ca6881794583310c7cb4c885f"
"checksum memory-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1eeeeab44c01c7da4409e68ec5b5db74c92305386efab3615e495b1dacaec196"
"checksum memory_units 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71d96e3f3c0b6325d8ccd83c33b28acb183edcb6c67938ba104ec546854b0882"
"checksum memzero 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "93c0d11ac30a033511ae414355d80f70d9f29a44a49140face477117a1ee90db"
"checksum mime 0.3.12 (registry+https://github.com/rust-lang/crates.io-index)" = "0a907b83e7b9e987032439a387e187119cddafc92d5c2aaeb1d92580a793f630"
@ -4985,6 +5018,7 @@ dependencies = [
"checksum parity-snappy 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2c5f9d149b13134b8b354d93a92830efcbee6fe5b73a2e6e540fe70d4dd8a63"
"checksum parity-snappy-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1a413d51e5e1927320c9de992998e4a279dffb8c8a7363570198bd8383e66f1b"
"checksum parity-tokio-ipc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "eb002c2d3539ccd3b82bd915ec060028d4ab350ad203dbffa20028c1e483af5b"
"checksum parity-util-mem 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "89e80f22052161e0cb55cb5a8a75890420c525031f95c9d262dbb0434aa85dc1"
"checksum parity-wasm 0.31.3 (registry+https://github.com/rust-lang/crates.io-index)" = "511379a8194230c2395d2f5fa627a5a7e108a9f976656ce723ae68fca4097bfc"
"checksum parity-wordlist 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cf13102febd98f4ad416a526b42deb82daf482626ba6ab10d0ebf8f45327514c"
"checksum parity-ws 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2fec5048fba72a2e01baeb0d08089db79aead4b57e2443df172fb1840075a233"
@ -5128,9 +5162,9 @@ dependencies = [
"checksum trace-time 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dbe82f2f0bf1991e163e757baf044282823155dd326e70f44ce2186c3c320cc9"
"checksum transaction-pool 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f8d8bd3123931aa6e49dd03bc8a2400490e14701d779458d1f1fff1f04c6f666"
"checksum transient-hashmap 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aeb4b191d033a35edfce392a38cdcf9790b6cebcb30fa690c312c29da4dc433e"
"checksum trie-db 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3c7319e28ca295f27359d944a682f7f65b419158bf1590c92cadc0000258d788"
"checksum trie-standardmap 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ebaa4b340046196efad8872b2dffe585b5ea330230dc44ee14e399f77da29f51"
"checksum triehash 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "92148b4d8d55eff71bc8c9e3c5f714e266c2a05e724dce5405a10deabbf449a8"
"checksum trie-db 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "ae063390324bfcf36c7e8e4fb1f85f6f0fb5dd04e1cd282581eb7b8b34b32de7"
"checksum trie-standardmap 0.12.4 (registry+https://github.com/rust-lang/crates.io-index)" = "40787fb1a63a97ed56d12bc303937ea274e09d1afa2e20e4f074eff2074b24d3"
"checksum triehash 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b645ad3fc9871596897fb64a57c9c29adc9f5ece87c2d78766e3fc5a5da56b56"
"checksum try-lock 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee2aa4715743892880f70885373966c83d73ef1b0838a664ef0c76fffd35e7c2"
"checksum try-lock 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e604eb7b43c06650e854be16a2a03155743d3752dd1c943f6829e26b7a36e382"
"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169"


@ -73,6 +73,8 @@ ethcore-secretstore = { path = "secret-store", optional = true }
registrar = { path = "util/registrar" }
parity-util-mem = { version = "0.1", features = ["jemalloc-global"] }
[build-dependencies]
rustc_version = "0.2"
@ -139,6 +141,3 @@ members = [
"util/fastmap",
"util/time-utils"
]
[patch.crates-io]
heapsize = { git = "https://github.com/cheme/heapsize.git", branch = "ec-macfix" }
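The `jemalloc-global` feature added above is what makes jemalloc the client's default allocator. Roughly speaking (an assumption about the feature's effect, not code taken from this commit), it boils down to registering jemalloc as Rust's global allocator inside `parity-util-mem`, along these lines:

```rust
// Illustration only: what a "jemalloc-global" style feature amounts to.
// The real cfg-gated registration lives inside parity-util-mem.
use jemallocator::Jemalloc;

#[global_allocator]
static GLOBAL: Jemalloc = Jemalloc;

fn main() {
    // Every heap allocation in the process now goes through jemalloc.
    let buf: Vec<u8> = Vec::with_capacity(1024);
    assert!(buf.capacity() >= 1024);
}
```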


@ -11,7 +11,7 @@ eth-secp256k1 = { git = "https://github.com/paritytech/rust-secp256k1" }
ethereum-types = "0.6.0"
lazy_static = "1.0"
log = "0.4"
memzero = { path = "../../util/memzero" }
parity-util-mem = "0.1"
parity-wordlist = "1.2"
quick-error = "1.2.2"
rand = "0.6"


@ -20,7 +20,7 @@ extern crate byteorder;
extern crate edit_distance;
extern crate parity_crypto;
extern crate ethereum_types;
extern crate memzero;
extern crate parity_util_mem;
extern crate parity_wordlist;
#[macro_use]
extern crate quick_error;


@ -21,7 +21,7 @@ use rustc_hex::ToHex;
use secp256k1::constants::{SECRET_KEY_SIZE as SECP256K1_SECRET_KEY_SIZE};
use secp256k1::key;
use ethereum_types::H256;
use memzero::Memzero;
use parity_util_mem::Memzero;
use {Error, SECP256K1};
#[derive(Clone, PartialEq, Eq)]


@ -31,8 +31,8 @@ ethjson = { path = "../json" }
ethkey = { path = "../accounts/ethkey" }
evm = { path = "evm" }
futures = "0.1"
hash-db = "0.11.0"
heapsize = "0.4"
hash-db = "0.12.4"
parity-util-mem = "0.1"
itertools = "0.5"
journaldb = { path = "../util/journaldb" }
keccak-hash = "0.2.0"
@ -46,14 +46,14 @@ log = "0.4"
lru-cache = "0.1"
macros = { path = "../util/macros" }
memory-cache = { path = "../util/memory-cache" }
memory-db = "0.11.0"
memory-db = "0.12.4"
num = { version = "0.1", default-features = false, features = ["bigint"] }
num_cpus = "1.2"
parity-bytes = "0.1"
parity-crypto = "0.4.0"
parity-snappy = "0.1"
parking_lot = "0.7"
trie-db = "0.11.0"
trie-db = "0.12.4"
patricia-trie-ethereum = { path = "../util/patricia-trie-ethereum" }
rand = "0.6"
rayon = "1.0"
@ -83,7 +83,7 @@ kvdb-rocksdb = "0.1.3"
parity-runtime = { path = "../util/runtime" }
rlp_compress = { path = "../util/rlp-compress" }
tempdir = "0.3"
trie-standardmap = "0.12.3"
trie-standardmap = "0.12.4"
[features]
parity = ["work-notify", "price-info", "stratum"]


@ -13,7 +13,7 @@ blooms-db = { path = "../../util/blooms-db" }
common-types = { path = "../types" }
ethcore-db = { path = "../db" }
ethereum-types = "0.6.0"
heapsize = "0.4"
parity-util-mem = "0.1"
itertools = "0.5"
kvdb = "0.1"
log = "0.4"


@ -39,7 +39,7 @@ use ethcore_db::cache_manager::CacheManager;
use ethcore_db::keys::{BlockReceipts, BlockDetails, TransactionAddress, EPOCH_KEY_PREFIX, EpochTransitions};
use ethcore_db::{self as db, Writable, Readable, CacheUpdatePolicy};
use ethereum_types::{H256, Bloom, BloomRef, U256};
use heapsize::HeapSizeOf;
use util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
use itertools::Itertools;
use kvdb::{DBTransaction, KeyValueDB};
use log::{trace, warn, info};
@ -1489,11 +1489,12 @@ impl BlockChain {
/// Get current cache size.
pub fn cache_size(&self) -> CacheSize {
let mut ops = new_malloc_size_ops();
CacheSize {
blocks: self.block_headers.read().heap_size_of_children() + self.block_bodies.read().heap_size_of_children(),
block_details: self.block_details.read().heap_size_of_children(),
transaction_addresses: self.transaction_addresses.read().heap_size_of_children(),
block_receipts: self.block_receipts.read().heap_size_of_children(),
blocks: self.block_headers.size_of(&mut ops) + self.block_bodies.size_of(&mut ops),
block_details: self.block_details.size_of(&mut ops),
transaction_addresses: self.transaction_addresses.size_of(&mut ops),
block_receipts: self.block_receipts.size_of(&mut ops),
}
}
@ -1528,12 +1529,13 @@ impl BlockChain {
transaction_addresses.shrink_to_fit();
block_receipts.shrink_to_fit();
block_headers.heap_size_of_children() +
block_bodies.heap_size_of_children() +
block_details.heap_size_of_children() +
block_hashes.heap_size_of_children() +
transaction_addresses.heap_size_of_children() +
block_receipts.heap_size_of_children()
let mut ops = new_malloc_size_ops();
block_headers.size_of(&mut ops) +
block_bodies.size_of(&mut ops) +
block_details.size_of(&mut ops) +
block_hashes.size_of(&mut ops) +
transaction_addresses.size_of(&mut ops) +
block_receipts.size_of(&mut ops)
});
}
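The same pattern works stand-alone: create one ops object with `new_malloc_size_ops` (the helper imported above) and pass it to every `size_of` call. A hypothetical example, assuming the crate's standard-library `HashMap` implementation:

```rust
// Hypothetical stand-alone example mirroring `cache_size` above:
// one ops object is shared across all `size_of` calls.
use std::collections::HashMap;
use parity_util_mem::{allocators::new_malloc_size_ops, MallocSizeOf};

fn cache_bytes(headers: &HashMap<u64, Vec<u8>>, bodies: &HashMap<u64, Vec<u8>>) -> usize {
    let mut ops = new_malloc_size_ops();
    headers.size_of(&mut ops) + bodies.size_of(&mut ops)
}
```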


@ -18,6 +18,9 @@
#![warn(missing_docs)]
extern crate parity_util_mem as util_mem;
extern crate parity_util_mem as malloc_size_of;
mod best_block;
mod block_info;
mod blockchain;


@ -10,8 +10,8 @@ edition = "2018"
[dependencies]
common-types = { path = "../types" }
ethereum-types = "0.6.0"
heapsize = "0.4"
kvdb = "0.1"
parity-util-mem = "0.1"
parking_lot = "0.7"
rlp = "0.4.0"
rlp_derive = { path = "../../util/rlp-derive" }


@ -23,7 +23,7 @@ use common_types::BlockNumber;
use common_types::engines::epoch::Transition as EpochTransition;
use common_types::receipt::Receipt;
use ethereum_types::{H256, H264, U256};
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use kvdb::PREFIX_LEN as DB_PREFIX_LEN;
use rlp;
use rlp_derive::{RlpEncodableWrapper, RlpDecodableWrapper, RlpEncodable, RlpDecodable};
@ -140,7 +140,7 @@ impl Key<EpochTransitions> for u64 {
}
/// Familial details concerning a block
#[derive(Debug, Clone)]
#[derive(Debug, Clone, MallocSizeOf)]
pub struct BlockDetails {
/// Block number
pub number: BlockNumber,
@ -195,14 +195,8 @@ impl rlp::Decodable for BlockDetails {
}
}
impl HeapSizeOf for BlockDetails {
fn heap_size_of_children(&self) -> usize {
self.children.heap_size_of_children()
}
}
/// Represents address of certain transaction within block
#[derive(Debug, PartialEq, Clone, RlpEncodable, RlpDecodable)]
#[derive(Debug, PartialEq, Clone, RlpEncodable, RlpDecodable, MallocSizeOf)]
pub struct TransactionAddress {
/// Block hash
pub block_hash: H256,
@ -210,12 +204,8 @@ pub struct TransactionAddress {
pub index: usize
}
impl HeapSizeOf for TransactionAddress {
fn heap_size_of_children(&self) -> usize { 0 }
}
/// Contains all block receipts.
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
#[derive(Clone, RlpEncodableWrapper, RlpDecodableWrapper, MallocSizeOf)]
pub struct BlockReceipts {
/// Block receipts
pub receipts: Vec<Receipt>,
@ -230,12 +220,6 @@ impl BlockReceipts {
}
}
impl HeapSizeOf for BlockReceipts {
fn heap_size_of_children(&self) -> usize {
self.receipts.heap_size_of_children()
}
}
/// Candidate transitions to an epoch with specific number.
#[derive(Clone, RlpEncodable, RlpDecodable)]
pub struct EpochTransitions {


@ -18,6 +18,9 @@
#![warn(missing_docs)]
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
mod db;
pub mod keys;


@ -7,7 +7,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
bit-set = "0.4"
parity-bytes = "0.1"
ethereum-types = "0.6.0"
heapsize = "0.4"
parity-util-mem = "0.1"
lazy_static = "1.0"
log = "0.4"
vm = { path = "../vm" }


@ -21,7 +21,7 @@ extern crate criterion;
extern crate bit_set;
extern crate ethereum_types;
extern crate parking_lot;
extern crate heapsize;
extern crate parity_util_mem as mem;
extern crate vm;
extern crate evm;
extern crate keccak_hash as hash;


@ -16,7 +16,7 @@
use std::sync::Arc;
use hash::KECCAK_EMPTY;
use heapsize::HeapSizeOf;
use parity_util_mem::{MallocSizeOf, MallocSizeOfOps};
use ethereum_types::H256;
use parking_lot::Mutex;
use memory_cache::MemoryLruCache;
@ -25,11 +25,12 @@ use super::super::instructions::{self, Instruction};
const DEFAULT_CACHE_SIZE: usize = 4 * 1024 * 1024;
// stub for a HeapSizeOf implementation.
/// Stub for sharing `BitSet` data in the cache (reference counted)
/// and implementing `MallocSizeOf` on it.
struct Bits(Arc<BitSet>);
impl HeapSizeOf for Bits {
fn heap_size_of_children(&self) -> usize {
impl MallocSizeOf for Bits {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
// dealing in bits here
self.0.capacity() * 8
}


@ -19,7 +19,7 @@
extern crate bit_set;
extern crate ethereum_types;
extern crate parking_lot;
extern crate heapsize;
extern crate parity_util_mem;
extern crate vm;
extern crate keccak_hash as hash;
extern crate memory_cache;


@ -15,14 +15,14 @@ ethcore = { path = ".."}
ethcore-db = { path = "../db" }
ethcore-blockchain = { path = "../blockchain" }
ethereum-types = "0.6.0"
memory-db = "0.11.0"
trie-db = "0.11.0"
memory-db = "0.12.4"
trie-db = "0.12.4"
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
ethcore-network = { path = "../../util/network" }
ethcore-miner = { path = "../../miner" }
ethcore-io = { path = "../../util/io" }
hash-db = "0.11.0"
heapsize = "0.4"
hash-db = "0.12.4"
parity-util-mem = "0.1"
vm = { path = "../vm" }
fastmap = { path = "../../util/fastmap" }
failsafe = { version = "0.3.0", default-features = false, features = ["parking_lot_mutex"] }


@ -21,12 +21,12 @@
//! vector of all gas prices from a recent range of blocks.
use std::time::{Instant, Duration};
use parity_util_mem::{MallocSizeOf, MallocSizeOfOps, MallocSizeOfExt};
use common_types::encoded;
use common_types::BlockNumber;
use common_types::receipt::Receipt;
use ethereum_types::{H256, U256};
use heapsize::HeapSizeOf;
use memory_cache::MemoryLruCache;
use stats::Corpus;
@ -157,18 +157,20 @@ impl Cache {
/// Get the memory used.
pub fn mem_used(&self) -> usize {
self.heap_size_of_children()
self.malloc_size_of()
}
}
impl HeapSizeOf for Cache {
fn heap_size_of_children(&self) -> usize {
// This is a fast method; a more exhaustive implementation is possible
impl MallocSizeOf for Cache {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
self.headers.current_size()
+ self.canon_hashes.current_size()
+ self.bodies.current_size()
+ self.receipts.current_size()
+ self.chain_score.current_size()
// TODO: + corpus
// `self.corpus` is skipped
}
}
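`mem_used` above relies on the `MallocSizeOfExt` convenience trait, which constructs the ops object internally. A minimal, hypothetical usage sketch:

```rust
// Hypothetical example of the MallocSizeOfExt shortcut used in `mem_used`.
use parity_util_mem::MallocSizeOfExt;

fn buffer_mem_used(buf: &Vec<u8>) -> usize {
    // `malloc_size_of()` builds its own MallocSizeOfOps under the hood.
    buf.malloc_size_of()
}
```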


@ -95,7 +95,8 @@ pub struct BlockInfo {
/// Build an in-memory CHT from a closure which provides necessary information
/// about blocks. If the fetcher ever fails to provide the info, the CHT
/// will not be generated.
pub fn build<F>(cht_num: u64, mut fetcher: F) -> Option<CHT<MemoryDB<KeccakHasher, DBValue>>>
pub fn build<F>(cht_num: u64, mut fetcher: F)
-> Option<CHT<MemoryDB<KeccakHasher, memory_db::HashKey<KeccakHasher>, DBValue>>>
where F: FnMut(BlockId) -> Option<BlockInfo>
{
let mut db = new_memory_db();
@ -154,7 +155,7 @@ pub fn compute_root<I>(cht_num: u64, iterable: I) -> Option<H256>
pub fn check_proof(proof: &[Bytes], num: u64, root: H256) -> Option<(H256, U256)> {
let mut db = new_memory_db();
for node in proof { db.insert(&node[..]); }
for node in proof { db.insert(hash_db::EMPTY_PREFIX, &node[..]); }
let res = match TrieDB::new(&db, &root) {
Err(_) => return None,
Ok(trie) => trie.get_with(&key!(num), |val: &[u8]| {


@ -38,7 +38,7 @@ use ethcore::engines::epoch::{Transition as EpochTransition, PendingTransition a
use ethcore::error::{Error, EthcoreResult, BlockError};
use ethcore::spec::{Spec, SpecHardcodedSync};
use ethereum_types::{H256, H264, U256};
use heapsize::HeapSizeOf;
use parity_util_mem::{MallocSizeOf, MallocSizeOfOps};
use kvdb::{DBTransaction, KeyValueDB};
use parking_lot::{Mutex, RwLock};
use fastmap::H256FastMap;
@ -95,8 +95,8 @@ struct Entry {
canonical_hash: H256,
}
impl HeapSizeOf for Entry {
fn heap_size_of_children(&self) -> usize {
impl MallocSizeOf for Entry {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
if self.candidates.spilled() {
self.candidates.capacity() * ::std::mem::size_of::<Candidate>()
} else {
@ -202,14 +202,21 @@ pub enum HardcodedSync {
Deny,
}
#[derive(MallocSizeOf)]
/// Header chain. See module docs for more details.
pub struct HeaderChain {
#[ignore_malloc_size_of = "ignored for performance reason"]
genesis_header: encoded::Header, // special-case the genesis.
candidates: RwLock<BTreeMap<u64, Entry>>,
#[ignore_malloc_size_of = "ignored for performance reason"]
best_block: RwLock<BlockDescriptor>,
#[ignore_malloc_size_of = "ignored for performance reason"]
live_epoch_proofs: RwLock<H256FastMap<EpochTransition>>,
#[ignore_malloc_size_of = "ignored for performance reason"]
db: Arc<KeyValueDB>,
#[ignore_malloc_size_of = "ignored for performance reason"]
col: Option<u32>,
#[ignore_malloc_size_of = "ignored for performance reason"]
cache: Arc<Mutex<Cache>>,
}
@ -838,12 +845,6 @@ impl HeaderChain {
}
}
impl HeapSizeOf for HeaderChain {
fn heap_size_of_children(&self) -> usize {
self.candidates.read().heap_size_of_children()
}
}
/// Iterator over a block's ancestry.
pub struct AncestryIter<'a> {
next: Option<encoded::Header>,


@ -362,9 +362,9 @@ impl<T: ChainDataFetcher> Client<T> {
/// Get blockchain mem usage in bytes.
pub fn chain_mem_used(&self) -> usize {
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOfExt;
self.chain.heap_size_of_children()
self.chain.malloc_size_of()
}
/// Set a closure to call when the client wants to be restarted.


@ -64,7 +64,9 @@ extern crate ethereum_types;
extern crate ethcore_miner as miner;
extern crate ethcore;
extern crate hash_db;
extern crate heapsize;
extern crate parity_util_mem;
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
extern crate failsafe;
extern crate futures;
extern crate itertools;


@ -981,7 +981,7 @@ impl Account {
let state_root = header.state_root();
let mut db = journaldb::new_memory_db();
for node in proof { db.insert(&node[..]); }
for node in proof { db.insert(hash_db::EMPTY_PREFIX, &node[..]); }
match TrieDB::new(&db, &state_root).and_then(|t| t.get(keccak(&self.address).as_bytes()))? {
Some(val) => {


@ -20,13 +20,13 @@ ethjson = { path = "../../json" }
ethkey = { path = "../../accounts/ethkey" }
fetch = { path = "../../util/fetch" }
futures = "0.1"
heapsize = "0.4"
parity-util-mem = "0.1"
keccak-hash = "0.2.0"
log = "0.4"
parity-bytes = "0.1"
parity-crypto = "0.4.0"
parking_lot = "0.7"
trie-db = "0.11.0"
trie-db = "0.12.4"
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
rand = "0.3"
rlp = "0.4.0"


@ -34,7 +34,7 @@ extern crate ethjson;
extern crate ethkey;
extern crate fetch;
extern crate futures;
extern crate heapsize;
extern crate parity_util_mem;
extern crate keccak_hash as hash;
extern crate parity_bytes as bytes;
extern crate parity_crypto as crypto;


@ -21,7 +21,7 @@ use std::collections::{HashMap, HashSet};
use bytes::Bytes;
use ethcore_miner::pool;
use ethereum_types::{H256, U256, Address};
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOfExt;
use ethkey::Signature;
use messages::PrivateTransaction;
use parking_lot::RwLock;
@ -59,7 +59,7 @@ impl txpool::VerifiedTransaction for VerifiedPrivateTransaction {
}
fn mem_usage(&self) -> usize {
self.transaction.heap_size_of_children()
self.transaction.malloc_size_of()
}
fn sender(&self) -> &Address {


@ -17,7 +17,7 @@
//! DB backend wrapper for Account trie
use ethereum_types::H256;
use hash::{KECCAK_NULL_RLP, keccak};
use hash_db::{HashDB, AsHashDB};
use hash_db::{HashDB, AsHashDB, Prefix};
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;
use rlp::NULL_RLP;
@ -103,29 +103,29 @@ impl<'db> AsHashDB<KeccakHasher, DBValue> for AccountDB<'db> {
}
impl<'db> HashDB<KeccakHasher, DBValue> for AccountDB<'db> {
fn get(&self, key: &H256) -> Option<DBValue> {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if key == &KECCAK_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP));
}
self.db.get(&combine_key(&self.address_hash, key))
self.db.get(&combine_key(&self.address_hash, key), prefix)
}
fn contains(&self, key: &H256) -> bool {
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
if key == &KECCAK_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
self.db.contains(&combine_key(&self.address_hash, key), prefix)
}
fn insert(&mut self, _value: &[u8]) -> H256 {
fn insert(&mut self, _prefix: Prefix, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
fn emplace(&mut self, _key: H256, _prefix: Prefix, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
fn remove(&mut self, _key: &H256, _prefix: Prefix) {
unimplemented!()
}
}
@ -158,44 +158,44 @@ impl<'db> AccountDBMut<'db> {
}
impl<'db> HashDB<KeccakHasher, DBValue> for AccountDBMut<'db>{
fn get(&self, key: &H256) -> Option<DBValue> {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if key == &KECCAK_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP));
}
self.db.get(&combine_key(&self.address_hash, key))
self.db.get(&combine_key(&self.address_hash, key), prefix)
}
fn contains(&self, key: &H256) -> bool {
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
if key == &KECCAK_NULL_RLP {
return true;
}
self.db.contains(&combine_key(&self.address_hash, key))
self.db.contains(&combine_key(&self.address_hash, key), prefix)
}
fn insert(&mut self, value: &[u8]) -> H256 {
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return KECCAK_NULL_RLP.clone();
}
let k = keccak(value);
let ak = combine_key(&self.address_hash, &k);
self.db.emplace(ak, DBValue::from_slice(value));
self.db.emplace(ak, prefix, DBValue::from_slice(value));
k
}
fn emplace(&mut self, key: H256, value: DBValue) {
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
if key == KECCAK_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, &key);
self.db.emplace(key, value)
self.db.emplace(key, prefix, value)
}
fn remove(&mut self, key: &H256) {
fn remove(&mut self, key: &H256, prefix: Prefix) {
if key == &KECCAK_NULL_RLP {
return;
}
let key = combine_key(&self.address_hash, key);
self.db.remove(&key)
self.db.remove(&key, prefix)
}
}
@ -212,29 +212,29 @@ impl<'db> AsHashDB<KeccakHasher, DBValue> for Wrapping<'db> {
}
impl<'db> HashDB<KeccakHasher, DBValue> for Wrapping<'db> {
fn get(&self, key: &H256) -> Option<DBValue> {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if key == &KECCAK_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP));
}
self.0.get(key)
self.0.get(key, prefix)
}
fn contains(&self, key: &H256) -> bool {
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
if key == &KECCAK_NULL_RLP {
return true;
}
self.0.contains(key)
self.0.contains(key, prefix)
}
fn insert(&mut self, _value: &[u8]) -> H256 {
fn insert(&mut self, _prefix: Prefix, _value: &[u8]) -> H256 {
unimplemented!()
}
fn emplace(&mut self, _key: H256, _value: DBValue) {
fn emplace(&mut self, _key: H256, _prefix: Prefix, _value: DBValue) {
unimplemented!()
}
fn remove(&mut self, _key: &H256) {
fn remove(&mut self, _key: &H256, _prefix: Prefix) {
unimplemented!()
}
}
@ -246,38 +246,38 @@ impl<'db> AsHashDB<KeccakHasher, DBValue> for WrappingMut<'db> {
}
impl<'db> HashDB<KeccakHasher, DBValue> for WrappingMut<'db>{
fn get(&self, key: &H256) -> Option<DBValue> {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if key == &KECCAK_NULL_RLP {
return Some(DBValue::from_slice(&NULL_RLP));
}
self.0.get(key)
self.0.get(key, prefix)
}
fn contains(&self, key: &H256) -> bool {
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
if key == &KECCAK_NULL_RLP {
return true;
}
self.0.contains(key)
self.0.contains(key, prefix)
}
fn insert(&mut self, value: &[u8]) -> H256 {
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
if value == &NULL_RLP {
return KECCAK_NULL_RLP.clone();
}
self.0.insert(value)
self.0.insert(prefix, value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
if key == KECCAK_NULL_RLP {
return;
}
self.0.emplace(key, value)
self.0.emplace(key, prefix, value)
}
fn remove(&mut self, key: &H256) {
fn remove(&mut self, key: &H256, prefix: Prefix) {
if key == &KECCAK_NULL_RLP {
return;
}
self.0.remove(key)
self.0.remove(key, prefix)
}
}

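The signature change above is mechanical but worth spelling out: every HashDB accessor now carries a trie Prefix, and call sites that do not track node positions pass EMPTY_PREFIX. A small sketch against the trait as used in this diff (helper names are illustrative):

use ethereum_types::H256;
use hash_db::{HashDB, Prefix, EMPTY_PREFIX};
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;

// Store a value and read it back through the prefixed API.
fn store_and_load(db: &mut dyn HashDB<KeccakHasher, DBValue>, value: &[u8]) -> Option<DBValue> {
    let key: H256 = db.insert(EMPTY_PREFIX, value);
    db.get(&key, EMPTY_PREFIX)
}

// Wrappers that remap keys, like AccountDB above, simply forward the prefix.
fn forward_get(db: &dyn HashDB<KeccakHasher, DBValue>, key: &H256, prefix: Prefix) -> Option<DBValue> {
    db.get(key, prefix)
}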
View File

@ -566,6 +566,7 @@ mod tests {
use types::header::Header;
use types::view;
use types::views::BlockView;
use hash_db::EMPTY_PREFIX;
/// Enact the block given by `block_bytes` using `engine` on the database `db` with given `parent` block header
fn enact_bytes(
@ -668,7 +669,8 @@ mod tests {
let db = e.drain().state.drop().1;
assert_eq!(orig_db.journal_db().keys(), db.journal_db().keys());
assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0) != db.journal_db().get(k.0)).next() == None);
assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0, EMPTY_PREFIX)
!= db.journal_db().get(k.0, EMPTY_PREFIX)).next() == None);
}
#[test]
@ -702,6 +704,7 @@ mod tests {
let db = e.drain().state.drop().1;
assert_eq!(orig_db.journal_db().keys(), db.journal_db().keys());
assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0) != db.journal_db().get(k.0)).next() == None);
assert!(orig_db.journal_db().keys().iter().filter(|k| orig_db.journal_db().get(k.0, EMPTY_PREFIX)
!= db.journal_db().get(k.0, EMPTY_PREFIX)).next() == None);
}
}

View File

@ -43,7 +43,7 @@ use types::log_entry::LocalizedLogEntry;
use types::receipt::{Receipt, LocalizedReceipt};
use types::{BlockNumber, header::{Header, ExtendedHeader}};
use vm::{EnvInfo, LastHashes};
use hash_db::EMPTY_PREFIX;
use block::{LockedBlock, Drain, ClosedBlock, OpenBlock, enact_verified, SealedBlock};
use client::ancient_import::AncientVerifier;
use client::{
@ -743,7 +743,7 @@ impl Client {
config.history
};
if !chain.block_header_data(&chain.best_block_hash()).map_or(true, |h| state_db.journal_db().contains(&h.state_root())) {
if !chain.block_header_data(&chain.best_block_hash()).map_or(true, |h| state_db.journal_db().contains(&h.state_root(), EMPTY_PREFIX)) {
warn!("State root not found for block #{} ({:x})", chain.best_block_number(), chain.best_block_hash());
}

View File

@ -16,7 +16,7 @@
/// Preconfigured validator list.
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use ethereum_types::{H256, Address};
use machine::{AuxiliaryData, Call, EthereumMachine};
@ -25,7 +25,7 @@ use types::header::Header;
use super::ValidatorSet;
/// Validator set containing a known set of addresses.
#[derive(Clone, Debug, PartialEq, Eq, Default)]
#[derive(Clone, Debug, PartialEq, Eq, Default, MallocSizeOf)]
pub struct SimpleList {
validators: Vec<Address>,
}
@ -58,12 +58,6 @@ impl From<Vec<Address>> for SimpleList {
}
}
impl HeapSizeOf for SimpleList {
fn heap_size_of_children(&self) -> usize {
self.validators.heap_size_of_children()
}
}
impl ValidatorSet for SimpleList {
fn default_caller(&self, _block_id: ::types::ids::BlockId) -> Box<Call> {
Box::new(|_, _| Err("Simple list doesn't require calls.".into()))

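Where the removed HeapSizeOf impl just summed its fields, the derive now does the walking. A sketch of the equivalence, with a stand-in List type and the derive assumed to be wired up as in this commit's crate roots:

use ethereum_types::Address;
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

#[derive(MallocSizeOf)]
struct List {
    validators: Vec<Address>,
}

// Reports the heap behind the Vec, as the removed
// heap_size_of_children() implementation did.
fn list_mem(list: &List) -> usize {
    list.malloc_size_of()
}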
View File

@ -19,10 +19,10 @@
use std::str::FromStr;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrdering};
use parity_util_mem::MallocSizeOf;
use bytes::Bytes;
use ethereum_types::{H256, Address};
use heapsize::HeapSizeOf;
use types::BlockNumber;
use types::header::Header;
@ -30,9 +30,12 @@ use machine::{AuxiliaryData, Call, EthereumMachine};
use super::{ValidatorSet, SimpleList};
/// Set used for testing with a single validator.
#[derive(MallocSizeOf)]
pub struct TestSet {
validator: SimpleList,
#[ignore_malloc_size_of = "zero sized"]
last_malicious: Arc<AtomicUsize>,
#[ignore_malloc_size_of = "zero sized"]
last_benign: Arc<AtomicUsize>,
}
@ -52,12 +55,6 @@ impl TestSet {
}
}
impl HeapSizeOf for TestSet {
fn heap_size_of_children(&self) -> usize {
self.validator.heap_size_of_children()
}
}
impl ValidatorSet for TestSet {
fn default_caller(&self, _block_id: ::types::ids::BlockId) -> Box<Call> {
Box::new(|_, _| Err("Test set doesn't require calls.".into()))

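Fields that the derive cannot (or need not) measure are opted out per field with ignore_malloc_size_of, as TestSet does above for its atomics. A hedged sketch with an illustrative Counters type, assuming the same crate-root setup as this commit:

use std::sync::Arc;
use std::sync::atomic::AtomicUsize;
use parity_util_mem::MallocSizeOf;

#[derive(MallocSizeOf)]
struct Counters {
    label: String,
    // Skipped, like the last_malicious/last_benign fields above.
    #[ignore_malloc_size_of = "atomic counters hold no owned heap data"]
    hits: Arc<AtomicUsize>,
}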
View File

@ -72,7 +72,6 @@ extern crate ethjson;
extern crate ethkey;
extern crate futures;
extern crate hash_db;
extern crate heapsize;
extern crate itertools;
extern crate journaldb;
extern crate keccak_hash as hash;
@ -98,6 +97,9 @@ extern crate patricia_trie_ethereum as ethtrie;
extern crate rand;
extern crate rayon;
extern crate rlp;
extern crate parity_util_mem;
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
extern crate rustc_hex;
extern crate serde;
extern crate stats;

View File

@ -81,7 +81,7 @@ impl PodAccount {
/// Place additional data into given hash DB.
pub fn insert_additional(&self, db: &mut dyn HashDB<KeccakHasher, DBValue>, factory: &TrieFactory<KeccakHasher, RlpCodec>) {
match self.code {
Some(ref c) if !c.is_empty() => { db.insert(c); }
Some(ref c) if !c.is_empty() => { db.insert(hash_db::EMPTY_PREFIX, c); }
_ => {}
}
let mut r = H256::zero();

View File

@ -95,7 +95,7 @@ pub fn to_fat_rlps(
} else if used_code.contains(&acc.code_hash) {
account_stream.append(&CodeState::Hash.raw()).append(&acc.code_hash);
} else {
match acct_db.get(&acc.code_hash) {
match acct_db.get(&acc.code_hash, hash_db::EMPTY_PREFIX) {
Some(c) => {
used_code.insert(acc.code_hash.clone());
account_stream.append(&CodeState::Inline.raw()).append(&&*c);
@ -182,7 +182,7 @@ pub fn from_fat_rlp(
CodeState::Empty => (KECCAK_EMPTY, None),
CodeState::Inline => {
let code: Bytes = rlp.val_at(3)?;
let code_hash = acct_db.insert(&code);
let code_hash = acct_db.insert(hash_db::EMPTY_PREFIX, &code);
(code_hash, Some(code))
}
@ -228,7 +228,7 @@ mod tests {
use hash::{KECCAK_EMPTY, KECCAK_NULL_RLP, keccak};
use ethereum_types::{H256, Address};
use hash_db::HashDB;
use hash_db::{HashDB, EMPTY_PREFIX};
use kvdb::DBValue;
use rlp::Rlp;
@ -324,12 +324,12 @@ mod tests {
let code_hash = {
let mut acct_db = AccountDBMut::new(db.as_hash_db_mut(), &addr1);
acct_db.insert(b"this is definitely code")
acct_db.insert(EMPTY_PREFIX, b"this is definitely code")
};
{
let mut acct_db = AccountDBMut::new(db.as_hash_db_mut(), &addr2);
acct_db.emplace(code_hash.clone(), DBValue::from_slice(b"this is definitely code"));
acct_db.emplace(code_hash.clone(), EMPTY_PREFIX, DBValue::from_slice(b"this is definitely code"));
}
let account1 = BasicAccount {

View File

@ -448,7 +448,7 @@ impl StateRebuilder {
for (code_hash, code, first_with) in status.new_code {
for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) {
let mut db = AccountDBMut::from_hash(self.db.as_hash_db_mut(), addr_hash);
db.emplace(code_hash, DBValue::from_slice(&code));
db.emplace(code_hash, hash_db::EMPTY_PREFIX, DBValue::from_slice(&code));
}
self.known_code.insert(code_hash, first_with);
@ -545,11 +545,11 @@ fn rebuild_accounts(
Some(&first_with) => {
// if so, load it from the database.
let code = AccountDB::from_hash(db, first_with)
.get(&code_hash)
.get(&code_hash, hash_db::EMPTY_PREFIX)
.ok_or_else(|| Error::MissingCode(vec![first_with]))?;
// and write it again under a different mangled key
AccountDBMut::from_hash(db, hash).emplace(code_hash, code);
AccountDBMut::from_hash(db, hash).emplace(code_hash, hash_db::EMPTY_PREFIX, code);
}
// if not, queue it up to be filled later
None => status.missing_code.push((hash, code_hash)),

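The rebuilder above reads a code blob back with get() and rewrites it under another account with emplace(), always under EMPTY_PREFIX. A minimal sketch of that shape (the function name is illustrative):

use ethereum_types::H256;
use hash_db::{HashDB, EMPTY_PREFIX};
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;

// Copy a code entry from one account-scoped DB to another.
fn copy_code(
    from: &dyn HashDB<KeccakHasher, DBValue>,
    to: &mut dyn HashDB<KeccakHasher, DBValue>,
    code_hash: H256,
) -> bool {
    match from.get(&code_hash, EMPTY_PREFIX) {
        Some(code) => {
            to.emplace(code_hash, EMPTY_PREFIX, code);
            true
        }
        None => false,
    }
}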
View File

@ -40,7 +40,7 @@ const RNG_SEED: [u8; 16] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16];
#[test]
fn snap_and_restore() {
use hash_db::HashDB;
use hash_db::{HashDB, EMPTY_PREFIX};
let mut producer = StateProducer::new();
let mut rng = XorShiftRng::from_seed(RNG_SEED);
let mut old_db = journaldb::new_memory_db();
@ -97,7 +97,7 @@ fn snap_and_restore() {
let keys = old_db.keys();
for key in keys.keys() {
assert_eq!(old_db.get(&key).unwrap(), new_db.as_hash_db().get(&key).unwrap());
assert_eq!(old_db.get(&key, EMPTY_PREFIX).unwrap(), new_db.as_hash_db().get(&key, EMPTY_PREFIX).unwrap());
}
}
@ -106,7 +106,7 @@ fn get_code_from_prev_chunk() {
use std::collections::HashSet;
use rlp::RlpStream;
use ethereum_types::{H256, U256};
use hash_db::HashDB;
use hash_db::{HashDB, EMPTY_PREFIX};
use account_db::{AccountDBMut, AccountDB};
@ -128,7 +128,7 @@ fn get_code_from_prev_chunk() {
let mut make_chunk = |acc, hash| {
let mut db = journaldb::new_memory_db();
AccountDBMut::from_hash(&mut db, hash).insert(&code[..]);
AccountDBMut::from_hash(&mut db, hash).insert(EMPTY_PREFIX, &code[..]);
let p = Progress::default();
let fat_rlp = account::to_fat_rlps(&hash, &acc, &AccountDB::from_hash(&db, hash), &mut used_code, usize::max_value(), usize::max_value(), &p).unwrap();
let mut stream = RlpStream::new_list(1);

View File

@ -813,7 +813,7 @@ impl Spec {
/// Ensure that the given state DB has the trie nodes in for the genesis state.
pub fn ensure_db_good<T: Backend>(&self, db: T, factories: &Factories) -> Result<T, Error> {
if db.as_hash_db().contains(&self.state_root()) {
if db.as_hash_db().contains(&self.state_root(), hash_db::EMPTY_PREFIX) {
return Ok(db);
}

View File

@ -364,7 +364,7 @@ impl Account {
if self.is_cached() { return Some(self.code_cache.clone()); }
match db.get(&self.code_hash) {
match db.get(&self.code_hash, hash_db::EMPTY_PREFIX) {
Some(x) => {
self.code_size = Some(x.len());
self.code_cache = Arc::new(x.into_vec());
@ -393,7 +393,7 @@ impl Account {
trace!("Account::cache_code_size: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
self.code_size.is_some() ||
if self.code_hash != KECCAK_EMPTY {
match db.get(&self.code_hash) {
match db.get(&self.code_hash, hash_db::EMPTY_PREFIX) {
Some(x) => {
self.code_size = Some(x.len());
true
@ -507,7 +507,7 @@ impl Account {
self.code_filth = Filth::Clean;
},
(true, false) => {
db.emplace(self.code_hash.clone(), DBValue::from_slice(&*self.code_cache));
db.emplace(self.code_hash.clone(), hash_db::EMPTY_PREFIX, DBValue::from_slice(&*self.code_cache));
self.code_size = Some(self.code_cache.len());
self.code_filth = Filth::Clean;
},

View File

@ -27,8 +27,8 @@ use std::sync::Arc;
use state::Account;
use parking_lot::Mutex;
use ethereum_types::{Address, H256};
use memory_db::MemoryDB;
use hash_db::{AsHashDB, HashDB};
use memory_db::{MemoryDB, HashKey};
use hash_db::{AsHashDB, HashDB, Prefix, EMPTY_PREFIX};
use kvdb::DBValue;
use keccak_hasher::KeccakHasher;
use journaldb::AsKeyedHashDB;
@ -78,13 +78,13 @@ pub trait Backend: Send {
// TODO: when account lookup moved into backends, this won't rely as tenuously on intended
// usage.
#[derive(Clone, PartialEq)]
pub struct ProofCheck(MemoryDB<KeccakHasher, DBValue>);
pub struct ProofCheck(MemoryDB<KeccakHasher, HashKey<KeccakHasher>, DBValue>);
impl ProofCheck {
/// Create a new `ProofCheck` backend from the given state items.
pub fn new(proof: &[DBValue]) -> Self {
let mut db = journaldb::new_memory_db();
for item in proof { db.insert(item); }
for item in proof { db.insert(EMPTY_PREFIX, item); }
ProofCheck(db)
}
}
@ -94,23 +94,23 @@ impl journaldb::KeyedHashDB for ProofCheck {
}
impl HashDB<KeccakHasher, DBValue> for ProofCheck {
fn get(&self, key: &H256) -> Option<DBValue> {
self.0.get(key)
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
self.0.get(key, prefix)
}
fn contains(&self, key: &H256) -> bool {
self.0.contains(key)
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.0.contains(key, prefix)
}
fn insert(&mut self, value: &[u8]) -> H256 {
self.0.insert(value)
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.0.insert(prefix, value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
self.0.emplace(key, value)
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.0.emplace(key, prefix, value)
}
fn remove(&mut self, _key: &H256) { }
fn remove(&mut self, _key: &H256, _prefix: Prefix) { }
}
impl AsHashDB<KeccakHasher, DBValue> for ProofCheck {
@ -141,7 +141,7 @@ impl Backend for ProofCheck {
/// This doesn't cache anything or rely on the canonical state caches.
pub struct Proving<H> {
base: H, // state we're proving values from.
changed: MemoryDB<KeccakHasher, DBValue>, // changed state via insertions.
changed: MemoryDB<KeccakHasher, HashKey<KeccakHasher>, DBValue>, // changed state via insertions.
proof: Mutex<HashSet<DBValue>>,
}
@ -163,32 +163,32 @@ impl<H: AsKeyedHashDB + Send + Sync> journaldb::KeyedHashDB for Proving<H> {
}
impl<H: AsHashDB<KeccakHasher, DBValue> + Send + Sync> HashDB<KeccakHasher, DBValue> for Proving<H> {
fn get(&self, key: &H256) -> Option<DBValue> {
match self.base.as_hash_db().get(key) {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
match self.base.as_hash_db().get(key, prefix) {
Some(val) => {
self.proof.lock().insert(val.clone());
Some(val)
}
None => self.changed.get(key)
None => self.changed.get(key, prefix)
}
}
fn contains(&self, key: &H256) -> bool {
self.get(key).is_some()
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.get(key, prefix).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
self.changed.insert(value)
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.changed.insert(prefix, value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
self.changed.emplace(key, value)
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.changed.emplace(key, prefix, value)
}
fn remove(&mut self, key: &H256) {
fn remove(&mut self, key: &H256, prefix: Prefix) {
// only remove from `changed`
if self.changed.contains(key) {
self.changed.remove(key)
if self.changed.contains(key, prefix) {
self.changed.remove(key, prefix)
}
}
}

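memory-db 0.12 adds a key-function type parameter; HashKey keeps the plain hashed-key behaviour, and journaldb::new_memory_db() returns exactly that flavour, which is what ProofCheck and Proving now store. A sketch of building such a proof overlay (the alias and function names are illustrative):

use hash_db::{HashDB, EMPTY_PREFIX};
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;
use memory_db::{HashKey, MemoryDB};

// The concrete overlay type used above.
type ProofDB = MemoryDB<KeccakHasher, HashKey<KeccakHasher>, DBValue>;

fn proof_db(items: &[DBValue]) -> ProofDB {
    // journaldb is assumed as a dependency, as in this crate.
    let mut db = journaldb::new_memory_db();
    for item in items {
        // Proof nodes are keyed only by their hash, so the empty prefix is used.
        db.insert(EMPTY_PREFIX, item);
    }
    db
}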
View File

@ -381,7 +381,7 @@ impl<B: Backend> State<B> {
/// Creates new state with existing state root
pub fn from_existing(db: B, root: H256, account_start_nonce: U256, factories: Factories) -> TrieResult<State<B>> {
if !db.as_hash_db().contains(&root) {
if !db.as_hash_db().contains(&root, hash_db::EMPTY_PREFIX) {
return Err(Box::new(TrieError::InvalidStateRoot(root)));
}

View File

@ -17,12 +17,12 @@
//! Trace database.
use std::collections::HashMap;
use std::sync::Arc;
use parity_util_mem::MallocSizeOfExt;
use blockchain::BlockChainDB;
use db::cache_manager::CacheManager;
use db::{self, Key, Writable, Readable, CacheUpdatePolicy};
use ethereum_types::{H256, H264};
use heapsize::HeapSizeOf;
use kvdb::{DBTransaction};
use parking_lot::RwLock;
use types::BlockNumber;
@ -91,7 +91,7 @@ impl<T> TraceDB<T> where T: DatabaseExtras {
}
fn cache_size(&self) -> usize {
self.traces.read().heap_size_of_children()
self.traces.read().malloc_size_of()
}
/// Let the cache system know that a cacheable item has been used.
@ -113,7 +113,7 @@ impl<T> TraceDB<T> where T: DatabaseExtras {
}
traces.shrink_to_fit();
traces.heap_size_of_children()
traces.malloc_size_of()
});
}

View File

@ -17,17 +17,19 @@
//! Flat trace module
use rlp::{Rlp, RlpStream, Decodable, Encodable, DecoderError};
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use ethereum_types::Bloom;
use super::trace::{Action, Res};
/// Trace localized in vector of traces produced by a single transaction.
///
/// Parent and children indexes refer to positions in this vector.
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq, Clone, MallocSizeOf)]
pub struct FlatTrace {
#[ignore_malloc_size_of = "ignored for performance reason"]
/// Type of action performed by a transaction.
pub action: Action,
#[ignore_malloc_size_of = "ignored for performance reason"]
/// Result of this action.
pub result: Res,
/// Number of subtraces.
@ -45,12 +47,6 @@ impl FlatTrace {
}
}
impl HeapSizeOf for FlatTrace {
fn heap_size_of_children(&self) -> usize {
self.trace_address.heap_size_of_children()
}
}
impl Encodable for FlatTrace {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
@ -76,7 +72,7 @@ impl Decodable for FlatTrace {
}
/// Represents all traces produced by a single transaction.
#[derive(Debug, PartialEq, Clone, RlpEncodableWrapper, RlpDecodableWrapper)]
#[derive(Debug, PartialEq, Clone, RlpEncodableWrapper, RlpDecodableWrapper, MallocSizeOf)]
pub struct FlatTransactionTraces(Vec<FlatTrace>);
impl From<Vec<FlatTrace>> for FlatTransactionTraces {
@ -85,12 +81,6 @@ impl From<Vec<FlatTrace>> for FlatTransactionTraces {
}
}
impl HeapSizeOf for FlatTransactionTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl FlatTransactionTraces {
/// Returns bloom of all traces in the collection.
pub fn bloom(&self) -> Bloom {
@ -105,15 +95,9 @@ impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
}
/// Represents all traces produced by transactions in a single block.
#[derive(Debug, PartialEq, Clone, Default, RlpEncodableWrapper, RlpDecodableWrapper)]
#[derive(Debug, PartialEq, Clone, Default, RlpEncodableWrapper, RlpDecodableWrapper, MallocSizeOf)]
pub struct FlatBlockTraces(Vec<FlatTransactionTraces>);
impl HeapSizeOf for FlatBlockTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl From<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn from(v: Vec<FlatTransactionTraces>) -> Self {
FlatBlockTraces(v)

View File

@ -19,7 +19,7 @@
use engines::EthEngine;
use error::Error;
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use ethereum_types::{H256, U256};
pub use self::blocks::Blocks;
@ -49,13 +49,13 @@ pub trait BlockLike {
/// consistent.
pub trait Kind: 'static + Sized + Send + Sync {
/// The first stage: completely unverified.
type Input: Sized + Send + BlockLike + HeapSizeOf;
type Input: Sized + Send + BlockLike + MallocSizeOf;
/// The second stage: partially verified.
type Unverified: Sized + Send + BlockLike + HeapSizeOf;
type Unverified: Sized + Send + BlockLike + MallocSizeOf;
/// The third stage: completely verified.
type Verified: Sized + Send + BlockLike + HeapSizeOf;
type Verified: Sized + Send + BlockLike + MallocSizeOf;
/// Attempt to create the `Unverified` item from the input.
fn create(input: Self::Input, engine: &dyn EthEngine, check_seal: bool) -> Result<Self::Unverified, (Self::Input, Error)>;
@ -74,7 +74,7 @@ pub mod blocks {
use verification::{PreverifiedBlock, verify_block_basic, verify_block_unordered};
use types::transaction::UnverifiedTransaction;
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use ethereum_types::{H256, U256};
use bytes::Bytes;
@ -113,7 +113,7 @@ pub mod blocks {
}
/// An unverified block.
#[derive(PartialEq, Debug)]
#[derive(PartialEq, Debug, MallocSizeOf)]
pub struct Unverified {
/// Unverified block header.
pub header: Header,
@ -146,15 +146,6 @@ pub mod blocks {
}
}
impl HeapSizeOf for Unverified {
fn heap_size_of_children(&self) -> usize {
self.header.heap_size_of_children()
+ self.transactions.heap_size_of_children()
+ self.uncles.heap_size_of_children()
+ self.bytes.heap_size_of_children()
}
}
impl BlockLike for Unverified {
fn hash(&self) -> H256 {
self.header.hash()

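The Kind trait above now only requires MallocSizeOf from queue items, so generic code measures them through that one trait. A small sketch of how such a bound is consumed (the function name is illustrative):

use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

// Sum the sizes of a batch of queue items, as the drain path below does.
fn batch_size<T: MallocSizeOf>(items: &[T]) -> usize {
    items.iter().map(MallocSizeOfExt::malloc_size_of).sum()
}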
View File

@ -22,7 +22,7 @@ use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering as AtomicOrdering};
use std::sync::Arc;
use std::cmp;
use std::collections::{VecDeque, HashSet, HashMap};
use heapsize::HeapSizeOf;
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};
use ethereum_types::{H256, U256};
use parking_lot::{Condvar, Mutex, RwLock};
use io::*;
@ -96,17 +96,12 @@ enum State {
}
/// An item which is in the process of being verified.
#[derive(MallocSizeOf)]
pub struct Verifying<K: Kind> {
hash: H256,
output: Option<K::Verified>,
}
impl<K: Kind> HeapSizeOf for Verifying<K> {
fn heap_size_of_children(&self) -> usize {
self.output.heap_size_of_children()
}
}
/// Status of items in the queue.
pub enum Status {
/// Currently queued.
@ -353,7 +348,7 @@ impl<K: Kind> VerificationQueue<K> {
None => continue,
};
verification.sizes.unverified.fetch_sub(item.heap_size_of_children(), AtomicOrdering::SeqCst);
verification.sizes.unverified.fetch_sub(item.malloc_size_of(), AtomicOrdering::SeqCst);
verifying.push_back(Verifying { hash: item.hash(), output: None });
item
};
@ -367,7 +362,7 @@ impl<K: Kind> VerificationQueue<K> {
if e.hash == hash {
idx = Some(i);
verification.sizes.verifying.fetch_add(verified.heap_size_of_children(), AtomicOrdering::SeqCst);
verification.sizes.verifying.fetch_add(verified.malloc_size_of(), AtomicOrdering::SeqCst);
e.output = Some(verified);
break;
}
@ -417,7 +412,7 @@ impl<K: Kind> VerificationQueue<K> {
while let Some(output) = verifying.front_mut().and_then(|x| x.output.take()) {
assert!(verifying.pop_front().is_some());
let size = output.heap_size_of_children();
let size = output.malloc_size_of();
removed_size += size;
if bad.contains(&output.parent_hash()) {
@ -490,7 +485,7 @@ impl<K: Kind> VerificationQueue<K> {
match K::create(input, &*self.engine, self.verification.check_seal) {
Ok(item) => {
self.verification.sizes.unverified.fetch_add(item.heap_size_of_children(), AtomicOrdering::SeqCst);
self.verification.sizes.unverified.fetch_add(item.malloc_size_of(), AtomicOrdering::SeqCst);
self.processing.write().insert(hash, item.difficulty());
{
@ -537,7 +532,7 @@ impl<K: Kind> VerificationQueue<K> {
let mut removed_size = 0;
for output in verified.drain(..) {
if bad.contains(&output.parent_hash()) {
removed_size += output.heap_size_of_children();
removed_size += output.malloc_size_of();
bad.insert(output.hash());
if let Some(difficulty) = processing.remove(&output.hash()) {
let mut td = self.total_difficulty.write();
@ -574,7 +569,7 @@ impl<K: Kind> VerificationQueue<K> {
let count = cmp::min(max, verified.len());
let result = verified.drain(..count).collect::<Vec<_>>();
let drained_size = result.iter().map(HeapSizeOf::heap_size_of_children).fold(0, |a, c| a + c);
let drained_size = result.iter().map(MallocSizeOfExt::malloc_size_of).fold(0, |a, c| a + c);
self.verification.sizes.verified.fetch_sub(drained_size, AtomicOrdering::SeqCst);
self.ready_signal.reset();

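The queue keeps running byte counts in atomics and adjusts them by the measured size of each item as it moves between stages. A sketch of that bookkeeping under the new trait (names are illustrative):

use std::sync::atomic::{AtomicUsize, Ordering};
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

// Add the item's measured size on enqueue, subtract it on drain.
fn track_size<T: MallocSizeOf>(counter: &AtomicUsize, item: &T, enqueue: bool) {
    let size = item.malloc_size_of();
    if enqueue {
        counter.fetch_add(size, Ordering::SeqCst);
    } else {
        counter.fetch_sub(size, Ordering::SeqCst);
    }
}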
View File

@ -26,7 +26,7 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
use bytes::Bytes;
use hash::keccak;
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use rlp::Rlp;
use triehash::ordered_trie_root;
use unexpected::{Mismatch, OutOfBounds};
@ -44,6 +44,7 @@ use verification::queue::kind::blocks::Unverified;
use time_utils::CheckedSystemTime;
/// Preprocessed block data gathered in `verify_block_unordered` call
#[derive(MallocSizeOf)]
pub struct PreverifiedBlock {
/// Populated block header
pub header: Header,
@ -55,14 +56,6 @@ pub struct PreverifiedBlock {
pub bytes: Bytes,
}
impl HeapSizeOf for PreverifiedBlock {
fn heap_size_of_children(&self) -> usize {
self.header.heap_size_of_children()
+ self.transactions.heap_size_of_children()
+ self.bytes.heap_size_of_children()
}
}
/// Phase 1 quick block verification. Only does checks that are cheap. Operates on a single block
pub fn verify_block_basic(block: &Unverified, engine: &dyn EthEngine, check_seal: bool) -> Result<(), Error> {
verify_header_params(&block.header, engine, true, check_seal)?;

View File

@ -19,8 +19,7 @@ ethereum-types = "0.6.0"
ethkey = { path = "../../accounts/ethkey" }
ethstore = { path = "../../accounts/ethstore" }
fastmap = { path = "../../util/fastmap" }
hash-db = "0.11.0"
heapsize = "0.4"
hash-db = "0.12.4"
keccak-hash = "0.2.0"
keccak-hasher = { path = "../../util/keccak-hasher" }
kvdb = "0.1"
@ -28,6 +27,7 @@ log = "0.4"
macros = { path = "../../util/macros" }
parity-bytes = "0.1"
parking_lot = "0.7"
parity-util-mem = "0.1"
rand = "0.6"
rlp = "0.4.0"
trace-time = "0.1"

View File

@ -65,7 +65,7 @@ pub const ETH_PROTOCOL: ProtocolId = *b"eth";
pub const LIGHT_PROTOCOL: ProtocolId = *b"pip";
/// Determine warp sync status.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, MallocSizeOf)]
pub enum WarpSync {
/// Warp sync is enabled.
Enabled,

View File

@ -20,7 +20,7 @@
use std::collections::{HashSet, VecDeque};
use std::cmp;
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use ethereum_types::H256;
use rlp::{self, Rlp};
use types::BlockNumber;
@ -60,7 +60,7 @@ macro_rules! debug_sync {
};
}
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
#[derive(Copy, Clone, Eq, PartialEq, Debug, MallocSizeOf)]
/// Downloader state
pub enum State {
/// No active downloads.
@ -113,6 +113,7 @@ impl From<rlp::DecoderError> for BlockDownloaderImportError {
/// Block downloader strategy.
/// Manages state and block data for a block download process.
#[derive(MallocSizeOf)]
pub struct BlockDownloader {
/// Which set of blocks to download
block_set: BlockSet,
@ -223,11 +224,6 @@ impl BlockDownloader {
self.state = State::Blocks;
}
/// Returns used heap memory size.
pub fn heap_size(&self) -> usize {
self.blocks.heap_size() + self.round_parents.heap_size_of_children()
}
/// Returns best imported block number.
pub fn last_imported_block_number(&self) -> BlockNumber {
self.last_imported_block

View File

@ -16,7 +16,7 @@
use std::collections::{HashSet, HashMap, hash_map};
use hash::{keccak, KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP};
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use ethereum_types::H256;
use triehash_ethereum::ordered_trie_root;
use bytes::Bytes;
@ -26,21 +26,15 @@ use ethcore::verification::queue::kind::blocks::Unverified;
use types::transaction::UnverifiedTransaction;
use types::header::Header as BlockHeader;
known_heap_size!(0, HeaderId);
malloc_size_of_is_0!(HeaderId);
#[derive(PartialEq, Debug, Clone)]
#[derive(MallocSizeOf)]
pub struct SyncHeader {
pub bytes: Bytes,
pub header: BlockHeader,
}
impl HeapSizeOf for SyncHeader {
fn heap_size_of_children(&self) -> usize {
self.bytes.heap_size_of_children()
+ self.header.heap_size_of_children()
}
}
impl SyncHeader {
pub fn from_rlp(bytes: Bytes) -> Result<Self, DecoderError> {
let result = SyncHeader {
@ -52,6 +46,7 @@ impl SyncHeader {
}
}
#[derive(MallocSizeOf)]
pub struct SyncBody {
pub transactions_bytes: Bytes,
pub transactions: Vec<UnverifiedTransaction>,
@ -85,16 +80,8 @@ impl SyncBody {
}
}
impl HeapSizeOf for SyncBody {
fn heap_size_of_children(&self) -> usize {
self.transactions_bytes.heap_size_of_children()
+ self.transactions.heap_size_of_children()
+ self.uncles_bytes.heap_size_of_children()
+ self.uncles.heap_size_of_children()
}
}
/// Block data with optional body.
#[derive(MallocSizeOf)]
struct SyncBlock {
header: SyncHeader,
body: Option<SyncBody>,
@ -102,12 +89,6 @@ struct SyncBlock {
receipts_root: H256,
}
impl HeapSizeOf for SyncBlock {
fn heap_size_of_children(&self) -> usize {
self.header.heap_size_of_children() + self.body.heap_size_of_children()
}
}
fn unverified_from_sync(header: SyncHeader, body: Option<SyncBody>) -> Unverified {
let mut stream = RlpStream::new_list(3);
stream.append_raw(&header.bytes, 1);
@ -141,7 +122,7 @@ struct HeaderId {
/// A collection of blocks and subchain pointers being downloaded. This keeps track of
/// which headers/bodies need to be downloaded, which are being downloaded and also holds
/// the downloaded blocks.
#[derive(Default)]
#[derive(Default, MallocSizeOf)]
pub struct BlockCollection {
/// Does this collection need block receipts.
need_receipts: bool,
@ -399,16 +380,6 @@ impl BlockCollection {
self.heads.len()
}
/// Return used heap size.
pub fn heap_size(&self) -> usize {
self.heads.heap_size_of_children()
+ self.blocks.heap_size_of_children()
+ self.parents.heap_size_of_children()
+ self.header_ids.heap_size_of_children()
+ self.downloading_headers.heap_size_of_children()
+ self.downloading_bodies.heap_size_of_children()
}
/// Check if given block hash is marked as being downloaded.
pub fn is_downloading(&self, hash: &H256) -> bool {
self.downloading_headers.contains(hash) || self.downloading_bodies.contains(hash)

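Plain-old-data helpers such as HeaderId opt in through the malloc_size_of_is_0! macro, after which containers of them can be measured by the derive, replacing the hand-written heap_size() above. A hedged sketch with illustrative names, assuming the crate-root externs this commit adds (parity_util_mem, also aliased as malloc_size_of for the derive):

use std::collections::{HashMap, HashSet};
use ethereum_types::H256;
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

// Assumes #[macro_use] extern crate parity_util_mem; at the crate root,
// as this commit sets up for ethcore-sync.
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
struct Id(u64);
malloc_size_of_is_0!(Id);

#[derive(MallocSizeOf)]
struct Collection {
    downloading: HashSet<H256>,
    bodies: HashMap<H256, Vec<u8>>,
    ids: HashMap<Id, H256>,
}

fn collection_mem(c: &Collection) -> usize {
    c.malloc_size_of()
}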
View File

@ -98,7 +98,7 @@ use std::collections::{HashSet, HashMap, BTreeMap};
use std::cmp;
use std::time::{Duration, Instant};
use hash::keccak;
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOfExt;
use futures::sync::mpsc as futures_mpsc;
use api::Notification;
use ethereum_types::{H256, U256};
@ -132,7 +132,7 @@ use self::propagator::SyncPropagator;
use self::requester::SyncRequester;
pub(crate) use self::supplier::SyncSupplier;
known_heap_size!(0, PeerInfo);
malloc_size_of_is_0!(PeerInfo);
pub type PacketDecodeError = DecoderError;
@ -179,7 +179,7 @@ const SNAPSHOT_DATA_TIMEOUT: Duration = Duration::from_secs(120);
/// (so we might send to only some of the peers we originally intended to send to)
const PRIORITY_TASK_DEADLINE: Duration = Duration::from_millis(100);
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
#[derive(Copy, Clone, Eq, PartialEq, Debug, MallocSizeOf)]
/// Sync state
pub enum SyncState {
/// Collecting enough peers to start syncing.
@ -273,7 +273,7 @@ pub enum PeerAsking {
SnapshotData,
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
#[derive(PartialEq, Eq, Debug, Clone, Copy, MallocSizeOf)]
/// Block downloader channel.
pub enum BlockSet {
/// New blocks better than our best blocks
@ -585,6 +585,7 @@ enum PeerState {
/// Blockchain sync handler.
/// See module documentation for more details.
#[derive(MallocSizeOf)]
pub struct ChainSync {
/// Sync state
state: SyncState,
@ -618,10 +619,12 @@ pub struct ChainSync {
/// Enable ancient block downloading
download_old_blocks: bool,
/// Shared private tx service.
#[ignore_malloc_size_of = "arc on dyn trait here seems tricky, ignoring"]
private_tx_handler: Option<Arc<PrivateTxHandler>>,
/// Enable warp sync.
warp_sync: WarpSync,
#[ignore_malloc_size_of = "mpsc unmetered, ignoring"]
status_sinks: Vec<futures_mpsc::UnboundedSender<SyncState>>
}
@ -677,10 +680,7 @@ impl ChainSync {
num_active_peers: self.peers.values().filter(|p| p.is_allowed() && p.asking != PeerAsking::Nothing).count(),
num_snapshot_chunks: self.snapshot.total_chunks(),
snapshot_chunks_done: self.snapshot.done_chunks(),
mem_used:
self.new_blocks.heap_size()
+ self.old_blocks.as_ref().map_or(0, |d| d.heap_size())
+ self.peers.heap_size_of_children(),
mem_used: self.malloc_size_of(),
}
}

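Rather than hand-summing a few fields, the sync status above now reports self.malloc_size_of() for the whole struct, with unmeasurable fields excluded by attribute. A condensed sketch of that shape (types are illustrative, same crate-root assumptions as above):

use std::collections::HashMap;
use ethereum_types::H256;
use futures::sync::mpsc as futures_mpsc;
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};

#[derive(MallocSizeOf)]
struct Sync {
    peers: HashMap<H256, Vec<u8>>,
    #[ignore_malloc_size_of = "channels are not measured"]
    status_sinks: Vec<futures_mpsc::UnboundedSender<u64>>,
}

impl Sync {
    // Feeds the mem_used field of the status report.
    fn mem_used(&self) -> usize {
        self.malloc_size_of()
    }
}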
View File

@ -53,8 +53,10 @@ extern crate enum_primitive;
extern crate macros;
#[macro_use]
extern crate log;
extern crate parity_util_mem;
extern crate parity_util_mem as mem;
#[macro_use]
extern crate heapsize;
extern crate parity_util_mem as malloc_size_of;
#[macro_use]
extern crate trace_time;

View File

@ -27,6 +27,7 @@ pub enum ChunkType {
Block(H256),
}
#[derive(MallocSizeOf)]
pub struct Snapshot {
pending_state_chunks: Vec<H256>,
pending_block_chunks: Vec<H256>,

View File

@ -23,7 +23,7 @@ use types::BlockNumber;
type NodeId = H512;
#[derive(Debug, PartialEq, Clone)]
#[derive(Debug, PartialEq, Clone, MallocSizeOf)]
pub struct Stats {
first_seen: BlockNumber,
propagated_to: HashMap<NodeId, usize>,
@ -50,7 +50,7 @@ impl<'a> From<&'a Stats> for TransactionStats {
}
}
#[derive(Debug, Default)]
#[derive(Debug, Default, MallocSizeOf)]
pub struct TransactionsStats {
pending_transactions: H256FastMap<Stats>,
}

View File

@ -7,8 +7,8 @@ authors = ["Parity Technologies <admin@parity.io>"]
[dependencies]
ethereum-types = "0.6.0"
ethjson = { path = "../../json" }
parity-util-mem = "0.1"
ethkey = { path = "../../accounts/ethkey" }
heapsize = "0.4"
keccak-hash = "0.2.0"
parity-bytes = "0.1"
rlp = "0.4.0"

View File

@ -27,20 +27,16 @@ use block::Block as FullBlock;
use ethereum_types::{H256, Bloom, U256, Address};
use hash::keccak;
use header::{Header as FullHeader};
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use rlp::{self, Rlp, RlpStream};
use transaction::UnverifiedTransaction;
use views::{self, BlockView, HeaderView, BodyView};
use BlockNumber;
/// Owning header view.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Header(Vec<u8>);
impl HeapSizeOf for Header {
fn heap_size_of_children(&self) -> usize { self.0.heap_size_of_children() }
}
impl Header {
/// Create a new owning header view.
/// Expects the data to be an RLP-encoded header -- any other case will likely lead to
@ -113,13 +109,9 @@ impl Header {
}
/// Owning block body view.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Body(Vec<u8>);
impl HeapSizeOf for Body {
fn heap_size_of_children(&self) -> usize { self.0.heap_size_of_children() }
}
impl Body {
/// Create a new owning block body view. The raw bytes passed in must be an rlp-encoded block
/// body.
@ -178,13 +170,9 @@ impl Body {
}
/// Owning block view.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Block(Vec<u8>);
impl HeapSizeOf for Block {
fn heap_size_of_children(&self) -> usize { self.0.heap_size_of_children() }
}
impl Block {
/// Create a new owning block view. The raw bytes passed in must be an rlp-encoded block.
pub fn new(raw: Vec<u8>) -> Self { Block(raw) }

View File

@ -17,7 +17,7 @@
//! Block header.
use hash::{KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP, keccak};
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use ethereum_types::{H256, U256, Address, Bloom};
use bytes::Bytes;
use rlp::{Rlp, RlpStream, Encodable, DecoderError, Decodable};
@ -49,7 +49,7 @@ pub struct ExtendedHeader {
/// which is non-specific.
///
/// Doesn't do all that much on its own.
#[derive(Debug, Clone, Eq)]
#[derive(Debug, Clone, Eq, MallocSizeOf)]
pub struct Header {
/// Parent hash.
parent_hash: H256,
@ -361,12 +361,6 @@ impl Encodable for Header {
}
}
impl HeapSizeOf for Header {
fn heap_size_of_children(&self) -> usize {
self.extra_data.heap_size_of_children() + self.seal.heap_size_of_children()
}
}
impl ExtendedHeader {
/// Returns combined difficulty of all ancestors together with the difficulty of this header.
pub fn total_score(&self) -> U256 {

View File

@ -36,7 +36,6 @@
extern crate ethereum_types;
extern crate ethjson;
extern crate ethkey;
extern crate heapsize;
extern crate keccak_hash as hash;
extern crate parity_bytes as bytes;
extern crate rlp;
@ -44,6 +43,9 @@ extern crate unexpected;
#[macro_use]
extern crate rlp_derive;
extern crate parity_util_mem;
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
#[cfg(test)]
extern crate rustc_hex;

View File

@ -17,7 +17,7 @@
//! Log entry type definition.
use std::ops::Deref;
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use bytes::Bytes;
use ethereum_types::{H256, Address, Bloom, BloomInput};
@ -25,7 +25,7 @@ use {BlockNumber};
use ethjson;
/// A record of execution for a `LOG` operation.
#[derive(Default, Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable)]
#[derive(Default, Debug, Clone, PartialEq, Eq, RlpEncodable, RlpDecodable, MallocSizeOf)]
pub struct LogEntry {
/// The address of the contract executing at the point of the `LOG` operation.
pub address: Address,
@ -35,12 +35,6 @@ pub struct LogEntry {
pub data: Bytes,
}
impl HeapSizeOf for LogEntry {
fn heap_size_of_children(&self) -> usize {
self.topics.heap_size_of_children() + self.data.heap_size_of_children()
}
}
impl LogEntry {
/// Calculates the bloom of this log entry.
pub fn bloom(&self) -> Bloom {

View File

@ -17,14 +17,14 @@
//! Receipt
use ethereum_types::{H160, H256, U256, Address, Bloom};
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use rlp::{Rlp, RlpStream, Encodable, Decodable, DecoderError};
use BlockNumber;
use log_entry::{LogEntry, LocalizedLogEntry};
/// Transaction outcome store in the receipt.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub enum TransactionOutcome {
/// Status and state root are unknown under EIP-98 rules.
Unknown,
@ -35,7 +35,7 @@ pub enum TransactionOutcome {
}
/// Information describing execution of a transaction.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Receipt {
/// The total gas used in the block following execution of the transaction.
pub gas_used: U256,
@ -110,12 +110,6 @@ impl Decodable for Receipt {
}
}
impl HeapSizeOf for Receipt {
fn heap_size_of_children(&self) -> usize {
self.logs.heap_size_of_children()
}
}
/// Receipt with additional info.
#[derive(Debug, Clone, PartialEq)]
pub struct RichReceipt {

View File

@ -22,7 +22,8 @@ use ethereum_types::{H256, H160, Address, U256, BigEndianHash};
use ethjson;
use ethkey::{self, Signature, Secret, Public, recover, public_to_address};
use hash::keccak;
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOf;
use rlp::{self, RlpStream, Rlp, DecoderError, Encodable};
use transaction::error;
@ -37,7 +38,7 @@ pub const UNSIGNED_SENDER: Address = H160([0xff; 20]);
pub const SYSTEM_ADDRESS: Address = H160([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xff,0xff, 0xff, 0xff, 0xfe]);
/// Transaction action type.
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub enum Action {
/// Create creates new contract.
Create,
@ -99,7 +100,7 @@ pub mod signature {
/// A set of information describing an externally-originating message call
/// or contract creation operation.
#[derive(Default, Debug, Clone, PartialEq, Eq)]
#[derive(Default, Debug, Clone, PartialEq, Eq, MallocSizeOf)]
pub struct Transaction {
/// Nonce.
pub nonce: U256,
@ -133,12 +134,6 @@ impl Transaction {
}
}
impl HeapSizeOf for Transaction {
fn heap_size_of_children(&self) -> usize {
self.data.heap_size_of_children()
}
}
impl From<ethjson::state::Transaction> for SignedTransaction {
fn from(t: ethjson::state::Transaction) -> Self {
let to: Option<ethjson::hash::Address> = t.to.into();
@ -255,7 +250,7 @@ impl Transaction {
}
/// Signed transaction information without verified signature.
#[derive(Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq, MallocSizeOf)]
pub struct UnverifiedTransaction {
/// Plain Transaction.
unsigned: Transaction,
@ -270,12 +265,6 @@ pub struct UnverifiedTransaction {
hash: H256,
}
impl HeapSizeOf for UnverifiedTransaction {
fn heap_size_of_children(&self) -> usize {
self.unsigned.heap_size_of_children()
}
}
impl Deref for UnverifiedTransaction {
type Target = Transaction;
@ -407,19 +396,13 @@ impl UnverifiedTransaction {
}
/// A `UnverifiedTransaction` with successfully recovered `sender`.
#[derive(Debug, Clone, Eq, PartialEq)]
#[derive(Debug, Clone, Eq, PartialEq, MallocSizeOf)]
pub struct SignedTransaction {
transaction: UnverifiedTransaction,
sender: Address,
public: Option<Public>,
}
impl HeapSizeOf for SignedTransaction {
fn heap_size_of_children(&self) -> usize {
self.transaction.heap_size_of_children()
}
}
impl rlp::Encodable for SignedTransaction {
fn rlp_append(&self, s: &mut RlpStream) { self.transaction.rlp_append_sealed_transaction(s) }
}

View File

@ -7,7 +7,7 @@ authors = ["Parity Technologies <admin@parity.io>"]
byteorder = "1.0"
parity-bytes = "0.1"
ethereum-types = "0.6.0"
trie-db = "0.11.0"
trie-db = "0.12.4"
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
log = "0.4"
ethjson = { path = "../../json" }

View File

@ -23,7 +23,7 @@ ethabi-contract = "8.0"
ethcore-call-contract = { path = "../ethcore/call-contract" }
ethereum-types = "0.6.0"
futures = "0.1"
heapsize = "0.4"
parity-util-mem = "0.1"
keccak-hash = "0.2.0"
linked-hash-map = "0.5"
log = "0.4"

View File

@ -25,7 +25,8 @@ extern crate ethabi;
extern crate ethcore_call_contract as call_contract;
extern crate ethereum_types;
extern crate futures;
extern crate heapsize;
extern crate parity_util_mem;
extern crate keccak_hash as hash;
extern crate linked_hash_map;
extern crate parity_runtime;

View File

@ -17,7 +17,7 @@
//! Transaction Pool
use ethereum_types::{U256, H256, Address};
use heapsize::HeapSizeOf;
use parity_util_mem::MallocSizeOfExt;
use types::transaction;
use txpool;
@ -176,7 +176,7 @@ impl txpool::VerifiedTransaction for VerifiedTransaction {
}
fn mem_usage(&self) -> usize {
self.transaction.heap_size_of_children()
self.transaction.malloc_size_of()
}
fn sender(&self) -> &Address {

View File

@ -8,12 +8,12 @@ license = "GPL3"
[dependencies]
parity-bytes = "0.1"
ethereum-types = "0.6.0"
hash-db = "0.11.0"
heapsize = "0.4"
hash-db = "0.12.4"
parity-util-mem = "0.1"
keccak-hasher = { path = "../keccak-hasher" }
kvdb = "0.1"
log = "0.4"
memory-db = "0.11.0"
memory-db = "0.12.4"
parking_lot = "0.7"
fastmap = { path = "../../util/fastmap" }
rlp = "0.4.0"

View File

@ -23,12 +23,12 @@ use std::sync::Arc;
use bytes::Bytes;
use ethereum_types::H256;
use hash_db::{HashDB};
use parity_util_mem::MallocSizeOfExt;
use hash_db::{HashDB, Prefix};
use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction, DBValue};
use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY, error_key_already_exists, error_negatively_reference_hash};
use super::memory_db::*;
use traits::JournalDB;
/// Implementation of the `HashDB` trait for a disk-backed database with a memory overlay
@ -39,7 +39,7 @@ use traits::JournalDB;
/// immediately. As this is an "archive" database, nothing is ever removed. This means
/// that the states of any block the node has ever processed will be accessible.
pub struct ArchiveDB {
overlay: MemoryDB<KeccakHasher, DBValue>,
overlay: super::MemoryDB,
backing: Arc<KeyValueDB>,
latest_era: Option<u64>,
column: Option<u32>,
@ -66,8 +66,8 @@ impl ArchiveDB {
}
impl HashDB<KeccakHasher, DBValue> for ArchiveDB {
fn get(&self, key: &H256) -> Option<DBValue> {
if let Some((d, rc)) = self.overlay.raw(key) {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if let Some((d, rc)) = self.overlay.raw(key, prefix) {
if rc > 0 {
return Some(d.clone());
}
@ -75,20 +75,20 @@ impl HashDB<KeccakHasher, DBValue> for ArchiveDB {
self.payload(key)
}
fn contains(&self, key: &H256) -> bool {
self.get(key).is_some()
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.get(key, prefix).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
self.overlay.insert(value)
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.overlay.insert(prefix, value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
self.overlay.emplace(key, value);
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.overlay.emplace(key, prefix, value);
}
fn remove(&mut self, key: &H256) {
self.overlay.remove(key);
fn remove(&mut self, key: &H256, prefix: Prefix) {
self.overlay.remove(key, prefix);
}
}
@ -124,7 +124,7 @@ impl JournalDB for ArchiveDB {
}
fn mem_used(&self) -> usize {
self.overlay.mem_used()
self.overlay.malloc_size_of()
}
fn is_empty(&self) -> bool {
@ -197,7 +197,7 @@ impl JournalDB for ArchiveDB {
&self.backing
}
fn consolidate(&mut self, with: MemoryDB<KeccakHasher, DBValue>) {
fn consolidate(&mut self, with: super::MemoryDB) {
self.overlay.consolidate(with);
}
}
@ -206,7 +206,7 @@ impl JournalDB for ArchiveDB {
mod tests {
use keccak::keccak;
use hash_db::HashDB;
use hash_db::{HashDB, EMPTY_PREFIX};
use super::*;
use {kvdb_memorydb, JournalDB};
@ -215,50 +215,50 @@ mod tests {
// history is 1
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let x = jdb.insert(b"X");
let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
jdb.commit_batch(3, &keccak(b"1002a"), Some((1, keccak(b"1")))).unwrap();
jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap();
jdb.remove(&x);
jdb.remove(&x, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap();
let x = jdb.insert(b"X");
let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap();
jdb.commit_batch(5, &keccak(b"1004a"), Some((3, keccak(b"1002a")))).unwrap();
jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap();
assert!(jdb.contains(&x));
assert!(jdb.contains(&x, EMPTY_PREFIX));
}
#[test]
fn long_history() {
// history is 3
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let h = jdb.insert(b"foo");
let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&h));
jdb.remove(&h);
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
}
#[test]
#[should_panic]
fn multiple_owed_removal_not_allowed() {
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let h = jdb.insert(b"foo");
let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&h));
jdb.remove(&h);
jdb.remove(&h);
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h, EMPTY_PREFIX);
jdb.remove(&h, EMPTY_PREFIX);
// commit_batch would call journal_under(),
// and we don't allow multiple owned removals.
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -269,29 +269,29 @@ mod tests {
// history is 1
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.remove(&baz, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
}
@ -301,25 +301,25 @@ mod tests {
// history is 1
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -327,18 +327,18 @@ mod tests {
// history is 1
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
jdb.insert(b"foo");
assert!(jdb.contains(&foo));
jdb.insert(EMPTY_PREFIX, b"foo");
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -347,15 +347,15 @@ mod tests {
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -366,22 +366,22 @@ mod tests {
let foo = {
let mut jdb = ArchiveDB::new(shared_db.clone(), None);
// history is 1
let foo = jdb.insert(b"foo");
jdb.emplace(bar.clone(), DBValue::from_slice(b"bar"));
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.emplace(bar.clone(), EMPTY_PREFIX, DBValue::from_slice(b"bar"));
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
foo
};
{
let mut jdb = ArchiveDB::new(shared_db.clone(), None);
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
}
{
let mut jdb = ArchiveDB::new(shared_db, None);
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
}
}
@ -393,23 +393,23 @@ mod tests {
let foo = {
let mut jdb = ArchiveDB::new(shared_db.clone(), None);
// history is 1
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
// foo is ancient history.
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
foo
};
{
let mut jdb = ArchiveDB::new(shared_db, None);
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.contains(&foo));
jdb.remove(&foo);
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap();
}
@ -421,14 +421,14 @@ mod tests {
let (foo, _, _) = {
let mut jdb = ArchiveDB::new(shared_db.clone(), None);
// history is 1
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
(foo, bar, baz)
};
@ -436,7 +436,7 @@ mod tests {
{
let mut jdb = ArchiveDB::new(shared_db, None);
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
}
@ -446,7 +446,7 @@ mod tests {
let key = {
let mut jdb = ArchiveDB::new(shared_db.clone(), None);
let key = jdb.insert(b"foo");
let key = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
key
};
@ -461,13 +461,13 @@ mod tests {
#[test]
fn inject() {
let mut jdb = ArchiveDB::new(Arc::new(kvdb_memorydb::create(0)), None);
let key = jdb.insert(b"dog");
let key = jdb.insert(EMPTY_PREFIX, b"dog");
jdb.inject_batch().unwrap();
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key);
assert_eq!(jdb.get(&key, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key, EMPTY_PREFIX);
jdb.inject_batch().unwrap();
assert!(jdb.get(&key).is_none());
assert!(jdb.get(&key, EMPTY_PREFIX).is_none());
}
}

View File

@ -23,27 +23,22 @@ use std::sync::Arc;
use bytes::Bytes;
use ethereum_types::H256;
use hash_db::{HashDB};
use heapsize::HeapSizeOf;
use hash_db::{HashDB, Prefix};
use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction, DBValue};
use memory_db::*;
use parking_lot::RwLock;
use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY, error_negatively_reference_hash, error_key_already_exists};
use super::traits::JournalDB;
use util::{DatabaseKey, DatabaseValueView, DatabaseValueRef};
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, MallocSizeOf)]
struct RefInfo {
queue_refs: usize,
in_archive: bool,
}
impl HeapSizeOf for RefInfo {
fn heap_size_of_children(&self) -> usize { 0 }
}
#[derive(Clone, PartialEq, Eq)]
enum RemoveFrom {
Queue,
@ -107,7 +102,7 @@ enum RemoveFrom {
///
/// TODO: `store_reclaim_period`
pub struct EarlyMergeDB {
overlay: MemoryDB<KeccakHasher, DBValue>,
overlay: super::MemoryDB,
backing: Arc<KeyValueDB>,
refs: Option<Arc<RwLock<HashMap<H256, RefInfo>>>>,
latest_era: Option<u64>,
@ -292,8 +287,8 @@ impl EarlyMergeDB {
}
impl HashDB<KeccakHasher, DBValue> for EarlyMergeDB {
fn get(&self, key: &H256) -> Option<DBValue> {
if let Some((d, rc)) = self.overlay.raw(key) {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if let Some((d, rc)) = self.overlay.raw(key, prefix) {
if rc > 0 {
return Some(d.clone())
}
@ -301,18 +296,18 @@ impl HashDB<KeccakHasher, DBValue> for EarlyMergeDB {
self.payload(key)
}
fn contains(&self, key: &H256) -> bool {
self.get(key).is_some()
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.get(key, prefix).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
self.overlay.insert(value)
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.overlay.insert(prefix, value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
self.overlay.emplace(key, value);
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.overlay.emplace(key, prefix, value);
}
fn remove(&mut self, key: &H256) {
self.overlay.remove(key);
fn remove(&mut self, key: &H256, prefix: Prefix) {
self.overlay.remove(key, prefix);
}
}
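Illustrative note (not part of this commit): every `HashDB` call site now threads a `Prefix`, with `EMPTY_PREFIX` for values not tied to a trie position. A minimal sketch of a caller written against the prefix-aware trait; the helper name `contains_both` is hypothetical.
// Hedged sketch, assuming only imports already used in this file.
use ethereum_types::H256;
use hash_db::{HashDB, EMPTY_PREFIX};
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;
// Works for any of the journal databases in this diff, since they all
// implement HashDB<KeccakHasher, DBValue> with the new prefix parameter.
fn contains_both<DB: HashDB<KeccakHasher, DBValue>>(db: &DB, a: &H256, b: &H256) -> bool {
	db.contains(a, EMPTY_PREFIX) && db.contains(b, EMPTY_PREFIX)
}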
@ -358,8 +353,9 @@ impl JournalDB for EarlyMergeDB {
fn latest_era(&self) -> Option<u64> { self.latest_era }
fn mem_used(&self) -> usize {
self.overlay.mem_used() + match self.refs {
Some(ref c) => c.read().heap_size_of_children(),
let mut ops = new_malloc_size_ops();
self.overlay.size_of(&mut ops) + match self.refs {
Some(ref c) => c.read().size_of(&mut ops),
None => 0
}
}
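Illustrative note (not part of this commit): `mem_used` now builds a `MallocSizeOfOps` via `new_malloc_size_ops()` and sums `size_of` over the owned structures, replacing `heap_size_of_children`. A minimal sketch of that pattern; the `Tracked` struct and `heap_bytes` helper are hypothetical.
// Hedged sketch of the parity-util-mem measurement pattern used above.
use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
// MallocSizeOf can be derived, as done for RefInfo and JournalEntry in this diff.
#[derive(MallocSizeOf)]
struct Tracked {
	items: Vec<u64>,
}
fn heap_bytes(t: &Tracked) -> usize {
	let mut ops = new_malloc_size_ops();
	t.size_of(&mut ops)
}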
@ -520,7 +516,7 @@ impl JournalDB for EarlyMergeDB {
Ok(ops)
}
fn consolidate(&mut self, with: MemoryDB<KeccakHasher, DBValue>) {
fn consolidate(&mut self, with: super::MemoryDB) {
self.overlay.consolidate(with);
}
}
@ -529,7 +525,7 @@ impl JournalDB for EarlyMergeDB {
mod tests {
use keccak::keccak;
use hash_db::HashDB;
use hash_db::{HashDB, EMPTY_PREFIX};
use super::*;
use super::super::traits::JournalDB;
use kvdb_memorydb;
@ -539,7 +535,7 @@ mod tests {
// history is 1
let mut jdb = new_db();
let x = jdb.insert(b"X");
let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
@ -549,10 +545,10 @@ mod tests {
jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&x);
jdb.remove(&x, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
let x = jdb.insert(b"X");
let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
@ -561,50 +557,50 @@ mod tests {
jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&x));
assert!(jdb.contains(&x, EMPTY_PREFIX));
}
#[test]
fn insert_older_era() {
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0a"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let bar = jdb.insert(b"bar");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0a")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(0, &keccak(b"0b"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
}
#[test]
fn long_history() {
// history is 3
let mut jdb = new_db();
let h = jdb.insert(b"foo");
let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h));
jdb.remove(&h);
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&h));
assert!(!jdb.contains(&h, EMPTY_PREFIX));
}
#[test]
@ -612,42 +608,42 @@ mod tests {
// history is 1
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.remove(&baz, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
}
#[test]
@ -655,31 +651,31 @@ mod tests {
// history is 1
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
}
#[test]
@ -687,22 +683,22 @@ mod tests {
// history is 1
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
assert!(jdb.contains(&foo));
jdb.insert(EMPTY_PREFIX, b"foo");
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -712,23 +708,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -737,23 +733,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -762,23 +758,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3a"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3b"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
@ -802,8 +798,8 @@ mod tests {
let foo = {
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
// history is 1
let foo = jdb.insert(b"foo");
jdb.emplace(bar.clone(), DBValue::from_slice(b"bar"));
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.emplace(bar.clone(), EMPTY_PREFIX, DBValue::from_slice(b"bar"));
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
foo
@ -811,18 +807,18 @@ mod tests {
{
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
{
let mut jdb = EarlyMergeDB::new(shared_db, None);
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
}
}
@ -833,19 +829,19 @@ mod tests {
let mut jdb = new_db();
// history is 4
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
// expunge foo
@ -859,39 +855,39 @@ mod tests {
let mut jdb = new_db();
// history is 4
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1a"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2a"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2b"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3a"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3b"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
@ -905,28 +901,28 @@ mod tests {
let mut jdb = new_db();
// history is 1
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
// foo is ancient history.
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); // BROKEN
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -934,7 +930,7 @@ mod tests {
let mut jdb = new_db();
// history is 4
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -948,16 +944,16 @@ mod tests {
// foo is ancient history.
jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(5, &keccak(b"5"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&bar);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(6, &keccak(b"6"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(b"bar");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(7, &keccak(b"7"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
@ -972,7 +968,7 @@ mod tests {
{
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
// history is 1
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -980,24 +976,24 @@ mod tests {
// foo is ancient history.
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db
}; {
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db
}; {
@ -1005,7 +1001,7 @@ mod tests {
jdb.commit_batch(5, &keccak(b"5"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db
}; {
@ -1013,7 +1009,7 @@ mod tests {
jdb.commit_batch(6, &keccak(b"6"), Some((4, keccak(b"4")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
}
}
@ -1024,16 +1020,16 @@ mod tests {
let (foo, bar, baz) = {
let mut jdb = EarlyMergeDB::new(shared_db.clone(), None);
// history is 1
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
(foo, bar, baz)
@ -1043,22 +1039,22 @@ mod tests {
let mut jdb = EarlyMergeDB::new(shared_db, None);
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
}
}
#[test]
fn inject() {
let mut jdb = new_db();
let key = jdb.insert(b"dog");
let key = jdb.insert(EMPTY_PREFIX, b"dog");
jdb.inject_batch().unwrap();
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key);
assert_eq!(jdb.get(&key, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key, EMPTY_PREFIX);
jdb.inject_batch().unwrap();
assert!(jdb.get(&key).is_none());
assert!(jdb.get(&key, EMPTY_PREFIX).is_none());
}
}


@ -16,7 +16,9 @@
//! `JournalDB` interface and implementation.
extern crate heapsize;
extern crate parity_util_mem;
extern crate parity_util_mem as mem;
extern crate parity_util_mem as malloc_size_of;
#[macro_use]
extern crate log;
@ -59,6 +61,14 @@ pub use self::traits::KeyedHashDB;
/// Export as keyed hash trait
pub use self::traits::AsKeyedHashDB;
/// Alias to ethereum MemoryDB
type MemoryDB = memory_db::MemoryDB<
keccak_hasher::KeccakHasher,
memory_db::HashKey<keccak_hasher::KeccakHasher>,
kvdb::DBValue,
>;
/// Journal database operating strategy.
#[derive(Debug, PartialEq, Clone, Copy)]
pub enum Algorithm {
@ -163,8 +173,8 @@ fn error_negatively_reference_hash(hash: &ethereum_types::H256) -> io::Error {
io::Error::new(io::ErrorKind::Other, format!("Entry {} removed from database more times than it was added.", hash))
}
pub fn new_memory_db() -> memory_db::MemoryDB<keccak_hasher::KeccakHasher, kvdb::DBValue> {
memory_db::MemoryDB::from_null_node(&rlp::NULL_RLP, rlp::NULL_RLP.as_ref().into())
pub fn new_memory_db() -> MemoryDB {
MemoryDB::from_null_node(&rlp::NULL_RLP, rlp::NULL_RLP.as_ref().into())
}
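Illustrative note (not part of this commit): a minimal sketch exercising the `MemoryDB` alias through `new_memory_db()` with the prefix-aware API this diff migrates to; the function name `memory_db_smoke` is hypothetical.
// Hedged sketch; relies only on items shown in this file and in hash_db.
use hash_db::{HashDB, EMPTY_PREFIX};
fn memory_db_smoke() {
	let mut db = new_memory_db();
	let key = db.insert(EMPTY_PREFIX, b"value");
	assert!(db.contains(&key, EMPTY_PREFIX));
	db.remove(&key, EMPTY_PREFIX);
	assert!(!db.contains(&key, EMPTY_PREFIX));
}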
#[cfg(test)]


@ -23,9 +23,8 @@ use std::sync::Arc;
use ethereum_types::H256;
use rlp::{Rlp, RlpStream, Encodable, DecoderError, Decodable, encode, decode};
use hash_db::{HashDB};
use hash_db::{HashDB, Prefix, EMPTY_PREFIX};
use keccak_hasher::KeccakHasher;
use memory_db::*;
use kvdb::{KeyValueDB, DBTransaction, DBValue};
use super::{error_negatively_reference_hash};
@ -39,7 +38,7 @@ use super::{error_negatively_reference_hash};
/// queries have an immediate effect in terms of these functions.
#[derive(Clone)]
pub struct OverlayDB {
overlay: MemoryDB<KeccakHasher, DBValue>,
overlay: super::MemoryDB,
backing: Arc<KeyValueDB>,
column: Option<u32>,
}
@ -134,7 +133,7 @@ impl OverlayDB {
pub fn revert(&mut self) { self.overlay.clear(); }
/// Get the number of references that would be committed.
pub fn commit_refs(&self, key: &H256) -> i32 { self.overlay.raw(key).map_or(0, |(_, refs)| refs) }
pub fn commit_refs(&self, key: &H256) -> i32 { self.overlay.raw(key, EMPTY_PREFIX).map_or(0, |(_, refs)| refs) }
/// Get the refs and value of the given key.
fn payload(&self, key: &H256) -> Option<Payload> {
@ -182,10 +181,10 @@ impl crate::KeyedHashDB for OverlayDB {
}
impl HashDB<KeccakHasher, DBValue> for OverlayDB {
fn get(&self, key: &H256) -> Option<DBValue> {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
// return ok if positive; if negative, check backing - might be enough references there to make
// it positive again.
let k = self.overlay.raw(key);
let k = self.overlay.raw(key, prefix);
let memrc = {
if let Some((d, rc)) = k {
if rc > 0 { return Some(d.clone()); }
@ -209,10 +208,10 @@ impl HashDB<KeccakHasher, DBValue> for OverlayDB {
}
}
fn contains(&self, key: &H256) -> bool {
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
// return ok if positive; if negative, check backing - might be enough references there to make
// it positive again.
let k = self.overlay.raw(key);
let k = self.overlay.raw(key, prefix);
match k {
Some((_, rc)) if rc > 0 => true,
_ => {
@ -229,111 +228,111 @@ impl HashDB<KeccakHasher, DBValue> for OverlayDB {
}
}
fn insert(&mut self, value: &[u8]) -> H256 { self.overlay.insert(value) }
fn emplace(&mut self, key: H256, value: DBValue) { self.overlay.emplace(key, value); }
fn remove(&mut self, key: &H256) { self.overlay.remove(key); }
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 { self.overlay.insert(prefix, value) }
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) { self.overlay.emplace(key, prefix, value); }
fn remove(&mut self, key: &H256, prefix: Prefix) { self.overlay.remove(key, prefix); }
}
#[test]
fn overlaydb_revert() {
let mut m = OverlayDB::new_temp();
let foo = m.insert(b"foo"); // insert foo.
let foo = m.insert(EMPTY_PREFIX, b"foo"); // insert foo.
let mut batch = m.backing.transaction();
m.commit_to_batch(&mut batch).unwrap(); // commit - new operations begin here...
m.backing.write(batch).unwrap();
let bar = m.insert(b"bar"); // insert bar.
m.remove(&foo); // remove foo.
assert!(!m.contains(&foo)); // foo is gone.
assert!(m.contains(&bar)); // bar is here.
let bar = m.insert(EMPTY_PREFIX, b"bar"); // insert bar.
m.remove(&foo, EMPTY_PREFIX); // remove foo.
assert!(!m.contains(&foo, EMPTY_PREFIX)); // foo is gone.
assert!(m.contains(&bar, EMPTY_PREFIX)); // bar is here.
m.revert(); // revert the last two operations.
assert!(m.contains(&foo)); // foo is here.
assert!(!m.contains(&bar)); // bar is gone.
assert!(m.contains(&foo, EMPTY_PREFIX)); // foo is here.
assert!(!m.contains(&bar, EMPTY_PREFIX)); // bar is gone.
}
#[test]
fn overlaydb_overlay_insert_and_remove() {
let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world");
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
trie.remove(&h);
assert_eq!(trie.get(&h), None);
let h = trie.insert(EMPTY_PREFIX, b"hello world");
assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
trie.remove(&h, EMPTY_PREFIX);
assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
}
#[test]
fn overlaydb_backing_insert_revert() {
let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world");
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
let h = trie.insert(EMPTY_PREFIX, b"hello world");
assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
trie.commit().unwrap();
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
trie.revert();
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
}
#[test]
fn overlaydb_backing_remove() {
let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world");
let h = trie.insert(EMPTY_PREFIX, b"hello world");
trie.commit().unwrap();
trie.remove(&h);
assert_eq!(trie.get(&h), None);
trie.remove(&h, EMPTY_PREFIX);
assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
trie.commit().unwrap();
assert_eq!(trie.get(&h), None);
assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
trie.revert();
assert_eq!(trie.get(&h), None);
assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
}
#[test]
fn overlaydb_backing_remove_revert() {
let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world");
let h = trie.insert(EMPTY_PREFIX, b"hello world");
trie.commit().unwrap();
trie.remove(&h);
assert_eq!(trie.get(&h), None);
trie.remove(&h, EMPTY_PREFIX);
assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
trie.revert();
assert_eq!(trie.get(&h).unwrap(), DBValue::from_slice(b"hello world"));
assert_eq!(trie.get(&h, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"hello world"));
}
#[test]
fn overlaydb_negative() {
let mut trie = OverlayDB::new_temp();
let h = trie.insert(b"hello world");
let h = trie.insert(EMPTY_PREFIX, b"hello world");
trie.commit().unwrap();
trie.remove(&h);
trie.remove(&h); //bad - sends us into negative refs.
assert_eq!(trie.get(&h), None);
trie.remove(&h, EMPTY_PREFIX);
trie.remove(&h, EMPTY_PREFIX); //bad - sends us into negative refs.
assert_eq!(trie.get(&h, EMPTY_PREFIX), None);
assert!(trie.commit().is_err());
}
#[test]
fn overlaydb_complex() {
let mut trie = OverlayDB::new_temp();
let hfoo = trie.insert(b"foo");
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
let hbar = trie.insert(b"bar");
assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar"));
let hfoo = trie.insert(EMPTY_PREFIX, b"foo");
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
let hbar = trie.insert(EMPTY_PREFIX, b"bar");
assert_eq!(trie.get(&hbar, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"bar"));
trie.commit().unwrap();
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar"));
trie.insert(b"foo"); // two refs
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hbar, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"bar"));
trie.insert(EMPTY_PREFIX, b"foo"); // two refs
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.commit().unwrap();
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hbar).unwrap(), DBValue::from_slice(b"bar"));
trie.remove(&hbar); // zero refs - delete
assert_eq!(trie.get(&hbar), None);
trie.remove(&hfoo); // one ref - keep
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hbar, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"bar"));
trie.remove(&hbar, EMPTY_PREFIX); // zero refs - delete
assert_eq!(trie.get(&hbar, EMPTY_PREFIX), None);
trie.remove(&hfoo, EMPTY_PREFIX); // one ref - keep
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.commit().unwrap();
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
trie.remove(&hfoo); // zero ref - would delete, but...
assert_eq!(trie.get(&hfoo), None);
trie.insert(b"foo"); // one ref - keep after all.
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.remove(&hfoo, EMPTY_PREFIX); // zero ref - would delete, but...
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX), None);
trie.insert(EMPTY_PREFIX, b"foo"); // one ref - keep after all.
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.commit().unwrap();
assert_eq!(trie.get(&hfoo).unwrap(), DBValue::from_slice(b"foo"));
trie.remove(&hfoo); // zero ref - delete
assert_eq!(trie.get(&hfoo), None);
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"foo"));
trie.remove(&hfoo, EMPTY_PREFIX); // zero ref - delete
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX), None);
trie.commit().unwrap(); //
assert_eq!(trie.get(&hfoo), None);
assert_eq!(trie.get(&hfoo, EMPTY_PREFIX), None);
}


@ -23,11 +23,10 @@ use std::sync::Arc;
use bytes::Bytes;
use ethereum_types::H256;
use hash_db::{HashDB};
use heapsize::HeapSizeOf;
use hash_db::{HashDB, Prefix, EMPTY_PREFIX};
use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction, DBValue};
use memory_db::*;
use parking_lot::RwLock;
use fastmap::H256FastMap;
use rlp::{Rlp, RlpStream, encode, decode, DecoderError, Decodable, Encodable};
@ -66,7 +65,7 @@ use util::DatabaseKey;
/// 7. Delete ancient record from memory and disk.
pub struct OverlayRecentDB {
transaction_overlay: MemoryDB<KeccakHasher, DBValue>,
transaction_overlay: super::MemoryDB,
backing: Arc<KeyValueDB>,
journal_overlay: Arc<RwLock<JournalOverlay>>,
column: Option<u32>,
@ -120,7 +119,7 @@ impl<'a> Encodable for DatabaseValueRef<'a> {
#[derive(PartialEq)]
struct JournalOverlay {
backing_overlay: MemoryDB<KeccakHasher, DBValue>, // Nodes added in the history period
backing_overlay: super::MemoryDB, // Nodes added in the history period
pending_overlay: H256FastMap<DBValue>, // Nodes being transfered from backing_overlay to backing db
journal: HashMap<u64, Vec<JournalEntry>>,
latest_era: Option<u64>,
@ -128,19 +127,13 @@ struct JournalOverlay {
cumulative_size: usize, // cumulative size of all entries.
}
#[derive(PartialEq)]
#[derive(PartialEq, MallocSizeOf)]
struct JournalEntry {
id: H256,
insertions: Vec<H256>,
deletions: Vec<H256>,
}
impl HeapSizeOf for JournalEntry {
fn heap_size_of_children(&self) -> usize {
self.insertions.heap_size_of_children() + self.deletions.heap_size_of_children()
}
}
impl Clone for OverlayRecentDB {
fn clone(&self) -> OverlayRecentDB {
OverlayRecentDB {
@ -204,11 +197,11 @@ impl OverlayRecentDB {
for (k, v) in value.inserts {
let short_key = to_short_key(&k);
if !overlay.contains(&short_key) {
if !overlay.contains(&short_key, EMPTY_PREFIX) {
cumulative_size += v.len();
}
overlay.emplace(short_key, v);
overlay.emplace(short_key, EMPTY_PREFIX, v);
inserted_keys.push(k);
}
journal.entry(era).or_insert_with(Vec::new).push(JournalEntry {
@ -272,12 +265,13 @@ impl JournalDB for OverlayRecentDB {
}
fn mem_used(&self) -> usize {
let mut mem = self.transaction_overlay.mem_used();
let mut ops = new_malloc_size_ops();
let mut mem = self.transaction_overlay.size_of(&mut ops);
let overlay = self.journal_overlay.read();
mem += overlay.backing_overlay.mem_used();
mem += overlay.pending_overlay.heap_size_of_children();
mem += overlay.journal.heap_size_of_children();
mem += overlay.backing_overlay.size_of(&mut ops);
mem += overlay.pending_overlay.size_of(&mut ops);
mem += overlay.journal.size_of(&mut ops);
mem
}
@ -302,7 +296,7 @@ impl JournalDB for OverlayRecentDB {
fn state(&self, key: &H256) -> Option<Bytes> {
let journal_overlay = self.journal_overlay.read();
let key = to_short_key(key);
journal_overlay.backing_overlay.get(&key).map(|v| v.into_vec())
journal_overlay.backing_overlay.get(&key, EMPTY_PREFIX).map(|v| v.into_vec())
.or_else(|| journal_overlay.pending_overlay.get(&key).map(|d| d.clone().into_vec()))
.or_else(|| self.backing.get_by_prefix(self.column, &key[0..DB_PREFIX_LEN]).map(|b| b.into_vec()))
}
@ -334,11 +328,11 @@ impl JournalDB for OverlayRecentDB {
for (k, v) in insertions {
let short_key = to_short_key(&k);
if !journal_overlay.backing_overlay.contains(&short_key) {
if !journal_overlay.backing_overlay.contains(&short_key, EMPTY_PREFIX) {
journal_overlay.cumulative_size += v.len();
}
journal_overlay.backing_overlay.emplace(short_key, v);
journal_overlay.backing_overlay.emplace(short_key, EMPTY_PREFIX, v);
}
let index = journal_overlay.journal.get(&now).map_or(0, |j| j.len());
@ -387,7 +381,7 @@ impl JournalDB for OverlayRecentDB {
{
if *canon_id == journal.id {
for h in &journal.insertions {
if let Some((d, rc)) = journal_overlay.backing_overlay.raw(&to_short_key(h)) {
if let Some((d, rc)) = journal_overlay.backing_overlay.raw(&to_short_key(h), EMPTY_PREFIX) {
if rc > 0 {
canon_insertions.push((h.clone(), d.clone())); //TODO: optimize this to avoid data copy
}
@ -410,13 +404,13 @@ impl JournalDB for OverlayRecentDB {
}
// update the overlay
for k in overlay_deletions {
if let Some(val) = journal_overlay.backing_overlay.remove_and_purge(&to_short_key(&k)) {
if let Some(val) = journal_overlay.backing_overlay.remove_and_purge(&to_short_key(&k), EMPTY_PREFIX) {
journal_overlay.cumulative_size -= val.len();
}
}
// apply canon deletions
for k in canon_deletions {
if !journal_overlay.backing_overlay.contains(&to_short_key(&k)) {
if !journal_overlay.backing_overlay.contains(&to_short_key(&k), EMPTY_PREFIX) {
batch.delete(self.column, k.as_bytes());
}
}
@ -458,14 +452,14 @@ impl JournalDB for OverlayRecentDB {
Ok(ops)
}
fn consolidate(&mut self, with: MemoryDB<KeccakHasher, DBValue>) {
fn consolidate(&mut self, with: super::MemoryDB) {
self.transaction_overlay.consolidate(with);
}
}
impl HashDB<KeccakHasher, DBValue> for OverlayRecentDB {
fn get(&self, key: &H256) -> Option<DBValue> {
if let Some((d, rc)) = self.transaction_overlay.raw(key) {
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> {
if let Some((d, rc)) = self.transaction_overlay.raw(key, prefix) {
if rc > 0 {
return Some(d.clone())
}
@ -473,24 +467,24 @@ impl HashDB<KeccakHasher, DBValue> for OverlayRecentDB {
let v = {
let journal_overlay = self.journal_overlay.read();
let key = to_short_key(key);
journal_overlay.backing_overlay.get(&key)
journal_overlay.backing_overlay.get(&key, prefix)
.or_else(|| journal_overlay.pending_overlay.get(&key).cloned())
};
v.or_else(|| self.payload(key))
}
fn contains(&self, key: &H256) -> bool {
self.get(key).is_some()
fn contains(&self, key: &H256, prefix: Prefix) -> bool {
self.get(key, prefix).is_some()
}
fn insert(&mut self, value: &[u8]) -> H256 {
self.transaction_overlay.insert(value)
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 {
self.transaction_overlay.insert(prefix, value)
}
fn emplace(&mut self, key: H256, value: DBValue) {
self.transaction_overlay.emplace(key, value);
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) {
self.transaction_overlay.emplace(key, prefix, value);
}
fn remove(&mut self, key: &H256) {
self.transaction_overlay.remove(key);
fn remove(&mut self, key: &H256, prefix: Prefix) {
self.transaction_overlay.remove(key, prefix);
}
}
@ -499,7 +493,7 @@ mod tests {
use keccak::keccak;
use super::*;
use hash_db::HashDB;
use hash_db::{HashDB, EMPTY_PREFIX};
use {kvdb_memorydb, JournalDB};
fn new_db() -> OverlayRecentDB {
@ -512,7 +506,7 @@ mod tests {
// history is 1
let mut jdb = new_db();
let x = jdb.insert(b"X");
let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
@ -522,10 +516,10 @@ mod tests {
jdb.commit_batch(4, &keccak(b"1003a"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&x);
jdb.remove(&x, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"1002b"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
let x = jdb.insert(b"X");
let x = jdb.insert(EMPTY_PREFIX, b"X");
jdb.commit_batch(4, &keccak(b"1003b"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
@ -534,30 +528,30 @@ mod tests {
jdb.commit_batch(6, &keccak(b"1005a"), Some((4, keccak(b"1003a")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&x));
assert!(jdb.contains(&x, EMPTY_PREFIX));
}
#[test]
fn long_history() {
// history is 3
let mut jdb = new_db();
let h = jdb.insert(b"foo");
let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h));
jdb.remove(&h);
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&h));
assert!(!jdb.contains(&h, EMPTY_PREFIX));
}
#[test]
@ -565,42 +559,42 @@ mod tests {
// history is 1
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.remove(&baz, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
}
#[test]
@ -608,31 +602,31 @@ mod tests {
// history is 1
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
}
#[test]
@ -640,22 +634,22 @@ mod tests {
// history is 1
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
assert!(jdb.contains(&foo));
jdb.insert(EMPTY_PREFIX, b"foo");
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"2"), Some((0, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -664,23 +658,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2a"), Some((1, keccak(b"1a")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -690,23 +684,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1c"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
@ -716,23 +710,23 @@ mod tests {
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3a"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3b"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
@ -751,8 +745,8 @@ mod tests {
let foo = {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
// history is 1
let foo = jdb.insert(b"foo");
jdb.emplace(bar.clone(), DBValue::from_slice(b"bar"));
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.emplace(bar.clone(), EMPTY_PREFIX, DBValue::from_slice(b"bar"));
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
foo
@ -760,18 +754,18 @@ mod tests {
{
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
{
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
}
}
@ -781,19 +775,19 @@ mod tests {
let mut jdb = new_db();
// history is 4
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
// expunge foo
@ -807,39 +801,39 @@ mod tests {
let mut jdb = new_db();
// history is 4
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1a"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2a"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(2, &keccak(b"2b"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3a"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3b"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(4, &keccak(b"4b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
@ -852,35 +846,35 @@ mod tests {
fn broken_assert() {
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
// foo is ancient history.
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap(); // BROKEN
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(5, &keccak(b"5"), Some((4, keccak(b"4")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
}
#[test]
fn reopen_test() {
let mut jdb = new_db();
// history is 4
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -894,16 +888,16 @@ mod tests {
// foo is ancient history.
jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(5, &keccak(b"5"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
jdb.remove(&bar);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(6, &keccak(b"6"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.insert(b"foo");
jdb.insert(b"bar");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(7, &keccak(b"7"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs());
}
@ -918,7 +912,7 @@ mod tests {
{
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
// history is 1
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
@ -926,24 +920,24 @@ mod tests {
// foo is ancient history.
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
jdb.insert(b"foo");
jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(3, &keccak(b"3"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db
}; {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(4, &keccak(b"4"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db
}; {
@ -951,7 +945,7 @@ mod tests {
jdb.commit_batch(5, &keccak(b"5"), Some((3, keccak(b"3")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
// incantation to reopen the db
}; {
@ -959,7 +953,7 @@ mod tests {
jdb.commit_batch(6, &keccak(b"6"), Some((4, keccak(b"4")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
}
}
@ -970,16 +964,16 @@ mod tests {
let (foo, bar, baz) = {
let mut jdb = OverlayRecentDB::new(shared_db.clone(), None);
// history is 1
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.can_reconstruct_refs());
(foo, bar, baz)
@ -989,43 +983,43 @@ mod tests {
let mut jdb = OverlayRecentDB::new(shared_db, None);
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.can_reconstruct_refs());
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
}
}
#[test]
fn insert_older_era() {
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0a"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
let bar = jdb.insert(b"bar");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0a")))).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(0, &keccak(b"0b"), None).unwrap();
assert!(jdb.can_reconstruct_refs());
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
}
#[test]
fn inject() {
let mut jdb = new_db();
let key = jdb.insert(b"dog");
let key = jdb.insert(EMPTY_PREFIX, b"dog");
jdb.inject_batch().unwrap();
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key);
assert_eq!(jdb.get(&key, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key, EMPTY_PREFIX);
jdb.inject_batch().unwrap();
assert!(jdb.get(&key).is_none());
assert!(jdb.get(&key, EMPTY_PREFIX).is_none());
}
#[test]
@ -1037,7 +1031,7 @@ mod tests {
assert!(jdb.earliest_era().is_none());
// single journalled era.
let _key = jdb.insert(b"hello!");
let _key = jdb.insert(EMPTY_PREFIX, b"hello!");
let mut batch = jdb.backing().transaction();
jdb.journal_under(&mut batch, 0, &keccak(b"0")).unwrap();
jdb.backing().write_buffered(batch);

View File

@@ -22,11 +22,10 @@ use std::sync::Arc;
use bytes::Bytes;
use ethereum_types::H256;
use hash_db::{HashDB};
use heapsize::HeapSizeOf;
use hash_db::{HashDB, Prefix, EMPTY_PREFIX};
use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};
use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction, DBValue};
use memory_db::MemoryDB;
use overlaydb::OverlayDB;
use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY};
@@ -81,11 +80,11 @@ impl RefCountedDB {
}
impl HashDB<KeccakHasher, DBValue> for RefCountedDB {
fn get(&self, key: &H256) -> Option<DBValue> { self.forward.get(key) }
fn contains(&self, key: &H256) -> bool { self.forward.contains(key) }
fn insert(&mut self, value: &[u8]) -> H256 { let r = self.forward.insert(value); self.inserts.push(r.clone()); r }
fn emplace(&mut self, key: H256, value: DBValue) { self.inserts.push(key.clone()); self.forward.emplace(key, value); }
fn remove(&mut self, key: &H256) { self.removes.push(key.clone()); }
fn get(&self, key: &H256, prefix: Prefix) -> Option<DBValue> { self.forward.get(key, prefix) }
fn contains(&self, key: &H256, prefix: Prefix) -> bool { self.forward.contains(key, prefix) }
fn insert(&mut self, prefix: Prefix, value: &[u8]) -> H256 { let r = self.forward.insert(prefix, value); self.inserts.push(r.clone()); r }
fn emplace(&mut self, key: H256, prefix: Prefix, value: DBValue) { self.inserts.push(key.clone()); self.forward.emplace(key, prefix, value); }
fn remove(&mut self, key: &H256, _prefix: Prefix) { self.removes.push(key.clone()); }
}
impl ::traits::KeyedHashDB for RefCountedDB {
@@ -105,7 +104,8 @@ impl JournalDB for RefCountedDB {
}
fn mem_used(&self) -> usize {
self.inserts.heap_size_of_children() + self.removes.heap_size_of_children()
let mut ops = new_malloc_size_ops();
self.inserts.size_of(&mut ops) + self.removes.size_of(&mut ops)
}
fn is_empty(&self) -> bool {
@@ -184,7 +184,7 @@ impl JournalDB for RefCountedDB {
}.expect("rlp read from db; qed");
trace!(target: "rcdb", "delete journal for time #{}.{}=>{}, (canon was {}): deleting {:?}", end_era, db_key.index, our_id, canon_id, to_remove);
for i in &to_remove {
self.forward.remove(i);
self.forward.remove(i, EMPTY_PREFIX);
}
batch.delete(self.column, &last);
db_key.index += 1;
@@ -197,19 +197,19 @@ impl JournalDB for RefCountedDB {
fn inject(&mut self, batch: &mut DBTransaction) -> io::Result<u32> {
self.inserts.clear();
for remove in self.removes.drain(..) {
self.forward.remove(&remove);
self.forward.remove(&remove, EMPTY_PREFIX);
}
self.forward.commit_to_batch(batch)
}
fn consolidate(&mut self, mut with: MemoryDB<KeccakHasher, DBValue>) {
fn consolidate(&mut self, mut with: super::MemoryDB) {
for (key, (value, rc)) in with.drain() {
for _ in 0..rc {
self.emplace(key, value.clone());
self.emplace(key, EMPTY_PREFIX, value.clone());
}
for _ in rc..0 {
self.remove(&key);
self.remove(&key, EMPTY_PREFIX);
}
}
}
@@ -219,7 +219,7 @@ impl JournalDB for RefCountedDB {
mod tests {
use keccak::keccak;
use hash_db::HashDB;
use hash_db::{HashDB, EMPTY_PREFIX};
use super::*;
use {JournalDB, kvdb_memorydb};
@@ -232,18 +232,18 @@ mod tests {
fn long_history() {
// history is 3
let mut jdb = new_db();
let h = jdb.insert(b"foo");
let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&h));
jdb.remove(&h);
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(3, &keccak(b"3"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&h));
assert!(jdb.contains(&h, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((1, keccak(b"1")))).unwrap();
assert!(!jdb.contains(&h));
assert!(!jdb.contains(&h, EMPTY_PREFIX));
}
#[test]
@@ -251,10 +251,10 @@ mod tests {
// history is 3
let mut jdb = new_db();
assert_eq!(jdb.latest_era(), None);
let h = jdb.insert(b"foo");
let h = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert_eq!(jdb.latest_era(), Some(0));
jdb.remove(&h);
jdb.remove(&h, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1"), None).unwrap();
assert_eq!(jdb.latest_era(), Some(1));
jdb.commit_batch(2, &keccak(b"2"), None).unwrap();
@@ -270,37 +270,37 @@ mod tests {
// history is 1
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&bar);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
jdb.remove(&bar, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
let foo = jdb.insert(b"foo");
jdb.remove(&baz);
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
jdb.remove(&baz, EMPTY_PREFIX);
jdb.commit_batch(2, &keccak(b"2"), Some((1, keccak(b"1")))).unwrap();
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.remove(&foo);
jdb.remove(&foo, EMPTY_PREFIX);
jdb.commit_batch(3, &keccak(b"3"), Some((2, keccak(b"2")))).unwrap();
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(4, &keccak(b"4"), Some((3, keccak(b"3")))).unwrap();
assert!(!jdb.contains(&foo));
assert!(!jdb.contains(&bar));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
}
#[test]
@@ -308,39 +308,39 @@ mod tests {
// history is 1
let mut jdb = new_db();
let foo = jdb.insert(b"foo");
let bar = jdb.insert(b"bar");
let foo = jdb.insert(EMPTY_PREFIX, b"foo");
let bar = jdb.insert(EMPTY_PREFIX, b"bar");
jdb.commit_batch(0, &keccak(b"0"), None).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
jdb.remove(&foo);
let baz = jdb.insert(b"baz");
jdb.remove(&foo, EMPTY_PREFIX);
let baz = jdb.insert(EMPTY_PREFIX, b"baz");
jdb.commit_batch(1, &keccak(b"1a"), Some((0, keccak(b"0")))).unwrap();
jdb.remove(&bar);
jdb.remove(&bar, EMPTY_PREFIX);
jdb.commit_batch(1, &keccak(b"1b"), Some((0, keccak(b"0")))).unwrap();
assert!(jdb.contains(&foo));
assert!(jdb.contains(&bar));
assert!(jdb.contains(&baz));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(jdb.contains(&bar, EMPTY_PREFIX));
assert!(jdb.contains(&baz, EMPTY_PREFIX));
jdb.commit_batch(2, &keccak(b"2b"), Some((1, keccak(b"1b")))).unwrap();
assert!(jdb.contains(&foo));
assert!(!jdb.contains(&baz));
assert!(!jdb.contains(&bar));
assert!(jdb.contains(&foo, EMPTY_PREFIX));
assert!(!jdb.contains(&baz, EMPTY_PREFIX));
assert!(!jdb.contains(&bar, EMPTY_PREFIX));
}
#[test]
fn inject() {
let mut jdb = new_db();
let key = jdb.insert(b"dog");
let key = jdb.insert(EMPTY_PREFIX, b"dog");
jdb.inject_batch().unwrap();
assert_eq!(jdb.get(&key).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key);
assert_eq!(jdb.get(&key, EMPTY_PREFIX).unwrap(), DBValue::from_slice(b"dog"));
jdb.remove(&key, EMPTY_PREFIX);
jdb.inject_batch().unwrap();
assert!(jdb.get(&key).is_none());
assert!(jdb.get(&key, EMPTY_PREFIX).is_none());
}
}
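Note: the mem_used hunk above shows the core substitution of this change: heapsize's heap_size_of_children gives way to parity-util-mem's MallocSizeOf, driven by an ops context from new_malloc_size_ops(). A minimal sketch of the pattern under the same imports as the file above; the struct and method are illustrative stand-ins for RefCountedDB's fields, and it assumes H256: MallocSizeOf as provided in this workspace:

    use ethereum_types::H256;
    use parity_util_mem::{MallocSizeOf, allocators::new_malloc_size_ops};

    struct Journal { inserts: Vec<H256>, removes: Vec<H256> }

    impl Journal {
        fn mem_used(&self) -> usize {
            // `ops` wraps the allocator's usable-size query; create one context per measurement.
            let mut ops = new_malloc_size_ops();
            self.inserts.size_of(&mut ops) + self.removes.size_of(&mut ops)
        }
    }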

View File

@@ -93,7 +93,7 @@ pub trait JournalDB: KeyedHashDB {
fn flush(&self) {}
/// Consolidate all the insertions and deletions in the given memory overlay.
fn consolidate(&mut self, overlay: ::memory_db::MemoryDB<KeccakHasher, DBValue>);
fn consolidate(&mut self, overlay: super::MemoryDB);
/// Commit all changes in a single batch
#[cfg(test)]
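Note: consolidate() now takes super::MemoryDB, a crate-level alias added so that the generic memory-db type (which gained a key-function parameter in 0.12) is spelled out once. The alias itself lives in the crate root and is not shown in this excerpt; it presumably looks roughly like the following (type parameters are an assumption based on memory-db 0.12):

    // journaldb ignores trie positions, so a plain hash-keyed MemoryDB would fit.
    pub type MemoryDB = memory_db::MemoryDB<
        keccak_hasher::KeccakHasher,
        memory_db::HashKey<keccak_hasher::KeccakHasher>,
        kvdb::DBValue,
    >;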

View File

@@ -8,5 +8,5 @@ license = "GPL-3.0"
[dependencies]
ethereum-types = "0.6.0"
tiny-keccak = "1.4.2"
hash-db = "0.11.0"
hash-db = "0.12.4"
plain_hasher = "0.2"

View File

@@ -6,5 +6,5 @@ description = "An LRU-cache which operates on memory used"
license = "GPL3"
[dependencies]
heapsize = "0.4"
parity-util-mem = "0.1"
lru-cache = "0.1"

View File

@@ -18,10 +18,10 @@
//! crate.
// TODO: push changes upstream in a clean way.
extern crate heapsize;
extern crate parity_util_mem;
extern crate lru_cache;
use heapsize::HeapSizeOf;
use parity_util_mem::{MallocSizeOf, MallocSizeOfExt};
use lru_cache::LruCache;
use std::hash::Hash;
@@ -29,18 +29,18 @@ use std::hash::Hash;
const INITIAL_CAPACITY: usize = 4;
/// An LRU-cache which operates on memory used.
pub struct MemoryLruCache<K: Eq + Hash, V: HeapSizeOf> {
pub struct MemoryLruCache<K: Eq + Hash, V> {
inner: LruCache<K, V>,
cur_size: usize,
max_size: usize,
}
// amount of memory used when the item will be put on the heap.
fn heap_size_of<T: HeapSizeOf>(val: &T) -> usize {
::std::mem::size_of::<T>() + val.heap_size_of_children()
fn heap_size_of<T: MallocSizeOf>(val: &T) -> usize {
::std::mem::size_of::<T>() + val.malloc_size_of()
}
impl<K: Eq + Hash, V: HeapSizeOf> MemoryLruCache<K, V> {
impl<K: Eq + Hash, V: MallocSizeOf> MemoryLruCache<K, V> {
/// Create a new cache with a maximum size in bytes.
pub fn new(max_size: usize) -> Self {
MemoryLruCache {
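Note: the cache keeps its byte budget; only the measurement changes. The V: HeapSizeOf bound becomes V: MallocSizeOf, and heap_size_of() above now adds the value's shallow size to its malloc-reported heap children. Roughly, using the helper exactly as defined above on a 64-bit target (numbers are approximate; the allocator may round usable size up):

    // heap_size_of = shallow struct size + malloc-measured allocations behind it.
    let v: Vec<u8> = vec![0u8; 4096];
    let cost = heap_size_of(&v);   // ~ 24 bytes (3 words for the Vec header) + 4096 bytes of buffer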

View File

@@ -1,10 +0,0 @@
[package]
name = "memzero"
version = "0.1.0"
description = "A wrapper for zero-ing out memory when dropped"
license = "GPL-3.0"
homepage = "https://parity.io"
repository = "https://github.com/paritytech/parity-ethereum"
documentation = "https://docs.rs/crate/memzero"
authors = ["Parity Technologies <admin@parity.io>"]
edition = "2018"

View File

@@ -1,54 +0,0 @@
// Copyright 2015-2019 Parity Technologies (UK) Ltd.
// This file is part of Parity Ethereum.
// Parity Ethereum is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity Ethereum is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity Ethereum. If not, see <http://www.gnu.org/licenses/>.
use std::ops::{Deref, DerefMut};
use std::ptr;
/// Wrapper to zero out memory when dropped.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Memzero<T: AsMut<[u8]>> {
mem: T,
}
impl<T: AsMut<[u8]>> From<T> for Memzero<T> {
fn from(mem: T) -> Memzero<T> {
Memzero { mem }
}
}
impl<T: AsMut<[u8]>> Drop for Memzero<T> {
fn drop(&mut self) {
unsafe {
for byte_ref in self.mem.as_mut() {
ptr::write_volatile(byte_ref, 0)
}
}
}
}
impl<T: AsMut<[u8]>> Deref for Memzero<T> {
type Target = T;
fn deref(&self) -> &Self::Target {
&self.mem
}
}
impl<T: AsMut<[u8]>> DerefMut for Memzero<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.mem
}
}
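Note: the standalone memzero crate is deleted outright; its only export moves to parity-util-mem, which the whisper hunks below now import. A minimal usage sketch, assuming the re-exported Memzero keeps the From/Deref surface of the wrapper deleted above:

    use parity_util_mem::Memzero;

    // Wrap key material; the underlying bytes are overwritten with zeroes on drop.
    let secret = Memzero::from(vec![0u8; 32]);
    assert_eq!(secret.len(), 32);   // Deref exposes the inner Vec<u8>
    drop(secret);                   // volatile writes zero the buffer before it is freed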

View File

@@ -6,15 +6,15 @@ description = "Merkle-Patricia Trie (Ethereum Style)"
license = "GPL-3.0"
[dependencies]
trie-db = "0.11.0"
trie-db = "0.12.4"
keccak-hasher = { version = "0.1.1", path = "../keccak-hasher" }
hash-db = "0.11.0"
hash-db = "0.12.4"
rlp = "0.4.0"
parity-bytes = "0.1"
ethereum-types = "0.6.0"
elastic-array = "0.10"
[dev-dependencies]
memory-db = "0.11.0"
memory-db = "0.12.4"
keccak-hash = "0.2.0"
journaldb = { path = "../journaldb" }

View File

@@ -6,6 +6,6 @@ description = "Trie-root helpers, ethereum style"
license = "GPL-3.0"
[dependencies]
triehash = "0.5.0"
triehash = "0.6.0"
ethereum-types = "0.6.0"
keccak-hasher = { path = "../keccak-hasher" }

View File

@@ -13,7 +13,7 @@ ring = "0.14.6"
ethkey = { path = "../accounts/ethkey" }
hex = "0.2"
log = "0.4"
memzero = { path = "../util/memzero" }
parity-util-mem = "0.1"
ordered-float = "0.5"
parking_lot = "0.7"
rand = "0.6"

View File

@@ -24,7 +24,7 @@ extern crate ethcore_network as network;
extern crate ethereum_types;
extern crate ethkey;
extern crate hex;
extern crate memzero;
extern crate parity_util_mem;
extern crate ordered_float;
extern crate parking_lot;
extern crate rand;

View File

@@ -20,7 +20,7 @@ use aes_gcm::{Encryptor, Decryptor};
use ethkey::crypto::ecies;
use ethereum_types::H256;
use ethkey::{self, Public, Secret};
use memzero::Memzero;
use parity_util_mem::Memzero;
/// Length of AES key
pub const AES_KEY_LEN: usize = 32;

View File

@@ -23,7 +23,7 @@ use std::collections::HashMap;
use ethereum_types::H256;
use ethkey::{KeyPair, Public, Secret};
use memzero::Memzero;
use parity_util_mem::Memzero;
use rand::{Rng, rngs::OsRng};
use rpc::crypto::{AES_KEY_LEN, EncryptionInstance, DecryptionInstance};

View File

@@ -28,7 +28,7 @@ use jsonrpc_derive::rpc;
use jsonrpc_pubsub::{Session, PubSubMetadata, SubscriptionId, typed::Subscriber};
use ethereum_types::H256;
use memzero::Memzero;
use parity_util_mem::Memzero;
use parking_lot::RwLock;
use self::filter::Filter;