Make HashDB generic (#8739)
Makes the `patricia_trie` crate generic over the hasher (by way of `HashDB`) and over the node encoding scheme, and adds a new `patricia-trie-ethereum` crate with concrete implementations for Keccak/RLP.
This commit is contained in:
parent 202c54d423
commit 9caa868603
Cargo.lock (generated): 161
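The shape of the change is easiest to see in the signatures touched below: code that used to take a bare `HashDB` now names its hasher explicitly, and the Keccak/RLP-specific trie types move behind the new `patricia-trie-ethereum` crate (imported as `ethtrie` throughout). A condensed sketch assembled from hunks in this commit; it is illustrative, not a file from the tree:

// --- illustrative sketch condensed from the hunks in this commit ---
use ethereum_types::H256;
use hashdb::HashDB;
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;

// Before: pub struct CHT<DB: HashDB> { ... }  (Keccak was implicit everywhere)
// After: the hasher is an explicit type parameter.
pub struct CHT<DB: HashDB<KeccakHasher>> {
    db: DB,
    root: H256,
    number: u64,
}

// In-memory databases pick their hasher at construction time.
fn fresh_db() -> MemoryDB<KeccakHasher> {
    MemoryDB::<KeccakHasher>::new()
}

// Concrete Keccak/RLP trie types now come from patricia-trie-ethereum:
//     use ethtrie::{TrieDB, TrieDBMut, SecTrieDB, SecTrieDBMut, RlpCodec};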
@ -388,7 +388,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"app_dirs 1.2.1 (git+https://github.com/paritytech/app-dirs-rs)",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"journaldb 0.1.0",
|
||||
"journaldb 0.2.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -435,6 +435,18 @@ dependencies = [
|
||||
"regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.5.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"atty 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "error-chain"
|
||||
version = "0.11.0"
|
||||
@ -548,11 +560,12 @@ dependencies = [
|
||||
"fake-hardware-wallet 0.0.1",
|
||||
"fetch 0.1.0",
|
||||
"hardware-wallet 1.12.0",
|
||||
"hashdb 0.1.1",
|
||||
"hashdb 0.2.0",
|
||||
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"journaldb 0.1.0",
|
||||
"journaldb 0.2.0",
|
||||
"keccak-hash 0.1.2",
|
||||
"keccak-hasher 0.1.0",
|
||||
"kvdb 0.1.0",
|
||||
"kvdb-memorydb 0.1.0",
|
||||
"kvdb-rocksdb 0.1.0",
|
||||
@ -561,12 +574,13 @@ dependencies = [
|
||||
"lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"macros 0.1.0",
|
||||
"memory-cache 0.1.0",
|
||||
"memorydb 0.1.1",
|
||||
"memorydb 0.2.0",
|
||||
"num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parity-machine 0.1.0",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"patricia-trie 0.1.0",
|
||||
"patricia-trie 0.2.0",
|
||||
"patricia-trie-ethereum 0.1.0",
|
||||
"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rlp 0.2.1",
|
||||
@ -641,18 +655,20 @@ dependencies = [
|
||||
"ethcore-transaction 0.1.0",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"hashdb 0.1.1",
|
||||
"hashdb 0.2.0",
|
||||
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"keccak-hash 0.1.2",
|
||||
"keccak-hasher 0.1.0",
|
||||
"kvdb 0.1.0",
|
||||
"kvdb-memorydb 0.1.0",
|
||||
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memory-cache 0.1.0",
|
||||
"memorydb 0.1.1",
|
||||
"memorydb 0.2.0",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"patricia-trie 0.1.0",
|
||||
"plain_hasher 0.1.0",
|
||||
"patricia-trie 0.2.0",
|
||||
"patricia-trie-ethereum 0.1.0",
|
||||
"plain_hasher 0.2.0",
|
||||
"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rlp 0.2.1",
|
||||
"rlp_derive 0.1.0",
|
||||
@ -784,7 +800,8 @@ dependencies = [
|
||||
"keccak-hash 0.1.2",
|
||||
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"patricia-trie 0.1.0",
|
||||
"patricia-trie 0.2.0",
|
||||
"patricia-trie-ethereum 0.1.0",
|
||||
"rand 0.3.20 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rlp 0.2.1",
|
||||
"rlp_derive 0.1.0",
|
||||
@ -885,15 +902,17 @@ dependencies = [
|
||||
"ethcore-transaction 0.1.0",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethkey 0.3.0",
|
||||
"hashdb 0.2.0",
|
||||
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ipnetwork 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"keccak-hash 0.1.2",
|
||||
"keccak-hasher 0.1.0",
|
||||
"kvdb 0.1.0",
|
||||
"kvdb-memorydb 0.1.0",
|
||||
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"macros 0.1.0",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"plain_hasher 0.1.0",
|
||||
"plain_hasher 0.2.0",
|
||||
"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rlp 0.2.1",
|
||||
"rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -1228,10 +1247,10 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hashdb"
|
||||
version = "0.1.1"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -1269,6 +1288,14 @@ name = "httparse"
|
||||
version = "1.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "humantime"
|
||||
version = "1.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper"
|
||||
version = "0.11.24"
|
||||
@ -1375,20 +1402,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "journaldb"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"ethcore-bytes 0.1.0",
|
||||
"ethcore-logger 1.12.0",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"hashdb 0.1.1",
|
||||
"hashdb 0.2.0",
|
||||
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"keccak-hash 0.1.2",
|
||||
"keccak-hasher 0.1.0",
|
||||
"kvdb 0.1.0",
|
||||
"kvdb-memorydb 0.1.0",
|
||||
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memorydb 0.1.1",
|
||||
"memorydb 0.2.0",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"plain_hasher 0.1.0",
|
||||
"plain_hasher 0.2.0",
|
||||
"rlp 0.2.1",
|
||||
"util-error 0.1.0",
|
||||
]
|
||||
@ -1498,6 +1526,16 @@ dependencies = [
|
||||
"tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "keccak-hasher"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"hashdb 0.2.0",
|
||||
"plain_hasher 0.2.0",
|
||||
"tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "kernel32-sys"
|
||||
version = "0.2.2"
|
||||
@ -1678,15 +1716,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "memorydb"
|
||||
version = "0.1.1"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"hashdb 0.1.1",
|
||||
"hashdb 0.2.0",
|
||||
"heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"keccak-hash 0.1.2",
|
||||
"plain_hasher 0.1.0",
|
||||
"keccak-hasher 0.1.0",
|
||||
"plain_hasher 0.2.0",
|
||||
"rlp 0.2.1",
|
||||
"tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -2035,7 +2075,7 @@ dependencies = [
|
||||
"futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ipnetwork 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"journaldb 0.1.0",
|
||||
"journaldb 0.2.0",
|
||||
"jsonrpc-core 8.0.1 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)",
|
||||
"keccak-hash 0.1.2",
|
||||
"kvdb 0.1.0",
|
||||
@ -2254,7 +2294,7 @@ dependencies = [
|
||||
"parity-updater 1.12.0",
|
||||
"parity-version 1.12.0",
|
||||
"parking_lot 0.5.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"patricia-trie 0.1.0",
|
||||
"patricia-trie 0.2.0",
|
||||
"pretty_assertions 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rlp 0.2.1",
|
||||
@ -2421,22 +2461,37 @@ version = "0.1.0"
|
||||
|
||||
[[package]]
|
||||
name = "patricia-trie"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethcore-bytes 0.1.0",
|
||||
"ethcore-logger 1.12.0",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"hashdb 0.1.1",
|
||||
"hashdb 0.2.0",
|
||||
"keccak-hash 0.1.2",
|
||||
"keccak-hasher 0.1.0",
|
||||
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memorydb 0.1.1",
|
||||
"memorydb 0.2.0",
|
||||
"patricia-trie-ethereum 0.1.0",
|
||||
"rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rlp 0.2.1",
|
||||
"trie-standardmap 0.1.0",
|
||||
"triehash 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "patricia-trie-ethereum"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethcore-bytes 0.1.0",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"hashdb 0.2.0",
|
||||
"keccak-hasher 0.1.0",
|
||||
"patricia-trie 0.2.0",
|
||||
"rlp 0.2.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "percent-encoding"
|
||||
version = "1.0.0"
|
||||
@ -2488,10 +2543,11 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "plain_hasher"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"crunchy 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"hashdb 0.2.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -2720,11 +2776,31 @@ dependencies = [
|
||||
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "registrar"
|
||||
version = "0.0.1"
|
||||
@ -3173,6 +3249,14 @@ dependencies = [
|
||||
"winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termcolor"
|
||||
version = "0.3.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "termion"
|
||||
version = "1.5.1"
|
||||
@ -3505,6 +3589,11 @@ dependencies = [
|
||||
"trie-standardmap 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ucd-util"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "uint"
|
||||
version = "0.2.1"
|
||||
@ -3649,7 +3738,8 @@ dependencies = [
|
||||
"ethjson 0.1.0",
|
||||
"keccak-hash 0.1.2",
|
||||
"log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"patricia-trie 0.1.0",
|
||||
"patricia-trie 0.2.0",
|
||||
"patricia-trie-ethereum 0.1.0",
|
||||
"rlp 0.2.1",
|
||||
]
|
||||
|
||||
@ -3749,6 +3839,14 @@ name = "winapi-x86_64-pc-windows-gnu"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "wincolor"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ws"
|
||||
version = "0.7.5"
|
||||
@ -3858,6 +3956,7 @@ dependencies = [
|
||||
"checksum either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "740178ddf48b1a9e878e6d6509a1442a2d42fd2928aae8e7a6f8a36fb01981b3"
|
||||
"checksum elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "88d4851b005ef16de812ea9acdb7bece2f0a40dd86c07b85631d7dafa54537bb"
|
||||
"checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b"
|
||||
"checksum env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0e6e40ebb0e66918a37b38c7acab4e10d299e0463fe2af5d29b9cc86710cfd2a"
|
||||
"checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3"
|
||||
"checksum error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "07e791d3be96241c77c43846b665ef1384606da2cd2a48730abe606a12906e02"
|
||||
"checksum eth-secp256k1 0.5.7 (git+https://github.com/paritytech/rust-secp256k1)" = "<none>"
|
||||
@ -3888,6 +3987,7 @@ dependencies = [
|
||||
"checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa"
|
||||
"checksum hidapi 0.3.1 (git+https://github.com/paritytech/hidapi-rs)" = "<none>"
|
||||
"checksum httparse 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "af2f2dd97457e8fb1ae7c5a420db346af389926e36f43768b96f101546b04a07"
|
||||
"checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e"
|
||||
"checksum hyper 0.11.24 (registry+https://github.com/rust-lang/crates.io-index)" = "df4dd5dae401458087396b6db7fabc4d6760aa456a5fa8e92bda549f39cae661"
|
||||
"checksum hyper-rustls 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d6cdc1751771a14b8175764394f025e309a28c825ed9eaf97fa62bb831dc8c5"
|
||||
"checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d"
|
||||
@ -3988,7 +4088,9 @@ dependencies = [
|
||||
"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
|
||||
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
|
||||
"checksum regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "744554e01ccbd98fff8c457c3b092cd67af62a555a43bfe97ae8a0451f7799fa"
|
||||
"checksum regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13c93d55961981ba9226a213b385216f83ab43bd6ac53ab16b2eeb47e337cf4e"
|
||||
"checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db"
|
||||
"checksum regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05b06a75f5217880fc5e905952a42750bf44787e56a6c6d6852ed0992f5e1d54"
|
||||
"checksum relay 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1576e382688d7e9deecea24417e350d3062d97e32e45d70b1cde65994ff1489a"
|
||||
"checksum ring 0.12.1 (git+https://github.com/paritytech/ring)" = "<none>"
|
||||
"checksum rocksdb 0.4.5 (git+https://github.com/paritytech/rust-rocksdb)" = "<none>"
|
||||
@ -4038,6 +4140,7 @@ dependencies = [
|
||||
"checksum tempfile 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "11ce2fe9db64b842314052e2421ac61a73ce41b898dc8e3750398b219c5fc1e0"
|
||||
"checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1"
|
||||
"checksum term_size 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9e5b9a66db815dcfd2da92db471106457082577c3c278d4138ab3e3b4e189327"
|
||||
"checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83"
|
||||
"checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
|
||||
"checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693"
|
||||
"checksum thread-id 3.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c7fbf4c9d56b320106cd64fd024dadfa0be7cb4706725fc44a7d7ce952d820c1"
|
||||
@ -4065,6 +4168,7 @@ dependencies = [
|
||||
"checksum toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a7540f4ffc193e0d3c94121edb19b055670d369f77d5804db11ae053a45b6e7e"
|
||||
"checksum transient-hashmap 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "715254c8f0811be1a79ad3ea5e6fa3c8eddec2b03d7f5ba78cf093e56d79c24f"
|
||||
"checksum trezor-sys 1.0.0 (git+https://github.com/paritytech/trezor-sys)" = "<none>"
|
||||
"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d"
|
||||
"checksum uint 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "38051a96565903d81c9a9210ce11076b2218f3b352926baa1f5f6abbdfce8273"
|
||||
"checksum unicase 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33"
|
||||
"checksum unicase 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284b6d3db520d67fbe88fd778c21510d1b0ba4a551e5d0fbb023d33405f6de8a"
|
||||
@ -4091,6 +4195,7 @@ dependencies = [
|
||||
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
|
||||
"checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
|
||||
"checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
|
||||
"checksum wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb06499a3a4d44302791052df005d5232b927ed1a9658146d842165c4de7767"
|
||||
"checksum ws 0.7.5 (git+https://github.com/tomusdrw/ws-rs)" = "<none>"
|
||||
"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
|
||||
"checksum xdg 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a66b7c2281ebde13cf4391d70d4c7e5946c3c25e72a7b859ca8f677dcd0b0c61"
|
||||
|
@@ -133,6 +133,8 @@ members = [
"transaction-pool",
"whisper",
"whisper/cli",
"util/keccak-hasher",
"util/patricia-trie-ethereum",
]

[patch.crates-io]
@@ -20,6 +20,7 @@ fetch = { path = "../util/fetch" }
hashdb = { path = "../util/hashdb" }
memorydb = { path = "../util/memorydb" }
patricia-trie = { path = "../util/patricia_trie" }
patricia-trie-ethereum = { path = "../util/patricia-trie-ethereum" }
ethcore-crypto = { path = "crypto" }
error-chain = { version = "0.12", default-features = false }
ethcore-io = { path = "../util/io" }
@@ -66,8 +67,9 @@ keccak-hash = { path = "../util/hash" }
triehash = { path = "../util/triehash" }
unexpected = { path = "../util/unexpected" }
journaldb = { path = "../util/journaldb" }
tempdir = { version = "0.3", optional = true }
keccak-hasher = { path = "../util/keccak-hasher" }
kvdb-rocksdb = { path = "../util/kvdb-rocksdb" }
tempdir = {version="0.3", optional = true}

[target.'cfg(any(target_os = "linux", target_os = "macos", target_os = "windows", target_os = "android"))'.dependencies]
hardware-wallet = { path = "../hw" }
@@ -76,8 +78,8 @@ hardware-wallet = { path = "../hw" }
fake-hardware-wallet = { path = "../util/fake-hardware-wallet" }

[dev-dependencies]
trie-standardmap = { path = "../util/trie-standardmap" }
tempdir = "0.3"
trie-standardmap = { path = "../util/trie-standardmap" }

[features]
# Display EVM debug traces.
@@ -14,6 +14,7 @@ ethcore-transaction = { path = "../transaction" }
ethereum-types = "0.3"
memorydb = { path = "../../util/memorydb" }
patricia-trie = { path = "../../util/patricia_trie" }
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
ethcore-network = { path = "../../util/network" }
ethcore-io = { path = "../../util/io" }
hashdb = { path = "../../util/hashdb" }
@@ -32,6 +33,7 @@ serde_derive = "1.0"
parking_lot = "0.5"
stats = { path = "../../util/stats" }
keccak-hash = { path = "../../util/hash" }
keccak-hasher = { path = "../../util/keccak-hasher" }
triehash = { path = "../../util/triehash" }
kvdb = { path = "../../util/kvdb" }
memory-cache = { path = "../../util/memory_cache" }
@@ -26,9 +26,11 @@
use ethcore::ids::BlockId;
use ethereum_types::{H256, U256};
use hashdb::HashDB;
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use bytes::Bytes;
use trie::{self, TrieMut, TrieDBMut, Trie, TrieDB, Recorder};
use trie::{TrieMut, Trie, Recorder};
use ethtrie::{self, TrieDB, TrieDBMut};
use rlp::{RlpStream, Rlp};

// encode a key.
@@ -50,13 +52,13 @@ pub const SIZE: u64 = 2048;
/// A canonical hash trie. This is generic over any database it can query.
/// See module docs for more details.
#[derive(Debug, Clone)]
pub struct CHT<DB: HashDB> {
pub struct CHT<DB: HashDB<KeccakHasher>> {
db: DB,
root: H256, // the root of this CHT.
number: u64,
}

impl<DB: HashDB> CHT<DB> {
impl<DB: HashDB<KeccakHasher>> CHT<DB> {
/// Query the root of the CHT.
pub fn root(&self) -> H256 { self.root }

@@ -66,7 +68,7 @@ impl<DB: HashDB> CHT<DB> {
/// Generate an inclusion proof for the entry at a specific block.
/// Nodes before level `from_level` will be omitted.
/// Returns an error on an incomplete trie, and `Ok(None)` on an unprovable request.
pub fn prove(&self, num: u64, from_level: u32) -> trie::Result<Option<Vec<Bytes>>> {
pub fn prove(&self, num: u64, from_level: u32) -> ethtrie::Result<Option<Vec<Bytes>>> {
if block_to_cht_number(num) != Some(self.number) { return Ok(None) }

let mut recorder = Recorder::with_depth(from_level);
@@ -90,10 +92,10 @@ pub struct BlockInfo {
/// Build an in-memory CHT from a closure which provides necessary information
/// about blocks. If the fetcher ever fails to provide the info, the CHT
/// will not be generated.
pub fn build<F>(cht_num: u64, mut fetcher: F) -> Option<CHT<MemoryDB>>
pub fn build<F>(cht_num: u64, mut fetcher: F) -> Option<CHT<MemoryDB<KeccakHasher>>>
where F: FnMut(BlockId) -> Option<BlockInfo>
{
let mut db = MemoryDB::new();
let mut db = MemoryDB::<KeccakHasher>::new();

// start from the last block by number and work backwards.
let last_num = start_number(cht_num + 1) - 1;
@@ -147,7 +149,7 @@ pub fn compute_root<I>(cht_num: u64, iterable: I) -> Option<H256>
/// verify the given trie branch and extract the canonical hash and total difficulty.
// TODO: better support for partially-checked queries.
pub fn check_proof(proof: &[Bytes], num: u64, root: H256) -> Option<(H256, U256)> {
let mut db = MemoryDB::new();
let mut db = MemoryDB::<KeccakHasher>::new();

for node in proof { db.insert(&node[..]); }
let res = match TrieDB::new(&db, &root) {
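Put together, the rewritten `build`/`check_proof` path above works on a keccak-keyed in-memory database and the `ethtrie` trie types. A hedged sketch of the same round trip (constructor shapes are assumed from the calls visible in this hunk; the key and value are made up):

// --- illustrative sketch, not part of the commit ---
use ethereum_types::H256;
use ethtrie::{TrieDB, TrieDBMut};
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use trie::{Trie, TrieMut};

fn roundtrip() -> ethtrie::Result<Option<Vec<u8>>> {
    let mut db = MemoryDB::<KeccakHasher>::new();
    let mut root = H256::default();
    {
        // TrieDBMut comes from ethtrie, so Keccak and RLP are already fixed.
        let mut t = TrieDBMut::new(&mut db, &mut root);
        t.insert(b"key1", b"value1")?;
    }
    // Read back through the immutable view, as check_proof does above.
    let t = TrieDB::new(&db, &root)?;
    Ok(t.get(b"key1")?.map(|value| value.to_vec()))
}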
@@ -28,28 +28,21 @@
use std::collections::BTreeMap;
use std::sync::Arc;

use cache::Cache;
use cht;

use ethcore::block_status::BlockStatus;
use ethcore::error::{Error, BlockImportError, BlockImportErrorKind, BlockError};
use ethcore::encoded;
use ethcore::engines::epoch::{Transition as EpochTransition, PendingTransition as PendingEpochTransition};
use ethcore::error::{Error, BlockImportError, BlockImportErrorKind, BlockError};
use ethcore::header::Header;
use ethcore::ids::BlockId;
use ethcore::spec::{Spec, SpecHardcodedSync};
use ethcore::engines::epoch::{
Transition as EpochTransition,
PendingTransition as PendingEpochTransition
};

use rlp::{Encodable, Decodable, DecoderError, RlpStream, Rlp};
use heapsize::HeapSizeOf;
use ethereum_types::{H256, H264, U256};
use plain_hasher::H256FastMap;
use heapsize::HeapSizeOf;
use kvdb::{DBTransaction, KeyValueDB};

use cache::Cache;
use parking_lot::{Mutex, RwLock};

use plain_hasher::H256FastMap;
use rlp::{Encodable, Decodable, DecoderError, RlpStream, Rlp};
use smallvec::SmallVec;

/// Store at least this many candidate headers at all times.
@@ -64,8 +64,10 @@ extern crate hashdb;
extern crate heapsize;
extern crate futures;
extern crate itertools;
extern crate keccak_hasher;
extern crate memorydb;
extern crate patricia_trie as trie;
extern crate patricia_trie_ethereum as ethtrie;
extern crate plain_hasher;
extern crate rand;
extern crate rlp;
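Downstream crates keep both imports side by side: the generic traits stay under the `trie` alias while the Keccak/RLP-concrete types come from `ethtrie`. A small, hypothetical call site showing how the two aliases divide the work:

// --- hypothetical call site; not part of the commit ---
use trie::Trie;        // generic traits: Trie, TrieMut, Recorder, ...
use ethtrie::TrieDB;   // concrete Keccak/RLP types: TrieDB, TrieDBMut, ...
use ethereum_types::H256;
use hashdb::HashDB;
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;

fn read(db: &HashDB<KeccakHasher>, root: &H256, key: &[u8]) -> ethtrie::Result<Option<DBValue>> {
    let t = TrieDB::new(db, root)?; // the concrete type pins hasher and codec
    t.get(key)                      // the generic Trie trait provides the API
}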
@@ -20,22 +20,20 @@

use transaction::UnverifiedTransaction;

use io::TimerToken;
use network::{NetworkProtocolHandler, NetworkContext, PeerId};
use rlp::{RlpStream, Rlp};
use ethereum_types::{H256, U256};
use io::TimerToken;
use kvdb::DBValue;
use network::{NetworkProtocolHandler, NetworkContext, PeerId};
use parking_lot::{Mutex, RwLock};
use std::time::{Duration, Instant};

use std::collections::{HashMap, HashSet};
use std::fmt;
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::ops::{BitOr, BitAnd, Not};

use provider::Provider;
use request::{Request, NetworkRequests as Requests, Response};
use rlp::{RlpStream, Rlp};
use std::collections::{HashMap, HashSet};
use std::fmt;
use std::ops::{BitOr, BitAnd, Not};
use std::sync::Arc;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::time::{Duration, Instant};

use self::request_credits::{Credits, FlowParams};
use self::context::{Ctx, TickCtx};
@@ -16,8 +16,8 @@

//! Peer status and capabilities.

use rlp::{DecoderError, Encodable, Decodable, RlpStream, Rlp};
use ethereum_types::{H256, U256};
use rlp::{DecoderError, Encodable, Decodable, RlpStream, Rlp};

use super::request_credits::FlowParams;
@@ -19,20 +19,18 @@

use ethcore::blockchain_info::BlockChainInfo;
use ethcore::client::{EachBlockWith, TestBlockChainClient};
use ethcore::ids::BlockId;
use ethcore::encoded;
use network::{PeerId, NodeId};
use transaction::{Action, PendingTransaction};

use ethcore::ids::BlockId;
use ethereum_types::{H256, U256, Address};
use net::{LightProtocol, Params, packet, Peer};
use net::context::IoContext;
use net::status::{Capabilities, Status};
use net::{LightProtocol, Params, packet, Peer};
use network::{PeerId, NodeId};
use provider::Provider;
use request;
use request::*;

use rlp::{Rlp, RlpStream};
use ethereum_types::{H256, U256, Address};
use transaction::{Action, PendingTransaction};

use std::sync::Arc;
use std::time::Instant;
@@ -18,26 +18,25 @@

use std::sync::Arc;

use bytes::Bytes;
use ethcore::basic_account::BasicAccount;
use ethcore::encoded;
use ethcore::engines::{EthEngine, StateDependentProof};
use ethcore::machine::EthereumMachine;
use ethcore::receipt::Receipt;
use ethcore::state::{self, ProvedExecution};
use transaction::SignedTransaction;
use vm::EnvInfo;
use hash::{KECCAK_NULL_RLP, KECCAK_EMPTY, KECCAK_EMPTY_LIST_RLP, keccak};

use request::{self as net_request, IncompleteRequest, CompleteRequest, Output, OutputKind, Field};

use rlp::{RlpStream, Rlp};
use ethereum_types::{H256, U256, Address};
use parking_lot::Mutex;
use ethtrie::{TrieError, TrieDB};
use hash::{KECCAK_NULL_RLP, KECCAK_EMPTY, KECCAK_EMPTY_LIST_RLP, keccak};
use hashdb::HashDB;
use kvdb::DBValue;
use bytes::Bytes;
use memorydb::MemoryDB;
use trie::{Trie, TrieDB, TrieError};
use parking_lot::Mutex;
use request::{self as net_request, IncompleteRequest, CompleteRequest, Output, OutputKind, Field};
use rlp::{RlpStream, Rlp};
use transaction::SignedTransaction;
use trie::Trie;
use vm::EnvInfo;

const SUPPLIED_MATCHES: &'static str = "supplied responses always match produced requests; enforced by `check_response`; qed";

@@ -935,11 +934,12 @@ mod tests {
use ethereum_types::{H256, Address};
use memorydb::MemoryDB;
use parking_lot::Mutex;
use trie::{Trie, TrieMut, SecTrieDB, SecTrieDBMut};
use trie::recorder::Recorder;
use trie::{Trie, TrieMut};
use ethtrie::{SecTrieDB, SecTrieDBMut};
use trie::Recorder;
use hash::keccak;

use ethcore::client::{BlockChainClient, BlockInfo, TestBlockChainClient, EachBlockWith};
use ::ethcore::client::{BlockChainClient, BlockInfo, TestBlockChainClient, EachBlockWith};
use ethcore::header::Header;
use ethcore::encoded;
use ethcore::receipt::{Receipt, TransactionOutcome};
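The proof handling above follows the same recipe as `cht::check_proof`: replay the supplied proof nodes into a fresh `MemoryDB<KeccakHasher>` and read the claimed entry back through a `TrieDB`. A hedged sketch of that pattern (the function and its names are illustrative):

// --- illustrative sketch of the proof-replay pattern used above ---
use ethereum_types::H256;
use ethtrie::TrieDB;
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use trie::Trie;

/// Returns the proven value for `key`, or None if the proof does not cover it.
fn verify_proof(proof: &[Vec<u8>], root: &H256, key: &[u8]) -> Option<Vec<u8>> {
    let mut db = MemoryDB::<KeccakHasher>::new();
    for node in proof {
        db.insert(&node[..]); // keyed by keccak(node), thanks to the KeccakHasher parameter
    }
    let t = TrieDB::new(&db, root).ok()?;
    t.get(key).ok()?.map(|value| value.to_vec())
}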
@@ -26,6 +26,7 @@ keccak-hash = { path = "../../util/hash" }
log = "0.3"
parking_lot = "0.5"
patricia-trie = { path = "../../util/patricia_trie" }
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
rand = "0.3"
rlp = { path = "../../util/rlp" }
rlp_derive = { path = "../../util/rlp_derive" }
@@ -16,7 +16,7 @@

use ethereum_types::Address;
use rlp::DecoderError;
use trie::TrieError;
use ethtrie::TrieError;
use ethcore::account_provider::SignError;
use ethcore::error::{Error as EthcoreError, ExecutionError};
use transaction::Error as TransactionError;
@@ -40,6 +40,7 @@ extern crate futures;
extern crate keccak_hash as hash;
extern crate parking_lot;
extern crate patricia_trie as trie;
extern crate patricia_trie_ethereum as ethtrie;
extern crate rlp;
extern crate url;
extern crate rustc_hex;
@@ -15,12 +15,13 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! DB backend wrapper for Account trie
use std::collections::HashMap;
use hash::{KECCAK_NULL_RLP, keccak};
use ethereum_types::H256;
use hash::{KECCAK_NULL_RLP, keccak};
use hashdb::{HashDB, AsHashDB};
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;
use hashdb::HashDB;
use rlp::NULL_RLP;
use std::collections::HashMap;

#[cfg(test)]
use ethereum_types::Address;
@@ -44,7 +45,7 @@ fn combine_key<'a>(address_hash: &'a H256, key: &'a H256) -> H256 {
/// A factory for different kinds of account dbs.
#[derive(Debug, Clone)]
pub enum Factory {
/// Mangle hashes based on address.
/// Mangle hashes based on address. This is the default.
Mangled,
/// Don't mangle hashes.
Plain,
@@ -57,7 +58,7 @@ impl Default for Factory {
impl Factory {
/// Create a read-only accountdb.
/// This will panic when write operations are called.
pub fn readonly<'db>(&self, db: &'db HashDB, address_hash: H256) -> Box<HashDB + 'db> {
pub fn readonly<'db>(&self, db: &'db HashDB<KeccakHasher>, address_hash: H256) -> Box<HashDB<KeccakHasher> + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDB::from_hash(db, address_hash)),
Factory::Plain => Box::new(Wrapping(db)),
@@ -65,7 +66,7 @@ impl Factory {
}

/// Create a new mutable hashdb.
pub fn create<'db>(&self, db: &'db mut HashDB, address_hash: H256) -> Box<HashDB + 'db> {
pub fn create<'db>(&self, db: &'db mut HashDB<KeccakHasher>, address_hash: H256) -> Box<HashDB<KeccakHasher> + 'db> {
match *self {
Factory::Mangled => Box::new(AccountDBMut::from_hash(db, address_hash)),
Factory::Plain => Box::new(WrappingMut(db)),
@@ -77,19 +78,19 @@ impl Factory {
/// DB backend wrapper for Account trie
/// Transforms trie node keys for the database
pub struct AccountDB<'db> {
db: &'db HashDB,
db: &'db HashDB<KeccakHasher>,
address_hash: H256,
}

impl<'db> AccountDB<'db> {
/// Create a new AccountDB from an address.
#[cfg(test)]
pub fn new(db: &'db HashDB, address: &Address) -> Self {
pub fn new(db: &'db HashDB<KeccakHasher>, address: &Address) -> Self {
Self::from_hash(db, keccak(address))
}

/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db HashDB, address_hash: H256) -> Self {
pub fn from_hash(db: &'db HashDB<KeccakHasher>, address_hash: H256) -> Self {
AccountDB {
db: db,
address_hash: address_hash,
@@ -97,7 +98,12 @@ impl<'db> AccountDB<'db> {
}
}

impl<'db> HashDB for AccountDB<'db>{
impl<'db> AsHashDB<KeccakHasher> for AccountDB<'db> {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}

impl<'db> HashDB<KeccakHasher> for AccountDB<'db> {
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
@@ -131,19 +137,19 @@ impl<'db> HashDB for AccountDB<'db>{

/// DB backend wrapper for Account trie
pub struct AccountDBMut<'db> {
db: &'db mut HashDB,
db: &'db mut HashDB<KeccakHasher>,
address_hash: H256,
}

impl<'db> AccountDBMut<'db> {
/// Create a new AccountDB from an address.
#[cfg(test)]
pub fn new(db: &'db mut HashDB, address: &Address) -> Self {
pub fn new(db: &'db mut HashDB<KeccakHasher>, address: &Address) -> Self {
Self::from_hash(db, keccak(address))
}

/// Create a new AcountDB from an address' hash.
pub fn from_hash(db: &'db mut HashDB, address_hash: H256) -> Self {
pub fn from_hash(db: &'db mut HashDB<KeccakHasher>, address_hash: H256) -> Self {
AccountDBMut {
db: db,
address_hash: address_hash,
@@ -156,7 +162,7 @@ impl<'db> AccountDBMut<'db> {
}
}

impl<'db> HashDB for AccountDBMut<'db>{
impl<'db> HashDB<KeccakHasher> for AccountDBMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
@@ -202,9 +208,19 @@ impl<'db> HashDB for AccountDBMut<'db>{
}
}

struct Wrapping<'db>(&'db HashDB);
impl<'db> AsHashDB<KeccakHasher> for AccountDBMut<'db> {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}

impl<'db> HashDB for Wrapping<'db> {
struct Wrapping<'db>(&'db HashDB<KeccakHasher>);

impl<'db> AsHashDB<KeccakHasher> for Wrapping<'db> {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}

impl<'db> HashDB<KeccakHasher> for Wrapping<'db> {
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
@@ -236,9 +252,13 @@ impl<'db> HashDB for Wrapping<'db> {
}
}

struct WrappingMut<'db>(&'db mut HashDB);
struct WrappingMut<'db>(&'db mut HashDB<KeccakHasher>);
impl<'db> AsHashDB<KeccakHasher> for WrappingMut<'db> {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}

impl<'db> HashDB for WrappingMut<'db>{
impl<'db> HashDB<KeccakHasher> for WrappingMut<'db>{
fn keys(&self) -> HashMap<H256, i32> {
unimplemented!()
}
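With `HashDB` parameterised, the upcast helper gains the same parameter: each wrapper above implements `AsHashDB<KeccakHasher>` by returning itself, so callers that only want a `HashDB<KeccakHasher>` trait object keep working. A minimal sketch of a consumer of that bound (the function is hypothetical):

// --- hypothetical consumer of the AsHashDB<KeccakHasher> bound implemented above ---
use ethereum_types::H256;
use hashdb::AsHashDB;
use keccak_hasher::KeccakHasher;

fn contains_code(db: &AsHashDB<KeccakHasher>, code_hash: &H256) -> bool {
    // as_hashdb() upcasts AccountDB, Wrapping, WrappingMut, ... to a plain
    // HashDB<KeccakHasher> view without caring which wrapper it is.
    db.as_hashdb().contains(code_hash)
}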
@@ -17,28 +17,27 @@
//! Blockchain block.

use std::cmp;
use std::sync::Arc;
use std::collections::HashSet;
use hash::{keccak, KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP};
use triehash::ordered_trie_root;
use std::sync::Arc;

use rlp::{Rlp, RlpStream, Encodable, Decodable, DecoderError, encode_list};
use ethereum_types::{H256, U256, Address, Bloom};
use bytes::Bytes;
use unexpected::{Mismatch, OutOfBounds};

use vm::{EnvInfo, LastHashes};
use engines::EthEngine;
use error::{Error, BlockError};
use ethereum_types::{H256, U256, Address, Bloom};
use factory::Factories;
use hash::{keccak, KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP};
use header::{Header, ExtendedHeader};
use receipt::{Receipt, TransactionOutcome};
use state::State;
use rlp::{Rlp, RlpStream, Encodable, Decodable, DecoderError, encode_list};
use state_db::StateDB;
use state::State;
use trace::Tracing;
use transaction::{UnverifiedTransaction, SignedTransaction, Error as TransactionError};
use triehash::ordered_trie_root;
use unexpected::{Mismatch, OutOfBounds};
use verification::PreverifiedBlock;
use views::BlockView;
use vm::{EnvInfo, LastHashes};

/// A block, encoded as it is on the block chain.
#[derive(Default, Debug, Clone, PartialEq)]
@@ -17,37 +17,38 @@
//! Blockchain database.

use std::collections::{HashMap, HashSet};
use std::{mem, io};
use std::path::Path;
use std::sync::Arc;
use std::{mem, io};
use itertools::Itertools;
use blooms_db;
use heapsize::HeapSizeOf;
use ethereum_types::{H256, Bloom, BloomRef, U256};
use parking_lot::{Mutex, RwLock};
use bytes::Bytes;
use rlp::RlpStream;
use rlp_compress::{compress, decompress, blocks_swapper};
use header::*;
use transaction::*;
use views::{BlockView, HeaderView};
use log_entry::{LogEntry, LocalizedLogEntry};
use receipt::Receipt;

use ansi_term::Colour;
use blockchain::{CacheSize, ImportRoute, Config};
use blockchain::best_block::{BestBlock, BestAncientBlock};
use blockchain::block_info::{BlockInfo, BlockLocation, BranchBecomingCanonChainData};
use blockchain::extras::{BlockReceipts, BlockDetails, TransactionAddress, EPOCH_KEY_PREFIX, EpochTransitions};
use blockchain::update::{ExtrasUpdate, ExtrasInsert};
use blooms_db;
use bytes::Bytes;
use cache_manager::CacheManager;
use db::{self, Writable, Readable, CacheUpdatePolicy};
use encoded;
use engines::epoch::{Transition as EpochTransition, PendingTransition as PendingEpochTransition};
use engines::ForkChoice;
use ethereum_types::{H256, Bloom, BloomRef, U256};
use header::*;
use heapsize::HeapSizeOf;
use itertools::Itertools;
use kvdb::{DBTransaction, KeyValueDB};
use log_entry::{LogEntry, LocalizedLogEntry};
use parking_lot::{Mutex, RwLock};
use rayon::prelude::*;
use receipt::Receipt;
use rlp_compress::{compress, decompress, blocks_swapper};
use rlp::RlpStream;
use transaction::*;
use types::blockchain_info::BlockChainInfo;
use types::tree_route::TreeRoute;
use blockchain::update::{ExtrasUpdate, ExtrasInsert};
use blockchain::{CacheSize, ImportRoute, Config};
use db::{self, Writable, Readable, CacheUpdatePolicy};
use cache_manager::CacheManager;
use encoded;
use engines::ForkChoice;
use engines::epoch::{Transition as EpochTransition, PendingTransition as PendingEpochTransition};
use rayon::prelude::*;
use ansi_term::Colour;
use kvdb::{DBTransaction, KeyValueDB};
use views::{BlockView, HeaderView};

/// Database backing `BlockChain`.
pub trait BlockChainDB: Send + Sync {
@@ -16,18 +16,18 @@

//! Blockchain DB extras.

use std::ops;
use std::io::Write;
use std::ops;

use db::Key;
use engines::epoch::{Transition as EpochTransition};
use ethereum_types::{H256, H264, U256};
use header::BlockNumber;
use heapsize::HeapSizeOf;
use kvdb::PREFIX_LEN as DB_PREFIX_LEN;
use receipt::Receipt;
use rlp;

use heapsize::HeapSizeOf;
use ethereum_types::{H256, H264, U256};
use kvdb::PREFIX_LEN as DB_PREFIX_LEN;

/// Represents index of extra data in database
#[derive(Copy, Debug, Hash, Eq, PartialEq, Clone)]
pub enum ExtrasIndex {
@@ -252,6 +252,7 @@ pub struct EpochTransitions {
#[cfg(test)]
mod tests {
use rlp::*;

use super::BlockReceipts;

#[test]
@@ -1713,7 +1713,7 @@ impl BlockChainClient for Client {

fn list_storage(&self, id: BlockId, account: &Address, after: Option<&H256>, count: u64) -> Option<Vec<H256>> {
if !self.factories.trie.is_fat() {
trace!(target: "fatdb", "list_stroage: Not a fat DB");
trace!(target: "fatdb", "list_storage: Not a fat DB");
return None;
}

@@ -16,7 +16,7 @@

use std::fmt::{Display, Formatter, Error as FmtError};
use util_error::UtilError;
use trie::TrieError;
use ethtrie::TrieError;

/// Client configuration errors.
#[derive(Debug)]
@@ -25,12 +25,13 @@ use {state, state_db, client, executive, trace, transaction, db, spec, pod_state}
use factory::Factories;
use evm::{VMType, FinalizationResult};
use vm::{self, ActionParams};
use ethtrie;

/// EVM test Error.
#[derive(Debug)]
pub enum EvmTestError {
/// Trie integrity error.
Trie(trie::TrieError),
Trie(Box<ethtrie::TrieError>),
/// EVM error.
Evm(vm::Error),
/// Initialization error.
@@ -52,7 +52,6 @@ use miner::{self, Miner, MinerService};
use spec::Spec;
use types::basic_account::BasicAccount;
use types::pruning_info::PruningInfo;

use verification::queue::QueueInfo;
use block::{OpenBlock, SealedBlock, ClosedBlock};
use executive::Executed;
@@ -62,7 +61,7 @@ use state_db::StateDB;
use header::Header;
use encoded;
use engines::EthEngine;
use trie;
use ethtrie;
use state::StateInfo;
use views::BlockView;

@@ -581,10 +580,10 @@ impl Call for TestBlockChainClient {
}

impl StateInfo for () {
fn nonce(&self, _address: &Address) -> trie::Result<U256> { unimplemented!() }
fn balance(&self, _address: &Address) -> trie::Result<U256> { unimplemented!() }
fn storage_at(&self, _address: &Address, _key: &H256) -> trie::Result<H256> { unimplemented!() }
fn code(&self, _address: &Address) -> trie::Result<Option<Arc<Bytes>>> { unimplemented!() }
fn nonce(&self, _address: &Address) -> ethtrie::Result<U256> { unimplemented!() }
fn balance(&self, _address: &Address) -> ethtrie::Result<U256> { unimplemented!() }
fn storage_at(&self, _address: &Address, _key: &H256) -> ethtrie::Result<H256> { unimplemented!() }
fn code(&self, _address: &Address) -> ethtrie::Result<Option<Arc<Bytes>>> { unimplemented!() }
}

impl StateClient for TestBlockChainClient {
@@ -34,12 +34,9 @@ use ethjson;
use machine::{AuxiliaryData, Call, EthereumMachine};
use hash::keccak;
use header::{Header, BlockNumber, ExtendedHeader};

use super::signer::EngineSigner;
use super::validator_set::{ValidatorSet, SimpleList, new_validator_set};

use self::finality::RollingFinality;

use ethkey::{self, Password, Signature};
use io::{IoContext, IoHandler, TimerToken, IoService};
use itertools::{self, Itertools};
@@ -16,15 +16,15 @@

//! Tendermint message handling.

use std::cmp;
use hash::keccak;
use ethereum_types::{H256, H520, Address};
use bytes::Bytes;
use super::{Height, View, BlockHash, Step};
use error::Error;
use ethereum_types::{H256, H520, Address};
use ethkey::{recover, public_to_address};
use hash::keccak;
use header::Header;
use rlp::{Rlp, RlpStream, Encodable, Decodable, DecoderError};
use ethkey::{recover, public_to_address};
use std::cmp;
use super::{Height, View, BlockHash, Step};
use super::super::vote_collector::Message;

/// Message transmitted between consensus participants.
@@ -145,7 +145,7 @@ impl <F> super::EpochVerifier<EthereumMachine> for EpochVerifier<F>
fn check_finality_proof(&self, proof: &[u8]) -> Option<Vec<H256>> {
match ::rlp::decode(proof) {
Ok(header) => self.verify_light(&header).ok().map(|_| vec![header.hash()]),
Err(_) => None // REVIEW: log perhaps? Not sure what the policy is.
Err(_) => None
}
}
}
@@ -16,27 +16,23 @@

/// Validator set maintained in a contract, updated using `getValidators` method.

use std::sync::{Weak, Arc};
use hash::keccak;

use ethereum_types::{H256, U256, Address, Bloom};
use parking_lot::RwLock;

use bytes::Bytes;
use memory_cache::MemoryLruCache;
use unexpected::Mismatch;
use rlp::{Rlp, RlpStream};
use kvdb::DBValue;

use client::EngineClient;
use machine::{AuxiliaryData, Call, EthereumMachine, AuxiliaryRequest};
use ethereum_types::{H256, U256, Address, Bloom};
use hash::keccak;
use header::Header;
use ids::BlockId;
use kvdb::DBValue;
use log_entry::LogEntry;
use machine::{AuxiliaryData, Call, EthereumMachine, AuxiliaryRequest};
use memory_cache::MemoryLruCache;
use parking_lot::RwLock;
use receipt::Receipt;

use rlp::{Rlp, RlpStream};
use std::sync::{Weak, Arc};
use super::{SystemCall, ValidatorSet};
use super::simple_list::SimpleList;
use unexpected::Mismatch;

use_contract!(validator_set, "ValidatorSet", "res/contracts/validator_set.json");

@@ -22,7 +22,7 @@ use ethereum_types::{H256, U256, Address, Bloom};
use util_error::{self, UtilError};
use snappy::InvalidInput;
use unexpected::{Mismatch, OutOfBounds};
use trie::TrieError;
use ethtrie::TrieError;
use io::*;
use header::BlockNumber;
use client::Error as ClientError;
@@ -18,7 +18,7 @@

use ethereum_types::{U256, U512, Address};
use bytes::Bytes;
use trie;
use ethtrie;
use vm;
use trace::{VMTrace, FlatTrace};
use log_entry::LogEntry;
@@ -117,9 +117,14 @@ pub enum ExecutionError {
TransactionMalformed(String),
}

impl From<Box<trie::TrieError>> for ExecutionError {
fn from(err: Box<trie::TrieError>) -> Self {
ExecutionError::Internal(format!("{}", err))
impl From<Box<ethtrie::TrieError>> for ExecutionError {
fn from(err: Box<ethtrie::TrieError>) -> Self {
ExecutionError::Internal(format!("{:?}", err))
}
}
impl From<ethtrie::TrieError> for ExecutionError {
fn from(err: ethtrie::TrieError) -> Self {
ExecutionError::Internal(format!("{:?}", err))
}
}

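Trie lookups surface their errors as `Box<ethtrie::TrieError>`, so the `From` impls above are what keep `?` ergonomic at call sites inside ethcore. A hedged sketch of a caller relying on that conversion (the closure stands in for any state accessor returning `ethtrie::Result`; the import path of `ethtrie` is the crate alias used throughout):

// --- illustrative sketch; read_nonce stands in for a state accessor ---
use ethereum_types::{Address, U256};

// ExecutionError is the enum defined in the hunk above.
fn next_nonce<F>(address: &Address, read_nonce: F) -> Result<U256, ExecutionError>
    where F: Fn(&Address) -> ethtrie::Result<U256>
{
    // A Box<ethtrie::TrieError> from the trie read is converted into
    // ExecutionError::Internal by the From impl above, so `?` just works.
    let nonce = read_nonce(address)?;
    Ok(nonce + U256::from(1))
}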
@@ -15,10 +15,12 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use trie::TrieFactory;
use ethtrie::RlpCodec;
use account_db::Factory as AccountFactory;
use evm::{Factory as EvmFactory, VMType};
use vm::{Vm, ActionParams, Schedule};
use wasm::WasmInterpreter;
use keccak_hasher::KeccakHasher;

const WASM_MAGIC_NUMBER: &'static [u8; 4] = b"\0asm";

@@ -54,7 +56,7 @@ pub struct Factories {
/// factory for evm.
pub vm: VmFactory,
/// factory for tries.
pub trie: TrieFactory,
pub trie: TrieFactory<KeccakHasher, RlpCodec>,
/// factory for account databases.
pub accountdb: AccountFactory,
}
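`TrieFactory` now carries the hasher and the codec as type parameters, so the ethcore `Factories` pin Keccak/RLP exactly once. A sketch of constructing such a factory (constructor shape as used by the JSON tests further down; `TrieSpec::Secure` is one of the available variants):

// --- illustrative sketch, not part of the commit ---
use ethtrie::RlpCodec;
use keccak_hasher::KeccakHasher;
use trie::{TrieFactory, TrieSpec};

fn secure_trie_factory() -> TrieFactory<KeccakHasher, RlpCodec> {
    // The hasher/codec pair is fixed here; everything reached through
    // Factories::trie inherits it instead of assuming Keccak/RLP implicitly.
    TrieFactory::new(TrieSpec::Secure)
}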
@@ -31,7 +31,7 @@ use ethjson;
use trace::{Tracer, NoopTracer};
use trace::{VMTracer, NoopVMTracer};
use bytes::{Bytes, BytesRef};
use trie;
use ethtrie;
use rlp::RlpStream;
use hash::keccak;
use machine::EthereumMachine as Machine;
@@ -93,7 +93,7 @@ impl<'a, T: 'a, V: 'a, B: 'a> TestExt<'a, T, V, B>
address: Address,
tracer: &'a mut T,
vm_tracer: &'a mut V,
) -> trie::Result<Self> {
) -> ethtrie::Result<Self> {
let static_call = false;
Ok(TestExt {
nonce: state.nonce(&address)?,
@@ -16,8 +16,10 @@

use ethjson;
use trie::{TrieFactory, TrieSpec};
use ethtrie::RlpCodec;
use ethereum_types::H256;
use memorydb::MemoryDB;
use keccak_hasher::KeccakHasher;

use super::HookType;

@@ -28,13 +30,13 @@ pub use self::secure::run_test_file as run_secure_test_file;

fn test_trie<H: FnMut(&str, HookType)>(json: &[u8], trie: TrieSpec, start_stop_hook: &mut H) -> Vec<String> {
let tests = ethjson::trie::Test::load(json).unwrap();
let factory = TrieFactory::new(trie);
let factory = TrieFactory::<_, RlpCodec>::new(trie);
let mut result = vec![];

for (name, test) in tests.into_iter() {
start_stop_hook(&name, HookType::OnStart);

let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::default();
let mut t = factory.create(&mut memdb, &mut root);

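The JSON trie tests change in exactly two places: the factory names its codec and the `MemoryDB` names its hasher. One iteration of the loop above, written out as a standalone sketch (the key/value pair is made up):

// --- illustrative sketch of one iteration of the loop above ---
use ethereum_types::H256;
use ethtrie::RlpCodec;
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use trie::{TrieFactory, TrieMut, TrieSpec};

fn build_one() -> H256 {
    let factory = TrieFactory::<_, RlpCodec>::new(TrieSpec::Generic);
    let mut memdb = MemoryDB::<KeccakHasher>::new();
    let mut root = H256::default();
    {
        let mut t = factory.create(&mut memdb, &mut root);
        t.insert(b"abc", b"def").expect("in-memory trie insert does not fail");
    }
    // The root is updated in place through the &mut borrow above.
    root
}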
@@ -91,9 +91,11 @@ extern crate rayon;
extern crate rlp;
extern crate rlp_compress;
extern crate keccak_hash as hash;
extern crate keccak_hasher;
extern crate heapsize;
extern crate memorydb;
extern crate patricia_trie as trie;
extern crate patricia_trie_ethereum as ethtrie;
extern crate triehash;
extern crate ansi_term;
extern crate unexpected;
@@ -20,9 +20,11 @@ use itertools::Itertools;
use hash::{keccak};
use ethereum_types::{H256, U256};
use hashdb::HashDB;
use keccak_hasher::KeccakHasher;
use triehash::sec_trie_root;
use bytes::Bytes;
use trie::TrieFactory;
use ethtrie::RlpCodec;
use state::Account;
use ethjson;
use types::account_diff::*;
@@ -65,7 +67,7 @@ impl PodAccount {
}

/// Place additional data into given hash DB.
pub fn insert_additional(&self, db: &mut HashDB, factory: &TrieFactory) {
pub fn insert_additional(&self, db: &mut HashDB<KeccakHasher>, factory: &TrieFactory<KeccakHasher, RlpCodec>) {
match self.code {
Some(ref c) if !c.is_empty() => { db.insert(c); }
_ => {}
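`insert_additional` leans on the plain `HashDB` API, which is unchanged apart from the hasher parameter: `insert` stores a value keyed by its keccak hash and returns that hash. A small hedged sketch of the same calls:

// --- illustrative sketch of the HashDB<KeccakHasher> API used by insert_additional ---
use hashdb::HashDB;
use keccak_hasher::KeccakHasher;

fn store_code(db: &mut HashDB<KeccakHasher>, code: &[u8]) {
    if !code.is_empty() {
        // insert() hashes the value with keccak-256 and keys it by that hash.
        let code_hash = db.insert(code);
        debug_assert!(db.contains(&code_hash));
    }
}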
@@ -18,16 +18,15 @@

use account_db::{AccountDB, AccountDBMut};
use basic_account::BasicAccount;
use snapshot::Error;
use hash::{KECCAK_EMPTY, KECCAK_NULL_RLP};

use ethereum_types::{H256, U256};
use hashdb::HashDB;
use bytes::Bytes;
use trie::{TrieDB, Trie};
use ethereum_types::{H256, U256};
use ethtrie::{TrieDB, TrieDBMut};
use hash::{KECCAK_EMPTY, KECCAK_NULL_RLP};
use hashdb::HashDB;
use rlp::{RlpStream, Rlp};

use snapshot::Error;
use std::collections::HashSet;
use trie::{Trie, TrieMut};

// An empty account -- these were replaced with RLP null data for a space optimization in v1.
const ACC_EMPTY: BasicAccount = BasicAccount {
@@ -151,7 +150,6 @@ pub fn from_fat_rlp(
rlp: Rlp,
mut storage_root: H256,
) -> Result<(BasicAccount, Option<Bytes>), Error> {
use trie::{TrieDBMut, TrieMut};

// check for special case of empty account.
if rlp.is_empty() {
@@ -19,7 +19,6 @@
use block::Block;
use header::Header;
use hash::keccak;

use views::BlockView;
use rlp::{DecoderError, RlpStream, Rlp};
use ethereum_types::H256;
@@ -38,6 +38,7 @@ use ethereum_types::{H256, U256};
use kvdb::KeyValueDB;
use bytes::Bytes;


/// Snapshot creation and restoration for PoA chains.
/// Chunk format:
///
@@ -21,7 +21,7 @@ use std::fmt;
use ids::BlockId;

use ethereum_types::H256;
use trie::TrieError;
use ethtrie::TrieError;
use rlp::DecoderError;

/// Snapshot-related errors.
@@ -32,13 +32,15 @@ use ids::BlockId;

use ethereum_types::{H256, U256};
use hashdb::HashDB;
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;
use snappy;
use bytes::Bytes;
use parking_lot::Mutex;
use journaldb::{self, Algorithm, JournalDB};
use kvdb::KeyValueDB;
use trie::{TrieDB, TrieDBMut, Trie, TrieMut};
use trie::{Trie, TrieMut};
use ethtrie::{TrieDB, TrieDBMut};
use rlp::{RlpStream, Rlp};
use bloom_journal::Bloom;

@@ -126,7 +128,7 @@ pub fn take_snapshot<W: SnapshotWriter + Send>(
engine: &EthEngine,
chain: &BlockChain,
block_at: H256,
state_db: &HashDB,
state_db: &HashDB<KeccakHasher>,
writer: W,
p: &Progress
) -> Result<(), Error> {
@@ -264,7 +266,7 @@ impl<'a> StateChunker<'a> {
///
/// Returns a list of hashes of chunks created, or any error it may
/// have encountered.
pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter + 'a>, progress: &'a Progress) -> Result<Vec<H256>, Error> {
pub fn chunk_state<'a>(db: &HashDB<KeccakHasher>, root: &H256, writer: &Mutex<SnapshotWriter + 'a>, progress: &'a Progress) -> Result<Vec<H256>, Error> {
let account_trie = TrieDB::new(db, &root)?;

let mut chunker = StateChunker {
@@ -414,7 +416,7 @@ struct RebuiltStatus {
// rebuild a set of accounts and their storage.
// returns a status detailing newly-loaded code and accounts missing code.
fn rebuild_accounts(
db: &mut HashDB,
db: &mut HashDB<KeccakHasher>,
account_fat_rlps: Rlp,
out_chunk: &mut [(H256, Bytes)],
known_code: &HashMap<H256, H256>,
@ -36,8 +36,10 @@ use rand::Rng;
|
||||
use kvdb::DBValue;
|
||||
use ethereum_types::H256;
|
||||
use hashdb::HashDB;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use journaldb;
|
||||
use trie::{SecTrieDBMut, TrieMut, TrieDB, TrieDBMut, Trie};
|
||||
use trie::{TrieMut, Trie};
|
||||
use ethtrie::{SecTrieDBMut, TrieDB, TrieDBMut};
|
||||
use self::trie_standardmap::{Alphabet, StandardMap, ValueMode};
|
||||
|
||||
// the proportion of accounts we will alter each tick.
|
||||
@ -60,7 +62,7 @@ impl StateProducer {
|
||||
|
||||
/// Tick the state producer. This alters the state, writing new data into
|
||||
/// the database.
|
||||
pub fn tick<R: Rng>(&mut self, rng: &mut R, db: &mut HashDB) {
|
||||
pub fn tick<R: Rng>(&mut self, rng: &mut R, db: &mut HashDB<KeccakHasher>) {
|
||||
// modify existing accounts.
|
||||
let mut accounts_to_modify: Vec<_> = {
|
||||
let trie = TrieDB::new(&*db, &self.state_root).unwrap();
|
||||
@ -129,7 +131,7 @@ pub fn fill_storage(mut db: AccountDBMut, root: &mut H256, seed: &mut H256) {
|
||||
}
|
||||
|
||||
/// Compare two state dbs.
|
||||
pub fn compare_dbs(one: &HashDB, two: &HashDB) {
|
||||
pub fn compare_dbs(one: &HashDB<KeccakHasher>, two: &HashDB<KeccakHasher>) {
|
||||
let keys = one.keys();
|
||||
|
||||
for key in keys.keys() {
|
||||
|
@ -23,10 +23,11 @@ use hash::{KECCAK_EMPTY, KECCAK_NULL_RLP, keccak};
|
||||
use ethereum_types::{H256, U256, Address};
|
||||
use error::Error;
|
||||
use hashdb::HashDB;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use kvdb::DBValue;
|
||||
use bytes::{Bytes, ToPretty};
|
||||
use trie;
|
||||
use trie::{SecTrieDB, Trie, TrieFactory, TrieError};
|
||||
use trie::{Trie, Recorder};
|
||||
use ethtrie::{TrieFactory, TrieDB, SecTrieDB, Result as TrieResult};
|
||||
use pod_account::*;
|
||||
use rlp::{RlpStream, encode};
|
||||
use lru_cache::LruCache;
|
||||
@ -199,7 +200,7 @@ impl Account {
|
||||
|
||||
/// Get (and cache) the contents of the trie's storage at `key`.
|
||||
/// Takes modified storage into account.
|
||||
pub fn storage_at(&self, db: &HashDB, key: &H256) -> trie::Result<H256> {
|
||||
pub fn storage_at(&self, db: &HashDB<KeccakHasher>, key: &H256) -> TrieResult<H256> {
|
||||
if let Some(value) = self.cached_storage_at(key) {
|
||||
return Ok(value);
|
||||
}
|
||||
@ -278,7 +279,7 @@ impl Account {
|
||||
}
|
||||
|
||||
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
|
||||
pub fn cache_code(&mut self, db: &HashDB) -> Option<Arc<Bytes>> {
|
||||
pub fn cache_code(&mut self, db: &HashDB<KeccakHasher>) -> Option<Arc<Bytes>> {
|
||||
// TODO: fill out self.code_cache;
|
||||
trace!("Account::cache_code: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
|
||||
|
||||
@ -307,7 +308,7 @@ impl Account {
|
||||
}
|
||||
|
||||
/// Provide a database to get `code_size`. Should not be called if it is a contract without code.
|
||||
pub fn cache_code_size(&mut self, db: &HashDB) -> bool {
|
||||
pub fn cache_code_size(&mut self, db: &HashDB<KeccakHasher>) -> bool {
|
||||
// TODO: fill out self.code_cache;
|
||||
trace!("Account::cache_code_size: ic={}; self.code_hash={:?}, self.code_cache={}", self.is_cached(), self.code_hash, self.code_cache.pretty());
|
||||
self.code_size.is_some() ||
|
||||
@ -374,7 +375,7 @@ impl Account {
|
||||
}
|
||||
|
||||
/// Commit the `storage_changes` to the backing DB and update `storage_root`.
|
||||
pub fn commit_storage(&mut self, trie_factory: &TrieFactory, db: &mut HashDB) -> trie::Result<()> {
|
||||
pub fn commit_storage(&mut self, trie_factory: &TrieFactory, db: &mut HashDB<KeccakHasher>) -> TrieResult<()> {
|
||||
let mut t = trie_factory.from_existing(db, &mut self.storage_root)?;
|
||||
for (k, v) in self.storage_changes.drain() {
|
||||
// cast key and value to trait type,
|
||||
@ -390,7 +391,7 @@ impl Account {
|
||||
}
|
||||
|
||||
/// Commit any unsaved code. `code_hash` will always return the hash of the `code_cache` after this.
|
||||
pub fn commit_code(&mut self, db: &mut HashDB) {
|
||||
pub fn commit_code(&mut self, db: &mut HashDB<KeccakHasher>) {
|
||||
trace!("Commiting code of {:?} - {:?}, {:?}", self, self.code_filth == Filth::Dirty, self.code_cache.is_empty());
|
||||
match (self.code_filth == Filth::Dirty, self.code_cache.is_empty()) {
|
||||
(true, true) => {
|
||||
@ -472,10 +473,7 @@ impl Account {
|
||||
/// trie.
|
||||
/// `storage_key` is the hash of the desired storage key, meaning
|
||||
/// this will only work correctly under a secure trie.
|
||||
pub fn prove_storage(&self, db: &HashDB, storage_key: H256) -> Result<(Vec<Bytes>, H256), Box<TrieError>> {
|
||||
use trie::{Trie, TrieDB};
|
||||
use trie::recorder::Recorder;
|
||||
|
||||
pub fn prove_storage(&self, db: &HashDB<KeccakHasher>, storage_key: H256) -> TrieResult<(Vec<Bytes>, H256)> {
|
||||
let mut recorder = Recorder::new();
|
||||
|
||||
let trie = TrieDB::new(db, &self.storage_root)?;
|
||||
|
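A hedged usage sketch of the migrated account API (the surrounding variables are assumed): call sites stay the same, only the database parameter now names its hasher and errors come back through the `TrieResult` alias.

    let db: &HashDB<KeccakHasher> = state_db.as_hashdb();
    let value: H256 = account.storage_at(db, &key)?; // TrieResult<H256>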
@ -29,14 +29,15 @@ use parking_lot::Mutex;
use ethereum_types::{Address, H256};
use memorydb::MemoryDB;
use hashdb::{AsHashDB, HashDB, DBValue};
use keccak_hasher::KeccakHasher;
/// State backend. See module docs for more details.
pub trait Backend: Send {
/// Treat the backend as a read-only hashdb.
fn as_hashdb(&self) -> &HashDB;
fn as_hashdb(&self) -> &HashDB<KeccakHasher>;
/// Treat the backend as a writeable hashdb.
fn as_hashdb_mut(&mut self) -> &mut HashDB;
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher>;
/// Add an account entry to the cache.
fn add_to_account_cache(&mut self, addr: Address, data: Option<Account>, modified: bool);
@ -75,18 +76,18 @@ pub trait Backend: Send {
// TODO: when account lookup moved into backends, this won't rely as tenuously on intended
// usage.
#[derive(Clone, PartialEq)]
pub struct ProofCheck(MemoryDB);
pub struct ProofCheck(MemoryDB<KeccakHasher>);
impl ProofCheck {
/// Create a new `ProofCheck` backend from the given state items.
pub fn new(proof: &[DBValue]) -> Self {
let mut db = MemoryDB::new();
let mut db = MemoryDB::<KeccakHasher>::new();
for item in proof { db.insert(item); }
ProofCheck(db)
}
}
impl HashDB for ProofCheck {
impl HashDB<KeccakHasher> for ProofCheck {
fn keys(&self) -> HashMap<H256, i32> { self.0.keys() }
fn get(&self, key: &H256) -> Option<DBValue> {
self.0.get(key)
@ -107,9 +108,14 @@ impl HashDB for ProofCheck {
fn remove(&mut self, _key: &H256) { }
}
impl AsHashDB<KeccakHasher> for ProofCheck {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}
impl Backend for ProofCheck {
fn as_hashdb(&self) -> &HashDB { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB { self }
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
fn add_to_account_cache(&mut self, _addr: Address, _data: Option<Account>, _modified: bool) {}
fn cache_code(&self, _hash: H256, _code: Arc<Vec<u8>>) {}
fn get_cached_account(&self, _addr: &Address) -> Option<Option<Account>> { None }
@ -128,13 +134,18 @@ impl Backend for ProofCheck {
/// The proof-of-execution can be extracted with `extract_proof`.
///
/// This doesn't cache anything or rely on the canonical state caches.
pub struct Proving<H: AsHashDB> {
pub struct Proving<H: AsHashDB<KeccakHasher>> {
base: H, // state we're proving values from.
changed: MemoryDB, // changed state via insertions.
changed: MemoryDB<KeccakHasher>, // changed state via insertions.
proof: Mutex<HashSet<DBValue>>,
}
impl<H: AsHashDB + Send + Sync> HashDB for Proving<H> {
impl<AH: AsHashDB<KeccakHasher> + Send + Sync> AsHashDB<KeccakHasher> for Proving<AH> {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}
impl<H: AsHashDB<KeccakHasher> + Send + Sync> HashDB<KeccakHasher> for Proving<H> {
fn keys(&self) -> HashMap<H256, i32> {
let mut keys = self.base.as_hashdb().keys();
keys.extend(self.changed.keys());
@ -171,14 +182,10 @@ impl<H: AsHashDB + Send + Sync> HashDB for Proving<H> {
}
}
impl<H: AsHashDB + Send + Sync> Backend for Proving<H> {
fn as_hashdb(&self) -> &HashDB {
self
}
impl<H: AsHashDB<KeccakHasher> + Send + Sync> Backend for Proving<H> {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB {
self
}
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
fn add_to_account_cache(&mut self, _: Address, _: Option<Account>, _: bool) { }
@ -197,13 +204,13 @@ impl<H: AsHashDB + Send + Sync> Backend for Proving<H> {
fn is_known_null(&self, _: &Address) -> bool { false }
}
impl<H: AsHashDB> Proving<H> {
impl<H: AsHashDB<KeccakHasher>> Proving<H> {
/// Create a new `Proving` over a base database.
/// This will store all values ever fetched from that base.
pub fn new(base: H) -> Self {
Proving {
base: base,
changed: MemoryDB::new(),
changed: MemoryDB::<KeccakHasher>::new(),
proof: Mutex::new(HashSet::new()),
}
}
@ -215,7 +222,7 @@ impl<H: AsHashDB> Proving<H> {
}
}
impl<H: AsHashDB + Clone> Clone for Proving<H> {
impl<H: AsHashDB<KeccakHasher> + Clone> Clone for Proving<H> {
fn clone(&self) -> Self {
Proving {
base: self.base.clone(),
@ -229,12 +236,12 @@ impl<H: AsHashDB + Clone> Clone for Proving<H> {
/// it. Doesn't cache anything.
pub struct Basic<H>(pub H);
impl<H: AsHashDB + Send + Sync> Backend for Basic<H> {
fn as_hashdb(&self) -> &HashDB {
impl<H: AsHashDB<KeccakHasher> + Send + Sync> Backend for Basic<H> {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> {
self.0.as_hashdb()
}
fn as_hashdb_mut(&mut self) -> &mut HashDB {
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> {
self.0.as_hashdb_mut()
}
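A brief sketch of driving the `ProofCheck` backend (the `proof_items`, `state_root` and `address` values are assumptions): the proof nodes are loaded into the Keccak-keyed `MemoryDB` and then read back through the ordinary trie types.

    let backend = ProofCheck::new(&proof_items);               // proof_items: &[DBValue]
    let trie = TrieDB::new(backend.as_hashdb(), &state_root)?;
    let maybe_account_rlp = trie.get(&keccak(address))?;       // None if the proof is incomplete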
@ -44,12 +44,12 @@ use factory::VmFactory;
use ethereum_types::{H256, U256, Address};
use hashdb::{HashDB, AsHashDB};
use keccak_hasher::KeccakHasher;
use kvdb::DBValue;
use bytes::Bytes;
use trie;
use trie::{Trie, TrieError, TrieDB};
use trie::recorder::Recorder;
use trie::{Trie, TrieError, Recorder};
use ethtrie::{TrieDB, Result as TrieResult};
mod account;
mod substate;
@ -225,7 +225,7 @@ pub fn check_proof(
/// Prove a transaction on the given state.
/// Returns `None` when the transacion could not be proved,
/// and a proof otherwise.
pub fn prove_transaction<H: AsHashDB + Send + Sync>(
pub fn prove_transaction<H: AsHashDB<KeccakHasher> + Send + Sync>(
db: H,
root: H256,
transaction: &SignedTransaction,
@ -336,23 +336,23 @@ pub enum CleanupMode<'a> {
/// Provides subset of `State` methods to query state information
pub trait StateInfo {
/// Get the nonce of account `a`.
fn nonce(&self, a: &Address) -> trie::Result<U256>;
fn nonce(&self, a: &Address) -> TrieResult<U256>;
/// Get the balance of account `a`.
fn balance(&self, a: &Address) -> trie::Result<U256>;
fn balance(&self, a: &Address) -> TrieResult<U256>;
/// Mutate storage of account `address` so that it is `value` for `key`.
fn storage_at(&self, address: &Address, key: &H256) -> trie::Result<H256>;
fn storage_at(&self, address: &Address, key: &H256) -> TrieResult<H256>;
/// Get accounts' code.
fn code(&self, a: &Address) -> trie::Result<Option<Arc<Bytes>>>;
fn code(&self, a: &Address) -> TrieResult<Option<Arc<Bytes>>>;
}
impl<B: Backend> StateInfo for State<B> {
fn nonce(&self, a: &Address) -> trie::Result<U256> { State::nonce(self, a) }
fn balance(&self, a: &Address) -> trie::Result<U256> { State::balance(self, a) }
fn storage_at(&self, address: &Address, key: &H256) -> trie::Result<H256> { State::storage_at(self, address, key) }
fn code(&self, address: &Address) -> trie::Result<Option<Arc<Bytes>>> { State::code(self, address) }
fn nonce(&self, a: &Address) -> TrieResult<U256> { State::nonce(self, a) }
fn balance(&self, a: &Address) -> TrieResult<U256> { State::balance(self, a) }
fn storage_at(&self, address: &Address, key: &H256) -> TrieResult<H256> { State::storage_at(self, address, key) }
fn code(&self, address: &Address) -> TrieResult<Option<Arc<Bytes>>> { State::code(self, address) }
}
const SEC_TRIE_DB_UNWRAP_STR: &'static str = "A state can only be created with valid root. Creating a SecTrieDB with a valid root will not fail. \
@ -379,9 +379,9 @@ impl<B: Backend> State<B> {
}
/// Creates new state with existing state root
pub fn from_existing(db: B, root: H256, account_start_nonce: U256, factories: Factories) -> Result<State<B>, TrieError> {
pub fn from_existing(db: B, root: H256, account_start_nonce: U256, factories: Factories) -> TrieResult<State<B>> {
if !db.as_hashdb().contains(&root) {
return Err(TrieError::InvalidStateRoot(root));
return Err(Box::new(TrieError::InvalidStateRoot(root)));
}
let state = State {
@ -481,7 +481,7 @@ impl<B: Backend> State<B> {
}
/// Destroy the current object and return single account data.
pub fn into_account(self, account: &Address) -> trie::Result<(Option<Arc<Bytes>>, HashMap<H256, H256>)> {
pub fn into_account(self, account: &Address) -> TrieResult<(Option<Arc<Bytes>>, HashMap<H256, H256>)> {
// TODO: deconstruct without cloning.
let account = self.require(account, true)?;
Ok((account.code().clone(), account.storage_changes().clone()))
@ -504,43 +504,43 @@ impl<B: Backend> State<B> {
}
/// Determine whether an account exists.
pub fn exists(&self, a: &Address) -> trie::Result<bool> {
pub fn exists(&self, a: &Address) -> TrieResult<bool> {
// Bloom filter does not contain empty accounts, so it is important here to
// check if account exists in the database directly before EIP-161 is in effect.
self.ensure_cached(a, RequireCache::None, false, |a| a.is_some())
}
/// Determine whether an account exists and if not empty.
pub fn exists_and_not_null(&self, a: &Address) -> trie::Result<bool> {
pub fn exists_and_not_null(&self, a: &Address) -> TrieResult<bool> {
self.ensure_cached(a, RequireCache::None, false, |a| a.map_or(false, |a| !a.is_null()))
}
/// Determine whether an account exists and has code or non-zero nonce.
pub fn exists_and_has_code_or_nonce(&self, a: &Address) -> trie::Result<bool> {
pub fn exists_and_has_code_or_nonce(&self, a: &Address) -> TrieResult<bool> {
self.ensure_cached(a, RequireCache::CodeSize, false,
|a| a.map_or(false, |a| a.code_hash() != KECCAK_EMPTY || *a.nonce() != self.account_start_nonce))
}
/// Get the balance of account `a`.
pub fn balance(&self, a: &Address) -> trie::Result<U256> {
pub fn balance(&self, a: &Address) -> TrieResult<U256> {
self.ensure_cached(a, RequireCache::None, true,
|a| a.as_ref().map_or(U256::zero(), |account| *account.balance()))
}
/// Get the nonce of account `a`.
pub fn nonce(&self, a: &Address) -> trie::Result<U256> {
pub fn nonce(&self, a: &Address) -> TrieResult<U256> {
self.ensure_cached(a, RequireCache::None, true,
|a| a.as_ref().map_or(self.account_start_nonce, |account| *account.nonce()))
}
/// Get the storage root of account `a`.
pub fn storage_root(&self, a: &Address) -> trie::Result<Option<H256>> {
pub fn storage_root(&self, a: &Address) -> TrieResult<Option<H256>> {
self.ensure_cached(a, RequireCache::None, true,
|a| a.as_ref().and_then(|account| account.storage_root().cloned()))
}
/// Mutate storage of account `address` so that it is `value` for `key`.
pub fn storage_at(&self, address: &Address, key: &H256) -> trie::Result<H256> {
pub fn storage_at(&self, address: &Address, key: &H256) -> TrieResult<H256> {
// Storage key search and update works like this:
// 1. If there's an entry for the account in the local cache check for the key and return it if found.
// 2. If there's an entry for the account in the global cache check for the key or load it into that account.
@ -602,25 +602,25 @@ impl<B: Backend> State<B> {
}
/// Get accounts' code.
pub fn code(&self, a: &Address) -> trie::Result<Option<Arc<Bytes>>> {
pub fn code(&self, a: &Address) -> TrieResult<Option<Arc<Bytes>>> {
self.ensure_cached(a, RequireCache::Code, true,
|a| a.as_ref().map_or(None, |a| a.code().clone()))
}
/// Get an account's code hash.
pub fn code_hash(&self, a: &Address) -> trie::Result<H256> {
pub fn code_hash(&self, a: &Address) -> TrieResult<H256> {
self.ensure_cached(a, RequireCache::None, true,
|a| a.as_ref().map_or(KECCAK_EMPTY, |a| a.code_hash()))
}
/// Get accounts' code size.
pub fn code_size(&self, a: &Address) -> trie::Result<Option<usize>> {
pub fn code_size(&self, a: &Address) -> TrieResult<Option<usize>> {
self.ensure_cached(a, RequireCache::CodeSize, true,
|a| a.as_ref().and_then(|a| a.code_size()))
}
/// Add `incr` to the balance of account `a`.
pub fn add_balance(&mut self, a: &Address, incr: &U256, cleanup_mode: CleanupMode) -> trie::Result<()> {
pub fn add_balance(&mut self, a: &Address, incr: &U256, cleanup_mode: CleanupMode) -> TrieResult<()> {
trace!(target: "state", "add_balance({}, {}): {}", a, incr, self.balance(a)?);
let is_value_transfer = !incr.is_zero();
if is_value_transfer || (cleanup_mode == CleanupMode::ForceCreate && !self.exists(a)?) {
@ -635,7 +635,7 @@ impl<B: Backend> State<B> {
}
/// Subtract `decr` from the balance of account `a`.
pub fn sub_balance(&mut self, a: &Address, decr: &U256, cleanup_mode: &mut CleanupMode) -> trie::Result<()> {
pub fn sub_balance(&mut self, a: &Address, decr: &U256, cleanup_mode: &mut CleanupMode) -> TrieResult<()> {
trace!(target: "state", "sub_balance({}, {}): {}", a, decr, self.balance(a)?);
if !decr.is_zero() || !self.exists(a)? {
self.require(a, false)?.sub_balance(decr);
@ -647,19 +647,19 @@ impl<B: Backend> State<B> {
}
/// Subtracts `by` from the balance of `from` and adds it to that of `to`.
pub fn transfer_balance(&mut self, from: &Address, to: &Address, by: &U256, mut cleanup_mode: CleanupMode) -> trie::Result<()> {
pub fn transfer_balance(&mut self, from: &Address, to: &Address, by: &U256, mut cleanup_mode: CleanupMode) -> TrieResult<()> {
self.sub_balance(from, by, &mut cleanup_mode)?;
self.add_balance(to, by, cleanup_mode)?;
Ok(())
}
/// Increment the nonce of account `a` by 1.
pub fn inc_nonce(&mut self, a: &Address) -> trie::Result<()> {
pub fn inc_nonce(&mut self, a: &Address) -> TrieResult<()> {
self.require(a, false).map(|mut x| x.inc_nonce())
}
/// Mutate storage of account `a` so that it is `value` for `key`.
pub fn set_storage(&mut self, a: &Address, key: H256, value: H256) -> trie::Result<()> {
pub fn set_storage(&mut self, a: &Address, key: H256, value: H256) -> TrieResult<()> {
trace!(target: "state", "set_storage({}:{:x} to {:x})", a, key, value);
if self.storage_at(a, &key)? != value {
self.require(a, false)?.set_storage(key, value)
@ -670,13 +670,13 @@ impl<B: Backend> State<B> {
/// Initialise the code of account `a` so that it is `code`.
/// NOTE: Account should have been created with `new_contract`.
pub fn init_code(&mut self, a: &Address, code: Bytes) -> trie::Result<()> {
pub fn init_code(&mut self, a: &Address, code: Bytes) -> TrieResult<()> {
self.require_or_from(a, true, || Account::new_contract(0.into(), self.account_start_nonce), |_|{})?.init_code(code);
Ok(())
}
/// Reset the code of account `a` so that it is `code`.
pub fn reset_code(&mut self, a: &Address, code: Bytes) -> trie::Result<()> {
pub fn reset_code(&mut self, a: &Address, code: Bytes) -> TrieResult<()> {
self.require_or_from(a, true, || Account::new_contract(0.into(), self.account_start_nonce), |_|{})?.reset_code(code);
Ok(())
}
@ -753,7 +753,7 @@ impl<B: Backend> State<B> {
}
}
fn touch(&mut self, a: &Address) -> trie::Result<()> {
fn touch(&mut self, a: &Address) -> TrieResult<()> {
self.require(a, false)?;
Ok(())
}
@ -809,7 +809,7 @@ impl<B: Backend> State<B> {
}
/// Remove any touched empty or dust accounts.
pub fn kill_garbage(&mut self, touched: &HashSet<Address>, remove_empty_touched: bool, min_balance: &Option<U256>, kill_contracts: bool) -> trie::Result<()> {
pub fn kill_garbage(&mut self, touched: &HashSet<Address>, remove_empty_touched: bool, min_balance: &Option<U256>, kill_contracts: bool) -> TrieResult<()> {
let to_kill: HashSet<_> = {
self.cache.borrow().iter().filter_map(|(address, ref entry)|
if touched.contains(address) && // Check all touched accounts
@ -850,7 +850,7 @@ impl<B: Backend> State<B> {
}
/// Populate a PodAccount map from this state, with another state as the account and storage query.
pub fn to_pod_diff<X: Backend>(&mut self, query: &State<X>) -> trie::Result<PodState> {
pub fn to_pod_diff<X: Backend>(&mut self, query: &State<X>) -> TrieResult<PodState> {
assert!(self.checkpoints.borrow().is_empty());
// Merge PodAccount::to_pod for cache of self and `query`.
@ -858,7 +858,7 @@ impl<B: Backend> State<B> {
.chain(query.cache.borrow().keys().cloned())
.collect::<BTreeSet<_>>();
Ok(PodState::from(all_addresses.into_iter().fold(Ok(BTreeMap::new()), |m: trie::Result<_>, address| {
Ok(PodState::from(all_addresses.into_iter().fold(Ok(BTreeMap::new()), |m: TrieResult<_>, address| {
let mut m = m?;
let account = self.ensure_cached(&address, RequireCache::Code, true, |acc| {
@ -886,7 +886,7 @@ impl<B: Backend> State<B> {
})?;
if let Some((balance, nonce, storage_keys, code)) = account {
let storage = storage_keys.into_iter().fold(Ok(BTreeMap::new()), |s: trie::Result<_>, key| {
let storage = storage_keys.into_iter().fold(Ok(BTreeMap::new()), |s: TrieResult<_>, key| {
let mut s = s?;
s.insert(key, self.storage_at(&address, &key)?);
@ -904,14 +904,14 @@ impl<B: Backend> State<B> {
/// Returns a `StateDiff` describing the difference from `orig` to `self`.
/// Consumes self.
pub fn diff_from<X: Backend>(&self, mut orig: State<X>) -> trie::Result<StateDiff> {
pub fn diff_from<X: Backend>(&self, mut orig: State<X>) -> TrieResult<StateDiff> {
let pod_state_post = self.to_pod();
let pod_state_pre = orig.to_pod_diff(self)?;
Ok(pod_state::diff_pod(&pod_state_pre, &pod_state_post))
}
// load required account data from the databases.
fn update_account_cache(require: RequireCache, account: &mut Account, state_db: &B, db: &HashDB) {
fn update_account_cache(require: RequireCache, account: &mut Account, state_db: &B, db: &HashDB<KeccakHasher>) {
if let RequireCache::None = require {
return;
}
@ -943,7 +943,7 @@ impl<B: Backend> State<B> {
/// Check caches for required data
/// First searches for account in the local, then the shared cache.
/// Populates local cache if nothing found.
fn ensure_cached<F, U>(&self, a: &Address, require: RequireCache, check_null: bool, f: F) -> trie::Result<U>
fn ensure_cached<F, U>(&self, a: &Address, require: RequireCache, check_null: bool, f: F) -> TrieResult<U>
where F: Fn(Option<&Account>) -> U {
// check local cache first
if let Some(ref mut maybe_acc) = self.cache.borrow_mut().get_mut(a) {
@ -984,13 +984,13 @@ impl<B: Backend> State<B> {
}
/// Pull account `a` in our cache from the trie DB. `require_code` requires that the code be cached, too.
fn require<'a>(&'a self, a: &Address, require_code: bool) -> trie::Result<RefMut<'a, Account>> {
fn require<'a>(&'a self, a: &Address, require_code: bool) -> TrieResult<RefMut<'a, Account>> {
self.require_or_from(a, require_code, || Account::new_basic(0u8.into(), self.account_start_nonce), |_|{})
}
/// Pull account `a` in our cache from the trie DB. `require_code` requires that the code be cached, too.
/// If it doesn't exist, make account equal the evaluation of `default`.
fn require_or_from<'a, F, G>(&'a self, a: &Address, require_code: bool, default: F, not_default: G) -> trie::Result<RefMut<'a, Account>>
fn require_or_from<'a, F, G>(&'a self, a: &Address, require_code: bool, default: F, not_default: G) -> TrieResult<RefMut<'a, Account>>
where F: FnOnce() -> Account, G: FnOnce(&mut Account),
{
let contains_key = self.cache.borrow().contains_key(a);
@ -1037,7 +1037,7 @@ impl<B: Backend> State<B> {
}
/// Replace account code and storage. Creates account if it does not exist.
pub fn patch_account(&self, a: &Address, code: Arc<Bytes>, storage: HashMap<H256, H256>) -> trie::Result<()> {
pub fn patch_account(&self, a: &Address, code: Arc<Bytes>, storage: HashMap<H256, H256>) -> TrieResult<()> {
Ok(self.require(a, false)?.reset_code_and_storage(code, storage))
}
}
@ -1049,7 +1049,7 @@ impl<B: Backend> State<B> {
/// If the account doesn't exist in the trie, prove that and return defaults.
/// Requires a secure trie to be used for accurate results.
/// `account_key` == keccak(address)
pub fn prove_account(&self, account_key: H256) -> trie::Result<(Vec<Bytes>, BasicAccount)> {
pub fn prove_account(&self, account_key: H256) -> TrieResult<(Vec<Bytes>, BasicAccount)> {
let mut recorder = Recorder::new();
let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
let maybe_account: Option<BasicAccount> = {
@ -1074,7 +1074,7 @@ impl<B: Backend> State<B> {
/// Requires a secure trie to be used for correctness.
/// `account_key` == keccak(address)
/// `storage_key` == keccak(key)
pub fn prove_storage(&self, account_key: H256, storage_key: H256) -> trie::Result<(Vec<Bytes>, H256)> {
pub fn prove_storage(&self, account_key: H256, storage_key: H256) -> TrieResult<(Vec<Bytes>, H256)> {
// TODO: probably could look into cache somehow but it's keyed by
// address, not keccak(address).
let trie = TrieDB::new(self.db.as_hashdb(), &self.root)?;
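From the caller's side (variable names and the `warn!` macro are assumed), `from_existing` now reports a missing root as a boxed `TrieError`:

    match State::from_existing(state_db, root, U256::zero(), factories) {
        Ok(state) => { /* use `state` */ },
        Err(e) => warn!("invalid state root {}: {}", root, e), // e: Box<TrieError>
    }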
@ -16,22 +16,23 @@
//! State database abstraction. For more info, see the doc for `StateDB`
use std::collections::{VecDeque, HashSet};
use std::sync::Arc;
use lru_cache::LruCache;
use memory_cache::MemoryLruCache;
use bloom_journal::{Bloom, BloomJournal};
use byteorder::{LittleEndian, ByteOrder};
use db::COL_ACCOUNT_BLOOM;
use ethereum_types::{H256, Address};
use hash::keccak;
use hashdb::HashDB;
use keccak_hasher::KeccakHasher;
use header::BlockNumber;
use journaldb::JournalDB;
use kvdb::{KeyValueDB, DBTransaction};
use ethereum_types::{H256, Address};
use hashdb::HashDB;
use state::{self, Account};
use header::BlockNumber;
use hash::keccak;
use lru_cache::LruCache;
use memory_cache::MemoryLruCache;
use parking_lot::Mutex;
use state::{self, Account};
use std::collections::{VecDeque, HashSet};
use std::sync::Arc;
use util_error::UtilError;
use bloom_journal::{Bloom, BloomJournal};
use db::COL_ACCOUNT_BLOOM;
use byteorder::{LittleEndian, ByteOrder};
/// Value used to initialize bloom bitmap size.
///
@ -310,12 +311,12 @@ impl StateDB {
}
/// Conversion method to interpret self as `HashDB` reference
pub fn as_hashdb(&self) -> &HashDB {
pub fn as_hashdb(&self) -> &HashDB<KeccakHasher> {
self.db.as_hashdb()
}
/// Conversion method to interpret self as mutable `HashDB` reference
pub fn as_hashdb_mut(&mut self) -> &mut HashDB {
pub fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> {
self.db.as_hashdb_mut()
}
@ -410,11 +411,9 @@ impl StateDB {
}
impl state::Backend for StateDB {
fn as_hashdb(&self) -> &HashDB {
self.db.as_hashdb()
}
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self.db.as_hashdb() }
fn as_hashdb_mut(&mut self) -> &mut HashDB {
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> {
self.db.as_hashdb_mut()
}
@ -16,10 +16,12 @@ ethcore-light = { path = "../light" }
ethcore-transaction = { path = "../transaction" }
ethcore = { path = ".." }
ethereum-types = "0.3"
plain_hasher = { path = "../../util/plain_hasher" }
hashdb = { version = "0.2", path = "../../util/hashdb" }
plain_hasher = { version = "0.2", path = "../../util/plain_hasher" }
rlp = { path = "../../util/rlp" }
rustc-hex = "1.0"
keccak-hash = { path = "../../util/hash" }
keccak-hasher = { path = "../../util/keccak-hasher" }
triehash = { path = "../../util/triehash" }
kvdb = { path = "../../util/kvdb" }
macros = { path = "../../util/macros" }
@ -22,6 +22,7 @@ use network::{self, PeerId};
use parking_lot::RwLock;
use rlp::{Rlp, RlpStream};
use std::cmp;
use sync_io::SyncIo;
use super::{
@ -30,6 +30,7 @@ extern crate ethcore_transaction as transaction;
extern crate ethcore;
extern crate ethereum_types;
extern crate env_logger;
extern crate hashdb;
extern crate plain_hasher;
extern crate rand;
extern crate semver;
@ -38,6 +39,7 @@ extern crate smallvec;
extern crate rlp;
extern crate ipnetwork;
extern crate keccak_hash as hash;
extern crate keccak_hasher;
extern crate triehash;
extern crate kvdb;
@ -8,6 +8,7 @@ byteorder = "1.0"
ethcore-bytes = { path = "../../util/bytes" }
ethereum-types = "0.3"
patricia-trie = { path = "../../util/patricia_trie" }
patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" }
log = "0.3"
common-types = { path = "../types" }
ethjson = { path = "../../json" }
@ -16,8 +16,8 @@
//! VM errors module
use trie;
use std::fmt;
use ethtrie;
/// VM errors.
#[derive(Debug, Clone, PartialEq)]
@ -71,8 +71,13 @@ pub enum Error {
Reverted,
}
impl From<Box<trie::TrieError>> for Error {
fn from(err: Box<trie::TrieError>) -> Self {
impl From<Box<ethtrie::TrieError>> for Error {
fn from(err: Box<ethtrie::TrieError>) -> Self {
Error::Internal(format!("Internal error: {}", err))
}
}
impl From<ethtrie::TrieError> for Error {
fn from(err: ethtrie::TrieError) -> Self {
Error::Internal(format!("Internal error: {}", err))
}
}
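Illustrative only (the function is hypothetical and `ethtrie::Result<T>` is assumed to be the boxed-error alias used elsewhere in this diff): with the `From` impls above, `?` lifts a trie failure straight into `vm::Error`.

    fn lookup(res: ethtrie::Result<H256>) -> Result<H256, Error> {
        Ok(res?) // Box<ethtrie::TrieError> converts via the first impl
    }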
@ -22,6 +22,7 @@ extern crate common_types as types;
extern crate ethjson;
extern crate rlp;
extern crate keccak_hash as hash;
extern crate patricia_trie_ethereum as ethtrie;
extern crate patricia_trie as trie;
mod action_params;
@ -1,10 +1,10 @@
[package]
name = "hashdb"
version = "0.1.1"
version = "0.2.0"
authors = ["Parity Technologies <admin@parity.io>"]
description = "trait for hash-keyed databases."
license = "GPL-3.0"
[dependencies]
elastic-array = "0.10"
ethereum-types = "0.3"
heapsize = "0.4"
@ -14,65 +14,70 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Database of byte-slices keyed to their Keccak hash.
//! Database of byte-slices keyed to their hash.
extern crate elastic_array;
extern crate ethereum_types;
extern crate heapsize;
use std::collections::HashMap;
use elastic_array::ElasticArray128;
use ethereum_types::H256;
use heapsize::HeapSizeOf;
use std::collections::HashMap;
use std::{fmt::Debug, hash::Hash};
/// Trait describing an object that can hash a slice of bytes. Used to abstract
/// other types over the hashing algorithm. Defines a single `hash` method and an
/// `Out` associated type with the necessary bounds.
pub trait Hasher: Sync + Send {
/// The output type of the `Hasher`
type Out: AsRef<[u8]> + AsMut<[u8]> + Default + HeapSizeOf + Debug + PartialEq + Eq + Hash + Send + Sync + Clone + Copy;
/// What to use to build `HashMap`s with this `Hasher`
type StdHasher: Sync + Send + Default + std::hash::Hasher;
/// The length in bytes of the `Hasher` output
const LENGTH: usize;
/// Compute the hash of the provided slice of bytes returning the `Out` type of the `Hasher`
fn hash(x: &[u8]) -> Self::Out;
}
/// `HashDB` value type.
pub type DBValue = ElasticArray128<u8>;
/// Trait modelling datastore keyed by a 32-byte Keccak hash.
pub trait HashDB: AsHashDB + Send + Sync {
/// Trait modelling datastore keyed by a hash defined by the `Hasher`.
pub trait HashDB<H: Hasher>: Send + Sync + AsHashDB<H> {
/// Get the keys in the database together with number of underlying references.
fn keys(&self) -> HashMap<H256, i32>;
fn keys(&self) -> HashMap<H::Out, i32>;
/// Look up a given hash into the bytes that hash to it, returning None if the
/// hash is not known.
fn get(&self, key: &H256) -> Option<DBValue>;
fn get(&self, key: &H::Out) -> Option<DBValue>;
/// Check for the existance of a hash-key.
fn contains(&self, key: &H256) -> bool;
fn contains(&self, key: &H::Out) -> bool;
/// Insert a datum item into the DB and return the datum's hash for a later lookup. Insertions
/// are counted and the equivalent number of `remove()`s must be performed before the data
/// is considered dead.
fn insert(&mut self, value: &[u8]) -> H256;
fn insert(&mut self, value: &[u8]) -> H::Out;
/// Like `insert()`, except you provide the key and the data is all moved.
fn emplace(&mut self, key: H256, value: DBValue);
fn emplace(&mut self, key: H::Out, value: DBValue);
/// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may
/// happen without the data being eventually being inserted into the DB. It can be "owed" more than once.
fn remove(&mut self, key: &H256);
fn remove(&mut self, key: &H::Out);
}
/// Upcast trait.
pub trait AsHashDB {
pub trait AsHashDB<H: Hasher> {
/// Perform upcast to HashDB for anything that derives from HashDB.
fn as_hashdb(&self) -> &HashDB;
fn as_hashdb(&self) -> &HashDB<H>;
/// Perform mutable upcast to HashDB for anything that derives from HashDB.
fn as_hashdb_mut(&mut self) -> &mut HashDB;
fn as_hashdb_mut(&mut self) -> &mut HashDB<H>;
}
impl<T: HashDB> AsHashDB for T {
fn as_hashdb(&self) -> &HashDB {
self
}
fn as_hashdb_mut(&mut self) -> &mut HashDB {
self
}
// NOTE: There used to be a `impl<T> AsHashDB for T` but that does not work with generics. See https://stackoverflow.com/questions/48432842/implementing-a-trait-for-reference-and-non-reference-types-causes-conflicting-im
// This means we need concrete impls of AsHashDB in several places, which somewhat defeats the point of the trait.
impl<'a, H: Hasher> AsHashDB<H> for &'a mut HashDB<H> {
fn as_hashdb(&self) -> &HashDB<H> { &**self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<H> { &mut **self }
}
impl<'a> AsHashDB for &'a mut HashDB {
fn as_hashdb(&self) -> &HashDB {
&**self
}
fn as_hashdb_mut(&mut self) -> &mut HashDB {
&mut **self
}
}
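To make the intent of the `Hasher` parameter concrete, a minimal sketch (not part of this file) of code written purely against the traits; any implementation, Keccak-based or not, satisfies it.

    fn roundtrip<H: Hasher>(db: &mut HashDB<H>, data: &[u8]) -> bool {
        let key: H::Out = db.insert(data);           // keyed by H::hash(data)
        db.get(&key).map_or(false, |v| &*v == data)  // DBValue derefs to [u8]
    }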
@ -1,6 +1,6 @@
[package]
name = "journaldb"
version = "0.1.0"
version = "0.2.0"
authors = ["Parity Technologies <admin@parity.io>"]
description = "A `HashDB` which can manage a short-term journal potentially containing many forks of mutually exclusive actions"
license = "GPL3"
@ -8,11 +8,12 @@ license = "GPL3"
[dependencies]
ethcore-bytes = { path = "../bytes" }
ethereum-types = "0.3"
hashdb = { path = "../hashdb" }
hashdb = { version = "0.2.0", path = "../hashdb" }
heapsize = "0.4"
keccak-hasher = { path = "../keccak-hasher" }
kvdb = { path = "../kvdb" }
log = "0.3"
memorydb = { path = "../memorydb" }
memorydb = { version="0.2.0", path = "../memorydb" }
parking_lot = "0.5"
plain_hasher = { path = "../plain_hasher" }
rlp = { path = "../rlp" }
@ -19,15 +19,17 @@
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::sync::Arc;
use rlp::{encode, decode};
use hashdb::*;
use super::memorydb::*;
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY};
use traits::JournalDB;
use kvdb::{KeyValueDB, DBTransaction};
use ethereum_types::H256;
use error::{BaseDataError, UtilError};
use bytes::Bytes;
use error::{BaseDataError, UtilError};
use ethereum_types::H256;
use hashdb::*;
use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction};
use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY};
use super::memorydb::*;
use traits::JournalDB;
/// Implementation of the `HashDB` trait for a disk-backed database with a memory overlay
/// and latent-removal semantics.
@ -37,7 +39,7 @@ use bytes::Bytes;
/// immediately. As this is an "archive" database, nothing is ever removed. This means
/// that the states of any block the node has ever processed will be accessible.
pub struct ArchiveDB {
overlay: MemoryDB,
overlay: MemoryDB<KeccakHasher>,
backing: Arc<KeyValueDB>,
latest_era: Option<u64>,
column: Option<u32>,
@ -62,7 +64,7 @@ impl ArchiveDB {
}
}
impl HashDB for ArchiveDB {
impl HashDB<KeccakHasher> for ArchiveDB {
fn keys(&self) -> HashMap<H256, i32> {
let mut ret: HashMap<H256, i32> = self.backing.iter(self.column)
.map(|(key, _)| (H256::from_slice(&*key), 1))
@ -191,7 +193,7 @@ impl JournalDB for ArchiveDB {
&self.backing
}
fn consolidate(&mut self, with: MemoryDB) {
fn consolidate(&mut self, with: MemoryDB<KeccakHasher>) {
self.overlay.consolidate(with);
}
}
49 util/journaldb/src/as_hash_db_impls.rs Normal file
@ -0,0 +1,49 @@
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Impls of the `AsHashDB` upcast trait for all different variants of DB
use hashdb::{HashDB, AsHashDB};
use keccak_hasher::KeccakHasher;
use archivedb::ArchiveDB;
use earlymergedb::EarlyMergeDB;
use overlayrecentdb::OverlayRecentDB;
use refcounteddb::RefCountedDB;
use overlaydb::OverlayDB;
impl AsHashDB<KeccakHasher> for ArchiveDB {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}
impl AsHashDB<KeccakHasher> for EarlyMergeDB {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}
impl AsHashDB<KeccakHasher> for OverlayRecentDB {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}
impl AsHashDB<KeccakHasher> for RefCountedDB {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}
impl AsHashDB<KeccakHasher> for OverlayDB {
fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
}
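These five impls are spelled out because the old blanket impl clashes with the `&'a mut HashDB<H>` impl under coherence rules (see the NOTE in hashdb). Purely as an illustration of one way to cut the repetition, and not part of the commit, a small macro could stamp them out:

    macro_rules! impl_as_keccak_hashdb {
        ($($db:ty),*) => {$(
            impl AsHashDB<KeccakHasher> for $db {
                fn as_hashdb(&self) -> &HashDB<KeccakHasher> { self }
                fn as_hashdb_mut(&mut self) -> &mut HashDB<KeccakHasher> { self }
            }
        )*}
    }
    impl_as_keccak_hashdb!(ArchiveDB, EarlyMergeDB, OverlayRecentDB, RefCountedDB, OverlayDB);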
@ -19,17 +19,19 @@
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::sync::Arc;
use parking_lot::RwLock;
use heapsize::HeapSizeOf;
use rlp::{encode, decode};
use bytes::Bytes;
use error::{BaseDataError, UtilError};
use ethereum_types::H256;
use hashdb::*;
use heapsize::HeapSizeOf;
use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction};
use memorydb::*;
use parking_lot::RwLock;
use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY};
use super::traits::JournalDB;
use kvdb::{KeyValueDB, DBTransaction};
use ethereum_types::H256;
use error::{BaseDataError, UtilError};
use bytes::Bytes;
use util::{DatabaseKey, DatabaseValueView, DatabaseValueRef};
#[derive(Debug, Clone, PartialEq, Eq)]
@ -105,7 +107,7 @@ enum RemoveFrom {
///
/// TODO: `store_reclaim_period`
pub struct EarlyMergeDB {
overlay: MemoryDB,
overlay: MemoryDB<KeccakHasher>,
backing: Arc<KeyValueDB>,
refs: Option<Arc<RwLock<HashMap<H256, RefInfo>>>>,
latest_era: Option<u64>,
@ -285,7 +287,7 @@ impl EarlyMergeDB {
}
}
impl HashDB for EarlyMergeDB {
impl HashDB<KeccakHasher> for EarlyMergeDB {
fn keys(&self) -> HashMap<H256, i32> {
let mut ret: HashMap<H256, i32> = self.backing.iter(self.column)
.map(|(key, _)| (H256::from_slice(&*key), 1))
@ -512,7 +514,7 @@ impl JournalDB for EarlyMergeDB {
Ok(ops)
}
fn consolidate(&mut self, with: MemoryDB) {
fn consolidate(&mut self, with: MemoryDB<KeccakHasher>) {
self.overlay.consolidate(with);
}
}
@ -23,6 +23,7 @@ extern crate log;
extern crate ethereum_types;
extern crate ethcore_bytes as bytes;
extern crate hashdb;
extern crate keccak_hasher;
extern crate kvdb;
extern crate memorydb;
extern crate parking_lot;
@ -47,6 +48,7 @@ mod earlymergedb;
mod overlayrecentdb;
mod refcounteddb;
mod util;
mod as_hash_db_impls;
pub mod overlaydb;
@ -23,6 +23,7 @@ use error::{Result, BaseDataError};
use ethereum_types::H256;
use rlp::{Rlp, RlpStream, Encodable, DecoderError, Decodable, encode, decode};
use hashdb::*;
use keccak_hasher::KeccakHasher;
use memorydb::*;
use kvdb::{KeyValueDB, DBTransaction};
@ -36,7 +37,7 @@ use kvdb::{KeyValueDB, DBTransaction};
/// queries have an immediate effect in terms of these functions.
#[derive(Clone)]
pub struct OverlayDB {
overlay: MemoryDB,
overlay: MemoryDB<KeccakHasher>,
backing: Arc<KeyValueDB>,
column: Option<u32>,
}
@ -152,7 +153,7 @@ impl OverlayDB {
}
}
impl HashDB for OverlayDB {
impl HashDB<KeccakHasher> for OverlayDB {
fn keys(&self) -> HashMap<H256, i32> {
let mut ret: HashMap<H256, i32> = self.backing.iter(self.column)
.map(|(key, _)| {
@ -19,18 +19,20 @@
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::sync::Arc;
use parking_lot::RwLock;
use heapsize::HeapSizeOf;
use rlp::{Rlp, RlpStream, encode, decode, DecoderError, Decodable, Encodable};
use hashdb::*;
use memorydb::*;
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY};
use kvdb::{KeyValueDB, DBTransaction};
use super::JournalDB;
use ethereum_types::H256;
use plain_hasher::H256FastMap;
use error::{BaseDataError, UtilError};
use bytes::Bytes;
use error::{BaseDataError, UtilError};
use ethereum_types::H256;
use hashdb::*;
use heapsize::HeapSizeOf;
use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction};
use memorydb::*;
use parking_lot::RwLock;
use plain_hasher::H256FastMap;
use rlp::{Rlp, RlpStream, encode, decode, DecoderError, Decodable, Encodable};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY};
use super::JournalDB;
use util::DatabaseKey;
/// Implementation of the `JournalDB` trait for a disk-backed database with a memory overlay
@ -65,7 +67,7 @@ use util::DatabaseKey;
/// 7. Delete ancient record from memory and disk.
pub struct OverlayRecentDB {
transaction_overlay: MemoryDB,
transaction_overlay: MemoryDB<KeccakHasher>,
backing: Arc<KeyValueDB>,
journal_overlay: Arc<RwLock<JournalOverlay>>,
column: Option<u32>,
@ -119,7 +121,7 @@ impl<'a> Encodable for DatabaseValueRef<'a> {
#[derive(PartialEq)]
struct JournalOverlay {
backing_overlay: MemoryDB, // Nodes added in the history period
backing_overlay: MemoryDB<KeccakHasher>, // Nodes added in the history period
pending_overlay: H256FastMap<DBValue>, // Nodes being transfered from backing_overlay to backing db
journal: HashMap<u64, Vec<JournalEntry>>,
latest_era: Option<u64>,
@ -433,12 +435,12 @@ impl JournalDB for OverlayRecentDB {
Ok(ops)
}
fn consolidate(&mut self, with: MemoryDB) {
fn consolidate(&mut self, with: MemoryDB<KeccakHasher>) {
self.transaction_overlay.consolidate(with);
}
}
impl HashDB for OverlayRecentDB {
impl HashDB<KeccakHasher> for OverlayRecentDB {
fn keys(&self) -> HashMap<H256, i32> {
let mut ret: HashMap<H256, i32> = self.backing.iter(self.column)
.map(|(key, _)| (H256::from_slice(&*key), 1))
@ -18,17 +18,19 @@
use std::collections::HashMap;
use std::sync::Arc;
use heapsize::HeapSizeOf;
use rlp::{encode, decode};
use bytes::Bytes;
use error::UtilError;
use ethereum_types::H256;
use hashdb::*;
use overlaydb::OverlayDB;
use heapsize::HeapSizeOf;
use keccak_hasher::KeccakHasher;
use kvdb::{KeyValueDB, DBTransaction};
use memorydb::MemoryDB;
use overlaydb::OverlayDB;
use rlp::{encode, decode};
use super::{DB_PREFIX_LEN, LATEST_ERA_KEY};
use super::traits::JournalDB;
use kvdb::{KeyValueDB, DBTransaction};
use ethereum_types::H256;
use error::UtilError;
use bytes::Bytes;
use util::{DatabaseKey, DatabaseValueView, DatabaseValueRef};
/// Implementation of the `HashDB` trait for a disk-backed database with a memory overlay
@ -78,7 +80,7 @@ impl RefCountedDB {
}
}
impl HashDB for RefCountedDB {
impl HashDB<KeccakHasher> for RefCountedDB {
fn keys(&self) -> HashMap<H256, i32> { self.forward.keys() }
fn get(&self, key: &H256) -> Option<DBValue> { self.forward.get(key) }
fn contains(&self, key: &H256) -> bool { self.forward.contains(key) }
@ -197,7 +199,7 @@ impl JournalDB for RefCountedDB {
self.forward.commit_to_batch(batch)
}
fn consolidate(&mut self, mut with: MemoryDB) {
fn consolidate(&mut self, mut with: MemoryDB<KeccakHasher>) {
for (key, (value, rc)) in with.drain() {
for _ in 0..rc {
self.emplace(key, value.clone());
@ -16,16 +16,17 @@
//! Disk-backed `HashDB` implementation.
use std::sync::Arc;
use hashdb::*;
use kvdb::{self, DBTransaction};
use ethereum_types::H256;
use error::UtilError;
use bytes::Bytes;
use error::UtilError;
use ethereum_types::H256;
use hashdb::*;
use keccak_hasher::KeccakHasher;
use kvdb::{self, DBTransaction};
use std::sync::Arc;
/// A `HashDB` which can manage a short-term journal potentially containing many forks of mutually
/// exclusive actions.
pub trait JournalDB: HashDB {
pub trait JournalDB: HashDB<KeccakHasher> {
/// Return a copy of ourself, in a box.
fn boxed_clone(&self) -> Box<JournalDB>;
@ -76,7 +77,7 @@ pub trait JournalDB: HashDB {
fn flush(&self) {}
/// Consolidate all the insertions and deletions in the given memory overlay.
fn consolidate(&mut self, overlay: ::memorydb::MemoryDB);
fn consolidate(&mut self, overlay: ::memorydb::MemoryDB<KeccakHasher>);
/// Commit all changes in a single batch
#[cfg(test)]
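A short usage sketch (the `journal_db` value is assumed): `consolidate` folds a Keccak-keyed in-memory overlay into the journal.

    let mut overlay = MemoryDB::<KeccakHasher>::new();
    overlay.insert(b"some node data");
    journal_db.consolidate(overlay);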
12 util/keccak-hasher/Cargo.toml Normal file
@ -0,0 +1,12 @@
[package]
name = "keccak-hasher"
version = "0.1.0"
authors = ["Parity Technologies <admin@parity.io>"]
description = "Keccak-256 implementation of the Hasher trait"
license = "GPL-3.0"
[dependencies]
ethereum-types = "0.3"
tiny-keccak = "1.4.2"
hashdb = { path = "../hashdb" }
plain_hasher = { path = "../plain_hasher" }
39 util/keccak-hasher/src/lib.rs Normal file
@ -0,0 +1,39 @@
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Hasher implementation for the Keccak-256 hash
extern crate hashdb;
extern crate ethereum_types;
extern crate tiny_keccak;
extern crate plain_hasher;
use hashdb::Hasher;
use ethereum_types::H256;
use tiny_keccak::Keccak;
use plain_hasher::PlainHasher;
/// Concrete `Hasher` impl for the Keccak-256 hash
#[derive(Default, Debug, Clone, PartialEq)]
pub struct KeccakHasher;
impl Hasher for KeccakHasher {
type Out = H256;
type StdHasher = PlainHasher;
const LENGTH: usize = 32;
fn hash(x: &[u8]) -> Self::Out {
let mut out = [0;32];
Keccak::keccak256(x, &mut out);
out.into()
}
}
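A quick usage sketch (the `memorydb` dependency is assumed here): `KeccakHasher` slots into anything written against `Hasher`, and keys come out as `H256`.

    let h: H256 = KeccakHasher::hash(b"hello world");
    assert_eq!(<KeccakHasher as Hasher>::LENGTH, 32);
    let mut db = MemoryDB::<KeccakHasher>::new();
    assert_eq!(db.insert(b"hello world"), h); // MemoryDB keys by KeccakHasher::hash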
@ -1,6 +1,6 @@
[package]
name = "memorydb"
version = "0.1.1"
version = "0.2.0"
authors = ["Parity Technologies <admin@parity.io>"]
description = "in-memory implementation of hashdb"
license = "GPL-3.0"
@ -8,8 +8,12 @@ license = "GPL-3.0"
[dependencies]
elastic-array = "0.10"
heapsize = "0.4"
ethereum-types = "0.3"
keccak-hash = { version = "0.1.0", path = "../hash" }
hashdb = { version = "0.1.1", path = "../hashdb" }
hashdb = { version = "0.2.0", path = "../hashdb" }
plain_hasher = { path = "../plain_hasher" }
rlp = { version = "0.2.1", path = "../rlp" }
[dev-dependencies]
tiny-keccak = "1.4.2"
ethereum-types = "0.3"
keccak-hasher = { path = "../keccak-hasher" }
keccak-hash = { path = "../hash" }
79 util/memorydb/benches/memdb.rs Normal file
@ -0,0 +1,79 @@
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
#![feature(test)]
extern crate hashdb;
extern crate memorydb;
extern crate keccak_hasher;
extern crate keccak_hash;
extern crate rlp;
extern crate test;
use memorydb::MemoryDB;
use keccak_hasher::KeccakHasher;
use hashdb::{HashDB, Hasher};
use keccak_hash::KECCAK_NULL_RLP;
use rlp::NULL_RLP;
use test::{Bencher, black_box};
#[bench]
fn instantiation(b: &mut Bencher) {
b.iter(|| {
MemoryDB::<KeccakHasher>::new();
})
}
#[bench]
fn compare_to_null_embedded_in_struct(b: &mut Bencher) {
struct X {a_hash: <KeccakHasher as Hasher>::Out};
let x = X {a_hash: KeccakHasher::hash(&NULL_RLP)};
let key = KeccakHasher::hash(b"abc");
b.iter(|| {
black_box(key == x.a_hash);
})
}
#[bench]
fn compare_to_null_in_const(b: &mut Bencher) {
let key = KeccakHasher::hash(b"abc");
b.iter(|| {
black_box(key == KECCAK_NULL_RLP);
})
}
#[bench]
fn contains_with_non_null_key(b: &mut Bencher) {
let mut m = MemoryDB::<KeccakHasher>::new();
let key = KeccakHasher::hash(b"abc");
m.insert(b"abcefghijklmnopqrstuvxyz");
b.iter(|| {
m.contains(&key);
})
}
#[bench]
fn contains_with_null_key(b: &mut Bencher) {
let mut m = MemoryDB::<KeccakHasher>::new();
let null_key = KeccakHasher::hash(&NULL_RLP);
m.insert(b"abcefghijklmnopqrstuvxyz");
b.iter(|| {
m.contains(&null_key);
})
}
@ -15,23 +15,25 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Reference-counted memory-based `HashDB` implementation.
extern crate heapsize;
extern crate ethereum_types;
extern crate hashdb;
extern crate keccak_hash as keccak;
extern crate plain_hasher;
extern crate rlp;
extern crate elastic_array;
extern crate hashdb;
extern crate heapsize;
extern crate rlp;
#[cfg(test)] extern crate keccak_hasher;
#[cfg(test)] extern crate tiny_keccak;
#[cfg(test)] extern crate ethereum_types;
use std::mem;
use std::collections::HashMap;
use std::collections::hash_map::Entry;
use hashdb::{HashDB, Hasher as KeyHasher, DBValue, AsHashDB};
use heapsize::HeapSizeOf;
use ethereum_types::H256;
use hashdb::{HashDB, DBValue};
use keccak::{KECCAK_NULL_RLP, keccak};
use plain_hasher::H256FastMap;
use rlp::NULL_RLP;
use std::collections::hash_map::Entry;
use std::collections::HashMap;
use std::hash;
use std::mem;
// Backing `HashMap` parametrized with a `Hasher` for the keys `Hasher::Out` and the `Hasher::StdHasher` as hash map builder.
type FastMap<H, T> = HashMap<<H as KeyHasher>::Out, T, hash::BuildHasherDefault<<H as KeyHasher>::StdHasher>>;
|
||||
|
||||
/// Reference-counted memory-based `HashDB` implementation.
|
||||
///
|
||||
/// Use `new()` to create a new database. Insert items with `insert()`, remove items
|
||||
@ -42,11 +44,14 @@ use rlp::NULL_RLP;
|
||||
/// # Example
|
||||
/// ```rust
|
||||
/// extern crate hashdb;
|
||||
/// extern crate keccak_hasher;
|
||||
/// extern crate memorydb;
|
||||
///
|
||||
/// use hashdb::*;
|
||||
/// use keccak_hasher::KeccakHasher;
|
||||
/// use memorydb::*;
|
||||
/// fn main() {
|
||||
/// let mut m = MemoryDB::new();
|
||||
/// let mut m = MemoryDB::<KeccakHasher>::new();
|
||||
/// let d = "Hello world!".as_bytes();
|
||||
///
|
||||
/// let k = m.insert(d);
|
||||
@ -77,15 +82,17 @@ use rlp::NULL_RLP;
|
||||
/// }
|
||||
/// ```
|
||||
#[derive(Default, Clone, PartialEq)]
|
||||
pub struct MemoryDB {
|
||||
data: H256FastMap<(DBValue, i32)>,
|
||||
pub struct MemoryDB<H: KeyHasher> {
|
||||
data: FastMap<H, (DBValue, i32)>,
|
||||
hashed_null_node: H::Out,
|
||||
}
|
||||
|
||||
impl MemoryDB {
|
||||
impl<H: KeyHasher> MemoryDB<H> {
|
||||
/// Create a new instance of the memory DB.
|
||||
pub fn new() -> MemoryDB {
|
||||
pub fn new() -> MemoryDB<H> {
|
||||
MemoryDB {
|
||||
data: H256FastMap::default(),
|
||||
data: FastMap::<H,_>::default(),
|
||||
hashed_null_node: H::hash(&NULL_RLP)
|
||||
}
|
||||
}
|
||||
|
||||
@ -94,11 +101,15 @@ impl MemoryDB {
|
||||
/// # Examples
|
||||
/// ```rust
|
||||
/// extern crate hashdb;
|
||||
/// extern crate keccak_hasher;
|
||||
/// extern crate memorydb;
|
||||
///
|
||||
/// use hashdb::*;
|
||||
/// use keccak_hasher::KeccakHasher;
|
||||
/// use memorydb::*;
|
||||
///
|
||||
/// fn main() {
|
||||
/// let mut m = MemoryDB::new();
|
||||
/// let mut m = MemoryDB::<KeccakHasher>::new();
|
||||
/// let hello_bytes = "Hello world!".as_bytes();
|
||||
/// let hash = m.insert(hello_bytes);
|
||||
/// assert!(m.contains(&hash));
|
||||
@ -116,8 +127,8 @@ impl MemoryDB {
|
||||
}
|
||||
|
||||
/// Return the internal map of hashes to data, clearing the current state.
|
||||
pub fn drain(&mut self) -> H256FastMap<(DBValue, i32)> {
|
||||
mem::replace(&mut self.data, H256FastMap::default())
|
||||
pub fn drain(&mut self) -> FastMap<H, (DBValue, i32)> {
|
||||
mem::replace(&mut self.data, FastMap::<H,_>::default())
|
||||
}
|
||||
|
||||
/// Grab the raw information associated with a key. Returns None if the key
|
||||
@ -125,8 +136,8 @@ impl MemoryDB {
|
||||
///
|
||||
/// Even when Some is returned, the data is only guaranteed to be useful
|
||||
/// when the refs > 0.
|
||||
pub fn raw(&self, key: &H256) -> Option<(DBValue, i32)> {
|
||||
if key == &KECCAK_NULL_RLP {
|
||||
pub fn raw(&self, key: &<H as KeyHasher>::Out) -> Option<(DBValue, i32)> {
|
||||
if key == &self.hashed_null_node {
|
||||
return Some((DBValue::from_slice(&NULL_RLP), 1));
|
||||
}
|
||||
self.data.get(key).cloned()
|
||||
@ -139,8 +150,8 @@ impl MemoryDB {
|
||||
|
||||
/// Remove an element and delete it from storage if reference count reaches zero.
|
||||
/// If the value was purged, return the old value.
|
||||
pub fn remove_and_purge(&mut self, key: &H256) -> Option<DBValue> {
|
||||
if key == &KECCAK_NULL_RLP {
|
||||
pub fn remove_and_purge(&mut self, key: &<H as KeyHasher>::Out) -> Option<DBValue> {
|
||||
if key == &self.hashed_null_node {
|
||||
return None;
|
||||
}
|
||||
match self.data.entry(key.clone()) {
|
||||
@ -177,19 +188,9 @@ impl MemoryDB {
|
||||
}
|
||||
}
|
||||
|
||||
impl HashDB for MemoryDB {
|
||||
fn get(&self, key: &H256) -> Option<DBValue> {
|
||||
if key == &KECCAK_NULL_RLP {
|
||||
return Some(DBValue::from_slice(&NULL_RLP));
|
||||
}
|
||||
impl<H: KeyHasher> HashDB<H> for MemoryDB<H> {
|
||||
|
||||
match self.data.get(key) {
|
||||
Some(&(ref d, rc)) if rc > 0 => Some(d.clone()),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
fn keys(&self) -> HashMap<H256, i32> {
|
||||
fn keys(&self) -> HashMap<H::Out, i32> {
|
||||
self.data.iter()
|
||||
.filter_map(|(k, v)| if v.1 != 0 {
|
||||
Some((*k, v.1))
|
||||
@ -199,8 +200,19 @@ impl HashDB for MemoryDB {
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn contains(&self, key: &H256) -> bool {
|
||||
if key == &KECCAK_NULL_RLP {
|
||||
fn get(&self, key: &H::Out) -> Option<DBValue> {
|
||||
if key == &self.hashed_null_node {
|
||||
return Some(DBValue::from_slice(&NULL_RLP));
|
||||
}
|
||||
|
||||
match self.data.get(key) {
|
||||
Some(&(ref d, rc)) if rc > 0 => Some(d.clone()),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
fn contains(&self, key: &H::Out) -> bool {
|
||||
if key == &self.hashed_null_node {
|
||||
return true;
|
||||
}
|
||||
|
||||
@ -210,15 +222,15 @@ impl HashDB for MemoryDB {
|
||||
}
|
||||
}
|
||||
|
||||
fn insert(&mut self, value: &[u8]) -> H256 {
|
||||
fn insert(&mut self, value: &[u8]) -> H::Out {
|
||||
if value == &NULL_RLP {
|
||||
return KECCAK_NULL_RLP.clone();
|
||||
return self.hashed_null_node.clone();
|
||||
}
|
||||
let key = keccak(value);
|
||||
let key = H::hash(value);
|
||||
match self.data.entry(key) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
let &mut (ref mut old_value, ref mut rc) = entry.get_mut();
|
||||
if *rc >= -0x80000000i32 && *rc <= 0 {
|
||||
if *rc <= 0 {
|
||||
*old_value = DBValue::from_slice(value);
|
||||
}
|
||||
*rc += 1;
|
||||
@ -230,7 +242,7 @@ impl HashDB for MemoryDB {
|
||||
key
|
||||
}
|
||||
|
||||
fn emplace(&mut self, key: H256, value: DBValue) {
|
||||
fn emplace(&mut self, key:H::Out, value: DBValue) {
|
||||
if &*value == &NULL_RLP {
|
||||
return;
|
||||
}
|
||||
@ -238,7 +250,7 @@ impl HashDB for MemoryDB {
|
||||
match self.data.entry(key) {
|
||||
Entry::Occupied(mut entry) => {
|
||||
let &mut (ref mut old_value, ref mut rc) = entry.get_mut();
|
||||
if *rc >= -0x80000000i32 && *rc <= 0 {
|
||||
if *rc <= 0 {
|
||||
*old_value = value;
|
||||
}
|
||||
*rc += 1;
|
||||
@ -249,8 +261,8 @@ impl HashDB for MemoryDB {
|
||||
}
|
||||
}
|
||||
|
||||
fn remove(&mut self, key: &H256) {
|
||||
if key == &KECCAK_NULL_RLP {
|
||||
fn remove(&mut self, key: &H::Out) {
|
||||
if key == &self.hashed_null_node {
|
||||
return;
|
||||
}
|
||||
|
||||
@ -266,17 +278,26 @@ impl HashDB for MemoryDB {
|
||||
}
|
||||
}
|
||||
|
||||
impl<H: KeyHasher> AsHashDB<H> for MemoryDB<H> {
|
||||
fn as_hashdb(&self) -> &HashDB<H> { self }
|
||||
fn as_hashdb_mut(&mut self) -> &mut HashDB<H> { self }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use keccak::keccak;
|
||||
use super::*;
|
||||
use tiny_keccak::Keccak;
|
||||
use ethereum_types::H256;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
|
||||
#[test]
|
||||
fn memorydb_remove_and_purge() {
|
||||
let hello_bytes = b"Hello world!";
|
||||
let hello_key = keccak(hello_bytes);
|
||||
let mut hello_key = [0;32];
|
||||
Keccak::keccak256(hello_bytes, &mut hello_key);
|
||||
let hello_key = H256(hello_key);
|
||||
|
||||
let mut m = MemoryDB::new();
|
||||
let mut m = MemoryDB::<KeccakHasher>::new();
|
||||
m.remove(&hello_key);
|
||||
assert_eq!(m.raw(&hello_key).unwrap().1, -1);
|
||||
m.purge();
|
||||
@ -286,7 +307,7 @@ mod tests {
|
||||
m.purge();
|
||||
assert_eq!(m.raw(&hello_key), None);
|
||||
|
||||
let mut m = MemoryDB::new();
|
||||
let mut m = MemoryDB::<KeccakHasher>::new();
|
||||
assert!(m.remove_and_purge(&hello_key).is_none());
|
||||
assert_eq!(m.raw(&hello_key).unwrap().1, -1);
|
||||
m.insert(hello_bytes);
|
||||
@ -299,8 +320,8 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn consolidate() {
|
||||
let mut main = MemoryDB::new();
|
||||
let mut other = MemoryDB::new();
|
||||
let mut main = MemoryDB::<KeccakHasher>::new();
|
||||
let mut other = MemoryDB::<KeccakHasher>::new();
|
||||
let remove_key = other.insert(b"doggo");
|
||||
main.remove(&remove_key);
|
||||
|
||||
|
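The reference-counting semantics of the now-generic `MemoryDB` can be seen end to end in a small sketch (not part of this diff; it only uses APIs that appear in the hunks above):

extern crate hashdb;
extern crate keccak_hasher;
extern crate memorydb;

use hashdb::{DBValue, HashDB, Hasher};
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;

fn main() {
    let mut m = MemoryDB::<KeccakHasher>::new();
    let key = KeccakHasher::hash(b"Hello world!");

    // Removing before inserting leaves a negative reference count...
    m.remove(&key);
    assert_eq!(m.raw(&key).unwrap().1, -1);

    // ...a first insert only brings it back to zero, so the value is not yet visible...
    m.insert(b"Hello world!");
    assert!(m.get(&key).is_none());

    // ...and a second insert makes the count positive and the value readable.
    m.insert(b"Hello world!");
    assert_eq!(m.get(&key).unwrap(), DBValue::from_slice(b"Hello world!"));
}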
15
util/patricia-trie-ethereum/Cargo.toml
Normal file
@ -0,0 +1,15 @@
[package]
name = "patricia-trie-ethereum"
version = "0.1.0"
authors = ["Parity Technologies <admin@parity.io>"]
description = "Merkle-Patricia Trie (Ethereum Style)"
license = "GPL-3.0"

[dependencies]
patricia-trie = { path = "../patricia_trie" }
keccak-hasher = { path = "../keccak-hasher" }
hashdb = { path = "../hashdb" }
rlp = { path = "../rlp" }
ethcore-bytes = { path = "../bytes" }
ethereum-types = "0.3"
elastic-array = "0.10"
62
util/patricia-trie-ethereum/src/lib.rs
Normal file
@ -0,0 +1,62 @@
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Façade crate for `patricia_trie` for Ethereum specific impls

pub extern crate patricia_trie as trie; // `pub` because we need to import this crate for the tests in `patricia_trie` and there were issues: https://gist.github.com/dvdplm/869251ee557a1b4bd53adc7c971979aa
extern crate elastic_array;
extern crate ethcore_bytes;
extern crate ethereum_types;
extern crate hashdb;
extern crate keccak_hasher;
extern crate rlp;

mod rlp_node_codec;

pub use rlp_node_codec::RlpNodeCodec;

use ethereum_types::H256;
use keccak_hasher::KeccakHasher;
use rlp::DecoderError;

/// Convenience type alias to instantiate a Keccak-flavoured `RlpNodeCodec`
pub type RlpCodec = RlpNodeCodec<KeccakHasher>;

/// Convenience type alias to instantiate a Keccak/Rlp-flavoured `TrieDB`
pub type TrieDB<'db> = trie::TrieDB<'db, KeccakHasher, RlpCodec>;

/// Convenience type alias to instantiate a Keccak/Rlp-flavoured `SecTrieDB`
pub type SecTrieDB<'db> = trie::SecTrieDB<'db, KeccakHasher, RlpCodec>;

/// Convenience type alias to instantiate a Keccak/Rlp-flavoured `FatDB`
pub type FatDB<'db> = trie::FatDB<'db, KeccakHasher, RlpCodec>;

/// Convenience type alias to instantiate a Keccak/Rlp-flavoured `TrieDBMut`
pub type TrieDBMut<'db> = trie::TrieDBMut<'db, KeccakHasher, RlpCodec>;

/// Convenience type alias to instantiate a Keccak/Rlp-flavoured `SecTrieDBMut`
pub type SecTrieDBMut<'db> = trie::SecTrieDBMut<'db, KeccakHasher, RlpCodec>;

/// Convenience type alias to instantiate a Keccak/Rlp-flavoured `FatDBMut`
pub type FatDBMut<'db> = trie::FatDBMut<'db, KeccakHasher, RlpCodec>;

/// Convenience type alias to instantiate a Keccak/Rlp-flavoured `TrieFactory`
pub type TrieFactory = trie::TrieFactory<KeccakHasher, RlpCodec>;

/// Convenience type alias for Keccak/Rlp flavoured trie errors
pub type TrieError = trie::TrieError<H256, DecoderError>;
/// Convenience type alias for Keccak/Rlp flavoured trie results
pub type Result<T> = trie::Result<T, H256, DecoderError>;
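With these aliases, call sites look just like the old Keccak-only API. A minimal sketch (not part of this diff, mirroring the tests and benches elsewhere in this commit):

extern crate ethereum_types;
extern crate hashdb;
extern crate keccak_hasher;
extern crate memorydb;
extern crate patricia_trie_ethereum as ethtrie;

use ethereum_types::H256;
use hashdb::DBValue;
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use ethtrie::trie::{Trie, TrieMut};
use ethtrie::{TrieDB, TrieDBMut};

fn main() {
    // Any HashDB<KeccakHasher> works as a backing store; MemoryDB is the simplest.
    let mut memdb = MemoryDB::<KeccakHasher>::new();
    let mut root = H256::new();
    {
        let mut t = TrieDBMut::new(&mut memdb, &mut root);
        t.insert(b"foo", b"bar").unwrap();
    }
    // `root` now commits to the contents; reopen read-only and query it.
    let t = TrieDB::new(&memdb, &root).unwrap();
    assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar"));
}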
124
util/patricia-trie-ethereum/src/rlp_node_codec.rs
Normal file
@ -0,0 +1,124 @@
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! `NodeCodec` implementation for Rlp

use elastic_array::{ElasticArray1024, ElasticArray128};
use ethereum_types::H256;
use hashdb::Hasher;
use keccak_hasher::KeccakHasher;
use rlp::{DecoderError, RlpStream, Rlp, Prototype};
use std::marker::PhantomData;
use trie::{NibbleSlice, NodeCodec, node::Node, ChildReference};

/// Concrete implementation of a `NodeCodec` with Rlp encoding, generic over the `Hasher`
#[derive(Default, Clone)]
pub struct RlpNodeCodec<H: Hasher> {mark: PhantomData<H>}

// NOTE: what we'd really like here is:
// `impl<H: Hasher> NodeCodec<H> for RlpNodeCodec<H> where H::Out: Decodable`
// but due to the current limitations of Rust const evaluation we can't
// do `const HASHED_NULL_NODE: H::Out = H::Out( … … )`. Perhaps one day soon?
impl NodeCodec<KeccakHasher> for RlpNodeCodec<KeccakHasher> {
    type Error = DecoderError;
    const HASHED_NULL_NODE : H256 = H256( [0x56, 0xe8, 0x1f, 0x17, 0x1b, 0xcc, 0x55, 0xa6, 0xff, 0x83, 0x45, 0xe6, 0x92, 0xc0, 0xf8, 0x6e, 0x5b, 0x48, 0xe0, 0x1b, 0x99, 0x6c, 0xad, 0xc0, 0x01, 0x62, 0x2f, 0xb5, 0xe3, 0x63, 0xb4, 0x21] );
    fn decode(data: &[u8]) -> ::std::result::Result<Node, Self::Error> {
        let r = Rlp::new(data);
        match r.prototype()? {
            // either leaf or extension - decode first item with NibbleSlice::???
            // and use is_leaf return to figure out which.
            // if leaf, second item is a value (is_data())
            // if extension, second item is a node (either SHA3 to be looked up and
            // fed back into this function or inline RLP which can be fed back into this function).
            Prototype::List(2) => match NibbleSlice::from_encoded(r.at(0)?.data()?) {
                (slice, true) => Ok(Node::Leaf(slice, r.at(1)?.data()?)),
                (slice, false) => Ok(Node::Extension(slice, r.at(1)?.as_raw())),
            },
            // branch - first 16 are nodes, 17th is a value (or empty).
            Prototype::List(17) => {
                let mut nodes = [&[] as &[u8]; 16];
                for i in 0..16 {
                    nodes[i] = r.at(i)?.as_raw();
                }
                Ok(Node::Branch(nodes, if r.at(16)?.is_empty() { None } else { Some(r.at(16)?.data()?) }))
            },
            // an empty branch index.
            Prototype::Data(0) => Ok(Node::Empty),
            // something went wrong.
            _ => Err(DecoderError::Custom("Rlp is not valid."))
        }
    }
    fn try_decode_hash(data: &[u8]) -> Option<<KeccakHasher as Hasher>::Out> {
        let r = Rlp::new(data);
        if r.is_data() && r.size() == KeccakHasher::LENGTH {
            Some(r.as_val().expect("Hash is the correct size; qed"))
        } else {
            None
        }
    }
    fn is_empty_node(data: &[u8]) -> bool {
        Rlp::new(data).is_empty()
    }
    fn empty_node() -> ElasticArray1024<u8> {
        let mut stream = RlpStream::new();
        stream.append_empty_data();
        stream.drain()
    }

    fn leaf_node(partial: &[u8], value: &[u8]) -> ElasticArray1024<u8> {
        let mut stream = RlpStream::new_list(2);
        stream.append(&partial);
        stream.append(&value);
        stream.drain()
    }

    fn ext_node(partial: &[u8], child_ref: ChildReference<<KeccakHasher as Hasher>::Out>) -> ElasticArray1024<u8> {
        let mut stream = RlpStream::new_list(2);
        stream.append(&partial);
        match child_ref {
            ChildReference::Hash(h) => stream.append(&h),
            ChildReference::Inline(inline_data, len) => {
                let bytes = &AsRef::<[u8]>::as_ref(&inline_data)[..len];
                stream.append_raw(bytes, 1)
            },
        };
        stream.drain()
    }

    fn branch_node<I>(children: I, value: Option<ElasticArray128<u8>>) -> ElasticArray1024<u8>
    where I: IntoIterator<Item=Option<ChildReference<<KeccakHasher as Hasher>::Out>>>
    {
        let mut stream = RlpStream::new_list(17);
        for child_ref in children {
            match child_ref {
                Some(c) => match c {
                    ChildReference::Hash(h) => stream.append(&h),
                    ChildReference::Inline(inline_data, len) => {
                        let bytes = &AsRef::<[u8]>::as_ref(&inline_data)[..len];
                        stream.append_raw(bytes, 1)
                    },
                },
                None => stream.append_empty_data()
            };
        }
        if let Some(value) = value {
            stream.append(&&*value);
        } else {
            stream.append_empty_data();
        }
        stream.drain()
    }
}
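The hard-coded `HASHED_NULL_NODE` above is the Keccak-256 of the empty RLP item (0x80), the familiar empty-trie root, and should match the constant exported by `keccak-hash`; a quick check (not part of this diff):

extern crate hashdb;
extern crate keccak_hash;
extern crate keccak_hasher;
extern crate rlp;

use hashdb::Hasher;
use keccak_hash::KECCAK_NULL_RLP;
use keccak_hasher::KeccakHasher;
use rlp::NULL_RLP;

fn main() {
    // 0x56e81f17...b421 is keccak256(0x80), i.e. the root hash of an empty trie.
    assert_eq!(KeccakHasher::hash(&NULL_RLP), KECCAK_NULL_RLP);
}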
@ -1,22 +1,24 @@
[package]
name = "patricia-trie"
version = "0.1.0"
version = "0.2.0"
authors = ["Parity Technologies <admin@parity.io>"]
description = "Merkle-Patricia Trie (Ethereum Style)"
description = "Merkle-Patricia Trie generic over key hasher and node encoding"
license = "GPL-3.0"

[dependencies]
elastic-array = "0.10"
log = "0.3"
rand = "0.4"
hashdb = { version = "0.2", path = "../hashdb" }
ethcore-bytes = { version = "0.1.0", path = "../bytes" }
ethereum-types = "0.3"
keccak-hash = { version = "0.1.0", path = "../hash" }
hashdb = { version = "0.1.1", path = "../hashdb" }
rlp = { version = "0.2.1", path = "../rlp" }
triehash = { version = "0.1.0", path = "../triehash" }
memorydb = { version = "0.1.0", path = "../memorydb" }
ethcore-logger = { version = "1.9.0", path = "../../logger" }

[dev-dependencies]
env_logger = "0.5"
ethereum-types = "0.3"
keccak-hash = { version = "0.1.0", path = "../hash" }
keccak-hasher = { path = "../keccak-hasher" }
memorydb = { version = "0.2", path = "../memorydb" }
patricia-trie-ethereum = { path = "../patricia-trie-ethereum" }
rlp = { version = "0.2.1", path = "../rlp" }
trie-standardmap = { path = "../trie-standardmap" }
triehash = { version = "0.1.0", path = "../triehash" }

@ -21,16 +21,21 @@ extern crate ethcore_bytes;
|
||||
extern crate ethereum_types;
|
||||
extern crate memorydb;
|
||||
extern crate patricia_trie as trie;
|
||||
extern crate patricia_trie_ethereum as ethtrie;
|
||||
extern crate keccak_hasher;
|
||||
extern crate keccak_hash;
|
||||
extern crate trie_standardmap;
|
||||
extern crate hashdb;
|
||||
|
||||
use ethcore_bytes::Bytes;
|
||||
use ethereum_types::H256;
|
||||
use keccak_hash::keccak;
|
||||
use memorydb::MemoryDB;
|
||||
use test::{Bencher, black_box};
|
||||
use trie::{TrieDBMut, TrieDB, TrieMut, Trie};
|
||||
use trie::{TrieMut, Trie};
|
||||
use trie_standardmap::{Alphabet, ValueMode, StandardMap};
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use ethtrie::{TrieDB, TrieDBMut};
|
||||
|
||||
fn random_word(alphabet: &[u8], min_count: usize, diff_count: usize, seed: &mut H256) -> Vec<u8> {
|
||||
assert!(min_count + diff_count <= 32);
|
||||
@ -69,7 +74,7 @@ fn trie_insertions_32_mir_1k(b: &mut Bencher) {
|
||||
};
|
||||
let d = st.make();
|
||||
b.iter(&mut ||{
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
@ -87,7 +92,7 @@ fn trie_iter(b: &mut Bencher) {
|
||||
count: 1000,
|
||||
};
|
||||
let d = st.make();
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
@ -116,7 +121,7 @@ fn trie_insertions_32_ran_1k(b: &mut Bencher) {
|
||||
let d = st.make();
|
||||
let mut r = H256::new();
|
||||
b.iter(&mut ||{
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
@ -137,7 +142,7 @@ fn trie_insertions_six_high(b: &mut Bencher) {
|
||||
}
|
||||
|
||||
b.iter(||{
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
@ -157,7 +162,7 @@ fn trie_insertions_six_mid(b: &mut Bencher) {
|
||||
d.push((k, v))
|
||||
}
|
||||
b.iter(||{
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
@ -178,7 +183,7 @@ fn trie_insertions_random_mid(b: &mut Bencher) {
|
||||
}
|
||||
|
||||
b.iter(||{
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
@ -199,7 +204,7 @@ fn trie_insertions_six_low(b: &mut Bencher) {
|
||||
}
|
||||
|
||||
b.iter(||{
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
for i in d.iter() {
|
||||
|
@ -14,66 +14,77 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use ethereum_types::H256;
|
||||
use keccak::keccak;
|
||||
use hashdb::HashDB;
|
||||
use super::{TrieDB, Trie, TrieDBIterator, TrieItem, TrieIterator, Query};
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use super::{Result, TrieDB, Trie, TrieDBIterator, TrieItem, TrieIterator, Query};
|
||||
use node_codec::NodeCodec;
|
||||
|
||||
/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
|
||||
/// Additionally it stores inserted hash-key mappings for later retrieval.
|
||||
///
|
||||
/// Use it as a `Trie` or `TrieMut` trait object.
|
||||
pub struct FatDB<'db> {
|
||||
raw: TrieDB<'db>,
|
||||
pub struct FatDB<'db, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
raw: TrieDB<'db, H, C>,
|
||||
}
|
||||
|
||||
impl<'db> FatDB<'db> {
|
||||
impl<'db, H, C> FatDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and empty `root`
|
||||
/// Initialise to the state entailed by the genesis block.
|
||||
/// This guarantees the trie is built correctly.
|
||||
pub fn new(db: &'db HashDB, root: &'db H256) -> super::Result<Self> {
|
||||
let fatdb = FatDB {
|
||||
raw: TrieDB::new(db, root)?
|
||||
};
|
||||
|
||||
Ok(fatdb)
|
||||
pub fn new(db: &'db HashDB<H>, root: &'db H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(FatDB { raw: TrieDB::new(db, root)? })
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db(&self) -> &HashDB {
|
||||
self.raw.db()
|
||||
}
|
||||
pub fn db(&self) -> &HashDB<H> { self.raw.db() }
|
||||
}
|
||||
|
||||
impl<'db> Trie for FatDB<'db> {
|
||||
fn iter<'a>(&'a self) -> super::Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
|
||||
FatDBIterator::new(&self.raw).map(|iter| Box::new(iter) as Box<_>)
|
||||
impl<'db, H, C> Trie<H, C> for FatDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn root(&self) -> &H::Out { self.raw.root() }
|
||||
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
self.raw.contains(H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn root(&self) -> &H256 {
|
||||
self.raw.root()
|
||||
}
|
||||
|
||||
fn contains(&self, key: &[u8]) -> super::Result<bool> {
|
||||
self.raw.contains(&keccak(key))
|
||||
}
|
||||
|
||||
fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result<Option<Q::Item>>
|
||||
fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
self.raw.get_with(&keccak(key), query)
|
||||
self.raw.get_with(H::hash(key).as_ref(), query)
|
||||
}
|
||||
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item = TrieItem<H::Out, C::Error>> + 'a>, <H as Hasher>::Out, C::Error> {
|
||||
FatDBIterator::<H, C>::new(&self.raw).map(|iter| Box::new(iter) as Box<_>)
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterator over inserted pairs of key values.
|
||||
pub struct FatDBIterator<'db> {
|
||||
trie_iterator: TrieDBIterator<'db>,
|
||||
trie: &'db TrieDB<'db>,
|
||||
pub struct FatDBIterator<'db, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H> + 'db
|
||||
{
|
||||
trie_iterator: TrieDBIterator<'db, H, C>,
|
||||
trie: &'db TrieDB<'db, H, C>,
|
||||
}
|
||||
|
||||
impl<'db> FatDBIterator<'db> {
|
||||
impl<'db, H, C> FatDBIterator<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Creates new iterator.
|
||||
pub fn new(trie: &'db TrieDB) -> super::Result<Self> {
|
||||
pub fn new(trie: &'db TrieDB<H, C>) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(FatDBIterator {
|
||||
trie_iterator: TrieDBIterator::new(trie)?,
|
||||
trie: trie,
|
||||
@ -81,40 +92,56 @@ impl<'db> FatDBIterator<'db> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> TrieIterator for FatDBIterator<'db> {
|
||||
fn seek(&mut self, key: &[u8]) -> super::Result<()> {
|
||||
self.trie_iterator.seek(&keccak(key))
|
||||
impl<'db, H, C> TrieIterator<H, C> for FatDBIterator<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, C::Error> {
|
||||
let hashed_key = H::hash(key);
|
||||
self.trie_iterator.seek(hashed_key.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Iterator for FatDBIterator<'db> {
|
||||
type Item = TrieItem<'db>;
|
||||
impl<'db, H, C> Iterator for FatDBIterator<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
type Item = TrieItem<'db, H::Out, C::Error>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.trie_iterator.next()
|
||||
.map(|res|
|
||||
.map(|res| {
|
||||
res.map(|(hash, value)| {
|
||||
let aux_hash = keccak(hash);
|
||||
let aux_hash = H::hash(&hash);
|
||||
(self.trie.db().get(&aux_hash).expect("Missing fatdb hash").into_vec(), value)
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use memorydb::MemoryDB;
|
||||
use hashdb::DBValue;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use ethtrie::trie::{Trie, TrieMut};
|
||||
use ethtrie::{FatDB, FatDBMut};
|
||||
use ethereum_types::H256;
|
||||
|
||||
#[test]
|
||||
fn fatdb_to_trie() {
|
||||
use memorydb::MemoryDB;
|
||||
use hashdb::DBValue;
|
||||
use super::fatdbmut::FatDBMut;
|
||||
use super::TrieMut;
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut root = H256::default();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = FatDBMut::new(&mut memdb, &mut root);
|
||||
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
|
||||
}
|
||||
let t = FatDB::new(&memdb, &root).unwrap();
|
||||
assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
|
||||
assert_eq!(t.iter().unwrap().map(Result::unwrap).collect::<Vec<_>>(), vec![(vec![0x01u8, 0x23], DBValue::from_slice(&[0x01u8, 0x23] as &[u8]))]);
|
||||
assert_eq!(
|
||||
t.iter().unwrap().map(Result::unwrap).collect::<Vec<_>>(),
|
||||
vec![(vec![0x01u8, 0x23], DBValue::from_slice(&[0x01u8, 0x23] as &[u8]))]);
|
||||
}
|
||||
}
|
||||
|
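The point of the Fat* wrappers is that the key is hashed before it touches the trie, while a hash-to-preimage mapping is kept in the backing `HashDB` so readers can still work with the original keys. A sketch (not part of this diff) that stitches together the two tests in this hunk and the next:

extern crate ethereum_types;
extern crate hashdb;
extern crate keccak_hash;
extern crate keccak_hasher;
extern crate memorydb;
extern crate patricia_trie_ethereum as ethtrie;

use ethereum_types::H256;
use hashdb::DBValue;
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use ethtrie::trie::{Trie, TrieMut};
use ethtrie::{FatDB, FatDBMut, TrieDB};

fn main() {
    let mut memdb = MemoryDB::<KeccakHasher>::new();
    let mut root = H256::new();
    {
        let mut t = FatDBMut::new(&mut memdb, &mut root);
        t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
    }
    // FatDB answers queries for the original, unhashed key...
    let fat = FatDB::new(&memdb, &root).unwrap();
    assert_eq!(fat.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
    // ...while the underlying trie is keyed by the Keccak of that key.
    let raw = TrieDB::new(&memdb, &root).unwrap();
    assert_eq!(raw.get(&keccak_hash::keccak(&[0x01u8, 0x23])).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
}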
@ -14,105 +14,116 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use ethereum_types::H256;
|
||||
use keccak::keccak;
|
||||
use hashdb::{HashDB, DBValue};
|
||||
use super::{TrieDBMut, TrieMut};
|
||||
use hashdb::{HashDB, DBValue, Hasher};
|
||||
use super::{Result, TrieDBMut, TrieMut};
|
||||
use node_codec::NodeCodec;
|
||||
|
||||
/// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
|
||||
/// Additionally it stores inserted hash-key mappings for later retrieval.
|
||||
///
|
||||
/// Use it as a `Trie` or `TrieMut` trait object.
|
||||
pub struct FatDBMut<'db> {
|
||||
raw: TrieDBMut<'db>,
|
||||
pub struct FatDBMut<'db, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
raw: TrieDBMut<'db, H, C>,
|
||||
}
|
||||
|
||||
impl<'db> FatDBMut<'db> {
|
||||
impl<'db, H, C> FatDBMut<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and empty `root`
|
||||
/// Initialise to the state entailed by the genesis block.
|
||||
/// This guarantees the trie is built correctly.
|
||||
pub fn new(db: &'db mut HashDB, root: &'db mut H256) -> Self {
|
||||
pub fn new(db: &'db mut HashDB<H>, root: &'db mut H::Out) -> Self {
|
||||
FatDBMut { raw: TrieDBMut::new(db, root) }
|
||||
}
|
||||
|
||||
/// Create a new trie with the backing database `db` and `root`.
|
||||
///
|
||||
/// Returns an error if root does not exist.
|
||||
pub fn from_existing(db: &'db mut HashDB, root: &'db mut H256) -> super::Result<Self> {
|
||||
pub fn from_existing(db: &'db mut HashDB<H>, root: &'db mut H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(FatDBMut { raw: TrieDBMut::from_existing(db, root)? })
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db(&self) -> &HashDB {
|
||||
pub fn db(&self) -> &HashDB<H> {
|
||||
self.raw.db()
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db_mut(&mut self) -> &mut HashDB {
|
||||
pub fn db_mut(&mut self) -> &mut HashDB<H> {
|
||||
self.raw.db_mut()
|
||||
}
|
||||
|
||||
fn to_aux_key(key: &[u8]) -> H256 {
|
||||
keccak(key)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> TrieMut for FatDBMut<'db> {
|
||||
fn root(&mut self) -> &H256 {
|
||||
self.raw.root()
|
||||
impl<'db, H, C> TrieMut<H, C> for FatDBMut<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn root(&mut self) -> &H::Out { self.raw.root() }
|
||||
|
||||
fn is_empty(&self) -> bool { self.raw.is_empty() }
|
||||
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
self.raw.contains(H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn is_empty(&self) -> bool {
|
||||
self.raw.is_empty()
|
||||
}
|
||||
|
||||
fn contains(&self, key: &[u8]) -> super::Result<bool> {
|
||||
self.raw.contains(&keccak(key))
|
||||
}
|
||||
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<DBValue>>
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
self.raw.get(&keccak(key))
|
||||
self.raw.get(H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn insert(&mut self, key: &[u8], value: &[u8]) -> super::Result<Option<DBValue>> {
|
||||
let hash = keccak(key);
|
||||
let out = self.raw.insert(&hash, value)?;
|
||||
fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
|
||||
let hash = H::hash(key);
|
||||
let out = self.raw.insert(hash.as_ref(), value)?;
|
||||
let db = self.raw.db_mut();
|
||||
|
||||
// don't insert if it doesn't exist.
|
||||
if out.is_none() {
|
||||
db.emplace(Self::to_aux_key(&hash), DBValue::from_slice(key));
|
||||
let aux_hash = H::hash(hash.as_ref());
|
||||
db.emplace(aux_hash, DBValue::from_slice(key));
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
fn remove(&mut self, key: &[u8]) -> super::Result<Option<DBValue>> {
|
||||
let hash = keccak(key);
|
||||
let out = self.raw.remove(&hash)?;
|
||||
fn remove(&mut self, key: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
|
||||
let hash = H::hash(key);
|
||||
let out = self.raw.remove(hash.as_ref())?;
|
||||
|
||||
// don't remove if it already exists.
|
||||
if out.is_some() {
|
||||
self.raw.db_mut().remove(&Self::to_aux_key(&hash));
|
||||
self.raw.db_mut().remove(&hash);
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fatdb_to_trie() {
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use hashdb::DBValue;
|
||||
use memorydb::MemoryDB;
|
||||
use super::TrieDB;
|
||||
use super::Trie;
|
||||
use ethtrie::trie::{Trie, TrieMut};
|
||||
use ethtrie::{TrieDB, FatDBMut};
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use keccak;
|
||||
use ethereum_types::H256;
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut root = H256::default();
|
||||
#[test]
|
||||
fn fatdbmut_to_trie() {
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = FatDBMut::new(&mut memdb, &mut root);
|
||||
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
|
||||
}
|
||||
let t = TrieDB::new(&memdb, &root).unwrap();
|
||||
assert_eq!(t.get(&keccak(&[0x01u8, 0x23])).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
|
||||
assert_eq!(t.get(&keccak::keccak(&[0x01u8, 0x23])).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
|
||||
}
|
||||
}
|
@ -15,26 +15,35 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Trie interface and implementation.
|
||||
extern crate rand;
|
||||
extern crate ethereum_types;
|
||||
extern crate keccak_hash as keccak;
|
||||
extern crate rlp;
|
||||
extern crate hashdb;
|
||||
extern crate ethcore_bytes as bytes;
|
||||
extern crate elastic_array;
|
||||
extern crate memorydb;
|
||||
extern crate ethcore_logger;
|
||||
|
||||
#[cfg(test)]
|
||||
extern crate trie_standardmap as standardmap;
|
||||
|
||||
extern crate ethcore_bytes as bytes;
|
||||
extern crate hashdb;
|
||||
extern crate rand;
|
||||
#[macro_use]
|
||||
extern crate log;
|
||||
|
||||
#[cfg(test)]
|
||||
extern crate env_logger;
|
||||
#[cfg(test)]
|
||||
extern crate ethereum_types;
|
||||
#[cfg(test)]
|
||||
extern crate trie_standardmap as standardmap;
|
||||
#[cfg(test)]
|
||||
extern crate patricia_trie_ethereum as ethtrie;
|
||||
#[cfg(test)]
|
||||
extern crate memorydb;
|
||||
#[cfg(test)]
|
||||
extern crate rlp;
|
||||
#[cfg(test)]
|
||||
extern crate keccak_hash as keccak;
|
||||
#[cfg(test)]
|
||||
extern crate keccak_hasher;
|
||||
#[cfg(test)]
|
||||
extern crate triehash;
|
||||
|
||||
use std::{fmt, error};
|
||||
use ethereum_types::H256;
|
||||
use keccak::KECCAK_NULL_RLP;
|
||||
use hashdb::{HashDB, DBValue};
|
||||
use hashdb::{HashDB, DBValue, Hasher};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
pub mod node;
|
||||
pub mod triedb;
|
||||
@ -46,158 +55,154 @@ pub mod recorder;
|
||||
mod fatdb;
|
||||
mod fatdbmut;
|
||||
mod lookup;
|
||||
mod nibbleslice;
|
||||
mod nibblevec;
|
||||
mod nibbleslice;
|
||||
mod node_codec;
|
||||
|
||||
pub use self::triedbmut::TrieDBMut;
|
||||
pub use self::triedb::{TrieDB, TrieDBIterator};
|
||||
pub use self::triedbmut::{TrieDBMut, ChildReference};
|
||||
pub use self::sectriedbmut::SecTrieDBMut;
|
||||
pub use self::sectriedb::SecTrieDB;
|
||||
pub use self::fatdb::{FatDB, FatDBIterator};
|
||||
pub use self::fatdbmut::FatDBMut;
|
||||
pub use self::recorder::Recorder;
|
||||
pub use self::lookup::Lookup;
|
||||
pub use self::nibbleslice::NibbleSlice;
|
||||
pub use node_codec::NodeCodec;
|
||||
|
||||
/// Trie Errors.
|
||||
///
|
||||
/// These borrow the data within them to avoid excessive copying on every
|
||||
/// trie operation.
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
pub enum TrieError {
|
||||
pub enum TrieError<T, E> {
|
||||
/// Attempted to create a trie with a state root not in the DB.
|
||||
InvalidStateRoot(H256),
|
||||
InvalidStateRoot(T),
|
||||
/// Trie item not found in the database,
|
||||
IncompleteDatabase(H256),
|
||||
IncompleteDatabase(T),
|
||||
/// Corrupt Trie item
|
||||
DecoderError(rlp::DecoderError),
|
||||
DecoderError(T, E),
|
||||
}
|
||||
|
||||
impl fmt::Display for TrieError {
|
||||
impl<T, E> fmt::Display for TrieError<T, E> where T: std::fmt::Debug, E: std::fmt::Debug {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
TrieError::InvalidStateRoot(ref root) => write!(f, "Invalid state root: {}", root),
|
||||
TrieError::IncompleteDatabase(ref missing) =>
|
||||
write!(f, "Database missing expected key: {}", missing),
|
||||
TrieError::DecoderError(ref err) => write!(f, "Decoding failed with {}", err),
|
||||
TrieError::InvalidStateRoot(ref root) => write!(f, "Invalid state root: {:?}", root),
|
||||
TrieError::IncompleteDatabase(ref missing) => write!(f, "Database missing expected key: {:?}", missing),
|
||||
TrieError::DecoderError(ref hash, ref decoder_err) => write!(f, "Decoding failed for hash {:?}; err: {:?}", hash, decoder_err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl error::Error for TrieError {
|
||||
impl<T, E> error::Error for TrieError<T, E> where T: std::fmt::Debug, E: std::error::Error {
|
||||
fn description(&self) -> &str {
|
||||
match *self {
|
||||
TrieError::InvalidStateRoot(_) => "Invalid state root",
|
||||
TrieError::IncompleteDatabase(_) => "Incomplete database",
|
||||
TrieError::DecoderError(ref e) => e.description(),
|
||||
TrieError::DecoderError(_, ref err) => err.description(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<rlp::DecoderError> for Box<TrieError> {
|
||||
fn from(e: rlp::DecoderError) -> Self { Box::new(TrieError::DecoderError(e)) }
|
||||
}
|
||||
/// Trie result type. Boxed to avoid copying around extra space for the `Hasher`s `Out` on successful queries.
|
||||
pub type Result<T, H, E> = ::std::result::Result<T, Box<TrieError<H, E>>>;
|
||||
|
||||
/// Trie result type. Boxed to avoid copying around extra space for `H256`s on successful queries.
|
||||
pub type Result<T> = ::std::result::Result<T, Box<TrieError>>;
|
||||
|
||||
/// Trie-Item type.
|
||||
pub type TrieItem<'a> = Result<(Vec<u8>, DBValue)>;
|
||||
/// Trie-Item type used for iterators over trie data.
|
||||
pub type TrieItem<'a, U, E> = Result<(Vec<u8>, DBValue), U, E>;
|
||||
|
||||
/// Description of what kind of query will be made to the trie.
|
||||
///
|
||||
/// This is implemented for any &mut recorder (where the query will return
|
||||
/// a DBValue), any function taking raw bytes (where no recording will be made),
|
||||
/// or any tuple of (&mut Recorder, FnOnce(&[u8]))
|
||||
pub trait Query {
|
||||
pub trait Query<H: Hasher> {
|
||||
/// Output item.
|
||||
type Item;
|
||||
|
||||
/// Decode a byte-slice into the desired item.
|
||||
fn decode(self, &[u8]) -> Self::Item;
|
||||
fn decode(self, data: &[u8]) -> Self::Item;
|
||||
|
||||
/// Record that a node has been passed through.
|
||||
fn record(&mut self, &H256, &[u8], u32) { }
|
||||
fn record(&mut self, _hash: &H::Out, _data: &[u8], _depth: u32) {}
|
||||
}
|
||||
|
||||
impl<'a> Query for &'a mut Recorder {
|
||||
impl<'a, H: Hasher> Query<H> for &'a mut Recorder<H::Out> {
|
||||
type Item = DBValue;
|
||||
|
||||
fn decode(self, value: &[u8]) -> DBValue { DBValue::from_slice(value) }
|
||||
fn record(&mut self, hash: &H256, data: &[u8], depth: u32) {
|
||||
fn record(&mut self, hash: &H::Out, data: &[u8], depth: u32) {
|
||||
(&mut **self).record(hash, data, depth);
|
||||
}
|
||||
}
|
||||
|
||||
impl<F, T> Query for F where F: for<'a> FnOnce(&'a [u8]) -> T {
|
||||
impl<F, T, H: Hasher> Query<H> for F where F: for<'a> FnOnce(&'a [u8]) -> T {
|
||||
type Item = T;
|
||||
|
||||
fn decode(self, value: &[u8]) -> T { (self)(value) }
|
||||
}
|
||||
|
||||
impl<'a, F, T> Query for (&'a mut Recorder, F) where F: FnOnce(&[u8]) -> T {
|
||||
impl<'a, F, T, H: Hasher> Query<H> for (&'a mut Recorder<H::Out>, F) where F: FnOnce(&[u8]) -> T {
|
||||
type Item = T;
|
||||
|
||||
fn decode(self, value: &[u8]) -> T { (self.1)(value) }
|
||||
fn record(&mut self, hash: &H256, data: &[u8], depth: u32) {
|
||||
fn record(&mut self, hash: &H::Out, data: &[u8], depth: u32) {
|
||||
self.0.record(hash, data, depth)
|
||||
}
|
||||
}
|
||||
|
||||
/// A key-value datastore implemented as a database-backed modified Merkle tree.
|
||||
pub trait Trie {
|
||||
pub trait Trie<H: Hasher, C: NodeCodec<H>> {
|
||||
/// Return the root of the trie.
|
||||
fn root(&self) -> &H256;
|
||||
fn root(&self) -> &H::Out;
|
||||
|
||||
/// Is the trie empty?
|
||||
fn is_empty(&self) -> bool { *self.root() == KECCAK_NULL_RLP }
|
||||
fn is_empty(&self) -> bool { *self.root() == C::HASHED_NULL_NODE }
|
||||
|
||||
/// Does the trie contain a given key?
|
||||
fn contains(&self, key: &[u8]) -> Result<bool> {
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
self.get(key).map(|x|x.is_some() )
|
||||
}
|
||||
|
||||
/// What is the value of the given key in this trie?
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>> where 'a: 'key {
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error> where 'a: 'key {
|
||||
self.get_with(key, DBValue::from_slice)
|
||||
}
|
||||
|
||||
/// Search for the key with the given query parameter. See the docs of the `Query`
|
||||
/// trait for more details.
|
||||
fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q)
|
||||
-> Result<Option<Q::Item>> where 'a: 'key;
|
||||
fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error> where 'a: 'key;
|
||||
|
||||
/// Returns a depth-first iterator over the elements of trie.
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<Item = TrieItem> + 'a>>;
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item = TrieItem<H::Out, C::Error >> + 'a>, H::Out, C::Error>;
|
||||
}
|
||||
|
||||
/// A key-value datastore implemented as a database-backed modified Merkle tree.
|
||||
pub trait TrieMut {
|
||||
pub trait TrieMut<H: Hasher, C: NodeCodec<H>> {
|
||||
/// Return the root of the trie.
|
||||
fn root(&mut self) -> &H256;
|
||||
fn root(&mut self) -> &H::Out;
|
||||
|
||||
/// Is the trie empty?
|
||||
fn is_empty(&self) -> bool;
|
||||
|
||||
/// Does the trie contain a given key?
|
||||
fn contains(&self, key: &[u8]) -> Result<bool> {
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
self.get(key).map(|x| x.is_some())
|
||||
}
|
||||
|
||||
/// What is the value of the given key in this trie?
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>> where 'a: 'key;
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error> where 'a: 'key;
|
||||
|
||||
/// Insert a `key`/`value` pair into the trie. An empty value is equivalent to removing
|
||||
/// `key` from the trie. Returns the old value associated with this key, if it existed.
|
||||
fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<Option<DBValue>>;
|
||||
fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error>;
|
||||
|
||||
/// Remove a `key` from the trie. Equivalent to making it equal to the empty
|
||||
/// value. Returns the old value associated with this key, if it existed.
|
||||
fn remove(&mut self, key: &[u8]) -> Result<Option<DBValue>>;
|
||||
fn remove(&mut self, key: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error>;
|
||||
}
|
||||
|
||||
/// A trie iterator that also supports random access.
|
||||
pub trait TrieIterator : Iterator {
|
||||
/// A trie iterator that also supports random access (`seek()`).
|
||||
pub trait TrieIterator<H: Hasher, C: NodeCodec<H>>: Iterator {
|
||||
/// Position the iterator on the first element with key > `key`
|
||||
fn seek(&mut self, key: &[u8]) -> Result<()>;
|
||||
fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, <C as NodeCodec<H>>::Error>;
|
||||
}
|
||||
|
||||
/// Trie types
|
||||
@ -219,19 +224,21 @@ impl Default for TrieSpec {
|
||||
|
||||
/// Trie factory.
|
||||
#[derive(Default, Clone)]
|
||||
pub struct TrieFactory {
|
||||
pub struct TrieFactory<H: Hasher, C: NodeCodec<H>> {
|
||||
spec: TrieSpec,
|
||||
mark_hash: PhantomData<H>,
|
||||
mark_codec: PhantomData<C>,
|
||||
}
|
||||
|
||||
/// All different kinds of tries.
|
||||
/// This is used to prevent a heap allocation for every created trie.
|
||||
pub enum TrieKinds<'db> {
|
||||
pub enum TrieKinds<'db, H: Hasher + 'db, C: NodeCodec<H>> {
|
||||
/// A generic trie db.
|
||||
Generic(TrieDB<'db>),
|
||||
Generic(TrieDB<'db, H, C>),
|
||||
/// A secure trie db.
|
||||
Secure(SecTrieDB<'db>),
|
||||
Secure(SecTrieDB<'db, H, C>),
|
||||
/// A fat trie db.
|
||||
Fat(FatDB<'db>),
|
||||
Fat(FatDB<'db, H, C>),
|
||||
}
|
||||
|
||||
// wrapper macro for making the match easier to deal with.
|
||||
@ -245,8 +252,8 @@ macro_rules! wrapper {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Trie for TrieKinds<'db> {
|
||||
fn root(&self) -> &H256 {
|
||||
impl<'db, H: Hasher, C: NodeCodec<H>> Trie<H, C> for TrieKinds<'db, H, C> {
|
||||
fn root(&self) -> &H::Out {
|
||||
wrapper!(self, root,)
|
||||
}
|
||||
|
||||
@ -254,31 +261,33 @@ impl<'db> Trie for TrieKinds<'db> {
|
||||
wrapper!(self, is_empty,)
|
||||
}
|
||||
|
||||
fn contains(&self, key: &[u8]) -> Result<bool> {
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
wrapper!(self, contains, key)
|
||||
}
|
||||
|
||||
fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>>
|
||||
fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
wrapper!(self, get_with, key, query)
|
||||
}
|
||||
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item = TrieItem<H::Out, C::Error>> + 'a>, H::Out, C::Error> {
|
||||
wrapper!(self, iter,)
|
||||
}
|
||||
}
|
||||
|
||||
impl TrieFactory {
|
||||
impl<'db, H, C> TrieFactory<H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H> + 'db
|
||||
{
|
||||
/// Creates new factory.
|
||||
pub fn new(spec: TrieSpec) -> Self {
|
||||
TrieFactory {
|
||||
spec: spec,
|
||||
}
|
||||
TrieFactory { spec, mark_hash: PhantomData, mark_codec: PhantomData }
|
||||
}
|
||||
|
||||
/// Create new immutable instance of Trie.
|
||||
pub fn readonly<'db>(&self, db: &'db HashDB, root: &'db H256) -> Result<TrieKinds<'db>> {
|
||||
pub fn readonly(&self, db: &'db HashDB<H>, root: &'db H::Out) -> Result<TrieKinds<'db, H, C>, H::Out, <C as NodeCodec<H>>::Error> {
|
||||
match self.spec {
|
||||
TrieSpec::Generic => Ok(TrieKinds::Generic(TrieDB::new(db, root)?)),
|
||||
TrieSpec::Secure => Ok(TrieKinds::Secure(SecTrieDB::new(db, root)?)),
|
||||
@ -287,20 +296,20 @@ impl TrieFactory {
|
||||
}
|
||||
|
||||
/// Create new mutable instance of Trie.
|
||||
pub fn create<'db>(&self, db: &'db mut HashDB, root: &'db mut H256) -> Box<TrieMut + 'db> {
|
||||
pub fn create(&self, db: &'db mut HashDB<H>, root: &'db mut H::Out) -> Box<TrieMut<H, C> + 'db> {
|
||||
match self.spec {
|
||||
TrieSpec::Generic => Box::new(TrieDBMut::new(db, root)),
|
||||
TrieSpec::Secure => Box::new(SecTrieDBMut::new(db, root)),
|
||||
TrieSpec::Fat => Box::new(FatDBMut::new(db, root)),
|
||||
TrieSpec::Generic => Box::new(TrieDBMut::<_, C>::new(db, root)),
|
||||
TrieSpec::Secure => Box::new(SecTrieDBMut::<_, C>::new(db, root)),
|
||||
TrieSpec::Fat => Box::new(FatDBMut::<_, C>::new(db, root)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create new mutable instance of trie and check for errors.
|
||||
pub fn from_existing<'db>(&self, db: &'db mut HashDB, root: &'db mut H256) -> Result<Box<TrieMut + 'db>> {
|
||||
pub fn from_existing(&self, db: &'db mut HashDB<H>, root: &'db mut H::Out) -> Result<Box<TrieMut<H,C> + 'db>, H::Out, <C as NodeCodec<H>>::Error> {
|
||||
match self.spec {
|
||||
TrieSpec::Generic => Ok(Box::new(TrieDBMut::from_existing(db, root)?)),
|
||||
TrieSpec::Secure => Ok(Box::new(SecTrieDBMut::from_existing(db, root)?)),
|
||||
TrieSpec::Fat => Ok(Box::new(FatDBMut::from_existing(db, root)?)),
|
||||
TrieSpec::Generic => Ok(Box::new(TrieDBMut::<_, C>::from_existing(db, root)?)),
|
||||
TrieSpec::Secure => Ok(Box::new(SecTrieDBMut::<_, C>::from_existing(db, root)?)),
|
||||
TrieSpec::Fat => Ok(Box::new(FatDBMut::<_, C>::from_existing(db, root)?)),
|
||||
}
|
||||
}
|
||||
|
||||
|
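The reworked `Trie`/`TrieMut` traits are generic over the hasher and the node codec, the `Result` alias carries `H::Out` and `C::Error`, and `Query` is implemented for plain closures. A sketch of how this surfaces at a call site (not part of this diff; it assumes, per the variant's doc comment, that opening a trie with a root not present in the DB yields `TrieError::InvalidStateRoot`):

extern crate ethereum_types;
extern crate keccak_hasher;
extern crate memorydb;
extern crate patricia_trie_ethereum as ethtrie;

use ethereum_types::H256;
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use ethtrie::trie::{Trie, TrieMut, TrieError};
use ethtrie::{TrieDB, TrieDBMut};

fn main() {
    let mut memdb = MemoryDB::<KeccakHasher>::new();
    let mut root = H256::new();
    {
        let mut t = TrieDBMut::new(&mut memdb, &mut root);
        t.insert(b"key", b"value").unwrap();
    }
    let t = TrieDB::new(&memdb, &root).unwrap();

    // Closures implement `Query`, so a value can be transformed in place instead of copied into a DBValue.
    let len = t.get_with(b"key", |value: &[u8]| value.len()).unwrap();
    assert_eq!(len, Some(5));

    // Errors are now parameterised by the hasher output and the codec error type.
    let missing_root = H256::new();
    match TrieDB::new(&memdb, &missing_root) {
        Err(e) => match *e {
            TrieError::InvalidStateRoot(root) => assert_eq!(root, missing_root),
            _ => panic!("unexpected error kind"),
        },
        Ok(_) => panic!("expected an error for an unknown root"),
    }
}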
@ -16,27 +16,33 @@
|
||||
|
||||
//! Trie lookup via HashDB.
|
||||
|
||||
use hashdb::HashDB;
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use nibbleslice::NibbleSlice;
|
||||
use ethereum_types::H256;
|
||||
|
||||
use super::{TrieError, Query};
|
||||
use super::node::Node;
|
||||
use node::Node;
|
||||
use node_codec::NodeCodec;
|
||||
use super::{Result, TrieError, Query};
|
||||
use std::marker::PhantomData;
|
||||
|
||||
/// Trie lookup helper object.
|
||||
pub struct Lookup<'a, Q: Query> {
|
||||
pub struct Lookup<'a, H: Hasher + 'a, C: NodeCodec<H>, Q: Query<H>> {
|
||||
/// database to query from.
|
||||
pub db: &'a HashDB,
|
||||
pub db: &'a HashDB<H>,
|
||||
/// Query object to record nodes and transform data.
|
||||
pub query: Q,
|
||||
/// Hash to start at
|
||||
pub hash: H256,
|
||||
pub hash: H::Out,
|
||||
pub marker: PhantomData<C>, // TODO: probably not needed when all is said and done? When Query is made generic?
|
||||
}
|
||||
|
||||
impl<'a, Q: Query> Lookup<'a, Q> {
|
||||
impl<'a, H, C, Q> Lookup<'a, H, C, Q>
|
||||
where
|
||||
H: Hasher + 'a,
|
||||
C: NodeCodec<H> + 'a,
|
||||
Q: Query<H>,
|
||||
{
|
||||
/// Look up the given key. If the value is found, it will be passed to the given
|
||||
/// function to decode or copy.
|
||||
pub fn look_up(mut self, mut key: NibbleSlice) -> super::Result<Option<Q::Item>> {
|
||||
pub fn look_up(mut self, mut key: NibbleSlice) -> Result<Option<Q::Item>, H::Out, C::Error> {
|
||||
let mut hash = self.hash;
|
||||
|
||||
// this loop iterates through non-inline nodes.
|
||||
@ -55,7 +61,13 @@ impl<'a, Q: Query> Lookup<'a, Q> {
|
||||
// without incrementing the depth.
|
||||
let mut node_data = &node_data[..];
|
||||
loop {
|
||||
match Node::decoded(node_data)? {
|
||||
let decoded = match C::decode(node_data) {
|
||||
Ok(node) => node,
|
||||
Err(e) => {
|
||||
return Err(Box::new(TrieError::DecoderError(hash, e)))
|
||||
}
|
||||
};
|
||||
match decoded {
|
||||
Node::Leaf(slice, value) => {
|
||||
return Ok(match slice == key {
|
||||
true => Some(self.query.decode(value)),
|
||||
@ -81,7 +93,7 @@ impl<'a, Q: Query> Lookup<'a, Q> {
|
||||
}
|
||||
|
||||
// check if new node data is inline or hash.
|
||||
if let Some(h) = Node::try_decode_hash(&node_data) {
|
||||
if let Some(h) = C::try_decode_hash(&node_data) {
|
||||
hash = h;
|
||||
break
|
||||
}
|
||||
|
@ -105,9 +105,11 @@ impl<'a> NibbleSlice<'a> {
|
||||
pub fn is_empty(&self) -> bool { self.len() == 0 }
|
||||
|
||||
/// Get the length (in nibbles, naturally) of this slice.
|
||||
#[inline]
|
||||
pub fn len(&self) -> usize { (self.data.len() + self.data_encode_suffix.len()) * 2 - self.offset - self.offset_encode_suffix }
|
||||
|
||||
/// Get the nibble at position `i`.
|
||||
#[inline(always)]
|
||||
pub fn at(&self, i: usize) -> u8 {
|
||||
let l = self.data.len() * 2 - self.offset;
|
||||
if i < l {
|
||||
@ -154,6 +156,7 @@ impl<'a> NibbleSlice<'a> {
|
||||
}
|
||||
|
||||
/// Encode the whole nibble slice in prefixed hex notation, noting whether it `is_leaf`.
|
||||
#[inline]
|
||||
pub fn encoded(&self, is_leaf: bool) -> ElasticArray36<u8> {
|
||||
let l = self.len();
|
||||
let mut r = ElasticArray36::new();
|
||||
|
@ -41,12 +41,14 @@ impl NibbleVec {
|
||||
}
|
||||
|
||||
/// Length of the `NibbleVec`
|
||||
#[inline(always)]
|
||||
pub fn len(&self) -> usize { self.len }
|
||||
|
||||
/// Returns true if `NibbleVec` has zero length
|
||||
pub fn is_empty(&self) -> bool { self.len == 0 }
|
||||
|
||||
/// Try to get the nibble at the given offset.
|
||||
#[inline]
|
||||
pub fn at(&self, idx: usize) -> u8 {
|
||||
if idx % 2 == 0 {
|
||||
self.inner[idx / 2] >> 4
|
||||
|
@ -14,12 +14,9 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use ethereum_types::H256;
|
||||
use elastic_array::ElasticArray36;
|
||||
use nibbleslice::NibbleSlice;
|
||||
use nibblevec::NibbleVec;
|
||||
use bytes::*;
|
||||
use rlp::{Rlp, RlpStream, Prototype, DecoderError};
|
||||
use hashdb::DBValue;
|
||||
|
||||
/// Partial node key type.
|
||||
@ -35,83 +32,7 @@ pub enum Node<'a> {
|
||||
/// Extension node; has key slice and node data. Data may not be null.
|
||||
Extension(NibbleSlice<'a>, &'a [u8]),
|
||||
/// Branch node; has array of 16 child nodes (each possibly null) and an optional immediate node data.
|
||||
Branch([&'a [u8]; 16], Option<&'a [u8]>)
|
||||
}
|
||||
|
||||
impl<'a> Node<'a> {
|
||||
/// Decode the `node_rlp` and return the Node.
|
||||
pub fn decoded(node_rlp: &'a [u8]) -> Result<Self, DecoderError> {
|
||||
let r = Rlp::new(node_rlp);
|
||||
match r.prototype()? {
|
||||
// either leaf or extension - decode first item with NibbleSlice::???
|
||||
// and use is_leaf return to figure out which.
|
||||
// if leaf, second item is a value (is_data())
|
||||
// if extension, second item is a node (either SHA3 to be looked up and
|
||||
// fed back into this function or inline RLP which can be fed back into this function).
|
||||
Prototype::List(2) => match NibbleSlice::from_encoded(r.at(0)?.data()?) {
|
||||
(slice, true) => Ok(Node::Leaf(slice, r.at(1)?.data()?)),
|
||||
(slice, false) => Ok(Node::Extension(slice, r.at(1)?.as_raw())),
|
||||
},
|
||||
// branch - first 16 are nodes, 17th is a value (or empty).
|
||||
Prototype::List(17) => {
|
||||
let mut nodes = [&[] as &[u8]; 16];
|
||||
for i in 0..16 {
|
||||
nodes[i] = r.at(i)?.as_raw();
|
||||
}
|
||||
Ok(Node::Branch(nodes, if r.at(16)?.is_empty() { None } else { Some(r.at(16)?.data()?) }))
|
||||
},
|
||||
// an empty branch index.
|
||||
Prototype::Data(0) => Ok(Node::Empty),
|
||||
// something went wrong.
|
||||
_ => Err(DecoderError::Custom("Rlp is not valid."))
|
||||
}
|
||||
}
|
||||
|
||||
/// Encode the node into RLP.
|
||||
///
|
||||
/// Will always return the direct node RLP even if it's 32 or more bytes. To get the
|
||||
/// RLP which would be valid for using in another node, use `encoded_and_added()`.
|
||||
pub fn encoded(&self) -> Bytes {
|
||||
match *self {
|
||||
Node::Leaf(ref slice, ref value) => {
|
||||
let mut stream = RlpStream::new_list(2);
|
||||
stream.append(&&*slice.encoded(true));
|
||||
stream.append(value);
|
||||
stream.out()
|
||||
},
|
||||
Node::Extension(ref slice, ref raw_rlp) => {
|
||||
let mut stream = RlpStream::new_list(2);
|
||||
stream.append(&&*slice.encoded(false));
|
||||
stream.append_raw(raw_rlp, 1);
|
||||
stream.out()
|
||||
},
|
||||
Node::Branch(ref nodes, ref value) => {
|
||||
let mut stream = RlpStream::new_list(17);
|
||||
for i in 0..16 {
|
||||
stream.append_raw(nodes[i], 1);
|
||||
}
|
||||
match *value {
|
||||
Some(ref n) => { stream.append(n); },
|
||||
None => { stream.append_empty_data(); },
|
||||
}
|
||||
stream.out()
|
||||
},
|
||||
Node::Empty => {
|
||||
let mut stream = RlpStream::new();
|
||||
stream.append_empty_data();
|
||||
stream.out()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn try_decode_hash(node_data: &[u8]) -> Option<H256> {
|
||||
let r = Rlp::new(node_data);
|
||||
if r.is_data() && r.size() == 32 {
|
||||
Some(r.as_val().expect("Hash is the correct size of 32 bytes; qed"))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Branch([&'a [u8]; 16], Option<&'a [u8]>),
|
||||
}
|
||||
|
||||
/// An owning node type. Useful for trie iterators.
|
||||
|
55
util/patricia_trie/src/node_codec.rs
Normal file
@ -0,0 +1,55 @@
|
||||
// Copyright 2015-2018 Parity Technologies (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
//! Generic trait for trie node encoding/decoding. Takes a `hashdb::Hasher`
|
||||
//! to parametrize the hashes used in the codec.
|
||||
|
||||
use hashdb::Hasher;
|
||||
use node::Node;
|
||||
use ChildReference;
|
||||
|
||||
use elastic_array::{ElasticArray1024, ElasticArray128};
|
||||
|
||||
/// Trait for trie node encoding/decoding
|
||||
pub trait NodeCodec<H: Hasher>: Sized {
|
||||
/// Encoding error type
|
||||
type Error: ::std::error::Error;
|
||||
|
||||
/// Null node type
|
||||
const HASHED_NULL_NODE: H::Out;
|
||||
|
||||
/// Decode bytes to a `Node`. Returns `Self::Error` on failure.
|
||||
fn decode(data: &[u8]) -> Result<Node, Self::Error>;
|
||||
|
||||
/// Decode bytes to the `Hasher`s output type. Returns `None` on failure.
|
||||
fn try_decode_hash(data: &[u8]) -> Option<H::Out>;
|
||||
|
||||
/// Check if the provided bytes correspond to the codec's "empty" node.
|
||||
fn is_empty_node(data: &[u8]) -> bool;
|
||||
|
||||
/// Returns an empty node
|
||||
fn empty_node() -> ElasticArray1024<u8>;
|
||||
|
||||
/// Returns an encoded leaf node
|
||||
fn leaf_node(partial: &[u8], value: &[u8]) -> ElasticArray1024<u8>;
|
||||
|
||||
/// Returns an encoded extension node
|
||||
fn ext_node(partial: &[u8], child_ref: ChildReference<H::Out>) -> ElasticArray1024<u8>;
|
||||
|
||||
/// Returns an encoded branch node. Takes an iterator yielding `ChildReference<H::Out>` and an optional value
|
||||
fn branch_node<I>(children: I, value: Option<ElasticArray128<u8>>) -> ElasticArray1024<u8>
|
||||
where I: IntoIterator<Item=Option<ChildReference<H::Out>>>;
|
||||
}
|
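A hedged sketch of how the generic trie code consumes this trait: bytes fetched from the `HashDB` are decoded with `C::decode`, and a codec failure is wrapped in `TrieError::DecoderError` together with the hash of the offending node, as the `Lookup` hunk earlier in this diff does. The free function, its name, and the module paths in the `use` lines are assumptions for illustration; the crate's own code uses its `Result` alias rather than the spelled-out `std` type.

```rust
extern crate hashdb;
extern crate patricia_trie;

use hashdb::Hasher;
use patricia_trie::{node::Node, node_codec::NodeCodec, TrieError}; // paths assumed

// Not code from this commit: a condensed restatement of the decode-and-wrap
// pattern used by `Lookup::look_up` and the `TrieDB` iterator.
fn decode_node_or_error<'a, H, C>(
    hash: H::Out,
    bytes: &'a [u8],
) -> Result<Node<'a>, Box<TrieError<H::Out, C::Error>>>
where
    H: Hasher,
    C: NodeCodec<H>,
{
    // A decode failure carries the hash of the node whose bytes were bad, so
    // callers can report exactly where the backing database is corrupt.
    C::decode(bytes).map_err(|e| Box::new(TrieError::DecoderError(hash, e)))
}
```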
@ -16,13 +16,11 @@
|
||||
|
||||
//! Trie query recorder.
|
||||
|
||||
use keccak::keccak;
|
||||
use ethereum_types::H256;
|
||||
use bytes::Bytes;
|
||||
|
||||
/// A record of a visited node.
|
||||
#[derive(PartialEq, Eq, Debug, Clone)]
|
||||
pub struct Record {
|
||||
pub struct Record<HO> {
|
||||
/// The depth of this node.
|
||||
pub depth: u32,
|
||||
|
||||
@ -30,23 +28,23 @@ pub struct Record {
|
||||
pub data: Bytes,
|
||||
|
||||
/// The hash of the data.
|
||||
pub hash: H256,
|
||||
pub hash: HO,
|
||||
}
|
||||
|
||||
/// Records trie nodes as they pass it.
|
||||
#[derive(Debug)]
|
||||
pub struct Recorder {
|
||||
nodes: Vec<Record>,
|
||||
pub struct Recorder<HO> {
|
||||
nodes: Vec<Record<HO>>,
|
||||
min_depth: u32,
|
||||
}
|
||||
|
||||
impl Default for Recorder {
|
||||
impl<HO: Copy> Default for Recorder<HO> {
|
||||
fn default() -> Self {
|
||||
Recorder::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl Recorder {
|
||||
impl<HO: Copy> Recorder<HO> {
|
||||
/// Create a new `Recorder` which records all given nodes.
|
||||
#[inline]
|
||||
pub fn new() -> Self {
|
||||
@ -62,9 +60,7 @@ impl Recorder {
|
||||
}
|
||||
|
||||
/// Record a visited node, given its hash, data, and depth.
|
||||
pub fn record(&mut self, hash: &H256, data: &[u8], depth: u32) {
|
||||
debug_assert_eq!(keccak(data), *hash);
|
||||
|
||||
pub fn record(&mut self, hash: &HO, data: &[u8], depth: u32) {
|
||||
if depth >= self.min_depth {
|
||||
self.nodes.push(Record {
|
||||
depth: depth,
|
||||
@ -75,7 +71,7 @@ impl Recorder {
|
||||
}
|
||||
|
||||
/// Drain all visited records.
|
||||
pub fn drain(&mut self) -> Vec<Record> {
|
||||
pub fn drain(&mut self) -> Vec<Record<HO>> {
|
||||
::std::mem::replace(&mut self.nodes, Vec::new())
|
||||
}
|
||||
}
|
||||
@ -83,11 +79,13 @@ impl Recorder {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use keccak::keccak;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use ethereum_types::H256;
|
||||
|
||||
#[test]
|
||||
fn basic_recorder() {
|
||||
let mut basic = Recorder::new();
|
||||
let mut basic = Recorder::<H256>::new();
|
||||
|
||||
let node1 = vec![1, 2, 3, 4];
|
||||
let node2 = vec![4, 5, 6, 7, 8, 9, 10];
|
||||
@ -105,15 +103,16 @@ mod tests {
|
||||
let record2 = Record {
|
||||
data: node2,
|
||||
hash: hash2,
|
||||
depth: 456
|
||||
depth: 456,
|
||||
};
|
||||
|
||||
|
||||
assert_eq!(basic.drain(), vec![record1, record2]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn basic_recorder_min_depth() {
|
||||
let mut basic = Recorder::with_depth(400);
|
||||
let mut basic = Recorder::<H256>::with_depth(400);
|
||||
|
||||
let node1 = vec![1, 2, 3, 4];
|
||||
let node2 = vec![4, 5, 6, 7, 8, 9, 10];
|
||||
@ -136,10 +135,11 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn trie_record() {
|
||||
use super::super::{TrieDB, TrieDBMut, Trie, TrieMut};
|
||||
use ethtrie::trie::{Trie, TrieMut, Recorder};
|
||||
use memorydb::MemoryDB;
|
||||
use ethtrie::{TrieDB, TrieDBMut};
|
||||
|
||||
let mut db = MemoryDB::new();
|
||||
let mut db = MemoryDB::<KeccakHasher>::new();
|
||||
|
||||
let mut root = H256::default();
|
||||
|
||||
@ -157,7 +157,7 @@ mod tests {
|
||||
}
|
||||
|
||||
let trie = TrieDB::new(&db, &root).unwrap();
|
||||
let mut recorder = Recorder::new();
|
||||
let mut recorder = Recorder::<H256>::new();
|
||||
|
||||
trie.get_with(b"pirate", &mut recorder).unwrap().unwrap();
|
||||
|
||||
|
@ -14,71 +14,87 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use ethereum_types::H256;
|
||||
use keccak::keccak;
|
||||
use hashdb::HashDB;
|
||||
use hashdb::{HashDB, Hasher};
|
||||
use super::triedb::TrieDB;
|
||||
use super::{Trie, TrieItem, TrieIterator, Query};
|
||||
use super::{Result, Trie, TrieItem, TrieIterator, Query};
|
||||
use node_codec::NodeCodec;
|
||||
|
||||
/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
|
||||
///
|
||||
/// Use it as a `Trie` trait object. You can use `raw()` to get the backing `TrieDB` object.
|
||||
pub struct SecTrieDB<'db> {
|
||||
raw: TrieDB<'db>
|
||||
pub struct SecTrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
raw: TrieDB<'db, H, C>
|
||||
}
|
||||
|
||||
impl<'db> SecTrieDB<'db> {
|
||||
impl<'db, H, C> SecTrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and empty `root`
|
||||
///
|
||||
/// Initialise to the state entailed by the genesis block.
|
||||
/// This guarantees the trie is built correctly.
|
||||
/// Returns an error if root does not exist.
|
||||
pub fn new(db: &'db HashDB, root: &'db H256) -> super::Result<Self> {
|
||||
pub fn new(db: &'db HashDB<H>, root: &'db H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(SecTrieDB { raw: TrieDB::new(db, root)? })
|
||||
}
|
||||
|
||||
/// Get a reference to the underlying raw `TrieDB` struct.
|
||||
pub fn raw(&self) -> &TrieDB {
|
||||
pub fn raw(&self) -> &TrieDB<H, C> {
|
||||
&self.raw
|
||||
}
|
||||
|
||||
/// Get a mutable reference to the underlying raw `TrieDB` struct.
|
||||
pub fn raw_mut(&mut self) -> &mut TrieDB<'db> {
|
||||
pub fn raw_mut(&mut self) -> &mut TrieDB<'db, H, C> {
|
||||
&mut self.raw
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Trie for SecTrieDB<'db> {
|
||||
fn iter<'a>(&'a self) -> super::Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
|
||||
TrieDB::iter(&self.raw)
|
||||
impl<'db, H, C> Trie<H, C> for SecTrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn root(&self) -> &H::Out { self.raw.root() }
|
||||
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
self.raw.contains(H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn root(&self) -> &H256 { self.raw.root() }
|
||||
|
||||
fn contains(&self, key: &[u8]) -> super::Result<bool> {
|
||||
self.raw.contains(&keccak(key))
|
||||
}
|
||||
|
||||
fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result<Option<Q::Item>>
|
||||
fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
self.raw.get_with(&keccak(key), query)
|
||||
self.raw.get_with(H::hash(key).as_ref(), query)
|
||||
}
|
||||
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item = TrieItem<H::Out, C::Error>> + 'a>, H::Out, C::Error> {
|
||||
TrieDB::iter(&self.raw)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use memorydb::MemoryDB;
|
||||
use hashdb::DBValue;
|
||||
use keccak;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use ethtrie::{TrieDBMut, SecTrieDB, trie::{Trie, TrieMut}};
|
||||
use ethereum_types::H256;
|
||||
|
||||
#[test]
|
||||
fn trie_to_sectrie() {
|
||||
use memorydb::MemoryDB;
|
||||
use hashdb::DBValue;
|
||||
use super::triedbmut::TrieDBMut;
|
||||
use super::TrieMut;
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut root = H256::default();
|
||||
let mut db = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
t.insert(&keccak(&[0x01u8, 0x23]), &[0x01u8, 0x23]).unwrap();
|
||||
let mut t = TrieDBMut::new(&mut db, &mut root);
|
||||
t.insert(&keccak::keccak(&[0x01u8, 0x23]), &[0x01u8, 0x23]).unwrap();
|
||||
}
|
||||
let t = SecTrieDB::new(&memdb, &root).unwrap();
|
||||
let t = SecTrieDB::new(&db, &root).unwrap();
|
||||
assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
|
||||
}
|
||||
}
|
@ -14,43 +14,53 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use ethereum_types::H256;
|
||||
use keccak::keccak;
|
||||
use hashdb::{HashDB, DBValue};
|
||||
use super::triedbmut::TrieDBMut;
|
||||
use super::TrieMut;
|
||||
use hashdb::{HashDB, DBValue, Hasher};
|
||||
use super::{Result, TrieMut, TrieDBMut};
|
||||
use node_codec::NodeCodec;
|
||||
|
||||
/// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
|
||||
///
|
||||
/// Use it as a `Trie` or `TrieMut` trait object. You can use `raw()` to get the backing `TrieDBMut` object.
|
||||
pub struct SecTrieDBMut<'db> {
|
||||
raw: TrieDBMut<'db>
|
||||
pub struct SecTrieDBMut<'db, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
raw: TrieDBMut<'db, H, C>
|
||||
}
|
||||
|
||||
impl<'db> SecTrieDBMut<'db> {
|
||||
impl<'db, H, C> SecTrieDBMut<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and empty `root`
|
||||
/// Initialise to the state entailed by the genesis block.
|
||||
/// This guarantees the trie is built correctly.
|
||||
pub fn new(db: &'db mut HashDB, root: &'db mut H256) -> Self {
|
||||
pub fn new(db: &'db mut HashDB<H>, root: &'db mut H::Out) -> Self {
|
||||
SecTrieDBMut { raw: TrieDBMut::new(db, root) }
|
||||
}
|
||||
|
||||
/// Create a new trie with the backing database `db` and `root`.
|
||||
///
|
||||
/// Returns an error if root does not exist.
|
||||
pub fn from_existing(db: &'db mut HashDB, root: &'db mut H256) -> super::Result<Self> {
|
||||
pub fn from_existing(db: &'db mut HashDB<H>, root: &'db mut H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
Ok(SecTrieDBMut { raw: TrieDBMut::from_existing(db, root)? })
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db(&self) -> &HashDB { self.raw.db() }
|
||||
pub fn db(&self) -> &HashDB<H> { self.raw.db() }
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db_mut(&mut self) -> &mut HashDB { self.raw.db_mut() }
|
||||
pub fn db_mut(&mut self) -> &mut HashDB<H> { self.raw.db_mut() }
|
||||
}
|
||||
|
||||
impl<'db> TrieMut for SecTrieDBMut<'db> {
|
||||
fn root(&mut self) -> &H256 {
|
||||
impl<'db, H, C> TrieMut<H, C> for SecTrieDBMut<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn root(&mut self) -> &H::Out {
|
||||
self.raw.root()
|
||||
}
|
||||
|
||||
@ -58,37 +68,43 @@ impl<'db> TrieMut for SecTrieDBMut<'db> {
|
||||
self.raw.is_empty()
|
||||
}
|
||||
|
||||
fn contains(&self, key: &[u8]) -> super::Result<bool> {
|
||||
self.raw.contains(&keccak(key))
|
||||
fn contains(&self, key: &[u8]) -> Result<bool, H::Out, C::Error> {
|
||||
self.raw.contains(&H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<DBValue>>
|
||||
fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
self.raw.get(&keccak(key))
|
||||
self.raw.get(&H::hash(key).as_ref())
|
||||
}
|
||||
|
||||
fn insert(&mut self, key: &[u8], value: &[u8]) -> super::Result<Option<DBValue>> {
|
||||
self.raw.insert(&keccak(key), value)
|
||||
fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
|
||||
self.raw.insert(&H::hash(key).as_ref(), value)
|
||||
}
|
||||
|
||||
fn remove(&mut self, key: &[u8]) -> super::Result<Option<DBValue>> {
|
||||
self.raw.remove(&keccak(key))
|
||||
fn remove(&mut self, key: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
|
||||
self.raw.remove(&H::hash(key).as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use memorydb::MemoryDB;
|
||||
use hashdb::DBValue;
|
||||
use keccak;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use ethtrie::{TrieDB, SecTrieDBMut, trie::{Trie, TrieMut}};
|
||||
use ethereum_types::H256;
|
||||
|
||||
#[test]
|
||||
fn sectrie_to_trie() {
|
||||
use memorydb::*;
|
||||
use super::triedb::*;
|
||||
use super::Trie;
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut root = H256::default();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = SecTrieDBMut::new(&mut memdb, &mut root);
|
||||
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
|
||||
}
|
||||
let t = TrieDB::new(&memdb, &root).unwrap();
|
||||
assert_eq!(t.get(&keccak(&[0x01u8, 0x23])).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
|
||||
assert_eq!(t.get(&keccak::keccak(&[0x01u8, 0x23])).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
|
||||
}
|
||||
}
|
||||
|
@ -18,12 +18,15 @@ use std::fmt;
|
||||
use hashdb::*;
|
||||
use nibbleslice::NibbleSlice;
|
||||
use super::node::{Node, OwnedNode};
|
||||
use node_codec::NodeCodec;
|
||||
use super::lookup::Lookup;
|
||||
use super::{Trie, TrieItem, TrieError, TrieIterator, Query};
|
||||
use ethereum_types::H256;
|
||||
use super::{Result, Trie, TrieItem, TrieError, TrieIterator, Query};
|
||||
use bytes::Bytes;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
/// A `Trie` implementation using a generic `HashDB` backing database.
|
||||
/// A `Trie` implementation using a generic `HashDB` backing database, a `Hasher`
|
||||
/// implementation to generate keys and a `NodeCodec` implementation to encode/decode
|
||||
/// the nodes.
|
||||
///
|
||||
/// Use it as a `Trie` trait object. You can use `db()` to get the backing database object.
|
||||
/// Use `get` and `contains` to query values associated with keys in the trie.
|
||||
@ -31,17 +34,22 @@ use bytes::Bytes;
|
||||
/// # Example
|
||||
/// ```
|
||||
/// extern crate patricia_trie as trie;
|
||||
/// extern crate patricia_trie_ethereum as ethtrie;
|
||||
/// extern crate hashdb;
|
||||
/// extern crate keccak_hasher;
|
||||
/// extern crate memorydb;
|
||||
/// extern crate ethereum_types;
|
||||
///
|
||||
/// use trie::*;
|
||||
/// use hashdb::*;
|
||||
/// use keccak_hasher::KeccakHasher;
|
||||
/// use memorydb::*;
|
||||
/// use ethereum_types::H256;
|
||||
/// use ethtrie::{TrieDB, TrieDBMut};
|
||||
///
|
||||
///
|
||||
/// fn main() {
|
||||
/// let mut memdb = MemoryDB::new();
|
||||
/// let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
/// let mut root = H256::new();
|
||||
/// TrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar").unwrap();
|
||||
/// let t = TrieDB::new(&memdb, &root).unwrap();
|
||||
@ -49,35 +57,38 @@ use bytes::Bytes;
|
||||
/// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar"));
|
||||
/// }
|
||||
/// ```
|
||||
pub struct TrieDB<'db> {
|
||||
db: &'db HashDB,
|
||||
root: &'db H256,
|
||||
pub struct TrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
db: &'db HashDB<H>,
|
||||
root: &'db H::Out,
|
||||
/// The number of hashes performed so far in operations on this trie.
|
||||
hash_count: usize,
|
||||
codec_marker: PhantomData<C>,
|
||||
}
|
||||
|
||||
impl<'db> TrieDB<'db> {
|
||||
impl<'db, H, C> TrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with the backing database `db` and `root`
|
||||
/// Returns an error if `root` does not exist
|
||||
pub fn new(db: &'db HashDB, root: &'db H256) -> super::Result<Self> {
|
||||
pub fn new(db: &'db HashDB<H>, root: &'db H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
if !db.contains(root) {
|
||||
Err(Box::new(TrieError::InvalidStateRoot(*root)))
|
||||
} else {
|
||||
Ok(TrieDB {
|
||||
db: db,
|
||||
root: root,
|
||||
hash_count: 0
|
||||
})
|
||||
Ok(TrieDB {db, root, hash_count: 0, codec_marker: PhantomData})
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the backing database.
|
||||
pub fn db(&'db self) -> &'db HashDB {
|
||||
self.db
|
||||
}
|
||||
pub fn db(&'db self) -> &'db HashDB<H> { self.db }
|
||||
|
||||
/// Get the data of the root node.
|
||||
fn root_data(&self) -> super::Result<DBValue> {
|
||||
fn root_data(&self) -> Result<DBValue, H::Out, C::Error> {
|
||||
self.db
|
||||
.get(self.root)
|
||||
.ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root)))
|
||||
@ -86,49 +97,57 @@ impl<'db> TrieDB<'db> {
|
||||
/// Given some node-describing data `node`, return the actual node RLP.
|
||||
/// This could be a simple identity operation in the case that the node is sufficiently small, but
|
||||
/// may require a database lookup.
|
||||
fn get_raw_or_lookup(&'db self, node: &'db [u8]) -> super::Result<DBValue> {
|
||||
match Node::try_decode_hash(node) {
|
||||
fn get_raw_or_lookup(&'db self, node: &'db [u8]) -> Result<DBValue, H::Out, C::Error> {
|
||||
match C::try_decode_hash(node) {
|
||||
Some(key) => {
|
||||
self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key)))
|
||||
}
|
||||
None => Ok(DBValue::from_slice(node))
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a node from raw rlp bytes, assumes valid rlp because encoded locally
|
||||
fn decode_node(node: &'db [u8]) -> Node {
|
||||
Node::decoded(node).expect("rlp read from db; qed")
|
||||
}
|
||||
}
|
||||
|
||||
impl<'db> Trie for TrieDB<'db> {
|
||||
fn iter<'a>(&'a self) -> super::Result<Box<TrieIterator<Item = TrieItem> + 'a>> {
|
||||
TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>)
|
||||
}
|
||||
impl<'db, H, C> Trie<H, C> for TrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn root(&self) -> &H::Out { self.root }
|
||||
|
||||
fn root(&self) -> &H256 { self.root }
|
||||
|
||||
fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> super::Result<Option<Q::Item>>
|
||||
fn get_with<'a, 'key, Q: Query<H>>(&'a self, key: &'key [u8], query: Q) -> Result<Option<Q::Item>, H::Out, C::Error>
|
||||
where 'a: 'key
|
||||
{
|
||||
Lookup {
|
||||
db: self.db,
|
||||
query: query,
|
||||
hash: self.root.clone(),
|
||||
marker: PhantomData::<C>,
|
||||
}.look_up(NibbleSlice::new(key))
|
||||
}
|
||||
|
||||
fn iter<'a>(&'a self) -> Result<Box<TrieIterator<H, C, Item=TrieItem<H::Out, C::Error>> + 'a>, H::Out, C::Error> {
|
||||
TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>)
|
||||
}
|
||||
}
|
||||
|
||||
// This is for pretty debug output only
|
||||
struct TrieAwareDebugNode<'db, 'a> {
|
||||
trie: &'db TrieDB<'db>,
|
||||
struct TrieAwareDebugNode<'db, 'a, H, C>
|
||||
where
|
||||
H: Hasher + 'db,
|
||||
C: NodeCodec<H> + 'db
|
||||
{
|
||||
trie: &'db TrieDB<'db, H, C>,
|
||||
key: &'a[u8]
|
||||
}
|
||||
|
||||
impl<'db, 'a> fmt::Debug for TrieAwareDebugNode<'db, 'a> {
|
||||
impl<'db, 'a, H, C> fmt::Debug for TrieAwareDebugNode<'db, 'a, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
if let Ok(node) = self.trie.get_raw_or_lookup(self.key) {
|
||||
match Node::decoded(&node) {
|
||||
match C::decode(&node) {
|
||||
Ok(Node::Leaf(slice, value)) => f.debug_struct("Node::Leaf")
|
||||
.field("slice", &slice)
|
||||
.field("value", &value)
|
||||
@ -138,7 +157,7 @@ impl<'db, 'a> fmt::Debug for TrieAwareDebugNode<'db, 'a> {
|
||||
.field("item", &TrieAwareDebugNode{trie: self.trie, key: item})
|
||||
.finish(),
|
||||
Ok(Node::Branch(ref nodes, ref value)) => {
|
||||
let nodes: Vec<TrieAwareDebugNode> = nodes.into_iter().map(|n| TrieAwareDebugNode{trie: self.trie, key: n} ).collect();
|
||||
let nodes: Vec<TrieAwareDebugNode<H, C>> = nodes.into_iter().map(|n| TrieAwareDebugNode{trie: self.trie, key: n} ).collect();
|
||||
f.debug_struct("Node::Branch")
|
||||
.field("nodes", &nodes)
|
||||
.field("value", &value)
|
||||
@ -160,8 +179,11 @@ impl<'db, 'a> fmt::Debug for TrieAwareDebugNode<'db, 'a> {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl<'db> fmt::Debug for TrieDB<'db> {
|
||||
impl<'db, H, C> fmt::Debug for TrieDB<'db, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
let root_rlp = self.db.get(self.root).expect("Trie root not found!");
|
||||
f.debug_struct("TrieDB")
|
||||
@ -202,29 +224,24 @@ impl Crumb {
|
||||
}
|
||||
|
||||
/// Iterator for going through all values in the trie.
|
||||
pub struct TrieDBIterator<'a> {
|
||||
db: &'a TrieDB<'a>,
|
||||
pub struct TrieDBIterator<'a, H: Hasher + 'a, C: NodeCodec<H> + 'a> {
|
||||
db: &'a TrieDB<'a, H, C>,
|
||||
trail: Vec<Crumb>,
|
||||
key_nibbles: Bytes,
|
||||
}
|
||||
|
||||
impl<'a> TrieDBIterator<'a> {
|
||||
impl<'a, H: Hasher, C: NodeCodec<H>> TrieDBIterator<'a, H, C> {
|
||||
/// Create a new iterator.
|
||||
pub fn new(db: &'a TrieDB) -> super::Result<TrieDBIterator<'a>> {
|
||||
let mut r = TrieDBIterator {
|
||||
db: db,
|
||||
trail: vec![],
|
||||
key_nibbles: Vec::new(),
|
||||
};
|
||||
|
||||
pub fn new(db: &'a TrieDB<H, C>) -> Result<TrieDBIterator<'a, H, C>, H::Out, C::Error> {
|
||||
let mut r = TrieDBIterator { db, trail: Vec::with_capacity(8), key_nibbles: Vec::with_capacity(64) };
|
||||
db.root_data().and_then(|root| r.descend(&root))?;
|
||||
Ok(r)
|
||||
}
|
||||
|
||||
fn seek<'key>(&mut self, mut node_data: DBValue, mut key: NibbleSlice<'key>) -> super::Result<()> {
|
||||
fn seek<'key>(&mut self, mut node_data: DBValue, mut key: NibbleSlice<'key>) -> Result<(), H::Out, C::Error> {
|
||||
loop {
|
||||
let (data, mid) = {
|
||||
let node = TrieDB::decode_node(&node_data);
|
||||
let node = C::decode(&node_data).expect("encoded data read from db; qed");
|
||||
match node {
|
||||
Node::Leaf(slice, _) => {
|
||||
if slice == key {
|
||||
@ -285,17 +302,15 @@ impl<'a> TrieDBIterator<'a> {
|
||||
}
|
||||
|
||||
/// Descend into a payload.
|
||||
fn descend(&mut self, d: &[u8]) -> super::Result<()> {
|
||||
let node = TrieDB::decode_node(&self.db.get_raw_or_lookup(d)?).into();
|
||||
Ok(self.descend_into_node(node))
|
||||
fn descend(&mut self, d: &[u8]) -> Result<(), H::Out, C::Error> {
|
||||
let node_data = &self.db.get_raw_or_lookup(d)?;
|
||||
let node = C::decode(&node_data).expect("encoded node read from db; qed");
|
||||
Ok(self.descend_into_node(node.into()))
|
||||
}
|
||||
|
||||
/// Descend into a payload.
|
||||
fn descend_into_node(&mut self, node: OwnedNode) {
|
||||
self.trail.push(Crumb {
|
||||
status: Status::Entering,
|
||||
node: node,
|
||||
});
|
||||
self.trail.push(Crumb { status: Status::Entering, node });
|
||||
match &self.trail.last().expect("just pushed item; qed").node {
|
||||
&OwnedNode::Leaf(ref n, _) | &OwnedNode::Extension(ref n, _) => {
|
||||
self.key_nibbles.extend((0..n.len()).map(|i| n.at(i)));
|
||||
@ -319,26 +334,25 @@ impl<'a> TrieDBIterator<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> TrieIterator for TrieDBIterator<'a> {
|
||||
impl<'a, H: Hasher, C: NodeCodec<H>> TrieIterator<H, C> for TrieDBIterator<'a, H, C> {
|
||||
/// Position the iterator on the first element with key >= `key`
|
||||
fn seek(&mut self, key: &[u8]) -> super::Result<()> {
|
||||
fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, C::Error> {
|
||||
self.trail.clear();
|
||||
self.key_nibbles.clear();
|
||||
let root_rlp = self.db.root_data()?;
|
||||
self.seek(root_rlp, NibbleSlice::new(key))
|
||||
self.seek(root_rlp, NibbleSlice::new(key.as_ref()))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for TrieDBIterator<'a> {
|
||||
type Item = TrieItem<'a>;
|
||||
impl<'a, H: Hasher, C: NodeCodec<H>> Iterator for TrieDBIterator<'a, H, C> {
|
||||
type Item = TrieItem<'a, H::Out, C::Error>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
enum IterStep {
|
||||
enum IterStep<O, E> {
|
||||
Continue,
|
||||
PopTrail,
|
||||
Descend(super::Result<DBValue>),
|
||||
Descend(Result<DBValue, O, E>),
|
||||
}
|
||||
|
||||
loop {
|
||||
let iter_step = {
|
||||
self.trail.last_mut()?.increment();
|
||||
@ -359,7 +373,9 @@ impl<'a> Iterator for TrieDBIterator<'a> {
|
||||
(Status::At, &OwnedNode::Leaf(_, ref v)) | (Status::At, &OwnedNode::Branch(_, Some(ref v))) => {
|
||||
return Some(Ok((self.key(), v.clone())));
|
||||
},
|
||||
(Status::At, &OwnedNode::Extension(_, ref d)) => IterStep::Descend(self.db.get_raw_or_lookup(&*d)),
|
||||
(Status::At, &OwnedNode::Extension(_, ref d)) => {
|
||||
IterStep::Descend::<H::Out, C::Error>(self.db.get_raw_or_lookup(&*d))
|
||||
},
|
||||
(Status::At, &OwnedNode::Branch(_, _)) => IterStep::Continue,
|
||||
(Status::AtChild(i), &OwnedNode::Branch(ref children, _)) if children[i].len() > 0 => {
|
||||
match i {
|
||||
@ -367,7 +383,7 @@ impl<'a> Iterator for TrieDBIterator<'a> {
|
||||
i => *self.key_nibbles.last_mut()
|
||||
.expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8,
|
||||
}
|
||||
IterStep::Descend(self.db.get_raw_or_lookup(&*children[i]))
|
||||
IterStep::Descend::<H::Out, C::Error>(self.db.get_raw_or_lookup(&*children[i]))
|
||||
},
|
||||
(Status::AtChild(i), &OwnedNode::Branch(_, _)) => {
|
||||
if i == 0 {
|
||||
@ -383,10 +399,11 @@ impl<'a> Iterator for TrieDBIterator<'a> {
|
||||
IterStep::PopTrail => {
|
||||
self.trail.pop();
|
||||
},
|
||||
IterStep::Descend(Ok(d)) => {
|
||||
self.descend_into_node(TrieDB::decode_node(&d).into())
|
||||
IterStep::Descend::<H::Out, C::Error>(Ok(d)) => {
|
||||
let node = C::decode(&d).expect("encoded data read from db; qed");
|
||||
self.descend_into_node(node.into())
|
||||
},
|
||||
IterStep::Descend(Err(e)) => {
|
||||
IterStep::Descend::<H::Out, C::Error>(Err(e)) => {
|
||||
return Some(Err(e))
|
||||
}
|
||||
IterStep::Continue => {},
|
||||
@ -395,15 +412,19 @@ impl<'a> Iterator for TrieDBIterator<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use hashdb::DBValue;
|
||||
use keccak_hasher::KeccakHasher;
|
||||
use memorydb::MemoryDB;
|
||||
use ethtrie::{TrieDB, TrieDBMut, RlpCodec, trie::{Trie, TrieMut, Lookup}};
|
||||
use ethereum_types::H256;
|
||||
|
||||
#[test]
|
||||
fn iterator() {
|
||||
use memorydb::*;
|
||||
use super::TrieMut;
|
||||
use super::triedbmut::*;
|
||||
|
||||
let d = vec![DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B")];
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
@ -419,13 +440,9 @@ fn iterator() {
|
||||
|
||||
#[test]
|
||||
fn iterator_seek() {
|
||||
use memorydb::*;
|
||||
use super::TrieMut;
|
||||
use super::triedbmut::*;
|
||||
|
||||
let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ];
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
@ -436,7 +453,7 @@ fn iterator_seek() {
|
||||
|
||||
let t = TrieDB::new(&memdb, &root).unwrap();
|
||||
let mut iter = t.iter().unwrap();
|
||||
assert_eq!(iter.next(), Some(Ok((b"A".to_vec(), DBValue::from_slice(b"A")))));
|
||||
assert_eq!(iter.next().unwrap().unwrap(), (b"A".to_vec(), DBValue::from_slice(b"A")));
|
||||
iter.seek(b"!").unwrap();
|
||||
assert_eq!(d, iter.map(|x| x.unwrap().1).collect::<Vec<_>>());
|
||||
let mut iter = t.iter().unwrap();
|
||||
@ -464,11 +481,7 @@ fn iterator_seek() {
|
||||
|
||||
#[test]
|
||||
fn get_len() {
|
||||
use memorydb::*;
|
||||
use super::TrieMut;
|
||||
use super::triedbmut::*;
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
@ -477,21 +490,16 @@ fn get_len() {
|
||||
}
|
||||
|
||||
let t = TrieDB::new(&memdb, &root).unwrap();
|
||||
assert_eq!(t.get_with(b"A", |x: &[u8]| x.len()), Ok(Some(3)));
|
||||
assert_eq!(t.get_with(b"B", |x: &[u8]| x.len()), Ok(Some(5)));
|
||||
assert_eq!(t.get_with(b"C", |x: &[u8]| x.len()), Ok(None));
|
||||
assert_eq!(t.get_with(b"A", |x: &[u8]| x.len()).unwrap(), Some(3));
|
||||
assert_eq!(t.get_with(b"B", |x: &[u8]| x.len()).unwrap(), Some(5));
|
||||
assert_eq!(t.get_with(b"C", |x: &[u8]| x.len()).unwrap(), None);
|
||||
}
|
||||
|
||||
|
||||
#[test]
|
||||
fn debug_output_supports_pretty_print() {
|
||||
use memorydb::*;
|
||||
use super::TrieMut;
|
||||
use super::triedbmut::*;
|
||||
|
||||
let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ];
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
let root = {
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
@ -596,13 +604,12 @@ fn debug_output_supports_pretty_print() {
|
||||
|
||||
#[test]
|
||||
fn test_lookup_with_corrupt_data_returns_decoder_error() {
|
||||
use memorydb::*;
|
||||
use super::TrieMut;
|
||||
use super::triedbmut::*;
|
||||
use rlp;
|
||||
use ethereum_types::H512;
|
||||
use std::marker::PhantomData;
|
||||
use ethtrie::trie::NibbleSlice;
|
||||
|
||||
let mut memdb = MemoryDB::new();
|
||||
let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
let mut root = H256::new();
|
||||
{
|
||||
let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
@ -614,7 +621,8 @@ fn test_lookup_with_corrupt_data_returns_decoder_error() {
|
||||
|
||||
// query for an invalid data type to trigger an error
|
||||
let q = rlp::decode::<H512>;
|
||||
let lookup = Lookup{ db: t.db, query: q, hash: root };
|
||||
let lookup = Lookup::<_, RlpCodec, _>{ db: t.db(), query: q, hash: root, marker: PhantomData };
|
||||
let query_result = lookup.look_up(NibbleSlice::new(b"A"));
|
||||
assert_eq!(query_result.unwrap().unwrap().unwrap_err(), rlp::DecoderError::RlpIsTooShort);
|
||||
}
|
||||
}
|
@ -16,23 +16,21 @@
|
||||
|
||||
//! In-memory trie representation.
|
||||
|
||||
use super::{TrieError, TrieMut};
|
||||
use super::{Result, TrieError, TrieMut};
|
||||
use super::lookup::Lookup;
|
||||
use super::node::Node as RlpNode;
|
||||
use super::node::Node as EncodedNode;
|
||||
use node_codec::NodeCodec;
|
||||
use super::node::NodeKey;
|
||||
|
||||
use hashdb::HashDB;
|
||||
use bytes::ToPretty;
|
||||
use hashdb::{HashDB, Hasher, DBValue};
|
||||
use nibbleslice::NibbleSlice;
|
||||
use rlp::{Rlp, RlpStream};
|
||||
use hashdb::DBValue;
|
||||
|
||||
use elastic_array::ElasticArray1024;
|
||||
use std::collections::{HashSet, VecDeque};
|
||||
use std::marker::PhantomData;
|
||||
use std::mem;
|
||||
use std::ops::Index;
|
||||
use ethereum_types::H256;
|
||||
use elastic_array::ElasticArray1024;
|
||||
use keccak::{KECCAK_NULL_RLP};
|
||||
|
||||
// For lookups into the Node storage buffer.
|
||||
// This is deliberately non-copyable.
|
||||
@ -41,26 +39,20 @@ struct StorageHandle(usize);
|
||||
|
||||
// Handles to nodes in the trie.
|
||||
#[derive(Debug)]
|
||||
enum NodeHandle {
|
||||
enum NodeHandle<H: Hasher> {
|
||||
/// Loaded into memory.
|
||||
InMemory(StorageHandle),
|
||||
/// Either a hash or an inline node
|
||||
Hash(H256),
|
||||
Hash(H::Out),
|
||||
}
|
||||
|
||||
impl From<StorageHandle> for NodeHandle {
|
||||
impl<H: Hasher> From<StorageHandle> for NodeHandle<H> {
|
||||
fn from(handle: StorageHandle) -> Self {
|
||||
NodeHandle::InMemory(handle)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<H256> for NodeHandle {
|
||||
fn from(hash: H256) -> Self {
|
||||
NodeHandle::Hash(hash)
|
||||
}
|
||||
}
|
||||
|
||||
fn empty_children() -> Box<[Option<NodeHandle>; 16]> {
|
||||
fn empty_children<H: Hasher>() -> Box<[Option<NodeHandle<H>>; 16]> {
|
||||
Box::new([
|
||||
None, None, None, None, None, None, None, None,
|
||||
None, None, None, None, None, None, None, None,
|
||||
@ -69,7 +61,7 @@ fn empty_children() -> Box<[Option<NodeHandle>; 16]> {
|
||||
|
||||
/// Node types in the Trie.
|
||||
#[derive(Debug)]
|
||||
enum Node {
|
||||
enum Node<H: Hasher> {
|
||||
/// Empty node.
|
||||
Empty,
|
||||
/// A leaf node contains the end of a key and a value.
|
||||
@ -80,36 +72,41 @@ enum Node {
|
||||
/// The shared portion is encoded from a `NibbleSlice` meaning it contains
|
||||
/// a flag indicating it is an extension.
|
||||
/// The child node is always a branch.
|
||||
Extension(NodeKey, NodeHandle),
|
||||
Extension(NodeKey, NodeHandle<H>),
|
||||
/// A branch has up to 16 children and an optional value.
|
||||
Branch(Box<[Option<NodeHandle>; 16]>, Option<DBValue>)
|
||||
Branch(Box<[Option<NodeHandle<H>>; 16]>, Option<DBValue>)
|
||||
}
|
||||
|
||||
impl Node {
|
||||
impl<H: Hasher> Node<H> {
|
||||
// load an inline node into memory or get the hash to do the lookup later.
|
||||
fn inline_or_hash(node: &[u8], db: &HashDB, storage: &mut NodeStorage) -> NodeHandle {
|
||||
RlpNode::try_decode_hash(&node)
|
||||
fn inline_or_hash<C>(node: &[u8], db: &HashDB<H>, storage: &mut NodeStorage<H>) -> NodeHandle<H>
|
||||
where C: NodeCodec<H>
|
||||
{
|
||||
C::try_decode_hash(&node)
|
||||
.map(NodeHandle::Hash)
|
||||
.unwrap_or_else(|| {
|
||||
let child = Node::from_rlp(node, db, storage);
|
||||
let child = Node::from_encoded::<C>(node, db, storage);
|
||||
NodeHandle::InMemory(storage.alloc(Stored::New(child)))
|
||||
})
|
||||
}
|
||||
|
||||
// decode a node from rlp without getting its children.
|
||||
fn from_rlp(rlp: &[u8], db: &HashDB, storage: &mut NodeStorage) -> Self {
|
||||
match RlpNode::decoded(rlp).expect("rlp read from db; qed") {
|
||||
RlpNode::Empty => Node::Empty,
|
||||
RlpNode::Leaf(k, v) => Node::Leaf(k.encoded(true), DBValue::from_slice(&v)),
|
||||
RlpNode::Extension(key, cb) => {
|
||||
Node::Extension(key.encoded(false), Self::inline_or_hash(cb, db, storage))
|
||||
// decode a node from encoded bytes without getting its children.
|
||||
fn from_encoded<C>(data: &[u8], db: &HashDB<H>, storage: &mut NodeStorage<H>) -> Self
|
||||
where C: NodeCodec<H>
|
||||
{
|
||||
match C::decode(data).expect("encoded bytes read from db; qed") {
|
||||
EncodedNode::Empty => Node::Empty,
|
||||
EncodedNode::Leaf(k, v) => Node::Leaf(k.encoded(true), DBValue::from_slice(&v)),
|
||||
EncodedNode::Extension(key, cb) => {
|
||||
Node::Extension(
|
||||
key.encoded(false),
|
||||
Self::inline_or_hash::<C>(cb, db, storage))
|
||||
}
|
||||
RlpNode::Branch(ref children_rlp, val) => {
|
||||
let mut child = |i| {
|
||||
let raw = children_rlp[i];
|
||||
let child_rlp = Rlp::new(raw);
|
||||
if !child_rlp.is_empty() {
|
||||
Some(Self::inline_or_hash(raw, db, storage))
|
||||
EncodedNode::Branch(ref encoded_children, val) => {
|
||||
let mut child = |i:usize| {
|
||||
let raw = encoded_children[i];
|
||||
if !C::is_empty_node(raw) {
|
||||
Some(Self::inline_or_hash::<C>(raw, db, storage))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@ -127,70 +124,51 @@ impl Node {
|
||||
}
|
||||
}
|
||||
|
||||
// encode a node to RLP
|
||||
// TODO: parallelize
|
||||
fn into_rlp<F>(self, mut child_cb: F) -> ElasticArray1024<u8>
|
||||
where F: FnMut(NodeHandle, &mut RlpStream)
|
||||
fn into_encoded<F, C>(self, mut child_cb: F) -> ElasticArray1024<u8>
|
||||
where
|
||||
C: NodeCodec<H>,
|
||||
F: FnMut(NodeHandle<H>) -> ChildReference<H::Out>
|
||||
{
|
||||
match self {
|
||||
Node::Empty => {
|
||||
let mut stream = RlpStream::new();
|
||||
stream.append_empty_data();
|
||||
stream.drain()
|
||||
}
|
||||
Node::Leaf(partial, value) => {
|
||||
let mut stream = RlpStream::new_list(2);
|
||||
stream.append(&&*partial);
|
||||
stream.append(&&*value);
|
||||
stream.drain()
|
||||
}
|
||||
Node::Extension(partial, child) => {
|
||||
let mut stream = RlpStream::new_list(2);
|
||||
stream.append(&&*partial);
|
||||
child_cb(child, &mut stream);
|
||||
stream.drain()
|
||||
}
|
||||
Node::Empty => C::empty_node(),
|
||||
Node::Leaf(partial, value) => C::leaf_node(&partial, &value),
|
||||
Node::Extension(partial, child) => C::ext_node(&partial, child_cb(child)),
|
||||
Node::Branch(mut children, value) => {
|
||||
let mut stream = RlpStream::new_list(17);
|
||||
for child in children.iter_mut().map(Option::take) {
|
||||
if let Some(handle) = child {
|
||||
child_cb(handle, &mut stream);
|
||||
} else {
|
||||
stream.append_empty_data();
|
||||
}
|
||||
}
|
||||
if let Some(value) = value {
|
||||
stream.append(&&*value);
|
||||
} else {
|
||||
stream.append_empty_data();
|
||||
}
|
||||
|
||||
stream.drain()
|
||||
C::branch_node(
|
||||
// map the `NodeHandle`s from the Branch to `ChildReferences`
|
||||
children.iter_mut()
|
||||
.map(Option::take)
|
||||
.map(|maybe_child|
|
||||
maybe_child.map(|child| child_cb(child))
|
||||
),
|
||||
value
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// post-inspect action.
|
||||
enum Action {
|
||||
enum Action<H: Hasher> {
|
||||
// Replace a node with a new one.
|
||||
Replace(Node),
|
||||
Replace(Node<H>),
|
||||
// Restore the original node. This trusts that the node is actually the original.
|
||||
Restore(Node),
|
||||
Restore(Node<H>),
|
||||
// if it is a new node, just clears the storage.
|
||||
Delete,
|
||||
}
|
||||
|
||||
// post-insert action. Same as action without delete
|
||||
enum InsertAction {
|
||||
enum InsertAction<H: Hasher> {
|
||||
// Replace a node with a new one.
|
||||
Replace(Node),
|
||||
Replace(Node<H>),
|
||||
// Restore the original node.
|
||||
Restore(Node),
|
||||
Restore(Node<H>),
|
||||
}
|
||||
|
||||
impl InsertAction {
|
||||
fn into_action(self) -> Action {
|
||||
impl<H: Hasher> InsertAction<H> {
|
||||
fn into_action(self) -> Action<H> {
|
||||
match self {
|
||||
InsertAction::Replace(n) => Action::Replace(n),
|
||||
InsertAction::Restore(n) => Action::Restore(n),
|
||||
@ -198,7 +176,7 @@ impl InsertAction {
|
||||
}
|
||||
|
||||
// unwrap the node, disregarding replace or restore state.
|
||||
fn unwrap_node(self) -> Node {
|
||||
fn unwrap_node(self) -> Node<H> {
|
||||
match self {
|
||||
InsertAction::Replace(n) | InsertAction::Restore(n) => n,
|
||||
}
|
||||
@ -206,20 +184,26 @@ impl InsertAction {
|
||||
}
|
||||
|
||||
// What kind of node is stored here.
|
||||
enum Stored {
|
||||
enum Stored<H: Hasher> {
|
||||
// A new node.
|
||||
New(Node),
|
||||
New(Node<H>),
|
||||
// A cached node, loaded from the DB.
|
||||
Cached(Node, H256),
|
||||
Cached(Node<H>, H::Out),
|
||||
}
|
||||
|
||||
/// Used to build a collection of child nodes from a collection of `NodeHandle`s
|
||||
pub enum ChildReference<HO> { // `HO` is e.g. `H256`, i.e. the output of a `Hasher`
|
||||
Hash(HO),
|
||||
Inline(HO, usize), // usize is the length of the node data we store in the `H::Out`
|
||||
}
|
||||
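To make the `Inline` variant concrete: the commit path below (`commit_child`) only writes a child node to the database when its encoding is at least `H::LENGTH` bytes long; anything shorter is copied directly into an `H::Out` and tagged with its length. A condensed restatement of that decision, using the same `HashDB`/`Hasher` calls as the surrounding code (the helper name is made up):

```rust
use hashdb::{HashDB, Hasher};

// Sketch of the hash-vs-inline choice made by `commit_child` further down.
fn child_reference<H: Hasher>(db: &mut HashDB<H>, encoded: &[u8]) -> ChildReference<H::Out> {
    if encoded.len() >= H::LENGTH {
        // Full-size node: persist it and refer to it by hash.
        ChildReference::Hash(db.insert(encoded))
    } else {
        // Small node: keep the encoded bytes inline, padded into an `H::Out`,
        // remembering how many of those bytes are meaningful.
        let mut h = H::Out::default();
        h.as_mut()[..encoded.len()].copy_from_slice(encoded);
        ChildReference::Inline(h, encoded.len())
    }
}
```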
|
||||
/// Compact and cache-friendly storage for Trie nodes.
|
||||
struct NodeStorage {
|
||||
nodes: Vec<Stored>,
|
||||
struct NodeStorage<H: Hasher> {
|
||||
nodes: Vec<Stored<H>>,
|
||||
free_indices: VecDeque<usize>,
|
||||
}
|
||||
|
||||
impl NodeStorage {
|
||||
impl<H: Hasher> NodeStorage<H> {
|
||||
/// Create a new storage.
|
||||
fn empty() -> Self {
|
||||
NodeStorage {
|
||||
@ -229,7 +213,7 @@ impl NodeStorage {
|
||||
}
|
||||
|
||||
/// Allocate a new node in the storage.
|
||||
fn alloc(&mut self, stored: Stored) -> StorageHandle {
|
||||
fn alloc(&mut self, stored: Stored<H>) -> StorageHandle {
|
||||
if let Some(idx) = self.free_indices.pop_front() {
|
||||
self.nodes[idx] = stored;
|
||||
StorageHandle(idx)
|
||||
@ -240,7 +224,7 @@ impl NodeStorage {
|
||||
}
|
||||
|
||||
/// Remove a node from the storage, consuming the handle and returning the node.
|
||||
fn destroy(&mut self, handle: StorageHandle) -> Stored {
|
||||
fn destroy(&mut self, handle: StorageHandle) -> Stored<H> {
|
||||
let idx = handle.0;
|
||||
|
||||
self.free_indices.push_back(idx);
|
||||
@ -248,10 +232,10 @@ impl NodeStorage {
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Index<&'a StorageHandle> for NodeStorage {
|
||||
type Output = Node;
|
||||
impl<'a, H: Hasher> Index<&'a StorageHandle> for NodeStorage<H> {
|
||||
type Output = Node<H>;
|
||||
|
||||
fn index(&self, handle: &'a StorageHandle) -> &Node {
|
||||
fn index(&self, handle: &'a StorageHandle) -> &Node<H> {
|
||||
match self.nodes[handle.0] {
|
||||
Stored::New(ref node) => node,
|
||||
Stored::Cached(ref node, _) => node,
|
||||
@ -268,19 +252,22 @@ impl<'a> Index<&'a StorageHandle> for NodeStorage {
|
||||
/// # Example
|
||||
/// ```
|
||||
/// extern crate patricia_trie as trie;
|
||||
/// extern crate keccak_hash;
|
||||
/// extern crate patricia_trie_ethereum as ethtrie;
|
||||
/// extern crate hashdb;
|
||||
/// extern crate keccak_hash;
|
||||
/// extern crate keccak_hasher;
|
||||
/// extern crate memorydb;
|
||||
/// extern crate ethereum_types;
|
||||
///
|
||||
/// use keccak_hash::KECCAK_NULL_RLP;
|
||||
/// use trie::*;
|
||||
/// use hashdb::*;
|
||||
/// use ethtrie::{TrieDBMut, trie::TrieMut};
|
||||
/// use hashdb::DBValue;
|
||||
/// use keccak_hasher::KeccakHasher;
|
||||
/// use memorydb::*;
|
||||
/// use ethereum_types::H256;
|
||||
///
|
||||
/// fn main() {
|
||||
/// let mut memdb = MemoryDB::new();
|
||||
/// let mut memdb = MemoryDB::<KeccakHasher>::new();
|
||||
/// let mut root = H256::new();
|
||||
/// let mut t = TrieDBMut::new(&mut memdb, &mut root);
|
||||
/// assert!(t.is_empty());
|
||||
@ -292,22 +279,31 @@ impl<'a> Index<&'a StorageHandle> for NodeStorage {
|
||||
/// assert!(!t.contains(b"foo").unwrap());
|
||||
/// }
|
||||
/// ```
|
||||
pub struct TrieDBMut<'a> {
|
||||
storage: NodeStorage,
|
||||
db: &'a mut HashDB,
|
||||
root: &'a mut H256,
|
||||
root_handle: NodeHandle,
|
||||
death_row: HashSet<H256>,
|
||||
pub struct TrieDBMut<'a, H, C>
|
||||
where
|
||||
H: Hasher + 'a,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
storage: NodeStorage<H>,
|
||||
db: &'a mut HashDB<H>,
|
||||
root: &'a mut H::Out,
|
||||
root_handle: NodeHandle<H>,
|
||||
death_row: HashSet<H::Out>,
|
||||
/// The number of hash operations this trie has performed.
|
||||
/// Note that none are performed until changes are committed.
|
||||
hash_count: usize,
|
||||
marker: PhantomData<C>, // TODO: rpheimer: "we could have the NodeCodec trait take &self to its methods and then we don't need PhantomData. we can just store an instance of C: NodeCodec in the trie struct. If it's a ZST it won't have any additional overhead anyway"
|
||||
}
|
||||
|
||||
impl<'a> TrieDBMut<'a> {
|
||||
impl<'a, H, C> TrieDBMut<'a, H, C>
|
||||
where
|
||||
H: Hasher,
|
||||
C: NodeCodec<H>
|
||||
{
|
||||
/// Create a new trie with backing database `db` and empty `root`.
|
||||
pub fn new(db: &'a mut HashDB, root: &'a mut H256) -> Self {
|
||||
*root = KECCAK_NULL_RLP;
|
||||
let root_handle = NodeHandle::Hash(KECCAK_NULL_RLP);
|
||||
pub fn new(db: &'a mut HashDB<H>, root: &'a mut H::Out) -> Self {
|
||||
*root = C::HASHED_NULL_NODE;
|
||||
let root_handle = NodeHandle::Hash(C::HASHED_NULL_NODE);
|
||||
|
||||
TrieDBMut {
|
||||
storage: NodeStorage::empty(),
|
||||
@ -316,12 +312,13 @@ impl<'a> TrieDBMut<'a> {
|
||||
root_handle: root_handle,
|
||||
death_row: HashSet::new(),
|
||||
hash_count: 0,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a new trie with the backing database `db` and `root`.
|
||||
/// Returns an error if `root` does not exist.
|
||||
pub fn from_existing(db: &'a mut HashDB, root: &'a mut H256) -> super::Result<Self> {
|
||||
pub fn from_existing(db: &'a mut HashDB<H>, root: &'a mut H::Out) -> Result<Self, H::Out, C::Error> {
|
||||
if !db.contains(root) {
|
||||
return Err(Box::new(TrieError::InvalidStateRoot(*root)));
|
||||
}
|
||||
@ -334,29 +331,34 @@ impl<'a> TrieDBMut<'a> {
|
||||
root_handle: root_handle,
|
||||
death_row: HashSet::new(),
|
||||
hash_count: 0,
|
||||
marker: PhantomData,
|
||||
})
|
||||
}
|
||||
/// Get the backing database.
|
||||
pub fn db(&self) -> &HashDB {
|
||||
pub fn db(&self) -> &HashDB<H> {
|
||||
self.db
|
||||
}
|
||||
|
||||
/// Get the backing database mutably.
|
||||
pub fn db_mut(&mut self) -> &mut HashDB {
|
||||
pub fn db_mut(&mut self) -> &mut HashDB<H> {
|
||||
self.db
|
||||
}
|
||||
|
||||
// cache a node by hash
|
||||
fn cache(&mut self, hash: H256) -> super::Result<StorageHandle> {
|
||||
let node_rlp = self.db.get(&hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash)))?;
|
||||
let node = Node::from_rlp(&node_rlp, &*self.db, &mut self.storage);
|
||||
fn cache(&mut self, hash: H::Out) -> Result<StorageHandle, H::Out, C::Error> {
|
||||
let node_encoded = self.db.get(&hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash)))?;
|
||||
let node = Node::from_encoded::<C>(
|
||||
&node_encoded,
|
||||
&*self.db,
|
||||
&mut self.storage
|
||||
);
|
||||
Ok(self.storage.alloc(Stored::Cached(node, hash)))
|
||||
}
|
||||
|
||||
// inspect a node, choosing either to replace, restore, or delete it.
|
||||
// if restored or replaced, returns the new node along with a flag of whether it was changed.
|
||||
fn inspect<F>(&mut self, stored: Stored, inspector: F) -> super::Result<Option<(Stored, bool)>>
|
||||
where F: FnOnce(&mut Self, Node) -> super::Result<Action> {
|
||||
fn inspect<F>(&mut self, stored: Stored<H>, inspector: F) -> Result<Option<(Stored<H>, bool)>, H::Out, C::Error>
|
||||
where F: FnOnce(&mut Self, Node<H>) -> Result<Action<H>, H::Out, C::Error> {
|
||||
Ok(match stored {
|
||||
Stored::New(node) => match inspector(self, node)? {
|
||||
Action::Restore(node) => Some((Stored::New(node), false)),
|
||||
@ -378,7 +380,7 @@ impl<'a> TrieDBMut<'a> {
|
||||
}
|
||||
|
||||
// walk the trie, attempting to find the key's node.
|
||||
fn lookup<'x, 'key>(&'x self, mut partial: NibbleSlice<'key>, handle: &NodeHandle) -> super::Result<Option<DBValue>>
|
||||
fn lookup<'x, 'key>(&'x self, mut partial: NibbleSlice<'key>, handle: &NodeHandle<H>) -> Result<Option<DBValue>, H::Out, C::Error>
|
||||
where 'x: 'key
|
||||
{
|
||||
let mut handle = handle;
|
||||
@ -388,6 +390,7 @@ impl<'a> TrieDBMut<'a> {
|
||||
db: &*self.db,
|
||||
query: DBValue::from_slice,
|
||||
hash: hash.clone(),
|
||||
marker: PhantomData::<C>,
|
||||
}.look_up(partial),
|
||||
NodeHandle::InMemory(ref handle) => match self.storage[handle] {
|
||||
Node::Empty => return Ok(None),
|
||||
@ -425,10 +428,8 @@ impl<'a> TrieDBMut<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
/// insert a key, value pair into the trie, creating new nodes if necessary.
|
||||
fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: DBValue, old_val: &mut Option<DBValue>)
|
||||
-> super::Result<(StorageHandle, bool)>
|
||||
{
|
||||
/// insert a key-value pair into the trie, creating new nodes if necessary.
|
||||
fn insert_at(&mut self, handle: NodeHandle<H>, partial: NibbleSlice, value: DBValue, old_val: &mut Option<DBValue>) -> Result<(StorageHandle, bool), H::Out, C::Error> {
|
||||
let h = match handle {
|
||||
NodeHandle::InMemory(h) => h,
|
||||
NodeHandle::Hash(h) => self.cache(h)?,
|
||||
@ -442,9 +443,7 @@ impl<'a> TrieDBMut<'a> {
|
||||
}
|
||||
|
||||
/// the insertion inspector.
|
||||
fn insert_inspector(&mut self, node: Node, partial: NibbleSlice, value: DBValue, old_val: &mut Option<DBValue>)
|
||||
-> super::Result<InsertAction>
|
||||
{
|
||||
fn insert_inspector(&mut self, node: Node<H>, partial: NibbleSlice, value: DBValue, old_val: &mut Option<DBValue>) -> Result<InsertAction<H>, H::Out, C::Error> {
|
||||
trace!(target: "trie", "augmented (partial: {:?}, value: {:?})", partial, value.pretty());
|
||||
|
||||
Ok(match node {
|
||||
@ -605,9 +604,7 @@ impl<'a> TrieDBMut<'a> {
|
||||
}
|
||||
|
||||
/// Remove a node from the trie based on key.
|
||||
fn remove_at(&mut self, handle: NodeHandle, partial: NibbleSlice, old_val: &mut Option<DBValue>)
|
||||
-> super::Result<Option<(StorageHandle, bool)>>
{
fn remove_at(&mut self, handle: NodeHandle<H>, partial: NibbleSlice, old_val: &mut Option<DBValue>) -> Result<Option<(StorageHandle, bool)>, H::Out, C::Error> {
let stored = match handle {
NodeHandle::InMemory(h) => self.storage.destroy(h),
NodeHandle::Hash(h) => {
@ -622,7 +619,7 @@ impl<'a> TrieDBMut<'a> {
}

/// the removal inspector
fn remove_inspector(&mut self, node: Node, partial: NibbleSlice, old_val: &mut Option<DBValue>) -> super::Result<Action> {
fn remove_inspector(&mut self, node: Node<H>, partial: NibbleSlice, old_val: &mut Option<DBValue>) -> Result<Action<H>, H::Out, C::Error> {
Ok(match (node, partial.is_empty()) {
(Node::Empty, _) => Action::Delete,
(Node::Branch(c, None), true) => Action::Restore(Node::Branch(c, None)),
@ -708,7 +705,7 @@ impl<'a> TrieDBMut<'a> {
/// _invalid state_ means:
/// - Branch node where there is only a single entry;
/// - Extension node followed by anything other than a Branch node.
fn fix(&mut self, node: Node) -> super::Result<Node> {
fn fix(&mut self, node: Node<H>) -> Result<Node<H>, H::Out, C::Error> {
match node {
Node::Branch(mut children, value) => {
// if only a single value, transmute to leaf/extension and feed through fixed.
@ -828,11 +825,11 @@ impl<'a> TrieDBMut<'a> {

match self.storage.destroy(handle) {
Stored::New(node) => {
let root_rlp = node.into_rlp(|child, stream| self.commit_node(child, stream));
*self.root = self.db.insert(&root_rlp[..]);
let encoded_root = node.into_encoded::<_, C>(|child| self.commit_child(child) );
*self.root = self.db.insert(&encoded_root[..]);
self.hash_count += 1;

trace!(target: "trie", "root node rlp: {:?}", (&root_rlp[..]).pretty());
trace!(target: "trie", "encoded root node: {:?}", (&encoded_root[..]).pretty());
self.root_handle = NodeHandle::Hash(*self.root);
}
Stored::Cached(node, hash) => {
@ -843,29 +840,38 @@ impl<'a> TrieDBMut<'a> {
}
}

/// commit a node, hashing it, committing it to the db,
/// and writing it to the rlp stream as necessary.
fn commit_node(&mut self, handle: NodeHandle, stream: &mut RlpStream) {
/// Commit a node by hashing it and writing it to the db. Returns a
/// `ChildReference` which in most cases carries a normal hash but for the
/// case where we can fit the actual data in the `Hasher`s output type, we
/// store the data inline. This function is used as the callback to the
/// `into_encoded` method of `Node`.
fn commit_child(&mut self, handle: NodeHandle<H>) -> ChildReference<H::Out> {
match handle {
NodeHandle::Hash(h) => stream.append(&h),
NodeHandle::InMemory(h) => match self.storage.destroy(h) {
Stored::Cached(_, h) => stream.append(&h),
NodeHandle::Hash(hash) => ChildReference::Hash(hash),
NodeHandle::InMemory(storage_handle) => {
match self.storage.destroy(storage_handle) {
Stored::Cached(_, hash) => ChildReference::Hash(hash),
Stored::New(node) => {
let node_rlp = node.into_rlp(|child, stream| self.commit_node(child, stream));
if node_rlp.len() >= 32 {
let hash = self.db.insert(&node_rlp[..]);
let encoded = node.into_encoded::<_, C>(|node_handle| self.commit_child(node_handle) );
if encoded.len() >= H::LENGTH {
let hash = self.db.insert(&encoded[..]);
self.hash_count +=1;
stream.append(&hash)
ChildReference::Hash(hash)
} else {
stream.append_raw(&node_rlp, 1)
// it's a small value, so we cram it into a `H::Out` and tag with length
let mut h = H::Out::default();
let len = encoded.len();
h.as_mut()[..len].copy_from_slice(&encoded[..len]);
ChildReference::Inline(h, len)
}
}
}
}
}
};
}
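The rewritten `commit_child` above hashes and stores a child node only when its encoding is at least `H::LENGTH` bytes; anything smaller is packed directly into a hash-sized buffer and tagged with its length as `ChildReference::Inline`. A simplified, self-contained sketch of that decision (fixed 32-byte output and a stubbed database insert; illustrative only, not the crate's API):

```rust
// Simplified model of the inline-vs-hash choice made in `commit_child`.
// A fixed 32-byte array stands in for `H::Out`, and `insert` stands in
// for `HashDB::insert`.
const HASH_LENGTH: usize = 32;

#[derive(Debug)]
enum ChildReference {
    /// Node was stored in the database; reference it by hash.
    Hash([u8; HASH_LENGTH]),
    /// Node encoding was shorter than a hash; carry the bytes inline
    /// together with their length.
    Inline([u8; HASH_LENGTH], usize),
}

fn commit_encoded(
    encoded: &[u8],
    mut insert: impl FnMut(&[u8]) -> [u8; HASH_LENGTH],
) -> ChildReference {
    if encoded.len() >= HASH_LENGTH {
        // Too big to inline: store it and reference it by hash.
        ChildReference::Hash(insert(encoded))
    } else {
        // Small node: cram the bytes into a hash-sized buffer, tag the length.
        let mut h = [0u8; HASH_LENGTH];
        h[..encoded.len()].copy_from_slice(encoded);
        ChildReference::Inline(h, encoded.len())
    }
}

fn main() {
    // A fake insert that just zero-fills; a real HashDB would hash and store.
    let fake_insert = |_data: &[u8]| [0u8; HASH_LENGTH];
    println!("{:?}", commit_encoded(&[0x80], fake_insert)); // Inline(.., 1)
}
```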

// a hack to get the root node's handle
fn root_handle(&self) -> NodeHandle {
fn root_handle(&self) -> NodeHandle<H> {
match self.root_handle {
NodeHandle::Hash(h) => NodeHandle::Hash(h),
NodeHandle::InMemory(StorageHandle(x)) => NodeHandle::InMemory(StorageHandle(x)),
@ -873,15 +879,19 @@ impl<'a> TrieDBMut<'a> {
}
}

impl<'a> TrieMut for TrieDBMut<'a> {
fn root(&mut self) -> &H256 {
impl<'a, H, C> TrieMut<H, C> for TrieDBMut<'a, H, C>
where
H: Hasher,
C: NodeCodec<H>
{
fn root(&mut self) -> &H::Out {
self.commit();
self.root
}

fn is_empty(&self) -> bool {
match self.root_handle {
NodeHandle::Hash(h) => h == KECCAK_NULL_RLP,
NodeHandle::Hash(h) => h == C::HASHED_NULL_NODE,
NodeHandle::InMemory(ref h) => match self.storage[h] {
Node::Empty => true,
_ => false,
@ -889,11 +899,13 @@ impl<'a> TrieMut for TrieDBMut<'a> {
}
}

fn get<'x, 'key>(&'x self, key: &'key [u8]) -> super::Result<Option<DBValue>> where 'x: 'key {
fn get<'x, 'key>(&'x self, key: &'key [u8]) -> Result<Option<DBValue>, H::Out, C::Error>
where 'x: 'key
{
self.lookup(NibbleSlice::new(key), &self.root_handle)
}

fn insert(&mut self, key: &[u8], value: &[u8]) -> super::Result<Option<DBValue>> {
fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
if value.is_empty() { return self.remove(key) }

let mut old_val = None;
@ -914,7 +926,7 @@ impl<'a> TrieMut for TrieDBMut<'a> {
Ok(old_val)
}

fn remove(&mut self, key: &[u8]) -> super::Result<Option<DBValue>> {
fn remove(&mut self, key: &[u8]) -> Result<Option<DBValue>, H::Out, C::Error> {
trace!(target: "trie", "remove: key={:?}", key.pretty());

let root_handle = self.root_handle();
@ -928,8 +940,8 @@ impl<'a> TrieMut for TrieDBMut<'a> {
}
None => {
trace!(target: "trie", "remove: obliterated trie");
self.root_handle = NodeHandle::Hash(KECCAK_NULL_RLP);
*self.root = KECCAK_NULL_RLP;
self.root_handle = NodeHandle::Hash(C::HASHED_NULL_NODE);
*self.root = C::HASHED_NULL_NODE;
}
}

@ -937,7 +949,11 @@ impl<'a> TrieMut for TrieDBMut<'a> {
}
}

impl<'a> Drop for TrieDBMut<'a> {
impl<'a, H, C> Drop for TrieDBMut<'a, H, C>
where
H: Hasher,
C: NodeCodec<H>
{
fn drop(&mut self) {
self.commit();
}
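The `TrieMut` and `Drop` impls above take the `H: Hasher, C: NodeCodec<H>` bounds, and the empty-trie sentinel moves from the Keccak-specific `KECCAK_NULL_RLP` onto the codec as `C::HASHED_NULL_NODE`. A self-contained toy sketch of that pattern (trait shapes and the trivial hash are illustrative only, not the crate's actual definitions):

```rust
// Toy version of the Hasher/NodeCodec split: the generic trie code only
// ever compares against `C::HASHED_NULL_NODE`, so nothing Keccak-specific
// leaks into the generic logic.
trait Hasher {
    type Out: Copy + Eq + std::fmt::Debug;
    const LENGTH: usize;
    fn hash(data: &[u8]) -> Self::Out;
}

trait NodeCodec<H: Hasher> {
    /// What an empty trie hashes to under this codec.
    const HASHED_NULL_NODE: H::Out;
}

struct TinyHasher;
impl Hasher for TinyHasher {
    type Out = [u8; 8];
    const LENGTH: usize = 8;
    fn hash(data: &[u8]) -> [u8; 8] {
        // Not a real hash; just enough to make the example run.
        let mut out = [0u8; 8];
        for (i, b) in data.iter().enumerate() { out[i % 8] ^= *b; }
        out
    }
}

struct TinyCodec;
impl NodeCodec<TinyHasher> for TinyCodec {
    const HASHED_NULL_NODE: [u8; 8] = [0u8; 8];
}

// Mirrors the shape of `is_empty` above: generic code asks the codec.
fn is_empty_root<H: Hasher, C: NodeCodec<H>>(root: &H::Out) -> bool {
    *root == C::HASHED_NULL_NODE
}

fn main() {
    let empty = <TinyCodec as NodeCodec<TinyHasher>>::HASHED_NULL_NODE;
    assert!(is_empty_root::<TinyHasher, TinyCodec>(&empty));
    assert!(!is_empty_root::<TinyHasher, TinyCodec>(&TinyHasher::hash(b"node")));
}
```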
@ -945,18 +961,20 @@ impl<'a> Drop for TrieDBMut<'a> {

#[cfg(test)]
mod tests {
extern crate triehash;

use self::triehash::trie_root;
use hashdb::*;
use memorydb::*;
use super::*;
use bytes::ToPretty;
use keccak::KECCAK_NULL_RLP;
use super::super::TrieMut;
use hashdb::{DBValue, Hasher, HashDB};
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use rlp::{Decodable, Encodable};
use triehash::trie_root;
use standardmap::*;
use ethtrie::{TrieDBMut, RlpCodec, trie::{TrieMut, NodeCodec}};
use env_logger;
use ethereum_types::H256;

fn populate_trie<'db>(db: &'db mut HashDB, root: &'db mut H256, v: &[(Vec<u8>, Vec<u8>)]) -> TrieDBMut<'db> {
fn populate_trie<'db, H, C>(db: &'db mut HashDB<KeccakHasher>, root: &'db mut H256, v: &[(Vec<u8>, Vec<u8>)]) -> TrieDBMut<'db>
where H: Hasher, H::Out: Decodable + Encodable, C: NodeCodec<H>
{
let mut t = TrieDBMut::new(db, root);
for i in 0..v.len() {
let key: &[u8]= &v[i].0;
@ -975,8 +993,7 @@ mod tests {

#[test]
fn playpen() {
::ethcore_logger::init_log();

env_logger::init();
let mut seed = H256::new();
for test_i in 0..10 {
if test_i % 50 == 0 {
@ -991,9 +1008,9 @@ mod tests {
}.make_with(&mut seed);

let real = trie_root(x.clone());
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut memtrie = populate_trie(&mut memdb, &mut root, &x);
let mut memtrie = populate_trie::<_, RlpCodec>(&mut memdb, &mut root, &x);

memtrie.commit();
if *memtrie.root() != real {
@ -1007,7 +1024,7 @@ mod tests {
assert_eq!(*memtrie.root(), real);
unpopulate_trie(&mut memtrie, &x);
memtrie.commit();
if *memtrie.root() != KECCAK_NULL_RLP {
if *memtrie.root() != RlpCodec::HASHED_NULL_NODE {
println!("- TRIE MISMATCH");
println!("");
println!("{:?} vs {:?}", memtrie.root(), real);
@ -1015,21 +1032,21 @@ mod tests {
println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty());
}
}
assert_eq!(*memtrie.root(), KECCAK_NULL_RLP);
assert_eq!(*memtrie.root(), RlpCodec::HASHED_NULL_NODE);
}
}

#[test]
fn init() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
assert_eq!(*t.root(), KECCAK_NULL_RLP);
assert_eq!(*t.root(), RlpCodec::HASHED_NULL_NODE);
}

#[test]
fn insert_on_empty() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1040,14 +1057,15 @@ mod tests {
fn remove_to_empty() {
let big_value = b"00000000000000000000000000000000";

let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t1 = TrieDBMut::new(&mut memdb, &mut root);
t1.insert(&[0x01, 0x23], big_value).unwrap();
t1.insert(&[0x01, 0x34], big_value).unwrap();
let mut memdb2 = MemoryDB::new();
let mut memdb2 = MemoryDB::<KeccakHasher>::new();
let mut root2 = H256::new();
let mut t2 = TrieDBMut::new(&mut memdb2, &mut root2);

t2.insert(&[0x01], big_value).unwrap();
t2.insert(&[0x01, 0x23], big_value).unwrap();
t2.insert(&[0x01, 0x34], big_value).unwrap();
@ -1056,7 +1074,7 @@

#[test]
fn insert_replace_root() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1066,7 +1084,7 @@

#[test]
fn insert_make_branch_root() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1079,7 +1097,7 @@

#[test]
fn insert_into_branch_root() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1094,7 +1112,7 @@

#[test]
fn insert_value_into_branch_root() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1107,7 +1125,7 @@

#[test]
fn insert_split_leaf() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1120,7 +1138,7 @@

#[test]
fn insert_split_extenstion() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01, 0x23, 0x45], &[0x01]).unwrap();
@ -1138,7 +1156,7 @@
let big_value0 = b"00000000000000000000000000000000";
let big_value1 = b"11111111111111111111111111111111";

let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], big_value0).unwrap();
@ -1153,7 +1171,7 @@
fn insert_duplicate_value() {
let big_value = b"00000000000000000000000000000000";

let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], big_value).unwrap();
@ -1166,15 +1184,15 @@

#[test]
fn test_at_empty() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let t = TrieDBMut::new(&mut memdb, &mut root);
assert_eq!(t.get(&[0x5]), Ok(None));
assert_eq!(t.get(&[0x5]).unwrap(), None);
}

#[test]
fn test_at_one() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1185,7 +1203,7 @@

#[test]
fn test_at_three() {
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1194,12 +1212,12 @@
assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0xf1u8, 0x23]));
assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x81u8, 0x23]));
assert_eq!(t.get(&[0x82, 0x23]), Ok(None));
assert_eq!(t.get(&[0x82, 0x23]).unwrap(), None);
t.commit();
assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23]));
assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0xf1u8, 0x23]));
assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x81u8, 0x23]));
assert_eq!(t.get(&[0x82, 0x23]), Ok(None));
assert_eq!(t.get(&[0x82, 0x23]).unwrap(), None);
}

#[test]
@ -1215,14 +1233,14 @@
}.make_with(&mut seed);

let real = trie_root(x.clone());
let mut memdb = MemoryDB::new();
let mut memdb = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut memtrie = populate_trie(&mut memdb, &mut root, &x);
let mut memtrie = populate_trie::<_, RlpCodec>(&mut memdb, &mut root, &x);
let mut y = x.clone();
y.sort_by(|ref a, ref b| a.0.cmp(&b.0));
let mut memdb2 = MemoryDB::new();
let mut memdb2 = MemoryDB::<KeccakHasher>::new();
let mut root2 = H256::new();
let mut memtrie_sorted = populate_trie(&mut memdb2, &mut root2, &y);
let mut memtrie_sorted = populate_trie::<_, RlpCodec>(&mut memdb2, &mut root2, &y);
if *memtrie.root() != real || *memtrie_sorted.root() != real {
println!("TRIE MISMATCH");
println!("");
@ -1242,8 +1260,8 @@

#[test]
fn test_trie_existing() {
let mut db = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut db = MemoryDB::new();
{
let mut t = TrieDBMut::new(&mut db, &mut root);
t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
@ -1265,7 +1283,7 @@
count: 4,
}.make_with(&mut seed);

let mut db = MemoryDB::new();
let mut db = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut db, &mut root);
for &(ref key, ref value) in &x {
@ -1279,7 +1297,7 @@
}

assert!(t.is_empty());
assert_eq!(*t.root(), KECCAK_NULL_RLP);
assert_eq!(*t.root(), RlpCodec::HASHED_NULL_NODE);
}

#[test]
@ -1293,7 +1311,7 @@
count: 4,
}.make_with(&mut seed);

let mut db = MemoryDB::new();
let mut db = MemoryDB::<KeccakHasher>::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut db, &mut root);
for &(ref key, ref value) in &x {

@ -1,7 +1,7 @@
[package]
name = "plain_hasher"
description = "Hasher for 32-bit keys."
version = "0.1.0"
version = "0.2.0"
authors = ["Parity Technologies <admin@parity.io>"]
license = "MIT"
keywords = ["hash", "hasher"]
@ -10,3 +10,4 @@ homepage = "https://github.com/paritytech/plain_hasher"
[dependencies]
crunchy = "0.1.6"
ethereum-types = "0.3"
hashdb = { version = "0.2.0", path = "../hashdb" }

@ -17,11 +17,12 @@
#[macro_use]
extern crate crunchy;
extern crate ethereum_types;
extern crate hashdb;

use ethereum_types::H256;
use std::collections::{HashMap, HashSet};
// use hashdb::Hasher;
use std::hash;

use std::collections::{HashMap, HashSet};
/// Specialized version of `HashMap` with H256 keys and fast hashing function.
pub type H256FastMap<T> = HashMap<H256, T, hash::BuildHasherDefault<PlainHasher>>;
/// Specialized version of `HashSet` with H256 keys and fast hashing function.

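The `plain_hasher` changes above are version and import shuffling (plus the new `hashdb` dependency) around the existing `H256FastMap` alias, which swaps `HashMap`'s default SipHash for the cheap `PlainHasher` when the keys are already 32-byte hashes. A small usage sketch (assuming the `plain_hasher` and `ethereum-types` crates as pinned in the Cargo.toml above):

```rust
extern crate ethereum_types;
extern crate plain_hasher;

use ethereum_types::H256;
use plain_hasher::H256FastMap;

fn main() {
    // H256FastMap<T> is an ordinary HashMap<H256, T> with a faster
    // BuildHasher, so the usual HashMap API applies unchanged.
    let mut counts: H256FastMap<u64> = H256FastMap::default();
    counts.insert(H256::zero(), 1);
    assert_eq!(counts.get(&H256::zero()), Some(&1));
}
```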
@ -14,13 +14,13 @@

#![feature(test)]

extern crate test;
extern crate ethcore_bigint as bigint;
extern crate ethereum_types;
extern crate rlp;
extern crate test;

use test::Bencher;
use bigint::prelude::U256;
use ethereum_types::U256;
use rlp::{RlpStream, Rlp};
use test::Bencher;

#[bench]
fn bench_stream_u64_value(b: &mut Bencher) {
@ -38,7 +38,7 @@ fn bench_decode_u64_value(b: &mut Bencher) {
// u64
let data = vec![0x88, 0x10, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef];
let rlp = Rlp::new(&data);
let _: u64 = rlp.as_val();
let _: u64 = rlp.as_val().unwrap();
});
}

@ -61,7 +61,7 @@ fn bench_decode_u256_value(b: &mut Bencher) {
0x30, 0x40, 0x50, 0x60, 0x77, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0xf0];
let rlp = Rlp::new(&data);
let _ : U256 = rlp.as_val();
let _ : U256 = rlp.as_val().unwrap();
});
}

@ -83,11 +83,11 @@ fn bench_decode_nested_empty_lists(b: &mut Bencher) {
// [ [], [[]], [ [], [[]] ] ]
let data = vec![0xc7, 0xc0, 0xc1, 0xc0, 0xc3, 0xc0, 0xc1, 0xc0];
let rlp = Rlp::new(&data);
let _v0: Vec<u16> = rlp.at(0).as_list();
let _v1: Vec<u16> = rlp.at(1).at(0).as_list();
let nested_rlp = rlp.at(2);
let _v2a: Vec<u16> = nested_rlp.at(0).as_list();
let _v2b: Vec<u16> = nested_rlp.at(1).at(0).as_list();
let _v0: Vec<u16> = rlp.at(0).unwrap().as_list().unwrap();
let _v1: Vec<u16> = rlp.at(1).unwrap().at(0).unwrap().as_list().unwrap();
let nested_rlp = rlp.at(2).unwrap();
let _v2a: Vec<u16> = nested_rlp.at(0).unwrap().as_list().unwrap();
let _v2b: Vec<u16> = nested_rlp.at(1).unwrap().at(0).unwrap().as_list().unwrap();
});
}

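The benchmark updates above track the rlp crate's switch to a fallible decoding API: `Rlp::at`, `as_val`, and `as_list` now return `Result<_, DecoderError>` rather than panicking, so callers unwrap (or propagate) explicitly. A small sketch of the same API using `?` (assuming the in-tree `rlp` crate as exercised by the benches):

```rust
extern crate rlp;

use rlp::{DecoderError, Rlp, RlpStream};

// Decode a two-item list, propagating decoder errors instead of unwrapping.
fn decode_pair(bytes: &[u8]) -> Result<(u64, u64), DecoderError> {
    let rlp = Rlp::new(bytes);
    let a: u64 = rlp.at(0)?.as_val()?;
    let b: u64 = rlp.at(1)?.as_val()?;
    Ok((a, b))
}

fn main() {
    let mut stream = RlpStream::new_list(2);
    stream.append(&1u64).append(&2u64);
    let bytes = stream.out();
    assert_eq!(decode_pair(&bytes).unwrap(), (1, 2));
}
```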
@ -43,8 +43,8 @@ mod rlpin;
mod stream;
mod impls;

use std::borrow::Borrow;
use elastic_array::ElasticArray1024;
use std::borrow::Borrow;

pub use error::DecoderError;
pub use traits::{Decodable, Encodable};

@ -58,6 +58,50 @@ impl RlpStream {
stream
}

/// Apends null to the end of stream, chainable.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let mut stream = RlpStream::new_list(2);
/// stream.append_empty_data().append_empty_data();
/// let out = stream.out();
/// assert_eq!(out, vec![0xc2, 0x80, 0x80]);
/// }
/// ```
pub fn append_empty_data(&mut self) -> &mut Self {
// self push raw item
self.buffer.push(0x80);

// try to finish and prepend the length
self.note_appended(1);

// return chainable self
self
}

/// Drain the object and return the underlying ElasticArray. Panics if it is not finished.
pub fn drain(self) -> ElasticArray1024<u8> {
match self.is_finished() {
true => self.buffer,
false => panic!()
}
}

/// Appends raw (pre-serialised) RLP data. Use with caution. Chainable.
pub fn append_raw<'a>(&'a mut self, bytes: &[u8], item_count: usize) -> &'a mut Self {
// push raw items
self.buffer.append_slice(bytes);

// try to finish and prepend the length
self.note_appended(item_count);

// return chainable self
self
}

/// Appends value to the end of stream, chainable.
///
/// ```rust
@ -145,42 +189,6 @@ impl RlpStream {
self
}

/// Apends null to the end of stream, chainable.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let mut stream = RlpStream::new_list(2);
/// stream.append_empty_data().append_empty_data();
/// let out = stream.out();
/// assert_eq!(out, vec![0xc2, 0x80, 0x80]);
/// }
/// ```
pub fn append_empty_data(&mut self) -> &mut RlpStream {
// self push raw item
self.buffer.push(0x80);

// try to finish and prepend the length
self.note_appended(1);

// return chainable self
self
}

/// Appends raw (pre-serialised) RLP data. Use with caution. Chainable.
pub fn append_raw<'a>(&'a mut self, bytes: &[u8], item_count: usize) -> &'a mut RlpStream {
// push raw items
self.buffer.append_slice(bytes);

// try to finish and prepend the length
self.note_appended(item_count);

// return chainable self
self
}

/// Appends raw (pre-serialised) RLP data. Checks for size oveflow.
pub fn append_raw_checked<'a>(&'a mut self, bytes: &[u8], item_count: usize, max_size: usize) -> bool {
if self.estimate_size(bytes.len()) > max_size {
@ -300,14 +308,6 @@ impl RlpStream {
BasicEncoder::new(self)
}

/// Drain the object and return the underlying ElasticArray.
pub fn drain(self) -> ElasticArray1024<u8> {
match self.is_finished() {
true => self.buffer,
false => panic!()
}
}

/// Finalize current ubnbound list. Panics if no unbounded list has been opened.
pub fn complete_unbounded_list(&mut self) {
let list = self.unfinished_lists.pop().expect("No open list.");

@ -423,4 +423,3 @@ fn test_rlp_stream_unbounded_list() {
stream.complete_unbounded_list();
assert!(stream.is_finished());
}
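The stream.rs hunks above move `append_empty_data`, `append_raw`, and `drain` earlier in the `impl RlpStream` block and switch the return type to `&mut Self`; behaviour is unchanged. For reference, a usage sketch of the chaining API these methods keep (the first case is the doc example carried along in the diff; the `append_raw` case is an added illustration):

```rust
extern crate rlp;

use rlp::RlpStream;

fn main() {
    // Two empty items in a two-element list: 0xc2 list header, then two 0x80s.
    let mut stream = RlpStream::new_list(2);
    stream.append_empty_data().append_empty_data();
    assert_eq!(stream.out(), vec![0xc2, 0x80, 0x80]);

    // append_raw splices pre-serialised RLP in as-is; here the payload is a
    // single already-encoded empty string (0x80), giving a one-item list.
    let mut raw = RlpStream::new_list(1);
    raw.append_raw(&[0x80], 1);
    assert_eq!(raw.out(), vec![0xc1, 0x80]);
}
```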