Merge branch 'master' of github.com:ethcore/parity into move_hash
This commit is contained in:
commit
03c3d16744
178
Cargo.lock
generated
178
Cargo.lock
generated
@ -3,7 +3,7 @@ name = "parity"
|
|||||||
version = "1.3.0"
|
version = "1.3.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"clippy 0.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clippy 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
|
"ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)",
|
||||||
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
"docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
@ -73,7 +73,7 @@ dependencies = [
|
|||||||
name = "bigint"
|
name = "bigint"
|
||||||
version = "0.1.0"
|
version = "0.1.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"heapsize 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
"rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
@ -148,15 +148,15 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clippy"
|
name = "clippy"
|
||||||
version = "0.0.79"
|
version = "0.0.80"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clippy_lints 0.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clippy_lints 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "clippy_lints"
|
name = "clippy_lints"
|
||||||
version = "0.0.79"
|
version = "0.0.80"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
@ -221,7 +221,7 @@ dependencies = [
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "elastic-array"
|
name = "elastic-array"
|
||||||
version = "0.4.0"
|
version = "0.4.0"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "git+https://github.com/ethcore/elastic-array#9a9bebd6ea291c58e4d6b44dd5dc18368638fefe"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "env_logger"
|
name = "env_logger"
|
||||||
@ -260,7 +260,7 @@ version = "1.3.0"
|
|||||||
dependencies = [
|
dependencies = [
|
||||||
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"clippy 0.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clippy 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"ethash 1.3.0",
|
"ethash 1.3.0",
|
||||||
@ -271,7 +271,7 @@ dependencies = [
|
|||||||
"ethcore-util 1.3.0",
|
"ethcore-util 1.3.0",
|
||||||
"ethjson 0.1.0",
|
"ethjson 0.1.0",
|
||||||
"ethstore 0.1.0",
|
"ethstore 0.1.0",
|
||||||
"heapsize 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
|
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
|
||||||
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
@ -287,7 +287,7 @@ dependencies = [
|
|||||||
name = "ethcore-dapps"
|
name = "ethcore-dapps"
|
||||||
version = "1.3.0"
|
version = "1.3.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clippy 0.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clippy 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"ethcore-rpc 1.3.0",
|
"ethcore-rpc 1.3.0",
|
||||||
"ethcore-util 1.3.0",
|
"ethcore-util 1.3.0",
|
||||||
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
|
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
|
||||||
@ -389,7 +389,7 @@ dependencies = [
|
|||||||
name = "ethcore-rpc"
|
name = "ethcore-rpc"
|
||||||
version = "1.3.0"
|
version = "1.3.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clippy 0.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clippy 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"ethash 1.3.0",
|
"ethash 1.3.0",
|
||||||
"ethcore 1.3.0",
|
"ethcore 1.3.0",
|
||||||
"ethcore-devtools 1.3.0",
|
"ethcore-devtools 1.3.0",
|
||||||
@ -413,7 +413,7 @@ dependencies = [
|
|||||||
name = "ethcore-signer"
|
name = "ethcore-signer"
|
||||||
version = "1.3.0"
|
version = "1.3.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clippy 0.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clippy 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"ethcore-rpc 1.3.0",
|
"ethcore-rpc 1.3.0",
|
||||||
"ethcore-util 1.3.0",
|
"ethcore-util 1.3.0",
|
||||||
@ -433,13 +433,13 @@ dependencies = [
|
|||||||
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"bigint 0.1.0",
|
"bigint 0.1.0",
|
||||||
"chrono 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
|
"chrono 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"clippy 0.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clippy 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
"crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"elastic-array 0.4.0 (git+https://github.com/ethcore/elastic-array)",
|
||||||
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
|
"eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)",
|
||||||
"ethcore-devtools 1.3.0",
|
"ethcore-devtools 1.3.0",
|
||||||
"heapsize 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"igd 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"igd 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
"itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
@ -507,14 +507,14 @@ dependencies = [
|
|||||||
name = "ethsync"
|
name = "ethsync"
|
||||||
version = "1.3.0"
|
version = "1.3.0"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"clippy 0.0.79 (registry+https://github.com/rust-lang/crates.io-index)",
|
"clippy 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"ethcore 1.3.0",
|
"ethcore 1.3.0",
|
||||||
"ethcore-ipc 1.3.0",
|
"ethcore-ipc 1.3.0",
|
||||||
"ethcore-ipc-codegen 1.3.0",
|
"ethcore-ipc-codegen 1.3.0",
|
||||||
"ethcore-ipc-nano 1.3.0",
|
"ethcore-ipc-nano 1.3.0",
|
||||||
"ethcore-util 1.3.0",
|
"ethcore-util 1.3.0",
|
||||||
"heapsize 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
"parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
@ -546,7 +546,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "heapsize"
|
name = "heapsize"
|
||||||
version = "0.3.5"
|
version = "0.3.6"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
@ -1518,3 +1518,147 @@ dependencies = [
|
|||||||
"xml-rs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"xml-rs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[metadata]
|
||||||
|
"checksum aho-corasick 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "67077478f0a03952bed2e6786338d400d40c25e9836e08ad50af96607317fd03"
|
||||||
|
"checksum ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1f46cd5b1d660c938e3f92dfe7a73d832b3281479363dd0cd9c1c2fbf60f7962"
|
||||||
|
"checksum arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)" = "16e3bdb2f54b3ace0285975d59a97cf8ed3855294b2b6bc651fcf22a9c352975"
|
||||||
|
"checksum aster 0.17.0 (registry+https://github.com/rust-lang/crates.io-index)" = "07d344974f0a155f091948aa389fb1b912d3a58414fbdb9c8d446d193ee3496a"
|
||||||
|
"checksum bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d9bf6104718e80d7b26a68fdbacff3481cfc05df670821affc7e9cbc1884400c"
|
||||||
|
"checksum bit-vec 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "5b97c2c8e8bbb4251754f559df8af22fb264853c7d009084a576cdf12565089d"
|
||||||
|
"checksum bitflags 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "32866f4d103c4e438b1db1158aa1b1a80ee078e5d77a59a2f906fd62a577389c"
|
||||||
|
"checksum bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8dead7461c1127cf637931a1e50934eb6eee8bff2f74433ac7909e9afcee04a3"
|
||||||
|
"checksum bitflags 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4f67931368edf3a9a51d29886d245f1c3db2f1ef0dcc9e35ff70341b78c10d23"
|
||||||
|
"checksum bitflags 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "72cd7314bd4ee024071241147222c706e80385a1605ac7d4cd2fcc339da2ae46"
|
||||||
|
"checksum blastfig 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "09640e0509d97d5cdff03a9f5daf087a8e04c735c3b113a75139634a19cfc7b2"
|
||||||
|
"checksum bloomchain 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f421095d2a76fc24cd3fb3f912b90df06be7689912b1bdb423caefae59c258d"
|
||||||
|
"checksum byteorder 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "3e68d0b3b234a583993a53d5b0063fb5fe8713590fe733d41b98a2cee6a9c26e"
|
||||||
|
"checksum bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c129aff112dcc562970abb69e2508b40850dd24c274761bb50fb8a0067ba6c27"
|
||||||
|
"checksum cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de1e760d7b6535af4241fca8bd8adf68e2e7edacc6b29f5d399050c5e48cf88c"
|
||||||
|
"checksum chrono 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)" = "a714b6792cb4bb07643c35d2a051d92988d4e296322a60825549dd0764bcd396"
|
||||||
|
"checksum clippy 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)" = "e96469b413984c78285727f94f9c626a1f2006cecdcf813b5d6893c0c85df42f"
|
||||||
|
"checksum clippy_lints 0.0.80 (registry+https://github.com/rust-lang/crates.io-index)" = "f11938c4b10c556903bb1c1e717eb038658324bf7197e4cfc159a16417327345"
|
||||||
|
"checksum cookie 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90266f45846f14a1e986c77d1e9c2626b8c342ed806fe60241ec38cc8697b245"
|
||||||
|
"checksum crossbeam 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "fb974f835e90390c5f9dfac00f05b06dc117299f5ea4e85fbc7bb443af4911cc"
|
||||||
|
"checksum ctrlc 1.1.1 (git+https://github.com/ethcore/rust-ctrlc.git)" = "<none>"
|
||||||
|
"checksum daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "271ec51b7e0bee92f0d04601422c73eb76ececf197026711c97ad25038a010cf"
|
||||||
|
"checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
|
||||||
|
"checksum docopt 0.6.80 (registry+https://github.com/rust-lang/crates.io-index)" = "4cc0acb4ce0828c6a5a11d47baa432fe885881c27428c3a4e473e454ffe57a76"
|
||||||
|
"checksum elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a7ec9af4640023400b86c9ce9126c79ed17ab247ec10a2f530eb78e3893b51aa"
|
||||||
|
"checksum env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "aba65b63ffcc17ffacd6cf5aa843da7c5a25e3bd4bbe0b7def8b214e411250e5"
|
||||||
|
"checksum eth-secp256k1 0.5.4 (git+https://github.com/ethcore/rust-secp256k1)" = "<none>"
|
||||||
|
"checksum gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "3da3a2cbaeb01363c8e3704fd9fd0eb2ceb17c6f27abd4c1ef040fb57d20dc79"
|
||||||
|
"checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
|
||||||
|
"checksum hamming 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65043da274378d68241eb9a8f8f8aa54e349136f7b8e12f63e3ef44043cc30e1"
|
||||||
|
"checksum heapsize 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "927f352867eb72d0ef81b0e2aa457cd9b0888b2d26672cf7ca5912d771215191"
|
||||||
|
"checksum hpack 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d2da7d3a34cf6406d9d700111b8eafafe9a251de41ae71d8052748259343b58"
|
||||||
|
"checksum httparse 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "46534074dbb80b070d60a5cb8ecadd8963a00a438ae1a95268850a7ef73b67ae"
|
||||||
|
"checksum hyper 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bb0f4d00bb781e559b6e66ae4b5479df0fdf9ab15949f52fa2f1f5de16d4cc07"
|
||||||
|
"checksum hyper 0.9.4 (git+https://github.com/ethcore/hyper)" = "<none>"
|
||||||
|
"checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11"
|
||||||
|
"checksum igd 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8f5b93df68d6152576e9bc9f371e33e00b40738d528b3566ff41ea11d04401dc"
|
||||||
|
"checksum isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7408a548dc0e406b7912d9f84c261cc533c1866e047644a811c133c56041ac0c"
|
||||||
|
"checksum itertools 0.4.13 (registry+https://github.com/rust-lang/crates.io-index)" = "086e1fa5fe48840b1cfdef3a20c7e3115599f8d5c4c87ef32a794a7cdd184d76"
|
||||||
|
"checksum json-ipc-server 0.2.4 (git+https://github.com/ethcore/json-ipc-server.git)" = "<none>"
|
||||||
|
"checksum jsonrpc-core 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)" = "91755680900913f73576065c85359ee793ac3883bc461dbca90fc4a603be84cc"
|
||||||
|
"checksum jsonrpc-http-server 6.1.0 (git+https://github.com/ethcore/jsonrpc-http-server.git)" = "<none>"
|
||||||
|
"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
|
||||||
|
"checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a"
|
||||||
|
"checksum lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "49247ec2a285bb3dcb23cbd9c35193c025e7251bfce77c1d5da97e6362dffe7f"
|
||||||
|
"checksum libc 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "97def9dc7ce1d8e153e693e3a33020bc69972181adb2f871e87e888876feae49"
|
||||||
|
"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"
|
||||||
|
"checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e"
|
||||||
|
"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20"
|
||||||
|
"checksum mime 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a74cc2587bf97c49f3f5bab62860d6abf3902ca73b66b51d9b049fbdcd727bd2"
|
||||||
|
"checksum mime_guess 1.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5e50bf542f81754ef69e5cea856946a3819f7c09ea97b4903c8bc8a89f74e7b6"
|
||||||
|
"checksum mio 0.5.1 (git+https://github.com/ethcore/mio?branch=v0.5.x)" = "<none>"
|
||||||
|
"checksum mio 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a637d1ca14eacae06296a008fa7ad955347e34efcb5891cfd8ba05491a37907e"
|
||||||
|
"checksum miow 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "4e93d633d34b8ff65a24566d67d49703e7a5c7ac2844d6139a9fc441a799e89a"
|
||||||
|
"checksum nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)" = "<none>"
|
||||||
|
"checksum nanomsg-sys 0.5.0 (git+https://github.com/ethcore/nanomsg.rs.git)" = "<none>"
|
||||||
|
"checksum net2 0.2.23 (registry+https://github.com/rust-lang/crates.io-index)" = "6a816012ca11cb47009693c1e0c6130e26d39e4d97ee2a13c50e868ec83e3204"
|
||||||
|
"checksum nix 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f05c2fc965fc1cd6b73fa57fa7b89f288178737f2f3ce9e63e4a6a141189000e"
|
||||||
|
"checksum nodrop 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4d9a22dbcebdeef7bf275cbf444d6521d4e7a2fee187b72d80dba0817120dd8f"
|
||||||
|
"checksum nom 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6caab12c5f97aa316cb249725aa32115118e1522b445e26c257dd77cad5ffd4e"
|
||||||
|
"checksum num 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "c04bd954dbf96f76bab6e5bd6cef6f1ce1262d15268ce4f926d2b5b778fa7af2"
|
||||||
|
"checksum num-bigint 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "41655c8d667be847a0b72fe0888857a7b3f052f691cf40852be5fcf87b274a65"
|
||||||
|
"checksum num-complex 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "ccac67baf893ac97474f8d70eff7761dabb1f6c66e71f8f1c67a6859218db810"
|
||||||
|
"checksum num-integer 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "fb24d9bfb3f222010df27995441ded1e954f8f69cd35021f6bef02ca9552fb92"
|
||||||
|
"checksum num-iter 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "287a1c9969a847055e1122ec0ea7a5c5d6f72aad97934e131c83d5c08ab4e45c"
|
||||||
|
"checksum num-rational 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "48cdcc9ff4ae2a8296805ac15af88b3d88ce62128ded0cb74ffb63a587502a84"
|
||||||
|
"checksum num-traits 0.1.32 (registry+https://github.com/rust-lang/crates.io-index)" = "51eab148f171aefad295f8cece636fc488b9b392ef544da31ea4b8ef6b9e9c39"
|
||||||
|
"checksum num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "51fedae97a05f7353612fe017ab705a37e6db8f4d67c5c6fe739a9e70d6eed09"
|
||||||
|
"checksum number_prefix 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "084d05f4bf60621a9ac9bde941a410df548f4de9545f06e5ee9d3aef4b97cd77"
|
||||||
|
"checksum odds 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)" = "b28c06e81b0f789122d415d6394b5fe849bde8067469f4c2980d3cdc10c78ec1"
|
||||||
|
"checksum parity-dapps 0.6.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>"
|
||||||
|
"checksum parity-dapps-home 0.6.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>"
|
||||||
|
"checksum parity-dapps-signer 0.6.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>"
|
||||||
|
"checksum parity-dapps-status 0.6.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>"
|
||||||
|
"checksum parity-dapps-wallet 0.6.0 (git+https://github.com/ethcore/parity-ui.git)" = "<none>"
|
||||||
|
"checksum parking_lot 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "e0fd1be2c3cf5fef20a6d18fec252c4f3c87c14fc3039002eb7d4ed91e436826"
|
||||||
|
"checksum phf 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "447d9d45f2e0b4a9b532e808365abf18fc211be6ca217202fcd45236ef12f026"
|
||||||
|
"checksum phf_codegen 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "8af7ae7c3f75a502292b491e5cc0a1f69e3407744abe6e57e2a3b712bb82f01d"
|
||||||
|
"checksum phf_generator 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "db005608fd99800c8c74106a7c894cf582055b689aa14a79462cefdcb7dc1cc3"
|
||||||
|
"checksum phf_shared 0.7.14 (registry+https://github.com/rust-lang/crates.io-index)" = "fee4d039930e4f45123c9b15976cf93a499847b6483dc09c42ea0ec4940f2aa6"
|
||||||
|
"checksum primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0e31b86efadeaeb1235452171a66689682783149a6249ff334a2c5d8218d00a4"
|
||||||
|
"checksum primal-bit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "464a91febc06166783d4f5ba3577b5ed8dda8e421012df80bfe48a971ed7be8f"
|
||||||
|
"checksum primal-check 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "647c81b67bb9551a7b88d0bcd785ac35b7d0bf4b2f358683d7c2375d04daec51"
|
||||||
|
"checksum primal-estimate 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56ea4531dde757b56906493c8604641da14607bf9cdaa80fb9c9cabd2429f8d5"
|
||||||
|
"checksum primal-sieve 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7aa73fd87e5984a00bdb4c1b14d3d5d6d0bad01b2caaaf924c16ab7260ac946c"
|
||||||
|
"checksum quasi 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b26543b563704e7d87f3ec7cfafb713010a905c5f1b155a8ab66863af43ca578"
|
||||||
|
"checksum quasi_codegen 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0881d9a45d5f9ebe4a7e77742f8c604f3658c212baf8dd711a692dd000bc648c"
|
||||||
|
"checksum quick-error 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0aad603e8d7fb67da22dbdf1f4b826ce8829e406124109e73cf1b2454b93a71c"
|
||||||
|
"checksum quine-mc_cluskey 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6683b0e23d80813b1a535841f0048c1537d3f86d63c999e8373b39a9b0eb74a"
|
||||||
|
"checksum rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "2791d88c6defac799c3f20d74f094ca33b9332612d9aef9078519c82e4fe04a5"
|
||||||
|
"checksum rayon 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "941deb43a6254b9867fec1e0caeda38a2ad905ab18c57f7c68c396ca68998c07"
|
||||||
|
"checksum regex 0.1.68 (registry+https://github.com/rust-lang/crates.io-index)" = "b4329b8928a284580a1c63ec9d846b12f6d3472317243ff7077aff11f23f2b29"
|
||||||
|
"checksum regex-syntax 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "841591b1e05609a643e3b4d0045fce04f701daba7151ddcd3ad47b080693d5a9"
|
||||||
|
"checksum rocksdb 0.4.5 (git+https://github.com/ethcore/rust-rocksdb)" = "<none>"
|
||||||
|
"checksum rocksdb-sys 0.3.0 (git+https://github.com/ethcore/rust-rocksdb)" = "<none>"
|
||||||
|
"checksum rotor 0.6.3 (git+https://github.com/ethcore/rotor)" = "<none>"
|
||||||
|
"checksum rpassword 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "5d3a99497c5c544e629cc8b359ae5ede321eba5fa8e5a8078f3ced727a976c3f"
|
||||||
|
"checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a"
|
||||||
|
"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b"
|
||||||
|
"checksum rustc_version 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "c5f5376ea5e30ce23c03eb77cbe4962b988deead10910c372b226388b594c084"
|
||||||
|
"checksum semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)" = "d4f410fedcf71af0345d7607d246e7ad15faaadd49d240ee3b24e5dc21a820ac"
|
||||||
|
"checksum semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2d5b7638a1f03815d94e88cb3b3c08e87f0db4d683ef499d1836aaf70a45623f"
|
||||||
|
"checksum serde 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b76133a8a02f1c6ebd3fb9a2ecaab3d54302565a51320e80931adba571aadb1b"
|
||||||
|
"checksum serde_codegen 0.7.9 (registry+https://github.com/rust-lang/crates.io-index)" = "c5b74ff4fb34013cc0b917dd182fefc05ee9af233b9d0d557078334554284d0e"
|
||||||
|
"checksum serde_json 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2c88a751caa8f0000058fb971cd443ed2e6b653f33f5a47f29892a8bd44ca4c1"
|
||||||
|
"checksum sha1 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a307a40d5834140e4213a6952483b84e9ad53bdcab918b7335a6e305e505a53c"
|
||||||
|
"checksum slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d807fd58c4181bbabed77cb3b891ba9748241a552bcc5be698faaebefc54f46e"
|
||||||
|
"checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
|
||||||
|
"checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"
|
||||||
|
"checksum solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "172382bac9424588d7840732b250faeeef88942e37b6e35317dce98cafdd75b2"
|
||||||
|
"checksum spmc 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "93bdab61c1a413e591c4d17388ffa859eaff2df27f1e13a5ec8b716700605adf"
|
||||||
|
"checksum strsim 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e4d73a2c36a4d095ed1a6df5cbeac159863173447f7a82b3f4757426844ab825"
|
||||||
|
"checksum syntex 0.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "393b6dd0889df2b064beeea954cfda6bc2571604ac460deeae0fed55a53988af"
|
||||||
|
"checksum syntex_syntax 0.33.0 (registry+https://github.com/rust-lang/crates.io-index)" = "44bded3cabafc65c90b663b1071bd2d198a9ab7515e6ce729e4570aaf53c407e"
|
||||||
|
"checksum target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c63f48baada5c52e65a29eef93ab4f8982681b67f9e8d29c7b05abcfec2b9ffe"
|
||||||
|
"checksum term 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "f2077e54d38055cf1ca0fd7933a2e00cd3ec8f6fed352b2a377f06dcdaaf3281"
|
||||||
|
"checksum termios 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d5d9cf598a6d7ce700a4e6a9199da127e6819a61e64b68609683cc9a01b5683a"
|
||||||
|
"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03"
|
||||||
|
"checksum thread_local 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "0694f51610ef7cfac7a1b81de7f1602ee5356e76541bcd62c40e71933338cab1"
|
||||||
|
"checksum time 0.1.35 (registry+https://github.com/rust-lang/crates.io-index)" = "3c7ec6d62a20df54e07ab3b78b9a3932972f4b7981de295563686849eb3989af"
|
||||||
|
"checksum tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "f7aef43048292ca0bae4ab32180e85f6202cf2816c2a210c396a84b99dab9270"
|
||||||
|
"checksum toml 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "fcd27a04ca509aff336ba5eb2abc58d456f52c4ff64d9724d88acb85ead560b6"
|
||||||
|
"checksum traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "07eaeb7689bb7fca7ce15628319635758eda769fed481ecfe6686ddef2600616"
|
||||||
|
"checksum transient-hashmap 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "15f7cc7116182edca1ed08f6f8c4da92104555ca77addbabea4eaa59b20373d0"
|
||||||
|
"checksum typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1410f6f91f21d1612654e7cc69193b0334f909dcf2c790c4826254fbb86f8887"
|
||||||
|
"checksum unicase 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "13a5906ca2b98c799f4b1ab4557b76367ebd6ae5ef14930ec841c74aed5f3764"
|
||||||
|
"checksum unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c1f7ceb96afdfeedee42bade65a0d585a6a0106f681b6749c8ff4daa8df30b3f"
|
||||||
|
"checksum unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "26643a2f83bac55f1976fb716c10234485f9202dcd65cfbdf9da49867b271172"
|
||||||
|
"checksum unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "36dff09cafb4ec7c8cf0023eb0b686cb6ce65499116a12201c9e11840ca01beb"
|
||||||
|
"checksum url 0.5.9 (registry+https://github.com/rust-lang/crates.io-index)" = "f6d04073d0fcd045a1cf57aea560d1be5ba812d8f28814e1e1cf0e90ff4d2f03"
|
||||||
|
"checksum url 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8ab4ca6f0107350f41a59a51cb0e71a04d905bc6a29181d2cb42fa4f040c65c9"
|
||||||
|
"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f"
|
||||||
|
"checksum uuid 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9767696a9e1bc7a73f2d5f8e0f5428b076cecd9199c200c0364aa0b2d57b8dfa"
|
||||||
|
"checksum vecio 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0795a11576d29ae80525a3fda315bf7b534f8feb9d34101e5fe63fb95bb2fd24"
|
||||||
|
"checksum vergen 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "56b639f935488eb40f06d17c3e3bcc3054f6f75d264e187b1107c8d1cba8d31c"
|
||||||
|
"checksum void 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"
|
||||||
|
"checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4"
|
||||||
|
"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
|
||||||
|
"checksum ws 0.5.0 (git+https://github.com/ethcore/ws-rs.git?branch=stable)" = "<none>"
|
||||||
|
"checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
|
||||||
|
"checksum xml-rs 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)" = "4bac8fd82b24db2dd3b54aa7b29f336d8b5ca1830065ce3aada71bce6f661519"
|
||||||
|
"checksum xml-rs 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f11ef7864e55d06a38755beaf03ab70139a04e619acfe94ef800b11bd79eb52c"
|
||||||
|
"checksum xmltree 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "472a9d37c7c53ab2391161df5b89b1f3bf76dab6ab150d7941ecbdd832282082"
|
||||||
|
@ -40,7 +40,7 @@ ethcore-ipc-hypervisor = { path = "ipc/hypervisor" }
|
|||||||
ethcore-logger = { path = "logger" }
|
ethcore-logger = { path = "logger" }
|
||||||
json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
|
json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
|
||||||
ethcore-dapps = { path = "dapps", optional = true }
|
ethcore-dapps = { path = "dapps", optional = true }
|
||||||
clippy = { version = "0.0.79", optional = true}
|
clippy = { version = "0.0.80", optional = true}
|
||||||
|
|
||||||
[target.'cfg(windows)'.dependencies]
|
[target.'cfg(windows)'.dependencies]
|
||||||
winapi = "0.2"
|
winapi = "0.2"
|
||||||
|
@ -27,7 +27,7 @@ parity-dapps-status = { git = "https://github.com/ethcore/parity-ui.git", versio
|
|||||||
parity-dapps-home = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6" }
|
parity-dapps-home = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6" }
|
||||||
parity-dapps-wallet = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6", optional = true }
|
parity-dapps-wallet = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6", optional = true }
|
||||||
mime_guess = { version = "1.6.1" }
|
mime_guess = { version = "1.6.1" }
|
||||||
clippy = { version = "0.0.79", optional = true}
|
clippy = { version = "0.0.80", optional = true}
|
||||||
|
|
||||||
[build-dependencies]
|
[build-dependencies]
|
||||||
serde_codegen = { version = "0.7.0", optional = true }
|
serde_codegen = { version = "0.7.0", optional = true }
|
||||||
|
@ -12,7 +12,7 @@ syntex = "*"
|
|||||||
ethcore-ipc-codegen = { path = "../ipc/codegen" }
|
ethcore-ipc-codegen = { path = "../ipc/codegen" }
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
clippy = { version = "0.0.79", optional = true}
|
clippy = { version = "0.0.80", optional = true}
|
||||||
ethcore-devtools = { path = "../devtools" }
|
ethcore-devtools = { path = "../devtools" }
|
||||||
ethcore-ipc = { path = "../ipc/rpc" }
|
ethcore-ipc = { path = "../ipc/rpc" }
|
||||||
rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }
|
rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }
|
||||||
|
@ -25,7 +25,7 @@ semver = "0.2"
|
|||||||
bit-set = "0.4"
|
bit-set = "0.4"
|
||||||
time = "0.1"
|
time = "0.1"
|
||||||
evmjit = { path = "../evmjit", optional = true }
|
evmjit = { path = "../evmjit", optional = true }
|
||||||
clippy = { version = "0.0.79", optional = true}
|
clippy = { version = "0.0.80", optional = true}
|
||||||
ethash = { path = "../ethash" }
|
ethash = { path = "../ethash" }
|
||||||
ethcore-util = { path = "../util" }
|
ethcore-util = { path = "../util" }
|
||||||
ethcore-devtools = { path = "../devtools" }
|
ethcore-devtools = { path = "../devtools" }
|
||||||
|
@ -20,6 +20,8 @@ use util::*;
|
|||||||
use pod_account::*;
|
use pod_account::*;
|
||||||
use account_db::*;
|
use account_db::*;
|
||||||
|
|
||||||
|
use std::cell::{Ref, RefCell};
|
||||||
|
|
||||||
/// Single account in the system.
|
/// Single account in the system.
|
||||||
#[derive(Clone)]
|
#[derive(Clone)]
|
||||||
pub struct Account {
|
pub struct Account {
|
||||||
@ -136,7 +138,11 @@ impl Account {
|
|||||||
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
|
SecTrieDBMut would not set it to an invalid state root. Therefore the root is valid and DB creation \
|
||||||
using it will not fail.");
|
using it will not fail.");
|
||||||
|
|
||||||
(Filth::Clean, H256::from(db.get(key).map_or(U256::zero(), |v| -> U256 {decode(v)})))
|
let item: U256 = match db.get(key){
|
||||||
|
Ok(x) => x.map_or_else(U256::zero, decode),
|
||||||
|
Err(e) => panic!("Encountered potential DB corruption: {}", e),
|
||||||
|
};
|
||||||
|
(Filth::Clean, item.into())
|
||||||
}).1.clone()
|
}).1.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -185,6 +191,12 @@ impl Account {
|
|||||||
pub fn is_dirty(&self) -> bool {
|
pub fn is_dirty(&self) -> bool {
|
||||||
self.filth == Filth::Dirty
|
self.filth == Filth::Dirty
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Mark account as clean.
|
||||||
|
pub fn set_clean(&mut self) {
|
||||||
|
self.filth = Filth::Clean
|
||||||
|
}
|
||||||
|
|
||||||
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
|
/// Provide a database to get `code_hash`. Should not be called if it is a contract without code.
|
||||||
pub fn cache_code(&mut self, db: &AccountDB) -> bool {
|
pub fn cache_code(&mut self, db: &AccountDB) -> bool {
|
||||||
// TODO: fill out self.code_cache;
|
// TODO: fill out self.code_cache;
|
||||||
@ -243,9 +255,13 @@ impl Account {
|
|||||||
if f == &Filth::Dirty {
|
if f == &Filth::Dirty {
|
||||||
// cast key and value to trait type,
|
// cast key and value to trait type,
|
||||||
// so we can call overloaded `to_bytes` method
|
// so we can call overloaded `to_bytes` method
|
||||||
match v.is_zero() {
|
let res = match v.is_zero() {
|
||||||
true => { t.remove(k); },
|
true => t.remove(k),
|
||||||
false => { t.insert(k, &encode(&U256::from(v.as_slice()))); },
|
false => t.insert(k, &encode(&U256::from(v.as_slice()))),
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Err(e) = res {
|
||||||
|
warn!("Encountered potential DB corruption: {}", e);
|
||||||
}
|
}
|
||||||
*f = Filth::Clean;
|
*f = Filth::Clean;
|
||||||
}
|
}
|
||||||
|
@ -193,7 +193,7 @@ pub struct OpenBlock<'x> {
|
|||||||
block: ExecutedBlock,
|
block: ExecutedBlock,
|
||||||
engine: &'x Engine,
|
engine: &'x Engine,
|
||||||
vm_factory: &'x EvmFactory,
|
vm_factory: &'x EvmFactory,
|
||||||
last_hashes: LastHashes,
|
last_hashes: Arc<LastHashes>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Just like `OpenBlock`, except that we've applied `Engine::on_close_block`, finished up the non-seal header fields,
|
/// Just like `OpenBlock`, except that we've applied `Engine::on_close_block`, finished up the non-seal header fields,
|
||||||
@ -204,7 +204,7 @@ pub struct OpenBlock<'x> {
|
|||||||
pub struct ClosedBlock {
|
pub struct ClosedBlock {
|
||||||
block: ExecutedBlock,
|
block: ExecutedBlock,
|
||||||
uncle_bytes: Bytes,
|
uncle_bytes: Bytes,
|
||||||
last_hashes: LastHashes,
|
last_hashes: Arc<LastHashes>,
|
||||||
unclosed_state: State,
|
unclosed_state: State,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -235,7 +235,7 @@ impl<'x> OpenBlock<'x> {
|
|||||||
tracing: bool,
|
tracing: bool,
|
||||||
db: Box<JournalDB>,
|
db: Box<JournalDB>,
|
||||||
parent: &Header,
|
parent: &Header,
|
||||||
last_hashes: LastHashes,
|
last_hashes: Arc<LastHashes>,
|
||||||
author: Address,
|
author: Address,
|
||||||
gas_range_target: (U256, U256),
|
gas_range_target: (U256, U256),
|
||||||
extra_data: Bytes,
|
extra_data: Bytes,
|
||||||
@ -316,7 +316,7 @@ impl<'x> OpenBlock<'x> {
|
|||||||
author: self.block.base.header.author.clone(),
|
author: self.block.base.header.author.clone(),
|
||||||
timestamp: self.block.base.header.timestamp,
|
timestamp: self.block.base.header.timestamp,
|
||||||
difficulty: self.block.base.header.difficulty.clone(),
|
difficulty: self.block.base.header.difficulty.clone(),
|
||||||
last_hashes: self.last_hashes.clone(), // TODO: should be a reference.
|
last_hashes: self.last_hashes.clone(),
|
||||||
gas_used: self.block.receipts.last().map_or(U256::zero(), |r| r.gas_used),
|
gas_used: self.block.receipts.last().map_or(U256::zero(), |r| r.gas_used),
|
||||||
gas_limit: self.block.base.header.gas_limit.clone(),
|
gas_limit: self.block.base.header.gas_limit.clone(),
|
||||||
}
|
}
|
||||||
@ -498,7 +498,7 @@ pub fn enact(
|
|||||||
tracing: bool,
|
tracing: bool,
|
||||||
db: Box<JournalDB>,
|
db: Box<JournalDB>,
|
||||||
parent: &Header,
|
parent: &Header,
|
||||||
last_hashes: LastHashes,
|
last_hashes: Arc<LastHashes>,
|
||||||
vm_factory: &EvmFactory,
|
vm_factory: &EvmFactory,
|
||||||
trie_factory: TrieFactory,
|
trie_factory: TrieFactory,
|
||||||
) -> Result<LockedBlock, Error> {
|
) -> Result<LockedBlock, Error> {
|
||||||
@ -531,7 +531,7 @@ pub fn enact_bytes(
|
|||||||
tracing: bool,
|
tracing: bool,
|
||||||
db: Box<JournalDB>,
|
db: Box<JournalDB>,
|
||||||
parent: &Header,
|
parent: &Header,
|
||||||
last_hashes: LastHashes,
|
last_hashes: Arc<LastHashes>,
|
||||||
vm_factory: &EvmFactory,
|
vm_factory: &EvmFactory,
|
||||||
trie_factory: TrieFactory,
|
trie_factory: TrieFactory,
|
||||||
) -> Result<LockedBlock, Error> {
|
) -> Result<LockedBlock, Error> {
|
||||||
@ -548,7 +548,7 @@ pub fn enact_verified(
|
|||||||
tracing: bool,
|
tracing: bool,
|
||||||
db: Box<JournalDB>,
|
db: Box<JournalDB>,
|
||||||
parent: &Header,
|
parent: &Header,
|
||||||
last_hashes: LastHashes,
|
last_hashes: Arc<LastHashes>,
|
||||||
vm_factory: &EvmFactory,
|
vm_factory: &EvmFactory,
|
||||||
trie_factory: TrieFactory,
|
trie_factory: TrieFactory,
|
||||||
) -> Result<LockedBlock, Error> {
|
) -> Result<LockedBlock, Error> {
|
||||||
@ -564,7 +564,7 @@ pub fn enact_and_seal(
|
|||||||
tracing: bool,
|
tracing: bool,
|
||||||
db: Box<JournalDB>,
|
db: Box<JournalDB>,
|
||||||
parent: &Header,
|
parent: &Header,
|
||||||
last_hashes: LastHashes,
|
last_hashes: Arc<LastHashes>,
|
||||||
vm_factory: &EvmFactory,
|
vm_factory: &EvmFactory,
|
||||||
trie_factory: TrieFactory,
|
trie_factory: TrieFactory,
|
||||||
) -> Result<SealedBlock, Error> {
|
) -> Result<SealedBlock, Error> {
|
||||||
@ -586,8 +586,8 @@ mod tests {
|
|||||||
let genesis_header = spec.genesis_header();
|
let genesis_header = spec.genesis_header();
|
||||||
let mut db_result = get_temp_journal_db();
|
let mut db_result = get_temp_journal_db();
|
||||||
let mut db = db_result.take();
|
let mut db = db_result.take();
|
||||||
spec.ensure_db_good(db.as_hashdb_mut());
|
spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
|
||||||
let last_hashes = vec![genesis_header.hash()];
|
let last_hashes = Arc::new(vec![genesis_header.hash()]);
|
||||||
let vm_factory = Default::default();
|
let vm_factory = Default::default();
|
||||||
let b = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
|
let b = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
|
||||||
let b = b.close_and_lock();
|
let b = b.close_and_lock();
|
||||||
@ -603,17 +603,18 @@ mod tests {
|
|||||||
|
|
||||||
let mut db_result = get_temp_journal_db();
|
let mut db_result = get_temp_journal_db();
|
||||||
let mut db = db_result.take();
|
let mut db = db_result.take();
|
||||||
spec.ensure_db_good(db.as_hashdb_mut());
|
spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
|
||||||
let vm_factory = Default::default();
|
let vm_factory = Default::default();
|
||||||
let b = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, vec![genesis_header.hash()], Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap()
|
let last_hashes = Arc::new(vec![genesis_header.hash()]);
|
||||||
|
let b = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap()
|
||||||
.close_and_lock().seal(engine.deref(), vec![]).unwrap();
|
.close_and_lock().seal(engine.deref(), vec![]).unwrap();
|
||||||
let orig_bytes = b.rlp_bytes();
|
let orig_bytes = b.rlp_bytes();
|
||||||
let orig_db = b.drain();
|
let orig_db = b.drain();
|
||||||
|
|
||||||
let mut db_result = get_temp_journal_db();
|
let mut db_result = get_temp_journal_db();
|
||||||
let mut db = db_result.take();
|
let mut db = db_result.take();
|
||||||
spec.ensure_db_good(db.as_hashdb_mut());
|
spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
|
||||||
let e = enact_and_seal(&orig_bytes, engine.deref(), false, db, &genesis_header, vec![genesis_header.hash()], &Default::default(), Default::default()).unwrap();
|
let e = enact_and_seal(&orig_bytes, engine.deref(), false, db, &genesis_header, last_hashes, &Default::default(), Default::default()).unwrap();
|
||||||
|
|
||||||
assert_eq!(e.rlp_bytes(), orig_bytes);
|
assert_eq!(e.rlp_bytes(), orig_bytes);
|
||||||
|
|
||||||
@ -631,9 +632,10 @@ mod tests {
|
|||||||
|
|
||||||
let mut db_result = get_temp_journal_db();
|
let mut db_result = get_temp_journal_db();
|
||||||
let mut db = db_result.take();
|
let mut db = db_result.take();
|
||||||
spec.ensure_db_good(db.as_hashdb_mut());
|
spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
|
||||||
let vm_factory = Default::default();
|
let vm_factory = Default::default();
|
||||||
let mut open_block = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, vec![genesis_header.hash()], Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
|
let last_hashes = Arc::new(vec![genesis_header.hash()]);
|
||||||
|
let mut open_block = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, last_hashes.clone(), Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
|
||||||
let mut uncle1_header = Header::new();
|
let mut uncle1_header = Header::new();
|
||||||
uncle1_header.extra_data = b"uncle1".to_vec();
|
uncle1_header.extra_data = b"uncle1".to_vec();
|
||||||
let mut uncle2_header = Header::new();
|
let mut uncle2_header = Header::new();
|
||||||
@ -647,8 +649,8 @@ mod tests {
|
|||||||
|
|
||||||
let mut db_result = get_temp_journal_db();
|
let mut db_result = get_temp_journal_db();
|
||||||
let mut db = db_result.take();
|
let mut db = db_result.take();
|
||||||
spec.ensure_db_good(db.as_hashdb_mut());
|
spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
|
||||||
let e = enact_and_seal(&orig_bytes, engine.deref(), false, db, &genesis_header, vec![genesis_header.hash()], &Default::default(), Default::default()).unwrap();
|
let e = enact_and_seal(&orig_bytes, engine.deref(), false, db, &genesis_header, last_hashes, &Default::default(), Default::default()).unwrap();
|
||||||
|
|
||||||
let bytes = e.rlp_bytes();
|
let bytes = e.rlp_bytes();
|
||||||
assert_eq!(bytes, orig_bytes);
|
assert_eq!(bytes, orig_bytes);
|
||||||
|
@ -359,7 +359,7 @@ impl BlockChain {
|
|||||||
|
|
||||||
let batch = DBTransaction::new(&db);
|
let batch = DBTransaction::new(&db);
|
||||||
batch.put(DB_COL_HEADERS, &hash, block.header_rlp().as_raw()).unwrap();
|
batch.put(DB_COL_HEADERS, &hash, block.header_rlp().as_raw()).unwrap();
|
||||||
batch.put(DB_COL_BODIES, &hash, &Self::block_to_body(&genesis)).unwrap();
|
batch.put(DB_COL_BODIES, &hash, &Self::block_to_body(genesis)).unwrap();
|
||||||
|
|
||||||
batch.write(DB_COL_EXTRA, &hash, &details);
|
batch.write(DB_COL_EXTRA, &hash, &details);
|
||||||
batch.write(DB_COL_EXTRA, &header.number(), &hash);
|
batch.write(DB_COL_EXTRA, &header.number(), &hash);
|
||||||
@ -549,15 +549,11 @@ impl BlockChain {
|
|||||||
|
|
||||||
assert!(self.pending_best_block.read().is_none());
|
assert!(self.pending_best_block.read().is_none());
|
||||||
|
|
||||||
let block_rlp = UntrustedRlp::new(bytes);
|
|
||||||
let compressed_header = block_rlp.at(0).unwrap().compress(RlpType::Blocks);
|
|
||||||
let compressed_body = UntrustedRlp::new(&Self::block_to_body(bytes)).compress(RlpType::Blocks);
|
|
||||||
|
|
||||||
// store block in db
|
// store block in db
|
||||||
batch.put(DB_COL_HEADERS, &hash, &compressed_header).unwrap();
|
batch.put_compressed(DB_COL_HEADERS, &hash, block.header_rlp().as_raw().to_vec()).unwrap();
|
||||||
batch.put(DB_COL_BODIES, &hash, &compressed_body).unwrap();
|
batch.put_compressed(DB_COL_BODIES, &hash, Self::block_to_body(bytes)).unwrap();
|
||||||
|
|
||||||
let info = self.block_info(bytes);
|
let info = self.block_info(&header);
|
||||||
|
|
||||||
if let BlockLocation::BranchBecomingCanonChain(ref d) = info.location {
|
if let BlockLocation::BranchBecomingCanonChain(ref d) = info.location {
|
||||||
info!(target: "reorg", "Reorg to {} ({} {} {})",
|
info!(target: "reorg", "Reorg to {} ({} {} {})",
|
||||||
@ -582,10 +578,8 @@ impl BlockChain {
|
|||||||
}
|
}
|
||||||
|
|
||||||
/// Get inserted block info which is critical to prepare extras updates.
|
/// Get inserted block info which is critical to prepare extras updates.
|
||||||
fn block_info(&self, block_bytes: &[u8]) -> BlockInfo {
|
fn block_info(&self, header: &HeaderView) -> BlockInfo {
|
||||||
let block = BlockView::new(block_bytes);
|
let hash = header.sha3();
|
||||||
let header = block.header_view();
|
|
||||||
let hash = block.sha3();
|
|
||||||
let number = header.number();
|
let number = header.number();
|
||||||
let parent_hash = header.parent_hash();
|
let parent_hash = header.parent_hash();
|
||||||
let parent_details = self.block_details(&parent_hash).unwrap_or_else(|| panic!("Invalid parent hash: {:?}", parent_hash));
|
let parent_details = self.block_details(&parent_hash).unwrap_or_else(|| panic!("Invalid parent hash: {:?}", parent_hash));
|
||||||
@ -1056,7 +1050,7 @@ mod tests {
|
|||||||
let bc = BlockChain::new(Config::default(), &genesis, db.clone());
|
let bc = BlockChain::new(Config::default(), &genesis, db.clone());
|
||||||
|
|
||||||
let batch = db.transaction();
|
let batch = db.transaction();
|
||||||
for b in [&b1a, &b1b, &b2a, &b2b, &b3a, &b3b, &b4a, &b4b, &b5a, &b5b].iter() {
|
for b in &[&b1a, &b1b, &b2a, &b2b, &b3a, &b3b, &b4a, &b4b, &b5a, &b5b] {
|
||||||
bc.insert_block(&batch, b, vec![]);
|
bc.insert_block(&batch, b, vec![]);
|
||||||
bc.commit();
|
bc.commit();
|
||||||
}
|
}
|
||||||
|
@ -183,7 +183,7 @@ impl Client {
|
|||||||
let tracedb = Arc::new(try!(TraceDB::new(config.tracing, db.clone(), chain.clone())));
|
let tracedb = Arc::new(try!(TraceDB::new(config.tracing, db.clone(), chain.clone())));
|
||||||
|
|
||||||
let mut state_db = journaldb::new(db.clone(), config.pruning, DB_COL_STATE);
|
let mut state_db = journaldb::new(db.clone(), config.pruning, DB_COL_STATE);
|
||||||
if state_db.is_empty() && spec.ensure_db_good(state_db.as_hashdb_mut()) {
|
if state_db.is_empty() && try!(spec.ensure_db_good(state_db.as_hashdb_mut())) {
|
||||||
let batch = DBTransaction::new(&db);
|
let batch = DBTransaction::new(&db);
|
||||||
try!(state_db.commit(&batch, 0, &spec.genesis_header().hash(), None));
|
try!(state_db.commit(&batch, 0, &spec.genesis_header().hash(), None));
|
||||||
try!(db.write(batch).map_err(ClientError::Database));
|
try!(db.write(batch).map_err(ClientError::Database));
|
||||||
@ -246,13 +246,13 @@ impl Client {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_last_hashes(&self, parent_hash: H256) -> LastHashes {
|
fn build_last_hashes(&self, parent_hash: H256) -> Arc<LastHashes> {
|
||||||
{
|
{
|
||||||
let hashes = self.last_hashes.read();
|
let hashes = self.last_hashes.read();
|
||||||
if hashes.front().map_or(false, |h| h == &parent_hash) {
|
if hashes.front().map_or(false, |h| h == &parent_hash) {
|
||||||
let mut res = Vec::from(hashes.clone());
|
let mut res = Vec::from(hashes.clone());
|
||||||
res.resize(256, H256::default());
|
res.resize(256, H256::default());
|
||||||
return res;
|
return Arc::new(res);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let mut last_hashes = LastHashes::new();
|
let mut last_hashes = LastHashes::new();
|
||||||
@ -268,7 +268,7 @@ impl Client {
|
|||||||
}
|
}
|
||||||
let mut cached_hashes = self.last_hashes.write();
|
let mut cached_hashes = self.last_hashes.write();
|
||||||
*cached_hashes = VecDeque::from(last_hashes.clone());
|
*cached_hashes = VecDeque::from(last_hashes.clone());
|
||||||
last_hashes
|
Arc::new(last_hashes)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_and_close_block(&self, block: &PreverifiedBlock) -> Result<LockedBlock, ()> {
|
fn check_and_close_block(&self, block: &PreverifiedBlock) -> Result<LockedBlock, ()> {
|
||||||
@ -413,6 +413,7 @@ impl Client {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
self.db.flush().expect("DB flush failed.");
|
||||||
imported
|
imported
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -440,7 +441,7 @@ impl Client {
|
|||||||
 // CHECK! I *think* this is fine, even if the state_root is equal to another
 // already-imported block of the same number.
 // TODO: Prove it with a test.
-block.drain().commit(&batch, number, hash, ancient).expect("State DB commit failed.");
+block.drain().commit(&batch, number, hash, ancient).expect("DB commit failed.");

 let route = self.chain.insert_block(&batch, block_data, receipts);
 self.tracedb.import(&batch, TraceImportRequest {
@@ -451,7 +452,7 @@ impl Client {
 retracted: route.retracted.len()
 });
 // Final commit to the DB
-self.db.write(batch).expect("State DB write failed.");
+self.db.write_buffered(batch).expect("DB write failed.");
 self.chain.commit();

 self.update_last_hashes(&parent, hash);
@@ -712,7 +713,7 @@ impl BlockChainClient for Client {
 x.state_diff = Some(state.diff_from(orig));
 }
 }
-ret.map_err(|ee| ReplayError::Execution(ee))
+ret.map_err(ReplayError::Execution)
 }

 fn keep_alive(&self) {
@@ -975,7 +976,7 @@ impl BlockChainClient for Client {
 }

 fn last_hashes(&self) -> LastHashes {
-self.build_last_hashes(self.chain.best_block_hash())
+(*self.build_last_hashes(self.chain.best_block_hash())).clone()
 }

 fn queue_transactions(&self, transactions: Vec<Bytes>) {
@@ -1059,6 +1060,7 @@ impl MiningBlockChainClient for Client {
 precise_time_ns() - start,
 );
 });
+self.db.flush().expect("DB flush failed.");
 Ok(h)
 }
 }

@@ -2,11 +2,15 @@ use trace::Error as TraceError;
 use util::UtilError;
 use std::fmt::{Display, Formatter, Error as FmtError};

+use util::trie::TrieError;

 /// Client configuration errors.
 #[derive(Debug)]
 pub enum Error {
 /// TraceDB configuration error.
 Trace(TraceError),
+/// TrieDB-related error.
+Trie(TrieError),
 /// Database error
 Database(String),
 /// Util error
@@ -19,16 +23,29 @@ impl From<TraceError> for Error {
 }
 }

+impl From<TrieError> for Error {
+fn from(err: TrieError) -> Self {
+Error::Trie(err)
+}
+}

 impl From<UtilError> for Error {
 fn from(err: UtilError) -> Self {
 Error::Util(err)
 }
 }

+impl<E> From<Box<E>> for Error where Error: From<E> {
+fn from(err: Box<E>) -> Self {
+Error::from(*err)
+}
+}

 impl Display for Error {
 fn fmt(&self, f: &mut Formatter) -> Result<(), FmtError> {
 match *self {
 Error::Trace(ref err) => write!(f, "{}", err),
+Error::Trie(ref err) => write!(f, "{}", err),
 Error::Util(ref err) => write!(f, "{}", err),
 Error::Database(ref s) => write!(f, "Database error: {}", s),
 }
 }
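The new blanket impl above lets `try!`/`?` accept a boxed error (such as `Box<TrieError>`) wherever the unboxed type already converts into the client `Error`. A minimal self-contained sketch of the same pattern, using stand-in types rather than the real `TrieError`/`UtilError`:

#[derive(Debug)]
struct TrieError(String);

#[derive(Debug)]
enum Error {
    Trie(TrieError),
}

impl From<TrieError> for Error {
    fn from(err: TrieError) -> Self {
        Error::Trie(err)
    }
}

// The blanket impl from the hunk: unbox, then reuse the existing conversion.
impl<E> From<Box<E>> for Error where Error: From<E> {
    fn from(err: Box<E>) -> Self {
        Error::from(*err)
    }
}

fn might_fail() -> Result<(), Box<TrieError>> {
    Err(Box::new(TrieError("missing trie node".into())))
}

fn caller() -> Result<(), Error> {
    might_fail()?; // a Box<TrieError> converts straight into Error here
    Ok(())
}

fn main() {
    println!("{:?}", caller());
}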
@@ -262,7 +262,7 @@ impl MiningBlockChainClient for TestBlockChainClient {
 let genesis_header = self.spec.genesis_header();
 let mut db_result = get_temp_journal_db();
 let mut db = db_result.take();
-self.spec.ensure_db_good(db.as_hashdb_mut());
+self.spec.ensure_db_good(db.as_hashdb_mut()).unwrap();

 let last_hashes = vec![genesis_header.hash()];
 let mut open_block = OpenBlock::new(
@@ -272,7 +272,7 @@ impl MiningBlockChainClient for TestBlockChainClient {
 false,
 db,
 &genesis_header,
-last_hashes,
+Arc::new(last_hashes),
 author,
 gas_range_target,
 extra_data

@@ -202,7 +202,7 @@ mod tests {
 author: 0.into(),
 timestamp: 0,
 difficulty: 0.into(),
-last_hashes: vec![],
+last_hashes: Arc::new(vec![]),
 gas_used: 0.into(),
 gas_limit: 0.into(),
 });
@@ -250,8 +250,8 @@ mod tests {
 let genesis_header = spec.genesis_header();
 let mut db_result = get_temp_journal_db();
 let mut db = db_result.take();
-spec.ensure_db_good(db.as_hashdb_mut());
-let last_hashes = vec![genesis_header.hash()];
+spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
+let last_hashes = Arc::new(vec![genesis_header.hash()]);
 let vm_factory = Default::default();
 let b = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap();
 let b = b.close_and_lock();

@@ -84,8 +84,8 @@ mod tests {
 let genesis_header = spec.genesis_header();
 let mut db_result = get_temp_journal_db();
 let mut db = db_result.take();
-spec.ensure_db_good(db.as_hashdb_mut());
-let last_hashes = vec![genesis_header.hash()];
+spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
+let last_hashes = Arc::new(vec![genesis_header.hash()]);
 let vm_factory = Default::default();
 let b = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, addr, (3141562.into(), 31415620.into()), vec![]).unwrap();
 let b = b.close_and_lock();
@@ -15,6 +15,7 @@
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

 use std::cmp;
+use std::sync::Arc;
 use util::{U256, Address, H256, Hashable};
 use header::BlockNumber;
 use ethjson;
@@ -37,7 +38,7 @@ pub struct EnvInfo {
 /// The block gas limit.
 pub gas_limit: U256,
 /// The last 256 block hashes.
-pub last_hashes: LastHashes,
+pub last_hashes: Arc<LastHashes>,
 /// The gas used.
 pub gas_used: U256,
 }
@@ -50,7 +51,7 @@ impl Default for EnvInfo {
 timestamp: 0,
 difficulty: 0.into(),
 gas_limit: 0.into(),
-last_hashes: vec![],
+last_hashes: Arc::new(vec![]),
 gas_used: 0.into(),
 }
 }
@@ -65,8 +66,8 @@ impl From<ethjson::vm::Env> for EnvInfo {
 difficulty: e.difficulty.into(),
 gas_limit: e.gas_limit.into(),
 timestamp: e.timestamp.into(),
-last_hashes: (1..cmp::min(number + 1, 257)).map(|i| format!("{}", number - i).as_bytes().sha3()).collect(),
-gas_used: Default::default(),
+last_hashes: Arc::new((1..cmp::min(number + 1, 257)).map(|i| format!("{}", number - i).as_bytes().sha3()).collect()),
+gas_used: U256::default(),
 }
 }
 }
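The `EnvInfo` change above swaps the owned `LastHashes` vector for `Arc<LastHashes>`, so the last-256-hashes list is shared rather than copied into every execution environment. A rough illustration of the effect, using placeholder types instead of the real `H256`/`LastHashes`:

use std::sync::Arc;

type Hash = [u8; 32];          // stand-in for H256
type LastHashes = Vec<Hash>;   // the last 256 block hashes

struct EnvInfo {
    number: u64,
    last_hashes: Arc<LastHashes>,
}

fn main() {
    let hashes: Arc<LastHashes> = Arc::new(vec![[0u8; 32]; 256]);

    // Both environments point at the same allocation; constructing a new
    // EnvInfo only bumps a reference count instead of cloning 256 hashes.
    let env_a = EnvInfo { number: 100, last_hashes: hashes.clone() };
    let env_b = EnvInfo { number: 101, last_hashes: hashes.clone() };

    assert!(Arc::ptr_eq(&env_a.last_hashes, &env_b.last_hashes));
    println!("block {} and block {} share {} hashes",
             env_a.number, env_b.number, hashes.len());
}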
@@ -258,7 +258,10 @@ pub type ImportResult = Result<H256, Error>;

 impl From<ClientError> for Error {
 fn from(err: ClientError) -> Error {
-Error::Client(err)
+match err {
+ClientError::Trie(err) => Error::Trie(err),
+_ => Error::Client(err)
+}
 }
 }

@@ -338,6 +341,12 @@ impl From<BlockImportError> for Error {
 }
 }

+impl<E> From<Box<E>> for Error where Error: From<E> {
+fn from(err: Box<E>) -> Error {
+Error::from(*err)
+}
+}

 binary_fixed_size!(BlockError);
 binary_fixed_size!(ImportError);
 binary_fixed_size!(TransactionError);
@@ -163,7 +163,9 @@ impl Engine for Ethash {
 for u in fields.uncles.iter() {
 fields.state.add_balance(u.author(), &(reward * U256::from(8 + u.number() - current_number) / U256::from(8)));
 }
-fields.state.commit();
+if let Err(e) = fields.state.commit() {
+warn!("Encountered error on state commit: {}", e);
+}
 }

 fn verify_block_basic(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
@@ -352,8 +354,8 @@ mod tests {
 let genesis_header = spec.genesis_header();
 let mut db_result = get_temp_journal_db();
 let mut db = db_result.take();
-spec.ensure_db_good(db.as_hashdb_mut());
-let last_hashes = vec![genesis_header.hash()];
+spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
+let last_hashes = Arc::new(vec![genesis_header.hash()]);
 let vm_factory = Default::default();
 let b = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
 let b = b.close();
@@ -367,8 +369,8 @@ mod tests {
 let genesis_header = spec.genesis_header();
 let mut db_result = get_temp_journal_db();
 let mut db = db_result.take();
-spec.ensure_db_good(db.as_hashdb_mut());
-let last_hashes = vec![genesis_header.hash()];
+spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
+let last_hashes = Arc::new(vec![genesis_header.hash()]);
 let vm_factory = Default::default();
 let mut b = OpenBlock::new(engine.deref(), &vm_factory, Default::default(), false, db, &genesis_header, last_hashes, Address::zero(), (3141562.into(), 31415620.into()), vec![]).unwrap();
 let mut uncle = Header::new();
@@ -396,7 +398,7 @@ mod tests {
 author: 0.into(),
 timestamp: 0,
 difficulty: 0.into(),
-last_hashes: vec![],
+last_hashes: Arc::new(vec![]),
 gas_used: 0.into(),
 gas_limit: 0.into(),
 });
@@ -408,7 +410,7 @@ mod tests {
 author: 0.into(),
 timestamp: 0,
 difficulty: 0.into(),
-last_hashes: vec![],
+last_hashes: Arc::new(vec![]),
 gas_used: 0.into(),
 gas_limit: 0.into(),
 });
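The reward path above now checks the `Result` from `state.commit()` and logs a warning instead of discarding it. A small stand-alone sketch of that handling, with `eprintln!` standing in for the crate's `warn!` macro:

#[derive(Debug)]
struct CommitError(&'static str);

struct State {
    backing_ok: bool,
}

impl State {
    // Stand-in for the now-fallible state commit.
    fn commit(&mut self) -> Result<(), CommitError> {
        if self.backing_ok { Ok(()) } else { Err(CommitError("backing DB unavailable")) }
    }
}

fn close_block(state: &mut State) {
    // Mirror of the hunk: log and continue rather than silently ignoring the error.
    if let Err(e) = state.commit() {
        eprintln!("Encountered error on state commit: {:?}", e);
    }
}

fn main() {
    close_block(&mut State { backing_ok: true });
    close_block(&mut State { backing_ok: false });
}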
@@ -67,7 +67,7 @@ mod tests {
 let genesis_header = spec.genesis_header();
 let mut db_result = get_temp_journal_db();
 let mut db = db_result.take();
-spec.ensure_db_good(db.as_hashdb_mut());
+spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
 let s = State::from_existing(db, genesis_header.state_root.clone(), engine.account_start_nonce(), Default::default()).unwrap();
 assert_eq!(s.balance(&"0000000000000000000000000000000000000001".parse().unwrap()), U256::from(1u64));
 assert_eq!(s.balance(&"0000000000000000000000000000000000000002".parse().unwrap()), U256::from(1u64));

@@ -324,7 +324,7 @@ mod tests {
 author: 0.into(),
 timestamp: 0,
 difficulty: 0.into(),
-last_hashes: vec![],
+last_hashes: Arc::new(vec![]),
 gas_used: 0.into(),
 gas_limit: 0.into(),
 }
@@ -391,7 +391,9 @@ mod tests {
 {
 let env_info = &mut setup.env_info;
 env_info.number = test_env_number;
-env_info.last_hashes.push(test_hash.clone());
+let mut last_hashes = (*env_info.last_hashes).clone();
+last_hashes.push(test_hash.clone());
+env_info.last_hashes = Arc::new(last_hashes);
 }
 let state = setup.state.reference_mut();
 let mut tracer = NoopTracer;
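Because `last_hashes` now sits behind an `Arc`, the test above can no longer push into it directly; it clones the vector out, mutates the copy, and swaps a fresh `Arc` back in. The same dance in isolation, with a simple stand-in hash type:

use std::sync::Arc;

type Hash = u64; // stand-in for H256 in this sketch

struct EnvInfo {
    last_hashes: Arc<Vec<Hash>>,
}

fn main() {
    let mut env_info = EnvInfo { last_hashes: Arc::new(vec![1, 2, 3]) };
    let test_hash: Hash = 4;

    // Contents of an Arc are shared, so mutate a private copy and replace the handle.
    let mut last_hashes = (*env_info.last_hashes).clone();
    last_hashes.push(test_hash);
    env_info.last_hashes = Arc::new(last_hashes);

    assert_eq!(env_info.last_hashes.len(), 4);
    println!("last_hashes now ends with {}", env_info.last_hashes[3]);
}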
@@ -20,6 +20,8 @@ use util::*;
 use basic_types::*;
 use time::get_time;

+use std::cell::RefCell;

 /// Type for Block number
 pub type BlockNumber = u64;

@@ -62,7 +62,8 @@ pub fn json_chain_test(json_data: &[u8], era: ChainEra) -> Vec<String> {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
 state.populate_from(pre);
-state.commit();
+state.commit()
+.expect(&format!("State test {} failed due to internal error.", name));
 let vm_factory = Default::default();
 let res = state.apply(&env, engine.deref(), &vm_factory, &transaction, false);

@@ -15,7 +15,9 @@
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

 use ethjson;
-use util::{H256, MemoryDB, TrieSpec, TrieFactory};
+use util::trie::{TrieFactory, TrieSpec};
+use util::hash::H256;
+use util::memorydb::MemoryDB;

 fn test_trie(json: &[u8], trie: TrieSpec) -> Vec<String> {
 let tests = ethjson::trie::Test::load(json).unwrap();
@@ -30,7 +32,8 @@ fn test_trie(json: &[u8], trie: TrieSpec) -> Vec<String> {
 for (key, value) in test.input.data.into_iter() {
 let key: Vec<u8> = key.into();
 let value: Vec<u8> = value.map_or_else(Vec::new, Into::into);
-t.insert(&key, &value);
+t.insert(&key, &value)
+.expect(&format!("Trie test '{:?}' failed due to internal error", name))
 }

 if *t.root() != test.root.into() {
@@ -46,7 +49,7 @@ fn test_trie(json: &[u8], trie: TrieSpec) -> Vec<String> {
 }

 mod generic {
-use util::TrieSpec;
+use util::trie::TrieSpec;

 fn do_json_test(json: &[u8]) -> Vec<String> {
 super::test_trie(json, TrieSpec::Generic)
@@ -57,7 +60,7 @@ mod generic {
 }

 mod secure {
-use util::TrieSpec;
+use util::trie::TrieSpec;

 fn do_json_test(json: &[u8]) -> Vec<String> {
 super::test_trie(json, TrieSpec::Secure)
@@ -472,7 +472,7 @@ impl MinerService for Miner {

 // TODO: merge this code with client.rs's fn call somwhow.
 let header = block.header();
-let last_hashes = chain.last_hashes();
+let last_hashes = Arc::new(chain.last_hashes());
 let env_info = EnvInfo {
 number: header.number(),
 author: *header.author(),

@@ -71,7 +71,9 @@ impl PodAccount {
 let mut r = H256::new();
 let mut t = SecTrieDBMut::new(db, &mut r);
 for (k, v) in &self.storage {
-t.insert(k, &encode(&U256::from(v.as_slice())));
+if let Err(e) = t.insert(k, &encode(&U256::from(v.as_slice()))) {
+warn!("Encountered potential DB corruption: {}", e);
+}
 }
 }
 }
@@ -96,7 +98,7 @@ impl From<ethjson::spec::Account> for PodAccount {
 PodAccount {
 balance: a.balance.map_or_else(U256::zero, Into::into),
 nonce: a.nonce.map_or_else(U256::zero, Into::into),
-code: a.code.map(Into::into).or(Some(Vec::new())),
+code: a.code.map(Into::into).or_else(|| Some(Vec::new())),
 storage: BTreeMap::new()
 }
 }
@@ -19,7 +19,7 @@
 use account_db::{AccountDB, AccountDBMut};
 use error::Error;
 use util::{U256, FixedHash, H256, Bytes, HashDB, SHA3_EMPTY, TrieDB};
-use util::rlp::{DecoderError, Rlp, RlpStream, Stream, UntrustedRlp, View};
+use util::rlp::{Rlp, RlpStream, Stream, UntrustedRlp, View};

 // An alternate account structure from ::account::Account.
 #[derive(PartialEq, Clone, Debug)]
@@ -99,7 +99,7 @@ impl Account {
 }

 // decode a fat rlp, and rebuild the storage trie as we go.
-pub fn from_fat_rlp(acct_db: &mut AccountDBMut, rlp: UntrustedRlp) -> Result<Self, DecoderError> {
+pub fn from_fat_rlp(acct_db: &mut AccountDBMut, rlp: UntrustedRlp) -> Result<Self, Error> {
 use util::{TrieDBMut, TrieMut};

 let nonce = try!(rlp.val_at(0));
@@ -120,7 +120,7 @@ impl Account {
 let k: Bytes = try!(pair_rlp.val_at(0));
 let v: Bytes = try!(pair_rlp.val_at(1));

-storage_trie.insert(&k, &v);
+try!(storage_trie.insert(&k, &v));
 }
 }
 Ok(Account {
@@ -157,7 +157,7 @@ mod tests {
 {
 let mut trie = SecTrieDBMut::new(&mut db, &mut root);
 for (k, v) in map.make() {
-trie.insert(&k, &v);
+trie.insert(&k, &v).unwrap();
 }
 }
 root

@@ -387,11 +387,11 @@ impl StateRebuilder {
 };

 for (hash, thin_rlp) in pairs {
-account_trie.insert(&hash, &thin_rlp);
+try!(account_trie.insert(&hash, &thin_rlp));
 }
 }

-let batch = DBTransaction::new(&self.db.backing());
+let batch = DBTransaction::new(self.db.backing());
 try!(self.db.commit(&batch, 0, &H256::zero(), None));
 try!(self.db.backing().write(batch).map_err(|e| Error::Util(e.into())));
 Ok(())
@@ -25,6 +25,8 @@ use super::seal::Generic as GenericSeal;
 use ethereum;
 use ethjson;

+use std::cell::RefCell;

 /// Parameters common to all engines.
 #[derive(Debug, PartialEq, Clone)]
 pub struct CommonParams {
@@ -226,21 +228,21 @@ impl Spec {
 }

 /// Ensure that the given state DB has the trie nodes in for the genesis state.
-pub fn ensure_db_good(&self, db: &mut HashDB) -> bool {
+pub fn ensure_db_good(&self, db: &mut HashDB) -> Result<bool, Box<TrieError>> {
 if !db.contains(&self.state_root()) {
 let mut root = H256::new();
 {
 let mut t = SecTrieDBMut::new(db, &mut root);
 for (address, account) in self.genesis_state.get().iter() {
-t.insert(address.as_slice(), &account.rlp());
+try!(t.insert(address.as_slice(), &account.rlp()));
 }
 }
 for (address, account) in self.genesis_state.get().iter() {
 account.insert_additional(&mut AccountDBMut::new(db, address));
 }
 assert!(db.contains(&self.state_root()));
-true
-} else { false }
+Ok(true)
+} else { Ok(false) }
 }

 /// Loads spec from json file.
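With trie insertions now fallible, `ensure_db_good` reports `Result<bool, Box<TrieError>>` instead of a bare `bool`, which is why every caller in the test hunks gained a `.unwrap()`. A hedged sketch of that shape, with hypothetical stand-ins for the trie types:

#[derive(Debug)]
struct TrieError(&'static str);

// Stand-in for a genesis-state insertion that may fail at the DB layer.
fn insert_genesis_state() -> Result<(), TrieError> {
    Ok(())
}

// Returns Ok(true) if the DB had to be populated, Ok(false) if it was already good.
fn ensure_db_good(already_populated: bool) -> Result<bool, Box<TrieError>> {
    if already_populated {
        return Ok(false);
    }
    insert_genesis_state().map_err(Box::new)?;
    Ok(true)
}

fn main() {
    // Test code treats a bad genesis DB as fatal, hence the plain unwrap.
    let created = ensure_db_good(false).unwrap();
    println!("genesis state inserted: {}", created);
}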
@@ -14,6 +14,8 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

+use std::cell::{RefCell, RefMut};

 use common::*;
 use engines::Engine;
 use executive::{Executive, TransactOptions};
@@ -71,7 +73,7 @@ impl State {
 /// Creates new state with existing state root
 pub fn from_existing(db: Box<JournalDB>, root: H256, account_start_nonce: U256, trie_factory: TrieFactory) -> Result<State, TrieError> {
 if !db.as_hashdb().contains(&root) {
-return Err(TrieError::InvalidStateRoot);
+return Err(TrieError::InvalidStateRoot(root));
 }

 let state = State {
@@ -161,8 +163,7 @@ impl State {

 /// Determine whether an account exists.
 pub fn exists(&self, a: &Address) -> bool {
-let db = self.trie_factory.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
-self.cache.borrow().get(a).unwrap_or(&None).is_some() || db.contains(a)
+self.ensure_cached(a, false, |a| a.is_some())
 }

 /// Get the balance of account `a`.
@@ -238,8 +239,7 @@ impl State {

 // TODO uncomment once to_pod() works correctly.
 // trace!("Applied transaction. Diff:\n{}\n", state_diff::diff_pod(&old, &self.to_pod()));
-self.commit();
-self.clear();
+try!(self.commit());
 let receipt = Receipt::new(self.root().clone(), e.cumulative_gas_used, e.logs);
 // trace!("Transaction receipt: {:?}", receipt);
 Ok(ApplyOutcome{receipt: receipt, trace: e.trace})
@@ -248,7 +248,13 @@ impl State {
 /// Commit accounts to SecTrieDBMut. This is similar to cpp-ethereum's dev::eth::commit.
 /// `accounts` is mutable because we may need to commit the code or storage and record that.
 #[cfg_attr(feature="dev", allow(match_ref_pats))]
-pub fn commit_into(trie_factory: &TrieFactory, db: &mut HashDB, root: &mut H256, accounts: &mut HashMap<Address, Option<Account>>) {
+pub fn commit_into(
+trie_factory: &TrieFactory,
+db: &mut HashDB,
+root: &mut H256,
+accounts: &mut HashMap<Address,
+Option<Account>>
+) -> Result<(), Error> {
 // first, commit the sub trees.
 // TODO: is this necessary or can we dispense with the `ref mut a` for just `a`?
 for (address, ref mut a) in accounts.iter_mut() {
@@ -264,20 +270,25 @@ impl State {

 {
 let mut trie = trie_factory.from_existing(db, root).unwrap();
-for (address, ref a) in accounts.iter() {
+for (address, ref mut a) in accounts.iter_mut() {
 match **a {
-Some(ref account) if account.is_dirty() => trie.insert(address, &account.rlp()),
-None => trie.remove(address),
+Some(ref mut account) if account.is_dirty() => {
+account.set_clean();
+try!(trie.insert(address, &account.rlp()))
+},
+None => try!(trie.remove(address)),
 _ => (),
 }
 }
 }

+Ok(())
 }

 /// Commits our cached account changes into the trie.
-pub fn commit(&mut self) {
+pub fn commit(&mut self) -> Result<(), Error> {
 assert!(self.snapshots.borrow().is_empty());
-Self::commit_into(&self.trie_factory, self.db.as_hashdb_mut(), &mut self.root, self.cache.borrow_mut().deref_mut());
+Self::commit_into(&self.trie_factory, self.db.as_hashdb_mut(), &mut self.root, &mut *self.cache.borrow_mut())
 }

 /// Clear state cache
@@ -336,7 +347,11 @@ impl State {
 let have_key = self.cache.borrow().contains_key(a);
 if !have_key {
 let db = self.trie_factory.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
-self.insert_cache(a, db.get(a).map(Account::from_rlp))
+let maybe_acc = match db.get(&a) {
+Ok(acc) => acc.map(Account::from_rlp),
+Err(e) => panic!("Potential DB corruption encountered: {}", e),
+};
+self.insert_cache(a, maybe_acc);
 }
 if require_code {
 if let Some(ref mut account) = self.cache.borrow_mut().get_mut(a).unwrap().as_mut() {
@@ -348,33 +363,40 @@ impl State {
 }

 /// Pull account `a` in our cache from the trie DB. `require_code` requires that the code be cached, too.
-fn require<'a>(&'a self, a: &Address, require_code: bool) -> &'a mut Account {
+fn require<'a>(&'a self, a: &Address, require_code: bool) -> RefMut<'a, Account> {
 self.require_or_from(a, require_code, || Account::new_basic(U256::from(0u8), self.account_start_nonce), |_|{})
 }

 /// Pull account `a` in our cache from the trie DB. `require_code` requires that the code be cached, too.
 /// If it doesn't exist, make account equal the evaluation of `default`.
-fn require_or_from<'a, F: FnOnce() -> Account, G: FnOnce(&mut Account)>(&self, a: &Address, require_code: bool, default: F, not_default: G) -> &'a mut Account {
-let have_key = self.cache.borrow().contains_key(a);
-if !have_key {
+fn require_or_from<'a, F: FnOnce() -> Account, G: FnOnce(&mut Account)>(&'a self, a: &Address, require_code: bool, default: F, not_default: G)
+-> RefMut<'a, Account>
+{
+let contains_key = self.cache.borrow().contains_key(a);
+if !contains_key {
 let db = self.trie_factory.readonly(self.db.as_hashdb(), &self.root).expect(SEC_TRIE_DB_UNWRAP_STR);
-self.insert_cache(a, db.get(a).map(Account::from_rlp))
+let maybe_acc = match db.get(&a) {
+Ok(acc) => acc.map(Account::from_rlp),
+Err(e) => panic!("Potential DB corruption encountered: {}", e),
+};
+
+self.insert_cache(a, maybe_acc);
 } else {
 self.note_cache(a);
 }
-let preexists = self.cache.borrow().get(a).unwrap().is_none();
-if preexists {
-self.cache.borrow_mut().insert(a.clone(), Some(default()));
-} else {
-not_default(self.cache.borrow_mut().get_mut(a).unwrap().as_mut().unwrap());
+match self.cache.borrow_mut().get_mut(a).unwrap() {
+&mut Some(ref mut acc) => not_default(acc),
+slot @ &mut None => *slot = Some(default()),
 }

-unsafe { ::std::mem::transmute(self.cache.borrow_mut().get_mut(a).unwrap().as_mut().map(|account| {
+RefMut::map(self.cache.borrow_mut(), |c| {
+let account = c.get_mut(a).unwrap().as_mut().unwrap();
 if require_code {
 account.cache_code(&AccountDB::new(self.db.as_hashdb(), a));
 }
 account
-}).unwrap()) }
+})
 }
 }

@@ -466,12 +488,12 @@ fn should_work_when_cloned() {
 let mut state = get_temp_state_in(temp.as_path());
 assert_eq!(state.exists(&a), false);
 state.inc_nonce(&a);
-state.commit();
+state.commit().unwrap();
 state.clone()
 };

 state.inc_nonce(&a);
-state.commit();
+state.commit().unwrap();
 }

 #[test]
@@ -1281,7 +1303,7 @@ fn code_from_database() {
 state.require_or_from(&a, false, ||Account::new_contract(42.into(), 0.into()), |_|{});
 state.init_code(&a, vec![1, 2, 3]);
 assert_eq!(state.code(&a), Some([1u8, 2, 3].to_vec()));
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.code(&a), Some([1u8, 2, 3].to_vec()));
 state.drop()
 };
@@ -1297,7 +1319,7 @@ fn storage_at_from_database() {
 let (root, db) = {
 let mut state = get_temp_state_in(temp.as_path());
 state.set_storage(&a, H256::from(&U256::from(01u64)), H256::from(&U256::from(69u64)));
-state.commit();
+state.commit().unwrap();
 state.drop()
 };

@@ -1313,7 +1335,7 @@ fn get_from_database() {
 let mut state = get_temp_state_in(temp.as_path());
 state.inc_nonce(&a);
 state.add_balance(&a, &U256::from(69u64));
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.balance(&a), U256::from(69u64));
 state.drop()
 };
@@ -1344,7 +1366,7 @@ fn remove_from_database() {
 let (root, db) = {
 let mut state = get_temp_state_in(temp.as_path());
 state.inc_nonce(&a);
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.exists(&a), true);
 assert_eq!(state.nonce(&a), U256::from(1u64));
 state.drop()
@@ -1355,7 +1377,7 @@ fn remove_from_database() {
 assert_eq!(state.exists(&a), true);
 assert_eq!(state.nonce(&a), U256::from(1u64));
 state.kill_account(&a);
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.exists(&a), false);
 assert_eq!(state.nonce(&a), U256::from(0u64));
 state.drop()
@@ -1374,16 +1396,16 @@ fn alter_balance() {
 let b = address_from_u64(1u64);
 state.add_balance(&a, &U256::from(69u64));
 assert_eq!(state.balance(&a), U256::from(69u64));
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.balance(&a), U256::from(69u64));
 state.sub_balance(&a, &U256::from(42u64));
 assert_eq!(state.balance(&a), U256::from(27u64));
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.balance(&a), U256::from(27u64));
 state.transfer_balance(&a, &b, &U256::from(18u64));
 assert_eq!(state.balance(&a), U256::from(9u64));
 assert_eq!(state.balance(&b), U256::from(18u64));
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.balance(&a), U256::from(9u64));
 assert_eq!(state.balance(&b), U256::from(18u64));
 }
@@ -1397,11 +1419,11 @@ fn alter_nonce() {
 assert_eq!(state.nonce(&a), U256::from(1u64));
 state.inc_nonce(&a);
 assert_eq!(state.nonce(&a), U256::from(2u64));
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.nonce(&a), U256::from(2u64));
 state.inc_nonce(&a);
 assert_eq!(state.nonce(&a), U256::from(3u64));
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.nonce(&a), U256::from(3u64));
 }

@@ -1412,7 +1434,7 @@ fn balance_nonce() {
 let a = Address::zero();
 assert_eq!(state.balance(&a), U256::from(0u64));
 assert_eq!(state.nonce(&a), U256::from(0u64));
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.balance(&a), U256::from(0u64));
 assert_eq!(state.nonce(&a), U256::from(0u64));
 }
@@ -1423,7 +1445,7 @@ fn ensure_cached() {
 let mut state = state_result.reference_mut();
 let a = Address::zero();
 state.require(&a, false);
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.root().hex(), "0ce23f3c809de377b008a4a3ee94a0834aac8bec1f86e28ffe4fdb5a15b0c785");
 }

@@ -1463,7 +1485,7 @@ fn snapshot_nested() {
 fn create_empty() {
 let mut state_result = get_temp_state();
 let mut state = state_result.reference_mut();
-state.commit();
+state.commit().unwrap();
 assert_eq!(state.root().hex(), "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421");
 }

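The most notable change in this file is `require_or_from` returning `RefMut<'a, Account>` built with `RefMut::map`, replacing the previous `unsafe { mem::transmute(...) }`. A condensed, self-contained sketch of that technique with a toy cache rather than the real `State` type:

use std::cell::{RefCell, RefMut};
use std::collections::HashMap;

type Address = u64; // toy stand-in

#[derive(Debug)]
struct Account {
    nonce: u64,
}

struct State {
    cache: RefCell<HashMap<Address, Option<Account>>>,
}

impl State {
    // Hands out a mutable borrow that is tied to the RefCell guard, so the
    // borrow checker enforces what the old transmute merely asserted.
    fn require(&self, a: Address) -> RefMut<Account> {
        let contains_key = self.cache.borrow().contains_key(&a);
        if !contains_key {
            self.cache.borrow_mut().insert(a, Some(Account { nonce: 0 }));
        }
        RefMut::map(self.cache.borrow_mut(), |c| {
            c.get_mut(&a).unwrap().as_mut().unwrap()
        })
    }
}

fn main() {
    let state = State { cache: RefCell::new(HashMap::new()) };
    state.require(7).nonce += 1;
    state.require(7).nonce += 1;
    assert_eq!(state.require(7).nonce, 2);
    println!("nonce bumped twice through RefMut::map");
}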
@@ -137,7 +137,7 @@ pub fn generate_dummy_client_with_spec_and_data<F>(get_test_spec: F, block_numbe

 let mut db_result = get_temp_journal_db();
 let mut db = db_result.take();
-test_spec.ensure_db_good(db.as_hashdb_mut());
+test_spec.ensure_db_good(db.as_hashdb_mut()).unwrap();
 let vm_factory = Default::default();
 let genesis_header = test_spec.genesis_header();

@@ -160,7 +160,7 @@ pub fn generate_dummy_client_with_spec_and_data<F>(get_test_spec: F, block_numbe
 false,
 db,
 &last_header,
-last_hashes.clone(),
+Arc::new(last_hashes.clone()),
 author.clone(),
 (3141562.into(), 31415620.into()),
 vec![]
@@ -15,7 +15,6 @@
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

 use std::ops::{Deref, DerefMut};
-use std::path::{PathBuf};
 use ethkey::{KeyPair, sign, Address, Secret, Signature, Message};
 use {json, Error, crypto};
 use crypto::Keccak256;
@@ -36,7 +35,7 @@ pub struct SafeAccount {
 pub version: Version,
 pub address: Address,
 pub crypto: Crypto,
-pub path: Option<PathBuf>,
+pub filename: Option<String>,
 pub name: String,
 pub meta: String,
 }
@@ -63,20 +62,6 @@ impl Into<json::Crypto> for Crypto {
 }
 }

-impl From<json::KeyFile> for SafeAccount {
-fn from(json: json::KeyFile) -> Self {
-SafeAccount {
-id: json.id.into(),
-version: json.version.into(),
-address: json.address.into(),
-crypto: json.crypto.into(),
-path: None,
-name: json.name.unwrap_or(String::new()),
-meta: json.meta.unwrap_or("{}".to_owned()),
-}
-}
-}
-
 impl Into<json::KeyFile> for SafeAccount {
 fn into(self) -> json::KeyFile {
 json::KeyFile {
@@ -147,26 +132,32 @@ impl Crypto {
 }

 impl SafeAccount {
-// DEPRECATED. use `create_with_name` instead
-pub fn create(keypair: &KeyPair, id: [u8; 16], password: &str, iterations: u32, name: String, meta: String) -> Self {
+pub fn create(
+keypair: &KeyPair,
+id: [u8; 16],
+password: &str,
+iterations: u32,
+name: String,
+meta: String
+) -> Self {
 SafeAccount {
 id: id,
 version: Version::V3,
 crypto: Crypto::create(keypair.secret(), password, iterations),
 address: keypair.address(),
-path: None,
+filename: None,
 name: name,
 meta: meta,
 }
 }

-pub fn from_file(json: json::KeyFile, path: PathBuf) -> Self {
+pub fn from_file(json: json::KeyFile, filename: String) -> Self {
 SafeAccount {
 id: json.id.into(),
 version: json.version.into(),
 address: json.address.into(),
 crypto: json.crypto.into(),
-path: Some(path),
+filename: Some(filename),
 name: json.name.unwrap_or(String::new()),
 meta: json.meta.unwrap_or("{}".to_owned()),
 }
@@ -184,7 +175,7 @@ impl SafeAccount {
 version: self.version.clone(),
 crypto: Crypto::create(&secret, new_password, iterations),
 address: self.address.clone(),
-path: self.path.clone(),
+filename: self.filename.clone(),
 name: self.name.clone(),
 meta: self.meta.clone(),
 };
|
|||||||
.map(json::KeyFile::load)
|
.map(json::KeyFile::load)
|
||||||
.zip(paths.into_iter())
|
.zip(paths.into_iter())
|
||||||
.map(|(file, path)| match file {
|
.map(|(file, path)| match file {
|
||||||
Ok(file) => Ok((path, file.into())),
|
Ok(file) => Ok((path.clone(), SafeAccount::from_file(
|
||||||
|
file, path.file_name().and_then(|n| n.to_str()).expect("Keys have valid UTF8 names only.").to_owned()
|
||||||
|
))),
|
||||||
Err(err) => Err(Error::InvalidKeyFile(format!("{:?}: {}", path, err))),
|
Err(err) => Err(Error::InvalidKeyFile(format!("{:?}: {}", path, err))),
|
||||||
})
|
})
|
||||||
.collect()
|
.collect()
|
||||||
@ -98,22 +100,26 @@ impl KeyDirectory for DiskDirectory {
|
|||||||
let keyfile: json::KeyFile = account.clone().into();
|
let keyfile: json::KeyFile = account.clone().into();
|
||||||
|
|
||||||
// build file path
|
// build file path
|
||||||
let mut account = account;
|
let filename = account.filename.as_ref().cloned().unwrap_or_else(|| {
|
||||||
account.path = account.path.or_else(|| {
|
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
|
||||||
let mut keyfile_path = self.path.clone();
|
format!("UTC--{}Z--{:?}", timestamp, account.address)
|
||||||
let timestamp = time::strftime("%Y-%m-%d_%H:%M:%S_%Z", &time::now()).unwrap_or("???".to_owned());
|
|
||||||
keyfile_path.push(format!("{}-{}.json", keyfile.id, timestamp));
|
|
||||||
Some(keyfile_path)
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// update account filename
|
||||||
|
let mut account = account;
|
||||||
|
account.filename = Some(filename.clone());
|
||||||
|
|
||||||
{
|
{
|
||||||
|
// Path to keyfile
|
||||||
|
let mut keyfile_path = self.path.clone();
|
||||||
|
keyfile_path.push(filename.as_str());
|
||||||
|
|
||||||
// save the file
|
// save the file
|
||||||
let path = account.path.as_ref().expect("build-file-path ensures is not None; qed");
|
let mut file = try!(fs::File::create(&keyfile_path));
|
||||||
let mut file = try!(fs::File::create(path));
|
|
||||||
try!(keyfile.write(&mut file).map_err(|e| Error::Custom(format!("{:?}", e))));
|
try!(keyfile.write(&mut file).map_err(|e| Error::Custom(format!("{:?}", e))));
|
||||||
|
|
||||||
if let Err(_) = restrict_permissions_to_owner(path) {
|
if let Err(_) = restrict_permissions_to_owner(keyfile_path.as_path()) {
|
||||||
fs::remove_file(path).expect("Expected to remove recently created file");
|
fs::remove_file(keyfile_path).expect("Expected to remove recently created file");
|
||||||
return Err(Error::Io(io::Error::last_os_error()));
|
return Err(Error::Io(io::Error::last_os_error()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -135,3 +141,34 @@ impl KeyDirectory for DiskDirectory {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod test {
|
||||||
|
use std::{env, fs};
|
||||||
|
use super::DiskDirectory;
|
||||||
|
use dir::KeyDirectory;
|
||||||
|
use account::SafeAccount;
|
||||||
|
use ethkey::{Random, Generator};
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn should_create_new_account() {
|
||||||
|
// given
|
||||||
|
let dir = env::temp_dir();
|
||||||
|
let keypair = Random.generate().unwrap();
|
||||||
|
let password = "hello world";
|
||||||
|
let directory = DiskDirectory::create(dir.clone()).unwrap();
|
||||||
|
|
||||||
|
// when
|
||||||
|
let account = SafeAccount::create(&keypair, [0u8; 16], password, 1024, "Test".to_owned(), "{}".to_owned());
|
||||||
|
let res = directory.insert(account);
|
||||||
|
|
||||||
|
|
||||||
|
// then
|
||||||
|
assert!(res.is_ok(), "Should save account succesfuly.");
|
||||||
|
assert!(res.unwrap().filename.is_some(), "Filename has been assigned.");
|
||||||
|
|
||||||
|
// cleanup
|
||||||
|
let _ = fs::remove_dir_all(dir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@@ -14,15 +14,20 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.

+use std::collections::HashSet;
 use ethkey::Address;
 use dir::KeyDirectory;
 use Error;

 pub fn import_accounts(src: &KeyDirectory, dst: &KeyDirectory) -> Result<Vec<Address>, Error> {
 let accounts = try!(src.load());
-accounts.into_iter().map(|a| {
-let address = a.address.clone();
-try!(dst.insert(a));
-Ok(address)
-}).collect()
+let existing_accounts = try!(dst.load()).into_iter().map(|a| a.address).collect::<HashSet<_>>();
+
+accounts.into_iter()
+.filter(|a| !existing_accounts.contains(&a.address))
+.map(|a| {
+let address = a.address.clone();
+try!(dst.insert(a));
+Ok(address)
+}).collect()
 }
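The rewritten `import_accounts` above first collects the destination's addresses into a `HashSet` and filters them out, so re-importing a directory no longer re-inserts keys that are already present. The same filtering logic on toy in-memory directories:

use std::collections::HashSet;

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct Address(u32);

#[derive(Clone, Debug)]
struct Account {
    address: Address,
}

// Toy in-memory version of the import: skip anything the destination already has.
fn import_accounts(src: &[Account], dst: &mut Vec<Account>) -> Vec<Address> {
    let existing: HashSet<Address> = dst.iter().map(|a| a.address.clone()).collect();

    src.iter()
        .filter(|a| !existing.contains(&a.address))
        .map(|a| {
            dst.push(a.clone());
            a.address.clone()
        })
        .collect()
}

fn main() {
    let src = vec![Account { address: Address(1) }, Account { address: Address(2) }];
    let mut dst = vec![Account { address: Address(1) }];

    let imported = import_accounts(&src, &mut dst);
    assert_eq!(imported, vec![Address(2)]);
    println!("imported {:?}, destination now holds {} accounts", imported, dst.len());
}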
@@ -10,7 +10,7 @@ rustc-serialize = "0.3"
 serde = "0.7.0"
 serde_json = "0.7.0"
 serde_macros = { version = "0.7.0", optional = true }
-clippy = { version = "0.0.79", optional = true}
+clippy = { version = "0.0.80", optional = true}

 [build-dependencies]
 serde_codegen = { version = "0.7.0", optional = true }

@@ -47,21 +47,25 @@ pub fn execute(cmd: AccountCmd) -> Result<String, String> {
 }
 }

+fn keys_dir(path: String) -> Result<DiskDirectory, String> {
+DiskDirectory::create(path).map_err(|e| format!("Could not open keys directory: {}", e))
+}
+
 fn new(n: NewAccount) -> Result<String, String> {
 let password: String = match n.password_file {
 Some(file) => try!(password_from_file(file)),
 None => try!(password_prompt()),
 };

-let dir = Box::new(DiskDirectory::create(n.path).unwrap());
+let dir = Box::new(try!(keys_dir(n.path)));
 let secret_store = Box::new(EthStore::open_with_iterations(dir, n.iterations).unwrap());
 let acc_provider = AccountProvider::new(secret_store);
-let new_account = acc_provider.new_account(&password).unwrap();
+let new_account = try!(acc_provider.new_account(&password).map_err(|e| format!("Could not create new account: {}", e)));
 Ok(format!("{:?}", new_account))
 }

 fn list(path: String) -> Result<String, String> {
-let dir = Box::new(DiskDirectory::create(path).unwrap());
+let dir = Box::new(try!(keys_dir(path)));
 let secret_store = Box::new(EthStore::open(dir).unwrap());
 let acc_provider = AccountProvider::new(secret_store);
 let accounts = acc_provider.accounts();
@@ -74,7 +78,7 @@ fn list(path: String) -> Result<String, String> {
 }

 fn import(i: ImportAccounts) -> Result<String, String> {
-let to = DiskDirectory::create(i.to).unwrap();
+let to = try!(keys_dir(i.to));
 let mut imported = 0;
 for path in &i.from {
 let from = DiskDirectory::at(path);
@@ -361,7 +361,7 @@ impl Configuration {
 let mut buffer = String::new();
 let mut node_file = try!(File::open(path).map_err(|e| format!("Error opening reserved nodes file: {}", e)));
 try!(node_file.read_to_string(&mut buffer).map_err(|_| "Error reading reserved node file"));
-let lines = buffer.lines().map(|s| s.trim().to_owned()).filter(|s| s.len() > 0).collect::<Vec<_>>();
+let lines = buffer.lines().map(|s| s.trim().to_owned()).filter(|s| !s.is_empty()).collect::<Vec<_>>();
 if let Some(invalid) = lines.iter().find(|s| !is_valid_node_url(s)) {
 return Err(format!("Invalid node address format given for a boot node: {}", invalid));
 }

@@ -161,7 +161,7 @@ fn consolidate_database(
 let mut db_config = DatabaseConfig {
 max_open_files: 64,
 cache_size: None,
-compaction: config.compaction_profile.clone(),
+compaction: config.compaction_profile,
 columns: None,
 wal: true,
 };

@@ -132,7 +132,7 @@ pub fn execute(cmd: RunCmd) -> Result<(), String> {
 Some(id) => id,
 None => spec.network_id(),
 };
-sync_config.fork_block = spec.fork_block().clone();
+sync_config.fork_block = spec.fork_block();

 // prepare account provider
 let account_provider = Arc::new(try!(prepare_account_provider(&cmd.dirs, cmd.acc_conf)));
@@ -298,7 +298,7 @@ fn prepare_account_provider(dirs: &Directories, cfg: AccountsConfig) -> Result<A
 };

 let from = GethDirectory::open(t);
-let to = DiskDirectory::create(dirs.keys.clone()).unwrap();
+let to = try!(DiskDirectory::create(dirs.keys.clone()).map_err(|e| format!("Could not open keys directory: {}", e)));
 match import_accounts(&from, &to) {
 Ok(_) => {}
 Err(Error::Io(ref io_err)) if io_err.kind() == ErrorKind::NotFound => {}
@@ -306,8 +306,10 @@ fn prepare_account_provider(dirs: &Directories, cfg: AccountsConfig) -> Result<A
 }
 }

-let dir = Box::new(DiskDirectory::create(dirs.keys.clone()).unwrap());
-let account_service = AccountProvider::new(Box::new(EthStore::open_with_iterations(dir, cfg.iterations).unwrap()));
+let dir = Box::new(try!(DiskDirectory::create(dirs.keys.clone()).map_err(|e| format!("Could not open keys directory: {}", e))));
+let account_service = AccountProvider::new(Box::new(
+try!(EthStore::open_with_iterations(dir, cfg.iterations).map_err(|e| format!("Could not open keys directory: {}", e)))
+));

 for a in cfg.unlocked_accounts {
 if passwords.iter().find(|p| account_service.unlock_account_permanently(a, (*p).clone()).is_ok()).is_none() {
|
|||||||
rustc-serialize = "0.3"
|
rustc-serialize = "0.3"
|
||||||
transient-hashmap = "0.1"
|
transient-hashmap = "0.1"
|
||||||
serde_macros = { version = "0.7.0", optional = true }
|
serde_macros = { version = "0.7.0", optional = true }
|
||||||
clippy = { version = "0.0.79", optional = true}
|
clippy = { version = "0.0.80", optional = true}
|
||||||
json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
|
json-ipc-server = { git = "https://github.com/ethcore/json-ipc-server.git" }
|
||||||
ethcore-ipc = { path = "../ipc/rpc" }
|
ethcore-ipc = { path = "../ipc/rpc" }
|
||||||
|
|
||||||
|
@ -20,7 +20,7 @@ ethcore-util = { path = "../util" }
|
|||||||
ethcore-rpc = { path = "../rpc" }
|
ethcore-rpc = { path = "../rpc" }
|
||||||
parity-dapps-signer = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6", optional = true}
|
parity-dapps-signer = { git = "https://github.com/ethcore/parity-ui.git", version = "0.6", optional = true}
|
||||||
|
|
||||||
clippy = { version = "0.0.79", optional = true}
|
clippy = { version = "0.0.80", optional = true}
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
dev = ["clippy"]
|
dev = ["clippy"]
|
||||||
|
@@ -103,6 +103,7 @@ pub struct Session {
 }
 
 impl ws::Handler for Session {
+	#[cfg_attr(feature="dev", allow(collapsible_if))]
 	fn on_request(&mut self, req: &ws::Request) -> ws::Result<(ws::Response)> {
 		let origin = req.header("origin").or_else(|| req.header("Origin")).map(|x| &x[..]);
 		let host = req.header("host").or_else(|| req.header("Host")).map(|x| &x[..]);
@@ -14,7 +14,7 @@ ethcore-ipc-codegen = { path = "../ipc/codegen" }
 [dependencies]
 ethcore-util = { path = "../util" }
 ethcore = { path = "../ethcore" }
-clippy = { version = "0.0.79", optional = true}
+clippy = { version = "0.0.80", optional = true}
 log = "0.3"
 env_logger = "0.3"
 time = "0.1.34"
@@ -21,13 +21,13 @@ rocksdb = { git = "https://github.com/ethcore/rust-rocksdb" }
 lazy_static = "0.2"
 eth-secp256k1 = { git = "https://github.com/ethcore/rust-secp256k1" }
 rust-crypto = "0.2.34"
-elastic-array = "0.4"
-heapsize = "0.3"
+elastic-array = { git = "https://github.com/ethcore/elastic-array" }
+heapsize = { version = "0.3", features = ["unstable"] }
 itertools = "0.4"
 crossbeam = "0.2"
 slab = "0.2"
 sha3 = { path = "sha3" }
-clippy = { version = "0.0.79", optional = true}
+clippy = { version = "0.0.80", optional = true}
 igd = "0.5.0"
 ethcore-devtools = { path = "../devtools" }
 libc = "0.2.7"
@@ -19,7 +19,8 @@
 use std::{ops, fmt, cmp, mem};
 use std::cmp::*;
 use std::ops::*;
-use std::hash::{Hash, Hasher};
+use std::hash::{Hash, Hasher, BuildHasherDefault};
+use std::collections::{HashMap, HashSet};
 use std::str::FromStr;
 use rand::Rng;
 use rand::os::OsRng;
@@ -544,6 +545,37 @@ impl_hash!(H1024, 128);
 impl_hash!(H2048, 256);
 
 known_heap_size!(0, H32, H64, H128, Address, H256, H264, H512, H520, H1024, H2048);
+// Specialized HashMap and HashSet
+
+/// Hasher that just takes 8 bytes of the provided value.
+pub struct PlainHasher(u64);
+
+impl Default for PlainHasher {
+	#[inline]
+	fn default() -> PlainHasher {
+		PlainHasher(0)
+	}
+}
+
+impl Hasher for PlainHasher {
+	#[inline]
+	fn finish(&self) -> u64 {
+		self.0
+	}
+
+	#[inline]
+	fn write(&mut self, bytes: &[u8]) {
+		debug_assert!(bytes.len() == 32);
+		let mut prefix = [0u8; 8];
+		prefix.clone_from_slice(&bytes[0..8]);
+		self.0 = unsafe { ::std::mem::transmute(prefix) };
+	}
+}
+
+/// Specialized version of HashMap with H256 keys and fast hashing function.
+pub type H256FastMap<T> = HashMap<H256, T, BuildHasherDefault<PlainHasher>>;
+/// Specialized version of HashSet with H256 keys and fast hashing function.
+pub type H256FastSet = HashSet<H256, BuildHasherDefault<PlainHasher>>;
 
 #[cfg(test)]
 mod tests {
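Illustrative sketch (not part of the commit): how the H256FastMap type added above could stand in for a std HashMap keyed by H256. The helper function and its name are assumptions for illustration only.

	// hypothetical calling code
	fn count_keys(keys: &[H256]) -> H256FastMap<u32> {
		// PlainHasher reuses the first 8 bytes of the already-uniform H256 key,
		// avoiding SipHash work on every lookup.
		let mut counts = H256FastMap::default();
		for k in keys {
			*counts.entry(k.clone()).or_insert(0) += 1;
		}
		counts
	}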
@@ -36,10 +36,8 @@
 //! The functions here are designed to be fast.
 //!
 
-#[cfg(all(asm_available, target_arch="x86_64"))]
 use std::mem;
 use std::fmt;
-
 use std::str::{FromStr};
 use std::convert::From;
 use std::hash::Hash;
@@ -647,16 +645,46 @@ macro_rules! construct_uint {
 			(arr[index / 8] >> (((index % 8)) * 8)) as u8
 		}
 
+		#[cfg(any(
+			target_arch = "arm",
+			target_arch = "mips",
+			target_arch = "powerpc",
+			target_arch = "x86",
+			target_arch = "x86_64",
+			target_arch = "aarch64",
+			target_arch = "powerpc64"))]
+		#[inline]
 		fn to_big_endian(&self, bytes: &mut[u8]) {
-			assert!($n_words * 8 == bytes.len());
+			debug_assert!($n_words * 8 == bytes.len());
 			let &$name(ref arr) = self;
-			for i in 0..bytes.len() {
-				let rev = bytes.len() - 1 - i;
-				let pos = rev / 8;
-				bytes[i] = (arr[pos] >> ((rev % 8) * 8)) as u8;
+			unsafe {
+				let mut out: *mut u64 = mem::transmute(bytes.as_mut_ptr());
+				out = out.offset($n_words);
+				for i in 0..$n_words {
+					out = out.offset(-1);
+					*out = arr[i].swap_bytes();
+				}
 			}
 		}
 
+		#[cfg(not(any(
+			target_arch = "arm",
+			target_arch = "mips",
+			target_arch = "powerpc",
+			target_arch = "x86",
+			target_arch = "x86_64",
+			target_arch = "aarch64",
+			target_arch = "powerpc64")))]
+		#[inline]
+		fn to_big_endian(&self, bytes: &mut[u8]) {
+			debug_assert!($n_words * 8 == bytes.len());
+			let &$name(ref arr) = self;
+			for i in 0..bytes.len() {
+				let rev = bytes.len() - 1 - i;
+				let pos = rev / 8;
+				bytes[i] = (arr[pos] >> ((rev % 8) * 8)) as u8;
+			}
+		}
 		#[inline]
 		fn exp10(n: usize) -> Self {
 			match n {
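For orientation, a standalone sketch (assumed names, modern std APIs, not part of the commit) of the idea behind the fast to_big_endian branch above: each 64-bit word is byte-swapped and written whole, most-significant word first, instead of emitting one byte at a time.

	// hypothetical free function for a 4-word (256-bit) value
	fn words_to_big_endian(arr: &[u64; 4], bytes: &mut [u8; 32]) {
		for (i, word) in arr.iter().enumerate() {
			// word 0 is the least significant, so it lands at the end of the buffer
			let start = 32 - (i + 1) * 8;
			bytes[start..start + 8].copy_from_slice(&word.to_be_bytes());
		}
	}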
@@ -28,14 +28,17 @@ use hash::H256;
 pub enum BaseDataError {
 	/// An entry was removed more times than inserted.
 	NegativelyReferencedHash(H256),
+	/// A committed value was inserted more than once.
+	AlreadyExists(H256),
 }
 
 impl fmt::Display for BaseDataError {
 	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 		match *self {
 			BaseDataError::NegativelyReferencedHash(hash) =>
-				f.write_fmt(format_args!("Entry {} removed from database more times \
-					than it was added.", hash)),
+				write!(f, "Entry {} removed from database more times than it was added.", hash),
+			BaseDataError::AlreadyExists(hash) =>
+				write!(f, "Committed key already exists in database: {}", hash),
 		}
 	}
 }
@@ -20,7 +20,7 @@ use bytes::*;
 use std::collections::HashMap;
 
 /// Trait modelling datastore keyed by a 32-byte Keccak hash.
-pub trait HashDB: AsHashDB {
+pub trait HashDB: AsHashDB + Send + Sync {
 	/// Get the keys in the database together with number of underlying references.
 	fn keys(&self) -> HashMap<H256, i32>;
 
@@ -88,10 +88,10 @@ impl HashDB for ArchiveDB {
 	fn get(&self, key: &H256) -> Option<&[u8]> {
 		let k = self.overlay.raw(key);
 		match k {
-			Some(&(ref d, rc)) if rc > 0 => Some(d),
+			Some((d, rc)) if rc > 0 => Some(d),
 			_ => {
 				if let Some(x) = self.payload(key) {
-					Some(&self.overlay.denote(key, x).0)
+					Some(self.overlay.denote(key, x).0)
 				}
 				else {
 					None
@@ -185,6 +185,38 @@ impl JournalDB for ArchiveDB {
 		Ok((inserts + deletes) as u32)
 	}
 
+	fn inject(&mut self, batch: &DBTransaction) -> Result<u32, UtilError> {
+		let mut inserts = 0usize;
+		let mut deletes = 0usize;
+
+		for i in self.overlay.drain().into_iter() {
+			let (key, (value, rc)) = i;
+			if rc > 0 {
+				assert!(rc == 1);
+				if try!(self.backing.get(self.column, &key)).is_some() {
+					return Err(BaseDataError::AlreadyExists(key).into());
+				}
+				try!(batch.put(self.column, &key, &value));
+				inserts += 1;
+			}
+			if rc < 0 {
+				assert!(rc == -1);
+				if try!(self.backing.get(self.column, &key)).is_none() {
+					return Err(BaseDataError::NegativelyReferencedHash(key).into());
+				}
+				try!(batch.delete(self.column, &key));
+				deletes += 1;
+			}
+		}
+
+		for (mut key, value) in self.overlay.drain_aux().into_iter() {
+			key.push(AUX_FLAG);
+			try!(batch.put(self.column, &key, &value));
+		}
+
+		Ok((inserts + deletes) as u32)
+	}
+
 	fn latest_era(&self) -> Option<u64> { self.latest_era }
 
 	fn state(&self, id: &H256) -> Option<Bytes> {
@@ -449,4 +481,19 @@ mod tests {
 			assert!(state.is_some());
 		}
 	}
+
+	#[test]
+	fn inject() {
+		let temp = ::devtools::RandomTempPath::new();
+
+		let mut jdb = new_db(temp.as_path().as_path());
+		let key = jdb.insert(b"dog");
+		jdb.inject_batch().unwrap();
+
+		assert_eq!(jdb.get(&key).unwrap(), b"dog");
+		jdb.remove(&key);
+		jdb.inject_batch().unwrap();
+
+		assert!(jdb.get(&key).is_none());
+	}
 }
@@ -277,10 +277,10 @@ impl HashDB for EarlyMergeDB {
 	fn get(&self, key: &H256) -> Option<&[u8]> {
 		let k = self.overlay.raw(key);
 		match k {
-			Some(&(ref d, rc)) if rc > 0 => Some(d),
+			Some((d, rc)) if rc > 0 => Some(d),
 			_ => {
 				if let Some(x) = self.payload(key) {
-					Some(&self.overlay.denote(key, x).0)
+					Some(self.overlay.denote(key, x).0)
 				}
 				else {
 					None
@@ -430,7 +430,7 @@ impl JournalDB for EarlyMergeDB {
 			r.begin_list(inserts.len());
 			inserts.iter().foreach(|&(k, _)| {r.append(&k);});
 			r.append(&removes);
-			Self::insert_keys(&inserts, &self.backing, self.column, &mut refs, &batch, trace);
+			Self::insert_keys(&inserts, &self.backing, self.column, &mut refs, batch, trace);
 			if trace {
 				let ins = inserts.iter().map(|&(k, _)| k).collect::<Vec<_>>();
 				trace!(target: "jdb.ops", " Inserts: {:?}", ins);
@@ -464,7 +464,7 @@ impl JournalDB for EarlyMergeDB {
 				if trace {
 					trace!(target: "jdb.ops", " Expunging: {:?}", deletes);
 				}
-				Self::remove_keys(&deletes, &mut refs, &batch, self.column, RemoveFrom::Archive, trace);
+				Self::remove_keys(&deletes, &mut refs, batch, self.column, RemoveFrom::Archive, trace);
 
 				if trace {
 					trace!(target: "jdb.ops", " Finalising: {:?}", inserts);
@@ -482,7 +482,7 @@ impl JournalDB for EarlyMergeDB {
 					}
 					Some( RefInfo{queue_refs: x, in_archive: false} ) => {
 						// must set already in; ,
-						Self::set_already_in(&batch, self.column, k);
+						Self::set_already_in(batch, self.column, k);
 						refs.insert(k.clone(), RefInfo{ queue_refs: x - 1, in_archive: true });
 					}
 					Some( RefInfo{in_archive: true, ..} ) => {
@@ -496,7 +496,7 @@ impl JournalDB for EarlyMergeDB {
 				if trace {
 					trace!(target: "jdb.ops", " Reverting: {:?}", inserts);
 				}
-				Self::remove_keys(&inserts, &mut refs, &batch, self.column, RemoveFrom::Queue, trace);
+				Self::remove_keys(&inserts, &mut refs, batch, self.column, RemoveFrom::Queue, trace);
 			}
 
 			try!(batch.delete(self.column, &last));
@@ -513,6 +513,32 @@ impl JournalDB for EarlyMergeDB {
 
 		Ok(0)
 	}
+
+	fn inject(&mut self, batch: &DBTransaction) -> Result<u32, UtilError> {
+		let mut ops = 0;
+		for (key, (value, rc)) in self.overlay.drain() {
+			if rc != 0 { ops += 1 }
+
+			match rc {
+				0 => {}
+				1 => {
+					if try!(self.backing.get(self.column, &key)).is_some() {
+						return Err(BaseDataError::AlreadyExists(key).into());
+					}
+					try!(batch.put(self.column, &key, &value))
+				}
+				-1 => {
+					if try!(self.backing.get(self.column, &key)).is_none() {
+						return Err(BaseDataError::NegativelyReferencedHash(key).into());
+					}
+					try!(batch.delete(self.column, &key))
+				}
+				_ => panic!("Attempted to inject invalid state."),
+			}
+		}
+
+		Ok(ops)
+	}
 }
 
 #[cfg(test)]
@@ -1045,4 +1071,19 @@ mod tests {
 			assert!(!jdb.contains(&bar));
 		}
 	}
+
+	#[test]
+	fn inject() {
+		let temp = ::devtools::RandomTempPath::new();
+
+		let mut jdb = new_db(temp.as_path().as_path());
+		let key = jdb.insert(b"dog");
+		jdb.inject_batch().unwrap();
+
+		assert_eq!(jdb.get(&key).unwrap(), b"dog");
+		jdb.remove(&key);
+		jdb.inject_batch().unwrap();
+
+		assert!(jdb.get(&key).is_none());
+	}
 }
@@ -46,7 +46,7 @@ use super::JournalDB;
 ///
 /// Commit workflow:
 /// 1. Create a new journal record from the transaction overlay.
-/// 2. Inseart each node from the transaction overlay into the History overlay increasing reference
+/// 2. Insert each node from the transaction overlay into the History overlay increasing reference
 /// count if it is already there. Note that the reference counting is managed by `MemoryDB`
 /// 3. Clear the transaction overlay.
 /// 4. For a canonical journal record that becomes ancient inserts its insertions into the disk DB
@@ -126,7 +126,7 @@ impl OverlayRecentDB {
 	}
 
 	fn payload(&self, key: &H256) -> Option<Bytes> {
-		self.backing.get(self.column, key).expect("Low-level database error. Some issue with your hard disk?").map(|v| v.to_vec())
+		self.backing.get(self.column, key).expect("Low-level database error. Some issue with your hard disk?")
 	}
 
 	fn read_overlay(db: &Database, col: Option<u32>) -> JournalOverlay {
@@ -155,7 +155,7 @@ impl OverlayRecentDB {
 				for r in insertions.iter() {
 					let k: H256 = r.val_at(0);
 					let v: Bytes = r.val_at(1);
-					overlay.emplace(OverlayRecentDB::to_short_key(&k), v);
+					overlay.emplace(to_short_key(&k), v);
 					inserted_keys.push(k);
 					count += 1;
 				}
@@ -176,12 +176,13 @@ impl OverlayRecentDB {
 		JournalOverlay { backing_overlay: overlay, journal: journal, latest_era: latest_era }
 	}
 
-	#[inline]
-	fn to_short_key(key: &H256) -> H256 {
-		let mut k = H256::new();
-		k[0..DB_PREFIX_LEN].copy_from_slice(&key[0..DB_PREFIX_LEN]);
-		k
-	}
 }
+
+#[inline]
+fn to_short_key(key: &H256) -> H256 {
+	let mut k = H256::new();
+	k[0..DB_PREFIX_LEN].copy_from_slice(&key[0..DB_PREFIX_LEN]);
+	k
+}
 
 impl JournalDB for OverlayRecentDB {
@@ -208,7 +209,7 @@ impl JournalDB for OverlayRecentDB {
 	fn latest_era(&self) -> Option<u64> { self.journal_overlay.read().latest_era }
 
 	fn state(&self, key: &H256) -> Option<Bytes> {
-		let v = self.journal_overlay.read().backing_overlay.get(&OverlayRecentDB::to_short_key(key)).map(|v| v.to_vec());
+		let v = self.journal_overlay.read().backing_overlay.get(&to_short_key(key)).map(|v| v.to_vec());
 		v.or_else(|| self.backing.get_by_prefix(self.column, &key[0..DB_PREFIX_LEN]).map(|b| b.to_vec()))
 	}
 
@@ -229,7 +230,7 @@ impl JournalDB for OverlayRecentDB {
 				r.begin_list(2);
 				r.append(&k);
 				r.append(&v);
-				journal_overlay.backing_overlay.emplace(OverlayRecentDB::to_short_key(&k), v);
+				journal_overlay.backing_overlay.emplace(to_short_key(&k), v);
 			}
 			r.append(&removed_keys);
 
@@ -238,15 +239,15 @@ impl JournalDB for OverlayRecentDB {
 			k.append(&now);
 			k.append(&index);
 			k.append(&&PADDING[..]);
-			try!(batch.put(self.column, &k.drain(), r.as_raw()));
+			try!(batch.put_vec(self.column, &k.drain(), r.out()));
 			if journal_overlay.latest_era.map_or(true, |e| now > e) {
-				try!(batch.put(self.column, &LATEST_ERA_KEY, &encode(&now)));
+				try!(batch.put_vec(self.column, &LATEST_ERA_KEY, encode(&now).to_vec()));
 				journal_overlay.latest_era = Some(now);
 			}
 			journal_overlay.journal.entry(now).or_insert_with(Vec::new).push(JournalEntry { id: id.clone(), insertions: inserted_keys, deletions: removed_keys });
 		}
 
-		let journal_overlay = journal_overlay.deref_mut();
+		let journal_overlay = &mut *journal_overlay;
 		// apply old commits' details
 		if let Some((end_era, canon_id)) = end {
 			if let Some(ref mut records) = journal_overlay.journal.get_mut(&end_era) {
@@ -265,9 +266,9 @@ impl JournalDB for OverlayRecentDB {
 				{
 					if canon_id == journal.id {
 						for h in &journal.insertions {
-							if let Some(&(ref d, rc)) = journal_overlay.backing_overlay.raw(&OverlayRecentDB::to_short_key(h)) {
+							if let Some((d, rc)) = journal_overlay.backing_overlay.raw(&to_short_key(h)) {
 								if rc > 0 {
-									canon_insertions.push((h.clone(), d.clone())); //TODO: optimize this to avoid data copy
+									canon_insertions.push((h.clone(), d.to_owned())); //TODO: optimize this to avoid data copy
 								}
 							}
 						}
@@ -279,15 +280,15 @@ impl JournalDB for OverlayRecentDB {
 				}
 				// apply canon inserts first
 				for (k, v) in canon_insertions {
-					try!(batch.put(self.column, &k, &v));
+					try!(batch.put_vec(self.column, &k, v));
 				}
 				// update the overlay
 				for k in overlay_deletions {
-					journal_overlay.backing_overlay.remove_and_purge(&OverlayRecentDB::to_short_key(&k));
+					journal_overlay.backing_overlay.remove_and_purge(&to_short_key(&k));
 				}
 				// apply canon deletions
 				for k in canon_deletions {
-					if !journal_overlay.backing_overlay.contains(&OverlayRecentDB::to_short_key(&k)) {
+					if !journal_overlay.backing_overlay.contains(&to_short_key(&k)) {
 						try!(batch.delete(self.column, &k));
 					}
 				}
@@ -297,6 +298,31 @@ impl JournalDB for OverlayRecentDB {
 
 		Ok(0)
 	}
+
+	fn inject(&mut self, batch: &DBTransaction) -> Result<u32, UtilError> {
+		let mut ops = 0;
+		for (key, (value, rc)) in self.transaction_overlay.drain() {
+			if rc != 0 { ops += 1 }
+
+			match rc {
+				0 => {}
+				1 => {
+					if try!(self.backing.get(self.column, &key)).is_some() {
+						return Err(BaseDataError::AlreadyExists(key).into());
+					}
+					try!(batch.put(self.column, &key, &value))
+				}
+				-1 => {
+					if try!(self.backing.get(self.column, &key)).is_none() {
+						return Err(BaseDataError::NegativelyReferencedHash(key).into());
+					}
+					try!(batch.delete(self.column, &key))
+				}
+				_ => panic!("Attempted to inject invalid state."),
+			}
+		}
+
+		Ok(ops)
+	}
 }
 
 impl HashDB for OverlayRecentDB {
@@ -317,9 +343,9 @@ impl HashDB for OverlayRecentDB {
 	fn get(&self, key: &H256) -> Option<&[u8]> {
 		let k = self.transaction_overlay.raw(key);
 		match k {
-			Some(&(ref d, rc)) if rc > 0 => Some(d),
+			Some((d, rc)) if rc > 0 => Some(d),
 			_ => {
-				let v = self.journal_overlay.read().backing_overlay.get(&OverlayRecentDB::to_short_key(key)).map(|v| v.to_vec());
+				let v = self.journal_overlay.read().backing_overlay.get(&to_short_key(key)).map(|v| v.to_vec());
 				match v {
 					Some(x) => {
 						Some(&self.transaction_overlay.denote(key, x).0)
@@ -879,4 +905,19 @@ mod tests {
 		assert!(jdb.contains(&foo));
 		assert!(jdb.contains(&bar));
 	}
+
+	#[test]
+	fn inject() {
+		let temp = ::devtools::RandomTempPath::new();
+
+		let mut jdb = new_db(temp.as_path().as_path());
+		let key = jdb.insert(b"dog");
+		jdb.inject_batch().unwrap();
+
+		assert_eq!(jdb.get(&key).unwrap(), b"dog");
+		jdb.remove(&key);
+		jdb.inject_batch().unwrap();
+
+		assert!(jdb.get(&key).is_none());
+	}
 }
@@ -184,6 +184,14 @@ impl JournalDB for RefCountedDB {
 		let r = try!(self.forward.commit_to_batch(&batch));
 		Ok(r)
 	}
+
+	fn inject(&mut self, batch: &DBTransaction) -> Result<u32, UtilError> {
+		self.inserts.clear();
+		for remove in self.removes.drain(..) {
+			self.forward.remove(&remove);
+		}
+		self.forward.commit_to_batch(&batch)
+	}
 }
 
 #[cfg(test)]
@@ -298,4 +306,17 @@ mod tests {
 		assert!(!jdb.contains(&baz));
 		assert!(!jdb.contains(&bar));
 	}
+
+	#[test]
+	fn inject() {
+		let mut jdb = RefCountedDB::new_temp();
+		let key = jdb.insert(b"dog");
+		jdb.inject_batch().unwrap();
+
+		assert_eq!(jdb.get(&key).unwrap(), b"dog");
+		jdb.remove(&key);
+		jdb.inject_batch().unwrap();
+
+		assert!(jdb.get(&key).is_none());
+	}
 }
@@ -22,7 +22,7 @@ use kvdb::{Database, DBTransaction};
 
 /// A `HashDB` which can manage a short-term journal potentially containing many forks of mutually
 /// exclusive actions.
-pub trait JournalDB : HashDB + Send + Sync {
+pub trait JournalDB: HashDB {
 	/// Return a copy of ourself, in a box.
 	fn boxed_clone(&self) -> Box<JournalDB>;
 
@@ -39,6 +39,15 @@ pub trait JournalDB : HashDB + Send + Sync {
 	/// old era to the backing database, reverting any non-canonical historical commit's inserts.
 	fn commit(&mut self, batch: &DBTransaction, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError>;
 
+	/// Commit all queued insert and delete operations without affecting any journalling -- this requires that all insertions
+	/// and deletions are indeed canonical and will likely lead to an invalid database if that assumption is violated.
+	///
+	/// Any keys or values inserted or deleted must be completely independent of those affected
+	/// by any previous `commit` operations. Essentially, this means that `inject` can be used
+	/// either to restore a state to a fresh database, or to insert data which may only be journalled
+	/// from this point onwards.
+	fn inject(&mut self, batch: &DBTransaction) -> Result<u32, UtilError>;
+
 	/// State data query
 	fn state(&self, _id: &H256) -> Option<Bytes>;
 
@@ -48,11 +57,19 @@ pub trait JournalDB : HashDB + Send + Sync {
 	/// Get backing database.
 	fn backing(&self) -> &Arc<Database>;
 
-	#[cfg(test)]
 	/// Commit all changes in a single batch
+	#[cfg(test)]
 	fn commit_batch(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> {
 		let batch = self.backing().transaction();
 		let res = try!(self.commit(&batch, now, id, end));
 		self.backing().write(batch).map(|_| res).map_err(Into::into)
 	}
+
+	/// Inject all changes in a single batch.
+	#[cfg(test)]
+	fn inject_batch(&mut self) -> Result<u32, UtilError> {
+		let batch = self.backing().transaction();
+		let res = try!(self.inject(&batch));
+		self.backing().write(batch).map(|_| res).map_err(Into::into)
+	}
 }
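A sketch of how the new inject path could be driven outside the #[cfg(test)] inject_batch helper; the surrounding function is hypothetical and simply mirrors commit_batch from the hunk above.

	// `db` is any boxed JournalDB implementation
	fn inject_value(db: &mut Box<JournalDB>, value: &[u8]) -> Result<u32, UtilError> {
		db.insert(value);                   // queue the insertion in the memory overlay
		let batch = db.backing().transaction();
		let ops = try!(db.inject(&batch));  // write it straight to the backing DB, no journalling
		db.backing().write(batch).map(|_| ops).map_err(Into::into)
	}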
191 util/src/kvdb.rs
@@ -16,8 +16,11 @@
 
 //! Key-Value store abstraction with `RocksDB` backend.
 
+use common::*;
+use elastic_array::*;
 use std::default::Default;
-use rocksdb::{DB, Writable, WriteBatch, WriteOptions, IteratorMode, DBVector, DBIterator,
+use rlp::{UntrustedRlp, RlpType, View, Compressible};
+use rocksdb::{DB, Writable, WriteBatch, WriteOptions, IteratorMode, DBIterator,
 	Options, DBCompactionStyle, BlockBasedOptions, Direction, Cache, Column};
 
 const DB_BACKGROUND_FLUSHES: i32 = 2;
@@ -25,30 +28,89 @@ const DB_BACKGROUND_COMPACTIONS: i32 = 2;
 
 /// Write transaction. Batches a sequence of put/delete operations for efficiency.
 pub struct DBTransaction {
-	batch: WriteBatch,
-	cfs: Vec<Column>,
+	ops: RwLock<Vec<DBOp>>,
+}
+
+enum DBOp {
+	Insert {
+		col: Option<u32>,
+		key: ElasticArray32<u8>,
+		value: Bytes,
+	},
+	InsertCompressed {
+		col: Option<u32>,
+		key: ElasticArray32<u8>,
+		value: Bytes,
+	},
+	Delete {
+		col: Option<u32>,
+		key: ElasticArray32<u8>,
+	}
 }
 
 impl DBTransaction {
 	/// Create new transaction.
-	pub fn new(db: &Database) -> DBTransaction {
+	pub fn new(_db: &Database) -> DBTransaction {
 		DBTransaction {
-			batch: WriteBatch::new(),
-			cfs: db.cfs.clone(),
+			ops: RwLock::new(Vec::with_capacity(256)),
 		}
 	}
 
 	/// Insert a key-value pair in the transaction. Any existing value value will be overwritten upon write.
 	pub fn put(&self, col: Option<u32>, key: &[u8], value: &[u8]) -> Result<(), String> {
-		col.map_or_else(|| self.batch.put(key, value), |c| self.batch.put_cf(self.cfs[c as usize], key, value))
+		let mut ekey = ElasticArray32::new();
+		ekey.append_slice(key);
+		self.ops.write().push(DBOp::Insert {
+			col: col,
+			key: ekey,
+			value: value.to_vec(),
+		});
+		Ok(())
+	}
+
+	/// Insert a key-value pair in the transaction. Any existing value value will be overwritten upon write.
+	pub fn put_vec(&self, col: Option<u32>, key: &[u8], value: Bytes) -> Result<(), String> {
+		let mut ekey = ElasticArray32::new();
+		ekey.append_slice(key);
+		self.ops.write().push(DBOp::Insert {
+			col: col,
+			key: ekey,
+			value: value,
+		});
+		Ok(())
+	}
+
+	/// Insert a key-value pair in the transaction. Any existing value value will be overwritten upon write.
+	/// Value will be RLP-compressed on flush
+	pub fn put_compressed(&self, col: Option<u32>, key: &[u8], value: Bytes) -> Result<(), String> {
+		let mut ekey = ElasticArray32::new();
+		ekey.append_slice(key);
+		self.ops.write().push(DBOp::InsertCompressed {
+			col: col,
+			key: ekey,
+			value: value,
+		});
+		Ok(())
 	}
 
 	/// Delete value by key.
 	pub fn delete(&self, col: Option<u32>, key: &[u8]) -> Result<(), String> {
-		col.map_or_else(|| self.batch.delete(key), |c| self.batch.delete_cf(self.cfs[c as usize], key))
+		let mut ekey = ElasticArray32::new();
+		ekey.append_slice(key);
+		self.ops.write().push(DBOp::Delete {
+			col: col,
+			key: ekey,
+		});
+		Ok(())
 	}
 }
 
+struct DBColumnOverlay {
+	insertions: HashMap<ElasticArray32<u8>, Bytes>,
+	compressed_insertions: HashMap<ElasticArray32<u8>, Bytes>,
+	deletions: HashSet<ElasticArray32<u8>>,
+}
+
 /// Compaction profile for the database settings
 #[derive(Clone, Copy)]
 pub struct CompactionProfile {
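A small usage sketch (hypothetical keys, pre-existing Database handle) of the reworked DBTransaction above: operations are only recorded in the ops list and reach RocksDB when the transaction is written.

	fn example_write(db: &Database) -> Result<(), String> {
		let tr = db.transaction();
		try!(tr.put(None, b"key", b"value"));              // default column
		try!(tr.put_vec(Some(0), b"key2", vec![1, 2, 3])); // first configured column
		try!(tr.delete(None, b"stale-key"));
		db.write(tr)                                       // ops are replayed into a RocksDB WriteBatch here
	}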
@@ -118,7 +180,7 @@ impl Default for DatabaseConfig {
 	}
 }
 
-/// Database iterator
+/// Database iterator for flushed data only
 pub struct DatabaseIterator {
 	iter: DBIterator,
 }
@@ -136,6 +198,7 @@ pub struct Database {
 	db: DB,
 	write_opts: WriteOptions,
 	cfs: Vec<Column>,
+	overlay: RwLock<Vec<DBColumnOverlay>>,
 }
 
 impl Database {
@@ -209,7 +272,16 @@ impl Database {
 			},
 			Err(s) => { return Err(s); }
 		};
-		Ok(Database { db: db, write_opts: write_opts, cfs: cfs })
+		Ok(Database {
+			db: db,
+			write_opts: write_opts,
+			overlay: RwLock::new((0..(cfs.len() + 1)).map(|_| DBColumnOverlay {
+				insertions: HashMap::new(),
+				compressed_insertions: HashMap::new(),
+				deletions: HashSet::new(),
+			}).collect()),
+			cfs: cfs,
+		})
 	}
 
 	/// Creates new transaction for this database.
@@ -217,14 +289,107 @@ impl Database {
 		DBTransaction::new(self)
 	}
 
+	fn to_overly_column(col: Option<u32>) -> usize {
+		col.map_or(0, |c| (c + 1) as usize)
+	}
+
+	/// Commit transaction to database.
+	pub fn write_buffered(&self, tr: DBTransaction) -> Result<(), String> {
+		let mut overlay = self.overlay.write();
+		let ops = mem::replace(&mut *tr.ops.write(), Vec::new());
+		for op in ops {
+			match op {
+				DBOp::Insert { col, key, value } => {
+					let c = Self::to_overly_column(col);
+					overlay[c].deletions.remove(&key);
+					overlay[c].compressed_insertions.remove(&key);
+					overlay[c].insertions.insert(key, value);
+				},
+				DBOp::InsertCompressed { col, key, value } => {
+					let c = Self::to_overly_column(col);
+					overlay[c].deletions.remove(&key);
+					overlay[c].insertions.remove(&key);
+					overlay[c].compressed_insertions.insert(key, value);
+				},
+				DBOp::Delete { col, key } => {
+					let c = Self::to_overly_column(col);
+					overlay[c].insertions.remove(&key);
+					overlay[c].compressed_insertions.remove(&key);
+					overlay[c].deletions.insert(key);
+				},
+			}
+		};
+		Ok(())
+	}
+
+	/// Commit buffered changes to database.
+	pub fn flush(&self) -> Result<(), String> {
+		let batch = WriteBatch::new();
+		let mut overlay = self.overlay.write();
+
+		let mut c = 0;
+		for column in overlay.iter_mut() {
+			let insertions = mem::replace(&mut column.insertions, HashMap::new());
+			let compressed_insertions = mem::replace(&mut column.compressed_insertions, HashMap::new());
+			let deletions = mem::replace(&mut column.deletions, HashSet::new());
+			for d in deletions.into_iter() {
+				if c > 0 {
+					try!(batch.delete_cf(self.cfs[c - 1], &d));
+				} else {
+					try!(batch.delete(&d));
+				}
+			}
+			for (key, value) in insertions.into_iter() {
+				if c > 0 {
+					try!(batch.put_cf(self.cfs[c - 1], &key, &value));
+				} else {
+					try!(batch.put(&key, &value));
+				}
+			}
+			for (key, value) in compressed_insertions.into_iter() {
+				let compressed = UntrustedRlp::new(&value).compress(RlpType::Blocks);
+				if c > 0 {
+					try!(batch.put_cf(self.cfs[c - 1], &key, &compressed));
+				} else {
+					try!(batch.put(&key, &compressed));
+				}
+			}
+			c += 1;
+		}
+		self.db.write_opt(batch, &self.write_opts)
+	}
+
 	/// Commit transaction to database.
 	pub fn write(&self, tr: DBTransaction) -> Result<(), String> {
-		self.db.write_opt(tr.batch, &self.write_opts)
+		let batch = WriteBatch::new();
+		let ops = mem::replace(&mut *tr.ops.write(), Vec::new());
+		for op in ops {
+			match op {
+				DBOp::Insert { col, key, value } => {
+					try!(col.map_or_else(|| batch.put(&key, &value), |c| batch.put_cf(self.cfs[c as usize], &key, &value)))
+				},
+				DBOp::InsertCompressed { col, key, value } => {
+					let compressed = UntrustedRlp::new(&value).compress(RlpType::Blocks);
+					try!(col.map_or_else(|| batch.put(&key, &compressed), |c| batch.put_cf(self.cfs[c as usize], &key, &compressed)))
+				},
+				DBOp::Delete { col, key } => {
+					try!(col.map_or_else(|| batch.delete(&key), |c| batch.delete_cf(self.cfs[c as usize], &key)))
+				},
+			}
+		}
+		self.db.write_opt(batch, &self.write_opts)
 	}
 
 	/// Get value by key.
-	pub fn get(&self, col: Option<u32>, key: &[u8]) -> Result<Option<DBVector>, String> {
-		col.map_or_else(|| self.db.get(key), |c| self.db.get_cf(self.cfs[c as usize], key))
+	pub fn get(&self, col: Option<u32>, key: &[u8]) -> Result<Option<Bytes>, String> {
+		let overlay = &self.overlay.read()[Self::to_overly_column(col)];
+		overlay.insertions.get(key).or_else(|| overlay.compressed_insertions.get(key)).map_or_else(||
+			col.map_or_else(
+				|| self.db.get(key).map(|r| r.map(|v| v.to_vec())),
+				|c| self.db.get_cf(self.cfs[c as usize], key).map(|r| r.map(|v| v.to_vec()))),
+			|value| Ok(Some(value.clone())))
 	}
 
 	/// Get value by partial key. Prefix size should match configured prefix size.
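A sketch (hypothetical keys) of the buffered path added above: write_buffered only fills the per-column overlay, get consults that overlay before RocksDB, and flush drains the overlay into a single WriteBatch.

	fn example_buffered(db: &Database) -> Result<(), String> {
		let tr = db.transaction();
		try!(tr.put(None, b"key", b"value"));
		try!(db.write_buffered(tr));                          // staged in the overlay only
		assert_eq!(try!(db.get(None, b"key")), Some(b"value".to_vec()));
		db.flush()                                            // now persisted to RocksDB
	}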
@@ -158,7 +158,7 @@ pub use overlaydb::*;
 pub use journaldb::JournalDB;
 pub use crypto::*;
 pub use triehash::*;
-pub use trie::*;
+pub use trie::{Trie, TrieMut, TrieDB, TrieDBMut, TrieFactory, TrieError, SecTrieDB, SecTrieDBMut};
 pub use nibbleslice::*;
 pub use semantic_version::*;
 pub use network::*;
@@ -24,10 +24,10 @@ use hashdb::*;
 use heapsize::*;
 use std::mem;
 use std::collections::HashMap;
-use std::collections::hash_map::Entry;
-use std::default::Default;
 
-#[derive(Debug,Clone)]
+const STATIC_NULL_RLP: (&'static [u8], i32) = (&[0x80; 1], 1);
+
+use std::collections::hash_map::Entry;
 
 /// Reference-counted memory-based `HashDB` implementation.
 ///
 /// Use `new()` to create a new database. Insert items with `insert()`, remove items
@@ -71,25 +71,17 @@ use std::default::Default;
 /// assert!(!m.contains(&k));
 /// }
 /// ```
-#[derive(PartialEq)]
+#[derive(Default, Clone, PartialEq)]
 pub struct MemoryDB {
-	data: HashMap<H256, (Bytes, i32)>,
-	static_null_rlp: (Bytes, i32),
+	data: H256FastMap<(Bytes, i32)>,
 	aux: HashMap<Bytes, Bytes>,
 }
 
-impl Default for MemoryDB {
-	fn default() -> Self {
-		MemoryDB::new()
-	}
-}
-
 impl MemoryDB {
 	/// Create a new instance of the memory DB.
 	pub fn new() -> MemoryDB {
 		MemoryDB {
-			data: HashMap::new(),
-			static_null_rlp: (vec![0x80u8; 1], 1),
+			data: H256FastMap::default(),
 			aux: HashMap::new(),
 		}
 	}
@@ -123,21 +115,9 @@ impl MemoryDB {
 		for empty in empties { self.data.remove(&empty); }
 	}
 
-	/// Grab the raw information associated with a key. Returns None if the key
-	/// doesn't exist.
-	///
-	/// Even when Some is returned, the data is only guaranteed to be useful
-	/// when the refs > 0.
-	pub fn raw(&self, key: &H256) -> Option<&(Bytes, i32)> {
-		if key == &SHA3_NULL_RLP {
-			return Some(&self.static_null_rlp);
-		}
-		self.data.get(key)
-	}
-
 	/// Return the internal map of hashes to data, clearing the current state.
-	pub fn drain(&mut self) -> HashMap<H256, (Bytes, i32)> {
-		mem::replace(&mut self.data, HashMap::new())
+	pub fn drain(&mut self) -> H256FastMap<(Bytes, i32)> {
+		mem::replace(&mut self.data, H256FastMap::default())
 	}
 
 	/// Return the internal map of auxiliary data, clearing the current state.
@@ -145,14 +125,26 @@ impl MemoryDB {
 		mem::replace(&mut self.aux, HashMap::new())
 	}
 
+	/// Grab the raw information associated with a key. Returns None if the key
+	/// doesn't exist.
+	///
+	/// Even when Some is returned, the data is only guaranteed to be useful
+	/// when the refs > 0.
+	pub fn raw(&self, key: &H256) -> Option<(&[u8], i32)> {
+		if key == &SHA3_NULL_RLP {
+			return Some(STATIC_NULL_RLP.clone());
+		}
+		self.data.get(key).map(|&(ref v, x)| (&v[..], x))
+	}
+
 	/// Denote than an existing value has the given key. Used when a key gets removed without
 	/// a prior insert and thus has a negative reference with no value.
 	///
 	/// May safely be called even if the key's value is known, in which case it will be a no-op.
-	pub fn denote(&self, key: &H256, value: Bytes) -> &(Bytes, i32) {
+	pub fn denote(&self, key: &H256, value: Bytes) -> (&[u8], i32) {
 		if self.raw(key) == None {
 			unsafe {
-				let p = &self.data as *const HashMap<H256, (Bytes, i32)> as *mut HashMap<H256, (Bytes, i32)>;
+				let p = &self.data as *const H256FastMap<(Bytes, i32)> as *mut H256FastMap<(Bytes, i32)>;
 				(*p).insert(key.clone(), (value, 0));
 			}
 		}
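A short sketch (illustrative value) of the new raw return shape above, which now hands back a (&[u8], i32) pair instead of a reference to the owned tuple.

	let mut m = MemoryDB::new();
	let key = m.insert(b"dog");
	if let Some((value, refs)) = m.raw(&key) {
		assert_eq!(value, &b"dog"[..]);
		assert_eq!(refs, 1);
	}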
@@ -162,6 +154,7 @@ impl MemoryDB {
 	/// Returns the size of allocated heap memory
 	pub fn mem_used(&self) -> usize {
 		self.data.heap_size_of_children()
+		+ self.aux.heap_size_of_children()
 	}
 
 	/// Remove an element and delete it from storage if reference count reaches zero.
@@ -190,6 +183,7 @@ impl HashDB for MemoryDB {
 		if key == &SHA3_NULL_RLP {
 			return Some(&NULL_RLP_STATIC);
 		}
+
 		match self.data.get(key) {
 			Some(&(ref d, rc)) if rc > 0 => Some(d),
 			_ => None
@@ -204,6 +198,7 @@ impl HashDB for MemoryDB {
 		if key == &SHA3_NULL_RLP {
 			return true;
 		}
+
 		match self.data.get(key) {
 			Some(&(_, x)) if x > 0 => true,
 			_ => false
@@ -217,14 +212,14 @@ impl HashDB for MemoryDB {
 		let key = value.sha3();
 		if match self.data.get_mut(&key) {
 			Some(&mut (ref mut old_value, ref mut rc @ -0x80000000i32 ... 0)) => {
-				*old_value = From::from(value);
+				*old_value = value.into();
 				*rc += 1;
 				false
 			},
 			Some(&mut (_, ref mut x)) => { *x += 1; false } ,
 			None => true,
 		}{ // ... None falls through into...
-			self.data.insert(key.clone(), (From::from(value), 1));
+			self.data.insert(key.clone(), (value.into(), 1));
 		}
 		key
 	}
@@ -233,6 +228,7 @@ impl HashDB for MemoryDB {
 		if value == &NULL_RLP {
 			return;
 		}
+
 		match self.data.get_mut(&key) {
 			Some(&mut (ref mut old_value, ref mut rc @ -0x80000000i32 ... 0)) => {
 				*old_value = value;
@@ -250,6 +246,7 @@ impl HashDB for MemoryDB {
 		if key == &SHA3_NULL_RLP {
 			return;
 		}
+
 		if match self.data.get_mut(key) {
 			Some(&mut (_, ref mut x)) => { *x -= 1; false }
 			None => true
@@ -281,9 +278,9 @@ fn memorydb_denote() {
 	for _ in 0..1000 {
 		let r = H256::random();
 		let k = r.sha3();
-		let &(ref v, ref rc) = m.denote(&k, r.to_bytes());
-		assert_eq!(v.as_slice(), r.as_slice());
-		assert_eq!(*rc, 0);
+		let (v, rc) = m.denote(&k, r.to_bytes());
+		assert_eq!(v, r.as_slice());
+		assert_eq!(rc, 0);
 	}
 
 	assert_eq!(m.get(&hash).unwrap(), b"Hello world!");
@@ -99,7 +99,7 @@ impl OverlayDB {
 	pub fn revert(&mut self) { self.overlay.clear(); }
 
 	/// Get the number of references that would be committed.
-	pub fn commit_refs(&self, key: &H256) -> i32 { self.overlay.raw(key).map_or(0, |&(_, refs)| refs) }
+	pub fn commit_refs(&self, key: &H256) -> i32 { self.overlay.raw(key).map_or(0, |(_, refs)| refs) }
 
 	/// Get the refs and value of the given key.
 	fn payload(&self, key: &H256) -> Option<(Bytes, u32)> {
@@ -146,14 +146,14 @@ impl HashDB for OverlayDB {
 		// it positive again.
 		let k = self.overlay.raw(key);
 		match k {
-			Some(&(ref d, rc)) if rc > 0 => Some(d),
+			Some((d, rc)) if rc > 0 => Some(d),
 			_ => {
-				let memrc = k.map_or(0, |&(_, rc)| rc);
+				let memrc = k.map_or(0, |(_, rc)| rc);
 				match self.payload(key) {
 					Some(x) => {
 						let (d, rc) = x;
 						if rc as i32 + memrc > 0 {
-							Some(&self.overlay.denote(key, d).0)
+							Some(self.overlay.denote(key, d).0)
 						}
 						else {
 							None
@@ -171,9 +171,9 @@ impl HashDB for OverlayDB {
 		// it positive again.
 		let k = self.overlay.raw(key);
 		match k {
-			Some(&(_, rc)) if rc > 0 => true,
+			Some((_, rc)) if rc > 0 => true,
 			_ => {
-				let memrc = k.map_or(0, |&(_, rc)| rc);
+				let memrc = k.map_or(0, |(_, rc)| rc);
 				match self.payload(key) {
 					Some(x) => {
 						let (_, rc) = x;
@@ -192,6 +192,7 @@ impl HashDB for OverlayDB {
 	}
 
 	#[test]
+	#[cfg_attr(feature="dev", allow(blacklisted_name))]
 	fn overlaydb_revert() {
 		let mut m = OverlayDB::new_temp();
 		let foo = m.insert(b"foo"); // insert foo.
@@ -45,7 +45,7 @@ static COMMON_RLPS: &'static [&'static [u8]] = &[
 	&[148, 50, 190, 52, 59, 148, 248, 96, 18, 77, 196, 254, 226, 120, 253, 203, 211, 140, 16, 45, 136],
 	&[148, 82, 188, 68, 213, 55, 131, 9, 238, 42, 191, 21, 57, 191, 113, 222, 27, 125, 123, 227, 181],
&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
|
&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
|
 ];
 
static INVALID_RLPS: &'static [&'static [u8]] = &[&[0x81, 0x0], &[0x81, 0x1], &[0x81, 0x2], &[0x81, 0x3], &[0x81, 0x4], &[0x81, 0x5], &[0x81, 0x6], &[0x81, 0x7], &[0x81, 0x8], &[0x81, 0x9], &[0x81, 0xa], &[0x81, 0xb], &[0x81, 0xc], &[0x81, 0xd], &[0x81, 0xe], &[0x81, 0xf], &[0x81, 0x10], &[0x81, 0x11], &[0x81, 0x12], &[0x81, 0x13], &[0x81, 0x14], &[0x81, 0x15], &[0x81, 0x16], &[0x81, 0x17], &[0x81, 0x18], &[0x81, 0x19], &[0x81, 0x1a], &[0x81, 0x1b], &[0x81, 0x1c], &[0x81, 0x1d], &[0x81, 0x1e], &[0x81, 0x1f], &[0x81, 0x20], &[0x81, 0x21], &[0x81, 0x22], &[0x81, 0x23], &[0x81, 0x24], &[0x81, 0x25], &[0x81, 0x26], &[0x81, 0x27], &[0x81, 0x28], &[0x81, 0x29], &[0x81, 0x2a], &[0x81, 0x2b], &[0x81, 0x2c], &[0x81, 0x2d], &[0x81, 0x2e], &[0x81, 0x2f], &[0x81, 0x30], &[0x81, 0x31], &[0x81, 0x32], &[0x81, 0x33], &[0x81, 0x34], &[0x81, 0x35], &[0x81, 0x36], &[0x81, 0x37], &[0x81, 0x38], &[0x81, 0x39], &[0x81, 0x3a], &[0x81, 0x3b], &[0x81, 0x3c], &[0x81, 0x3d], &[0x81, 0x3e], &[0x81, 0x3f], &[0x81, 0x40], &[0x81, 0x41], &[0x81, 0x42], &[0x81, 0x43], &[0x81, 0x44], &[0x81, 0x45], &[0x81, 0x46], &[0x81, 0x47], &[0x81, 0x48], &[0x81, 0x49], &[0x81, 0x4a], &[0x81, 0x4b], &[0x81, 0x4c], &[0x81, 0x4d], &[0x81, 0x4e], &[0x81, 0x4f], &[0x81, 0x50], &[0x81, 0x51], &[0x81, 0x52], &[0x81, 0x53], &[0x81, 0x54], &[0x81, 0x55], &[0x81, 0x56], &[0x81, 0x57], &[0x81, 0x58], &[0x81, 0x59], &[0x81, 0x5a], &[0x81, 0x5b], &[0x81, 0x5c], &[0x81, 0x5d], &[0x81, 0x5e], &[0x81, 0x5f], &[0x81, 0x60], &[0x81, 0x61], &[0x81, 0x62], &[0x81, 0x63], &[0x81, 0x64], &[0x81, 0x65], &[0x81, 0x66], &[0x81, 0x67], &[0x81, 0x68], &[0x81, 0x69], &[0x81, 0x6a], &[0x81, 0x6b], &[0x81, 0x6c], &[0x81, 0x6d], &[0x81, 0x6e], &[0x81, 0x6f], &[0x81, 0x70], &[0x81, 0x71], &[0x81, 0x72], &[0x81, 0x73], &[0x81, 0x74], &[0x81, 0x75], &[0x81, 0x76], &[0x81, 0x77], &[0x81, 0x78], &[0x81, 0x79], &[0x81, 0x7a], &[0x81, 0x7b], &[0x81, 0x7c], &[0x81, 0x7d], &[0x81, 0x7e]];
@@ -85,11 +85,11 @@ mod tests {
 }
 }
 
-for v in values.iter() {
-let rlp = UntrustedRlp::new(&v);
+for v in &values {
+let rlp = UntrustedRlp::new(v);
 let mut flat = Vec::new();
 flat_rlp(&mut flat, rlp);
-for r in flat.iter() {
+for r in &flat {
 *rlp_counts.entry(r.as_raw()).or_insert(0) += 1;
 *rlp_sizes.entry(r.as_raw()).or_insert(0) += space_saving(r.as_raw());
 }
@@ -43,11 +43,11 @@ impl<'a> InvalidRlpSwapper<'a> {
 }
 /// Get a valid RLP corresponding to an invalid one
 fn get_valid(&self, invalid_rlp: &[u8]) -> Option<&[u8]> {
-self.invalid_to_valid.get(invalid_rlp).map(|r| r.clone())
+self.invalid_to_valid.get(invalid_rlp).cloned()
 }
 /// Get an invalid RLP corresponding to a valid one
 fn get_invalid(&self, valid_rlp: &[u8]) -> Option<&[u8]> {
-self.valid_to_invalid.get(valid_rlp).map(|r| r.clone())
+self.valid_to_invalid.get(valid_rlp).cloned()
 }
 }
 
@@ -86,18 +86,18 @@ fn map_rlp<F>(rlp: &UntrustedRlp, f: F) -> Option<ElasticArray1024<u8>> where
 /// Replace common RLPs with invalid shorter ones.
 fn simple_compress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> ElasticArray1024<u8> {
 if rlp.is_data() {
-to_elastic(swapper.get_invalid(rlp.as_raw()).unwrap_or(rlp.as_raw()))
+to_elastic(swapper.get_invalid(rlp.as_raw()).unwrap_or_else(|| rlp.as_raw()))
 } else {
-map_rlp(rlp, |r| Some(simple_compress(r, swapper))).unwrap_or(to_elastic(rlp.as_raw()))
+map_rlp(rlp, |r| Some(simple_compress(r, swapper))).unwrap_or_else(|| to_elastic(rlp.as_raw()))
 }
 }
 
 /// Recover valid RLP from a compressed form.
 fn simple_decompress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> ElasticArray1024<u8> {
 if rlp.is_data() {
-to_elastic(swapper.get_valid(rlp.as_raw()).unwrap_or(rlp.as_raw()))
+to_elastic(swapper.get_valid(rlp.as_raw()).unwrap_or_else(|| rlp.as_raw()))
 } else {
-map_rlp(rlp, |r| Some(simple_decompress(r, swapper))).unwrap_or(to_elastic(rlp.as_raw()))
+map_rlp(rlp, |r| Some(simple_decompress(r, swapper))).unwrap_or_else(|| to_elastic(rlp.as_raw()))
 }
 }
 
@@ -105,7 +105,7 @@ fn simple_decompress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> Elastic
 /// Tries to compress data insides.
 fn deep_compress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> Option<ElasticArray1024<u8>> {
 let simple_swap = ||
-swapper.get_invalid(rlp.as_raw()).map(|b| to_elastic(&b));
+swapper.get_invalid(rlp.as_raw()).map(to_elastic);
 if rlp.is_data() {
 // Try to treat the inside as RLP.
 return match rlp.payload_info() {
@@ -134,7 +134,7 @@ fn deep_compress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> Option<Elas
 /// Tries to decompress compressed data insides.
 fn deep_decompress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> Option<ElasticArray1024<u8>> {
 let simple_swap = ||
-swapper.get_valid(rlp.as_raw()).map(|b| to_elastic(&b));
+swapper.get_valid(rlp.as_raw()).map(to_elastic);
 // Simply decompress data.
 if rlp.is_data() { return simple_swap(); }
 match rlp.item_count() {
@@ -152,17 +152,17 @@ fn deep_decompress(rlp: &UntrustedRlp, swapper: &InvalidRlpSwapper) -> Option<El
 impl<'a> Compressible for UntrustedRlp<'a> {
 type DataType = RlpType;
 
 fn compress(&self, t: RlpType) -> ElasticArray1024<u8> {
 match t {
 RlpType::Snapshot => simple_compress(self, &SNAPSHOT_RLP_SWAPPER),
-RlpType::Blocks => deep_compress(self, &BLOCKS_RLP_SWAPPER).unwrap_or(to_elastic(self.as_raw())),
+RlpType::Blocks => deep_compress(self, &BLOCKS_RLP_SWAPPER).unwrap_or_else(|| to_elastic(self.as_raw())),
 }
 }
 
 fn decompress(&self, t: RlpType) -> ElasticArray1024<u8> {
 match t {
 RlpType::Snapshot => simple_decompress(self, &SNAPSHOT_RLP_SWAPPER),
-RlpType::Blocks => deep_decompress(self, &BLOCKS_RLP_SWAPPER).unwrap_or(to_elastic(self.as_raw())),
+RlpType::Blocks => deep_decompress(self, &BLOCKS_RLP_SWAPPER).unwrap_or_else(|| to_elastic(self.as_raw())),
 }
 }
 }
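For orientation, a minimal round trip through the `Compressible` API touched in this hunk might look like the sketch below. Only `UntrustedRlp`, `RlpType::Blocks` and the `compress`/`decompress` calls are taken from the diff; the `util::rlp` import path is an assumption.

// Sketch only: round-trip a block RLP through the swapper-based compression.
// The `util::rlp` module path is assumed; the methods are the ones changed above.
use util::rlp::{Compressible, RlpType, UntrustedRlp};

fn roundtrip(raw: &[u8]) -> (Vec<u8>, Vec<u8>) {
	let rlp = UntrustedRlp::new(raw);
	// Common RLPs are swapped for short invalid stand-ins...
	let compressed = rlp.compress(RlpType::Blocks).to_vec();
	// ...and the original bytes are recovered from the compressed form.
	let restored = UntrustedRlp::new(&compressed).decompress(RlpType::Blocks).to_vec();
	(compressed, restored)
}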
@@ -232,8 +232,8 @@ mod tests {
 let mut decomp_size = 0;
 let mut comp_size = 0;
 
-for v in values.iter() {
-let rlp = UntrustedRlp::new(&v);
+for v in &values {
+let rlp = UntrustedRlp::new(v);
 let compressed = rlp.compress(RlpType::Blocks).to_vec();
 comp_size += compressed.len();
 let decompressed = rlp.decompress(RlpType::Blocks).to_vec();
@@ -37,7 +37,6 @@ pub use std::error::Error as StdError;
 pub use std::ops::*;
 pub use std::cmp::*;
 pub use std::sync::Arc;
-pub use std::cell::*;
 pub use std::collections::*;
 
 pub use rustc_serialize::json::Json;
@@ -17,8 +17,7 @@
 use hash::H256;
 use sha3::Hashable;
 use hashdb::HashDB;
-use super::{TrieDB, Trie, TrieDBIterator, TrieError};
-use trie::trietraits::TrieItem;
+use super::{TrieDB, Trie, TrieDBIterator, TrieItem};
 
 /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
 /// Additionaly it stores inserted hash-key mappings for later retrieval.
@@ -32,7 +31,7 @@ impl<'db> FatDB<'db> {
 /// Create a new trie with the backing database `db` and empty `root`
 /// Initialise to the state entailed by the genesis block.
 /// This guarantees the trie is built correctly.
-pub fn new(db: &'db HashDB, root: &'db H256) -> Result<Self, TrieError> {
+pub fn new(db: &'db HashDB, root: &'db H256) -> super::Result<Self> {
 let fatdb = FatDB {
 raw: try!(TrieDB::new(db, root))
 };
@@ -60,11 +59,13 @@ impl<'db> Trie for FatDB<'db> {
 self.raw.root()
 }
 
-fn contains(&self, key: &[u8]) -> bool {
+fn contains(&self, key: &[u8]) -> super::Result<bool> {
 self.raw.contains(&key.sha3())
 }
 
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Option<&'a [u8]> where 'a: 'key {
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<&'a [u8]>>
+where 'a: 'key
+{
 self.raw.get(&key.sha3())
 }
 }
@@ -105,9 +106,9 @@ fn fatdb_to_trie() {
 let mut root = H256::default();
 {
 let mut t = FatDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
 }
 let t = FatDB::new(&memdb, &root).unwrap();
-assert_eq!(t.get(&[0x01u8, 0x23]).unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), &[0x01u8, 0x23]);
 assert_eq!(t.iter().collect::<Vec<_>>(), vec![(vec![0x01u8, 0x23], &[0x01u8, 0x23] as &[u8])]);
 }
@@ -17,7 +17,7 @@
 use hash::H256;
 use sha3::Hashable;
 use hashdb::HashDB;
-use super::{TrieDBMut, TrieMut, TrieError};
+use super::{TrieDBMut, TrieMut};
 
 /// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
 /// Additionaly it stores inserted hash-key mappings for later retrieval.
@@ -38,7 +38,7 @@ impl<'db> FatDBMut<'db> {
 /// Create a new trie with the backing database `db` and `root`.
 ///
 /// Returns an error if root does not exist.
-pub fn from_existing(db: &'db mut HashDB, root: &'db mut H256) -> Result<Self, TrieError> {
+pub fn from_existing(db: &'db mut HashDB, root: &'db mut H256) -> super::Result<Self> {
 Ok(FatDBMut { raw: try!(TrieDBMut::from_existing(db, root)) })
 }
 
@@ -62,23 +62,26 @@ impl<'db> TrieMut for FatDBMut<'db> {
 self.raw.is_empty()
 }
 
-fn contains(&self, key: &[u8]) -> bool {
+fn contains(&self, key: &[u8]) -> super::Result<bool> {
 self.raw.contains(&key.sha3())
 }
 
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Option<&'a [u8]> where 'a: 'key {
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<&'a [u8]>>
+where 'a: 'key
+{
 self.raw.get(&key.sha3())
 }
 
-fn insert(&mut self, key: &[u8], value: &[u8]) {
+fn insert(&mut self, key: &[u8], value: &[u8]) -> super::Result<()> {
 let hash = key.sha3();
-self.raw.insert(&hash, value);
+try!(self.raw.insert(&hash, value));
 let db = self.raw.db_mut();
 db.insert_aux(hash.to_vec(), key.to_vec());
+Ok(())
 }
 
-fn remove(&mut self, key: &[u8]) {
-self.raw.remove(&key.sha3());
+fn remove(&mut self, key: &[u8]) -> super::Result<()> {
+self.raw.remove(&key.sha3())
 }
 }
 
@@ -92,8 +95,8 @@ fn fatdb_to_trie() {
 let mut root = H256::default();
 {
 let mut t = FatDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
 }
 let t = TrieDB::new(&memdb, &root).unwrap();
-assert_eq!(t.get(&(&[0x01u8, 0x23]).sha3()).unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&(&[0x01u8, 0x23]).sha3()).unwrap().unwrap(), &[0x01u8, 0x23]);
 }
@@ -20,8 +20,6 @@ use std::fmt;
 use hash::H256;
 use hashdb::HashDB;
 
-/// Export the trietraits module.
-pub mod trietraits;
 /// Export the standardmap module.
 pub mod standardmap;
 /// Export the journal module.
@@ -40,7 +38,6 @@ pub mod sectriedbmut;
 mod fatdb;
 mod fatdbmut;
 
-pub use self::trietraits::{Trie, TrieMut};
 pub use self::standardmap::{Alphabet, StandardMap, ValueMode};
 pub use self::triedbmut::TrieDBMut;
 pub use self::triedb::{TrieDB, TrieDBIterator};
@@ -49,19 +46,80 @@ pub use self::sectriedb::SecTrieDB;
 pub use self::fatdb::{FatDB, FatDBIterator};
 pub use self::fatdbmut::FatDBMut;
 
-/// Trie Errors
-#[derive(Debug)]
+/// Trie Errors.
+///
+/// These borrow the data within them to avoid excessive copying on every
+/// trie operation.
+#[derive(Debug, PartialEq, Eq, Clone)]
 pub enum TrieError {
 /// Attempted to create a trie with a state root not in the DB.
-InvalidStateRoot,
+InvalidStateRoot(H256),
+/// Trie item not found in the database,
+IncompleteDatabase(H256),
 }
 
 impl fmt::Display for TrieError {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-write!(f, "Trie Error: Invalid state root.")
+match *self {
+TrieError::InvalidStateRoot(ref root) => write!(f, "Invalid state root: {}", root),
+TrieError::IncompleteDatabase(ref missing) =>
+write!(f, "Database missing expected key: {}", missing),
+}
 }
 }
 
+/// Trie-Item type.
+pub type TrieItem<'a> = (Vec<u8>, &'a [u8]);
+
+/// Trie result type. Boxed to avoid copying around extra space for `H256`s on successful queries.
+pub type Result<T> = ::std::result::Result<T, Box<TrieError>>;
+
+/// A key-value datastore implemented as a database-backed modified Merkle tree.
+pub trait Trie {
+/// Return the root of the trie.
+fn root(&self) -> &H256;
+
+/// Is the trie empty?
+fn is_empty(&self) -> bool { *self.root() == ::rlp::SHA3_NULL_RLP }
+
+/// Does the trie contain a given key?
+fn contains(&self, key: &[u8]) -> Result<bool> {
+self.get(key).map(|x| x.is_some())
+}
+
+/// What is the value of the given key in this trie?
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<&'a [u8]>> where 'a: 'key;
+
+/// Returns an iterator over elements of trie.
+fn iter<'a>(&'a self) -> Box<Iterator<Item = TrieItem> + 'a>;
+}
+
+/// A key-value datastore implemented as a database-backed modified Merkle tree.
+pub trait TrieMut {
+/// Return the root of the trie.
+fn root(&mut self) -> &H256;
+
+/// Is the trie empty?
+fn is_empty(&self) -> bool;
+
+/// Does the trie contain a given key?
+fn contains(&self, key: &[u8]) -> Result<bool> {
+self.get(key).map(|x| x.is_some())
+}
+
+/// What is the value of the given key in this trie?
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<&'a [u8]>> where 'a: 'key;
+
+/// Insert a `key`/`value` pair into the trie. An `empty` value is equivalent to removing
+/// `key` from the trie.
+fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<()>;
+
+/// Remove a `key` from the trie. Equivalent to making it equal to the empty
+/// value.
+fn remove(&mut self, key: &[u8]) -> Result<()>;
+}
+
 /// Trie types
 #[derive(Debug, PartialEq, Clone)]
 pub enum TrieSpec {
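The traits defined in this hunk return the boxed `Result` everywhere, so callers match on the error instead of unwrapping an `Option`. A hedged sketch of a consumer, assuming it lives in this module so the `Result` alias, `Trie` and `TrieError` are in scope (the helper itself is hypothetical):

// Sketch of a caller of the fallible Trie API defined above (hypothetical helper).
fn describe(t: &Trie, key: &[u8]) -> String {
	match t.get(key) {
		Ok(Some(value)) => format!("{} byte value", value.len()),
		Ok(None) => "key not present".into(),
		// The boxed TrieError carries the offending H256 and implements Display.
		Err(e) => format!("trie error: {}", e),
	}
}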
@@ -95,7 +153,7 @@ impl TrieFactory {
 }
 
 /// Create new immutable instance of Trie.
-pub fn readonly<'db>(&self, db: &'db HashDB, root: &'db H256) -> Result<Box<Trie + 'db>, TrieError> {
+pub fn readonly<'db>(&self, db: &'db HashDB, root: &'db H256) -> Result<Box<Trie + 'db>> {
 match self.spec {
 TrieSpec::Generic => Ok(Box::new(try!(TrieDB::new(db, root)))),
 TrieSpec::Secure => Ok(Box::new(try!(SecTrieDB::new(db, root)))),
@@ -113,7 +171,7 @@ impl TrieFactory {
 }
 
 /// Create new mutable instance of trie and check for errors.
-pub fn from_existing<'db>(&self, db: &'db mut HashDB, root: &'db mut H256) -> Result<Box<TrieMut + 'db>, TrieError> {
+pub fn from_existing<'db>(&self, db: &'db mut HashDB, root: &'db mut H256) -> Result<Box<TrieMut + 'db>> {
 match self.spec {
 TrieSpec::Generic => Ok(Box::new(try!(TrieDBMut::from_existing(db, root)))),
 TrieSpec::Secure => Ok(Box::new(try!(SecTrieDBMut::from_existing(db, root)))),
@@ -48,7 +48,7 @@ impl<'a> Node<'a> {
 },
 // branch - first 16 are nodes, 17th is a value (or empty).
 Prototype::List(17) => {
-let mut nodes: [&'a [u8]; 16] = unsafe { ::std::mem::uninitialized() };
+let mut nodes: [&'a [u8]; 16] = [&[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[], &[]];
 for i in 0..16 {
 nodes[i] = r.at(i).as_raw();
 }
@@ -18,8 +18,7 @@ use hash::H256;
 use sha3::Hashable;
 use hashdb::HashDB;
 use super::triedb::TrieDB;
-use super::trietraits::{Trie, TrieItem};
-use super::TrieError;
+use super::{Trie, TrieItem};
 
 /// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
 ///
@@ -34,7 +33,7 @@ impl<'db> SecTrieDB<'db> {
 /// Initialise to the state entailed by the genesis block.
 /// This guarantees the trie is built correctly.
 /// Returns an error if root does not exist.
-pub fn new(db: &'db HashDB, root: &'db H256) -> Result<Self, TrieError> {
+pub fn new(db: &'db HashDB, root: &'db H256) -> super::Result<Self> {
 Ok(SecTrieDB { raw: try!(TrieDB::new(db, root)) })
 }
 
@@ -56,11 +55,13 @@ impl<'db> Trie for SecTrieDB<'db> {
 
 fn root(&self) -> &H256 { self.raw.root() }
 
-fn contains(&self, key: &[u8]) -> bool {
+fn contains(&self, key: &[u8]) -> super::Result<bool> {
 self.raw.contains(&key.sha3())
 }
 
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Option<&'a [u8]> where 'a: 'key {
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<&'a [u8]>>
+where 'a: 'key
+{
 self.raw.get(&key.sha3())
 }
 }
@@ -69,14 +70,14 @@ impl<'db> Trie for SecTrieDB<'db> {
 fn trie_to_sectrie() {
 use memorydb::MemoryDB;
 use super::triedbmut::TrieDBMut;
-use super::trietraits::TrieMut;
+use super::super::TrieMut;
 
 let mut memdb = MemoryDB::new();
 let mut root = H256::default();
 {
 let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&(&[0x01u8, 0x23]).sha3(), &[0x01u8, 0x23]);
+t.insert(&(&[0x01u8, 0x23]).sha3(), &[0x01u8, 0x23]).unwrap();
 }
 let t = SecTrieDB::new(&memdb, &root).unwrap();
-assert_eq!(t.get(&[0x01u8, 0x23]).unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), &[0x01u8, 0x23]);
 }
@@ -18,8 +18,7 @@ use hash::H256;
 use sha3::Hashable;
 use hashdb::HashDB;
 use super::triedbmut::TrieDBMut;
-use super::trietraits::TrieMut;
-use super::TrieError;
+use super::TrieMut;
 
 /// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database.
 ///
@@ -39,7 +38,7 @@ impl<'db> SecTrieDBMut<'db> {
 /// Create a new trie with the backing database `db` and `root`.
 ///
 /// Returns an error if root does not exist.
-pub fn from_existing(db: &'db mut HashDB, root: &'db mut H256) -> Result<Self, TrieError> {
+pub fn from_existing(db: &'db mut HashDB, root: &'db mut H256) -> super::Result<Self> {
 Ok(SecTrieDBMut { raw: try!(TrieDBMut::from_existing(db, root)) })
 }
 
@@ -59,20 +58,22 @@ impl<'db> TrieMut for SecTrieDBMut<'db> {
 self.raw.is_empty()
 }
 
-fn contains(&self, key: &[u8]) -> bool {
+fn contains(&self, key: &[u8]) -> super::Result<bool> {
 self.raw.contains(&key.sha3())
 }
 
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Option<&'a [u8]> where 'a: 'key {
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<&'a [u8]>>
+where 'a: 'key
+{
 self.raw.get(&key.sha3())
 }
 
-fn insert(&mut self, key: &[u8], value: &[u8]) {
-self.raw.insert(&key.sha3(), value);
+fn insert(&mut self, key: &[u8], value: &[u8]) -> super::Result<()> {
+self.raw.insert(&key.sha3(), value)
 }
 
-fn remove(&mut self, key: &[u8]) {
-self.raw.remove(&key.sha3());
+fn remove(&mut self, key: &[u8]) -> super::Result<()> {
+self.raw.remove(&key.sha3())
 }
 }
 
@@ -86,8 +87,8 @@ fn sectrie_to_trie() {
 let mut root = H256::default();
 {
 let mut t = SecTrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
 }
 let t = TrieDB::new(&memdb, &root).unwrap();
-assert_eq!(t.get(&(&[0x01u8, 0x23]).sha3()).unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&(&[0x01u8, 0x23]).sha3()).unwrap().unwrap(), &[0x01u8, 0x23]);
 }
@@ -18,9 +18,8 @@ use common::*;
 use hashdb::*;
 use nibbleslice::*;
 use rlp::*;
-use super::trietraits::{Trie, TrieItem};
 use super::node::Node;
-use super::TrieError;
+use super::{Trie, TrieItem, TrieError};
 
 /// A `Trie` implementation using a generic `HashDB` backing database.
 ///
@@ -41,11 +40,11 @@ use super::TrieError;
 /// fn main() {
 /// let mut memdb = MemoryDB::new();
 /// let mut root = H256::new();
-/// TrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar");
+/// TrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar").unwrap();
 /// let t = TrieDB::new(&memdb, &root).unwrap();
-/// assert!(t.contains(b"foo"));
-/// assert_eq!(t.get(b"foo").unwrap(), b"bar");
-/// assert!(t.db_items_remaining().is_empty());
+/// assert!(t.contains(b"foo").unwrap());
+/// assert_eq!(t.get(b"foo").unwrap().unwrap(), b"bar");
+/// assert!(t.db_items_remaining().unwrap().is_empty());
 /// }
 /// ```
 pub struct TrieDB<'db> {
@@ -59,9 +58,9 @@ pub struct TrieDB<'db> {
 impl<'db> TrieDB<'db> {
 /// Create a new trie with the backing database `db` and `root`
 /// Returns an error if `root` does not exist
-pub fn new(db: &'db HashDB, root: &'db H256) -> Result<Self, TrieError> {
+pub fn new(db: &'db HashDB, root: &'db H256) -> super::Result<Self> {
 if !db.contains(root) {
-Err(TrieError::InvalidStateRoot)
+Err(Box::new(TrieError::InvalidStateRoot(*root)))
 } else {
 Ok(TrieDB {
 db: db,
@@ -77,11 +76,11 @@ impl<'db> TrieDB<'db> {
 }
 
 /// Determine all the keys in the backing database that belong to the trie.
-pub fn keys(&self) -> Vec<H256> {
+pub fn keys(&self) -> super::Result<Vec<H256>> {
 let mut ret: Vec<H256> = Vec::new();
 ret.push(self.root.clone());
-self.accumulate_keys(self.root_node(), &mut ret);
-ret
+try!(self.accumulate_keys(try!(self.root_node()), &mut ret));
+Ok(ret)
 }
 
 /// Convert a vector of hashes to a hashmap of hash to occurrences.
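After this change `keys` threads errors instead of panicking, so downstream helpers propagate them with `try!` in the same style. Illustrative only; `count_keys` is a hypothetical helper, not part of the diff:

// Hypothetical helper showing the try!-based propagation used by keys() above.
fn count_keys(trie: &TrieDB) -> super::Result<usize> {
	Ok(try!(trie.keys()).len())
}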
@@ -95,49 +94,51 @@ impl<'db> TrieDB<'db> {
 
 /// Determine occurrences of items in the backing database which are not related to this
 /// trie.
-pub fn db_items_remaining(&self) -> HashMap<H256, i32> {
+pub fn db_items_remaining(&self) -> super::Result<HashMap<H256, i32>> {
 let mut ret = self.db.keys();
-for (k, v) in Self::to_map(self.keys()).into_iter() {
+for (k, v) in Self::to_map(try!(self.keys())).into_iter() {
 let keycount = *ret.get(&k).unwrap_or(&0);
 match keycount <= v as i32 {
 true => ret.remove(&k),
 _ => ret.insert(k, keycount - v as i32),
 };
 }
-ret
+Ok(ret)
 }
 
 /// Recursion helper for `keys`.
-fn accumulate_keys(&self, node: Node, acc: &mut Vec<H256>) {
+fn accumulate_keys(&self, node: Node, acc: &mut Vec<H256>) -> super::Result<()> {
 let mut handle_payload = |payload| {
 let p = Rlp::new(payload);
 if p.is_data() && p.size() == 32 {
 acc.push(p.as_val());
 }
 
-self.accumulate_keys(self.get_node(payload), acc);
+self.accumulate_keys(try!(self.get_node(payload)), acc)
 };
 
 match node {
-Node::Extension(_, payload) => handle_payload(payload),
-Node::Branch(payloads, _) => for payload in &payloads { handle_payload(payload) },
+Node::Extension(_, payload) => try!(handle_payload(payload)),
+Node::Branch(payloads, _) => for payload in &payloads { try!(handle_payload(payload)) },
 _ => {},
 }
 
+Ok(())
 }
 
 /// Get the root node's RLP.
-fn root_node(&self) -> Node {
-Node::decoded(self.root_data())
+fn root_node(&self) -> super::Result<Node> {
+self.root_data().map(Node::decoded)
 }
 
 /// Get the data of the root node.
-fn root_data(&self) -> &[u8] {
-self.db.get(self.root).expect("Trie root not found!")
+fn root_data(&self) -> super::Result<&[u8]> {
+self.db.get(self.root).ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root)))
 }
 
 /// Get the root node as a `Node`.
-fn get_node<'a>(&'a self, node: &'a [u8]) -> Node {
-Node::decoded(self.get_raw_or_lookup(node))
+fn get_node(&'db self, node: &'db [u8]) -> super::Result<Node> {
+self.get_raw_or_lookup(node).map(Node::decoded)
 }
 
 /// Indentation helper for `formal_all`.
@@ -154,7 +155,9 @@ impl<'db> TrieDB<'db> {
 Node::Leaf(slice, value) => try!(writeln!(f, "'{:?}: {:?}.", slice, value.pretty())),
 Node::Extension(ref slice, ref item) => {
 try!(write!(f, "'{:?} ", slice));
-try!(self.fmt_all(self.get_node(item), f, deepness));
+if let Ok(node) = self.get_node(item) {
+try!(self.fmt_all(node, f, deepness));
+}
 },
 Node::Branch(ref nodes, ref value) => {
 try!(writeln!(f, ""));
@@ -164,12 +167,15 @@ impl<'db> TrieDB<'db> {
 }
 for i in 0..16 {
 match self.get_node(nodes[i]) {
-Node::Empty => {},
-n => {
+Ok(Node::Empty) => {},
+Ok(n) => {
 try!(self.fmt_indent(f, deepness + 1));
 try!(write!(f, "'{:x} ", i));
 try!(self.fmt_all(n, f, deepness + 1));
 }
+Err(e) => {
+try!(write!(f, "ERROR: {}", e));
+}
 }
 }
 },
@@ -182,38 +188,46 @@ impl<'db> TrieDB<'db> {
 }
 
 /// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists.
-fn do_lookup<'a, 'key>(&'a self, key: &NibbleSlice<'key>) -> Option<&'a [u8]> where 'a: 'key {
-let root_rlp = self.root_data();
-self.get_from_node(root_rlp, key)
+fn do_lookup<'key>(&'db self, key: &NibbleSlice<'key>) -> super::Result<Option<&'db [u8]>>
+where 'db: 'key
+{
+let root_rlp = try!(self.root_data());
+self.get_from_node(&root_rlp, key)
 }
 
 /// Recursible function to retrieve the value given a `node` and a partial `key`. `None` if no
 /// value exists for the key.
 ///
 /// Note: Not a public API; use Trie trait functions.
-fn get_from_node<'a, 'key>(&'a self, node: &'a [u8], key: &NibbleSlice<'key>) -> Option<&'a [u8]> where 'a: 'key {
+fn get_from_node<'key>(&'db self, node: &'db [u8], key: &NibbleSlice<'key>) -> super::Result<Option<&'db [u8]>>
+where 'db: 'key
+{
 match Node::decoded(node) {
-Node::Leaf(ref slice, ref value) if key == slice => Some(value),
+Node::Leaf(ref slice, ref value) if key == slice => Ok(Some(value)),
 Node::Extension(ref slice, ref item) if key.starts_with(slice) => {
-self.get_from_node(self.get_raw_or_lookup(item), &key.mid(slice.len()))
+let data = try!(self.get_raw_or_lookup(item));
+self.get_from_node(data, &key.mid(slice.len()))
 },
 Node::Branch(ref nodes, value) => match key.is_empty() {
-true => value,
-false => self.get_from_node(self.get_raw_or_lookup(nodes[key.at(0) as usize]), &key.mid(1))
+true => Ok(value),
+false => self.get_from_node(try!(self.get_raw_or_lookup(nodes[key.at(0) as usize])), &key.mid(1))
 },
-_ => None
+_ => Ok(None)
 }
 }
 
 /// Given some node-describing data `node`, return the actual node RLP.
 /// This could be a simple identity operation in the case that the node is sufficiently small, but
 /// may require a database lookup.
-fn get_raw_or_lookup<'a>(&'a self, node: &'a [u8]) -> &'a [u8] {
+fn get_raw_or_lookup(&'db self, node: &'db [u8]) -> super::Result<&'db [u8]> {
 // check if its sha3 + len
 let r = Rlp::new(node);
 match r.is_data() && r.size() == 32 {
-true => self.db.get(&r.as_val::<H256>()).unwrap_or_else(|| panic!("Not found! {:?}", r.as_val::<H256>())),
-false => node
+true => {
+let key = r.as_val::<H256>();
+self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key)))
+}
+false => Ok(node)
 }
 }
 }
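The hunk above turns a missing database entry into a boxed error rather than a panic. A self-contained sketch of the same pattern, with a `HashMap` and `u64` standing in for the `HashDB` backing store and the `H256` key (both stand-ins are assumptions, and `LookupError` is a hypothetical local type):

use std::collections::HashMap;

#[derive(Debug)]
enum LookupError { IncompleteDatabase(u64) }

// Missing nodes become recoverable errors instead of panics.
fn get_raw(db: &HashMap<u64, Vec<u8>>, key: u64) -> Result<&[u8], Box<LookupError>> {
	db.get(&key)
		.map(|v| &v[..])
		.ok_or_else(|| Box::new(LookupError::IncompleteDatabase(key)))
}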
@@ -229,7 +243,6 @@ enum Status {
 #[derive(Clone, Eq, PartialEq)]
 struct Crumb<'a> {
 node: Node<'a>,
-// key: &'a[u8],
 status: Status,
 }
 
@@ -262,7 +275,7 @@ impl<'a> TrieDBIterator<'a> {
 trail: vec![],
 key_nibbles: Vec::new(),
 };
-r.descend(db.root_data());
+r.descend(db.root_data().unwrap());
 r
 }
 
@@ -270,7 +283,7 @@ impl<'a> TrieDBIterator<'a> {
 fn descend(&mut self, d: &'a [u8]) {
 self.trail.push(Crumb {
 status: Status::Entering,
-node: self.db.get_node(d)
+node: self.db.get_node(d).unwrap(),
 });
 match self.trail.last().unwrap().node {
 Node::Leaf(n, _) | Node::Extension(n, _) => { self.key_nibbles.extend(n.iter()); },
@@ -342,11 +355,9 @@ impl<'db> Trie for TrieDB<'db> {
 
 fn root(&self) -> &H256 { self.root }
 
-fn contains(&self, key: &[u8]) -> bool {
-self.get(key).is_some()
-}
-
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Option<&'a [u8]> where 'a: 'key {
+fn get<'a, 'key>(&'a self, key: &'key [u8]) -> super::Result<Option<&'a [u8]>>
+where 'a: 'key
+{
 self.do_lookup(&NibbleSlice::new(key))
 }
 }
@@ -362,8 +373,8 @@ impl<'db> fmt::Debug for TrieDB<'db> {
 
 #[test]
 fn iterator() {
-use super::trietraits::TrieMut;
 use memorydb::*;
+use super::TrieMut;
 use super::triedbmut::*;
 
 let d = vec![ &b"A"[..], &b"AA"[..], &b"AB"[..], &b"B"[..] ];
@@ -373,7 +384,7 @@ fn iterator() {
 {
 let mut t = TrieDBMut::new(&mut memdb, &mut root);
 for x in &d {
-t.insert(x, x);
+t.insert(x, x).unwrap();
 }
 }
 assert_eq!(d.iter().map(|i|i.to_vec()).collect::<Vec<_>>(), TrieDB::new(&memdb, &root).unwrap().iter().map(|x|x.0).collect::<Vec<_>>());
@@ -273,11 +273,11 @@ impl<'a> Index<&'a StorageHandle> for NodeStorage {
 /// let mut t = TrieDBMut::new(&mut memdb, &mut root);
 /// assert!(t.is_empty());
 /// assert_eq!(*t.root(), SHA3_NULL_RLP);
-/// t.insert(b"foo", b"bar");
-/// assert!(t.contains(b"foo"));
-/// assert_eq!(t.get(b"foo").unwrap(), b"bar");
-/// t.remove(b"foo");
-/// assert!(!t.contains(b"foo"));
+/// t.insert(b"foo", b"bar").unwrap();
+/// assert!(t.contains(b"foo").unwrap());
+/// assert_eq!(t.get(b"foo").unwrap().unwrap(), b"bar");
+/// t.remove(b"foo").unwrap();
+/// assert!(!t.contains(b"foo").unwrap());
 /// }
 /// ```
 pub struct TrieDBMut<'a> {
@@ -309,9 +309,9 @@ impl<'a> TrieDBMut<'a> {
 
 /// Create a new trie with the backing database `db` and `root.
 /// Returns an error if `root` does not exist.
-pub fn from_existing(db: &'a mut HashDB, root: &'a mut H256) -> Result<Self, TrieError> {
+pub fn from_existing(db: &'a mut HashDB, root: &'a mut H256) -> super::Result<Self> {
 if !db.contains(root) {
-return Err(TrieError::InvalidStateRoot);
+return Err(Box::new(TrieError::InvalidStateRoot(*root)));
 }
 
 let root_handle = NodeHandle::Hash(*root);
@@ -335,23 +335,23 @@ impl<'a> TrieDBMut<'a> {
 }
 
 // cache a node by hash
-fn cache(&mut self, hash: H256) -> StorageHandle {
-let node_rlp = self.db.get(&hash).expect("Not found!");
+fn cache(&mut self, hash: H256) -> super::Result<StorageHandle> {
+let node_rlp = try!(self.db.get(&hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash))));
 let node = Node::from_rlp(node_rlp, &*self.db, &mut self.storage);
-self.storage.alloc(Stored::Cached(node, hash))
+Ok(self.storage.alloc(Stored::Cached(node, hash)))
 }
 
 // inspect a node, choosing either to replace, restore, or delete it.
 // if restored or replaced, returns the new node along with a flag of whether it was changed.
-fn inspect<F>(&mut self, stored: Stored, inspector: F) -> Option<(Stored, bool)>
-where F: FnOnce(&mut Self, Node) -> Action {
-match stored {
-Stored::New(node) => match inspector(self, node) {
+fn inspect<F>(&mut self, stored: Stored, inspector: F) -> super::Result<Option<(Stored, bool)>>
+where F: FnOnce(&mut Self, Node) -> super::Result<Action> {
+Ok(match stored {
+Stored::New(node) => match try!(inspector(self, node)) {
 Action::Restore(node) => Some((Stored::New(node), false)),
 Action::Replace(node) => Some((Stored::New(node), true)),
 Action::Delete => None,
 },
-Stored::Cached(node, hash) => match inspector(self, node) {
+Stored::Cached(node, hash) => match try!(inspector(self, node)) {
 Action::Restore(node) => Some((Stored::Cached(node, hash), false)),
 Action::Replace(node) => {
 self.death_row.insert(hash);
@@ -362,21 +362,22 @@ impl<'a> TrieDBMut<'a> {
 None
 }
 },
-}
+})
 }
 
 // walk the trie, attempting to find the key's node.
-fn lookup<'x, 'key>(&'x self, partial: NibbleSlice<'key>, handle: &NodeHandle) -> Option<&'x [u8]>
-where 'x: 'key {
+fn lookup<'x, 'key>(&'x self, partial: NibbleSlice<'key>, handle: &NodeHandle) -> super::Result<Option<&'x [u8]>>
+where 'x: 'key
+{
 match *handle {
 NodeHandle::Hash(ref hash) => self.do_db_lookup(hash, partial),
 NodeHandle::InMemory(ref handle) => match self.storage[handle] {
-Node::Empty => None,
+Node::Empty => Ok(None),
 Node::Leaf(ref key, ref value) => {
 if NibbleSlice::from_encoded(key).0 == partial {
-Some(value)
+Ok(Some(value))
 } else {
-None
+Ok(None)
 }
 }
 Node::Extension(ref slice, ref child) => {
@@ -384,15 +385,18 @@ impl<'a> TrieDBMut<'a> {
 if partial.starts_with(&slice) {
 self.lookup(partial.mid(slice.len()), child)
 } else {
-None
+Ok(None)
 }
 }
 Node::Branch(ref children, ref value) => {
 if partial.is_empty() {
-value.as_ref().map(|v| &v[..])
+Ok(value.as_ref().map(|v| &v[..]))
 } else {
 let idx = partial.at(0);
-(&children[idx as usize]).as_ref().and_then(|child| self.lookup(partial.mid(1), child))
+match children[idx as usize].as_ref() {
+Some(child) => self.lookup(partial.mid(1), child),
+None => Ok(None),
+}
 }
 }
 }
@@ -400,60 +404,68 @@ impl<'a> TrieDBMut<'a> {
 }
 
 /// Return optional data for a key given as a `NibbleSlice`. Returns `None` if no data exists.
-fn do_db_lookup<'x, 'key>(&'x self, hash: &H256, key: NibbleSlice<'key>) -> Option<&'x [u8]> where 'x: 'key {
-self.db.get(hash).and_then(|node_rlp| self.get_from_db_node(node_rlp, key))
+fn do_db_lookup<'x, 'key>(&'x self, hash: &H256, key: NibbleSlice<'key>) -> super::Result<Option<&'x [u8]>>
+where 'x: 'key
+{
+self.db.get(hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(*hash)))
+.and_then(|node_rlp| self.get_from_db_node(node_rlp, key))
 }
 
 /// Recursible function to retrieve the value given a `node` and a partial `key`. `None` if no
 /// value exists for the key.
 ///
 /// Note: Not a public API; use Trie trait functions.
-fn get_from_db_node<'x, 'key>(&'x self, node: &'x [u8], key: NibbleSlice<'key>) -> Option<&'x [u8]> where 'x: 'key {
+fn get_from_db_node<'x, 'key>(&'x self, node: &'x [u8], key: NibbleSlice<'key>) -> super::Result<Option<&'x [u8]>>
+where 'x: 'key
+{
 match RlpNode::decoded(node) {
-RlpNode::Leaf(ref slice, ref value) if &key == slice => Some(value),
+RlpNode::Leaf(ref slice, ref value) if &key == slice => Ok(Some(value)),
 RlpNode::Extension(ref slice, ref item) if key.starts_with(slice) => {
-self.get_from_db_node(self.get_raw_or_lookup(item), key.mid(slice.len()))
+self.get_from_db_node(try!(self.get_raw_or_lookup(item)), key.mid(slice.len()))
 },
 RlpNode::Branch(ref nodes, value) => match key.is_empty() {
-true => value,
-false => self.get_from_db_node(self.get_raw_or_lookup(nodes[key.at(0) as usize]), key.mid(1))
+true => Ok(value),
+false => self.get_from_db_node(try!(self.get_raw_or_lookup(nodes[key.at(0) as usize])), key.mid(1))
 },
-_ => None
+_ => Ok(None),
 }
 }
 
 /// Given some node-describing data `node`, return the actual node RLP.
 /// This could be a simple identity operation in the case that the node is sufficiently small, but
 /// may require a database lookup.
-fn get_raw_or_lookup<'x>(&'x self, node: &'x [u8]) -> &'x [u8] {
+fn get_raw_or_lookup<'x>(&'x self, node: &'x [u8]) -> super::Result<&'x [u8]> {
 // check if its sha3 + len
 let r = Rlp::new(node);
 match r.is_data() && r.size() == 32 {
-true => self.db.get(&r.as_val::<H256>()).expect("Not found!"),
-false => node
+true => {
+let key = r.as_val::<H256>();
+self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key)))
+}
+false => Ok(node)
 }
 }
 
 /// insert a key, value pair into the trie, creating new nodes if necessary.
-fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: Bytes) -> (StorageHandle, bool) {
+fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: Bytes) -> super::Result<(StorageHandle, bool)> {
 let h = match handle {
 NodeHandle::InMemory(h) => h,
-NodeHandle::Hash(h) => self.cache(h)
+NodeHandle::Hash(h) => try!(self.cache(h)),
 };
 let stored = self.storage.destroy(h);
-let (new_stored, changed) = self.inspect(stored, move |trie, stored| {
-trie.insert_inspector(stored, partial, value).into_action()
-}).expect("Insertion never deletes.");
+let (new_stored, changed) = try!(self.inspect(stored, move |trie, stored| {
+trie.insert_inspector(stored, partial, value).map(|a| a.into_action())
+})).expect("Insertion never deletes.");
 
-(self.storage.alloc(new_stored), changed)
+Ok((self.storage.alloc(new_stored), changed))
 }
 
 /// the insertion inspector.
 #[cfg_attr(feature = "dev", allow(cyclomatic_complexity))]
-fn insert_inspector(&mut self, node: Node, partial: NibbleSlice, value: Bytes) -> InsertAction {
+fn insert_inspector(&mut self, node: Node, partial: NibbleSlice, value: Bytes) -> super::Result<InsertAction> {
 trace!(target: "trie", "augmented (partial: {:?}, value: {:?})", partial, value.pretty());
 
-match node {
+Ok(match node {
 Node::Empty => {
 trace!(target: "trie", "empty: COMPOSE");
 InsertAction::Replace(Node::Leaf(partial.encoded(true), value))
@@ -473,11 +485,11 @@ impl<'a> TrieDBMut<'a> {
 let partial = partial.mid(1);
 if let Some(child) = children[idx].take() {
 // original had something there. recurse down into it.
-let (new_child, changed) = self.insert_at(child, partial, value);
+let (new_child, changed) = try!(self.insert_at(child, partial, value));
 children[idx] = Some(new_child.into());
 if !changed {
 // the new node we composed didn't change. that means our branch is untouched too.
-return InsertAction::Restore(Node::Branch(children, stored_value));
+return Ok(InsertAction::Restore(Node::Branch(children, stored_value)));
 }
 } else {
 // original had nothing there. compose a leaf.
@@ -516,7 +528,8 @@ impl<'a> TrieDBMut<'a> {
 };
 
 // always replace because whatever we get out here is not the branch we started with.
-InsertAction::Replace(self.insert_inspector(branch, partial, value).unwrap_node())
+let branch_action = try!(self.insert_inspector(branch, partial, value)).unwrap_node();
+InsertAction::Replace(branch_action)
 } else if cp == existing_key.len() {
 trace!(target: "trie", "complete-prefix (cp={:?}): AUGMENT-AT-END", cp);
 
@@ -524,7 +537,7 @@ impl<'a> TrieDBMut<'a> {
 // make a stub branch and an extension.
 let branch = Node::Branch(empty_children(), Some(stored_value));
 // augment the new branch.
-let branch = self.insert_inspector(branch, partial.mid(cp), value).unwrap_node();
+let branch = try!(self.insert_inspector(branch, partial.mid(cp), value)).unwrap_node();
 
 // always replace since we took a leaf and made an extension.
 let branch_handle = self.storage.alloc(Stored::New(branch)).into();
@ -537,7 +550,7 @@ impl<'a> TrieDBMut<'a> {
|
|||||||
let low = Node::Leaf(existing_key.mid(cp).encoded(true), stored_value);
|
let low = Node::Leaf(existing_key.mid(cp).encoded(true), stored_value);
|
||||||
// augment it. this will result in the Leaf -> cp == 0 routine,
|
// augment it. this will result in the Leaf -> cp == 0 routine,
|
||||||
// which creates a branch.
|
// which creates a branch.
|
||||||
let augmented_low = self.insert_inspector(low, partial.mid(cp), value).unwrap_node();
|
let augmented_low = try!(self.insert_inspector(low, partial.mid(cp), value)).unwrap_node();
|
||||||
|
|
||||||
// make an extension using it. this is a replacement.
|
// make an extension using it. this is a replacement.
|
||||||
InsertAction::Replace(Node::Extension(
|
InsertAction::Replace(Node::Extension(
|
||||||
@ -568,14 +581,15 @@ impl<'a> TrieDBMut<'a> {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// continue inserting.
|
// continue inserting.
|
||||||
InsertAction::Replace(self.insert_inspector(Node::Branch(children, None), partial, value).unwrap_node())
|
let branch_action = try!(self.insert_inspector(Node::Branch(children, None), partial, value)).unwrap_node();
|
||||||
|
InsertAction::Replace(branch_action)
|
||||||
} else if cp == existing_key.len() {
|
} else if cp == existing_key.len() {
|
||||||
trace!(target: "trie", "complete-prefix (cp={:?}): AUGMENT-AT-END", cp);
|
trace!(target: "trie", "complete-prefix (cp={:?}): AUGMENT-AT-END", cp);
|
||||||
|
|
||||||
// fully-shared prefix.
|
// fully-shared prefix.
|
||||||
|
|
||||||
// insert into the child node.
|
// insert into the child node.
|
||||||
let (new_child, changed) = self.insert_at(child_branch, partial.mid(cp), value);
|
let (new_child, changed) = try!(self.insert_at(child_branch, partial.mid(cp), value));
|
||||||
let new_ext = Node::Extension(existing_key.encoded(false), new_child.into());
|
let new_ext = Node::Extension(existing_key.encoded(false), new_child.into());
|
||||||
|
|
||||||
// if the child branch wasn't changed, meaning this extension remains the same.
|
// if the child branch wasn't changed, meaning this extension remains the same.
|
||||||
@ -589,7 +603,7 @@ impl<'a> TrieDBMut<'a> {
|
|||||||
// partially-shared.
|
// partially-shared.
|
||||||
let low = Node::Extension(existing_key.mid(cp).encoded(false), child_branch);
|
let low = Node::Extension(existing_key.mid(cp).encoded(false), child_branch);
|
||||||
// augment the extension. this will take the cp == 0 path, creating a branch.
|
// augment the extension. this will take the cp == 0 path, creating a branch.
|
||||||
let augmented_low = self.insert_inspector(low, partial.mid(cp), value).unwrap_node();
|
let augmented_low = try!(self.insert_inspector(low, partial.mid(cp), value)).unwrap_node();
|
||||||
|
|
||||||
// always replace, since this extension is not the one we started with.
|
// always replace, since this extension is not the one we started with.
|
||||||
// this is known because the partial key is only the common prefix.
|
// this is known because the partial key is only the common prefix.
|
||||||
@ -599,37 +613,38 @@ impl<'a> TrieDBMut<'a> {
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
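The change repeated throughout this file is that the node-manipulating helpers now return the trie module's `Result` alias instead of bare values, and callers forward failures with `try!`, so a node hash missing from the backing database surfaces as an error rather than a panic. A minimal, self-contained sketch of that shape follows; the `TrieError` name and its variant are assumptions for illustration, not the module's actual definitions.

// Sketch only: an assumed error type and Result alias in the spirit of the trie
// module's, plus the try!-propagation pattern that insert_at and friends now use.
#[derive(Debug, PartialEq)]
pub enum TrieError {
    // hash of a node that the backing database could not supply (assumed variant)
    IncompleteDatabase([u8; 32]),
}

pub type Result<T> = ::std::result::Result<T, TrieError>;

// stands in for self.cache(h): loading a node can now fail
fn load_node(present: bool) -> Result<Vec<u8>> {
    if present {
        Ok(vec![0u8])
    } else {
        Err(TrieError::IncompleteDatabase([0u8; 32]))
    }
}

// stands in for insert_at: the error is bubbled up with try! instead of panicking
fn insert_like(present: bool) -> Result<usize> {
    let node = try!(load_node(present));
    Ok(node.len())
}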
/// Remove a node from the trie based on key.
-fn remove_at(&mut self, handle: NodeHandle, partial: NibbleSlice) -> Option<(StorageHandle, bool)> {
+fn remove_at(&mut self, handle: NodeHandle, partial: NibbleSlice) -> super::Result<Option<(StorageHandle, bool)>> {
let stored = match handle {
NodeHandle::InMemory(h) => self.storage.destroy(h),
NodeHandle::Hash(h) => {
-let handle = self.cache(h);
+let handle = try!(self.cache(h));
self.storage.destroy(handle)
}
};

-self.inspect(stored, move |trie, node| trie.remove_inspector(node, partial))
+let opt = try!(self.inspect(stored, move |trie, node| trie.remove_inspector(node, partial)));
-.map(|(new, changed)| (self.storage.alloc(new), changed))
+
+Ok(opt.map(|(new, changed)| (self.storage.alloc(new), changed)))
}

/// the removal inspector
-fn remove_inspector(&mut self, node: Node, partial: NibbleSlice) -> Action {
+fn remove_inspector(&mut self, node: Node, partial: NibbleSlice) -> super::Result<Action> {
-match (node, partial.is_empty()) {
+Ok(match (node, partial.is_empty()) {
(Node::Empty, _) => Action::Delete,
(Node::Branch(c, None), true) => Action::Restore(Node::Branch(c, None)),
(Node::Branch(children, _), true) => {
// always replace since we took the value out.
-Action::Replace(self.fix(Node::Branch(children, None)))
+Action::Replace(try!(self.fix(Node::Branch(children, None))))
}
(Node::Branch(mut children, value), false) => {
let idx = partial.at(0) as usize;
if let Some(child) = children[idx].take() {
trace!(target: "trie", "removing value out of branch child, partial={:?}", partial);
-match self.remove_at(child, partial.mid(1)) {
+match try!(self.remove_at(child, partial.mid(1))) {
Some((new, changed)) => {
children[idx] = Some(new.into());
let branch = Node::Branch(children, value);
@@ -644,7 +659,7 @@ impl<'a> TrieDBMut<'a> {
// the child we took was deleted.
// the node may need fixing.
trace!(target: "trie", "branch child deleted, partial={:?}", partial);
-Action::Replace(self.fix(Node::Branch(children, value)))
+Action::Replace(try!(self.fix(Node::Branch(children, value))))
}
}
} else {
@@ -670,14 +685,14 @@ impl<'a> TrieDBMut<'a> {
if cp == existing_len {
// try to remove from the child branch.
trace!(target: "trie", "removing from extension child, partial={:?}", partial);
-match self.remove_at(child_branch, partial.mid(cp)) {
+match try!(self.remove_at(child_branch, partial.mid(cp))) {
Some((new_child, changed)) => {
let new_child = new_child.into();

// if the child branch was unchanged, then the extension is too.
// otherwise, this extension may need fixing.
match changed {
-true => Action::Replace(self.fix(Node::Extension(encoded, new_child))),
+true => Action::Replace(try!(self.fix(Node::Extension(encoded, new_child)))),
false => Action::Restore(Node::Extension(encoded, new_child)),
}
}
@@ -692,7 +707,7 @@ impl<'a> TrieDBMut<'a> {
Action::Restore(Node::Extension(encoded, child_branch))
}
}
-}
+})
}

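The inspectors above communicate their outcome through `Action` / `InsertAction` values that tell the caller what to do with the node it handed in; `expect("Insertion never deletes.")` relies on insertion never producing a delete. A rough, compilable sketch of what such enums look like follows; this is an assumption about the surrounding module, the real definitions are not part of this diff.

// Hypothetical stand-in for the node type; the real `Node` enum is defined
// earlier in this file and carries leaf/extension/branch payloads.
struct Node;

// Assumed shape of the inspector results referenced above: each variant tells
// the caller what to do with the node it passed in.
enum Action {
    Delete,        // the node disappears (only removal can produce this)
    Restore(Node), // unchanged; put it back exactly as it was
    Replace(Node), // rewritten; store the new node in its place
}

enum InsertAction {
    Replace(Node), // insertion produced a different node
    Restore(Node), // insertion left the node untouched
}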
/// Given a node which may be in an _invalid state_, fix it such that it is then in a valid
@@ -701,7 +716,7 @@ impl<'a> TrieDBMut<'a> {
/// _invalid state_ means:
/// - Branch node where there is only a single entry;
/// - Extension node followed by anything other than a Branch node.
-fn fix(&mut self, node: Node) -> Node {
+fn fix(&mut self, node: Node) -> super::Result<Node> {
match node {
Node::Branch(mut children, value) => {
// if only a single value, transmute to leaf/extension and feed through fixed.
@@ -734,12 +749,12 @@ impl<'a> TrieDBMut<'a> {
(UsedIndex::None, Some(value)) => {
// make a leaf.
trace!(target: "trie", "fixing: branch -> leaf");
-Node::Leaf(NibbleSlice::new(&[]).encoded(true), value)
+Ok(Node::Leaf(NibbleSlice::new(&[]).encoded(true), value))
}
(_, value) => {
// all is well.
trace!(target: "trie", "fixing: restoring branch");
-Node::Branch(children, value)
+Ok(Node::Branch(children, value))
}
}
}
@@ -747,7 +762,7 @@ impl<'a> TrieDBMut<'a> {
let stored = match child {
NodeHandle::InMemory(h) => self.storage.destroy(h),
NodeHandle::Hash(h) => {
-let handle = self.cache(h);
+let handle = try!(self.cache(h));
self.storage.destroy(handle)
}
};
@@ -782,7 +797,7 @@ impl<'a> TrieDBMut<'a> {

let new_partial = NibbleSlice::new_composed(&partial, &sub_partial);
trace!(target: "trie", "fixing: extension -> leaf. new_partial={:?}", new_partial);
-Node::Leaf(new_partial.encoded(true), value)
+Ok(Node::Leaf(new_partial.encoded(true), value))
}
child_node => {
trace!(target: "trie", "fixing: restoring extension");
@@ -794,11 +809,11 @@ impl<'a> TrieDBMut<'a> {
Stored::New(child_node)
};

-Node::Extension(partial, self.storage.alloc(stored).into())
+Ok(Node::Extension(partial, self.storage.alloc(stored).into()))
}
}
}
-other => other, // only ext and branch need fixing.
+other => Ok(other), // only ext and branch need fixing.
}
}

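`fix` is what collapses a branch that has lost all but one occupant back into an extension or leaf so the trie stays canonical; with this change it is fallible because the collapse may need to load a child node from the database. A toy illustration of the single-entry-branch collapse on a made-up node type follows; it is illustrative only and not the module's code.

// Toy illustration (not the module's code) of the "single-entry branch" repair
// that fix() performs: a branch holding exactly one child collapses to that child.
#[derive(Debug, PartialEq)]
enum ToyNode {
    Leaf(Vec<u8>),
    Branch(Vec<Option<ToyNode>>),
}

fn toy_fix(node: ToyNode) -> ToyNode {
    match node {
        ToyNode::Branch(mut children) => {
            // indices of the occupied slots
            let used: Vec<usize> = children.iter()
                .enumerate()
                .filter(|&(_, c)| c.is_some())
                .map(|(i, _)| i)
                .collect();
            if used.len() == 1 {
                // exactly one occupied slot: collapse to the child itself
                children[used[0]].take().unwrap()
            } else {
                ToyNode::Branch(children)
            }
        }
        other => other, // leaves are already canonical
    }
}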
@@ -881,29 +896,27 @@ impl<'a> TrieMut for TrieDBMut<'a> {
}
}

-fn get<'b, 'key>(&'b self, key: &'key [u8]) -> Option<&'b [u8]> where 'b: 'key {
+fn get<'x, 'key>(&'x self, key: &'key [u8]) -> super::Result<Option<&'x [u8]>> where 'x: 'key {
self.lookup(NibbleSlice::new(key), &self.root_handle)
}

-fn contains(&self, key: &[u8]) -> bool {
-self.get(key).is_some()
-}

-fn insert(&mut self, key: &[u8], value: &[u8]) {
+fn insert(&mut self, key: &[u8], value: &[u8]) -> super::Result<()> {
if value.is_empty() {
-self.remove(key);
+return self.remove(key);
-return;
}

let root_handle = self.root_handle();
-let (new_handle, _) = self.insert_at(root_handle, NibbleSlice::new(key), value.to_owned());
+let (new_handle, _) = try!(self.insert_at(root_handle, NibbleSlice::new(key), value.to_owned()));
self.root_handle = NodeHandle::InMemory(new_handle);

+Ok(())
}

-fn remove(&mut self, key: &[u8]) {
+fn remove(&mut self, key: &[u8]) -> super::Result<()> {
let root_handle = self.root_handle();
let key = NibbleSlice::new(key);
-match self.remove_at(root_handle, key) {
+match try!(self.remove_at(root_handle, key)) {
Some((handle, _)) => {
self.root_handle = NodeHandle::InMemory(handle);
}
@@ -912,6 +925,8 @@ impl<'a> TrieMut for TrieDBMut<'a> {
*self.root = SHA3_NULL_RLP;
}
};

+Ok(())
}
}

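With `insert`, `get` and `remove` now fallible, call sites either propagate errors with `try!` or, as the updated tests below do, assert success with `.unwrap()`. A hedged sketch of both styles against a stand-in trait follows; the trait here is illustrative, the real post-change `TrieMut` is defined elsewhere in the trie module.

// Stand-in trait purely for illustration; the real post-change TrieMut lives in
// the trie module and uses that module's Result alias.
type TrieResult<T> = Result<T, String>;

trait FallibleTrieMut {
    fn insert(&mut self, key: &[u8], value: &[u8]) -> TrieResult<()>;
    fn get(&self, key: &[u8]) -> TrieResult<Option<Vec<u8>>>;
    fn remove(&mut self, key: &[u8]) -> TrieResult<()>;
}

// library-style caller: forward the error to whoever holds the trie
fn write_then_read<T: FallibleTrieMut>(t: &mut T) -> TrieResult<Option<Vec<u8>>> {
    try!(t.insert(b"key", b"value"));
    let read_back = try!(t.get(b"key"));
    try!(t.remove(b"key"));
    Ok(read_back)
}

// test-style caller: a failure here is a bug, so just unwrap, as the updated
// tests below do.
fn write_then_read_in_test<T: FallibleTrieMut>(t: &mut T) {
    t.insert(b"key", b"value").unwrap();
    assert_eq!(t.get(b"key").unwrap(), Some(b"value".to_vec()));
}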
@@ -930,7 +945,7 @@ mod tests {
use super::*;
use rlp::*;
use bytes::ToPretty;
-use super::super::trietraits::*;
+use super::super::TrieMut;
use super::super::standardmap::*;

fn populate_trie<'db>(db: &'db mut HashDB, root: &'db mut H256, v: &[(Vec<u8>, Vec<u8>)]) -> TrieDBMut<'db> {
@@ -938,7 +953,7 @@ mod tests {
for i in 0..v.len() {
let key: &[u8]= &v[i].0;
let val: &[u8] = &v[i].1;
-t.insert(key, val);
+t.insert(key, val).unwrap();
}
t
}
@@ -946,7 +961,7 @@ mod tests {
fn unpopulate_trie<'db>(t: &mut TrieDBMut<'db>, v: &[(Vec<u8>, Vec<u8>)]) {
for i in v {
let key: &[u8]= &i.0;
-t.remove(key);
+t.remove(key).unwrap();
}
}

@@ -1009,7 +1024,7 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
assert_eq!(*t.root(), trie_root(vec![ (vec![0x01u8, 0x23], vec![0x01u8, 0x23]) ]));
}

@@ -1020,15 +1035,15 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t1 = TrieDBMut::new(&mut memdb, &mut root);
-t1.insert(&[0x01, 0x23], &big_value.to_vec());
+t1.insert(&[0x01, 0x23], &big_value.to_vec()).unwrap();
-t1.insert(&[0x01, 0x34], &big_value.to_vec());
+t1.insert(&[0x01, 0x34], &big_value.to_vec()).unwrap();
let mut memdb2 = MemoryDB::new();
let mut root2 = H256::new();
let mut t2 = TrieDBMut::new(&mut memdb2, &mut root2);
-t2.insert(&[0x01], &big_value.to_vec());
+t2.insert(&[0x01], &big_value.to_vec()).unwrap();
-t2.insert(&[0x01, 0x23], &big_value.to_vec());
+t2.insert(&[0x01, 0x23], &big_value.to_vec()).unwrap();
-t2.insert(&[0x01, 0x34], &big_value.to_vec());
+t2.insert(&[0x01, 0x34], &big_value.to_vec()).unwrap();
-t2.remove(&[0x01]);
+t2.remove(&[0x01]).unwrap();
}

#[test]
@@ -1036,8 +1051,8 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
-t.insert(&[0x01u8, 0x23], &[0x23u8, 0x45]);
+t.insert(&[0x01u8, 0x23], &[0x23u8, 0x45]).unwrap();
assert_eq!(*t.root(), trie_root(vec![ (vec![0x01u8, 0x23], vec![0x23u8, 0x45]) ]));
}

@@ -1046,8 +1061,8 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
-t.insert(&[0x11u8, 0x23], &[0x11u8, 0x23]);
+t.insert(&[0x11u8, 0x23], &[0x11u8, 0x23]).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![0x01u8, 0x23], vec![0x01u8, 0x23]),
(vec![0x11u8, 0x23], vec![0x11u8, 0x23])
@@ -1059,9 +1074,9 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
-t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]);
+t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap();
-t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]);
+t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![0x01u8, 0x23], vec![0x01u8, 0x23]),
(vec![0x81u8, 0x23], vec![0x81u8, 0x23]),
@@ -1074,8 +1089,8 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
-t.insert(&[], &[0x0]);
+t.insert(&[], &[0x0]).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![], vec![0x0]),
(vec![0x01u8, 0x23], vec![0x01u8, 0x23]),
@@ -1087,8 +1102,8 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
-t.insert(&[0x01u8, 0x34], &[0x01u8, 0x34]);
+t.insert(&[0x01u8, 0x34], &[0x01u8, 0x34]).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![0x01u8, 0x23], vec![0x01u8, 0x23]),
(vec![0x01u8, 0x34], vec![0x01u8, 0x34]),
@@ -1100,9 +1115,9 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01, 0x23, 0x45], &[0x01]);
+t.insert(&[0x01, 0x23, 0x45], &[0x01]).unwrap();
-t.insert(&[0x01, 0xf3, 0x45], &[0x02]);
+t.insert(&[0x01, 0xf3, 0x45], &[0x02]).unwrap();
-t.insert(&[0x01, 0xf3, 0xf5], &[0x03]);
+t.insert(&[0x01, 0xf3, 0xf5], &[0x03]).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![0x01, 0x23, 0x45], vec![0x01]),
(vec![0x01, 0xf3, 0x45], vec![0x02]),
@@ -1118,8 +1133,8 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], big_value0);
+t.insert(&[0x01u8, 0x23], big_value0).unwrap();
-t.insert(&[0x11u8, 0x23], big_value1);
+t.insert(&[0x11u8, 0x23], big_value1).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![0x01u8, 0x23], big_value0.to_vec()),
(vec![0x11u8, 0x23], big_value1.to_vec())
@@ -1133,8 +1148,8 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], big_value);
+t.insert(&[0x01u8, 0x23], big_value).unwrap();
-t.insert(&[0x11u8, 0x23], big_value);
+t.insert(&[0x11u8, 0x23], big_value).unwrap();
assert_eq!(*t.root(), trie_root(vec![
(vec![0x01u8, 0x23], big_value.to_vec()),
(vec![0x11u8, 0x23], big_value.to_vec())
@@ -1146,7 +1161,7 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let t = TrieDBMut::new(&mut memdb, &mut root);
-assert_eq!(t.get(&[0x5]), None);
+assert_eq!(t.get(&[0x5]), Ok(None));
}

#[test]
@@ -1154,10 +1169,10 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
-assert_eq!(t.get(&[0x1, 0x23]).unwrap(), &[0x1u8, 0x23]);
+assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), &[0x1u8, 0x23]);
t.commit();
-assert_eq!(t.get(&[0x1, 0x23]).unwrap(), &[0x1u8, 0x23]);
+assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), &[0x1u8, 0x23]);
}

#[test]
@@ -1165,18 +1180,18 @@ mod tests {
let mut memdb = MemoryDB::new();
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut memdb, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
-t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]);
+t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap();
-t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]);
+t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap();
-assert_eq!(t.get(&[0x01, 0x23]).unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), &[0x01u8, 0x23]);
-assert_eq!(t.get(&[0xf1, 0x23]).unwrap(), &[0xf1u8, 0x23]);
+assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), &[0xf1u8, 0x23]);
-assert_eq!(t.get(&[0x81, 0x23]).unwrap(), &[0x81u8, 0x23]);
+assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), &[0x81u8, 0x23]);
-assert_eq!(t.get(&[0x82, 0x23]), None);
+assert_eq!(t.get(&[0x82, 0x23]), Ok(None));
t.commit();
-assert_eq!(t.get(&[0x01, 0x23]).unwrap(), &[0x01u8, 0x23]);
+assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), &[0x01u8, 0x23]);
-assert_eq!(t.get(&[0xf1, 0x23]).unwrap(), &[0xf1u8, 0x23]);
+assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), &[0xf1u8, 0x23]);
-assert_eq!(t.get(&[0x81, 0x23]).unwrap(), &[0x81u8, 0x23]);
+assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), &[0x81u8, 0x23]);
-assert_eq!(t.get(&[0x82, 0x23]), None);
+assert_eq!(t.get(&[0x82, 0x23]), Ok(None));
}

#[test]
@@ -1223,7 +1238,7 @@ mod tests {
let mut db = MemoryDB::new();
{
let mut t = TrieDBMut::new(&mut db, &mut root);
-t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]);
+t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap();
}

{
@@ -1246,13 +1261,13 @@ mod tests {
let mut root = H256::new();
let mut t = TrieDBMut::new(&mut db, &mut root);
for &(ref key, ref value) in &x {
-t.insert(key, value);
+t.insert(key, value).unwrap();
}

assert_eq!(*t.root(), trie_root(x.clone()));

for &(ref key, _) in &x {
-t.insert(key, &[]);
+t.insert(key, &[]).unwrap();
}

assert!(t.is_empty());
@@ -1,62 +0,0 @@
-// Copyright 2015, 2016 Ethcore (UK) Ltd.
-// This file is part of Parity.
-
-// Parity is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// Parity is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-
-// You should have received a copy of the GNU General Public License
-// along with Parity. If not, see <http://www.gnu.org/licenses/>.
-
-use hash::H256;
-use rlp::SHA3_NULL_RLP;
-
-/// Trie-Item type.
-pub type TrieItem<'a> = (Vec<u8>, &'a [u8]);
-
-/// A key-value datastore implemented as a database-backed modified Merkle tree.
-pub trait Trie {
-/// Return the root of the trie.
-fn root(&self) -> &H256;
-
-/// Is the trie empty?
-fn is_empty(&self) -> bool { *self.root() == SHA3_NULL_RLP }
-
-/// Does the trie contain a given key?
-fn contains(&self, key: &[u8]) -> bool;
-
-/// What is the value of the given key in this trie?
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Option<&'a [u8]> where 'a: 'key;
-
-/// Returns an iterator over elements of trie.
-fn iter<'a>(&'a self) -> Box<Iterator<Item = TrieItem> + 'a>;
-}
-
-/// A key-value datastore implemented as a database-backed modified Merkle tree.
-pub trait TrieMut {
-/// Return the root of the trie.
-fn root(&mut self) -> &H256;
-
-/// Is the trie empty?
-fn is_empty(&self) -> bool;
-
-/// Does the trie contain a given key?
-fn contains(&self, key: &[u8]) -> bool;
-
-/// What is the value of the given key in this trie?
-fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Option<&'a [u8]> where 'a: 'key;
-
-/// Insert a `key`/`value` pair into the trie. An `empty` value is equivalent to removing
-/// `key` from the trie.
-fn insert(&mut self, key: &[u8], value: &[u8]);
-
-/// Remove a `key` from the trie. Equivalent to making it equal to the empty
-/// value.
-fn remove(&mut self, key: &[u8]);
-}
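The old standalone trait file is deleted; judging by the test import switching from `super::super::trietraits::*` to `super::super::TrieMut`, the traits now live in the trie module root, with the mutating methods made fallible to match the impl above. An assumed reconstruction of the relocated `TrieMut` follows, inferred from the new impl signatures; stand-in types are included so the sketch compiles, and details not visible in this diff (such as iteration) are omitted.

// Stand-ins so this sketch compiles on its own; the real types are hash::H256
// and the trie module's Result alias (error type assumed).
pub struct H256(pub [u8; 32]);
pub type Result<T> = ::std::result::Result<T, String>;

// Assumed shape of the relocated TrieMut trait after this change.
pub trait TrieMut {
    /// Return the root of the trie.
    fn root(&mut self) -> &H256;

    /// Is the trie empty?
    fn is_empty(&self) -> bool;

    /// What is the value of the given key in this trie?
    fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result<Option<&'a [u8]>> where 'a: 'key;

    /// Insert a `key`/`value` pair into the trie. An empty value is equivalent to
    /// removing `key` from the trie.
    fn insert(&mut self, key: &[u8], value: &[u8]) -> Result<()>;

    /// Remove a `key` from the trie. Equivalent to making it equal to the empty value.
    fn remove(&mut self, key: &[u8]) -> Result<()>;
}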