From c7f608ec740882eac94038249037ddd955b60d31 Mon Sep 17 00:00:00 2001
From: David
Date: Tue, 10 Jul 2018 14:59:19 +0200
Subject: [PATCH] Delete crates from parity-ethereum and fetch them from
 parity-common instead (#9083)

Use crates from parity-common: hashdb, keccak-hash, kvdb, kvdb-memorydb,
kvdb-rocksdb, memorydb, parity-bytes, parity-crypto, path, patricia_trie,
plain_hasher, rlp, target, test-support, trie-standardmap, triehash
---
 Cargo.lock | 411 ++++----
 Cargo.toml | 13 +-
 dapps/Cargo.toml | 4 +-
 dapps/src/lib.rs | 2 +-
 ethash/Cargo.toml | 2 +-
 ethcore/Cargo.toml | 24 +-
 ethcore/benches/evm.rs | 4 +-
 ethcore/crypto/Cargo.toml | 12 -
 ethcore/crypto/README.md | 5 -
 ethcore/crypto/src/aes.rs | 53 -
 ethcore/crypto/src/aes_gcm.rs | 198 ----
 ethcore/crypto/src/digest.rs | 109 --
 ethcore/crypto/src/error.rs | 82 --
 ethcore/crypto/src/hmac.rs | 88 --
 ethcore/crypto/src/lib.rs | 76 --
 ethcore/crypto/src/pbkdf2.rs | 28 -
 ethcore/crypto/src/scrypt.rs | 38 -
 ethcore/evm/Cargo.toml | 2 +-
 ethcore/light/Cargo.toml | 20 +-
 ethcore/light/src/lib.rs | 4 +-
 ethcore/node_filter/Cargo.toml | 2 +-
 ethcore/private-tx/Cargo.toml | 10 +-
 ethcore/private-tx/src/lib.rs | 4 +-
 ethcore/service/Cargo.toml | 4 +-
 ethcore/src/builtin.rs | 2 +-
 ethcore/src/lib.rs | 6 +-
 ethcore/stratum/Cargo.toml | 2 +-
 ethcore/sync/Cargo.toml | 16 +-
 ethcore/sync/src/blocks.rs | 2 +-
 ethcore/sync/src/lib.rs | 4 +-
 ethcore/transaction/Cargo.toml | 4 +-
 ethcore/types/Cargo.toml | 6 +-
 ethcore/types/src/lib.rs | 2 +-
 ethcore/vm/Cargo.toml | 8 +-
 ethcore/vm/src/lib.rs | 2 +-
 ethkey/Cargo.toml | 2 +-
 ethkey/src/crypto.rs | 4 +-
 ethkey/src/extended.rs | 2 +-
 ethkey/src/lib.rs | 2 +-
 ethstore/Cargo.toml | 2 +-
 ethstore/src/lib.rs | 2 +-
 evmbin/Cargo.toml | 2 +-
 evmbin/src/main.rs | 2 +-
 hash-fetch/Cargo.toml | 4 +-
 hash-fetch/src/lib.rs | 2 +-
 ipfs/Cargo.toml | 4 +-
 ipfs/src/lib.rs | 2 +-
 local-store/Cargo.toml | 6 +-
 miner/Cargo.toml | 4 +-
 parity/lib.rs | 2 +-
 registrar/Cargo.toml | 2 +-
 rpc/Cargo.toml | 12 +-
 rpc/src/lib.rs | 4 +-
 rpc_client/Cargo.toml | 2 +-
 secret_store/Cargo.toml | 10 +-
 secret_store/src/lib.rs | 4 +-
 updater/Cargo.toml | 6 +-
 updater/src/lib.rs | 2 +-
 util/bytes/Cargo.toml | 8 -
 util/bytes/src/lib.rs | 179 ----
 util/hash/Cargo.toml | 15 -
 util/hash/benches/keccak_256.rs | 52 -
 util/hash/src/lib.rs | 141 ---
 util/hashdb/Cargo.toml | 10 -
 util/hashdb/src/lib.rs | 83 --
 util/journaldb/Cargo.toml | 16 +-
 util/journaldb/src/lib.rs | 2 +-
 util/keccak-hasher/Cargo.toml | 4 +-
 util/kvdb-memorydb/Cargo.toml | 8 -
 util/kvdb-memorydb/src/lib.rs | 118 ---
 util/kvdb-rocksdb/Cargo.toml | 19 -
 util/kvdb-rocksdb/src/lib.rs | 864 ---------
 util/kvdb/Cargo.toml | 8 -
 util/kvdb/src/lib.rs | 175 ----
 util/memorydb/Cargo.toml | 19 -
 util/memorydb/benches/memdb.rs | 79 --
 util/memorydb/src/lib.rs | 344 ------
 util/migration-rocksdb/Cargo.toml | 4 +-
 util/network-devp2p/Cargo.toml | 10 +-
 util/network-devp2p/src/connection.rs | 4 +-
 util/network-devp2p/src/discovery.rs | 2 +-
 util/network-devp2p/src/handshake.rs | 2 +-
 util/network-devp2p/src/lib.rs | 4 +-
 util/network-devp2p/tests/tests.rs | 4 +-
 util/network/Cargo.toml | 4 +-
 util/network/src/lib.rs | 2 +-
 util/path/Cargo.toml | 8 -
 util/path/src/lib.rs | 102 --
 util/patricia-trie-ethereum/Cargo.toml | 8 +-
 util/patricia-trie-ethereum/src/lib.rs | 2 +-
 util/patricia_trie/Cargo.toml | 25 -
 util/patricia_trie/benches/trie.rs | 214 ----
 util/patricia_trie/src/fatdb.rs | 147 ---
 util/patricia_trie/src/fatdbmut.rs | 129 ---
 util/patricia_trie/src/lib.rs | 319 ------
 util/patricia_trie/src/lookup.rs | 104 --
 util/patricia_trie/src/nibbleslice.rs | 311 ------
 util/patricia_trie/src/nibblevec.rs | 146 ---
 util/patricia_trie/src/node.rs | 69 --
 util/patricia_trie/src/node_codec.rs | 55 -
 util/patricia_trie/src/recorder.rs | 207 ----
 util/patricia_trie/src/sectriedb.rs | 100 --
 util/patricia_trie/src/sectriedbmut.rs | 110 --
 util/patricia_trie/src/triedb.rs | 628 -----------
 util/patricia_trie/src/triedbmut.rs | 1332 ------------------------
 util/plain_hasher/Cargo.toml | 13 -
 util/plain_hasher/benches/bench.rs | 49 -
 util/plain_hasher/src/lib.rs | 76 -
 util/rlp/Cargo.toml | 13 -
 util/rlp/LICENSE-APACHE2 | 201 ----
 util/rlp/LICENSE-MIT | 19 -
 util/rlp/README.md | 8 -
 util/rlp/benches/rlp.rs | 103 --
 util/rlp/license-header | 7 -
 util/rlp/src/error.rs | 49 -
 util/rlp/src/impls.rs | 285 -----
 util/rlp/src/lib.rs | 101 --
 util/rlp/src/rlpin.rs | 405 -------
 util/rlp/src/stream.rs | 380 -------
 util/rlp/src/traits.rs | 30 -
 util/rlp/tests/tests.rs | 425 --------
 util/rlp_compress/Cargo.toml | 2 +-
 util/rlp_derive/Cargo.toml | 2 +-
 util/trie-standardmap/Cargo.toml | 11 -
 util/trie-standardmap/src/lib.rs | 124 ---
 util/triehash-ethereum/Cargo.toml | 11 +
 util/triehash-ethereum/src/lib.rs | 87 ++
 util/triehash/Cargo.toml | 15 -
 util/triehash/benches/triehash.rs | 147 ---
 util/triehash/src/lib.rs | 376 -------
 util/version/Cargo.toml | 4 +-
 util/version/src/lib.rs | 2 +-
 whisper/Cargo.toml | 4 +-
 whisper/src/lib.rs | 2 +-
 134 files changed, 439 insertions(+), 10041 deletions(-)
 delete mode 100644 ethcore/crypto/Cargo.toml
 delete mode 100644 ethcore/crypto/README.md
 delete mode 100644 ethcore/crypto/src/aes.rs
 delete mode 100644 ethcore/crypto/src/aes_gcm.rs
 delete mode 100644 ethcore/crypto/src/digest.rs
 delete mode 100644 ethcore/crypto/src/error.rs
 delete mode 100644 ethcore/crypto/src/hmac.rs
 delete mode 100644 ethcore/crypto/src/lib.rs
 delete mode 100644 ethcore/crypto/src/pbkdf2.rs
 delete mode 100644 ethcore/crypto/src/scrypt.rs
 delete mode 100644 util/bytes/Cargo.toml
 delete mode 100644 util/bytes/src/lib.rs
 delete mode 100644 util/hash/Cargo.toml
 delete mode 100644 util/hash/benches/keccak_256.rs
 delete mode 100644 util/hash/src/lib.rs
 delete mode 100644 util/hashdb/Cargo.toml
 delete mode 100644 util/hashdb/src/lib.rs
 delete mode 100644 util/kvdb-memorydb/Cargo.toml
 delete mode 100644 util/kvdb-memorydb/src/lib.rs
 delete mode 100644 util/kvdb-rocksdb/Cargo.toml
 delete mode 100644 util/kvdb-rocksdb/src/lib.rs
 delete mode 100644 util/kvdb/Cargo.toml
 delete mode 100644 util/kvdb/src/lib.rs
 delete mode 100644 util/memorydb/Cargo.toml
 delete mode 100644 util/memorydb/benches/memdb.rs
 delete mode 100644 util/memorydb/src/lib.rs
 delete mode 100644 util/path/Cargo.toml
 delete mode 100644 util/path/src/lib.rs
 delete mode 100644 util/patricia_trie/Cargo.toml
 delete mode 100644 util/patricia_trie/benches/trie.rs
 delete mode 100644 util/patricia_trie/src/fatdb.rs
 delete mode 100644 util/patricia_trie/src/fatdbmut.rs
 delete mode 100644 util/patricia_trie/src/lib.rs
 delete mode 100644 util/patricia_trie/src/lookup.rs
 delete mode 100644 util/patricia_trie/src/nibbleslice.rs
 delete mode 100644 util/patricia_trie/src/nibblevec.rs
 delete mode 100644 util/patricia_trie/src/node.rs
 delete mode 100644 util/patricia_trie/src/node_codec.rs
 delete mode 100644 util/patricia_trie/src/recorder.rs
 delete mode 100644 util/patricia_trie/src/sectriedb.rs
 delete mode 100644 util/patricia_trie/src/sectriedbmut.rs
 delete mode 100644 util/patricia_trie/src/triedb.rs
 delete mode 100644
util/patricia_trie/src/triedbmut.rs delete mode 100644 util/plain_hasher/Cargo.toml delete mode 100644 util/plain_hasher/benches/bench.rs delete mode 100644 util/plain_hasher/src/lib.rs delete mode 100644 util/rlp/Cargo.toml delete mode 100644 util/rlp/LICENSE-APACHE2 delete mode 100644 util/rlp/LICENSE-MIT delete mode 100644 util/rlp/README.md delete mode 100644 util/rlp/benches/rlp.rs delete mode 100644 util/rlp/license-header delete mode 100644 util/rlp/src/error.rs delete mode 100644 util/rlp/src/impls.rs delete mode 100644 util/rlp/src/lib.rs delete mode 100644 util/rlp/src/rlpin.rs delete mode 100644 util/rlp/src/stream.rs delete mode 100644 util/rlp/src/traits.rs delete mode 100644 util/rlp/tests/tests.rs delete mode 100644 util/trie-standardmap/Cargo.toml delete mode 100644 util/trie-standardmap/src/lib.rs create mode 100644 util/triehash-ethereum/Cargo.toml create mode 100644 util/triehash-ethereum/src/lib.rs delete mode 100644 util/triehash/Cargo.toml delete mode 100644 util/triehash/benches/triehash.rs delete mode 100644 util/triehash/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index c791ae4a9..14340ae95 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -249,12 +249,12 @@ dependencies = [ name = "common-types" version = "0.1.0" dependencies = [ - "ethcore-bytes 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethjson 0.1.0", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", - "rlp 0.2.1", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rlp_derive 0.1.0", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -445,18 +445,6 @@ dependencies = [ "regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "env_logger" -version = "0.5.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "atty 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", - "humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "error-chain" version = "0.11.0" @@ -519,7 +507,7 @@ version = "1.12.0" dependencies = [ "crunchy 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -555,8 +543,6 @@ dependencies = [ "ethabi-derive 5.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethash 1.12.0", "ethcore-bloom-journal 0.1.0", - "ethcore-bytes 0.1.0", - "ethcore-crypto 0.1.0", "ethcore-io 1.12.0", "ethcore-logger 1.12.0", "ethcore-miner 1.12.0", @@ -569,30 +555,32 @@ dependencies = [ "evm 0.1.0", "fake-hardware-wallet 0.0.1", "hardware-wallet 1.12.0", - "hashdb 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", "heapsize 0.4.2 
(registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)", "journaldb 0.2.0", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "keccak-hasher 0.1.0", - "kvdb 0.1.0", - "kvdb-memorydb 0.1.0", - "kvdb-rocksdb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-memorydb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-rocksdb 0.1.0 (git+https://github.com/paritytech/parity-common)", "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "macros 0.1.0", "memory-cache 0.1.0", - "memorydb 0.2.0", + "memorydb 0.2.0 (git+https://github.com/paritytech/parity-common)", "num 0.1.42 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", "parity-machine 0.1.0", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "patricia-trie 0.2.0", + "patricia-trie 0.2.1 (git+https://github.com/paritytech/parity-common)", "patricia-trie-ethereum 0.1.0", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rlp_compress 0.1.0", "rlp_derive 0.1.0", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -601,8 +589,8 @@ dependencies = [ "stop-guard 0.1.0", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "trace-time 0.1.0", - "trie-standardmap 0.1.0", - "triehash 0.1.0", + "trie-standardmap 0.1.0 (git+https://github.com/paritytech/parity-common)", + "triehash-ethereum 0.2.0", "unexpected 0.1.0", "using_queue 0.1.0", "vm 0.1.0", @@ -616,21 +604,6 @@ dependencies = [ "siphasher 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "ethcore-bytes" -version = "0.1.0" - -[[package]] -name = "ethcore-crypto" -version = "0.1.0" -dependencies = [ - "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "ring 0.12.1 (git+https://github.com/paritytech/ring)", - "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", - "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "ethcore-devtools" version = "1.12.0" @@ -657,35 +630,35 @@ dependencies = [ "bincode 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", "ethcore-io 1.12.0", "ethcore-network 1.12.0", "ethcore-transaction 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "hashdb 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 
(git+https://github.com/paritytech/parity-common)", "keccak-hasher 0.1.0", - "kvdb 0.1.0", - "kvdb-memorydb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-memorydb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "memory-cache 0.1.0", - "memorydb 0.2.0", + "memorydb 0.2.0 (git+https://github.com/paritytech/parity-common)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "patricia-trie 0.2.0", + "patricia-trie 0.2.1 (git+https://github.com/paritytech/parity-common)", "patricia-trie-ethereum 0.1.0", - "plain_hasher 0.2.0", + "plain_hasher 0.1.0 (git+https://github.com/paritytech/parity-common)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rlp_derive 0.1.0", "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "stats 0.1.0", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "triehash 0.1.0", + "triehash-ethereum 0.2.0", "vm 0.1.0", ] @@ -720,13 +693,13 @@ dependencies = [ "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.11.24 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "linked-hash-map 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "parity-reactor 0.1.0", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "price-info 1.12.0", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "trace-time 0.1.0", "transaction-pool 1.12.1", @@ -739,13 +712,13 @@ version = "1.12.0" dependencies = [ "assert_matches 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ethcore-crypto 0.1.0", "ethcore-io 1.12.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethkey 0.3.0", "ipnetwork 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "snappy 0.1.0 (git+https://github.com/paritytech/rust-snappy)", ] @@ -757,8 +730,6 @@ dependencies = [ "assert_matches 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "bytes 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ethcore-bytes 0.1.0", - "ethcore-crypto 0.1.0", "ethcore-io 1.12.0", "ethcore-logger 1.12.0", "ethcore-network 1.12.0", @@ -766,14 +737,16 @@ dependencies = [ "ethkey 0.3.0", "igd 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "ipnetwork 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + 
"keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "mio 0.6.14 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "path 0.1.1", + "path 0.1.1 (git+https://github.com/paritytech/parity-common)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", @@ -794,8 +767,6 @@ dependencies = [ "ethabi-contract 5.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi-derive 5.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", - "ethcore-crypto 0.1.0", "ethcore-io 1.12.0", "ethcore-logger 1.12.0", "ethcore-miner 1.12.0", @@ -805,13 +776,15 @@ dependencies = [ "ethkey 0.3.0", "fetch 0.1.0", "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "patricia-trie 0.2.0", + "patricia-trie 0.2.1 (git+https://github.com/paritytech/parity-common)", "patricia-trie-ethereum 0.1.0", "rand 0.3.20 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rlp_derive 0.1.0", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", @@ -830,8 +803,6 @@ dependencies = [ "ethabi-contract 5.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi-derive 5.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", - "ethcore-crypto 0.1.0", "ethcore-logger 1.12.0", "ethcore-sync 1.12.0", "ethcore-transaction 0.1.0", @@ -840,11 +811,13 @@ dependencies = [ "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.11.24 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", - "kvdb 0.1.0", - "kvdb-rocksdb 0.1.0", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-rocksdb 0.1.0 (git+https://github.com/paritytech/parity-common)", "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hex 
1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", @@ -870,8 +843,8 @@ dependencies = [ "ethcore-io 1.12.0", "ethcore-private-tx 1.0.0", "ethcore-sync 1.12.0", - "kvdb 0.1.0", - "kvdb-rocksdb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-rocksdb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "stop-guard 0.1.0", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -888,7 +861,7 @@ dependencies = [ "jsonrpc-core 8.0.1 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", "jsonrpc-macros 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", "jsonrpc-tcp-server 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-core 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", @@ -901,7 +874,6 @@ version = "1.12.0" dependencies = [ "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", "ethcore-io 1.12.0", "ethcore-light 1.12.0", "ethcore-network 1.12.0", @@ -910,24 +882,25 @@ dependencies = [ "ethcore-transaction 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethkey 0.3.0", - "hashdb 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "ipnetwork 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "keccak-hasher 0.1.0", - "kvdb 0.1.0", - "kvdb-memorydb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-memorydb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "macros 0.1.0", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "plain_hasher 0.2.0", + "plain_hasher 0.1.0 (git+https://github.com/paritytech/parity-common)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "smallvec 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "trace-time 0.1.0", - "triehash 0.1.0", + "triehash-ethereum 0.2.0", ] [[package]] @@ -939,8 +912,8 @@ dependencies = [ "ethkey 0.3.0", "evm 0.1.0", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", - "rlp 0.2.1", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "unexpected 0.1.0", ] @@ -985,11 +958,11 @@ dependencies = [ "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "edit-distance 2.0.0 
(registry+https://github.com/rust-lang/crates.io-index)", "eth-secp256k1 0.5.7 (git+https://github.com/paritytech/rust-secp256k1)", - "ethcore-crypto 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "mem 0.1.0", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", "parity-wordlist 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1019,13 +992,13 @@ name = "ethstore" version = "0.2.0" dependencies = [ "dir 0.1.1", - "ethcore-crypto 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethkey 0.3.0", "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", "parity-wordlist 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1062,7 +1035,7 @@ dependencies = [ "bit-set 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "memory-cache 0.1.0", @@ -1077,12 +1050,12 @@ version = "0.1.0" dependencies = [ "docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", "ethcore-transaction 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethjson 0.1.0", "evm 0.1.0", "panic_hook 0.1.0", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "pretty_assertions 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1250,6 +1223,7 @@ dependencies = [ [[package]] name = "hashdb" version = "0.2.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1290,14 +1264,6 @@ name = "httparse" version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "humantime" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "hyper" version = "0.11.24" @@ -1406,20 +1372,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" name = "journaldb" version = "0.2.0" 
dependencies = [ - "ethcore-bytes 0.1.0", "ethcore-logger 1.12.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "hashdb 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "keccak-hasher 0.1.0", - "kvdb 0.1.0", - "kvdb-memorydb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-memorydb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "memorydb 0.2.0", + "memorydb 0.2.0 (git+https://github.com/paritytech/parity-common)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "plain_hasher 0.2.0", - "rlp 0.2.1", + "plain_hasher 0.1.0 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -1521,9 +1487,9 @@ dependencies = [ [[package]] name = "keccak-hash" version = "0.1.2" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1532,8 +1498,8 @@ name = "keccak-hasher" version = "0.1.0" dependencies = [ "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "hashdb 0.2.0", - "plain_hasher 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", + "plain_hasher 0.1.0 (git+https://github.com/paritytech/parity-common)", "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -1549,34 +1515,36 @@ dependencies = [ [[package]] name = "kvdb" version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ethcore-bytes 0.1.0", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", ] [[package]] name = "kvdb-memorydb" version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ - "kvdb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "kvdb-rocksdb" version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "fs-swap 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "interleaved-ordered 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "kvdb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", 
"rocksdb 0.4.5 (git+https://github.com/paritytech/rust-rocksdb)", - "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -1736,24 +1704,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "memorydb" version = "0.2.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "hashdb 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", - "keccak-hasher 0.1.0", - "plain_hasher 0.2.0", - "rlp 0.2.1", - "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", + "plain_hasher 0.1.0 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", ] [[package]] name = "migration-rocksdb" version = "0.1.0" dependencies = [ - "kvdb 0.1.0", - "kvdb-rocksdb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-rocksdb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "macros 0.1.0", "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1911,7 +1876,7 @@ dependencies = [ "ethcore-network 1.12.0", "ethcore-network-devp2p 1.12.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "kvdb-memorydb 0.1.0", + "kvdb-memorydb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "lru-cache 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2076,7 +2041,6 @@ dependencies = [ "docopt 0.8.3 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", "ethcore-io 1.12.0", "ethcore-light 1.12.0", "ethcore-logger 1.12.0", @@ -2096,9 +2060,9 @@ dependencies = [ "ipnetwork 0.12.7 (registry+https://github.com/rust-lang/crates.io-index)", "journaldb 0.2.0", "jsonrpc-core 8.0.1 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", - "keccak-hash 0.1.2", - "kvdb 0.1.0", - "kvdb-rocksdb 0.1.0", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-rocksdb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "mem 0.1.0", "migration-rocksdb 0.1.0", @@ -2107,6 +2071,7 @@ dependencies = [ "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "number_prefix 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)", "panic_hook 0.1.0", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "parity-dapps 1.12.0", "parity-hash-fetch 1.12.0", "parity-ipfs-api 1.12.0", @@ -2118,11 +2083,11 @@ dependencies = [ "parity-version 1.12.0", "parity-whisper 0.1.0", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "path 0.1.1", + "path 0.1.1 (git+https://github.com/paritytech/parity-common)", "pretty_assertions 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", 
"regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "registrar 0.0.1", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rpassword 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "rpc-cli 1.4.0", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2138,6 +2103,11 @@ dependencies = [ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "parity-bytes" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" + [[package]] name = "parity-clib" version = "1.12.0" @@ -2145,13 +2115,24 @@ dependencies = [ "parity 1.12.0", ] +[[package]] +name = "parity-crypto" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" +dependencies = [ + "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "ring 0.12.1 (git+https://github.com/paritytech/ring)", + "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", + "tiny-keccak 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "parity-dapps" version = "1.12.0" dependencies = [ "base32 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", - "ethcore-bytes 0.1.0", "ethcore-devtools 1.12.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "fetch 0.1.0", @@ -2160,11 +2141,12 @@ dependencies = [ "itertools 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 8.0.1 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", "jsonrpc-http-server 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "linked-hash-map 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.0-alpha.2 (registry+https://github.com/rust-lang/crates.io-index)", "node-health 0.1.0", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "parity-dapps-glue 1.9.1", "parity-hash-fetch 1.12.0", "parity-reactor 0.1.0", @@ -2201,17 +2183,17 @@ dependencies = [ "ethabi 5.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi-contract 5.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi-derive 5.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "ethcore-bytes 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "fake-fetch 0.0.1", "fetch 0.1.0", "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", "futures-cpupool 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "hyper 0.11.24 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "mime 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", "mime_guess 2.0.0-alpha.2 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "parity-reactor 0.1.0", "parking_lot 0.6.2 
(registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2225,12 +2207,12 @@ version = "1.12.0" dependencies = [ "cid 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 8.0.1 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", "jsonrpc-http-server 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", "multihash 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "unicase 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2242,10 +2224,10 @@ dependencies = [ "ethcore-io 1.12.0", "ethcore-transaction 0.1.0", "ethkey 0.3.0", - "kvdb 0.1.0", - "kvdb-memorydb 0.1.0", + "kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)", + "kvdb-memorydb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2274,8 +2256,6 @@ dependencies = [ "cid 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", "ethash 1.12.0", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", - "ethcore-crypto 0.1.0", "ethcore-devtools 1.12.0", "ethcore-io 1.12.0", "ethcore-light 1.12.0", @@ -2302,21 +2282,23 @@ dependencies = [ "jsonrpc-macros 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", "jsonrpc-pubsub 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", "jsonrpc-ws-server 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", - "keccak-hash 0.1.2", - "kvdb-memorydb 0.1.0", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", + "kvdb-memorydb 0.1.0 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "macros 0.1.0", "multihash 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "node-health 0.1.0", "order-stat 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", "parity-reactor 0.1.0", "parity-updater 1.12.0", "parity-version 1.12.0", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "patricia-trie 0.2.0", + "patricia-trie 0.2.1 (git+https://github.com/paritytech/parity-common)", "pretty_assertions 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rustc-hex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2338,7 +2320,7 @@ dependencies = [ "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", "jsonrpc-core 8.0.1 
(git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", "jsonrpc-ws-server 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "parity-rpc 1.12.0", @@ -2373,17 +2355,17 @@ dependencies = [ "ethabi-contract 5.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi-derive 5.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethcore 1.12.0", - "ethcore-bytes 0.1.0", "ethcore-sync 1.12.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "matches 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "parity-hash-fetch 1.12.0", "parity-version 1.12.0", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", - "path 0.1.1", + "path 0.1.1 (git+https://github.com/paritytech/parity-common)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", "semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", "target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2394,8 +2376,8 @@ dependencies = [ name = "parity-version" version = "1.12.0" dependencies = [ - "ethcore-bytes 0.1.0", - "rlp 0.2.1", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "rustc_version 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2416,7 +2398,6 @@ version = "0.1.0" dependencies = [ "bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)", "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", - "ethcore-crypto 0.1.0", "ethcore-network 1.12.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethkey 0.3.0", @@ -2427,9 +2408,10 @@ dependencies = [ "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", "mem 0.1.0", "ordered-float 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", + "parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)", "parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "serde 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.37 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.9 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2475,29 +2457,22 @@ dependencies = [ [[package]] name = "path" version = "0.1.1" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "dirs 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "patricia-trie" -version = "0.2.0" +version = "0.2.1" +source = 
"git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", - "env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)", - "ethcore-bytes 0.1.0", - "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "hashdb 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", "heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", - "keccak-hasher 0.1.0", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "memorydb 0.2.0", - "patricia-trie-ethereum 0.1.0", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", - "trie-standardmap 0.1.0", - "triehash 0.1.0", ] [[package]] @@ -2505,12 +2480,12 @@ name = "patricia-trie-ethereum" version = "0.1.0" dependencies = [ "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", - "ethcore-bytes 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "hashdb 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", "keccak-hasher 0.1.0", - "patricia-trie 0.2.0", - "rlp 0.2.1", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "patricia-trie 0.2.1 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -2564,11 +2539,12 @@ dependencies = [ [[package]] name = "plain_hasher" -version = "0.2.0" +version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "crunchy 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "hashdb 0.2.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -2793,31 +2769,11 @@ dependencies = [ "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "regex" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "aho-corasick 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)", - "thread_local 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "utf8-ranges 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "regex-syntax" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "regex-syntax" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "registrar" version = "0.0.1" @@ -2826,7 +2782,7 @@ dependencies = [ "ethabi-contract 5.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethabi-derive 5.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "futures 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -2853,6 +2809,7 @@ dependencies = [ [[package]] name = "rlp" version 
= "0.2.1" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -2866,7 +2823,7 @@ version = "0.1.0" dependencies = [ "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -2874,7 +2831,7 @@ name = "rlp_derive" version = "0.1.0" dependencies = [ "quote 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", "syn 0.13.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -3266,14 +3223,6 @@ dependencies = [ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", ] -[[package]] -name = "termcolor" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "termion" version = "1.5.1" @@ -3588,28 +3537,33 @@ dependencies = [ [[package]] name = "trie-standardmap" version = "0.1.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ - "ethcore-bytes 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", - "rlp 0.2.1", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", ] [[package]] name = "triehash" -version = "0.1.0" +version = "0.2.0" +source = "git+https://github.com/paritytech/parity-common#d322bcebd2303306a8f259ea38026598a5b439c0" dependencies = [ "elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", - "keccak-hash 0.1.2", - "rlp 0.2.1", - "trie-standardmap 0.1.0", + "hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", ] [[package]] -name = "ucd-util" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" +name = "triehash-ethereum" +version = "0.2.0" +dependencies = [ + "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", + "keccak-hasher 0.1.0", + "triehash 0.2.0 (git+https://github.com/paritytech/parity-common)", +] [[package]] name = "uint" @@ -3740,14 +3694,14 @@ version = "0.1.0" dependencies = [ "byteorder 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", "common-types 0.1.0", - "ethcore-bytes 0.1.0", "ethereum-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)", "ethjson 0.1.0", - "keccak-hash 0.1.2", + "keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)", "log 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)", - "patricia-trie 0.2.0", + "parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)", + "patricia-trie 0.2.1 (git+https://github.com/paritytech/parity-common)", "patricia-trie-ethereum 0.1.0", - "rlp 0.2.1", + "rlp 0.2.1 (git+https://github.com/paritytech/parity-common)", ] [[package]] @@ -3846,14 +3800,6 @@ name = 
"winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -[[package]] -name = "wincolor" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "ws" version = "0.7.5" @@ -3964,7 +3910,6 @@ dependencies = [ "checksum either 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "740178ddf48b1a9e878e6d6509a1442a2d42fd2928aae8e7a6f8a36fb01981b3" "checksum elastic-array 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "88d4851b005ef16de812ea9acdb7bece2f0a40dd86c07b85631d7dafa54537bb" "checksum env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3ddf21e73e016298f5cb37d6ef8e8da8e39f91f9ec8b0df44b7deb16a9f8cd5b" -"checksum env_logger 0.5.10 (registry+https://github.com/rust-lang/crates.io-index)" = "0e6e40ebb0e66918a37b38c7acab4e10d299e0463fe2af5d29b9cc86710cfd2a" "checksum error-chain 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ff511d5dc435d703f4971bc399647c9bc38e20cb41452e3b9feb4765419ed3f3" "checksum error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "07e791d3be96241c77c43846b665ef1384606da2cd2a48730abe606a12906e02" "checksum eth-secp256k1 0.5.7 (git+https://github.com/paritytech/rust-secp256k1)" = "" @@ -3989,12 +3934,12 @@ dependencies = [ "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb" "checksum globset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "464627f948c3190ae3d04b1bc6d7dca2f785bda0ac01278e6db129ad383dbeb6" "checksum hamming 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65043da274378d68241eb9a8f8f8aa54e349136f7b8e12f63e3ef44043cc30e1" +"checksum hashdb 0.2.0 (git+https://github.com/paritytech/parity-common)" = "" "checksum heapsize 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1679e6ea370dee694f91f1dc469bf94cf8f52051d147aec3e1f9497c6fc22461" "checksum heck 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ea04fa3ead4e05e51a7c806fc07271fdbde4e246a6c6d1efd52e72230b771b82" "checksum hex 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d6a22814455d41612f41161581c2883c0c6a1c41852729b17d5ed88f01e153aa" "checksum hidapi 0.3.1 (git+https://github.com/paritytech/hidapi-rs)" = "" "checksum httparse 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "af2f2dd97457e8fb1ae7c5a420db346af389926e36f43768b96f101546b04a07" -"checksum humantime 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0484fda3e7007f2a4a0d9c3a703ca38c71c54c55602ce4660c419fd32e188c9e" "checksum hyper 0.11.24 (registry+https://github.com/rust-lang/crates.io-index)" = "df4dd5dae401458087396b6db7fabc4d6760aa456a5fa8e92bda549f39cae661" "checksum hyper-rustls 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d6cdc1751771a14b8175764394f025e309a28c825ed9eaf97fa62bb831dc8c5" "checksum idna 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "014b298351066f1512874135335d62a789ffe78a9974f94b43ed5621951eaf7d" @@ -4013,7 +3958,11 @@ dependencies = [ "checksum jsonrpc-server-utils 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)" = "" "checksum jsonrpc-tcp-server 8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)" = "" "checksum jsonrpc-ws-server 
8.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.11)" = "" +"checksum keccak-hash 0.1.2 (git+https://github.com/paritytech/parity-common)" = "" "checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum kvdb 0.1.0 (git+https://github.com/paritytech/parity-common)" = "" +"checksum kvdb-memorydb 0.1.0 (git+https://github.com/paritytech/parity-common)" = "" +"checksum kvdb-rocksdb 0.1.0 (git+https://github.com/paritytech/parity-common)" = "" "checksum language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a91d884b6667cd606bb5a69aa0c99ba811a115fc68915e7056ec08a46e93199a" "checksum lazy_static 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "76f033c7ad61445c5b347c7382dd1237847eb1bce590fe50365dcb33d546be73" "checksum lazy_static 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c8f31047daa365f19be14b47c29df4f7c3b581832407daabe6ae77397619237d" @@ -4034,6 +3983,7 @@ dependencies = [ "checksum memmap 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e2ffa2c986de11a9df78620c01eeaaf27d94d3ff02bf81bfcca953102dd0c6ff" "checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3" "checksum memory_units 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71d96e3f3c0b6325d8ccd83c33b28acb183edcb6c67938ba104ec546854b0882" +"checksum memorydb 0.2.0 (git+https://github.com/paritytech/parity-common)" = "" "checksum mime 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "e3d709ffbb330e1566dc2f2a3c9b58a5ad4a381f740b810cd305dc3f089bc160" "checksum mime_guess 2.0.0-alpha.2 (registry+https://github.com/rust-lang/crates.io-index)" = "27a5e6679a0614e25adc14c6434ba84e41632b765a6d9cb2031a0cca682699ae" "checksum miniz_oxide 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "aaa2d3ad070f428fffbd7d3ca2ea20bb0d8cffe9024405c44e1840bc1418b398" @@ -4063,17 +4013,22 @@ dependencies = [ "checksum ordered-float 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "58d25b6c0e47b20d05226d288ff434940296e7e2f8b877975da32f862152241f" "checksum ordermap 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a86ed3f5f244b372d6b1a00b72ef7f8876d0bc6a78a4c9985c53614041512063" "checksum owning_ref 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "cdf84f41639e037b484f93433aa3897863b561ed65c6e59c7073d7c561710f37" +"checksum parity-bytes 0.1.0 (git+https://github.com/paritytech/parity-common)" = "" +"checksum parity-crypto 0.1.0 (git+https://github.com/paritytech/parity-common)" = "" "checksum parity-tokio-ipc 0.1.5 (git+https://github.com/nikvolf/parity-tokio-ipc)" = "" "checksum parity-wasm 0.31.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e1c91199d14bd5b78ecade323d4a891d094799749c1b9e82d9c590c2e2849a40" "checksum parity-wordlist 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1d0dec124478845b142f68b446cbee953d14d4b41f1bc0425024417720dce693" "checksum parking_lot 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "901d6514273469bb17380c1ac3f51fb3ce54be1f960e51a6f04901eba313ab8d" "checksum parking_lot_core 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4f610cb9664da38e417ea3225f23051f589851999535290e077939838ab7a595" +"checksum path 0.1.1 (git+https://github.com/paritytech/parity-common)" = "" +"checksum 
patricia-trie 0.2.1 (git+https://github.com/paritytech/parity-common)" = "" "checksum percent-encoding 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "de154f638187706bde41d9b4738748933d64e6b37bdbffc0b47a97d16a6ae356" "checksum petgraph 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "8b30dc85588cd02b9b76f5e386535db546d21dc68506cff2abebee0b6445e8e4" "checksum phf 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "cb325642290f28ee14d8c6201159949a872f220c62af6e110a56ea914fbe42fc" "checksum phf_codegen 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "d62594c0bb54c464f633175d502038177e90309daf2e0158be42ed5f023ce88f" "checksum phf_generator 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "6b07ffcc532ccc85e3afc45865469bf5d9e4ef5bfcf9622e3cfe80c2d275ec03" "checksum phf_shared 0.7.21 (registry+https://github.com/rust-lang/crates.io-index)" = "07e24b0ca9643bdecd0632f2b3da6b1b89bbb0030e0b992afc1113b23a7bc2f2" +"checksum plain_hasher 0.1.0 (git+https://github.com/paritytech/parity-common)" = "" "checksum podio 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e5422a1ee1bc57cc47ae717b0137314258138f38fd5f3cea083f43a9725383a0" "checksum pretty_assertions 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2412f3332a07c7a2a50168988dcc184f32180a9758ad470390e5f55e089f6b6e" "checksum primal 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "0e31b86efadeaeb1235452171a66689682783149a6249ff334a2c5d8218d00a4" @@ -4097,11 +4052,10 @@ dependencies = [ "checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1" "checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" "checksum regex 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "744554e01ccbd98fff8c457c3b092cd67af62a555a43bfe97ae8a0451f7799fa" -"checksum regex 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "13c93d55961981ba9226a213b385216f83ab43bd6ac53ab16b2eeb47e337cf4e" "checksum regex-syntax 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ad890a5eef7953f55427c50575c680c42841653abd2b028b68cd223d157f62db" -"checksum regex-syntax 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05b06a75f5217880fc5e905952a42750bf44787e56a6c6d6852ed0992f5e1d54" "checksum relay 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1576e382688d7e9deecea24417e350d3062d97e32e45d70b1cde65994ff1489a" "checksum ring 0.12.1 (git+https://github.com/paritytech/ring)" = "" +"checksum rlp 0.2.1 (git+https://github.com/paritytech/parity-common)" = "" "checksum rocksdb 0.4.5 (git+https://github.com/paritytech/rust-rocksdb)" = "" "checksum rocksdb-sys 0.3.0 (git+https://github.com/paritytech/rust-rocksdb)" = "" "checksum rpassword 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b273c91bd242ca03ad6d71c143b6f17a48790e61f21a6c78568fa2b6774a24a4" @@ -4149,7 +4103,6 @@ dependencies = [ "checksum tempfile 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "11ce2fe9db64b842314052e2421ac61a73ce41b898dc8e3750398b219c5fc1e0" "checksum term 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "fa63644f74ce96fbeb9b794f66aff2a52d601cbd5e80f4b97123e3899f4570f1" "checksum term_size 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = 
"9e5b9a66db815dcfd2da92db471106457082577c3c278d4138ab3e3b4e189327" -"checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83" "checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096" "checksum textwrap 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c0b59b6b4b44d867f1370ef1bd91bfb262bf07bf0ae65c202ea2fbc16153b693" "checksum thread-id 3.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "c7fbf4c9d56b320106cd64fd024dadfa0be7cb4706725fc44a7d7ce952d820c1" @@ -4177,7 +4130,8 @@ dependencies = [ "checksum toml 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)" = "a7540f4ffc193e0d3c94121edb19b055670d369f77d5804db11ae053a45b6e7e" "checksum transient-hashmap 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "715254c8f0811be1a79ad3ea5e6fa3c8eddec2b03d7f5ba78cf093e56d79c24f" "checksum trezor-sys 1.0.0 (git+https://github.com/paritytech/trezor-sys)" = "" -"checksum ucd-util 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "fd2be2d6639d0f8fe6cdda291ad456e23629558d466e2789d2c3e9892bda285d" +"checksum trie-standardmap 0.1.0 (git+https://github.com/paritytech/parity-common)" = "" +"checksum triehash 0.2.0 (git+https://github.com/paritytech/parity-common)" = "" "checksum uint 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "38051a96565903d81c9a9210ce11076b2218f3b352926baa1f5f6abbdfce8273" "checksum unicase 1.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7f4765f83163b74f957c797ad9253caf97f103fb064d3999aea9568d09fc8a33" "checksum unicase 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "284b6d3db520d67fbe88fd778c21510d1b0ba4a551e5d0fbb023d33405f6de8a" @@ -4204,7 +4158,6 @@ dependencies = [ "checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -"checksum wincolor 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "eeb06499a3a4d44302791052df005d5232b927ed1a9658146d842165c4de7767" "checksum ws 0.7.5 (git+https://github.com/tomusdrw/ws-rs)" = "" "checksum ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e" "checksum xdg 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a66b7c2281ebde13cf4391d70d4c7e5946c3c25e72a7b859ca8f677dcd0b0c61" diff --git a/Cargo.toml b/Cargo.toml index 0a92f4e07..a92fe35eb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -33,7 +33,7 @@ fdlimit = "0.1" ctrlc = { git = "https://github.com/paritytech/rust-ctrlc.git" } jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } ethcore = { path = "ethcore", features = ["work-notify", "price-info", "stratum"] } -ethcore-bytes = { path = "util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethcore-io = { path = "util/io" } ethcore-light = { path = "ethcore/light" } ethcore-logger = { path = "logger" } @@ -47,7 +47,7 @@ 
ethereum-types = "0.3" node-filter = { path = "ethcore/node_filter" } ethkey = { path = "ethkey" } node-health = { path = "dapps/node-health" } -rlp = { path = "util/rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } rpc-cli = { path = "rpc_cli" } parity-hash-fetch = { path = "hash-fetch" } parity-ipfs-api = { path = "ipfs" } @@ -58,13 +58,13 @@ parity-rpc-client = { path = "rpc_client" } parity-updater = { path = "updater" } parity-version = { path = "util/version" } parity-whisper = { path = "whisper" } -path = { path = "util/path" } +path = { git = "https://github.com/paritytech/parity-common" } dir = { path = "util/dir" } panic_hook = { path = "util/panic_hook" } -keccak-hash = { path = "util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } migration-rocksdb = { path = "util/migration-rocksdb" } -kvdb = { path = "util/kvdb" } -kvdb-rocksdb = { path = "util/kvdb-rocksdb" } +kvdb = { git = "https://github.com/paritytech/parity-common" } +kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common" } journaldb = { path = "util/journaldb" } mem = { path = "util/mem" } @@ -137,6 +137,7 @@ members = [ "transaction-pool", "whisper", "whisper/cli", + "util/triehash-ethereum", "util/keccak-hasher", "util/patricia-trie-ethereum", ] diff --git a/dapps/Cargo.toml b/dapps/Cargo.toml index 477ad9b06..b32fdb4a3 100644 --- a/dapps/Cargo.toml +++ b/dapps/Cargo.toml @@ -27,14 +27,14 @@ itertools = "0.5" jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } -ethcore-bytes = { path = "../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethereum-types = "0.3" fetch = { path = "../util/fetch" } node-health = { path = "./node-health" } parity-dapps-glue = { path = "./js-glue" } parity-hash-fetch = { path = "../hash-fetch" } parity-reactor = { path = "../util/reactor" } -keccak-hash = { path = "../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } parity-version = { path = "../util/version" } registrar = { path = "../registrar" } diff --git a/dapps/src/lib.rs b/dapps/src/lib.rs index 12a6a8050..4fcf9740d 100644 --- a/dapps/src/lib.rs +++ b/dapps/src/lib.rs @@ -32,7 +32,7 @@ extern crate zip; extern crate jsonrpc_http_server; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethereum_types; extern crate fetch; extern crate node_health; diff --git a/ethash/Cargo.toml b/ethash/Cargo.toml index 61b3fa504..df0f17e0f 100644 --- a/ethash/Cargo.toml +++ b/ethash/Cargo.toml @@ -7,7 +7,7 @@ authors = ["Parity Technologies "] [dependencies] log = "0.3" -keccak-hash = { path = "../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } primal = "0.2.3" parking_lot = "0.6" crunchy = "0.1.0" diff --git a/ethcore/Cargo.toml b/ethcore/Cargo.toml index 9c25cdccb..8a3ce5a6d 100644 --- a/ethcore/Cargo.toml +++ b/ethcore/Cargo.toml @@ -15,12 +15,12 @@ common-types = { path = "types" } crossbeam = "0.3" ethash = { path = "../ethash" } ethcore-bloom-journal = { path = "../util/bloom" } -ethcore-bytes = { path = "../util/bytes" } -hashdb = { path = "../util/hashdb" } -memorydb = { path = "../util/memorydb" } -patricia-trie = { path = "../util/patricia_trie" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } +hashdb = { git = "https://github.com/paritytech/parity-common" } +memorydb = 
{ git = "https://github.com/paritytech/parity-common" } +patricia-trie = { git = "https://github.com/paritytech/parity-common" } patricia-trie-ethereum = { path = "../util/patricia-trie-ethereum" } -ethcore-crypto = { path = "crypto" } +parity-crypto = { git = "https://github.com/paritytech/parity-common" } error-chain = { version = "0.12", default-features = false } ethcore-io = { path = "../util/io" } ethcore-logger = { path = "../logger" } @@ -47,11 +47,11 @@ parity-machine = { path = "../machine" } parking_lot = "0.6" rayon = "1.0" rand = "0.4" -rlp = { path = "../util/rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } rlp_compress = { path = "../util/rlp_compress" } rlp_derive = { path = "../util/rlp_derive" } -kvdb = { path = "../util/kvdb" } -kvdb-memorydb = { path = "../util/kvdb-memorydb" } +kvdb = { git = "https://github.com/paritytech/parity-common" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common" } snappy = { git = "https://github.com/paritytech/rust-snappy" } stop-guard = { path = "../util/stop-guard" } macros = { path = "../util/macros" } @@ -61,12 +61,12 @@ trace-time = { path = "../util/trace-time" } using_queue = { path = "../util/using_queue" } vm = { path = "vm" } wasm = { path = "wasm" } -keccak-hash = { path = "../util/hash" } -triehash = { path = "../util/triehash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } +triehash-ethereum = { version = "0.2", path = "../util/triehash-ethereum" } unexpected = { path = "../util/unexpected" } journaldb = { path = "../util/journaldb" } keccak-hasher = { path = "../util/keccak-hasher" } -kvdb-rocksdb = { path = "../util/kvdb-rocksdb" } +kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common" } tempdir = {version="0.3", optional = true} [target.'cfg(any(target_os = "linux", target_os = "macos", target_os = "windows", target_os = "android"))'.dependencies] @@ -77,7 +77,7 @@ fake-hardware-wallet = { path = "../util/fake-hardware-wallet" } [dev-dependencies] tempdir = "0.3" -trie-standardmap = { path = "../util/trie-standardmap" } +trie-standardmap = { git = "https://github.com/paritytech/parity-common" } [features] # Display EVM debug traces. diff --git a/ethcore/benches/evm.rs b/ethcore/benches/evm.rs index c68adc987..46e5bfb30 100644 --- a/ethcore/benches/evm.rs +++ b/ethcore/benches/evm.rs @@ -20,7 +20,7 @@ extern crate test; extern crate ethcore_util as util; extern crate rand; extern crate bn; -extern crate ethcore_crypto; +extern crate parity_crypto; extern crate ethkey; extern crate rustc_hex; extern crate ethcore_bigint; @@ -60,7 +60,7 @@ fn bn_128_mul(b: &mut Bencher) { #[bench] fn sha256(b: &mut Bencher) { - use ethcore_crypto::digest::sha256; + use parity_crypto::digest::sha256; let mut input: [u8; 256] = [0; 256]; let mut out = [0; 32]; diff --git a/ethcore/crypto/Cargo.toml b/ethcore/crypto/Cargo.toml deleted file mode 100644 index c882d0d09..000000000 --- a/ethcore/crypto/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "ethcore-crypto" -version = "0.1.0" -authors = ["Parity Technologies "] - -[dependencies] -ethereum-types = "0.3" -quick-error = "1.2.2" -ring = "0.12" -rust-crypto = "0.2.36" -tiny-keccak = "1.4" - diff --git a/ethcore/crypto/README.md b/ethcore/crypto/README.md deleted file mode 100644 index 130d27f3c..000000000 --- a/ethcore/crypto/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# Ethcrypto - -General cryptographic utilities for Ethereum. 
- -By default, this library is compiled with the `secp256k1` feature, which provides ECDH and ECIES capability on that curve. It can be compiled without to avoid a dependency on the `libsecp256k1` library. diff --git a/ethcore/crypto/src/aes.rs b/ethcore/crypto/src/aes.rs deleted file mode 100644 index 42a26fad0..000000000 --- a/ethcore/crypto/src/aes.rs +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use error::SymmError; -use rcrypto::blockmodes::{CtrMode, CbcDecryptor, PkcsPadding}; -use rcrypto::aessafe::{AesSafe128Encryptor, AesSafe128Decryptor}; -use rcrypto::symmetriccipher::{Encryptor, Decryptor}; -use rcrypto::buffer::{RefReadBuffer, RefWriteBuffer, WriteBuffer}; - -/// Encrypt a message (CTR mode). -/// -/// Key (`k`) length and initialisation vector (`iv`) length have to be 16 bytes each. -/// An error is returned if the input lengths are invalid. -pub fn encrypt_128_ctr(k: &[u8], iv: &[u8], plain: &[u8], dest: &mut [u8]) -> Result<(), SymmError> { - let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec()); - encryptor.encrypt(&mut RefReadBuffer::new(plain), &mut RefWriteBuffer::new(dest), true)?; - Ok(()) -} - -/// Decrypt a message (CTR mode). -/// -/// Key (`k`) length and initialisation vector (`iv`) length have to be 16 bytes each. -/// An error is returned if the input lengths are invalid. -pub fn decrypt_128_ctr(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) -> Result<(), SymmError> { - let mut encryptor = CtrMode::new(AesSafe128Encryptor::new(k), iv.to_vec()); - encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut RefWriteBuffer::new(dest), true)?; - Ok(()) -} - -/// Decrypt a message (CBC mode). -/// -/// Key (`k`) length and initialisation vector (`iv`) length have to be 16 bytes each. -/// An error is returned if the input lengths are invalid. -pub fn decrypt_128_cbc(k: &[u8], iv: &[u8], encrypted: &[u8], dest: &mut [u8]) -> Result { - let mut encryptor = CbcDecryptor::new(AesSafe128Decryptor::new(k), PkcsPadding, iv.to_vec()); - let len = dest.len(); - let mut buffer = RefWriteBuffer::new(dest); - encryptor.decrypt(&mut RefReadBuffer::new(encrypted), &mut buffer, true)?; - Ok(len - buffer.remaining()) -} diff --git a/ethcore/crypto/src/aes_gcm.rs b/ethcore/crypto/src/aes_gcm.rs deleted file mode 100644 index 819c61319..000000000 --- a/ethcore/crypto/src/aes_gcm.rs +++ /dev/null @@ -1,198 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use error::SymmError; -use ring; - -enum Mode { Aes128Gcm, Aes256Gcm } - -/// AES GCM encryptor. -pub struct Encryptor<'a> { - mode: Mode, - key: ring::aead::SealingKey, - ad: &'a [u8], - offset: usize, -} - -impl<'a> Encryptor<'a> { - pub fn aes_128_gcm(key: &[u8; 16]) -> Result, SymmError> { - let sk = ring::aead::SealingKey::new(&ring::aead::AES_128_GCM, key)?; - Ok(Encryptor { - mode: Mode::Aes128Gcm, - key: sk, - ad: &[], - offset: 0, - }) - } - - pub fn aes_256_gcm(key: &[u8; 32]) -> Result, SymmError> { - let sk = ring::aead::SealingKey::new(&ring::aead::AES_256_GCM, key)?; - Ok(Encryptor { - mode: Mode::Aes256Gcm, - key: sk, - ad: &[], - offset: 0, - }) - } - - /// Optional associated data which is not encrypted but authenticated. - pub fn associate(&mut self, data: &'a [u8]) -> &mut Self { - self.ad = data; - self - } - - /// Optional offset value. Only the slice `[offset..]` will be encrypted. - pub fn offset(&mut self, off: usize) -> &mut Self { - self.offset = off; - self - } - - /// Please note that the pair (key, nonce) must never be reused. Using random nonces - /// limits the number of messages encrypted with the same key to 2^32 (cf. [[1]]) - /// - /// [1]: https://nvlpubs.nist.gov/nistpubs/Legacy/SP/nistspecialpublication800-38d.pdf - pub fn encrypt(&self, nonce: &[u8; 12], mut data: Vec) -> Result, SymmError> { - if self.offset > data.len() { - return Err(SymmError::offset_error(self.offset)) - } - let tag_len = match self.mode { - Mode::Aes128Gcm => ring::aead::AES_128_GCM.tag_len(), - Mode::Aes256Gcm => ring::aead::AES_256_GCM.tag_len(), - }; - data.extend(::std::iter::repeat(0).take(tag_len)); - let len = ring::aead::seal_in_place(&self.key, nonce, self.ad, &mut data[self.offset ..], tag_len)?; - data.truncate(self.offset + len); - Ok(data) - } -} - -/// AES GCM decryptor. -pub struct Decryptor<'a> { - key: ring::aead::OpeningKey, - ad: &'a [u8], - offset: usize, -} - -impl<'a> Decryptor<'a> { - pub fn aes_128_gcm(key: &[u8; 16]) -> Result, SymmError> { - let ok = ring::aead::OpeningKey::new(&ring::aead::AES_128_GCM, key)?; - Ok(Decryptor { - key: ok, - ad: &[], - offset: 0, - }) - } - - pub fn aes_256_gcm(key: &[u8; 32]) -> Result, SymmError> { - let ok = ring::aead::OpeningKey::new(&ring::aead::AES_256_GCM, key)?; - Ok(Decryptor { - key: ok, - ad: &[], - offset: 0, - }) - } - - /// Optional associated data which is not encrypted but authenticated. - pub fn associate(&mut self, data: &'a [u8]) -> &mut Self { - self.ad = data; - self - } - - /// Optional offset value. Only the slice `[offset..]` will be decrypted. 
- pub fn offset(&mut self, off: usize) -> &mut Self { - self.offset = off; - self - } - - pub fn decrypt(&self, nonce: &[u8; 12], mut data: Vec) -> Result, SymmError> { - if self.offset > data.len() { - return Err(SymmError::offset_error(self.offset)) - } - let len = ring::aead::open_in_place(&self.key, nonce, self.ad, 0, &mut data[self.offset ..])?.len(); - data.truncate(self.offset + len); - Ok(data) - } -} - -#[cfg(test)] -mod tests { - use super::{Encryptor, Decryptor}; - - #[test] - fn aes_gcm_128() { - let secret = b"1234567890123456"; - let nonce = b"123456789012"; - let message = b"So many books, so little time"; - - let ciphertext = Encryptor::aes_128_gcm(secret) - .unwrap() - .encrypt(nonce, message.to_vec()) - .unwrap(); - - assert!(ciphertext != message); - - let plaintext = Decryptor::aes_128_gcm(secret) - .unwrap() - .decrypt(nonce, ciphertext) - .unwrap(); - - assert_eq!(plaintext, message) - } - - #[test] - fn aes_gcm_256() { - let secret = b"12345678901234567890123456789012"; - let nonce = b"123456789012"; - let message = b"So many books, so little time"; - - let ciphertext = Encryptor::aes_256_gcm(secret) - .unwrap() - .encrypt(nonce, message.to_vec()) - .unwrap(); - - assert!(ciphertext != message); - - let plaintext = Decryptor::aes_256_gcm(secret) - .unwrap() - .decrypt(nonce, ciphertext) - .unwrap(); - - assert_eq!(plaintext, message) - } - - #[test] - fn aes_gcm_256_offset() { - let secret = b"12345678901234567890123456789012"; - let nonce = b"123456789012"; - let message = b"prefix data; So many books, so little time"; - - let ciphertext = Encryptor::aes_256_gcm(secret) - .unwrap() - .offset(13) // length of "prefix data; " - .encrypt(nonce, message.to_vec()) - .unwrap(); - - assert!(ciphertext != &message[..]); - - let plaintext = Decryptor::aes_256_gcm(secret) - .unwrap() - .offset(13) // length of "prefix data; " - .decrypt(nonce, ciphertext) - .unwrap(); - - assert_eq!(plaintext, &message[..]) - } -} diff --git a/ethcore/crypto/src/digest.rs b/ethcore/crypto/src/digest.rs deleted file mode 100644 index b2be0b8ed..000000000 --- a/ethcore/crypto/src/digest.rs +++ /dev/null @@ -1,109 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use rcrypto::ripemd160; -use ring::digest::{self, Context, SHA256, SHA512}; -use std::marker::PhantomData; -use std::ops::Deref; - -/// The message digest. -pub struct Digest(InnerDigest, PhantomData); - -enum InnerDigest { - Ring(digest::Digest), - Ripemd160([u8; 20]), -} - -impl Deref for Digest { - type Target = [u8]; - fn deref(&self) -> &Self::Target { - match self.0 { - InnerDigest::Ring(ref d) => d.as_ref(), - InnerDigest::Ripemd160(ref d) => &d[..] - } - } -} - -/// Single-step sha256 digest computation. -pub fn sha256(data: &[u8]) -> Digest { - Digest(InnerDigest::Ring(digest::digest(&SHA256, data)), PhantomData) -} - -/// Single-step sha512 digest computation. 
-pub fn sha512(data: &[u8]) -> Digest { - Digest(InnerDigest::Ring(digest::digest(&SHA512, data)), PhantomData) -} - -/// Single-step ripemd160 digest computation. -pub fn ripemd160(data: &[u8]) -> Digest { - let mut hasher = Hasher::ripemd160(); - hasher.update(data); - hasher.finish() -} - -pub enum Sha256 {} -pub enum Sha512 {} -pub enum Ripemd160 {} - -/// Stateful digest computation. -pub struct Hasher(Inner, PhantomData); - -enum Inner { - Ring(Context), - Ripemd160(ripemd160::Ripemd160) -} - -impl Hasher { - pub fn sha256() -> Hasher { - Hasher(Inner::Ring(Context::new(&SHA256)), PhantomData) - } -} - -impl Hasher { - pub fn sha512() -> Hasher { - Hasher(Inner::Ring(Context::new(&SHA512)), PhantomData) - } -} - -impl Hasher { - pub fn ripemd160() -> Hasher { - Hasher(Inner::Ripemd160(ripemd160::Ripemd160::new()), PhantomData) - } -} - -impl Hasher { - pub fn update(&mut self, data: &[u8]) { - match self.0 { - Inner::Ring(ref mut ctx) => ctx.update(data), - Inner::Ripemd160(ref mut ctx) => { - use rcrypto::digest::Digest; - ctx.input(data) - } - } - } - - pub fn finish(self) -> Digest { - match self.0 { - Inner::Ring(ctx) => Digest(InnerDigest::Ring(ctx.finish()), PhantomData), - Inner::Ripemd160(mut ctx) => { - use rcrypto::digest::Digest; - let mut d = [0; 20]; - ctx.result(&mut d); - Digest(InnerDigest::Ripemd160(d), PhantomData) - } - } - } -} diff --git a/ethcore/crypto/src/error.rs b/ethcore/crypto/src/error.rs deleted file mode 100644 index 4e5582e19..000000000 --- a/ethcore/crypto/src/error.rs +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use rcrypto; -use ring; - -quick_error! { - #[derive(Debug)] - pub enum Error { - Scrypt(e: ScryptError) { - cause(e) - from() - } - Symm(e: SymmError) { - cause(e) - from() - } - } -} - -quick_error! { - #[derive(Debug)] - pub enum ScryptError { - // log(N) < r / 16 - InvalidN { - display("Invalid N argument of the scrypt encryption") - } - // p <= (2^31-1 * 32)/(128 * r) - InvalidP { - display("Invalid p argument of the scrypt encryption") - } - } -} - -quick_error! 
{ - #[derive(Debug)] - pub enum SymmError wraps PrivSymmErr { - RustCrypto(e: rcrypto::symmetriccipher::SymmetricCipherError) { - display("symmetric crypto error") - from() - } - Ring(e: ring::error::Unspecified) { - display("symmetric crypto error") - cause(e) - from() - } - Offset(x: usize) { - display("offset {} greater than slice length", x) - } - } -} - -impl SymmError { - pub(crate) fn offset_error(x: usize) -> SymmError { - SymmError(PrivSymmErr::Offset(x)) - } -} - -impl From for SymmError { - fn from(e: ring::error::Unspecified) -> SymmError { - SymmError(PrivSymmErr::Ring(e)) - } -} - -impl From for SymmError { - fn from(e: rcrypto::symmetriccipher::SymmetricCipherError) -> SymmError { - SymmError(PrivSymmErr::RustCrypto(e)) - } -} diff --git a/ethcore/crypto/src/hmac.rs b/ethcore/crypto/src/hmac.rs deleted file mode 100644 index ff337ed02..000000000 --- a/ethcore/crypto/src/hmac.rs +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use digest; -use ring::digest::{SHA256, SHA512}; -use ring::hmac::{self, SigningContext}; -use std::marker::PhantomData; -use std::ops::Deref; - -/// HMAC signature. -pub struct Signature(hmac::Signature, PhantomData); - -impl Deref for Signature { - type Target = [u8]; - fn deref(&self) -> &Self::Target { - self.0.as_ref() - } -} - -/// HMAC signing key. -pub struct SigKey(hmac::SigningKey, PhantomData); - -impl SigKey { - pub fn sha256(key: &[u8]) -> SigKey { - SigKey(hmac::SigningKey::new(&SHA256, key), PhantomData) - } -} - -impl SigKey { - pub fn sha512(key: &[u8]) -> SigKey { - SigKey(hmac::SigningKey::new(&SHA512, key), PhantomData) - } -} - -/// Compute HMAC signature of `data`. -pub fn sign(k: &SigKey, data: &[u8]) -> Signature { - Signature(hmac::sign(&k.0, data), PhantomData) -} - -/// Stateful HMAC computation. -pub struct Signer(SigningContext, PhantomData); - -impl Signer { - pub fn with(key: &SigKey) -> Signer { - Signer(hmac::SigningContext::with_key(&key.0), PhantomData) - } - - pub fn update(&mut self, data: &[u8]) { - self.0.update(data) - } - - pub fn sign(self) -> Signature { - Signature(self.0.sign(), PhantomData) - } -} - -/// HMAC signature verification key. -pub struct VerifyKey(hmac::VerificationKey, PhantomData); - -impl VerifyKey { - pub fn sha256(key: &[u8]) -> VerifyKey { - VerifyKey(hmac::VerificationKey::new(&SHA256, key), PhantomData) - } -} - -impl VerifyKey { - pub fn sha512(key: &[u8]) -> VerifyKey { - VerifyKey(hmac::VerificationKey::new(&SHA512, key), PhantomData) - } -} - -/// Verify HMAC signature of `data`. -pub fn verify(k: &VerifyKey, data: &[u8], sig: &[u8]) -> bool { - hmac::verify(&k.0, data, sig).is_ok() -} diff --git a/ethcore/crypto/src/lib.rs b/ethcore/crypto/src/lib.rs deleted file mode 100644 index 1958067f9..000000000 --- a/ethcore/crypto/src/lib.rs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. 
-// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Crypto utils used ethstore and network. - -extern crate crypto as rcrypto; -extern crate ethereum_types; -#[macro_use] -extern crate quick_error; -extern crate ring; -extern crate tiny_keccak; - -pub mod aes; -pub mod aes_gcm; -pub mod error; -pub mod scrypt; -pub mod digest; -pub mod hmac; -pub mod pbkdf2; - -pub use error::Error; - -use tiny_keccak::Keccak; - -pub const KEY_LENGTH: usize = 32; -pub const KEY_ITERATIONS: usize = 10240; -pub const KEY_LENGTH_AES: usize = KEY_LENGTH / 2; - -/// Default authenticated data to use (in RPC). -pub const DEFAULT_MAC: [u8; 2] = [0, 0]; - -pub trait Keccak256 { - fn keccak256(&self) -> T where T: Sized; -} - -impl Keccak256<[u8; 32]> for T where T: AsRef<[u8]> { - fn keccak256(&self) -> [u8; 32] { - let mut keccak = Keccak::new_keccak256(); - let mut result = [0u8; 32]; - keccak.update(self.as_ref()); - keccak.finalize(&mut result); - result - } -} - -pub fn derive_key_iterations(password: &[u8], salt: &[u8; 32], c: u32) -> (Vec, Vec) { - let mut derived_key = [0u8; KEY_LENGTH]; - pbkdf2::sha256(c, pbkdf2::Salt(salt), pbkdf2::Secret(password), &mut derived_key); - let derived_right_bits = &derived_key[0..KEY_LENGTH_AES]; - let derived_left_bits = &derived_key[KEY_LENGTH_AES..KEY_LENGTH]; - (derived_right_bits.to_vec(), derived_left_bits.to_vec()) -} - -pub fn derive_mac(derived_left_bits: &[u8], cipher_text: &[u8]) -> Vec { - let mut mac = vec![0u8; KEY_LENGTH_AES + cipher_text.len()]; - mac[0..KEY_LENGTH_AES].copy_from_slice(derived_left_bits); - mac[KEY_LENGTH_AES..cipher_text.len() + KEY_LENGTH_AES].copy_from_slice(cipher_text); - mac -} - -pub fn is_equal(a: &[u8], b: &[u8]) -> bool { - ring::constant_time::verify_slices_are_equal(a, b).is_ok() -} diff --git a/ethcore/crypto/src/pbkdf2.rs b/ethcore/crypto/src/pbkdf2.rs deleted file mode 100644 index d210f6f65..000000000 --- a/ethcore/crypto/src/pbkdf2.rs +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . 
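The crate root deleted just above also carried the keystore-oriented helpers (`derive_key_iterations`, `derive_mac`, `Keccak256`, `is_equal`) that callers now pull from parity-crypto. A rough sketch of how they combine, assuming the moved crate keeps the same signatures; the password, salt and ciphertext below are placeholders, not code from this repository:

    extern crate parity_crypto as crypto;

    use crypto::{derive_key_iterations, derive_mac, is_equal, Keccak256, KEY_ITERATIONS};

    // PBKDF2-derive two half-keys from a password, MAC the ciphertext with the
    // left half and keccak-256 the result.
    fn mac_for(password: &[u8], salt: &[u8; 32], cipher_text: &[u8]) -> [u8; 32] {
        let (_aes_half, mac_half) = derive_key_iterations(password, salt, KEY_ITERATIONS as u32);
        derive_mac(&mac_half, cipher_text).keccak256()
    }

    // Constant-time comparison, as exposed by `is_equal` (backed by ring).
    fn mac_matches(expected: &[u8], password: &[u8], salt: &[u8; 32], cipher_text: &[u8]) -> bool {
        is_equal(expected, &mac_for(password, salt, cipher_text))
    }
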
- -use ring; - -pub struct Salt<'a>(pub &'a [u8]); -pub struct Secret<'a>(pub &'a [u8]); - -pub fn sha256(iter: u32, salt: Salt, sec: Secret, out: &mut [u8; 32]) { - ring::pbkdf2::derive(&ring::digest::SHA256, iter, salt.0, sec.0, &mut out[..]) -} - -pub fn sha512(iter: u32, salt: Salt, sec: Secret, out: &mut [u8; 64]) { - ring::pbkdf2::derive(&ring::digest::SHA512, iter, salt.0, sec.0, &mut out[..]) -} diff --git a/ethcore/crypto/src/scrypt.rs b/ethcore/crypto/src/scrypt.rs deleted file mode 100644 index 11c258155..000000000 --- a/ethcore/crypto/src/scrypt.rs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use error::ScryptError; -use rcrypto::scrypt::{scrypt, ScryptParams}; -use super::{KEY_LENGTH_AES, KEY_LENGTH}; - -pub fn derive_key(pass: &[u8], salt: &[u8; 32], n: u32, p: u32, r: u32) -> Result<(Vec, Vec), ScryptError> { - // sanity checks - let log_n = (32 - n.leading_zeros() - 1) as u8; - if log_n as u32 >= r * 16 { - return Err(ScryptError::InvalidN); - } - - if p as u64 > ((u32::max_value() as u64 - 1) * 32)/(128 * (r as u64)) { - return Err(ScryptError::InvalidP); - } - - let mut derived_key = vec![0u8; KEY_LENGTH]; - let scrypt_params = ScryptParams::new(log_n, r, p); - scrypt(pass, salt, &scrypt_params, &mut derived_key); - let derived_right_bits = &derived_key[0..KEY_LENGTH_AES]; - let derived_left_bits = &derived_key[KEY_LENGTH_AES..KEY_LENGTH]; - Ok((derived_right_bits.to_vec(), derived_left_bits.to_vec())) -} diff --git a/ethcore/evm/Cargo.toml b/ethcore/evm/Cargo.toml index 654f7879a..18c9a3907 100644 --- a/ethcore/evm/Cargo.toml +++ b/ethcore/evm/Cargo.toml @@ -10,7 +10,7 @@ heapsize = "0.4" lazy_static = "1.0" log = "0.3" vm = { path = "../vm" } -keccak-hash = { path = "../../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } parking_lot = "0.6" memory-cache = { path = "../../util/memory_cache" } diff --git a/ethcore/light/Cargo.toml b/ethcore/light/Cargo.toml index 3bdcd73b7..6c3a454e2 100644 --- a/ethcore/light/Cargo.toml +++ b/ethcore/light/Cargo.toml @@ -9,19 +9,19 @@ authors = ["Parity Technologies "] [dependencies] log = "0.3" ethcore = { path = ".."} -ethcore-bytes = { path = "../../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethcore-transaction = { path = "../transaction" } ethereum-types = "0.3" -memorydb = { path = "../../util/memorydb" } -patricia-trie = { path = "../../util/patricia_trie" } +memorydb = { git = "https://github.com/paritytech/parity-common" } +patricia-trie = { git = "https://github.com/paritytech/parity-common" } patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" } ethcore-network = { path = "../../util/network" } ethcore-io = { path = "../../util/io" } -hashdb = { path = "../../util/hashdb" } +hashdb = { git = "https://github.com/paritytech/parity-common" } heapsize = "0.4" vm = { 
path = "../vm" } -plain_hasher = { path = "../../util/plain_hasher" } -rlp = { path = "../../util/rlp" } +plain_hasher = { git = "https://github.com/paritytech/parity-common" } +rlp = { git = "https://github.com/paritytech/parity-common" } rlp_derive = { path = "../../util/rlp_derive" } smallvec = "0.4" futures = "0.1" @@ -32,16 +32,16 @@ serde = "1.0" serde_derive = "1.0" parking_lot = "0.6" stats = { path = "../../util/stats" } -keccak-hash = { path = "../../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } keccak-hasher = { path = "../../util/keccak-hasher" } -triehash = { path = "../../util/triehash" } -kvdb = { path = "../../util/kvdb" } +triehash-ethereum = { version = "0.2", path = "../../util/triehash-ethereum" } +kvdb = { git = "https://github.com/paritytech/parity-common" } memory-cache = { path = "../../util/memory_cache" } error-chain = { version = "0.12", default-features = false } [dev-dependencies] ethcore = { path = "..", features = ["test-helpers"] } -kvdb-memorydb = { path = "../../util/kvdb-memorydb" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common" } tempdir = "0.3" [features] diff --git a/ethcore/light/src/lib.rs b/ethcore/light/src/lib.rs index 2000131a6..24c95cfde 100644 --- a/ethcore/light/src/lib.rs +++ b/ethcore/light/src/lib.rs @@ -56,7 +56,7 @@ extern crate log; extern crate bincode; extern crate ethcore_io as io; extern crate ethcore_network as network; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethcore_transaction as transaction; extern crate ethereum_types; extern crate ethcore; @@ -79,7 +79,7 @@ extern crate smallvec; extern crate stats; extern crate vm; extern crate keccak_hash as hash; -extern crate triehash; +extern crate triehash_ethereum as triehash; extern crate kvdb; extern crate memory_cache; #[macro_use] diff --git a/ethcore/node_filter/Cargo.toml b/ethcore/node_filter/Cargo.toml index 597f9a962..b6e3cfe55 100644 --- a/ethcore/node_filter/Cargo.toml +++ b/ethcore/node_filter/Cargo.toml @@ -20,6 +20,6 @@ lru-cache = "0.1" [dev-dependencies] ethcore = { path = "..", features = ["test-helpers"] } -kvdb-memorydb = { path = "../../util/kvdb-memorydb" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common" } ethcore-io = { path = "../../util/io" } tempdir = "0.3" diff --git a/ethcore/private-tx/Cargo.toml b/ethcore/private-tx/Cargo.toml index 42bcc4f20..e547c9808 100644 --- a/ethcore/private-tx/Cargo.toml +++ b/ethcore/private-tx/Cargo.toml @@ -11,8 +11,8 @@ ethabi = "5.1" ethabi-contract = "5.0" ethabi-derive = "5.0" ethcore = { path = ".." 
} -ethcore-bytes = { path = "../../util/bytes" } -ethcore-crypto = { path = "../crypto" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } +parity-crypto = { git = "https://github.com/paritytech/parity-common" } ethcore-io = { path = "../../util/io" } ethcore-logger = { path = "../../logger" } ethcore-miner = { path = "../../miner" } @@ -22,13 +22,13 @@ ethjson = { path = "../../json" } ethkey = { path = "../../ethkey" } fetch = { path = "../../util/fetch" } futures = "0.1" -keccak-hash = { path = "../../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } log = "0.3" parking_lot = "0.6" -patricia-trie = { path = "../../util/patricia_trie" } +patricia-trie = { git = "https://github.com/paritytech/parity-common" } patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" } rand = "0.3" -rlp = { path = "../../util/rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } rlp_derive = { path = "../../util/rlp_derive" } rustc-hex = "1.0" serde = "1.0" diff --git a/ethcore/private-tx/src/lib.rs b/ethcore/private-tx/src/lib.rs index 2034ea7fa..0700a428b 100644 --- a/ethcore/private-tx/src/lib.rs +++ b/ethcore/private-tx/src/lib.rs @@ -26,8 +26,8 @@ mod messages; mod error; extern crate ethcore; -extern crate ethcore_bytes as bytes; -extern crate ethcore_crypto as crypto; +extern crate parity_bytes as bytes; +extern crate parity_crypto as crypto; extern crate ethcore_io as io; extern crate ethcore_miner; extern crate ethcore_transaction as transaction; diff --git a/ethcore/service/Cargo.toml b/ethcore/service/Cargo.toml index e9f16bc7a..245bce787 100644 --- a/ethcore/service/Cargo.toml +++ b/ethcore/service/Cargo.toml @@ -10,7 +10,7 @@ ethcore = { path = ".." } ethcore-io = { path = "../../util/io" } ethcore-private-tx = { path = "../private-tx" } ethcore-sync = { path = "../sync" } -kvdb = { path = "../../util/kvdb" } +kvdb = { git = "https://github.com/paritytech/parity-common" } log = "0.3" stop-guard = { path = "../../util/stop-guard" } trace-time = { path = "../../util/trace-time" } @@ -18,4 +18,4 @@ trace-time = { path = "../../util/trace-time" } [dev-dependencies] ethcore = { path = "..", features = ["test-helpers"] } tempdir = "0.3" -kvdb-rocksdb = { path = "../../util/kvdb-rocksdb" } +kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common" } diff --git a/ethcore/src/builtin.rs b/ethcore/src/builtin.rs index 61739c7b1..91477ca63 100644 --- a/ethcore/src/builtin.rs +++ b/ethcore/src/builtin.rs @@ -18,7 +18,7 @@ use std::cmp::{max, min}; use std::io::{self, Read}; use byteorder::{ByteOrder, BigEndian}; -use ethcore_crypto::digest; +use parity_crypto::digest; use num::{BigUint, Zero, One}; use hash::keccak; diff --git a/ethcore/src/lib.rs b/ethcore/src/lib.rs index 70044bd09..ed5ea6dbe 100644 --- a/ethcore/src/lib.rs +++ b/ethcore/src/lib.rs @@ -65,9 +65,9 @@ extern crate crossbeam; extern crate common_types as types; extern crate ethash; extern crate ethcore_bloom_journal as bloom_journal; -extern crate ethcore_crypto; +extern crate parity_crypto; extern crate ethcore_io as io; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethcore_logger; extern crate ethcore_miner; #[cfg(feature = "stratum")] @@ -97,7 +97,7 @@ extern crate heapsize; extern crate memorydb; extern crate patricia_trie as trie; extern crate patricia_trie_ethereum as ethtrie; -extern crate triehash; +extern crate triehash_ethereum as triehash; extern crate ansi_term; extern crate unexpected; 
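The `triehash` rename above is the same aliasing trick: the generic trie-root code now comes from parity-common, and the new util/triehash-ethereum wrapper added elsewhere in this patch fixes it to the Keccak hasher so existing call sites keep their shape. A usage sketch, assuming the wrapper exposes an `ordered_trie_root` that takes an iterator of byte slices and returns an `H256`, as the sync code further down uses it:

    extern crate ethereum_types;
    extern crate triehash_ethereum;

    use ethereum_types::H256;
    use triehash_ethereum::ordered_trie_root;

    // Assumed shape: root of an index-keyed trie over the given values, e.g. a
    // block's transactions root computed from its RLP-encoded transactions.
    fn transactions_root(encoded_txs: &[Vec<u8>]) -> H256 {
        ordered_trie_root(encoded_txs.iter().map(|tx| tx.as_slice()))
    }
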
extern crate snappy; diff --git a/ethcore/stratum/Cargo.toml b/ethcore/stratum/Cargo.toml index e160907a4..1da27c01a 100644 --- a/ethcore/stratum/Cargo.toml +++ b/ethcore/stratum/Cargo.toml @@ -7,7 +7,7 @@ authors = ["Parity Technologies "] [dependencies] ethereum-types = "0.3" -keccak-hash = { path = "../../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } jsonrpc-macros = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } jsonrpc-tcp-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } diff --git a/ethcore/sync/Cargo.toml b/ethcore/sync/Cargo.toml index 1e539bc4a..dfd457772 100644 --- a/ethcore/sync/Cargo.toml +++ b/ethcore/sync/Cargo.toml @@ -8,7 +8,7 @@ authors = ["Parity Technologies "] [lib] [dependencies] -ethcore-bytes = { path = "../../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethcore-network = { path = "../../util/network" } ethcore-network-devp2p = { path = "../../util/network-devp2p" } ethcore-io = { path = "../../util/io" } @@ -16,14 +16,14 @@ ethcore-light = { path = "../light" } ethcore-transaction = { path = "../transaction" } ethcore = { path = ".." } ethereum-types = "0.3" -hashdb = { version = "0.2", path = "../../util/hashdb" } -plain_hasher = { version = "0.2", path = "../../util/plain_hasher" } -rlp = { path = "../../util/rlp" } +hashdb = { git = "https://github.com/paritytech/parity-common" } +plain_hasher = { git = "https://github.com/paritytech/parity-common" } +rlp = { git = "https://github.com/paritytech/parity-common" } rustc-hex = "1.0" -keccak-hash = { path = "../../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } keccak-hasher = { path = "../../util/keccak-hasher" } -triehash = { path = "../../util/triehash" } -kvdb = { path = "../../util/kvdb" } +triehash-ethereum = {version = "0.2", path = "../../util/triehash-ethereum" } +kvdb = { git = "https://github.com/paritytech/parity-common" } macros = { path = "../../util/macros" } log = "0.3" env_logger = "0.4" @@ -38,6 +38,6 @@ ipnetwork = "0.12.6" [dev-dependencies] ethcore-io = { path = "../../util/io", features = ["mio"] } ethkey = { path = "../../ethkey" } -kvdb-memorydb = { path = "../../util/kvdb-memorydb" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common" } ethcore-private-tx = { path = "../private-tx" } ethcore = { path = "..", features = ["test-helpers"] } diff --git a/ethcore/sync/src/blocks.rs b/ethcore/sync/src/blocks.rs index 8485b1d75..a275e1dcc 100644 --- a/ethcore/sync/src/blocks.rs +++ b/ethcore/sync/src/blocks.rs @@ -20,7 +20,7 @@ use smallvec::SmallVec; use hash::{keccak, KECCAK_NULL_RLP, KECCAK_EMPTY_LIST_RLP}; use heapsize::HeapSizeOf; use ethereum_types::H256; -use triehash::ordered_trie_root; +use triehash_ethereum::ordered_trie_root; use bytes::Bytes; use rlp::{Rlp, RlpStream, DecoderError}; use network; diff --git a/ethcore/sync/src/lib.rs b/ethcore/sync/src/lib.rs index 6e49b1ce7..f9f8a3e3e 100644 --- a/ethcore/sync/src/lib.rs +++ b/ethcore/sync/src/lib.rs @@ -23,7 +23,7 @@ extern crate ethcore_network as network; extern crate ethcore_network_devp2p as devp2p; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethcore_io as io; extern crate ethcore_transaction as transaction; #[macro_use] @@ -40,7 +40,7 @@ extern crate rlp; extern crate ipnetwork; extern crate 
keccak_hash as hash; extern crate keccak_hasher; -extern crate triehash; +extern crate triehash_ethereum; extern crate kvdb; extern crate ethcore_light as light; diff --git a/ethcore/transaction/Cargo.toml b/ethcore/transaction/Cargo.toml index 79e7282c3..bde83f478 100644 --- a/ethcore/transaction/Cargo.toml +++ b/ethcore/transaction/Cargo.toml @@ -9,8 +9,8 @@ ethjson = { path = "../../json" } ethkey = { path = "../../ethkey" } evm = { path = "../evm" } heapsize = "0.4" -keccak-hash = { path = "../../util/hash" } -rlp = { path = "../../util/rlp" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } +rlp = { git = "https://github.com/paritytech/parity-common" } unexpected = { path = "../../util/unexpected" } ethereum-types = "0.3" diff --git a/ethcore/types/Cargo.toml b/ethcore/types/Cargo.toml index 92cc74551..82b42c519 100644 --- a/ethcore/types/Cargo.toml +++ b/ethcore/types/Cargo.toml @@ -5,12 +5,12 @@ version = "0.1.0" authors = ["Parity Technologies "] [dependencies] -rlp = { path = "../../util/rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } rlp_derive = { path = "../../util/rlp_derive" } -ethcore-bytes = { path = "../../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethereum-types = "0.3" ethjson = { path = "../../json" } -keccak-hash = { path = "../../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } heapsize = "0.4" [dev-dependencies] diff --git a/ethcore/types/src/lib.rs b/ethcore/types/src/lib.rs index 5ac8ff12a..f375fec13 100644 --- a/ethcore/types/src/lib.rs +++ b/ethcore/types/src/lib.rs @@ -17,7 +17,7 @@ //! Types used in the public API extern crate ethereum_types; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethjson; extern crate rlp; #[macro_use] diff --git a/ethcore/vm/Cargo.toml b/ethcore/vm/Cargo.toml index 7348951d7..194f4600d 100644 --- a/ethcore/vm/Cargo.toml +++ b/ethcore/vm/Cargo.toml @@ -5,12 +5,12 @@ authors = ["Parity Technologies "] [dependencies] byteorder = "1.0" -ethcore-bytes = { path = "../../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethereum-types = "0.3" -patricia-trie = { path = "../../util/patricia_trie" } +patricia-trie = { git = "https://github.com/paritytech/parity-common" } patricia-trie-ethereum = { path = "../../util/patricia-trie-ethereum" } log = "0.3" common-types = { path = "../types" } ethjson = { path = "../../json" } -rlp = { path = "../../util/rlp" } -keccak-hash = { path = "../../util/hash" } +rlp = { git = "https://github.com/paritytech/parity-common" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } diff --git a/ethcore/vm/src/lib.rs b/ethcore/vm/src/lib.rs index 658420f74..2c98cfcd2 100644 --- a/ethcore/vm/src/lib.rs +++ b/ethcore/vm/src/lib.rs @@ -17,7 +17,7 @@ //! 
Virtual machines support library extern crate ethereum_types; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate common_types as types; extern crate ethjson; extern crate rlp; diff --git a/ethkey/Cargo.toml b/ethkey/Cargo.toml index ef828ac30..8449a54c3 100644 --- a/ethkey/Cargo.toml +++ b/ethkey/Cargo.toml @@ -6,7 +6,7 @@ authors = ["Parity Technologies "] [dependencies] byteorder = "1.0" edit-distance = "2.0" -ethcore-crypto = { path = "../ethcore/crypto" } +parity-crypto = { git = "https://github.com/paritytech/parity-common" } eth-secp256k1 = { git = "https://github.com/paritytech/rust-secp256k1" } ethereum-types = "0.3" lazy_static = "1.0" diff --git a/ethkey/src/crypto.rs b/ethkey/src/crypto.rs index 3ff809614..8049f16b5 100644 --- a/ethkey/src/crypto.rs +++ b/ethkey/src/crypto.rs @@ -16,7 +16,7 @@ use secp256k1; use std::io; -use ethcore_crypto::error::SymmError; +use parity_crypto::error::SymmError; quick_error! { #[derive(Debug)] @@ -67,7 +67,7 @@ pub mod ecdh { /// ECIES function pub mod ecies { - use ethcore_crypto::{aes, digest, hmac, is_equal}; + use parity_crypto::{aes, digest, hmac, is_equal}; use ethereum_types::H128; use super::{ecdh, Error}; use {Random, Generator, Public, Secret}; diff --git a/ethkey/src/extended.rs b/ethkey/src/extended.rs index 89a4bb26a..e48f6b561 100644 --- a/ethkey/src/extended.rs +++ b/ethkey/src/extended.rs @@ -207,7 +207,7 @@ impl ExtendedKeyPair { // Work is based on BIP0032 // https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki mod derivation { - use ethcore_crypto::hmac; + use parity_crypto::hmac; use ethereum_types::{U256, U512, H512, H256}; use secp256k1::key::{SecretKey, PublicKey}; use SECP256K1; diff --git a/ethkey/src/lib.rs b/ethkey/src/lib.rs index af13f5a1e..013a60cd3 100644 --- a/ethkey/src/lib.rs +++ b/ethkey/src/lib.rs @@ -18,7 +18,7 @@ extern crate byteorder; extern crate edit_distance; -extern crate ethcore_crypto; +extern crate parity_crypto; extern crate ethereum_types; extern crate mem; extern crate parity_wordlist; diff --git a/ethstore/Cargo.toml b/ethstore/Cargo.toml index 038b27fbe..deeb5a946 100644 --- a/ethstore/Cargo.toml +++ b/ethstore/Cargo.toml @@ -16,7 +16,7 @@ tiny-keccak = "1.4" time = "0.1.34" itertools = "0.5" parking_lot = "0.6" -ethcore-crypto = { path = "../ethcore/crypto" } +parity-crypto = { git = "https://github.com/paritytech/parity-common" } ethereum-types = "0.3" dir = { path = "../util/dir" } smallvec = "0.4" diff --git a/ethstore/src/lib.rs b/ethstore/src/lib.rs index 67e636dd5..ad58bd0e9 100644 --- a/ethstore/src/lib.rs +++ b/ethstore/src/lib.rs @@ -31,7 +31,7 @@ extern crate time; extern crate tiny_keccak; extern crate tempdir; -extern crate ethcore_crypto as crypto; +extern crate parity_crypto as crypto; extern crate ethereum_types; extern crate ethkey as _ethkey; extern crate parity_wordlist; diff --git a/evmbin/Cargo.toml b/evmbin/Cargo.toml index 9ff27a2e6..43264f042 100644 --- a/evmbin/Cargo.toml +++ b/evmbin/Cargo.toml @@ -12,7 +12,7 @@ path = "./src/main.rs" docopt = "0.8" ethcore = { path = "../ethcore", features = ["test-helpers", "json-tests"] } ethjson = { path = "../json" } -ethcore-bytes = { path = "../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethcore-transaction = { path = "../ethcore/transaction" } ethereum-types = "0.3" evm = { path = "../ethcore/evm" } diff --git a/evmbin/src/main.rs b/evmbin/src/main.rs index 45a09c73e..144c99fb3 100644 --- a/evmbin/src/main.rs +++ b/evmbin/src/main.rs @@ 
-26,7 +26,7 @@ extern crate serde; extern crate serde_derive; extern crate docopt; extern crate ethcore_transaction as transaction; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethereum_types; extern crate vm; extern crate evm; diff --git a/hash-fetch/Cargo.toml b/hash-fetch/Cargo.toml index df332602c..c4eb7acd3 100644 --- a/hash-fetch/Cargo.toml +++ b/hash-fetch/Cargo.toml @@ -15,10 +15,10 @@ mime_guess = "2.0.0-alpha.2" rand = "0.4" rustc-hex = "1.0" fetch = { path = "../util/fetch" } -ethcore-bytes = { path = "../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethereum-types = "0.3" parity-reactor = { path = "../util/reactor" } -keccak-hash = { path = "../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } registrar = { path = "../registrar" } ethabi = "5.1" diff --git a/hash-fetch/src/lib.rs b/hash-fetch/src/lib.rs index bdbb0e350..9ed8c59fd 100644 --- a/hash-fetch/src/lib.rs +++ b/hash-fetch/src/lib.rs @@ -22,7 +22,7 @@ extern crate log; extern crate ethabi; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethereum_types; extern crate futures; extern crate futures_cpupool; diff --git a/ipfs/Cargo.toml b/ipfs/Cargo.toml index 5a7204813..968aef67b 100644 --- a/ipfs/Cargo.toml +++ b/ipfs/Cargo.toml @@ -7,11 +7,11 @@ authors = ["Parity Technologies "] [dependencies] ethcore = { path = "../ethcore" } -ethcore-bytes = { path = "../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethereum-types = "0.3" jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } jsonrpc-http-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } -rlp = { path = "../util/rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } cid = "0.2" multihash = "0.7" unicase = "2.0" diff --git a/ipfs/src/lib.rs b/ipfs/src/lib.rs index 7f6ebe77c..ac0871b64 100644 --- a/ipfs/src/lib.rs +++ b/ipfs/src/lib.rs @@ -20,7 +20,7 @@ extern crate unicase; extern crate rlp; extern crate ethcore; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethereum_types; extern crate jsonrpc_core as core; extern crate jsonrpc_http_server as http; diff --git a/local-store/Cargo.toml b/local-store/Cargo.toml index d2c3469ca..75717bed0 100644 --- a/local-store/Cargo.toml +++ b/local-store/Cargo.toml @@ -8,9 +8,9 @@ authors = ["Parity Technologies "] ethcore = { path = "../ethcore" } ethcore-io = { path = "../util/io" } ethcore-transaction = { path = "../ethcore/transaction" } -kvdb = { path = "../util/kvdb" } +kvdb = { git = "https://github.com/paritytech/parity-common" } log = "0.3" -rlp = { path = "../util/rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } serde = "1.0" serde_derive = "1.0" serde_json = "1.0" @@ -18,4 +18,4 @@ serde_json = "1.0" [dev-dependencies] ethcore = { path = "../ethcore", features = ["test-helpers"] } ethkey = { path = "../ethkey" } -kvdb-memorydb = { path = "../util/kvdb-memorydb" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common" } diff --git a/miner/Cargo.toml b/miner/Cargo.toml index e1e7974a6..0f4c2c2db 100644 --- a/miner/Cargo.toml +++ b/miner/Cargo.toml @@ -22,12 +22,12 @@ ethereum-types = "0.3" futures = "0.1" futures-cpupool = "0.1" heapsize = "0.4" -keccak-hash = { path = "../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } 
linked-hash-map = "0.5" log = "0.3" parking_lot = "0.6" price-info = { path = "../price-info", optional = true } -rlp = { path = "../util/rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } trace-time = { path = "../util/trace-time" } transaction-pool = { path = "../transaction-pool" } diff --git a/parity/lib.rs b/parity/lib.rs index 609738c26..4a925bba7 100644 --- a/parity/lib.rs +++ b/parity/lib.rs @@ -45,7 +45,7 @@ extern crate toml; extern crate blooms_db; extern crate ethcore; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethcore_io as io; extern crate ethcore_light as light; extern crate ethcore_logger; diff --git a/registrar/Cargo.toml b/registrar/Cargo.toml index 3ba26e456..dcbfa439e 100644 --- a/registrar/Cargo.toml +++ b/registrar/Cargo.toml @@ -10,4 +10,4 @@ futures = "0.1" ethabi = "5.1.0" ethabi-derive = "5.0.5" ethabi-contract = "5.0.3" -keccak-hash = { path = "../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } diff --git a/rpc/Cargo.toml b/rpc/Cargo.toml index c84a1dc5f..af5b4f413 100644 --- a/rpc/Cargo.toml +++ b/rpc/Cargo.toml @@ -37,8 +37,8 @@ jsonrpc-pubsub = { git = "https://github.com/paritytech/jsonrpc.git", branch = " ethash = { path = "../ethash" } ethcore = { path = "../ethcore", features = ["test-helpers"] } -ethcore-bytes = { path = "../util/bytes" } -ethcore-crypto = { path = "../ethcore/crypto" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } +parity-crypto = { git = "https://github.com/paritytech/parity-common" } ethcore-devtools = { path = "../devtools" } ethcore-io = { path = "../util/io" } ethcore-light = { path = "../ethcore/light" } @@ -53,13 +53,13 @@ ethjson = { path = "../json" } ethkey = { path = "../ethkey" } ethstore = { path = "../ethstore" } fetch = { path = "../util/fetch" } -keccak-hash = { path = "../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } node-health = { path = "../dapps/node-health" } parity-reactor = { path = "../util/reactor" } parity-updater = { path = "../updater" } parity-version = { path = "../util/version" } -patricia-trie = { path = "../util/patricia_trie" } -rlp = { path = "../util/rlp" } +patricia-trie = { git = "https://github.com/paritytech/parity-common" } +rlp = { git = "https://github.com/paritytech/parity-common" } stats = { path = "../util/stats" } vm = { path = "../ethcore/vm" } @@ -73,7 +73,7 @@ fake-hardware-wallet = { path = "../util/fake-hardware-wallet" } ethcore = { path = "../ethcore", features = ["test-helpers"] } ethcore-network = { path = "../util/network" } fake-fetch = { path = "../util/fake-fetch" } -kvdb-memorydb = { path = "../util/kvdb-memorydb" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common" } macros = { path = "../util/macros" } pretty_assertions = "0.1" transaction-pool = { path = "../transaction-pool" } diff --git a/rpc/src/lib.rs b/rpc/src/lib.rs index 1caf383f0..860e4bc6d 100644 --- a/rpc/src/lib.rs +++ b/rpc/src/lib.rs @@ -45,8 +45,8 @@ extern crate jsonrpc_pubsub; extern crate ethash; #[cfg_attr(test, macro_use)] extern crate ethcore; -extern crate ethcore_bytes as bytes; -extern crate ethcore_crypto as crypto; +extern crate parity_bytes as bytes; +extern crate parity_crypto as crypto; extern crate ethcore_devtools as devtools; extern crate ethcore_io as io; extern crate ethcore_light as light; diff --git a/rpc_client/Cargo.toml b/rpc_client/Cargo.toml index 67efce18c..6b0f4c2cc 100644 --- a/rpc_client/Cargo.toml 
+++ b/rpc_client/Cargo.toml @@ -17,4 +17,4 @@ parking_lot = "0.6" jsonrpc-core = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } jsonrpc-ws-server = { git = "https://github.com/paritytech/jsonrpc.git", branch = "parity-1.11" } parity-rpc = { path = "../rpc" } -keccak-hash = { path = "../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } diff --git a/secret_store/Cargo.toml b/secret_store/Cargo.toml index dd419ae4e..85eda93e3 100644 --- a/secret_store/Cargo.toml +++ b/secret_store/Cargo.toml @@ -24,14 +24,14 @@ tokio-service = "0.1" tokio-proto = "0.1" url = "1.0" ethcore = { path = "../ethcore" } -ethcore-bytes = { path = "../util/bytes" } -ethcore-crypto = { path = "../ethcore/crypto" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } +parity-crypto = { git = "https://github.com/paritytech/parity-common" } ethcore-logger = { path = "../logger" } ethcore-sync = { path = "../ethcore/sync" } ethcore-transaction = { path = "../ethcore/transaction" } ethereum-types = "0.3" -kvdb = { path = "../util/kvdb" } -keccak-hash = { path = "../util/hash" } +kvdb = { git = "https://github.com/paritytech/parity-common" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } ethkey = { path = "../ethkey" } lazy_static = "1.0" ethabi = "5.1" @@ -41,4 +41,4 @@ ethabi-contract = "5.0" [dev-dependencies] ethcore = { path = "../ethcore", features = ["test-helpers"] } tempdir = "0.3" -kvdb-rocksdb = { path = "../util/kvdb-rocksdb" } +kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common" } diff --git a/secret_store/src/lib.rs b/secret_store/src/lib.rs index 74cde2c5a..b58534b3e 100644 --- a/secret_store/src/lib.rs +++ b/secret_store/src/lib.rs @@ -17,8 +17,8 @@ extern crate byteorder; extern crate ethabi; extern crate ethcore; -extern crate ethcore_bytes as bytes; -extern crate ethcore_crypto as crypto; +extern crate parity_bytes as bytes; +extern crate parity_crypto as crypto; extern crate ethcore_logger as logger; extern crate ethcore_sync as sync; extern crate ethcore_transaction as transaction; diff --git a/updater/Cargo.toml b/updater/Cargo.toml index 8bf708b76..b7c1aded9 100644 --- a/updater/Cargo.toml +++ b/updater/Cargo.toml @@ -6,7 +6,7 @@ license = "GPL-3.0" authors = ["Parity Technologies "] [dependencies] -keccak-hash = { path = "../util/hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } lazy_static = "1.0" log = "0.3" ethabi = "5.1" @@ -15,13 +15,13 @@ ethabi-contract = "5.0" target_info = "0.1" semver = "0.9" ethcore = { path = "../ethcore" } -ethcore-bytes = { path = "../util/bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethcore-sync = { path = "../ethcore/sync" } ethereum-types = "0.3" parking_lot = "0.6" parity-hash-fetch = { path = "../hash-fetch" } parity-version = { path = "../util/version" } -path = { path = "../util/path" } +path = { git = "https://github.com/paritytech/parity-common" } rand = "0.4" [dev-dependencies] diff --git a/updater/src/lib.rs b/updater/src/lib.rs index 75447e8d5..c50e0fee6 100644 --- a/updater/src/lib.rs +++ b/updater/src/lib.rs @@ -20,7 +20,7 @@ extern crate ethabi; extern crate ethcore; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate ethcore_sync as sync; extern crate ethereum_types; extern crate keccak_hash as hash; diff --git a/util/bytes/Cargo.toml b/util/bytes/Cargo.toml deleted file mode 100644 index b20e38a2a..000000000 --- 
a/util/bytes/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "ethcore-bytes" -version = "0.1.0" -authors = ["Parity Technologies "] -description = "byte utilities for Parity" -license = "GPL-3.0" - -[dependencies] diff --git a/util/bytes/src/lib.rs b/util/bytes/src/lib.rs deleted file mode 100644 index 03b474559..000000000 --- a/util/bytes/src/lib.rs +++ /dev/null @@ -1,179 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! General bytes-related utilities. -//! -//! Includes a pretty-printer for bytes, in the form of `ToPretty` and `PrettySlice` -//! as - -use std::fmt; -use std::cmp::min; -use std::ops::{Deref, DerefMut}; - -/// Slice pretty print helper -pub struct PrettySlice<'a> (&'a [u8]); - -impl<'a> fmt::Debug for PrettySlice<'a> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - for i in 0..self.0.len() { - match i > 0 { - true => { write!(f, "·{:02x}", self.0[i])?; }, - false => { write!(f, "{:02x}", self.0[i])?; }, - } - } - Ok(()) - } -} - -impl<'a> fmt::Display for PrettySlice<'a> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - for i in 0..self.0.len() { - write!(f, "{:02x}", self.0[i])?; - } - Ok(()) - } -} - -/// Trait to allow a type to be pretty-printed in `format!`, where unoverridable -/// defaults cannot otherwise be avoided. -pub trait ToPretty { - /// Convert a type into a derivative form in order to make `format!` print it prettily. - fn pretty(&self) -> PrettySlice; - /// Express the object as a hex string. - fn to_hex(&self) -> String { - format!("{}", self.pretty()) - } -} - -impl> ToPretty for T { - fn pretty(&self) -> PrettySlice { - PrettySlice(self.as_ref()) - } -} - -/// A byte collection reference that can either be a slice or a vector -pub enum BytesRef<'a> { - /// This is a reference to a vector - Flexible(&'a mut Bytes), - /// This is a reference to a slice - Fixed(&'a mut [u8]) -} - -impl<'a> BytesRef<'a> { - /// Writes given `input` to this `BytesRef` starting at `offset`. - /// Returns number of bytes written to the ref. - /// NOTE can return number greater then `input.len()` in case flexible vector had to be extended. 
- pub fn write(&mut self, offset: usize, input: &[u8]) -> usize { - match *self { - BytesRef::Flexible(ref mut data) => { - let data_len = data.len(); - let wrote = input.len() + if data_len > offset { 0 } else { offset - data_len }; - - data.resize(offset, 0); - data.extend_from_slice(input); - wrote - }, - BytesRef::Fixed(ref mut data) if offset < data.len() => { - let max = min(data.len() - offset, input.len()); - for i in 0..max { - data[offset + i] = input[i]; - } - max - }, - _ => 0 - } - } -} - -impl<'a> Deref for BytesRef<'a> { - type Target = [u8]; - - fn deref(&self) -> &[u8] { - match *self { - BytesRef::Flexible(ref bytes) => bytes, - BytesRef::Fixed(ref bytes) => bytes, - } - } -} - -impl <'a> DerefMut for BytesRef<'a> { - fn deref_mut(&mut self) -> &mut [u8] { - match *self { - BytesRef::Flexible(ref mut bytes) => bytes, - BytesRef::Fixed(ref mut bytes) => bytes, - } - } -} - -/// Vector of bytes. -pub type Bytes = Vec; - -#[cfg(test)] -mod tests { - use super::BytesRef; - - #[test] - fn should_write_bytes_to_fixed_bytesref() { - // given - let mut data1 = vec![0, 0, 0]; - let mut data2 = vec![0, 0, 0]; - let (res1, res2) = { - let mut bytes1 = BytesRef::Fixed(&mut data1[..]); - let mut bytes2 = BytesRef::Fixed(&mut data2[1..2]); - - // when - let res1 = bytes1.write(1, &[1, 1, 1]); - let res2 = bytes2.write(3, &[1, 1, 1]); - (res1, res2) - }; - - // then - assert_eq!(&data1, &[0, 1, 1]); - assert_eq!(res1, 2); - - assert_eq!(&data2, &[0, 0, 0]); - assert_eq!(res2, 0); - } - - #[test] - fn should_write_bytes_to_flexible_bytesref() { - // given - let mut data1 = vec![0, 0, 0]; - let mut data2 = vec![0, 0, 0]; - let mut data3 = vec![0, 0, 0]; - let (res1, res2, res3) = { - let mut bytes1 = BytesRef::Flexible(&mut data1); - let mut bytes2 = BytesRef::Flexible(&mut data2); - let mut bytes3 = BytesRef::Flexible(&mut data3); - - // when - let res1 = bytes1.write(1, &[1, 1, 1]); - let res2 = bytes2.write(3, &[1, 1, 1]); - let res3 = bytes3.write(5, &[1, 1, 1]); - (res1, res2, res3) - }; - - // then - assert_eq!(&data1, &[0, 1, 1, 1]); - assert_eq!(res1, 3); - - assert_eq!(&data2, &[0, 0, 0, 1, 1, 1]); - assert_eq!(res2, 3); - - assert_eq!(&data3, &[0, 0, 0, 0, 0, 1, 1, 1]); - assert_eq!(res3, 5); - } -} diff --git a/util/hash/Cargo.toml b/util/hash/Cargo.toml deleted file mode 100644 index 4ca503751..000000000 --- a/util/hash/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -description = "Rust bindings for tinykeccak C library" -homepage = "https://github.com/paritytech/keccak-hash" -readme = "README.md" -license = "GPL-3.0" -name = "keccak-hash" -version = "0.1.2" -authors = ["Parity Technologies "] - -[dependencies] -ethereum-types = "0.3" -tiny-keccak = "1.4" - -[dev-dependencies] -tempdir = "0.3" diff --git a/util/hash/benches/keccak_256.rs b/util/hash/benches/keccak_256.rs deleted file mode 100644 index d59e53410..000000000 --- a/util/hash/benches/keccak_256.rs +++ /dev/null @@ -1,52 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
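For reference, the byte helpers being dropped here now come from parity-common's parity-bytes crate. A minimal usage sketch, assuming the published crate exposes the same `ToPretty` and `BytesRef` API as the removed `util/bytes` source (the `main` wrapper is only for illustration):

    extern crate parity_bytes;

    use parity_bytes::{BytesRef, ToPretty};

    fn main() {
        // `to_hex` comes from the blanket `ToPretty` impl for `AsRef<[u8]>` types.
        let payload: Vec<u8> = vec![0xde, 0xad, 0xbe, 0xef];
        assert_eq!(payload.to_hex(), "deadbeef");

        // A fixed-size target truncates writes that run past its end and
        // reports only the bytes that actually fit.
        let mut fixed = [0u8; 3];
        let wrote = BytesRef::Fixed(&mut fixed[..]).write(1, &[1, 1, 1]);
        assert_eq!(wrote, 2);
        assert_eq!(fixed, [0, 1, 1]);

        // A flexible (Vec-backed) target grows to fit the write instead.
        let mut flexible = vec![0u8; 3];
        let wrote = BytesRef::Flexible(&mut flexible).write(3, &[1, 1, 1]);
        assert_eq!(wrote, 3);
        assert_eq!(flexible, vec![0, 0, 0, 1, 1, 1]);
    }

The two cases mirror the unit tests in the removed file: `BytesRef::Fixed` suits a caller-owned, preallocated buffer, while `BytesRef::Flexible` suits output whose length is not known up front.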
- -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -#![feature(test)] - -extern crate test; -extern crate ethereum_types; -extern crate keccak_hash; - -use keccak_hash::{keccak, write_keccak}; -use test::Bencher; - -#[bench] -fn bench_keccak_256_with_empty_input(b: &mut Bencher) { - let empty = [0u8;0]; - b.bytes = empty.len() as u64; - b.iter(|| { - let _out = keccak(empty); - }) -} - -#[bench] -fn bench_keccak_256_with_typical_input(b: &mut Bencher) { - let data: Vec = From::from("some medum length string with important information"); - b.bytes = data.len() as u64; - b.iter(|| { - let _out = keccak(&data); - }) -} - -#[bench] -fn bench_keccak_256_with_large_input(b: &mut Bencher) { - // 4096 chars - let data: Vec = From::from("IGxcKBr1Qp7tuqtpSVhAbvt7UgWLEi7mCA6Wa185seLSIJLFS8K1aAFO9AwtO9b3n9SM3Qg136JMmy9Mj9gZ84IaUm8XioPtloabFDU5ZR1wvauJT6jNTkvBVBpUigIsyU7C1u3s99vKP64LpXqvo1hwItZKtISxmUAgzzjv5q14V4G9bkKAnmc4M5xixgLsDGZmnj6HcOMY3XRkWtxN3RscSKwPA0bfpgtz27ZVHplbXwloYRgRLpjRhZJc7sqO8RFnTHKasVkxVRcUoDBvWNJK27TbLvQQcfxETI2Q1H6c2cBAchi8unSiuxqy5rIvVxcl9rsmmRY4IXLEG9qKntUGbiIRLjEffIP9ODoWog0GbWLmMtfvtf24hWVwXz6Ap5oUAR0kLgb7HYIYrOwKjvfV25iEF7GW8cjhl8yowXx1zcgW4t6NJNqJlGzRKx8MvRWQXvHz8h8JxcHl7S64i6PAkxI9eCLXLvs8cpbEQQHt05Zu6GKm6IInjc9mSh52WFuGhgjbno69XzfkBufJs6c9tZuBf6ErVPj4UxmT82ajCruDusk79Tlvb8oQMLjoplQc1alQaLQwSsMac9iVp9MiE3PeYnTTepJ1V10tp79fciDAnNPJgPcRfDYv0REcSFgR9Q7yWhbpPpyBjO7HwOykDQVGtV0ZbDFrFRygLAXagAIkOPc9HDfcBNID1Q2MGk8ijVWMyvmGz1wzbpNfFcQaSOm8olhwoLyHUGvkyXegh44iNsPBUvSicNxTTDowtMqO5azleuWEjzxCobYbASDopvl6JeJjRtEBBO5YCQJiHsYjlXh9QR5Q543GsqhzRLgcHNRSZYLMZqDmIABXZi8VRNJMZyWXDRKHOGDmcHWe55uZomW6FnyU0uSRKxxz66K0JWfxuFzzxAR0vR4ZZCTemgDRQuDwL1loC3KUMjDpU13jUgoPc4UJUVfwQ4f4BUY3X51Cfw9FLw4oX39KoFoiCP2Z6z27gZUY1IlE59WoXGLj4KjTp4C16ZihG080gfDIWlXnDEk3VwBuBFyKWARB63sGLrGnn27b1gHWMaop6sPvkQgWxkEKIqsxDIvXLZJg2s23V8Gqtt0FeA7R3RCvBysF4jNjQ7NiQTIQWQZ8G9gO4mEsftolSZv6FlSpNeBKIIwYWSO2R6vkgeiz06euE9bwwnenOjwPNGTGk8WHIOZBJ1hIP0ejVU2i2ca9ON0phSAnewqjo5W3PtZf2Q7mDvp9imuVWoy4t8XcZq8I2Un9jVjes9Xi0FLN2t71vLFWLWZmGDzwXxpqEgkARS1WjtJoYXCBmRnXEPj6jQfwMZWKPYSIrmOogxMVoWvA8wrof6utfJna9JezyTnrBJSCuGTSNmwwAXRLoFYxF1RITyN8mI2KmHSfvLXBrbE6kmAkjsm4XJb6kria7oUQQ1gzJuCyB7oNHjZTBFNhNa7VeQ1s1xLOwZXLOAjZ4MDTYKnF7giGJGyswb5KQxkOV9orbuAu6pJsjtql6h1UD3BcNUkG3oz8kJNepbuCN3vNCJcZOX1VrQi0PWkDwyvECrQ2E1CgbU6GpWatpg2sCTpo9W62pCcWBK2FKUFWqU3qo2T7T1Mk2ZtM6hE9I8op0M7xlGE91Mn7ea6aq93MWp7nvFlBvbaMIoeU4MpDx0BeOSkROY03ZBJ0x7K8nJrNUhAtvxp17c9oFk0VxLiuRbAAcwDUormOmpVXZNIcqnap4twEVYaSIowfcNojyUSrFL5nPc8ZG93WgNNl9rpUPZhssVml3DvXghI80A9SW3QauzohTQAX2bkWelFBHnuG2LKrsJ8en51N6CkjcS5b87y1DVMZELcZ1n5s8PCAA1wyn7OSZlgw00GRzch1YwMoHzBBgIUtMO9HrMyuhgqIPJP7KcKbQkKhtvBXKplX8SCfSlOwUkLwHNKm3HYVE0uVfJ91NAsUrGoCOjYiXYpoRT8bjAPWTm6fDlTq2sbPOyTMoc4xRasmiOJ7B0PT6UxPzCPImM4100sPFxp7Kofv4okKZWTPKTefeYiPefI3jRgfDtEIP9E6a35LZD75lBNMXYlAqL3qlnheUQD1WQimFTHiDsW6bmURptNvtkMjEXzXzpWbnyxBskUGTvP2YQjtSAhWliDXkv6t1x71cYav7TQbqvbIzMRQQsguSGYMbs8YIC4DC9ep5reWAfanlTxcxksbEhQ7FGzXOvcufeGnDl2C85gWfryVzwN7kOZiSEktFMOQ1ngRC23y1fCOiHQVQJ2nLnaW7GILb9wkN1mBTRuHsOefRJST0TnRxcn4bBq4MIibIitVyjPRy7G5XvPEcL4pFaW1HCPGm6pUOEEwTer32JObNGCyTFB1BI2cRLJu5BHPjgG3mmb0gGkGlIfh8D2b2amogpivqEn2r9Y1KOKQ8ufJvG2mYfkevco9DuEZ9Nmzkm6XkCTZaFMNHqbfQaKqsEYK7i2N1KfkBct1leW2H9MQ9QO7AHCqXHK47b1kWVIm6pSJA1yV4funzCqXnIJCEURQgHiKf38YpN7ylLhe1J4UvSG3KeesZNeFFIZOEP9HZUSFMpnN1MOrwejojK0D4qzwucYWtXrTQ8I7UP5QhlijIsCKckUa9C1Osjrq8cgSclYNGt19wpy0onUbX1rOQBUlAAUJs4CyXNU0wmVUjw7tG1LUC8my4s9KZDUj4R5UcPz3VaZRrx1RqYu6YxjroJW70I1LyG4WEiQbOkCoLmaiWo9WzbUS2cErlOo2RPym
lkWHxbNnZawX2Bc872ivRHSWqNpRHyuR5QewXmcyghH3EhESBAxTel5E2xuQXfLCEVK0kEk0Mj22KPsckKKyH7sVYC1F4YItQh5hj9Titb7KflQb9vnXQ44UHxY3zBhTQT5PSYv1Kv8HxXCsnpmhZCiBru16iX9oEB33icBVB2KKcZZEEKnCGPVxJlM9RTlyNyQmjHf7z4GeTDuMAUrsMO31WvgZBnWcAOtn6ulBTUCAaqxJiWqzlMx2FSANAlyAjAxqzmQjzPLvQRjskUnBFN3woKB1m2bSo2c5thwA1fKiPvN5LW8tl1rnfNy3rJ0GJpK8nZjkzHMztYrKYAe56pX4SvplpTyibTIiRXLyEVsmuByTHCZhO3fvGoFsav3ZuRhe9eAAWeqAh13eKDTcA0ufME3ZnmJheXEZ3OwrxnFjSf3U0clkWYVont3neh77ODKHhYnX0bOmnJJlr4RqFoLBitskY0kcGMKcZlaej21SENjDcFgaka3CfHbAH5vIFqnoX1JZrZPkQ65PZqQWImP79U3gXWKvz96lElyJZAFqn0Mbltllqw4MhlI766AvHraOmMsJoNvjv1QR7pCSnC0iX6nbqW1eVPaUSZDuZRtRIxfLA8HC9VbxufT2KZV3qG0l7wrZna5Di2MNcBE9uthuVLZcqp8vCmEhINDhRRlipR7tC2iRBHecS5WtxBCpbEm1y1kgNG5o60UKgAswxxuJ3RQ9Y49mPIApBMmp4LFpuKRfcrZb4UJnCfR3pNbQ70nnZ6Be2M7tuJUCoFfHrhqHXNz5A0uWMgxUS50c60zLl6QAELxHaCGba4WCMOHIo5nSKcUuYtDyDoDlrezALW5mZR4PRPRxnjrXxbJI14qrpymRReC3QgFDJp6sT5TLwvSHaavPlEbt2Eu0Kh5SXklGHXP9YuF3glGuJzSob3NakW1RXF5786U1MHhtJby64LyGWvNn4QXie3VjeL3QQu4C9crEAxSSiOJOfnL3DYIVOY4ipUkKFlF7Rp2q6gZazDvcUCp1cbcr7T7B4s22rXzjN7mHYWOyWuZGwlImeorY3aVKi7BaXbhgOFw6BUmIc1HeGFELHIEnPE9MwOjZam3LOm0rhBHlvJJZkXvJKmDUJrGlyqC5GtC5lDWLfXewyDWDqq7PY0atVQily5GWqib6wub6u6LZ3HZDNP8gK64Nf4kC259AE4V2hCohDnSsXAIoOkehwXyp6CkDT42NJb6sXHUv2N6cm292MiKA22PKWrwUGsan599KI2V67YRDfcfiB4ZHRDiSe62MBE0fGLIgXLIWw1xTWYbPQ9YAj3xovBvmewbJ1De4k6uS"); - b.bytes = data.len() as u64; - b.iter(|| { - let _out = keccak(&data); - }) -} diff --git a/util/hash/src/lib.rs b/util/hash/src/lib.rs deleted file mode 100644 index c54d7233c..000000000 --- a/util/hash/src/lib.rs +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -extern crate ethereum_types; -extern crate tiny_keccak; - -use std::io; -use std::slice; -use tiny_keccak::Keccak; - -pub use ethereum_types::H256; - -/// Get the KECCAK (i.e. Keccak) hash of the empty bytes string. -pub const KECCAK_EMPTY: H256 = H256( [0xc5, 0xd2, 0x46, 0x01, 0x86, 0xf7, 0x23, 0x3c, 0x92, 0x7e, 0x7d, 0xb2, 0xdc, 0xc7, 0x03, 0xc0, 0xe5, 0x00, 0xb6, 0x53, 0xca, 0x82, 0x27, 0x3b, 0x7b, 0xfa, 0xd8, 0x04, 0x5d, 0x85, 0xa4, 0x70] ); - -/// The KECCAK of the RLP encoding of empty data. -pub const KECCAK_NULL_RLP: H256 = H256( [0x56, 0xe8, 0x1f, 0x17, 0x1b, 0xcc, 0x55, 0xa6, 0xff, 0x83, 0x45, 0xe6, 0x92, 0xc0, 0xf8, 0x6e, 0x5b, 0x48, 0xe0, 0x1b, 0x99, 0x6c, 0xad, 0xc0, 0x01, 0x62, 0x2f, 0xb5, 0xe3, 0x63, 0xb4, 0x21] ); - -/// The KECCAK of the RLP encoding of empty list. 
-pub const KECCAK_EMPTY_LIST_RLP: H256 = H256( [0x1d, 0xcc, 0x4d, 0xe8, 0xde, 0xc7, 0x5d, 0x7a, 0xab, 0x85, 0xb5, 0x67, 0xb6, 0xcc, 0xd4, 0x1a, 0xd3, 0x12, 0x45, 0x1b, 0x94, 0x8a, 0x74, 0x13, 0xf0, 0xa1, 0x42, 0xfd, 0x40, 0xd4, 0x93, 0x47] ); - -pub fn keccak>(s: T) -> H256 { - let mut result = [0u8; 32]; - write_keccak(s, &mut result); - H256(result) -} - -pub unsafe fn keccak_256_unchecked(out: *mut u8, outlen: usize, input: *const u8, inputlen: usize) { - // This is safe since `keccak_*` uses an internal buffer and copies the result to the output. This - // means that we can reuse the input buffer for both input and output. - Keccak::keccak256( - slice::from_raw_parts(input, inputlen), - slice::from_raw_parts_mut(out, outlen) - ); -} - -pub unsafe fn keccak_512_unchecked(out: *mut u8, outlen: usize, input: *const u8, inputlen: usize) { - // This is safe since `keccak_*` uses an internal buffer and copies the result to the output. This - // means that we can reuse the input buffer for both input and output. - Keccak::keccak512( - slice::from_raw_parts(input, inputlen), - slice::from_raw_parts_mut(out, outlen) - ); -} - -pub fn keccak_256(input: &[u8], mut output: &mut [u8]) { Keccak::keccak256(input, &mut output); } - -pub fn keccak_512(input: &[u8], mut output: &mut [u8]) { Keccak::keccak512(input, &mut output); } - -pub fn write_keccak>(s: T, dest: &mut [u8]) { Keccak::keccak256(s.as_ref(), dest); } - -pub fn keccak_pipe(r: &mut io::BufRead, w: &mut io::Write) -> Result { - let mut output = [0u8; 32]; - let mut input = [0u8; 1024]; - let mut keccak = Keccak::new_keccak256(); - - // read file - loop { - let some = r.read(&mut input)?; - if some == 0 { - break; - } - keccak.update(&input[0..some]); - w.write_all(&input[0..some])?; - } - - keccak.finalize(&mut output); - Ok(output.into()) -} - -pub fn keccak_buffer(r: &mut io::BufRead) -> Result { - keccak_pipe(r, &mut io::sink()) -} - -#[cfg(test)] -mod tests { - extern crate tempdir; - - use std::fs; - use std::io::{Write, BufReader}; - use self::tempdir::TempDir; - use super::{keccak, write_keccak, keccak_buffer, KECCAK_EMPTY}; - - #[test] - fn keccak_empty() { - assert_eq!(keccak([0u8; 0]), KECCAK_EMPTY); - } - - #[test] - fn keccak_as() { - assert_eq!(keccak([0x41u8; 32]), From::from("59cad5948673622c1d64e2322488bf01619f7ff45789741b15a9f782ce9290a8")); - } - - #[test] - fn write_keccak_with_content() { - let data: Vec = From::from("hello world"); - let expected = vec![ - 0x47, 0x17, 0x32, 0x85, 0xa8, 0xd7, 0x34, 0x1e, - 0x5e, 0x97, 0x2f, 0xc6, 0x77, 0x28, 0x63, 0x84, - 0xf8, 0x02, 0xf8, 0xef, 0x42, 0xa5, 0xec, 0x5f, - 0x03, 0xbb, 0xfa, 0x25, 0x4c, 0xb0, 0x1f, 0xad - ]; - let mut dest = [0u8;32]; - write_keccak(data, &mut dest); - - assert_eq!(dest, expected.as_ref()); - } - - #[test] - fn should_keccak_a_file() { - // given - let tempdir = TempDir::new("keccak").unwrap(); - let mut path = tempdir.path().to_owned(); - path.push("should_keccak_a_file"); - // Prepare file - { - let mut file = fs::File::create(&path).unwrap(); - file.write_all(b"something").unwrap(); - } - - let mut file = BufReader::new(fs::File::open(&path).unwrap()); - // when - let hash = keccak_buffer(&mut file).unwrap(); - - // then - assert_eq!(format!("{:x}", hash), "68371d7e884c168ae2022c82bd837d51837718a7f7dfb7aa3f753074a35e1d87"); - } -} diff --git a/util/hashdb/Cargo.toml b/util/hashdb/Cargo.toml deleted file mode 100644 index f5e63fb1b..000000000 --- a/util/hashdb/Cargo.toml +++ /dev/null @@ -1,10 +0,0 @@ -[package] -name = "hashdb" -version = "0.2.0" 
-authors = ["Parity Technologies "] -description = "trait for hash-keyed databases." -license = "GPL-3.0" - -[dependencies] -elastic-array = "0.10" -heapsize = "0.4" \ No newline at end of file diff --git a/util/hashdb/src/lib.rs b/util/hashdb/src/lib.rs deleted file mode 100644 index 4f5bed48c..000000000 --- a/util/hashdb/src/lib.rs +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Database of byte-slices keyed to their hash. -extern crate elastic_array; -extern crate heapsize; - -use elastic_array::ElasticArray128; -use heapsize::HeapSizeOf; -use std::collections::HashMap; -use std::{fmt::Debug, hash::Hash}; - -/// Trait describing an object that can hash a slice of bytes. Used to abstract -/// other types over the hashing algorithm. Defines a single `hash` method and an -/// `Out` associated type with the necessary bounds. -pub trait Hasher: Sync + Send { - /// The output type of the `Hasher` - type Out: AsRef<[u8]> + AsMut<[u8]> + Default + HeapSizeOf + Debug + PartialEq + Eq + Hash + Send + Sync + Clone + Copy; - /// What to use to build `HashMap`s with this `Hasher` - type StdHasher: Sync + Send + Default + std::hash::Hasher; - /// The length in bytes of the `Hasher` output - const LENGTH: usize; - - /// Compute the hash of the provided slice of bytes returning the `Out` type of the `Hasher` - fn hash(x: &[u8]) -> Self::Out; -} - -/// `HashDB` value type. -pub type DBValue = ElasticArray128; - -/// Trait modelling datastore keyed by a hash defined by the `Hasher`. -pub trait HashDB: Send + Sync + AsHashDB { - /// Get the keys in the database together with number of underlying references. - fn keys(&self) -> HashMap; - - /// Look up a given hash into the bytes that hash to it, returning None if the - /// hash is not known. - fn get(&self, key: &H::Out) -> Option; - - /// Check for the existance of a hash-key. - fn contains(&self, key: &H::Out) -> bool; - - /// Insert a datum item into the DB and return the datum's hash for a later lookup. Insertions - /// are counted and the equivalent number of `remove()`s must be performed before the data - /// is considered dead. - fn insert(&mut self, value: &[u8]) -> H::Out; - - /// Like `insert()`, except you provide the key and the data is all moved. - fn emplace(&mut self, key: H::Out, value: DBValue); - - /// Remove a datum previously inserted. Insertions can be "owed" such that the same number of `insert()`s may - /// happen without the data being eventually being inserted into the DB. It can be "owed" more than once. - fn remove(&mut self, key: &H::Out); -} - -/// Upcast trait. -pub trait AsHashDB { - /// Perform upcast to HashDB for anything that derives from HashDB. - fn as_hashdb(&self) -> &HashDB; - /// Perform mutable upcast to HashDB for anything that derives from HashDB. 
- fn as_hashdb_mut(&mut self) -> &mut HashDB; -} - -// NOTE: There used to be a `impl AsHashDB for T` but that does not work with generics. See https://stackoverflow.com/questions/48432842/implementing-a-trait-for-reference-and-non-reference-types-causes-conflicting-im -// This means we need concrete impls of AsHashDB in several places, which somewhat defeats the point of the trait. -impl<'a, H: Hasher> AsHashDB for &'a mut HashDB { - fn as_hashdb(&self) -> &HashDB { &**self } - fn as_hashdb_mut(&mut self) -> &mut HashDB { &mut **self } -} - diff --git a/util/journaldb/Cargo.toml b/util/journaldb/Cargo.toml index bd6c83415..27b0ae195 100644 --- a/util/journaldb/Cargo.toml +++ b/util/journaldb/Cargo.toml @@ -6,19 +6,19 @@ description = "A `HashDB` which can manage a short-term journal potentially cont license = "GPL3" [dependencies] -ethcore-bytes = { path = "../bytes" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethereum-types = "0.3" -hashdb = { version = "0.2.0", path = "../hashdb" } +hashdb = { git = "https://github.com/paritytech/parity-common" } heapsize = "0.4" keccak-hasher = { path = "../keccak-hasher" } -kvdb = { path = "../kvdb" } +kvdb = { git = "https://github.com/paritytech/parity-common" } log = "0.3" -memorydb = { version = "0.2.0", path = "../memorydb" } +memorydb = { git = "https://github.com/paritytech/parity-common" } parking_lot = "0.6" -plain_hasher = { path = "../plain_hasher" } -rlp = { path = "../rlp" } +plain_hasher = { git = "https://github.com/paritytech/parity-common" } +rlp = { git = "https://github.com/paritytech/parity-common" } [dev-dependencies] ethcore-logger = { path = "../../logger" } -keccak-hash = { path = "../hash" } -kvdb-memorydb = { path = "../kvdb-memorydb" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } +kvdb-memorydb = { git = "https://github.com/paritytech/parity-common" } diff --git a/util/journaldb/src/lib.rs b/util/journaldb/src/lib.rs index 5b2381f2d..b14ef88e9 100644 --- a/util/journaldb/src/lib.rs +++ b/util/journaldb/src/lib.rs @@ -21,7 +21,7 @@ extern crate heapsize; extern crate log; extern crate ethereum_types; -extern crate ethcore_bytes as bytes; +extern crate parity_bytes as bytes; extern crate hashdb; extern crate keccak_hasher; extern crate kvdb; diff --git a/util/keccak-hasher/Cargo.toml b/util/keccak-hasher/Cargo.toml index e7c8e950e..0367b1767 100644 --- a/util/keccak-hasher/Cargo.toml +++ b/util/keccak-hasher/Cargo.toml @@ -8,5 +8,5 @@ license = "GPL-3.0" [dependencies] ethereum-types = "0.3" tiny-keccak = "1.4.2" -hashdb = { path = "../hashdb" } -plain_hasher = { path = "../plain_hasher" } \ No newline at end of file +hashdb = { git = "https://github.com/paritytech/parity-common" } +plain_hasher = { git = "https://github.com/paritytech/parity-common" } \ No newline at end of file diff --git a/util/kvdb-memorydb/Cargo.toml b/util/kvdb-memorydb/Cargo.toml deleted file mode 100644 index a70b1e3da..000000000 --- a/util/kvdb-memorydb/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "kvdb-memorydb" -version = "0.1.0" -authors = ["Parity Technologies "] - -[dependencies] -parking_lot = "0.6" -kvdb = { path = "../kvdb" } diff --git a/util/kvdb-memorydb/src/lib.rs b/util/kvdb-memorydb/src/lib.rs deleted file mode 100644 index 7a4590632..000000000 --- a/util/kvdb-memorydb/src/lib.rs +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. 
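Both keccak-hash and hashdb above are replaced by their parity-common counterparts; hashdb is trait-only, so the sketch below covers the keccak-hash functions instead, assuming the published crate matches the deleted `util/hash` source:

    extern crate keccak_hash;

    use keccak_hash::{keccak, keccak_buffer, write_keccak, KECCAK_EMPTY};
    use std::io::BufReader;

    fn main() {
        // Hashing the empty byte string yields the well-known constant.
        assert_eq!(keccak([0u8; 0]), KECCAK_EMPTY);

        // `write_keccak` fills a caller-provided 32-byte buffer instead of
        // allocating a fresh H256.
        let mut out = [0u8; 32];
        write_keccak(b"hello world", &mut out);
        assert_eq!(keccak(b"hello world").0, out);

        // `keccak_buffer` hashes any `BufRead` incrementally; the removed
        // `should_keccak_a_file` test exercised the same path with a temp file.
        let mut reader = BufReader::new(&b"something"[..]);
        let hash = keccak_buffer(&mut reader).unwrap();
        assert_eq!(
            format!("{:x}", hash),
            "68371d7e884c168ae2022c82bd837d51837718a7f7dfb7aa3f753074a35e1d87"
        );
    }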
- -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -extern crate parking_lot; -extern crate kvdb; - -use std::collections::{BTreeMap, HashMap}; -use std::io; -use parking_lot::RwLock; -use kvdb::{DBValue, DBTransaction, KeyValueDB, DBOp}; - -/// A key-value database fulfilling the `KeyValueDB` trait, living in memory. -/// This is generally intended for tests and is not particularly optimized. -#[derive(Default)] -pub struct InMemory { - columns: RwLock, BTreeMap, DBValue>>>, -} - -/// Create an in-memory database with the given number of columns. -/// Columns will be indexable by 0..`num_cols` -pub fn create(num_cols: u32) -> InMemory { - let mut cols = HashMap::new(); - cols.insert(None, BTreeMap::new()); - - for idx in 0..num_cols { - cols.insert(Some(idx), BTreeMap::new()); - } - - InMemory { - columns: RwLock::new(cols) - } -} - -impl KeyValueDB for InMemory { - fn get(&self, col: Option, key: &[u8]) -> io::Result> { - let columns = self.columns.read(); - match columns.get(&col) { - None => Err(io::Error::new(io::ErrorKind::Other, format!("No such column family: {:?}", col))), - Some(map) => Ok(map.get(key).cloned()), - } - } - - fn get_by_prefix(&self, col: Option, prefix: &[u8]) -> Option> { - let columns = self.columns.read(); - match columns.get(&col) { - None => None, - Some(map) => - map.iter() - .find(|&(ref k ,_)| k.starts_with(prefix)) - .map(|(_, v)| v.to_vec().into_boxed_slice()) - } - } - - fn write_buffered(&self, transaction: DBTransaction) { - let mut columns = self.columns.write(); - let ops = transaction.ops; - for op in ops { - match op { - DBOp::Insert { col, key, value } => { - if let Some(col) = columns.get_mut(&col) { - col.insert(key.into_vec(), value); - } - }, - DBOp::Delete { col, key } => { - if let Some(col) = columns.get_mut(&col) { - col.remove(&*key); - } - }, - } - } - } - - fn flush(&self) -> io::Result<()> { - Ok(()) - } - - fn iter<'a>(&'a self, col: Option) -> Box, Box<[u8]>)> + 'a> { - match self.columns.read().get(&col) { - Some(map) => Box::new( // TODO: worth optimizing at all? 
- map.clone() - .into_iter() - .map(|(k, v)| (k.into_boxed_slice(), v.into_vec().into_boxed_slice())) - ), - None => Box::new(None.into_iter()), - } - } - - fn iter_from_prefix<'a>(&'a self, col: Option, prefix: &'a [u8]) - -> Box, Box<[u8]>)> + 'a> - { - match self.columns.read().get(&col) { - Some(map) => Box::new( - map.clone() - .into_iter() - .skip_while(move |&(ref k, _)| !k.starts_with(prefix)) - .map(|(k, v)| (k.into_boxed_slice(), v.into_vec().into_boxed_slice())) - ), - None => Box::new(None.into_iter()), - } - } - - fn restore(&self, _new_db: &str) -> io::Result<()> { - Err(io::Error::new(io::ErrorKind::Other, "Attempted to restore in-memory database")) - } -} diff --git a/util/kvdb-rocksdb/Cargo.toml b/util/kvdb-rocksdb/Cargo.toml deleted file mode 100644 index c580502f5..000000000 --- a/util/kvdb-rocksdb/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "kvdb-rocksdb" -version = "0.1.0" -authors = ["Parity Technologies "] - -[dependencies] -elastic-array = "0.10" -ethereum-types = "0.3" -fs-swap = "0.2.1" -interleaved-ordered = "0.1.0" -kvdb = { path = "../kvdb" } -log = "0.3" -num_cpus = "1.0" -parking_lot = "0.6" -regex = "0.2" -rocksdb = { git = "https://github.com/paritytech/rust-rocksdb" } - -[dev-dependencies] -tempdir = "0.3" diff --git a/util/kvdb-rocksdb/src/lib.rs b/util/kvdb-rocksdb/src/lib.rs deleted file mode 100644 index 99e7c0564..000000000 --- a/util/kvdb-rocksdb/src/lib.rs +++ /dev/null @@ -1,864 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . 
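Downstream test code keeps creating the in-memory backend exactly as before, just from the parity-common crates. A minimal sketch, assuming kvdb-memorydb still exposes the free `create` constructor and the `KeyValueDB` trait shown in the deleted source (the column layout here is only an example):

    extern crate kvdb;
    extern crate kvdb_memorydb;

    use kvdb::KeyValueDB;

    fn main() {
        // Two indexable columns (Some(0), Some(1)) plus the default `None` column.
        let db = kvdb_memorydb::create(2);

        // Writes are batched into a DBTransaction, the same as with the
        // RocksDB-backed implementation.
        let mut tx = db.transaction();
        tx.put(Some(0), b"key", b"value");
        tx.put(None, b"other", b"data");
        db.write(tx).unwrap();

        assert_eq!(&*db.get(Some(0), b"key").unwrap().unwrap(), b"value");
        assert!(db.get(Some(1), b"key").unwrap().is_none());

        // Prefix lookups only see flushed data, which `write` guarantees here.
        let found = db.get_by_prefix(None, b"oth").unwrap();
        assert_eq!(&*found, b"data");
    }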
- -#[macro_use] -extern crate log; - -extern crate elastic_array; -extern crate fs_swap; -extern crate interleaved_ordered; -extern crate num_cpus; -extern crate parking_lot; -extern crate regex; -extern crate rocksdb; - -extern crate ethereum_types; -extern crate kvdb; - -use std::collections::HashMap; -use std::marker::PhantomData; -use std::{cmp, fs, io, mem, result, error}; -use std::path::Path; - -use parking_lot::{Mutex, MutexGuard, RwLock}; -use rocksdb::{ - DB, Writable, WriteBatch, WriteOptions, IteratorMode, DBIterator, - Options, BlockBasedOptions, Direction, Cache, Column, ReadOptions -}; -use interleaved_ordered::{interleave_ordered, InterleaveOrdered}; - -use elastic_array::ElasticArray32; -use fs_swap::{swap, swap_nonatomic}; -use kvdb::{KeyValueDB, DBTransaction, DBValue, DBOp}; - -#[cfg(target_os = "linux")] -use regex::Regex; -#[cfg(target_os = "linux")] -use std::process::Command; -#[cfg(target_os = "linux")] -use std::fs::File; -#[cfg(target_os = "linux")] -use std::path::PathBuf; - -fn other_io_err(e: E) -> io::Error where E: Into> { - io::Error::new(io::ErrorKind::Other, e) -} - -const DB_DEFAULT_MEMORY_BUDGET_MB: usize = 128; - -enum KeyState { - Insert(DBValue), - Delete, -} - -/// Compaction profile for the database settings -#[derive(Clone, Copy, PartialEq, Debug)] -pub struct CompactionProfile { - /// L0-L1 target file size - pub initial_file_size: u64, - /// block size - pub block_size: usize, - /// rate limiter for background flushes and compactions, bytes/sec, if any - pub write_rate_limit: Option, -} - -impl Default for CompactionProfile { - /// Default profile suitable for most storage - fn default() -> CompactionProfile { - CompactionProfile::ssd() - } -} - -/// Given output of df command return Linux rotational flag file path. -#[cfg(target_os = "linux")] -pub fn rotational_from_df_output(df_out: Vec) -> Option { - use std::str; - str::from_utf8(df_out.as_slice()) - .ok() - // Get the drive name. - .and_then(|df_str| Regex::new(r"/dev/(sd[:alpha:]{1,2})") - .ok() - .and_then(|re| re.captures(df_str)) - .and_then(|captures| captures.get(1))) - // Generate path e.g. /sys/block/sda/queue/rotational - .map(|drive_path| { - let mut p = PathBuf::from("/sys/block"); - p.push(drive_path.as_str()); - p.push("queue/rotational"); - p - }) -} - -impl CompactionProfile { - /// Attempt to determine the best profile automatically, only Linux for now. - #[cfg(target_os = "linux")] - pub fn auto(db_path: &Path) -> CompactionProfile { - use std::io::Read; - let hdd_check_file = db_path - .to_str() - .and_then(|path_str| Command::new("df").arg(path_str).output().ok()) - .and_then(|df_res| match df_res.status.success() { - true => Some(df_res.stdout), - false => None, - }) - .and_then(rotational_from_df_output); - // Read out the file and match compaction profile. - if let Some(hdd_check) = hdd_check_file { - if let Ok(mut file) = File::open(hdd_check.as_path()) { - let mut buffer = [0; 1]; - if file.read_exact(&mut buffer).is_ok() { - // 0 means not rotational. - if buffer == [48] { return Self::ssd(); } - // 1 means rotational. - if buffer == [49] { return Self::hdd(); } - } - } - } - // Fallback if drive type was not determined. - Self::default() - } - - /// Just default for other platforms. 
- #[cfg(not(target_os = "linux"))] - pub fn auto(_db_path: &Path) -> CompactionProfile { - Self::default() - } - - /// Default profile suitable for SSD storage - pub fn ssd() -> CompactionProfile { - CompactionProfile { - initial_file_size: 64 * 1024 * 1024, - block_size: 16 * 1024, - write_rate_limit: None, - } - } - - /// Slow HDD compaction profile - pub fn hdd() -> CompactionProfile { - CompactionProfile { - initial_file_size: 256 * 1024 * 1024, - block_size: 64 * 1024, - write_rate_limit: Some(16 * 1024 * 1024), - } - } -} - -/// Database configuration -#[derive(Clone)] -pub struct DatabaseConfig { - /// Max number of open files. - pub max_open_files: i32, - /// Memory budget (in MiB) used for setting block cache size, write buffer size. - pub memory_budget: Option, - /// Compaction profile - pub compaction: CompactionProfile, - /// Set number of columns - pub columns: Option, - /// Should we keep WAL enabled? - pub wal: bool, -} - -impl DatabaseConfig { - /// Create new `DatabaseConfig` with default parameters and specified set of columns. - /// Note that cache sizes must be explicitly set. - pub fn with_columns(columns: Option) -> Self { - let mut config = Self::default(); - config.columns = columns; - config - } - - pub fn memory_budget(&self) -> usize { - self.memory_budget.unwrap_or(DB_DEFAULT_MEMORY_BUDGET_MB) * 1024 * 1024 - } - - pub fn memory_budget_per_col(&self) -> usize { - self.memory_budget() / self.columns.unwrap_or(1) as usize - } -} - -impl Default for DatabaseConfig { - fn default() -> DatabaseConfig { - DatabaseConfig { - max_open_files: 512, - memory_budget: None, - compaction: CompactionProfile::default(), - columns: None, - wal: true, - } - } -} - -/// Database iterator (for flushed data only) -// The compromise of holding only a virtual borrow vs. holding a lock on the -// inner DB (to prevent closing via restoration) may be re-evaluated in the future. -// -pub struct DatabaseIterator<'a> { - iter: InterleaveOrdered<::std::vec::IntoIter<(Box<[u8]>, Box<[u8]>)>, DBIterator>, - _marker: PhantomData<&'a Database>, -} - -impl<'a> Iterator for DatabaseIterator<'a> { - type Item = (Box<[u8]>, Box<[u8]>); - - fn next(&mut self) -> Option { - self.iter.next() - } -} - -struct DBAndColumns { - db: DB, - cfs: Vec, -} - -// get column family configuration from database config. -fn col_config(config: &DatabaseConfig, block_opts: &BlockBasedOptions) -> io::Result { - let mut opts = Options::new(); - - opts.set_parsed_options("level_compaction_dynamic_level_bytes=true").map_err(other_io_err)?; - - opts.set_block_based_table_factory(block_opts); - - opts.set_parsed_options( - &format!("block_based_table_factory={{{};{}}}", - "cache_index_and_filter_blocks=true", - "pin_l0_filter_and_index_blocks_in_cache=true")).map_err(other_io_err)?; - - opts.optimize_level_style_compaction(config.memory_budget_per_col() as i32); - opts.set_target_file_size_base(config.compaction.initial_file_size); - - opts.set_parsed_options("compression_per_level=").map_err(other_io_err)?; - - Ok(opts) -} - -/// Key-Value database. -pub struct Database { - db: RwLock>, - config: DatabaseConfig, - write_opts: WriteOptions, - read_opts: ReadOptions, - block_opts: BlockBasedOptions, - path: String, - // Dirty values added with `write_buffered`. Cleaned on `flush`. - overlay: RwLock, KeyState>>>, - // Values currently being flushed. Cleared when `flush` completes. - flushing: RwLock, KeyState>>>, - // Prevents concurrent flushes. - // Value indicates if a flush is in progress. 
- flushing_lock: Mutex, -} - -#[inline] -fn check_for_corruption>(path: P, res: result::Result) -> io::Result { - if let Err(ref s) = res { - if s.starts_with("Corruption:") { - warn!("DB corrupted: {}. Repair will be triggered on next restart", s); - let _ = fs::File::create(path.as_ref().join(Database::CORRUPTION_FILE_NAME)); - } - } - - res.map_err(other_io_err) -} - -fn is_corrupted(s: &str) -> bool { - s.starts_with("Corruption:") || s.starts_with("Invalid argument: You have to open all column families") -} - -impl Database { - const CORRUPTION_FILE_NAME: &'static str = "CORRUPTED"; - - /// Open database with default settings. - pub fn open_default(path: &str) -> io::Result { - Database::open(&DatabaseConfig::default(), path) - } - - /// Open database file. Creates if it does not exist. - pub fn open(config: &DatabaseConfig, path: &str) -> io::Result { - let mut opts = Options::new(); - - if let Some(rate_limit) = config.compaction.write_rate_limit { - opts.set_parsed_options(&format!("rate_limiter_bytes_per_sec={}", rate_limit)).map_err(other_io_err)?; - } - opts.set_use_fsync(false); - opts.create_if_missing(true); - opts.set_max_open_files(config.max_open_files); - opts.set_parsed_options("keep_log_file_num=1").map_err(other_io_err)?; - opts.set_parsed_options("bytes_per_sync=1048576").map_err(other_io_err)?; - opts.set_db_write_buffer_size(config.memory_budget_per_col() / 2); - opts.increase_parallelism(cmp::max(1, ::num_cpus::get() as i32 / 2)); - - let mut block_opts = BlockBasedOptions::new(); - - { - block_opts.set_block_size(config.compaction.block_size); - let cache_size = cmp::max(8, config.memory_budget() / 3); - let cache = Cache::new(cache_size); - block_opts.set_cache(cache); - } - - // attempt database repair if it has been previously marked as corrupted - let db_corrupted = Path::new(path).join(Database::CORRUPTION_FILE_NAME); - if db_corrupted.exists() { - warn!("DB has been previously marked as corrupted, attempting repair"); - DB::repair(&opts, path).map_err(other_io_err)?; - fs::remove_file(db_corrupted)?; - } - - let columns = config.columns.unwrap_or(0) as usize; - - let mut cf_options = Vec::with_capacity(columns); - let cfnames: Vec<_> = (0..columns).map(|c| format!("col{}", c)).collect(); - let cfnames: Vec<&str> = cfnames.iter().map(|n| n as &str).collect(); - - for _ in 0 .. 
config.columns.unwrap_or(0) { - cf_options.push(col_config(&config, &block_opts)?); - } - - let mut write_opts = WriteOptions::new(); - if !config.wal { - write_opts.disable_wal(true); - } - let mut read_opts = ReadOptions::new(); - read_opts.set_verify_checksums(false); - - let mut cfs: Vec = Vec::new(); - let db = match config.columns { - Some(_) => { - match DB::open_cf(&opts, path, &cfnames, &cf_options) { - Ok(db) => { - cfs = cfnames.iter().map(|n| db.cf_handle(n) - .expect("rocksdb opens a cf_handle for each cfname; qed")).collect(); - Ok(db) - } - Err(_) => { - // retry and create CFs - match DB::open_cf(&opts, path, &[], &[]) { - Ok(mut db) => { - cfs = cfnames.iter() - .enumerate() - .map(|(i, n)| db.create_cf(n, &cf_options[i])) - .collect::<::std::result::Result<_, _>>() - .map_err(other_io_err)?; - Ok(db) - }, - err => err, - } - } - } - }, - None => DB::open(&opts, path) - }; - - let db = match db { - Ok(db) => db, - Err(ref s) if is_corrupted(s) => { - warn!("DB corrupted: {}, attempting repair", s); - DB::repair(&opts, path).map_err(other_io_err)?; - - match cfnames.is_empty() { - true => DB::open(&opts, path).map_err(other_io_err)?, - false => { - let db = DB::open_cf(&opts, path, &cfnames, &cf_options).map_err(other_io_err)?; - cfs = cfnames.iter().map(|n| db.cf_handle(n) - .expect("rocksdb opens a cf_handle for each cfname; qed")).collect(); - db - }, - } - }, - Err(s) => { - return Err(other_io_err(s)) - } - }; - let num_cols = cfs.len(); - Ok(Database { - db: RwLock::new(Some(DBAndColumns{ db: db, cfs: cfs })), - config: config.clone(), - write_opts: write_opts, - overlay: RwLock::new((0..(num_cols + 1)).map(|_| HashMap::new()).collect()), - flushing: RwLock::new((0..(num_cols + 1)).map(|_| HashMap::new()).collect()), - flushing_lock: Mutex::new(false), - path: path.to_owned(), - read_opts: read_opts, - block_opts: block_opts, - }) - } - - /// Helper to create new transaction for this database. - pub fn transaction(&self) -> DBTransaction { - DBTransaction::new() - } - - fn to_overlay_column(col: Option) -> usize { - col.map_or(0, |c| (c + 1) as usize) - } - - /// Commit transaction to database. - pub fn write_buffered(&self, tr: DBTransaction) { - let mut overlay = self.overlay.write(); - let ops = tr.ops; - for op in ops { - match op { - DBOp::Insert { col, key, value } => { - let c = Self::to_overlay_column(col); - overlay[c].insert(key, KeyState::Insert(value)); - }, - DBOp::Delete { col, key } => { - let c = Self::to_overlay_column(col); - overlay[c].insert(key, KeyState::Delete); - }, - } - }; - } - - /// Commit buffered changes to database. 
Must be called under `flush_lock` - fn write_flushing_with_lock(&self, _lock: &mut MutexGuard) -> io::Result<()> { - match *self.db.read() { - Some(DBAndColumns { ref db, ref cfs }) => { - let batch = WriteBatch::new(); - mem::swap(&mut *self.overlay.write(), &mut *self.flushing.write()); - { - for (c, column) in self.flushing.read().iter().enumerate() { - for (key, state) in column.iter() { - match *state { - KeyState::Delete => { - if c > 0 { - batch.delete_cf(cfs[c - 1], key).map_err(other_io_err)?; - } else { - batch.delete(key).map_err(other_io_err)?; - } - }, - KeyState::Insert(ref value) => { - if c > 0 { - batch.put_cf(cfs[c - 1], key, value).map_err(other_io_err)?; - } else { - batch.put(key, value).map_err(other_io_err)?; - } - }, - } - } - } - } - - check_for_corruption( - &self.path, - db.write_opt(batch, &self.write_opts))?; - - for column in self.flushing.write().iter_mut() { - column.clear(); - column.shrink_to_fit(); - } - Ok(()) - }, - None => Err(other_io_err("Database is closed")) - } - } - - /// Commit buffered changes to database. - pub fn flush(&self) -> io::Result<()> { - let mut lock = self.flushing_lock.lock(); - // If RocksDB batch allocation fails the thread gets terminated and the lock is released. - // The value inside the lock is used to detect that. - if *lock { - // This can only happen if another flushing thread is terminated unexpectedly. - return Err(other_io_err("Database write failure. Running low on memory perhaps?")) - } - *lock = true; - let result = self.write_flushing_with_lock(&mut lock); - *lock = false; - result - } - - /// Commit transaction to database. - pub fn write(&self, tr: DBTransaction) -> io::Result<()> { - match *self.db.read() { - Some(DBAndColumns { ref db, ref cfs }) => { - let batch = WriteBatch::new(); - let ops = tr.ops; - for op in ops { - // remove any buffered operation for this key - self.overlay.write()[Self::to_overlay_column(op.col())].remove(op.key()); - - match op { - DBOp::Insert { col, key, value } => match col { - None => batch.put(&key, &value).map_err(other_io_err)?, - Some(c) => batch.put_cf(cfs[c as usize], &key, &value).map_err(other_io_err)?, - }, - DBOp::Delete { col, key } => match col { - None => batch.delete(&key).map_err(other_io_err)?, - Some(c) => batch.delete_cf(cfs[c as usize], &key).map_err(other_io_err)?, - } - } - } - - check_for_corruption(&self.path, db.write_opt(batch, &self.write_opts)) - }, - None => Err(other_io_err("Database is closed")), - } - } - - /// Get value by key. - pub fn get(&self, col: Option, key: &[u8]) -> io::Result> { - match *self.db.read() { - Some(DBAndColumns { ref db, ref cfs }) => { - let overlay = &self.overlay.read()[Self::to_overlay_column(col)]; - match overlay.get(key) { - Some(&KeyState::Insert(ref value)) => Ok(Some(value.clone())), - Some(&KeyState::Delete) => Ok(None), - None => { - let flushing = &self.flushing.read()[Self::to_overlay_column(col)]; - match flushing.get(key) { - Some(&KeyState::Insert(ref value)) => Ok(Some(value.clone())), - Some(&KeyState::Delete) => Ok(None), - None => { - col.map_or_else( - || db.get_opt(key, &self.read_opts).map(|r| r.map(|v| DBValue::from_slice(&v))), - |c| db.get_cf_opt(cfs[c as usize], key, &self.read_opts).map(|r| r.map(|v| DBValue::from_slice(&v)))) - .map_err(other_io_err) - }, - } - }, - } - }, - None => Ok(None), - } - } - - /// Get value by partial key. Prefix size should match configured prefix size. Only searches flushed values. 
- // TODO: support prefix seek for unflushed data - pub fn get_by_prefix(&self, col: Option, prefix: &[u8]) -> Option> { - self.iter_from_prefix(col, prefix).and_then(|mut iter| { - match iter.next() { - // TODO: use prefix_same_as_start read option (not availabele in C API currently) - Some((k, v)) => if k[0 .. prefix.len()] == prefix[..] { Some(v) } else { None }, - _ => None - } - }) - } - - /// Get database iterator for flushed data. - pub fn iter(&self, col: Option) -> Option { - match *self.db.read() { - Some(DBAndColumns { ref db, ref cfs }) => { - let overlay = &self.overlay.read()[Self::to_overlay_column(col)]; - let mut overlay_data = overlay.iter() - .filter_map(|(k, v)| match *v { - KeyState::Insert(ref value) => - Some((k.clone().into_vec().into_boxed_slice(), value.clone().into_vec().into_boxed_slice())), - KeyState::Delete => None, - }).collect::>(); - overlay_data.sort(); - - let iter = col.map_or_else( - || db.iterator_opt(IteratorMode::Start, &self.read_opts), - |c| db.iterator_cf_opt(cfs[c as usize], IteratorMode::Start, &self.read_opts) - .expect("iterator params are valid; qed") - ); - - Some(DatabaseIterator { - iter: interleave_ordered(overlay_data, iter), - _marker: PhantomData, - }) - }, - None => None, - } - } - - fn iter_from_prefix(&self, col: Option, prefix: &[u8]) -> Option { - match *self.db.read() { - Some(DBAndColumns { ref db, ref cfs }) => { - let iter = col.map_or_else(|| db.iterator_opt(IteratorMode::From(prefix, Direction::Forward), &self.read_opts), - |c| db.iterator_cf_opt(cfs[c as usize], IteratorMode::From(prefix, Direction::Forward), &self.read_opts) - .expect("iterator params are valid; qed")); - - Some(DatabaseIterator { - iter: interleave_ordered(Vec::new(), iter), - _marker: PhantomData, - }) - }, - None => None, - } - } - - /// Close the database - fn close(&self) { - *self.db.write() = None; - self.overlay.write().clear(); - self.flushing.write().clear(); - } - - /// Restore the database from a copy at given path. - pub fn restore(&self, new_db: &str) -> io::Result<()> { - self.close(); - - // swap is guaranteed to be atomic - match swap(new_db, &self.path) { - Ok(_) => { - // ignore errors - let _ = fs::remove_dir_all(new_db); - }, - Err(err) => { - warn!("DB atomic swap failed: {}", err); - match swap_nonatomic(new_db, &self.path) { - Ok(_) => { - // ignore errors - let _ = fs::remove_dir_all(new_db); - }, - Err(err) => { - warn!("DB nonatomic atomic swap failed: {}", err); - return Err(err.into()); - } - } - } - } - - // reopen the database and steal handles into self - let db = Self::open(&self.config, &self.path)?; - *self.db.write() = mem::replace(&mut *db.db.write(), None); - *self.overlay.write() = mem::replace(&mut *db.overlay.write(), Vec::new()); - *self.flushing.write() = mem::replace(&mut *db.flushing.write(), Vec::new()); - Ok(()) - } - - /// The number of non-default column families. - pub fn num_columns(&self) -> u32 { - self.db.read().as_ref() - .and_then(|db| if db.cfs.is_empty() { None } else { Some(db.cfs.len()) } ) - .map(|n| n as u32) - .unwrap_or(0) - } - - /// Drop a column family. - pub fn drop_column(&self) -> io::Result<()> { - match *self.db.write() { - Some(DBAndColumns { ref mut db, ref mut cfs }) => { - if let Some(col) = cfs.pop() { - let name = format!("col{}", cfs.len()); - drop(col); - db.drop_cf(&name).map_err(other_io_err)?; - } - Ok(()) - }, - None => Ok(()), - } - } - - /// Add a column family. 
- pub fn add_column(&self) -> io::Result<()> { - match *self.db.write() { - Some(DBAndColumns { ref mut db, ref mut cfs }) => { - let col = cfs.len() as u32; - let name = format!("col{}", col); - cfs.push(db.create_cf(&name, &col_config(&self.config, &self.block_opts)?).map_err(other_io_err)?); - Ok(()) - }, - None => Ok(()), - } - } -} - -// duplicate declaration of methods here to avoid trait import in certain existing cases -// at time of addition. -impl KeyValueDB for Database { - fn get(&self, col: Option, key: &[u8]) -> io::Result> { - Database::get(self, col, key) - } - - fn get_by_prefix(&self, col: Option, prefix: &[u8]) -> Option> { - Database::get_by_prefix(self, col, prefix) - } - - fn write_buffered(&self, transaction: DBTransaction) { - Database::write_buffered(self, transaction) - } - - fn write(&self, transaction: DBTransaction) -> io::Result<()> { - Database::write(self, transaction) - } - - fn flush(&self) -> io::Result<()> { - Database::flush(self) - } - - fn iter<'a>(&'a self, col: Option) -> Box, Box<[u8]>)> + 'a> { - let unboxed = Database::iter(self, col); - Box::new(unboxed.into_iter().flat_map(|inner| inner)) - } - - fn iter_from_prefix<'a>(&'a self, col: Option, prefix: &'a [u8]) - -> Box, Box<[u8]>)> + 'a> - { - let unboxed = Database::iter_from_prefix(self, col, prefix); - Box::new(unboxed.into_iter().flat_map(|inner| inner)) - } - - fn restore(&self, new_db: &str) -> io::Result<()> { - Database::restore(self, new_db) - } -} - -impl Drop for Database { - fn drop(&mut self) { - // write all buffered changes if we can. - let _ = self.flush(); - } -} - -#[cfg(test)] -mod tests { - extern crate tempdir; - - use std::str::FromStr; - use self::tempdir::TempDir; - use ethereum_types::H256; - use super::*; - - fn test_db(config: &DatabaseConfig) { - let tempdir = TempDir::new("").unwrap(); - let db = Database::open(config, tempdir.path().to_str().unwrap()).unwrap(); - let key1 = H256::from_str("02c69be41d0b7e40352fc85be1cd65eb03d40ef8427a0ca4596b1ead9a00e9fc").unwrap(); - let key2 = H256::from_str("03c69be41d0b7e40352fc85be1cd65eb03d40ef8427a0ca4596b1ead9a00e9fc").unwrap(); - let key3 = H256::from_str("01c69be41d0b7e40352fc85be1cd65eb03d40ef8427a0ca4596b1ead9a00e9fc").unwrap(); - - let mut batch = db.transaction(); - batch.put(None, &key1, b"cat"); - batch.put(None, &key2, b"dog"); - db.write(batch).unwrap(); - - assert_eq!(&*db.get(None, &key1).unwrap().unwrap(), b"cat"); - - let contents: Vec<_> = db.iter(None).into_iter().flat_map(|inner| inner).collect(); - assert_eq!(contents.len(), 2); - assert_eq!(&*contents[0].0, &*key1); - assert_eq!(&*contents[0].1, b"cat"); - assert_eq!(&*contents[1].0, &*key2); - assert_eq!(&*contents[1].1, b"dog"); - - let mut batch = db.transaction(); - batch.delete(None, &key1); - db.write(batch).unwrap(); - - assert!(db.get(None, &key1).unwrap().is_none()); - - let mut batch = db.transaction(); - batch.put(None, &key1, b"cat"); - db.write(batch).unwrap(); - - let mut transaction = db.transaction(); - transaction.put(None, &key3, b"elephant"); - transaction.delete(None, &key1); - db.write(transaction).unwrap(); - assert!(db.get(None, &key1).unwrap().is_none()); - assert_eq!(&*db.get(None, &key3).unwrap().unwrap(), b"elephant"); - - assert_eq!(&*db.get_by_prefix(None, &key3).unwrap(), b"elephant"); - assert_eq!(&*db.get_by_prefix(None, &key2).unwrap(), b"dog"); - - let mut transaction = db.transaction(); - transaction.put(None, &key1, b"horse"); - transaction.delete(None, &key3); - db.write_buffered(transaction); - assert!(db.get(None, 
&key3).unwrap().is_none()); - assert_eq!(&*db.get(None, &key1).unwrap().unwrap(), b"horse"); - - db.flush().unwrap(); - assert!(db.get(None, &key3).unwrap().is_none()); - assert_eq!(&*db.get(None, &key1).unwrap().unwrap(), b"horse"); - } - - #[test] - fn kvdb() { - let tempdir = TempDir::new("").unwrap(); - let _ = Database::open_default(tempdir.path().to_str().unwrap()).unwrap(); - test_db(&DatabaseConfig::default()); - } - - #[test] - #[cfg(target_os = "linux")] - fn df_to_rotational() { - use std::path::PathBuf; - // Example df output. - let example_df = vec![70, 105, 108, 101, 115, 121, 115, 116, 101, 109, 32, 32, 32, 32, 32, 49, 75, 45, 98, 108, 111, 99, 107, 115, 32, 32, 32, 32, 32, 85, 115, 101, 100, 32, 65, 118, 97, 105, 108, 97, 98, 108, 101, 32, 85, 115, 101, 37, 32, 77, 111, 117, 110, 116, 101, 100, 32, 111, 110, 10, 47, 100, 101, 118, 47, 115, 100, 97, 49, 32, 32, 32, 32, 32, 32, 32, 54, 49, 52, 48, 57, 51, 48, 48, 32, 51, 56, 56, 50, 50, 50, 51, 54, 32, 32, 49, 57, 52, 52, 52, 54, 49, 54, 32, 32, 54, 55, 37, 32, 47, 10]; - let expected_output = Some(PathBuf::from("/sys/block/sda/queue/rotational")); - assert_eq!(rotational_from_df_output(example_df), expected_output); - } - - #[test] - fn add_columns() { - let config = DatabaseConfig::default(); - let config_5 = DatabaseConfig::with_columns(Some(5)); - - let tempdir = TempDir::new("").unwrap(); - - // open empty, add 5. - { - let db = Database::open(&config, tempdir.path().to_str().unwrap()).unwrap(); - assert_eq!(db.num_columns(), 0); - - for i in 0..5 { - db.add_column().unwrap(); - assert_eq!(db.num_columns(), i + 1); - } - } - - // reopen as 5. - { - let db = Database::open(&config_5, tempdir.path().to_str().unwrap()).unwrap(); - assert_eq!(db.num_columns(), 5); - } - } - - #[test] - fn drop_columns() { - let config = DatabaseConfig::default(); - let config_5 = DatabaseConfig::with_columns(Some(5)); - - let tempdir = TempDir::new("").unwrap(); - - // open 5, remove all. - { - let db = Database::open(&config_5, tempdir.path().to_str().unwrap()).unwrap(); - assert_eq!(db.num_columns(), 5); - - for i in (0..5).rev() { - db.drop_column().unwrap(); - assert_eq!(db.num_columns(), i); - } - } - - // reopen as 0. - { - let db = Database::open(&config, tempdir.path().to_str().unwrap()).unwrap(); - assert_eq!(db.num_columns(), 0); - } - } - - #[test] - fn write_clears_buffered_ops() { - let tempdir = TempDir::new("").unwrap(); - let config = DatabaseConfig::default(); - let db = Database::open(&config, tempdir.path().to_str().unwrap()).unwrap(); - - let mut batch = db.transaction(); - batch.put(None, b"foo", b"bar"); - db.write_buffered(batch); - - let mut batch = db.transaction(); - batch.put(None, b"foo", b"baz"); - db.write(batch).unwrap(); - - assert_eq!(db.get(None, b"foo").unwrap().unwrap().as_ref(), b"baz"); - } -} diff --git a/util/kvdb/Cargo.toml b/util/kvdb/Cargo.toml deleted file mode 100644 index 7d1044505..000000000 --- a/util/kvdb/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "kvdb" -version = "0.1.0" -authors = ["Parity Technologies "] - -[dependencies] -elastic-array = "0.10" -ethcore-bytes = { path = "../bytes" } diff --git a/util/kvdb/src/lib.rs b/util/kvdb/src/lib.rs deleted file mode 100644 index 25e8a341a..000000000 --- a/util/kvdb/src/lib.rs +++ /dev/null @@ -1,175 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. 
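For the RocksDB side, the same `DatabaseConfig`/`CompactionProfile` surface now comes from parity-common, so opening a multi-column database should still look roughly like this (a sketch; the path and helper name are made up for illustration):

    extern crate kvdb_rocksdb;

    use std::path::Path;
    use kvdb_rocksdb::{CompactionProfile, Database, DatabaseConfig};

    fn open_example_db(path: &str) -> std::io::Result<Database> {
        // Five user columns (col0..col4) in addition to the default column.
        let mut config = DatabaseConfig::with_columns(Some(5));

        // The memory budget is given in MiB; 128 is the built-in default.
        config.memory_budget = Some(128);

        // On Linux this probes /sys/block/<dev>/queue/rotational to choose
        // between the SSD and HDD profiles; elsewhere it falls back to SSD.
        config.compaction = CompactionProfile::auto(Path::new(path));

        Database::open(&config, path)
    }

    fn main() -> std::io::Result<()> {
        let db = open_example_db("./example-db")?;
        assert_eq!(db.num_columns(), 5);
        Ok(())
    }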
- -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Key-Value store abstraction with `RocksDB` backend. - -extern crate elastic_array; -extern crate ethcore_bytes as bytes; - -use std::io; -use std::path::Path; -use std::sync::Arc; -use elastic_array::{ElasticArray128, ElasticArray32}; -use bytes::Bytes; - -/// Required length of prefixes. -pub const PREFIX_LEN: usize = 12; - -/// Database value. -pub type DBValue = ElasticArray128; - -/// Write transaction. Batches a sequence of put/delete operations for efficiency. -#[derive(Default, Clone, PartialEq)] -pub struct DBTransaction { - /// Database operations. - pub ops: Vec, -} - -/// Database operation. -#[derive(Clone, PartialEq)] -pub enum DBOp { - Insert { - col: Option, - key: ElasticArray32, - value: DBValue, - }, - Delete { - col: Option, - key: ElasticArray32, - } -} - -impl DBOp { - /// Returns the key associated with this operation. - pub fn key(&self) -> &[u8] { - match *self { - DBOp::Insert { ref key, .. } => key, - DBOp::Delete { ref key, .. } => key, - } - } - - /// Returns the column associated with this operation. - pub fn col(&self) -> Option { - match *self { - DBOp::Insert { col, .. } => col, - DBOp::Delete { col, .. } => col, - } - } -} - -impl DBTransaction { - /// Create new transaction. - pub fn new() -> DBTransaction { - DBTransaction::with_capacity(256) - } - - /// Create new transaction with capacity. - pub fn with_capacity(cap: usize) -> DBTransaction { - DBTransaction { - ops: Vec::with_capacity(cap) - } - } - - /// Insert a key-value pair in the transaction. Any existing value will be overwritten upon write. - pub fn put(&mut self, col: Option, key: &[u8], value: &[u8]) { - let mut ekey = ElasticArray32::new(); - ekey.append_slice(key); - self.ops.push(DBOp::Insert { - col: col, - key: ekey, - value: DBValue::from_slice(value), - }); - } - - /// Insert a key-value pair in the transaction. Any existing value will be overwritten upon write. - pub fn put_vec(&mut self, col: Option, key: &[u8], value: Bytes) { - let mut ekey = ElasticArray32::new(); - ekey.append_slice(key); - self.ops.push(DBOp::Insert { - col: col, - key: ekey, - value: DBValue::from_vec(value), - }); - } - - /// Delete value by key. - pub fn delete(&mut self, col: Option, key: &[u8]) { - let mut ekey = ElasticArray32::new(); - ekey.append_slice(key); - self.ops.push(DBOp::Delete { - col: col, - key: ekey, - }); - } -} - -/// Generic key-value database. -/// -/// This makes a distinction between "buffered" and "flushed" values. Values which have been -/// written can always be read, but may be present in an in-memory buffer. Values which have -/// been flushed have been moved to backing storage, like a RocksDB instance. There are certain -/// operations which are only guaranteed to operate on flushed data and not buffered, -/// although implementations may differ in this regard. -/// -/// The contents of an interior buffer may be explicitly flushed using the `flush` method. 
-/// -/// The `KeyValueDB` also deals in "column families", which can be thought of as distinct -/// stores within a database. Keys written in one column family will not be accessible from -/// any other. The number of column families must be specified at initialization, with a -/// differing interface for each database. The `None` argument in place of a column index -/// is always supported. -/// -/// The API laid out here, along with the `Sync` bound implies interior synchronization for -/// implementation. -pub trait KeyValueDB: Sync + Send { - /// Helper to create a new transaction. - fn transaction(&self) -> DBTransaction { DBTransaction::new() } - - /// Get a value by key. - fn get(&self, col: Option, key: &[u8]) -> io::Result>; - - /// Get a value by partial key. Only works for flushed data. - fn get_by_prefix(&self, col: Option, prefix: &[u8]) -> Option>; - - /// Write a transaction of changes to the buffer. - fn write_buffered(&self, transaction: DBTransaction); - - /// Write a transaction of changes to the backing store. - fn write(&self, transaction: DBTransaction) -> io::Result<()> { - self.write_buffered(transaction); - self.flush() - } - - /// Flush all buffered data. - fn flush(&self) -> io::Result<()>; - - /// Iterate over flushed data for a given column. - fn iter<'a>(&'a self, col: Option) -> Box, Box<[u8]>)> + 'a>; - - /// Iterate over flushed data for a given column, starting from a given prefix. - fn iter_from_prefix<'a>(&'a self, col: Option, prefix: &'a [u8]) - -> Box, Box<[u8]>)> + 'a>; - - /// Attempt to replace this database with a new one located at the given path. - fn restore(&self, new_db: &str) -> io::Result<()>; -} - -/// Generic key-value database handler. This trait contains one function `open`. When called, it opens database with a -/// predefined config. -pub trait KeyValueDBHandler: Send + Sync { - /// Open the predefined key-value database. - fn open(&self, path: &Path) -> io::Result>; -} diff --git a/util/memorydb/Cargo.toml b/util/memorydb/Cargo.toml deleted file mode 100644 index 57d7439cf..000000000 --- a/util/memorydb/Cargo.toml +++ /dev/null @@ -1,19 +0,0 @@ -[package] -name = "memorydb" -version = "0.2.0" -authors = ["Parity Technologies "] -description = "in-memory implementation of hashdb" -license = "GPL-3.0" - -[dependencies] -elastic-array = "0.10" -heapsize = "0.4" -hashdb = { version = "0.2.0", path = "../hashdb" } -plain_hasher = { path = "../plain_hasher" } -rlp = { version = "0.2.1", path = "../rlp" } - -[dev-dependencies] -tiny-keccak = "1.4.2" -ethereum-types = "0.3" -keccak-hasher = { path = "../keccak-hasher" } -keccak-hash = { path = "../hash" } \ No newline at end of file diff --git a/util/memorydb/benches/memdb.rs b/util/memorydb/benches/memdb.rs deleted file mode 100644 index cfc676ccd..000000000 --- a/util/memorydb/benches/memdb.rs +++ /dev/null @@ -1,79 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
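
With the full `KeyValueDB` trait in view, the buffered-versus-flushed distinction described in its docs can be made concrete. The sketch below is not from the patch: the `store` helper and its `flush_now` flag are hypothetical, written only to show how a caller chooses between `write_buffered` and `write` against any backend implementing the trait.

```rust
// Sketch of using the `KeyValueDB` abstraction generically, independent of the
// RocksDB backend. `store` and `flush_now` are hypothetical caller-side names,
// not API from this patch.
extern crate kvdb;

use std::io;
use kvdb::KeyValueDB;

/// Store one key/value pair through any `KeyValueDB` implementation.
///
/// With `flush_now == false` the change only reaches the implementation's
/// in-memory write buffer (it is still readable via `get`); with `true` it is
/// also flushed to backing storage in the same call, matching the trait's
/// default `write` implementation above.
fn store(db: &KeyValueDB, key: &[u8], value: &[u8], flush_now: bool) -> io::Result<()> {
    let mut tx = db.transaction();
    tx.put(None, key, value);
    if flush_now {
        // `write` = buffer the transaction, then `flush`.
        db.write(tx)
    } else {
        db.write_buffered(tx);
        Ok(())
    }
}
```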
- -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -#![feature(test)] - -extern crate hashdb; -extern crate memorydb; -extern crate keccak_hasher; -extern crate keccak_hash; -extern crate rlp; -extern crate test; - -use memorydb::MemoryDB; -use keccak_hasher::KeccakHasher; -use hashdb::{HashDB, Hasher}; -use keccak_hash::KECCAK_NULL_RLP; -use rlp::NULL_RLP; -use test::{Bencher, black_box}; - - -#[bench] -fn instantiation(b: &mut Bencher) { - b.iter(|| { - MemoryDB::::new(); - }) -} - -#[bench] -fn compare_to_null_embedded_in_struct(b: &mut Bencher) { - struct X {a_hash: ::Out}; - let x = X {a_hash: KeccakHasher::hash(&NULL_RLP)}; - let key = KeccakHasher::hash(b"abc"); - - b.iter(|| { - black_box(key == x.a_hash); - }) -} - -#[bench] -fn compare_to_null_in_const(b: &mut Bencher) { - let key = KeccakHasher::hash(b"abc"); - - b.iter(|| { - black_box(key == KECCAK_NULL_RLP); - }) -} - -#[bench] -fn contains_with_non_null_key(b: &mut Bencher) { - let mut m = MemoryDB::::new(); - let key = KeccakHasher::hash(b"abc"); - m.insert(b"abcefghijklmnopqrstuvxyz"); - b.iter(|| { - m.contains(&key); - }) -} - -#[bench] -fn contains_with_null_key(b: &mut Bencher) { - let mut m = MemoryDB::::new(); - let null_key = KeccakHasher::hash(&NULL_RLP); - m.insert(b"abcefghijklmnopqrstuvxyz"); - b.iter(|| { - m.contains(&null_key); - }) -} \ No newline at end of file diff --git a/util/memorydb/src/lib.rs b/util/memorydb/src/lib.rs deleted file mode 100644 index 538738995..000000000 --- a/util/memorydb/src/lib.rs +++ /dev/null @@ -1,344 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Reference-counted memory-based `HashDB` implementation. -extern crate elastic_array; -extern crate hashdb; -extern crate heapsize; -extern crate rlp; -#[cfg(test)] extern crate keccak_hasher; -#[cfg(test)] extern crate tiny_keccak; -#[cfg(test)] extern crate ethereum_types; - -use hashdb::{HashDB, Hasher as KeyHasher, DBValue, AsHashDB}; -use heapsize::HeapSizeOf; -use rlp::NULL_RLP; -use std::collections::hash_map::Entry; -use std::collections::HashMap; -use std::hash; -use std::mem; - -// Backing `HashMap` parametrized with a `Hasher` for the keys `Hasher::Out` and the `Hasher::StdHasher` as hash map builder. -type FastMap = HashMap<::Out, T, hash::BuildHasherDefault<::StdHasher>>; - -/// Reference-counted memory-based `HashDB` implementation. -/// -/// Use `new()` to create a new database. Insert items with `insert()`, remove items -/// with `remove()`, check for existence with `contains()` and lookup a hash to derive -/// the data with `get()`. Clear with `clear()` and purge the portions of the data -/// that have no references with `purge()`. 
-/// -/// # Example -/// ```rust -/// extern crate hashdb; -/// extern crate keccak_hasher; -/// extern crate memorydb; -/// -/// use hashdb::*; -/// use keccak_hasher::KeccakHasher; -/// use memorydb::*; -/// fn main() { -/// let mut m = MemoryDB::::new(); -/// let d = "Hello world!".as_bytes(); -/// -/// let k = m.insert(d); -/// assert!(m.contains(&k)); -/// assert_eq!(m.get(&k).unwrap(), d); -/// -/// m.insert(d); -/// assert!(m.contains(&k)); -/// -/// m.remove(&k); -/// assert!(m.contains(&k)); -/// -/// m.remove(&k); -/// assert!(!m.contains(&k)); -/// -/// m.remove(&k); -/// assert!(!m.contains(&k)); -/// -/// m.insert(d); -/// assert!(!m.contains(&k)); - -/// m.insert(d); -/// assert!(m.contains(&k)); -/// assert_eq!(m.get(&k).unwrap(), d); -/// -/// m.remove(&k); -/// assert!(!m.contains(&k)); -/// } -/// ``` -#[derive(Default, Clone, PartialEq)] -pub struct MemoryDB { - data: FastMap, - hashed_null_node: H::Out, -} - -impl MemoryDB { - /// Create a new instance of the memory DB. - pub fn new() -> MemoryDB { - MemoryDB { - data: FastMap::::default(), - hashed_null_node: H::hash(&NULL_RLP) - } - } - - /// Clear all data from the database. - /// - /// # Examples - /// ```rust - /// extern crate hashdb; - /// extern crate keccak_hasher; - /// extern crate memorydb; - /// - /// use hashdb::*; - /// use keccak_hasher::KeccakHasher; - /// use memorydb::*; - /// - /// fn main() { - /// let mut m = MemoryDB::::new(); - /// let hello_bytes = "Hello world!".as_bytes(); - /// let hash = m.insert(hello_bytes); - /// assert!(m.contains(&hash)); - /// m.clear(); - /// assert!(!m.contains(&hash)); - /// } - /// ``` - pub fn clear(&mut self) { - self.data.clear(); - } - - /// Purge all zero-referenced data from the database. - pub fn purge(&mut self) { - self.data.retain(|_, &mut (_, rc)| rc != 0); - } - - /// Return the internal map of hashes to data, clearing the current state. - pub fn drain(&mut self) -> FastMap { - mem::replace(&mut self.data, FastMap::::default()) - } - - /// Grab the raw information associated with a key. Returns None if the key - /// doesn't exist. - /// - /// Even when Some is returned, the data is only guaranteed to be useful - /// when the refs > 0. - pub fn raw(&self, key: &::Out) -> Option<(DBValue, i32)> { - if key == &self.hashed_null_node { - return Some((DBValue::from_slice(&NULL_RLP), 1)); - } - self.data.get(key).cloned() - } - - /// Returns the size of allocated heap memory - pub fn mem_used(&self) -> usize { - self.data.heap_size_of_children() - } - - /// Remove an element and delete it from storage if reference count reaches zero. - /// If the value was purged, return the old value. - pub fn remove_and_purge(&mut self, key: &::Out) -> Option { - if key == &self.hashed_null_node { - return None; - } - match self.data.entry(key.clone()) { - Entry::Occupied(mut entry) => - if entry.get().1 == 1 { - Some(entry.remove().0) - } else { - entry.get_mut().1 -= 1; - None - }, - Entry::Vacant(entry) => { - entry.insert((DBValue::new(), -1)); - None - } - } - } - - /// Consolidate all the entries of `other` into `self`. 
- pub fn consolidate(&mut self, mut other: Self) { - for (key, (value, rc)) in other.drain() { - match self.data.entry(key) { - Entry::Occupied(mut entry) => { - if entry.get().1 < 0 { - entry.get_mut().0 = value; - } - - entry.get_mut().1 += rc; - } - Entry::Vacant(entry) => { - entry.insert((value, rc)); - } - } - } - } -} - -impl HashDB for MemoryDB { - - fn keys(&self) -> HashMap { - self.data.iter() - .filter_map(|(k, v)| if v.1 != 0 { - Some((*k, v.1)) - } else { - None - }) - .collect() - } - - fn get(&self, key: &H::Out) -> Option { - if key == &self.hashed_null_node { - return Some(DBValue::from_slice(&NULL_RLP)); - } - - match self.data.get(key) { - Some(&(ref d, rc)) if rc > 0 => Some(d.clone()), - _ => None - } - } - - fn contains(&self, key: &H::Out) -> bool { - if key == &self.hashed_null_node { - return true; - } - - match self.data.get(key) { - Some(&(_, x)) if x > 0 => true, - _ => false - } - } - - fn insert(&mut self, value: &[u8]) -> H::Out { - if value == &NULL_RLP { - return self.hashed_null_node.clone(); - } - let key = H::hash(value); - match self.data.entry(key) { - Entry::Occupied(mut entry) => { - let &mut (ref mut old_value, ref mut rc) = entry.get_mut(); - if *rc <= 0 { - *old_value = DBValue::from_slice(value); - } - *rc += 1; - }, - Entry::Vacant(entry) => { - entry.insert((DBValue::from_slice(value), 1)); - }, - } - key - } - - fn emplace(&mut self, key:H::Out, value: DBValue) { - if &*value == &NULL_RLP { - return; - } - - match self.data.entry(key) { - Entry::Occupied(mut entry) => { - let &mut (ref mut old_value, ref mut rc) = entry.get_mut(); - if *rc <= 0 { - *old_value = value; - } - *rc += 1; - }, - Entry::Vacant(entry) => { - entry.insert((value, 1)); - }, - } - } - - fn remove(&mut self, key: &H::Out) { - if key == &self.hashed_null_node { - return; - } - - match self.data.entry(*key) { - Entry::Occupied(mut entry) => { - let &mut (_, ref mut rc) = entry.get_mut(); - *rc -= 1; - }, - Entry::Vacant(entry) => { - entry.insert((DBValue::new(), -1)); - }, - } - } -} - -impl AsHashDB for MemoryDB { - fn as_hashdb(&self) -> &HashDB { self } - fn as_hashdb_mut(&mut self) -> &mut HashDB { self } -} - -#[cfg(test)] -mod tests { - use super::*; - use tiny_keccak::Keccak; - use ethereum_types::H256; - use keccak_hasher::KeccakHasher; - - #[test] - fn memorydb_remove_and_purge() { - let hello_bytes = b"Hello world!"; - let mut hello_key = [0;32]; - Keccak::keccak256(hello_bytes, &mut hello_key); - let hello_key = H256(hello_key); - - let mut m = MemoryDB::::new(); - m.remove(&hello_key); - assert_eq!(m.raw(&hello_key).unwrap().1, -1); - m.purge(); - assert_eq!(m.raw(&hello_key).unwrap().1, -1); - m.insert(hello_bytes); - assert_eq!(m.raw(&hello_key).unwrap().1, 0); - m.purge(); - assert_eq!(m.raw(&hello_key), None); - - let mut m = MemoryDB::::new(); - assert!(m.remove_and_purge(&hello_key).is_none()); - assert_eq!(m.raw(&hello_key).unwrap().1, -1); - m.insert(hello_bytes); - m.insert(hello_bytes); - assert_eq!(m.raw(&hello_key).unwrap().1, 1); - assert_eq!(&*m.remove_and_purge(&hello_key).unwrap(), hello_bytes); - assert_eq!(m.raw(&hello_key), None); - assert!(m.remove_and_purge(&hello_key).is_none()); - } - - #[test] - fn consolidate() { - let mut main = MemoryDB::::new(); - let mut other = MemoryDB::::new(); - let remove_key = other.insert(b"doggo"); - main.remove(&remove_key); - - let insert_key = other.insert(b"arf"); - main.emplace(insert_key, DBValue::from_slice(b"arf")); - - let negative_remove_key = other.insert(b"negative"); - 
other.remove(&negative_remove_key); // ref cnt: 0 - other.remove(&negative_remove_key); // ref cnt: -1 - main.remove(&negative_remove_key); // ref cnt: -1 - - main.consolidate(other); - - let overlay = main.drain(); - - assert_eq!(overlay.get(&remove_key).unwrap(), &(DBValue::from_slice(b"doggo"), 0)); - assert_eq!(overlay.get(&insert_key).unwrap(), &(DBValue::from_slice(b"arf"), 2)); - assert_eq!(overlay.get(&negative_remove_key).unwrap(), &(DBValue::from_slice(b"negative"), -2)); - } -} diff --git a/util/migration-rocksdb/Cargo.toml b/util/migration-rocksdb/Cargo.toml index f2eb59865..39ff50cfb 100644 --- a/util/migration-rocksdb/Cargo.toml +++ b/util/migration-rocksdb/Cargo.toml @@ -6,8 +6,8 @@ authors = ["Parity Technologies "] [dependencies] log = "0.3" macros = { path = "../macros" } -kvdb = { path = "../kvdb" } -kvdb-rocksdb = { path = "../kvdb-rocksdb" } +kvdb = { git = "https://github.com/paritytech/parity-common" } +kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common" } [dev-dependencies] tempdir = "0.3" diff --git a/util/network-devp2p/Cargo.toml b/util/network-devp2p/Cargo.toml index 3e483c1d8..99fdc1645 100644 --- a/util/network-devp2p/Cargo.toml +++ b/util/network-devp2p/Cargo.toml @@ -20,16 +20,16 @@ parking_lot = "0.6" ansi_term = "0.10" rustc-hex = "1.0" ethcore-io = { path = "../io", features = ["mio"] } -ethcore-bytes = { path = "../bytes" } -ethcore-crypto = { path = "../../ethcore/crypto" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } +parity-crypto = { git = "https://github.com/paritytech/parity-common" } ethcore-logger = { path ="../../logger" } ethcore-network = { path = "../network" } ethereum-types = "0.3" ethkey = { path = "../../ethkey" } -rlp = { path = "../rlp" } -path = { path = "../path" } +rlp = { git = "https://github.com/paritytech/parity-common" } +path = { git = "https://github.com/paritytech/parity-common" } ipnetwork = "0.12.6" -keccak-hash = { path = "../hash" } +keccak-hash = { git = "https://github.com/paritytech/parity-common" } snappy = { git = "https://github.com/paritytech/rust-snappy" } serde = "1.0" serde_json = "1.0" diff --git a/util/network-devp2p/src/connection.rs b/util/network-devp2p/src/connection.rs index 37824ae5d..1ed395acb 100644 --- a/util/network-devp2p/src/connection.rs +++ b/util/network-devp2p/src/connection.rs @@ -23,7 +23,7 @@ use mio::{Token, Ready, PollOpt}; use mio::deprecated::{Handler, EventLoop, TryRead, TryWrite}; use mio::tcp::*; use ethereum_types::{H128, H256, H512}; -use ethcore_bytes::*; +use parity_bytes::*; use rlp::{Rlp, RlpStream}; use std::io::{self, Cursor, Read, Write}; use io::{IoContext, StreamToken}; @@ -502,7 +502,7 @@ mod tests { use std::sync::atomic::AtomicBool; use mio::{Ready}; - use ethcore_bytes::Bytes; + use parity_bytes::Bytes; use io::*; use super::*; diff --git a/util/network-devp2p/src/discovery.rs b/util/network-devp2p/src/discovery.rs index 5f8f0cdbc..3bf7aee1e 100644 --- a/util/network-devp2p/src/discovery.rs +++ b/util/network-devp2p/src/discovery.rs @@ -14,7 +14,7 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . 
-use ethcore_bytes::Bytes; +use parity_bytes::Bytes; use std::net::SocketAddr; use std::collections::{HashSet, HashMap, VecDeque}; use std::default::Default; diff --git a/util/network-devp2p/src/handshake.rs b/util/network-devp2p/src/handshake.rs index 18869de55..4f54f0009 100644 --- a/util/network-devp2p/src/handshake.rs +++ b/util/network-devp2p/src/handshake.rs @@ -19,7 +19,7 @@ use rand::random; use hash::write_keccak; use mio::tcp::*; use ethereum_types::{H256, H520}; -use ethcore_bytes::Bytes; +use parity_bytes::Bytes; use rlp::{Rlp, RlpStream}; use connection::{Connection}; use node_table::NodeId; diff --git a/util/network-devp2p/src/lib.rs b/util/network-devp2p/src/lib.rs index 01fc1fe25..0df17c070 100644 --- a/util/network-devp2p/src/lib.rs +++ b/util/network-devp2p/src/lib.rs @@ -61,8 +61,8 @@ #![allow(deprecated)] extern crate ethcore_io as io; -extern crate ethcore_bytes; -extern crate ethcore_crypto as crypto; +extern crate parity_bytes; +extern crate parity_crypto as crypto; extern crate ethereum_types; extern crate parking_lot; extern crate mio; diff --git a/util/network-devp2p/tests/tests.rs b/util/network-devp2p/tests/tests.rs index 970aa3b8a..091f2509a 100644 --- a/util/network-devp2p/tests/tests.rs +++ b/util/network-devp2p/tests/tests.rs @@ -15,7 +15,7 @@ // along with Parity. If not, see . extern crate parking_lot; -extern crate ethcore_bytes; +extern crate parity_bytes; extern crate ethcore_io as io; extern crate ethcore_logger; extern crate ethcore_network; @@ -27,7 +27,7 @@ use std::sync::Arc; use std::thread; use std::time::*; use parking_lot::Mutex; -use ethcore_bytes::Bytes; +use parity_bytes::Bytes; use ethcore_network::*; use ethcore_network_devp2p::NetworkService; use ethkey::{Random, Generator}; diff --git a/util/network/Cargo.toml b/util/network/Cargo.toml index 6259e04af..53eb58a37 100644 --- a/util/network/Cargo.toml +++ b/util/network/Cargo.toml @@ -8,12 +8,12 @@ authors = ["Parity Technologies "] [dependencies] error-chain = { version = "0.12", default-features = false } -ethcore-crypto = { path = "../../ethcore/crypto" } +parity-crypto = { git = "https://github.com/paritytech/parity-common" } ethcore-io = { path = "../io" } ethereum-types = "0.3" ethkey = { path = "../../ethkey" } ipnetwork = "0.12.6" -rlp = { path = "../rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } libc = "0.2" snappy = { git = "https://github.com/paritytech/rust-snappy" } diff --git a/util/network/src/lib.rs b/util/network/src/lib.rs index 9b7328bdb..c31ace410 100644 --- a/util/network/src/lib.rs +++ b/util/network/src/lib.rs @@ -16,7 +16,7 @@ #![recursion_limit="128"] -extern crate ethcore_crypto as crypto; +extern crate parity_crypto as crypto; extern crate ethcore_io as io; extern crate ethereum_types; extern crate ethkey; diff --git a/util/path/Cargo.toml b/util/path/Cargo.toml deleted file mode 100644 index 6d01d6488..000000000 --- a/util/path/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "path" -version = "0.1.1" -authors = ["Parity Technologies "] -license = "GPL3" - -[dependencies] -dirs = "1.0.2" diff --git a/util/path/src/lib.rs b/util/path/src/lib.rs deleted file mode 100644 index 3169274ad..000000000 --- a/util/path/src/lib.rs +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. 
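
The manifest hunks above replace in-tree `path` dependencies with unpinned `git` dependencies on parity-common. For builds that need to be reproducible independently of Cargo.lock, the same entries could be pinned to a specific revision; the snippet below is only an illustration of that form, and the `rev` value is a placeholder, not part of this patch.

```toml
# Hypothetical pinned form of the git dependencies introduced above;
# the rev value is a placeholder, not taken from this patch.
[dependencies]
kvdb = { git = "https://github.com/paritytech/parity-common", rev = "<commit-sha>" }
kvdb-rocksdb = { git = "https://github.com/paritytech/parity-common", rev = "<commit-sha>" }
```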
- -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Path utilities -extern crate dirs; - -use std::path::Path; -use std::path::PathBuf; - -#[cfg(target_os = "macos")] -/// Get the config path for application `name`. -/// `name` should be capitalized, e.g. `"Ethereum"`, `"Parity"`. -pub fn config_path(name: &str) -> PathBuf { - let mut home = dirs::home_dir().expect("Failed to get home dir"); - home.push("Library"); - home.push(name); - home -} - -#[cfg(windows)] -/// Get the config path for application `name`. -/// `name` should be capitalized, e.g. `"Ethereum"`, `"Parity"`. -pub fn config_path(name: &str) -> PathBuf { - let mut home = dirs::home_dir().expect("Failed to get home dir"); - home.push("AppData"); - home.push("Roaming"); - home.push(name); - home -} - -#[cfg(not(any(target_os = "macos", windows)))] -/// Get the config path for application `name`. -/// `name` should be capitalized, e.g. `"Ethereum"`, `"Parity"`. -pub fn config_path(name: &str) -> PathBuf { - let mut home = dirs::home_dir().expect("Failed to get home dir"); - home.push(format!(".{}", name.to_lowercase())); - home -} - -/// Get the specific folder inside a config path. -pub fn config_path_with(name: &str, then: &str) -> PathBuf { - let mut path = config_path(name); - path.push(then); - path -} - -/// Default ethereum paths -pub mod ethereum { - use std::path::PathBuf; - - /// Default path for ethereum installation on Mac Os - pub fn default() -> PathBuf { super::config_path("Ethereum") } - - /// Default path for ethereum installation (testnet) - pub fn test() -> PathBuf { - let mut path = default(); - path.push("testnet"); - path - } - - /// Get the specific folder inside default ethereum installation - pub fn with_default(s: &str) -> PathBuf { - let mut path = default(); - path.push(s); - path - } - - /// Get the specific folder inside default ethereum installation configured for testnet - pub fn with_testnet(s: &str) -> PathBuf { - let mut path = default(); - path.push("testnet"); - path.push(s); - path - } -} - -/// Restricts the permissions of given path only to the owner. -#[cfg(unix)] -pub fn restrict_permissions_owner(file_path: &Path, write: bool, executable: bool) -> Result<(), String> { - let perms = ::std::os::unix::fs::PermissionsExt::from_mode(0o400 + write as u32 * 0o200 + executable as u32 * 0o100); - ::std::fs::set_permissions(file_path, perms).map_err(|e| format!("{:?}", e)) -} - -/// Restricts the permissions of given path only to the owner. 
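
The deleted `path` crate above (also moved to parity-common) resolves platform-specific configuration directories. A minimal sketch of its helpers follows; the application name "Parity" and the "keys" sub-directory mirror the doc comments and are illustrative, and the locations noted in comments are the defaults the platform-specific `config_path` variants compute.

```rust
// Minimal sketch of the `path` helpers deleted above.
extern crate path;

fn main() {
    // e.g. ~/.parity on Linux, ~/Library/Parity on macOS,
    // ~/AppData/Roaming/Parity on Windows.
    let base = path::config_path("Parity");
    println!("config dir: {:?}", base);

    // A sub-directory inside the application's config dir.
    let keys = path::config_path_with("Parity", "keys");
    println!("keys dir:   {:?}", keys);

    // Default Ethereum data dir and a folder inside its testnet variant.
    println!("ethereum:   {:?}", path::ethereum::default());
    println!("testnet:    {:?}", path::ethereum::with_testnet("keys"));
}
```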
-#[cfg(not(unix))] -pub fn restrict_permissions_owner(_file_path: &Path, _write: bool, _executable: bool) -> Result<(), String> { - //TODO: implement me - Ok(()) -} diff --git a/util/patricia-trie-ethereum/Cargo.toml b/util/patricia-trie-ethereum/Cargo.toml index 239c7c99f..90ca7e475 100644 --- a/util/patricia-trie-ethereum/Cargo.toml +++ b/util/patricia-trie-ethereum/Cargo.toml @@ -6,10 +6,10 @@ description = "Merkle-Patricia Trie (Ethereum Style)" license = "GPL-3.0" [dependencies] -patricia-trie = { path = "../patricia_trie" } +patricia-trie = { git = "https://github.com/paritytech/parity-common" } keccak-hasher = { path = "../keccak-hasher" } -hashdb = { path = "../hashdb" } -rlp = { path = "../rlp" } -ethcore-bytes = { path = "../bytes" } +hashdb = { git = "https://github.com/paritytech/parity-common" } +rlp = { git = "https://github.com/paritytech/parity-common" } +parity-bytes = { git = "https://github.com/paritytech/parity-common" } ethereum-types = "0.3" elastic-array = "0.10" \ No newline at end of file diff --git a/util/patricia-trie-ethereum/src/lib.rs b/util/patricia-trie-ethereum/src/lib.rs index a252152c7..ac2943fc6 100644 --- a/util/patricia-trie-ethereum/src/lib.rs +++ b/util/patricia-trie-ethereum/src/lib.rs @@ -18,7 +18,7 @@ pub extern crate patricia_trie as trie; // `pub` because we need to import this crate for the tests in `patricia_trie` and there were issues: https://gist.github.com/dvdplm/869251ee557a1b4bd53adc7c971979aa extern crate elastic_array; -extern crate ethcore_bytes; +extern crate parity_bytes; extern crate ethereum_types; extern crate hashdb; extern crate keccak_hasher; diff --git a/util/patricia_trie/Cargo.toml b/util/patricia_trie/Cargo.toml deleted file mode 100644 index ebd665045..000000000 --- a/util/patricia_trie/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -[package] -name = "patricia-trie" -version = "0.2.0" -authors = ["Parity Technologies "] -description = "Merkle-Patricia Trie generic over key hasher and node encoding" -license = "GPL-3.0" - -[dependencies] -elastic-array = "0.10" -ethcore-bytes = { version = "0.1.0", path = "../bytes" } -hashdb = { version = "0.2", path = "../hashdb" } -heapsize = "0.4" -log = "0.3" -rand = "0.4" - -[dev-dependencies] -env_logger = "0.5" -ethereum-types = "0.3" -keccak-hash = { version = "0.1.0", path = "../hash" } -keccak-hasher = { path = "../keccak-hasher" } -memorydb = { version = "0.2", path = "../memorydb" } -patricia-trie-ethereum = { path = "../patricia-trie-ethereum" } -rlp = { version = "0.2.1", path = "../rlp" } -trie-standardmap = { path = "../trie-standardmap" } -triehash = { version = "0.1.0", path = "../triehash" } diff --git a/util/patricia_trie/benches/trie.rs b/util/patricia_trie/benches/trie.rs deleted file mode 100644 index 114006ebe..000000000 --- a/util/patricia_trie/benches/trie.rs +++ /dev/null @@ -1,214 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. 
If not, see . - -#![feature(test)] - -extern crate test; -extern crate ethcore_bytes; -extern crate ethereum_types; -extern crate memorydb; -extern crate patricia_trie as trie; -extern crate patricia_trie_ethereum as ethtrie; -extern crate keccak_hasher; -extern crate keccak_hash; -extern crate trie_standardmap; -extern crate hashdb; - -use ethcore_bytes::Bytes; -use ethereum_types::H256; -use keccak_hash::keccak; -use memorydb::MemoryDB; -use test::{Bencher, black_box}; -use trie::{TrieMut, Trie}; -use trie_standardmap::{Alphabet, ValueMode, StandardMap}; -use keccak_hasher::KeccakHasher; -use ethtrie::{TrieDB, TrieDBMut}; - -fn random_word(alphabet: &[u8], min_count: usize, diff_count: usize, seed: &mut H256) -> Vec { - assert!(min_count + diff_count <= 32); - *seed = keccak(&seed); - let r = min_count + (seed[31] as usize % (diff_count + 1)); - let mut ret: Vec = Vec::with_capacity(r); - for i in 0..r { - ret.push(alphabet[seed[i] as usize % alphabet.len()]); - } - ret -} - -fn random_bytes(min_count: usize, diff_count: usize, seed: &mut H256) -> Vec { - assert!(min_count + diff_count <= 32); - *seed = keccak(&seed); - let r = min_count + (seed[31] as usize % (diff_count + 1)); - seed[0..r].to_vec() -} - -fn random_value(seed: &mut H256) -> Bytes { - *seed = keccak(&seed); - match seed[0] % 2 { - 1 => vec![seed[31];1], - _ => seed.to_vec(), - } -} - -#[bench] -fn trie_insertions_32_mir_1k(b: &mut Bencher) { - let st = StandardMap { - alphabet: Alphabet::All, - min_key: 32, - journal_key: 0, - value_mode: ValueMode::Mirror, - count: 1000, - }; - let d = st.make(); - b.iter(&mut ||{ - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for i in d.iter() { - t.insert(&i.0, &i.1).unwrap(); - } - }); -} -#[bench] -fn trie_iter(b: &mut Bencher) { - let st = StandardMap { - alphabet: Alphabet::All, - min_key: 32, - journal_key: 0, - value_mode: ValueMode::Mirror, - count: 1000, - }; - let d = st.make(); - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for i in d.iter() { - t.insert(&i.0, &i.1).unwrap(); - } - } - - b.iter(&mut ||{ - let t = TrieDB::new(&memdb, &root).unwrap(); - for n in t.iter().unwrap() { - black_box(n).unwrap(); - } - }); -} - -#[bench] -fn trie_insertions_32_ran_1k(b: &mut Bencher) { - let st = StandardMap { - alphabet: Alphabet::All, - min_key: 32, - journal_key: 0, - value_mode: ValueMode::Random, - count: 1000, - }; - let d = st.make(); - let mut r = H256::new(); - b.iter(&mut ||{ - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for i in d.iter() { - t.insert(&i.0, &i.1).unwrap(); - } - r = t.root().clone(); - }); -} - -#[bench] -fn trie_insertions_six_high(b: &mut Bencher) { - let mut d: Vec<(Bytes, Bytes)> = Vec::new(); - let mut seed = H256::new(); - for _ in 0..1000 { - let k = random_bytes(6, 0, &mut seed); - let v = random_value(&mut seed); - d.push((k, v)) - } - - b.iter(||{ - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for i in d.iter() { - t.insert(&i.0, &i.1).unwrap(); - } - }) -} - -#[bench] -fn trie_insertions_six_mid(b: &mut Bencher) { - let alphabet = b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_"; - let mut d: Vec<(Bytes, Bytes)> = Vec::new(); - let mut seed = H256::new(); - for _ in 0..1000 { - let k = random_word(alphabet, 6, 0, &mut seed); - let v = random_value(&mut seed); 
- d.push((k, v)) - } - b.iter(||{ - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for i in d.iter() { - t.insert(&i.0, &i.1).unwrap(); - } - }) -} - -#[bench] -fn trie_insertions_random_mid(b: &mut Bencher) { - let alphabet = b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_"; - let mut d: Vec<(Bytes, Bytes)> = Vec::new(); - let mut seed = H256::new(); - for _ in 0..1000 { - let k = random_word(alphabet, 1, 5, &mut seed); - let v = random_value(&mut seed); - d.push((k, v)) - } - - b.iter(||{ - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for i in d.iter() { - t.insert(&i.0, &i.1).unwrap(); - } - }) -} - -#[bench] -fn trie_insertions_six_low(b: &mut Bencher) { - let alphabet = b"abcdef"; - let mut d: Vec<(Bytes, Bytes)> = Vec::new(); - let mut seed = H256::new(); - for _ in 0..1000 { - let k = random_word(alphabet, 6, 0, &mut seed); - let v = random_value(&mut seed); - d.push((k, v)) - } - - b.iter(||{ - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for i in d.iter() { - t.insert(&i.0, &i.1).unwrap(); - } - }) -} diff --git a/util/patricia_trie/src/fatdb.rs b/util/patricia_trie/src/fatdb.rs deleted file mode 100644 index 34a49a5d0..000000000 --- a/util/patricia_trie/src/fatdb.rs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use hashdb::{HashDB, Hasher}; -use super::{Result, TrieDB, Trie, TrieDBIterator, TrieItem, TrieIterator, Query}; -use node_codec::NodeCodec; - -/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. -/// Additionaly it stores inserted hash-key mappings for later retrieval. -/// -/// Use it as a `Trie` or `TrieMut` trait object. -pub struct FatDB<'db, H, C> -where - H: Hasher + 'db, - C: NodeCodec -{ - raw: TrieDB<'db, H, C>, -} - -impl<'db, H, C> FatDB<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - /// Create a new trie with the backing database `db` and empty `root` - /// Initialise to the state entailed by the genesis block. - /// This guarantees the trie is built correctly. - pub fn new(db: &'db HashDB, root: &'db H::Out) -> Result { - Ok(FatDB { raw: TrieDB::new(db, root)? }) - } - - /// Get the backing database. 
- pub fn db(&self) -> &HashDB { self.raw.db() } -} - -impl<'db, H, C> Trie for FatDB<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn root(&self) -> &H::Out { self.raw.root() } - - fn contains(&self, key: &[u8]) -> Result { - self.raw.contains(H::hash(key).as_ref()) - } - - fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> Result, H::Out, C::Error> - where 'a: 'key - { - self.raw.get_with(H::hash(key).as_ref(), query) - } - - fn iter<'a>(&'a self) -> Result> + 'a>, ::Out, C::Error> { - FatDBIterator::::new(&self.raw).map(|iter| Box::new(iter) as Box<_>) - } -} - -/// Itarator over inserted pairs of key values. -pub struct FatDBIterator<'db, H, C> -where - H: Hasher + 'db, - C: NodeCodec + 'db -{ - trie_iterator: TrieDBIterator<'db, H, C>, - trie: &'db TrieDB<'db, H, C>, -} - -impl<'db, H, C> FatDBIterator<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - /// Creates new iterator. - pub fn new(trie: &'db TrieDB) -> Result { - Ok(FatDBIterator { - trie_iterator: TrieDBIterator::new(trie)?, - trie: trie, - }) - } -} - -impl<'db, H, C> TrieIterator for FatDBIterator<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, C::Error> { - let hashed_key = H::hash(key); - self.trie_iterator.seek(hashed_key.as_ref()) - } -} - -impl<'db, H, C> Iterator for FatDBIterator<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - type Item = TrieItem<'db, H::Out, C::Error>; - - fn next(&mut self) -> Option { - self.trie_iterator.next() - .map(|res| { - res.map(|(hash, value)| { - let aux_hash = H::hash(&hash); - (self.trie.db().get(&aux_hash).expect("Missing fatdb hash").into_vec(), value) - }) - }) - } -} - -#[cfg(test)] -mod test { - use memorydb::MemoryDB; - use hashdb::DBValue; - use keccak_hasher::KeccakHasher; - use ethtrie::trie::{Trie, TrieMut}; - use ethtrie::{FatDB, FatDBMut}; - use ethereum_types::H256; - - #[test] - fn fatdb_to_trie() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = FatDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - } - let t = FatDB::new(&memdb, &root).unwrap(); - assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23])); - assert_eq!( - t.iter().unwrap().map(Result::unwrap).collect::>(), - vec![(vec![0x01u8, 0x23], DBValue::from_slice(&[0x01u8, 0x23] as &[u8]))]); - } -} diff --git a/util/patricia_trie/src/fatdbmut.rs b/util/patricia_trie/src/fatdbmut.rs deleted file mode 100644 index 67f4f14a7..000000000 --- a/util/patricia_trie/src/fatdbmut.rs +++ /dev/null @@ -1,129 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use hashdb::{HashDB, DBValue, Hasher}; -use super::{Result, TrieDBMut, TrieMut}; -use node_codec::NodeCodec; - -/// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. 
-/// Additionaly it stores inserted hash-key mappings for later retrieval. -/// -/// Use it as a `Trie` or `TrieMut` trait object. -pub struct FatDBMut<'db, H, C> -where - H: Hasher + 'db, - C: NodeCodec -{ - raw: TrieDBMut<'db, H, C>, -} - -impl<'db, H, C> FatDBMut<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - /// Create a new trie with the backing database `db` and empty `root` - /// Initialise to the state entailed by the genesis block. - /// This guarantees the trie is built correctly. - pub fn new(db: &'db mut HashDB, root: &'db mut H::Out) -> Self { - FatDBMut { raw: TrieDBMut::new(db, root) } - } - - /// Create a new trie with the backing database `db` and `root`. - /// - /// Returns an error if root does not exist. - pub fn from_existing(db: &'db mut HashDB, root: &'db mut H::Out) -> Result { - Ok(FatDBMut { raw: TrieDBMut::from_existing(db, root)? }) - } - - /// Get the backing database. - pub fn db(&self) -> &HashDB { - self.raw.db() - } - - /// Get the backing database. - pub fn db_mut(&mut self) -> &mut HashDB { - self.raw.db_mut() - } -} - -impl<'db, H, C> TrieMut for FatDBMut<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn root(&mut self) -> &H::Out { self.raw.root() } - - fn is_empty(&self) -> bool { self.raw.is_empty() } - - fn contains(&self, key: &[u8]) -> Result { - self.raw.contains(H::hash(key).as_ref()) - } - - fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result, H::Out, C::Error> - where 'a: 'key - { - self.raw.get(H::hash(key).as_ref()) - } - - fn insert(&mut self, key: &[u8], value: &[u8]) -> Result, H::Out, C::Error> { - let hash = H::hash(key); - let out = self.raw.insert(hash.as_ref(), value)?; - let db = self.raw.db_mut(); - - // don't insert if it doesn't exist. - if out.is_none() { - let aux_hash = H::hash(hash.as_ref()); - db.emplace(aux_hash, DBValue::from_slice(key)); - } - Ok(out) - } - - fn remove(&mut self, key: &[u8]) -> Result, H::Out, C::Error> { - let hash = H::hash(key); - let out = self.raw.remove(hash.as_ref())?; - - // don't remove if it already exists. - if out.is_some() { - self.raw.db_mut().remove(&hash); - } - - Ok(out) - } -} - -#[cfg(test)] -mod test { - use hashdb::DBValue; - use memorydb::MemoryDB; - use ethtrie::trie::{Trie, TrieMut}; - use ethtrie::{TrieDB, FatDBMut}; - use keccak_hasher::KeccakHasher; - use keccak; - use ethereum_types::H256; - - #[test] - fn fatdbmut_to_trie() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = FatDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - } - let t = TrieDB::new(&memdb, &root).unwrap(); - assert_eq!(t.get(&keccak::keccak(&[0x01u8, 0x23])).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23])); - } -} \ No newline at end of file diff --git a/util/patricia_trie/src/lib.rs b/util/patricia_trie/src/lib.rs deleted file mode 100644 index a028be87a..000000000 --- a/util/patricia_trie/src/lib.rs +++ /dev/null @@ -1,319 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. 
- -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Trie interface and implementation. -extern crate elastic_array; -extern crate ethcore_bytes as bytes; -extern crate hashdb; -extern crate heapsize; -extern crate rand; -#[macro_use] -extern crate log; - -#[cfg(test)] -extern crate env_logger; -#[cfg(test)] -extern crate ethereum_types; -#[cfg(test)] -extern crate trie_standardmap as standardmap; -#[cfg(test)] -extern crate patricia_trie_ethereum as ethtrie; -#[cfg(test)] -extern crate memorydb; -#[cfg(test)] -extern crate rlp; -#[cfg(test)] -extern crate keccak_hash as keccak; -#[cfg(test)] -extern crate keccak_hasher; -#[cfg(test)] -extern crate triehash; - -use std::{fmt, error}; -use hashdb::{HashDB, DBValue, Hasher}; -use std::marker::PhantomData; - -pub mod node; -pub mod triedb; -pub mod triedbmut; -pub mod sectriedb; -pub mod sectriedbmut; -pub mod recorder; - -mod fatdb; -mod fatdbmut; -mod lookup; -mod nibblevec; -mod nibbleslice; -mod node_codec; - -pub use self::triedb::{TrieDB, TrieDBIterator}; -pub use self::triedbmut::{TrieDBMut, ChildReference}; -pub use self::sectriedbmut::SecTrieDBMut; -pub use self::sectriedb::SecTrieDB; -pub use self::fatdb::{FatDB, FatDBIterator}; -pub use self::fatdbmut::FatDBMut; -pub use self::recorder::Recorder; -pub use self::lookup::Lookup; -pub use self::nibbleslice::NibbleSlice; -pub use node_codec::NodeCodec; - -/// Trie Errors. -/// -/// These borrow the data within them to avoid excessive copying on every -/// trie operation. -#[derive(Debug, PartialEq, Eq, Clone)] -pub enum TrieError { - /// Attempted to create a trie with a state root not in the DB. - InvalidStateRoot(T), - /// Trie item not found in the database, - IncompleteDatabase(T), - /// Corrupt Trie item - DecoderError(T, E), -} - -impl fmt::Display for TrieError where T: std::fmt::Debug, E: std::fmt::Debug { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - TrieError::InvalidStateRoot(ref root) => write!(f, "Invalid state root: {:?}", root), - TrieError::IncompleteDatabase(ref missing) => write!(f, "Database missing expected key: {:?}", missing), - TrieError::DecoderError(ref hash, ref decoder_err) => write!(f, "Decoding failed for hash {:?}; err: {:?}", hash, decoder_err), - } - } -} - -impl error::Error for TrieError where T: std::fmt::Debug, E: std::error::Error { - fn description(&self) -> &str { - match *self { - TrieError::InvalidStateRoot(_) => "Invalid state root", - TrieError::IncompleteDatabase(_) => "Incomplete database", - TrieError::DecoderError(_, ref err) => err.description(), - } - } -} - -/// Trie result type. Boxed to avoid copying around extra space for the `Hasher`s `Out` on successful queries. -pub type Result = ::std::result::Result>>; - - -/// Trie-Item type used for iterators over trie data. -pub type TrieItem<'a, U, E> = Result<(Vec, DBValue), U, E>; - -/// Description of what kind of query will be made to the trie. -/// -/// This is implemented for any &mut recorder (where the query will return -/// a DBValue), any function taking raw bytes (where no recording will be made), -/// or any tuple of (&mut Recorder, FnOnce(&[u8])) -pub trait Query { - /// Output item. - type Item; - - /// Decode a byte-slice into the desired item. - fn decode(self, data: &[u8]) -> Self::Item; - - /// Record that a node has been passed through. 
- fn record(&mut self, _hash: &H::Out, _data: &[u8], _depth: u32) {} -} - -impl<'a, H: Hasher> Query for &'a mut Recorder { - type Item = DBValue; - fn decode(self, value: &[u8]) -> DBValue { DBValue::from_slice(value) } - fn record(&mut self, hash: &H::Out, data: &[u8], depth: u32) { - (&mut **self).record(hash, data, depth); - } -} - -impl Query for F where F: for<'a> FnOnce(&'a [u8]) -> T { - type Item = T; - fn decode(self, value: &[u8]) -> T { (self)(value) } -} - -impl<'a, F, T, H: Hasher> Query for (&'a mut Recorder, F) where F: FnOnce(&[u8]) -> T { - type Item = T; - fn decode(self, value: &[u8]) -> T { (self.1)(value) } - fn record(&mut self, hash: &H::Out, data: &[u8], depth: u32) { - self.0.record(hash, data, depth) - } -} - -/// A key-value datastore implemented as a database-backed modified Merkle tree. -pub trait Trie> { - /// Return the root of the trie. - fn root(&self) -> &H::Out; - - /// Is the trie empty? - fn is_empty(&self) -> bool { *self.root() == C::HASHED_NULL_NODE } - - /// Does the trie contain a given key? - fn contains(&self, key: &[u8]) -> Result { - self.get(key).map(|x|x.is_some() ) - } - - /// What is the value of the given key in this trie? - fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result, H::Out, C::Error> where 'a: 'key { - self.get_with(key, DBValue::from_slice) - } - - /// Search for the key with the given query parameter. See the docs of the `Query` - /// trait for more details. - fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> Result, H::Out, C::Error> where 'a: 'key; - - /// Returns a depth-first iterator over the elements of trie. - fn iter<'a>(&'a self) -> Result> + 'a>, H::Out, C::Error>; -} - -/// A key-value datastore implemented as a database-backed modified Merkle tree. -pub trait TrieMut> { - /// Return the root of the trie. - fn root(&mut self) -> &H::Out; - - /// Is the trie empty? - fn is_empty(&self) -> bool; - - /// Does the trie contain a given key? - fn contains(&self, key: &[u8]) -> Result { - self.get(key).map(|x| x.is_some()) - } - - /// What is the value of the given key in this trie? - fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result, H::Out, C::Error> where 'a: 'key; - - /// Insert a `key`/`value` pair into the trie. An empty value is equivalent to removing - /// `key` from the trie. Returns the old value associated with this key, if it existed. - fn insert(&mut self, key: &[u8], value: &[u8]) -> Result, H::Out, C::Error>; - - /// Remove a `key` from the trie. Equivalent to making it equal to the empty - /// value. Returns the old value associated with this key, if it existed. - fn remove(&mut self, key: &[u8]) -> Result, H::Out, C::Error>; -} - -/// A trie iterator that also supports random access (`seek()`). -pub trait TrieIterator>: Iterator { - /// Position the iterator on the first element with key > `key` - fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, >::Error>; -} - -/// Trie types -#[derive(Debug, PartialEq, Clone)] -pub enum TrieSpec { - /// Generic trie. - Generic, - /// Secure trie. - Secure, - /// Secure trie with fat database. - Fat, -} - -impl Default for TrieSpec { - fn default() -> TrieSpec { - TrieSpec::Secure - } -} - -/// Trie factory. -#[derive(Default, Clone)] -pub struct TrieFactory> { - spec: TrieSpec, - mark_hash: PhantomData, - mark_codec: PhantomData, -} - -/// All different kinds of tries. -/// This is used to prevent a heap allocation for every created trie. -pub enum TrieKinds<'db, H: Hasher + 'db, C: NodeCodec> { - /// A generic trie db. 
- Generic(TrieDB<'db, H, C>), - /// A secure trie db. - Secure(SecTrieDB<'db, H, C>), - /// A fat trie db. - Fat(FatDB<'db, H, C>), -} - -// wrapper macro for making the match easier to deal with. -macro_rules! wrapper { - ($me: ident, $f_name: ident, $($param: ident),*) => { - match *$me { - TrieKinds::Generic(ref t) => t.$f_name($($param),*), - TrieKinds::Secure(ref t) => t.$f_name($($param),*), - TrieKinds::Fat(ref t) => t.$f_name($($param),*), - } - } -} - -impl<'db, H: Hasher, C: NodeCodec> Trie for TrieKinds<'db, H, C> { - fn root(&self) -> &H::Out { - wrapper!(self, root,) - } - - fn is_empty(&self) -> bool { - wrapper!(self, is_empty,) - } - - fn contains(&self, key: &[u8]) -> Result { - wrapper!(self, contains, key) - } - - fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> Result, H::Out, C::Error> - where 'a: 'key - { - wrapper!(self, get_with, key, query) - } - - fn iter<'a>(&'a self) -> Result> + 'a>, H::Out, C::Error> { - wrapper!(self, iter,) - } -} - -impl<'db, H, C> TrieFactory -where - H: Hasher, - C: NodeCodec + 'db -{ - /// Creates new factory. - pub fn new(spec: TrieSpec) -> Self { - TrieFactory { spec, mark_hash: PhantomData, mark_codec: PhantomData } - } - - /// Create new immutable instance of Trie. - pub fn readonly(&self, db: &'db HashDB, root: &'db H::Out) -> Result, H::Out, >::Error> { - match self.spec { - TrieSpec::Generic => Ok(TrieKinds::Generic(TrieDB::new(db, root)?)), - TrieSpec::Secure => Ok(TrieKinds::Secure(SecTrieDB::new(db, root)?)), - TrieSpec::Fat => Ok(TrieKinds::Fat(FatDB::new(db, root)?)), - } - } - - /// Create new mutable instance of Trie. - pub fn create(&self, db: &'db mut HashDB, root: &'db mut H::Out) -> Box + 'db> { - match self.spec { - TrieSpec::Generic => Box::new(TrieDBMut::<_, C>::new(db, root)), - TrieSpec::Secure => Box::new(SecTrieDBMut::<_, C>::new(db, root)), - TrieSpec::Fat => Box::new(FatDBMut::<_, C>::new(db, root)), - } - } - - /// Create new mutable instance of trie and check for errors. - pub fn from_existing(&self, db: &'db mut HashDB, root: &'db mut H::Out) -> Result + 'db>, H::Out, >::Error> { - match self.spec { - TrieSpec::Generic => Ok(Box::new(TrieDBMut::<_, C>::from_existing(db, root)?)), - TrieSpec::Secure => Ok(Box::new(SecTrieDBMut::<_, C>::from_existing(db, root)?)), - TrieSpec::Fat => Ok(Box::new(FatDBMut::<_, C>::from_existing(db, root)?)), - } - } - - /// Returns true iff the trie DB is a fat DB (allows enumeration of keys). - pub fn is_fat(&self) -> bool { self.spec == TrieSpec::Fat } -} diff --git a/util/patricia_trie/src/lookup.rs b/util/patricia_trie/src/lookup.rs deleted file mode 100644 index 6e9c7ff75..000000000 --- a/util/patricia_trie/src/lookup.rs +++ /dev/null @@ -1,104 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Trie lookup via HashDB. 
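
The `Trie`, `TrieMut` and `Query` traits above are what downstream code programs against; the Keccak/RLP-flavoured aliases come from `patricia-trie-ethereum`, exactly as the deleted benches and tests use them. Below is a small sketch tying them together; the keys and values are illustrative, and the closure passed to `get_with` relies on the `Query` impl for `FnOnce(&[u8])` shown above.

```rust
// Sketch of the Trie/TrieMut traits above, using the ethereum-flavoured
// aliases from patricia-trie-ethereum as the deleted benches and tests do.
extern crate ethereum_types;
extern crate keccak_hasher;
extern crate memorydb;
extern crate patricia_trie as trie;
extern crate patricia_trie_ethereum as ethtrie;

use ethereum_types::H256;
use keccak_hasher::KeccakHasher;
use memorydb::MemoryDB;
use trie::{Trie, TrieMut};
use ethtrie::{TrieDB, TrieDBMut};

fn main() {
    let mut memdb = MemoryDB::<KeccakHasher>::new();
    let mut root = H256::new();

    // Build a small trie; changes are committed to the backing HashDB and the
    // root updated when the mutable trie is dropped (or when `root()` is called).
    {
        let mut t = TrieDBMut::new(&mut memdb, &mut root);
        t.insert(b"doe", b"reindeer").unwrap();
        t.insert(b"dog", b"puppy").unwrap();
    }

    // Read it back through the immutable view.
    let t = TrieDB::new(&memdb, &root).unwrap();
    assert!(t.contains(b"dog").unwrap());

    // `get_with` accepts any `Query`; a plain closure just transforms the value.
    let len = t.get_with(b"doe", |value: &[u8]| value.len()).unwrap();
    assert_eq!(len, Some(8));
}
```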
- -use hashdb::{HashDB, Hasher}; -use nibbleslice::NibbleSlice; -use node::Node; -use node_codec::NodeCodec; -use super::{Result, TrieError, Query}; -use std::marker::PhantomData; - -/// Trie lookup helper object. -pub struct Lookup<'a, H: Hasher + 'a, C: NodeCodec, Q: Query> { - /// database to query from. - pub db: &'a HashDB, - /// Query object to record nodes and transform data. - pub query: Q, - /// Hash to start at - pub hash: H::Out, - pub marker: PhantomData, // TODO: probably not needed when all is said and done? When Query is made generic? -} - -impl<'a, H, C, Q> Lookup<'a, H, C, Q> -where - H: Hasher + 'a, - C: NodeCodec + 'a, - Q: Query, -{ - /// Look up the given key. If the value is found, it will be passed to the given - /// function to decode or copy. - pub fn look_up(mut self, mut key: NibbleSlice) -> Result, H::Out, C::Error> { - let mut hash = self.hash; - - // this loop iterates through non-inline nodes. - for depth in 0.. { - let node_data = match self.db.get(&hash) { - Some(value) => value, - None => return Err(Box::new(match depth { - 0 => TrieError::InvalidStateRoot(hash), - _ => TrieError::IncompleteDatabase(hash), - })), - }; - - self.query.record(&hash, &node_data, depth); - - // this loop iterates through all inline children (usually max 1) - // without incrementing the depth. - let mut node_data = &node_data[..]; - loop { - let decoded = match C::decode(node_data) { - Ok(node) => node, - Err(e) => { - return Err(Box::new(TrieError::DecoderError(hash, e))) - } - }; - match decoded { - Node::Leaf(slice, value) => { - return Ok(match slice == key { - true => Some(self.query.decode(value)), - false => None, - }) - } - Node::Extension(slice, item) => { - if key.starts_with(&slice) { - node_data = item; - key = key.mid(slice.len()); - } else { - return Ok(None) - } - } - Node::Branch(children, value) => match key.is_empty() { - true => return Ok(value.map(move |val| self.query.decode(val))), - false => { - node_data = children[key.at(0) as usize]; - key = key.mid(1); - } - }, - _ => return Ok(None), - } - - // check if new node data is inline or hash. - if let Some(h) = C::try_decode_hash(&node_data) { - hash = h; - break - } - } - } - Ok(None) - } -} diff --git a/util/patricia_trie/src/nibbleslice.rs b/util/patricia_trie/src/nibbleslice.rs deleted file mode 100644 index b87a66369..000000000 --- a/util/patricia_trie/src/nibbleslice.rs +++ /dev/null @@ -1,311 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Nibble-orientated view onto byte-slice, allowing nibble-precision offsets. - -use std::cmp::*; -use std::fmt; -use elastic_array::ElasticArray36; - -/// Nibble-orientated view onto byte-slice, allowing nibble-precision offsets. -/// -/// This is an immutable struct. No operations actually change it. 
-/// -/// # Example -/// ```snippet -/// use patricia_trie::nibbleslice::NibbleSlice; -/// fn main() { -/// let d1 = &[0x01u8, 0x23, 0x45]; -/// let d2 = &[0x34u8, 0x50, 0x12]; -/// let d3 = &[0x00u8, 0x12]; -/// let n1 = NibbleSlice::new(d1); // 0,1,2,3,4,5 -/// let n2 = NibbleSlice::new(d2); // 3,4,5,0,1,2 -/// let n3 = NibbleSlice::new_offset(d3, 1); // 0,1,2 -/// assert!(n1 > n3); // 0,1,2,... > 0,1,2 -/// assert!(n1 < n2); // 0,... < 3,... -/// assert!(n2.mid(3) == n3); // 0,1,2 == 0,1,2 -/// assert!(n1.starts_with(&n3)); -/// assert_eq!(n1.common_prefix(&n3), 3); -/// assert_eq!(n2.mid(3).common_prefix(&n1), 3); -/// } -/// ``` -#[derive(Copy, Clone, Eq, Ord)] -pub struct NibbleSlice<'a> { - data: &'a [u8], - offset: usize, - data_encode_suffix: &'a [u8], - offset_encode_suffix: usize, -} - -/// Iterator type for a nibble slice. -pub struct NibbleSliceIterator<'a> { - p: &'a NibbleSlice<'a>, - i: usize, -} - -impl<'a> Iterator for NibbleSliceIterator<'a> { - type Item = u8; - fn next(&mut self) -> Option { - self.i += 1; - match self.i <= self.p.len() { - true => Some(self.p.at(self.i - 1)), - false => None, - } - } -} - -impl<'a> NibbleSlice<'a> { - /// Create a new nibble slice with the given byte-slice. - pub fn new(data: &'a [u8]) -> Self { NibbleSlice::new_offset(data, 0) } - - /// Create a new nibble slice with the given byte-slice with a nibble offset. - pub fn new_offset(data: &'a [u8], offset: usize) -> Self { - NibbleSlice { - data, - offset, - data_encode_suffix: &b""[..], - offset_encode_suffix: 0 - } - } - - /// Create a composed nibble slice; one followed by the other. - pub fn new_composed(a: &NibbleSlice<'a>, b: &NibbleSlice<'a>) -> Self { - NibbleSlice { - data: a.data, - offset: a.offset, - data_encode_suffix: b.data, - offset_encode_suffix: b.offset - } - } - - /// Get an iterator for the series of nibbles. - pub fn iter(&'a self) -> NibbleSliceIterator<'a> { - NibbleSliceIterator { p: self, i: 0 } - } - - /// Create a new nibble slice from the given HPE encoded data (e.g. output of `encoded()`). - pub fn from_encoded(data: &'a [u8]) -> (NibbleSlice, bool) { - (Self::new_offset(data, if data[0] & 16 == 16 {1} else {2}), data[0] & 32 == 32) - } - - /// Is this an empty slice? - pub fn is_empty(&self) -> bool { self.len() == 0 } - - /// Get the length (in nibbles, naturally) of this slice. - #[inline] - pub fn len(&self) -> usize { (self.data.len() + self.data_encode_suffix.len()) * 2 - self.offset - self.offset_encode_suffix } - - /// Get the nibble at position `i`. - #[inline(always)] - pub fn at(&self, i: usize) -> u8 { - let l = self.data.len() * 2 - self.offset; - if i < l { - if (self.offset + i) & 1 == 1 { - self.data[(self.offset + i) / 2] & 15u8 - } - else { - self.data[(self.offset + i) / 2] >> 4 - } - } - else { - let i = i - l; - if (self.offset_encode_suffix + i) & 1 == 1 { - self.data_encode_suffix[(self.offset_encode_suffix + i) / 2] & 15u8 - } - else { - self.data_encode_suffix[(self.offset_encode_suffix + i) / 2] >> 4 - } - } - } - - /// Return object which represents a view on to this slice (further) offset by `i` nibbles. - pub fn mid(&self, i: usize) -> NibbleSlice<'a> { - NibbleSlice { - data: self.data, - offset: self.offset + i, - data_encode_suffix: &b""[..], - offset_encode_suffix: 0 - } - } - - /// Do we start with the same nibbles as the whole of `them`? - pub fn starts_with(&self, them: &Self) -> bool { self.common_prefix(them) == them.len() } - - /// How many of the same nibbles at the beginning do we match with `them`? 
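/// For example, the nibble sequences `0,1,2,3` and `0,1,5` share a common prefix
/// of 2 nibbles.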
- pub fn common_prefix(&self, them: &Self) -> usize { - let s = min(self.len(), them.len()); - let mut i = 0usize; - while i < s { - if self.at(i) != them.at(i) { break; } - i += 1; - } - i - } - - /// Encode while nibble slice in prefixed hex notation, noting whether it `is_leaf`. - #[inline] - pub fn encoded(&self, is_leaf: bool) -> ElasticArray36 { - let l = self.len(); - let mut r = ElasticArray36::new(); - let mut i = l % 2; - r.push(if i == 1 {0x10 + self.at(0)} else {0} + if is_leaf {0x20} else {0}); - while i < l { - r.push(self.at(i) * 16 + self.at(i + 1)); - i += 2; - } - r - } - - /// Encode only the leftmost `n` bytes of the nibble slice in prefixed hex notation, - /// noting whether it `is_leaf`. - pub fn encoded_leftmost(&self, n: usize, is_leaf: bool) -> ElasticArray36 { - let l = min(self.len(), n); - let mut r = ElasticArray36::new(); - let mut i = l % 2; - r.push(if i == 1 {0x10 + self.at(0)} else {0} + if is_leaf {0x20} else {0}); - while i < l { - r.push(self.at(i) * 16 + self.at(i + 1)); - i += 2; - } - r - } -} - -impl<'a> PartialEq for NibbleSlice<'a> { - fn eq(&self, them: &Self) -> bool { - self.len() == them.len() && self.starts_with(them) - } -} - -impl<'a> PartialOrd for NibbleSlice<'a> { - fn partial_cmp(&self, them: &Self) -> Option { - let s = min(self.len(), them.len()); - let mut i = 0usize; - while i < s { - match self.at(i).partial_cmp(&them.at(i)).unwrap() { - Ordering::Less => return Some(Ordering::Less), - Ordering::Greater => return Some(Ordering::Greater), - _ => i += 1, - } - } - self.len().partial_cmp(&them.len()) - } -} - -impl<'a> fmt::Debug for NibbleSlice<'a> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - for i in 0..self.len() { - match i { - 0 => write!(f, "{:01x}", self.at(i))?, - _ => write!(f, "'{:01x}", self.at(i))?, - } - } - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use super::NibbleSlice; - use elastic_array::ElasticArray36; - static D: &'static [u8;3] = &[0x01u8, 0x23, 0x45]; - - #[test] - fn basics() { - let n = NibbleSlice::new(D); - assert_eq!(n.len(), 6); - assert!(!n.is_empty()); - - let n = NibbleSlice::new_offset(D, 6); - assert!(n.is_empty()); - - let n = NibbleSlice::new_offset(D, 3); - assert_eq!(n.len(), 3); - for i in 0..3 { - assert_eq!(n.at(i), i as u8 + 3); - } - } - - #[test] - fn iterator() { - let n = NibbleSlice::new(D); - let mut nibbles: Vec = vec![]; - nibbles.extend(n.iter()); - assert_eq!(nibbles, (0u8..6).collect::>()) - } - - #[test] - fn mid() { - let n = NibbleSlice::new(D); - let m = n.mid(2); - for i in 0..4 { - assert_eq!(m.at(i), i as u8 + 2); - } - let m = n.mid(3); - for i in 0..3 { - assert_eq!(m.at(i), i as u8 + 3); - } - } - - #[test] - fn encoded() { - let n = NibbleSlice::new(D); - assert_eq!(n.encoded(false), ElasticArray36::from_slice(&[0x00, 0x01, 0x23, 0x45])); - assert_eq!(n.encoded(true), ElasticArray36::from_slice(&[0x20, 0x01, 0x23, 0x45])); - assert_eq!(n.mid(1).encoded(false), ElasticArray36::from_slice(&[0x11, 0x23, 0x45])); - assert_eq!(n.mid(1).encoded(true), ElasticArray36::from_slice(&[0x31, 0x23, 0x45])); - } - - #[test] - fn from_encoded() { - let n = NibbleSlice::new(D); - assert_eq!((n, false), NibbleSlice::from_encoded(&[0x00, 0x01, 0x23, 0x45])); - assert_eq!((n, true), NibbleSlice::from_encoded(&[0x20, 0x01, 0x23, 0x45])); - assert_eq!((n.mid(1), false), NibbleSlice::from_encoded(&[0x11, 0x23, 0x45])); - assert_eq!((n.mid(1), true), NibbleSlice::from_encoded(&[0x31, 0x23, 0x45])); - } - - #[test] - fn shared() { - let n = NibbleSlice::new(D); - - let other = 
&[0x01u8, 0x23, 0x01, 0x23, 0x45, 0x67]; - let m = NibbleSlice::new(other); - - assert_eq!(n.common_prefix(&m), 4); - assert_eq!(m.common_prefix(&n), 4); - assert_eq!(n.mid(1).common_prefix(&m.mid(1)), 3); - assert_eq!(n.mid(1).common_prefix(&m.mid(2)), 0); - assert_eq!(n.common_prefix(&m.mid(4)), 6); - assert!(!n.starts_with(&m.mid(4))); - assert!(m.mid(4).starts_with(&n)); - } - - #[test] - fn compare() { - let other = &[0x01u8, 0x23, 0x01, 0x23, 0x45]; - let n = NibbleSlice::new(D); - let m = NibbleSlice::new(other); - - assert!(n != m); - assert!(n > m); - assert!(m < n); - - assert!(n == m.mid(4)); - assert!(n >= m.mid(4)); - assert!(n <= m.mid(4)); - } -} diff --git a/util/patricia_trie/src/nibblevec.rs b/util/patricia_trie/src/nibblevec.rs deleted file mode 100644 index 3fe8c9fb7..000000000 --- a/util/patricia_trie/src/nibblevec.rs +++ /dev/null @@ -1,146 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! An owning, nibble-oriented byte vector. -use elastic_array::ElasticArray36; -use nibbleslice::NibbleSlice; - -/// Owning, nibble-oriented byte vector. Counterpart to `NibbleSlice`. -#[derive(Clone, PartialEq, Eq, Debug)] -pub struct NibbleVec { - inner: ElasticArray36, - len: usize, -} - -impl Default for NibbleVec { - fn default() -> Self { - NibbleVec::new() - } -} - -impl NibbleVec { - /// Make a new `NibbleVec` - pub fn new() -> Self { - NibbleVec { - inner: ElasticArray36::new(), - len: 0 - } - } - - /// Length of the `NibbleVec` - #[inline(always)] - pub fn len(&self) -> usize { self.len } - - /// Retrurns true if `NibbleVec` has zero length - pub fn is_empty(&self) -> bool { self.len == 0 } - - /// Try to get the nibble at the given offset. - #[inline] - pub fn at(&self, idx: usize) -> u8 { - if idx % 2 == 0 { - self.inner[idx / 2] >> 4 - } else { - self.inner[idx / 2] & 0x0F - } - } - - /// Push a nibble onto the `NibbleVec`. Ignores the high 4 bits. - pub fn push(&mut self, nibble: u8) { - let nibble = nibble & 0x0F; - - if self.len % 2 == 0 { - self.inner.push(nibble << 4); - } else { - *self.inner.last_mut().expect("len != 0 since len % 2 != 0; inner has a last element; qed") |= nibble; - } - - self.len += 1; - } - - /// Try to pop a nibble off the `NibbleVec`. Fails if len == 0. - pub fn pop(&mut self) -> Option { - if self.is_empty() { - return None; - } - - let byte = self.inner.pop().expect("len != 0; inner has last elem; qed"); - let nibble = if self.len % 2 == 0 { - self.inner.push(byte & 0xF0); - byte & 0x0F - } else { - byte >> 4 - }; - - self.len -= 1; - Some(nibble) - } - - /// Try to treat this `NibbleVec` as a `NibbleSlice`. Works only if len is even. - pub fn as_nibbleslice(&self) -> Option { - if self.len % 2 == 0 { - Some(NibbleSlice::new(self.inner())) - } else { - None - } - } - - /// Get the underlying byte slice. - pub fn inner(&self) -> &[u8] { - &self.inner[..] 
- } -} - -impl<'a> From> for NibbleVec { - fn from(s: NibbleSlice<'a>) -> Self { - let mut v = NibbleVec::new(); - for i in 0..s.len() { - v.push(s.at(i)); - } - v - } -} - -#[cfg(test)] -mod tests { - use super::NibbleVec; - - #[test] - fn push_pop() { - let mut v = NibbleVec::new(); - - for i in 0..16 { - v.push(i); - assert_eq!(v.len() - 1, i as usize); - assert_eq!(v.at(i as usize), i); - } - - for i in (0..16).rev() { - assert_eq!(v.pop(), Some(i)); - assert_eq!(v.len(), i as usize); - } - } - - #[test] - fn nibbleslice_conv() { - let mut v = NibbleVec::new(); - for i in 0..10 { - v.push(i); - } - - let v2: NibbleVec = v.as_nibbleslice().unwrap().into(); - assert_eq!(v, v2); - } -} diff --git a/util/patricia_trie/src/node.rs b/util/patricia_trie/src/node.rs deleted file mode 100644 index 72c344fc3..000000000 --- a/util/patricia_trie/src/node.rs +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use elastic_array::ElasticArray36; -use nibbleslice::NibbleSlice; -use nibblevec::NibbleVec; -use hashdb::DBValue; - -/// Partial node key type. -pub type NodeKey = ElasticArray36; - -/// Type of node in the trie and essential information thereof. -#[derive(Eq, PartialEq, Debug, Clone)] -pub enum Node<'a> { - /// Null trie node; could be an empty root or an empty branch entry. - Empty, - /// Leaf node; has key slice and value. Value may not be empty. - Leaf(NibbleSlice<'a>, &'a [u8]), - /// Extension node; has key slice and node data. Data may not be null. - Extension(NibbleSlice<'a>, &'a [u8]), - /// Branch node; has array of 16 child nodes (each possibly null) and an optional immediate node data. - Branch([&'a [u8]; 16], Option<&'a [u8]>), -} - -/// An owning node type. Useful for trie iterators. -#[derive(Debug, PartialEq, Eq)] -pub enum OwnedNode { - /// Empty trie node. - Empty, - /// Leaf node: partial key and value. - Leaf(NibbleVec, DBValue), - /// Extension node: partial key and child node. - Extension(NibbleVec, DBValue), - /// Branch node: 16 children and an optional value. 
- Branch([NodeKey; 16], Option), -} - -impl<'a> From> for OwnedNode { - fn from(node: Node<'a>) -> Self { - match node { - Node::Empty => OwnedNode::Empty, - Node::Leaf(k, v) => OwnedNode::Leaf(k.into(), DBValue::from_slice(v)), - Node::Extension(k, child) => OwnedNode::Extension(k.into(), DBValue::from_slice(child)), - Node::Branch(c, val) => { - let children = [ - NodeKey::from_slice(c[0]), NodeKey::from_slice(c[1]), NodeKey::from_slice(c[2]), NodeKey::from_slice(c[3]), - NodeKey::from_slice(c[4]), NodeKey::from_slice(c[5]), NodeKey::from_slice(c[6]), NodeKey::from_slice(c[7]), - NodeKey::from_slice(c[8]), NodeKey::from_slice(c[9]), NodeKey::from_slice(c[10]), NodeKey::from_slice(c[11]), - NodeKey::from_slice(c[12]), NodeKey::from_slice(c[13]), NodeKey::from_slice(c[14]), NodeKey::from_slice(c[15]), - ]; - - OwnedNode::Branch(children, val.map(DBValue::from_slice)) - } - } - } -} diff --git a/util/patricia_trie/src/node_codec.rs b/util/patricia_trie/src/node_codec.rs deleted file mode 100644 index 1dec20f90..000000000 --- a/util/patricia_trie/src/node_codec.rs +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Generic trait for trie node encoding/decoding. Takes a `hashdb::Hasher` -//! to parametrize the hashes used in the codec. - -use hashdb::Hasher; -use node::Node; -use ChildReference; - -use elastic_array::{ElasticArray1024, ElasticArray128}; - -/// Trait for trie node encoding/decoding -pub trait NodeCodec: Sized { - /// Encoding error type - type Error: ::std::error::Error; - - /// Null node type - const HASHED_NULL_NODE: H::Out; - - /// Decode bytes to a `Node`. Returns `Self::E` on failure. - fn decode(data: &[u8]) -> Result; - - /// Decode bytes to the `Hasher`s output type. Returns `None` on failure. - fn try_decode_hash(data: &[u8]) -> Option; - - /// Check if the provided bytes correspond to the codecs "empty" node. - fn is_empty_node(data: &[u8]) -> bool; - - /// Returns an empty node - fn empty_node() -> ElasticArray1024; - - /// Returns an encoded leaft node - fn leaf_node(partial: &[u8], value: &[u8]) -> ElasticArray1024; - - /// Returns an encoded extension node - fn ext_node(partial: &[u8], child_ref: ChildReference) -> ElasticArray1024; - - /// Returns an encoded branch node. Takes an iterator yielding `ChildReference` and an optional value - fn branch_node(children: I, value: Option>) -> ElasticArray1024 - where I: IntoIterator>>; -} diff --git a/util/patricia_trie/src/recorder.rs b/util/patricia_trie/src/recorder.rs deleted file mode 100644 index 9ad7d8c33..000000000 --- a/util/patricia_trie/src/recorder.rs +++ /dev/null @@ -1,207 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. 
- -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Trie query recorder. - -use bytes::Bytes; - -/// A record of a visited node. -#[derive(PartialEq, Eq, Debug, Clone)] -pub struct Record { - /// The depth of this node. - pub depth: u32, - - /// The raw data of the node. - pub data: Bytes, - - /// The hash of the data. - pub hash: HO, -} - -/// Records trie nodes as they pass it. -#[derive(Debug)] -pub struct Recorder { - nodes: Vec>, - min_depth: u32, -} - -impl Default for Recorder { - fn default() -> Self { - Recorder::new() - } -} - -impl Recorder { - /// Create a new `Recorder` which records all given nodes. - #[inline] - pub fn new() -> Self { - Recorder::with_depth(0) - } - - /// Create a `Recorder` which only records nodes beyond a given depth. - pub fn with_depth(depth: u32) -> Self { - Recorder { - nodes: Vec::new(), - min_depth: depth, - } - } - - /// Record a visited node, given its hash, data, and depth. - pub fn record(&mut self, hash: &HO, data: &[u8], depth: u32) { - if depth >= self.min_depth { - self.nodes.push(Record { - depth: depth, - data: data.into(), - hash: *hash, - }) - } - } - - /// Drain all visited records. - pub fn drain(&mut self) -> Vec> { - ::std::mem::replace(&mut self.nodes, Vec::new()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - use keccak::keccak; - use keccak_hasher::KeccakHasher; - use ethereum_types::H256; - - #[test] - fn basic_recorder() { - let mut basic = Recorder::::new(); - - let node1 = vec![1, 2, 3, 4]; - let node2 = vec![4, 5, 6, 7, 8, 9, 10]; - - let (hash1, hash2) = (keccak(&node1), keccak(&node2)); - basic.record(&hash1, &node1, 0); - basic.record(&hash2, &node2, 456); - - let record1 = Record { - data: node1, - hash: hash1, - depth: 0, - }; - - let record2 = Record { - data: node2, - hash: hash2, - depth: 456, - }; - - - assert_eq!(basic.drain(), vec![record1, record2]); - } - - #[test] - fn basic_recorder_min_depth() { - let mut basic = Recorder::::with_depth(400); - - let node1 = vec![1, 2, 3, 4]; - let node2 = vec![4, 5, 6, 7, 8, 9, 10]; - - let hash1 = keccak(&node1); - let hash2 = keccak(&node2); - basic.record(&hash1, &node1, 0); - basic.record(&hash2, &node2, 456); - - let records = basic.drain(); - - assert_eq!(records.len(), 1); - - assert_eq!(records[0].clone(), Record { - data: node2, - hash: hash2, - depth: 456, - }); - } - - #[test] - fn trie_record() { - use ethtrie::trie::{Trie, TrieMut, Recorder}; - use memorydb::MemoryDB; - use ethtrie::{TrieDB, TrieDBMut}; - - let mut db = MemoryDB::::new(); - - let mut root = H256::default(); - - { - let mut x = TrieDBMut::new(&mut db, &mut root); - - x.insert(b"dog", b"cat").unwrap(); - x.insert(b"lunch", b"time").unwrap(); - x.insert(b"notdog", b"notcat").unwrap(); - x.insert(b"hotdog", b"hotcat").unwrap(); - x.insert(b"letter", b"confusion").unwrap(); - x.insert(b"insert", b"remove").unwrap(); - x.insert(b"pirate", b"aargh!").unwrap(); - x.insert(b"yo ho ho", b"and a bottle of rum").unwrap(); - } - - let trie = 
TrieDB::new(&db, &root).unwrap(); - let mut recorder = Recorder::::new(); - - trie.get_with(b"pirate", &mut recorder).unwrap().unwrap(); - - let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect(); - assert_eq!(nodes, vec![ - vec![ - 248, 81, 128, 128, 128, 128, 128, 128, 160, 50, 19, 71, 57, 213, 63, 125, 149, - 92, 119, 88, 96, 80, 126, 59, 11, 160, 142, 98, 229, 237, 200, 231, 224, 79, 118, - 215, 93, 144, 246, 179, 176, 160, 118, 211, 171, 199, 172, 136, 136, 240, 221, 59, - 110, 82, 86, 54, 23, 95, 48, 108, 71, 125, 59, 51, 253, 210, 18, 116, 79, 0, 236, - 102, 142, 48, 128, 128, 128, 128, 128, 128, 128, 128, 128 - ], - vec![ - 248, 60, 206, 134, 32, 105, 114, 97, 116, 101, 134, 97, 97, 114, 103, 104, 33, - 128, 128, 128, 128, 128, 128, 128, 128, 221, 136, 32, 111, 32, 104, 111, 32, 104, - 111, 147, 97, 110, 100, 32, 97, 32, 98, 111, 116, 116, 108, 101, 32, 111, 102, - 32, 114, 117, 109, 128, 128, 128, 128, 128, 128, 128 - ] - ]); - - trie.get_with(b"letter", &mut recorder).unwrap().unwrap(); - - let nodes: Vec<_> = recorder.drain().into_iter().map(|r| r.data).collect(); - assert_eq!(nodes, vec![ - vec![ - 248, 81, 128, 128, 128, 128, 128, 128, 160, 50, 19, 71, 57, 213, 63, 125, 149, - 92, 119, 88, 96, 80, 126, 59, 11, 160, 142, 98, 229, 237, 200, 231, 224, 79, 118, - 215, 93, 144, 246, 179, 176, 160, 118, 211, 171, 199, 172, 136, 136, 240, 221, - 59, 110, 82, 86, 54, 23, 95, 48, 108, 71, 125, 59, 51, 253, 210, 18, 116, 79, - 0, 236, 102, 142, 48, 128, 128, 128, 128, 128, 128, 128, 128, 128 - ], - vec![ - 248, 99, 128, 128, 128, 128, 200, 131, 32, 111, 103, 131, 99, 97, 116, 128, 128, - 128, 206, 134, 32, 111, 116, 100, 111, 103, 134, 104, 111, 116, 99, 97, 116, 206, - 134, 32, 110, 115, 101, 114, 116, 134, 114, 101, 109, 111, 118, 101, 128, 128, - 160, 202, 250, 252, 153, 229, 63, 255, 13, 100, 197, 80, 120, 190, 186, 92, 5, - 255, 135, 245, 205, 180, 213, 161, 8, 47, 107, 13, 105, 218, 1, 9, 5, 128, - 206, 134, 32, 111, 116, 100, 111, 103, 134, 110, 111, 116, 99, 97, 116, 128, 128 - ], - vec![ - 235, 128, 128, 128, 128, 128, 128, 208, 133, 53, 116, 116, 101, 114, 137, 99, - 111, 110, 102, 117, 115, 105, 111, 110, 202, 132, 53, 110, 99, 104, 132, 116, - 105, 109, 101, 128, 128, 128, 128, 128, 128, 128, 128, 128 - ] - ]); - } -} diff --git a/util/patricia_trie/src/sectriedb.rs b/util/patricia_trie/src/sectriedb.rs deleted file mode 100644 index a340de947..000000000 --- a/util/patricia_trie/src/sectriedb.rs +++ /dev/null @@ -1,100 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use hashdb::{HashDB, Hasher}; -use super::triedb::TrieDB; -use super::{Result, Trie, TrieItem, TrieIterator, Query}; -use node_codec::NodeCodec; - -/// A `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. -/// -/// Use it as a `Trie` trait object. You can use `raw()` to get the backing `TrieDB` object. 
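///
/// Keys are hashed with `H::hash` before every query, so a value reachable here
/// under `key` lives under `H::hash(key)` in the plain trie over the same
/// database. A sketch with the Keccak-backed types used in the test below
/// (`sec` and `raw` are illustrative names):
///
/// ```ignore
/// let sec = SecTrieDB::new(&db, &root)?;
/// let raw = TrieDB::new(&db, &root)?;
/// assert_eq!(sec.get(b"key")?, raw.get(keccak::keccak(b"key").as_ref())?);
/// ```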
-pub struct SecTrieDB<'db, H, C> -where - H: Hasher + 'db, - C: NodeCodec -{ - raw: TrieDB<'db, H, C> -} - -impl<'db, H, C> SecTrieDB<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - /// Create a new trie with the backing database `db` and empty `root` - /// - /// Initialise to the state entailed by the genesis block. - /// This guarantees the trie is built correctly. - /// Returns an error if root does not exist. - pub fn new(db: &'db HashDB, root: &'db H::Out) -> Result { - Ok(SecTrieDB { raw: TrieDB::new(db, root)? }) - } - - /// Get a reference to the underlying raw `TrieDB` struct. - pub fn raw(&self) -> &TrieDB { - &self.raw - } - - /// Get a mutable reference to the underlying raw `TrieDB` struct. - pub fn raw_mut(&mut self) -> &mut TrieDB<'db, H, C> { - &mut self.raw - } -} - -impl<'db, H, C> Trie for SecTrieDB<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn root(&self) -> &H::Out { self.raw.root() } - - fn contains(&self, key: &[u8]) -> Result { - self.raw.contains(H::hash(key).as_ref()) - } - - fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> Result, H::Out, C::Error> - where 'a: 'key - { - self.raw.get_with(H::hash(key).as_ref(), query) - } - - fn iter<'a>(&'a self) -> Result> + 'a>, H::Out, C::Error> { - TrieDB::iter(&self.raw) - } -} - -#[cfg(test)] -mod test { - use memorydb::MemoryDB; - use hashdb::DBValue; - use keccak; - use keccak_hasher::KeccakHasher; - use ethtrie::{TrieDBMut, SecTrieDB, trie::{Trie, TrieMut}}; - use ethereum_types::H256; - - #[test] - fn trie_to_sectrie() { - let mut db = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = TrieDBMut::new(&mut db, &mut root); - t.insert(&keccak::keccak(&[0x01u8, 0x23]), &[0x01u8, 0x23]).unwrap(); - } - let t = SecTrieDB::new(&db, &root).unwrap(); - assert_eq!(t.get(&[0x01u8, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23])); - } -} \ No newline at end of file diff --git a/util/patricia_trie/src/sectriedbmut.rs b/util/patricia_trie/src/sectriedbmut.rs deleted file mode 100644 index 8750c2dd5..000000000 --- a/util/patricia_trie/src/sectriedbmut.rs +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use hashdb::{HashDB, DBValue, Hasher}; -use super::{Result, TrieMut, TrieDBMut}; -use node_codec::NodeCodec; - -/// A mutable `Trie` implementation which hashes keys and uses a generic `HashDB` backing database. -/// -/// Use it as a `Trie` or `TrieMut` trait object. You can use `raw()` to get the backing `TrieDBMut` object. -pub struct SecTrieDBMut<'db, H, C> -where - H: Hasher + 'db, - C: NodeCodec -{ - raw: TrieDBMut<'db, H, C> -} - -impl<'db, H, C> SecTrieDBMut<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - /// Create a new trie with the backing database `db` and empty `root` - /// Initialise to the state entailed by the genesis block. - /// This guarantees the trie is built correctly. 
- pub fn new(db: &'db mut HashDB, root: &'db mut H::Out) -> Self { - SecTrieDBMut { raw: TrieDBMut::new(db, root) } - } - - /// Create a new trie with the backing database `db` and `root`. - /// - /// Returns an error if root does not exist. - pub fn from_existing(db: &'db mut HashDB, root: &'db mut H::Out) -> Result { - Ok(SecTrieDBMut { raw: TrieDBMut::from_existing(db, root)? }) - } - - /// Get the backing database. - pub fn db(&self) -> &HashDB { self.raw.db() } - - /// Get the backing database. - pub fn db_mut(&mut self) -> &mut HashDB { self.raw.db_mut() } -} - -impl<'db, H, C> TrieMut for SecTrieDBMut<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn root(&mut self) -> &H::Out { - self.raw.root() - } - - fn is_empty(&self) -> bool { - self.raw.is_empty() - } - - fn contains(&self, key: &[u8]) -> Result { - self.raw.contains(&H::hash(key).as_ref()) - } - - fn get<'a, 'key>(&'a self, key: &'key [u8]) -> Result, H::Out, C::Error> - where 'a: 'key - { - self.raw.get(&H::hash(key).as_ref()) - } - - fn insert(&mut self, key: &[u8], value: &[u8]) -> Result, H::Out, C::Error> { - self.raw.insert(&H::hash(key).as_ref(), value) - } - - fn remove(&mut self, key: &[u8]) -> Result, H::Out, C::Error> { - self.raw.remove(&H::hash(key).as_ref()) - } -} - -#[cfg(test)] -mod test { - use memorydb::MemoryDB; - use hashdb::DBValue; - use keccak; - use keccak_hasher::KeccakHasher; - use ethtrie::{TrieDB, SecTrieDBMut, trie::{Trie, TrieMut}}; - use ethereum_types::H256; - - #[test] - fn sectrie_to_trie() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = SecTrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - } - let t = TrieDB::new(&memdb, &root).unwrap(); - assert_eq!(t.get(&keccak::keccak(&[0x01u8, 0x23])).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23])); - } -} diff --git a/util/patricia_trie/src/triedb.rs b/util/patricia_trie/src/triedb.rs deleted file mode 100644 index bf88ab2ec..000000000 --- a/util/patricia_trie/src/triedb.rs +++ /dev/null @@ -1,628 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -use std::fmt; -use hashdb::*; -use nibbleslice::NibbleSlice; -use super::node::{Node, OwnedNode}; -use node_codec::NodeCodec; -use super::lookup::Lookup; -use super::{Result, Trie, TrieItem, TrieError, TrieIterator, Query}; -use bytes::Bytes; -use std::marker::PhantomData; - -/// A `Trie` implementation using a generic `HashDB` backing database, a `Hasher` -/// implementation to generate keys and a `NodeCodec` implementation to encode/decode -/// the nodes. -/// -/// Use it as a `Trie` trait object. You can use `db()` to get the backing database object. -/// Use `get` and `contains` to query values associated with keys in the trie. 
-/// -/// # Example -/// ``` -/// extern crate patricia_trie as trie; -/// extern crate patricia_trie_ethereum as ethtrie; -/// extern crate hashdb; -/// extern crate keccak_hasher; -/// extern crate memorydb; -/// extern crate ethereum_types; -/// -/// use trie::*; -/// use hashdb::*; -/// use keccak_hasher::KeccakHasher; -/// use memorydb::*; -/// use ethereum_types::H256; -/// use ethtrie::{TrieDB, TrieDBMut}; -/// -/// -/// fn main() { -/// let mut memdb = MemoryDB::::new(); -/// let mut root = H256::new(); -/// TrieDBMut::new(&mut memdb, &mut root).insert(b"foo", b"bar").unwrap(); -/// let t = TrieDB::new(&memdb, &root).unwrap(); -/// assert!(t.contains(b"foo").unwrap()); -/// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar")); -/// } -/// ``` -pub struct TrieDB<'db, H, C> -where - H: Hasher + 'db, - C: NodeCodec -{ - db: &'db HashDB, - root: &'db H::Out, - /// The number of hashes performed so far in operations on this trie. - hash_count: usize, - codec_marker: PhantomData, -} - -impl<'db, H, C> TrieDB<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - /// Create a new trie with the backing database `db` and `root` - /// Returns an error if `root` does not exist - pub fn new(db: &'db HashDB, root: &'db H::Out) -> Result { - if !db.contains(root) { - Err(Box::new(TrieError::InvalidStateRoot(*root))) - } else { - Ok(TrieDB {db, root, hash_count: 0, codec_marker: PhantomData}) - } - } - - /// Get the backing database. - pub fn db(&'db self) -> &'db HashDB { self.db } - - /// Get the data of the root node. - fn root_data(&self) -> Result { - self.db - .get(self.root) - .ok_or_else(|| Box::new(TrieError::InvalidStateRoot(*self.root))) - } - - /// Given some node-describing data `node`, return the actual node RLP. - /// This could be a simple identity operation in the case that the node is sufficiently small, but - /// may require a database lookup. 
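/// If `C::try_decode_hash` recognises `node` as a hash, the referenced node is
/// fetched from the backing `HashDB`; otherwise `node` is already the inline
/// node data and is returned as-is.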
- fn get_raw_or_lookup(&'db self, node: &'db [u8]) -> Result { - match C::try_decode_hash(node) { - Some(key) => { - self.db.get(&key).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(key))) - } - None => Ok(DBValue::from_slice(node)) - } - } -} - -impl<'db, H, C> Trie for TrieDB<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn root(&self) -> &H::Out { self.root } - - fn get_with<'a, 'key, Q: Query>(&'a self, key: &'key [u8], query: Q) -> Result, H::Out, C::Error> - where 'a: 'key - { - Lookup { - db: self.db, - query: query, - hash: self.root.clone(), - marker: PhantomData::, - }.look_up(NibbleSlice::new(key)) - } - - fn iter<'a>(&'a self) -> Result> + 'a>, H::Out, C::Error> { - TrieDBIterator::new(self).map(|iter| Box::new(iter) as Box<_>) - } -} - -// This is for pretty debug output only -struct TrieAwareDebugNode<'db, 'a, H, C> -where - H: Hasher + 'db, - C: NodeCodec + 'db -{ - trie: &'db TrieDB<'db, H, C>, - key: &'a[u8] -} - -impl<'db, 'a, H, C> fmt::Debug for TrieAwareDebugNode<'db, 'a, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - if let Ok(node) = self.trie.get_raw_or_lookup(self.key) { - match C::decode(&node) { - Ok(Node::Leaf(slice, value)) => f.debug_struct("Node::Leaf") - .field("slice", &slice) - .field("value", &value) - .finish(), - Ok(Node::Extension(ref slice, ref item)) => f.debug_struct("Node::Extension") - .field("slice", &slice) - .field("item", &TrieAwareDebugNode{trie: self.trie, key: item}) - .finish(), - Ok(Node::Branch(ref nodes, ref value)) => { - let nodes: Vec> = nodes.into_iter().map(|n| TrieAwareDebugNode{trie: self.trie, key: n} ).collect(); - f.debug_struct("Node::Branch") - .field("nodes", &nodes) - .field("value", &value) - .finish() - }, - Ok(Node::Empty) => f.debug_struct("Node::Empty").finish(), - - Err(e) => f.debug_struct("BROKEN_NODE") - .field("key", &self.key) - .field("error", &format!("ERROR decoding node branch Rlp: {}", e)) - .finish() - } - } else { - f.debug_struct("BROKEN_NODE") - .field("key", &self.key) - .field("error", &"Not found") - .finish() - } - } -} - -impl<'db, H, C> fmt::Debug for TrieDB<'db, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let root_rlp = self.db.get(self.root).expect("Trie root not found!"); - f.debug_struct("TrieDB") - .field("hash_count", &self.hash_count) - .field("root", &TrieAwareDebugNode { - trie: self, - key: &root_rlp - }) - .finish() - } -} - -#[derive(Clone, Eq, PartialEq)] -enum Status { - Entering, - At, - AtChild(usize), - Exiting, -} - -#[derive(Eq, PartialEq)] -struct Crumb { - node: OwnedNode, - status: Status, -} - -impl Crumb { - /// Move on to next status in the node's sequence. - fn increment(&mut self) { - self.status = match (&self.status, &self.node) { - (_, &OwnedNode::Empty) => Status::Exiting, - (&Status::Entering, _) => Status::At, - (&Status::At, &OwnedNode::Branch(_, _)) => Status::AtChild(0), - (&Status::AtChild(x), &OwnedNode::Branch(_, _)) if x < 15 => Status::AtChild(x + 1), - _ => Status::Exiting, - } - } -} - -/// Iterator for going through all values in the trie. -pub struct TrieDBIterator<'a, H: Hasher + 'a, C: NodeCodec + 'a> { - db: &'a TrieDB<'a, H, C>, - trail: Vec, - key_nibbles: Bytes, -} - -impl<'a, H: Hasher, C: NodeCodec> TrieDBIterator<'a, H, C> { - /// Create a new iterator. 
- pub fn new(db: &'a TrieDB) -> Result, H::Out, C::Error> { - let mut r = TrieDBIterator { db, trail: Vec::with_capacity(8), key_nibbles: Vec::with_capacity(64) }; - db.root_data().and_then(|root| r.descend(&root))?; - Ok(r) - } - - fn seek<'key>(&mut self, mut node_data: DBValue, mut key: NibbleSlice<'key>) -> Result<(), H::Out, C::Error> { - loop { - let (data, mid) = { - let node = C::decode(&node_data).expect("encoded data read from db; qed"); - match node { - Node::Leaf(slice, _) => { - if slice == key { - self.trail.push(Crumb { - status: Status::At, - node: node.clone().into(), - }); - } else { - self.trail.push(Crumb { - status: Status::Exiting, - node: node.clone().into(), - }); - } - - self.key_nibbles.extend(slice.iter()); - return Ok(()) - }, - Node::Extension(ref slice, ref item) => { - if key.starts_with(slice) { - self.trail.push(Crumb { - status: Status::At, - node: node.clone().into(), - }); - self.key_nibbles.extend(slice.iter()); - let data = self.db.get_raw_or_lookup(&*item)?; - (data, slice.len()) - } else { - self.descend(&node_data)?; - return Ok(()) - } - }, - Node::Branch(ref nodes, _) => match key.is_empty() { - true => { - self.trail.push(Crumb { - status: Status::At, - node: node.clone().into(), - }); - return Ok(()) - }, - false => { - let i = key.at(0); - self.trail.push(Crumb { - status: Status::AtChild(i as usize), - node: node.clone().into(), - }); - self.key_nibbles.push(i); - let child = self.db.get_raw_or_lookup(&*nodes[i as usize])?; - (child, 1) - } - }, - _ => return Ok(()), - } - }; - - node_data = data; - key = key.mid(mid); - } - } - - /// Descend into a payload. - fn descend(&mut self, d: &[u8]) -> Result<(), H::Out, C::Error> { - let node_data = &self.db.get_raw_or_lookup(d)?; - let node = C::decode(&node_data).expect("encoded node read from db; qed"); - Ok(self.descend_into_node(node.into())) - } - - /// Descend into a payload. - fn descend_into_node(&mut self, node: OwnedNode) { - self.trail.push(Crumb { status: Status::Entering, node }); - match &self.trail.last().expect("just pushed item; qed").node { - &OwnedNode::Leaf(ref n, _) | &OwnedNode::Extension(ref n, _) => { - self.key_nibbles.extend((0..n.len()).map(|i| n.at(i))); - }, - _ => {} - } - } - - /// The present key. - fn key(&self) -> Bytes { - // collapse the key_nibbles down to bytes. 
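		// e.g. nibbles [6, 5, 6, 15] collapse to the bytes [0x65, 0x6F]; a trailing odd
		// nibble, if any, is dropped by the `i < len` bound below.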
- let nibbles = &self.key_nibbles; - let mut i = 1; - let mut result = Bytes::with_capacity(nibbles.len() / 2); - let len = nibbles.len(); - while i < len { - result.push(nibbles[i - 1] * 16 + nibbles[i]); - i += 2; - } - result - } -} - -impl<'a, H: Hasher, C: NodeCodec> TrieIterator for TrieDBIterator<'a, H, C> { - /// Position the iterator on the first element with key >= `key` - fn seek(&mut self, key: &[u8]) -> Result<(), H::Out, C::Error> { - self.trail.clear(); - self.key_nibbles.clear(); - let root_rlp = self.db.root_data()?; - self.seek(root_rlp, NibbleSlice::new(key.as_ref())) - } -} - -impl<'a, H: Hasher, C: NodeCodec> Iterator for TrieDBIterator<'a, H, C> { - type Item = TrieItem<'a, H::Out, C::Error>; - - fn next(&mut self) -> Option { - enum IterStep { - Continue, - PopTrail, - Descend(Result), - } - loop { - let iter_step = { - self.trail.last_mut()?.increment(); - let b = self.trail.last().expect("trail.last_mut().is_some(); qed"); - - match (b.status.clone(), &b.node) { - (Status::Exiting, n) => { - match *n { - OwnedNode::Leaf(ref n, _) | OwnedNode::Extension(ref n, _) => { - let l = self.key_nibbles.len(); - self.key_nibbles.truncate(l - n.len()); - }, - OwnedNode::Branch(_, _) => { self.key_nibbles.pop(); }, - _ => {} - } - IterStep::PopTrail - }, - (Status::At, &OwnedNode::Leaf(_, ref v)) | (Status::At, &OwnedNode::Branch(_, Some(ref v))) => { - return Some(Ok((self.key(), v.clone()))); - }, - (Status::At, &OwnedNode::Extension(_, ref d)) => { - IterStep::Descend::(self.db.get_raw_or_lookup(&*d)) - }, - (Status::At, &OwnedNode::Branch(_, _)) => IterStep::Continue, - (Status::AtChild(i), &OwnedNode::Branch(ref children, _)) if children[i].len() > 0 => { - match i { - 0 => self.key_nibbles.push(0), - i => *self.key_nibbles.last_mut() - .expect("pushed as 0; moves sequentially; removed afterwards; qed") = i as u8, - } - IterStep::Descend::(self.db.get_raw_or_lookup(&*children[i])) - }, - (Status::AtChild(i), &OwnedNode::Branch(_, _)) => { - if i == 0 { - self.key_nibbles.push(0); - } - IterStep::Continue - }, - _ => panic!() // Should never see Entering or AtChild without a Branch here. 
- } - }; - - match iter_step { - IterStep::PopTrail => { - self.trail.pop(); - }, - IterStep::Descend::(Ok(d)) => { - let node = C::decode(&d).expect("encoded data read from db; qed"); - self.descend_into_node(node.into()) - }, - IterStep::Descend::(Err(e)) => { - return Some(Err(e)) - } - IterStep::Continue => {}, - } - } - } -} - -#[cfg(test)] -mod tests { - use hashdb::DBValue; - use keccak_hasher::KeccakHasher; - use memorydb::MemoryDB; - use ethtrie::{TrieDB, TrieDBMut, RlpCodec, trie::{Trie, TrieMut, Lookup}}; - use ethereum_types::H256; - - #[test] - fn iterator() { - let d = vec![DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B")]; - - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for x in &d { - t.insert(x, x).unwrap(); - } - } - - let t = TrieDB::new(&memdb, &root).unwrap(); - assert_eq!(d.iter().map(|i| i.clone().into_vec()).collect::>(), t.iter().unwrap().map(|x| x.unwrap().0).collect::>()); - assert_eq!(d, t.iter().unwrap().map(|x| x.unwrap().1).collect::>()); - } - - #[test] - fn iterator_seek() { - let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ]; - - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for x in &d { - t.insert(x, x).unwrap(); - } - } - - let t = TrieDB::new(&memdb, &root).unwrap(); - let mut iter = t.iter().unwrap(); - assert_eq!(iter.next().unwrap().unwrap(), (b"A".to_vec(), DBValue::from_slice(b"A"))); - iter.seek(b"!").unwrap(); - assert_eq!(d, iter.map(|x| x.unwrap().1).collect::>()); - let mut iter = t.iter().unwrap(); - iter.seek(b"A").unwrap(); - assert_eq!(&d[1..], &iter.map(|x| x.unwrap().1).collect::>()[..]); - let mut iter = t.iter().unwrap(); - iter.seek(b"AA").unwrap(); - assert_eq!(&d[2..], &iter.map(|x| x.unwrap().1).collect::>()[..]); - let mut iter = t.iter().unwrap(); - iter.seek(b"A!").unwrap(); - assert_eq!(&d[1..], &iter.map(|x| x.unwrap().1).collect::>()[..]); - let mut iter = t.iter().unwrap(); - iter.seek(b"AB").unwrap(); - assert_eq!(&d[3..], &iter.map(|x| x.unwrap().1).collect::>()[..]); - let mut iter = t.iter().unwrap(); - iter.seek(b"AB!").unwrap(); - assert_eq!(&d[3..], &iter.map(|x| x.unwrap().1).collect::>()[..]); - let mut iter = t.iter().unwrap(); - iter.seek(b"B").unwrap(); - assert_eq!(&d[4..], &iter.map(|x| x.unwrap().1).collect::>()[..]); - let mut iter = t.iter().unwrap(); - iter.seek(b"C").unwrap(); - assert_eq!(&d[4..], &iter.map(|x| x.unwrap().1).collect::>()[..]); - } - - #[test] - fn get_len() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(b"A", b"ABC").unwrap(); - t.insert(b"B", b"ABCBA").unwrap(); - } - - let t = TrieDB::new(&memdb, &root).unwrap(); - assert_eq!(t.get_with(b"A", |x: &[u8]| x.len()).unwrap(), Some(3)); - assert_eq!(t.get_with(b"B", |x: &[u8]| x.len()).unwrap(), Some(5)); - assert_eq!(t.get_with(b"C", |x: &[u8]| x.len()).unwrap(), None); - } - - #[test] - fn debug_output_supports_pretty_print() { - let d = vec![ DBValue::from_slice(b"A"), DBValue::from_slice(b"AA"), DBValue::from_slice(b"AB"), DBValue::from_slice(b"B") ]; - - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let root = { - let mut t = TrieDBMut::new(&mut memdb, &mut root); - for x in &d { - t.insert(x, x).unwrap(); - } - t.root().clone() - }; - let 
t = TrieDB::new(&memdb, &root).unwrap(); - - assert_eq!(format!("{:?}", t), "TrieDB { hash_count: 0, root: Node::Extension { slice: 4, item: Node::Branch { nodes: [Node::Empty, Node::Branch { nodes: [Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Branch { nodes: [Node::Empty, Node::Leaf { slice: , value: [65, 65] }, Node::Leaf { slice: , value: [65, 66] }, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty], value: None }, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty], value: Some([65]) }, Node::Leaf { slice: , value: [66] }, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty, Node::Empty], value: None } } }"); - assert_eq!(format!("{:#?}", t), -"TrieDB { - hash_count: 0, - root: Node::Extension { - slice: 4, - item: Node::Branch { - nodes: [ - Node::Empty, - Node::Branch { - nodes: [ - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Branch { - nodes: [ - Node::Empty, - Node::Leaf { - slice: , - value: [ - 65, - 65 - ] - }, - Node::Leaf { - slice: , - value: [ - 65, - 66 - ] - }, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty - ], - value: None - }, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty - ], - value: Some( - [ - 65 - ] - ) - }, - Node::Leaf { - slice: , - value: [ - 66 - ] - }, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty, - Node::Empty - ], - value: None - } - } -}"); - } - - #[test] - fn test_lookup_with_corrupt_data_returns_decoder_error() { - use rlp; - use ethereum_types::H512; - use std::marker::PhantomData; - use ethtrie::trie::NibbleSlice; - - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(b"A", b"ABC").unwrap(); - t.insert(b"B", b"ABCBA").unwrap(); - } - - let t = TrieDB::new(&memdb, &root).unwrap(); - - // query for an invalid data type to trigger an error - let q = rlp::decode::; - let lookup = Lookup::<_, RlpCodec, _>{ db: t.db(), query: q, hash: root, marker: PhantomData }; - let query_result = lookup.look_up(NibbleSlice::new(b"A")); - assert_eq!(query_result.unwrap().unwrap().unwrap_err(), rlp::DecoderError::RlpIsTooShort); - } -} \ No newline at end of file diff --git a/util/patricia_trie/src/triedbmut.rs b/util/patricia_trie/src/triedbmut.rs deleted file mode 100644 index 4490285d5..000000000 --- a/util/patricia_trie/src/triedbmut.rs +++ /dev/null @@ -1,1332 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! In-memory trie representation. - -use super::{Result, TrieError, TrieMut}; -use super::lookup::Lookup; -use super::node::Node as EncodedNode; -use node_codec::NodeCodec; -use super::node::NodeKey; - -use bytes::ToPretty; -use hashdb::{HashDB, Hasher, DBValue}; -use nibbleslice::NibbleSlice; - -use elastic_array::ElasticArray1024; -use std::collections::{HashSet, VecDeque}; -use std::marker::PhantomData; -use std::mem; -use std::ops::Index; -use heapsize::HeapSizeOf; -use std::{fmt::Debug, hash::Hash}; - -// For lookups into the Node storage buffer. -// This is deliberately non-copyable. -#[derive(Debug)] -struct StorageHandle(usize); - -// Handles to nodes in the trie. -#[derive(Debug)] -enum NodeHandle { - /// Loaded into memory. - InMemory(StorageHandle), - /// Either a hash or an inline node - Hash(H), -} - -impl From for NodeHandle { - fn from(handle: StorageHandle) -> Self { - NodeHandle::InMemory(handle) - } -} - -fn empty_children() -> Box<[Option>; 16]> { - Box::new([ - None, None, None, None, None, None, None, None, - None, None, None, None, None, None, None, None, - ]) -} - -/// Node types in the Trie. -#[derive(Debug)] -enum Node { - /// Empty node. - Empty, - /// A leaf node contains the end of a key and a value. - /// This key is encoded from a `NibbleSlice`, meaning it contains - /// a flag indicating it is a leaf. - Leaf(NodeKey, DBValue), - /// An extension contains a shared portion of a key and a child node. - /// The shared portion is encoded from a `NibbleSlice` meaning it contains - /// a flag indicating it is an extension. - /// The child node is always a branch. - Extension(NodeKey, NodeHandle), - /// A branch has up to 16 children and an optional value. - Branch(Box<[Option>; 16]>, Option) -} - -impl Node where O: AsRef<[u8]> + AsMut<[u8]> + Default + HeapSizeOf + Debug + PartialEq + Eq + Hash + Send + Sync + Clone + Copy { - // load an inline node into memory or get the hash to do the lookup later. - fn inline_or_hash(node: &[u8], db: &HashDB, storage: &mut NodeStorage) -> NodeHandle - where C: NodeCodec, - H: Hasher, - { - C::try_decode_hash(&node) - .map(NodeHandle::Hash) - .unwrap_or_else(|| { - let child = Node::from_encoded::(node, db, storage); - NodeHandle::InMemory(storage.alloc(Stored::New(child))) - }) - } - - // decode a node from encoded bytes without getting its children. 
- fn from_encoded(data: &[u8], db: &HashDB, storage: &mut NodeStorage) -> Self - where C: NodeCodec, - H: Hasher, - { - match C::decode(data).expect("encoded bytes read from db; qed") { - EncodedNode::Empty => Node::Empty, - EncodedNode::Leaf(k, v) => Node::Leaf(k.encoded(true), DBValue::from_slice(&v)), - EncodedNode::Extension(key, cb) => { - Node::Extension( - key.encoded(false), - Self::inline_or_hash::(cb, db, storage)) - } - EncodedNode::Branch(ref encoded_children, val) => { - let mut child = |i:usize| { - let raw = encoded_children[i]; - if !C::is_empty_node(raw) { - Some(Self::inline_or_hash::(raw, db, storage)) - } else { - None - } - }; - - let children = Box::new([ - child(0), child(1), child(2), child(3), - child(4), child(5), child(6), child(7), - child(8), child(9), child(10), child(11), - child(12), child(13), child(14), child(15), - ]); - - Node::Branch(children, val.map(DBValue::from_slice)) - } - } - } - - // TODO: parallelize - fn into_encoded(self, mut child_cb: F) -> ElasticArray1024 - where - C: NodeCodec, - F: FnMut(NodeHandle) -> ChildReference, - H: Hasher, - { - match self { - Node::Empty => C::empty_node(), - Node::Leaf(partial, value) => C::leaf_node(&partial, &value), - Node::Extension(partial, child) => C::ext_node(&partial, child_cb(child)), - Node::Branch(mut children, value) => { - C::branch_node( - // map the `NodeHandle`s from the Branch to `ChildReferences` - children.iter_mut() - .map(Option::take) - .map(|maybe_child| - maybe_child.map(|child| child_cb(child)) - ), - value - ) - } - } - } -} - -// post-inspect action. -enum Action { - // Replace a node with a new one. - Replace(Node), - // Restore the original node. This trusts that the node is actually the original. - Restore(Node), - // if it is a new node, just clears the storage. - Delete, -} - -// post-insert action. Same as action without delete -enum InsertAction { - // Replace a node with a new one. - Replace(Node), - // Restore the original node. - Restore(Node), -} - -impl InsertAction { - fn into_action(self) -> Action { - match self { - InsertAction::Replace(n) => Action::Replace(n), - InsertAction::Restore(n) => Action::Restore(n), - } - } - - // unwrap the node, disregarding replace or restore state. - fn unwrap_node(self) -> Node { - match self { - InsertAction::Replace(n) | InsertAction::Restore(n) => n, - } - } -} - -// What kind of node is stored here. -enum Stored { - // A new node. - New(Node), - // A cached node, loaded from the DB. - Cached(Node, H), -} - -/// Used to build a collection of child nodes from a collection of `NodeHandle`s -pub enum ChildReference { // `HO` is e.g. `H256`, i.e. the output of a `Hasher` - Hash(HO), - Inline(HO, usize), // usize is the length of the node data we store in the `H::Out` -} - -/// Compact and cache-friendly storage for Trie nodes. -struct NodeStorage { - nodes: Vec>, - free_indices: VecDeque, -} - -impl NodeStorage { - /// Create a new storage. - fn empty() -> Self { - NodeStorage { - nodes: Vec::new(), - free_indices: VecDeque::new(), - } - } - - /// Allocate a new node in the storage. - fn alloc(&mut self, stored: Stored) -> StorageHandle { - if let Some(idx) = self.free_indices.pop_front() { - self.nodes[idx] = stored; - StorageHandle(idx) - } else { - self.nodes.push(stored); - StorageHandle(self.nodes.len() - 1) - } - } - - /// Remove a node from the storage, consuming the handle and returning the node. 
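	/// The freed slot index is pushed onto `free_indices` so a later `alloc` can
	/// reuse it instead of growing the node vector.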
- fn destroy(&mut self, handle: StorageHandle) -> Stored { - let idx = handle.0; - - self.free_indices.push_back(idx); - mem::replace(&mut self.nodes[idx], Stored::New(Node::Empty)) - } -} - -impl<'a, H> Index<&'a StorageHandle> for NodeStorage { - type Output = Node; - - fn index(&self, handle: &'a StorageHandle) -> &Node { - match self.nodes[handle.0] { - Stored::New(ref node) => node, - Stored::Cached(ref node, _) => node, - } - } -} - -/// A `Trie` implementation using a generic `HashDB` backing database. -/// -/// Use it as a `TrieMut` trait object. You can use `db()` to get the backing database object. -/// Note that changes are not committed to the database until `commit` is called. -/// Querying the root or dropping the trie will commit automatically. -/// -/// # Example -/// ``` -/// extern crate patricia_trie as trie; -/// extern crate patricia_trie_ethereum as ethtrie; -/// extern crate hashdb; -/// extern crate keccak_hash; -/// extern crate keccak_hasher; -/// extern crate memorydb; -/// extern crate ethereum_types; -/// -/// use keccak_hash::KECCAK_NULL_RLP; -/// use ethtrie::{TrieDBMut, trie::TrieMut}; -/// use hashdb::DBValue; -/// use keccak_hasher::KeccakHasher; -/// use memorydb::*; -/// use ethereum_types::H256; -/// -/// fn main() { -/// let mut memdb = MemoryDB::::new(); -/// let mut root = H256::new(); -/// let mut t = TrieDBMut::new(&mut memdb, &mut root); -/// assert!(t.is_empty()); -/// assert_eq!(*t.root(), KECCAK_NULL_RLP); -/// t.insert(b"foo", b"bar").unwrap(); -/// assert!(t.contains(b"foo").unwrap()); -/// assert_eq!(t.get(b"foo").unwrap().unwrap(), DBValue::from_slice(b"bar")); -/// t.remove(b"foo").unwrap(); -/// assert!(!t.contains(b"foo").unwrap()); -/// } -/// ``` -pub struct TrieDBMut<'a, H, C> -where - H: Hasher + 'a, - C: NodeCodec -{ - storage: NodeStorage, - db: &'a mut HashDB, - root: &'a mut H::Out, - root_handle: NodeHandle, - death_row: HashSet, - /// The number of hash operations this trie has performed. - /// Note that none are performed until changes are committed. - hash_count: usize, - marker: PhantomData, // TODO: rpheimer: "we could have the NodeCodec trait take &self to its methods and then we don't need PhantomData. we can just store an instance of C: NodeCodec in the trie struct. If it's a ZST it won't have any additional overhead anyway" -} - -impl<'a, H, C> TrieDBMut<'a, H, C> -where - H: Hasher, - C: NodeCodec -{ - /// Create a new trie with backing database `db` and empty `root`. - pub fn new(db: &'a mut HashDB, root: &'a mut H::Out) -> Self { - *root = C::HASHED_NULL_NODE; - let root_handle = NodeHandle::Hash(C::HASHED_NULL_NODE); - - TrieDBMut { - storage: NodeStorage::empty(), - db: db, - root: root, - root_handle: root_handle, - death_row: HashSet::new(), - hash_count: 0, - marker: PhantomData, - } - } - - /// Create a new trie with the backing database `db` and `root. - /// Returns an error if `root` does not exist. - pub fn from_existing(db: &'a mut HashDB, root: &'a mut H::Out) -> Result { - if !db.contains(root) { - return Err(Box::new(TrieError::InvalidStateRoot(*root))); - } - - let root_handle = NodeHandle::Hash(*root); - Ok(TrieDBMut { - storage: NodeStorage::empty(), - db: db, - root: root, - root_handle: root_handle, - death_row: HashSet::new(), - hash_count: 0, - marker: PhantomData, - }) - } - /// Get the backing database. - pub fn db(&self) -> &HashDB { - self.db - } - - /// Get the backing database mutably. 
- pub fn db_mut(&mut self) -> &mut HashDB { - self.db - } - - // cache a node by hash - fn cache(&mut self, hash: H::Out) -> Result { - let node_encoded = self.db.get(&hash).ok_or_else(|| Box::new(TrieError::IncompleteDatabase(hash)))?; - let node = Node::from_encoded::( - &node_encoded, - &*self.db, - &mut self.storage - ); - Ok(self.storage.alloc(Stored::Cached(node, hash))) - } - - // inspect a node, choosing either to replace, restore, or delete it. - // if restored or replaced, returns the new node along with a flag of whether it was changed. - fn inspect(&mut self, stored: Stored, inspector: F) -> Result, bool)>, H::Out, C::Error> - where F: FnOnce(&mut Self, Node) -> Result, H::Out, C::Error> { - Ok(match stored { - Stored::New(node) => match inspector(self, node)? { - Action::Restore(node) => Some((Stored::New(node), false)), - Action::Replace(node) => Some((Stored::New(node), true)), - Action::Delete => None, - }, - Stored::Cached(node, hash) => match inspector(self, node)? { - Action::Restore(node) => Some((Stored::Cached(node, hash), false)), - Action::Replace(node) => { - self.death_row.insert(hash); - Some((Stored::New(node), true)) - } - Action::Delete => { - self.death_row.insert(hash); - None - } - }, - }) - } - - // walk the trie, attempting to find the key's node. - fn lookup<'x, 'key>(&'x self, mut partial: NibbleSlice<'key>, handle: &NodeHandle) -> Result, H::Out, C::Error> - where 'x: 'key - { - let mut handle = handle; - loop { - let (mid, child) = match *handle { - NodeHandle::Hash(ref hash) => return Lookup{ - db: &*self.db, - query: DBValue::from_slice, - hash: hash.clone(), - marker: PhantomData::, - }.look_up(partial), - NodeHandle::InMemory(ref handle) => match self.storage[handle] { - Node::Empty => return Ok(None), - Node::Leaf(ref key, ref value) => { - if NibbleSlice::from_encoded(key).0 == partial { - return Ok(Some(DBValue::from_slice(value))); - } else { - return Ok(None); - } - } - Node::Extension(ref slice, ref child) => { - let slice = NibbleSlice::from_encoded(slice).0; - if partial.starts_with(&slice) { - (slice.len(), child) - } else { - return Ok(None); - } - } - Node::Branch(ref children, ref value) => { - if partial.is_empty() { - return Ok(value.as_ref().map(|v| DBValue::from_slice(v))); - } else { - let idx = partial.at(0); - match children[idx as usize].as_ref() { - Some(child) => (1, child), - None => return Ok(None), - } - } - } - } - }; - - partial = partial.mid(mid); - handle = child; - } - } - - /// insert a key-value pair into the trie, creating new nodes if necessary. - fn insert_at(&mut self, handle: NodeHandle, partial: NibbleSlice, value: DBValue, old_val: &mut Option) -> Result<(StorageHandle, bool), H::Out, C::Error> { - let h = match handle { - NodeHandle::InMemory(h) => h, - NodeHandle::Hash(h) => self.cache(h)?, - }; - let stored = self.storage.destroy(h); - let (new_stored, changed) = self.inspect(stored, move |trie, stored| { - trie.insert_inspector(stored, partial, value, old_val).map(|a| a.into_action()) - })?.expect("Insertion never deletes."); - - Ok((self.storage.alloc(new_stored), changed)) - } - - /// the insertion inspector. 
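A condensed model of the change-tracking that `inspect` above performs, leaving out the cached-node and death-row handling: the caller gets back the node together with a flag saying whether the inspector actually changed it, or nothing at all if the node was deleted. Names here are illustrative.

enum Action<N> {
    Replace(N),
    Restore(N),
    Delete,
}

// Mirror of the Stored::New arm of `inspect`: Replace means "changed",
// Restore means "unchanged", Delete drops the node entirely.
fn apply<N>(action: Action<N>) -> Option<(N, bool)> {
    match action {
        Action::Replace(n) => Some((n, true)),
        Action::Restore(n) => Some((n, false)),
        Action::Delete => None,
    }
}

fn main() {
    assert_eq!(apply(Action::Replace(1u32)), Some((1, true)));
    assert_eq!(apply(Action::Restore(2u32)), Some((2, false)));
    assert_eq!(apply(Action::<u32>::Delete), None);
}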
- fn insert_inspector(&mut self, node: Node, partial: NibbleSlice, value: DBValue, old_val: &mut Option) -> Result, H::Out, C::Error> { - trace!(target: "trie", "augmented (partial: {:?}, value: {:?})", partial, value.pretty()); - - Ok(match node { - Node::Empty => { - trace!(target: "trie", "empty: COMPOSE"); - InsertAction::Replace(Node::Leaf(partial.encoded(true), value)) - } - Node::Branch(mut children, stored_value) => { - trace!(target: "trie", "branch: ROUTE,AUGMENT"); - - if partial.is_empty() { - let unchanged = stored_value.as_ref() == Some(&value); - let branch = Node::Branch(children, Some(value)); - *old_val = stored_value; - - match unchanged { - true => InsertAction::Restore(branch), - false => InsertAction::Replace(branch), - } - } else { - let idx = partial.at(0) as usize; - let partial = partial.mid(1); - if let Some(child) = children[idx].take() { - // original had something there. recurse down into it. - let (new_child, changed) = self.insert_at(child, partial, value, old_val)?; - children[idx] = Some(new_child.into()); - if !changed { - // the new node we composed didn't change. that means our branch is untouched too. - return Ok(InsertAction::Restore(Node::Branch(children, stored_value))); - } - } else { - // original had nothing there. compose a leaf. - let leaf = self.storage.alloc(Stored::New(Node::Leaf(partial.encoded(true), value))); - children[idx] = Some(leaf.into()); - } - - InsertAction::Replace(Node::Branch(children, stored_value)) - } - } - Node::Leaf(encoded, stored_value) => { - let existing_key = NibbleSlice::from_encoded(&encoded).0; - let cp = partial.common_prefix(&existing_key); - if cp == existing_key.len() && cp == partial.len() { - trace!(target: "trie", "equivalent-leaf: REPLACE"); - // equivalent leaf. - let unchanged = stored_value == value; - *old_val = Some(stored_value); - - match unchanged { - // unchanged. restore - true => InsertAction::Restore(Node::Leaf(encoded.clone(), value)), - false => InsertAction::Replace(Node::Leaf(encoded.clone(), value)), - } - } else if cp == 0 { - trace!(target: "trie", "no-common-prefix, not-both-empty (exist={:?}; new={:?}): TRANSMUTE,AUGMENT", existing_key.len(), partial.len()); - - // one of us isn't empty: transmute to branch here - let mut children = empty_children(); - let branch = if existing_key.is_empty() { - // always replace since branch isn't leaf. - Node::Branch(children, Some(stored_value)) - } else { - let idx = existing_key.at(0) as usize; - let new_leaf = Node::Leaf(existing_key.mid(1).encoded(true), stored_value); - children[idx] = Some(self.storage.alloc(Stored::New(new_leaf)).into()); - - Node::Branch(children, None) - }; - - // always replace because whatever we get out here is not the branch we started with. - let branch_action = self.insert_inspector(branch, partial, value, old_val)?.unwrap_node(); - InsertAction::Replace(branch_action) - } else if cp == existing_key.len() { - trace!(target: "trie", "complete-prefix (cp={:?}): AUGMENT-AT-END", cp); - - // fully-shared prefix for an extension. - // make a stub branch and an extension. - let branch = Node::Branch(empty_children(), Some(stored_value)); - // augment the new branch. - let branch = self.insert_inspector(branch, partial.mid(cp), value, old_val)?.unwrap_node(); - - // always replace since we took a leaf and made an extension. 
- let branch_handle = self.storage.alloc(Stored::New(branch)).into(); - InsertAction::Replace(Node::Extension(existing_key.encoded(false), branch_handle)) - } else { - trace!(target: "trie", "partially-shared-prefix (exist={:?}; new={:?}; cp={:?}): AUGMENT-AT-END", existing_key.len(), partial.len(), cp); - - // partially-shared prefix for an extension. - // start by making a leaf. - let low = Node::Leaf(existing_key.mid(cp).encoded(true), stored_value); - - // augment it. this will result in the Leaf -> cp == 0 routine, - // which creates a branch. - let augmented_low = self.insert_inspector(low, partial.mid(cp), value, old_val)?.unwrap_node(); - - // make an extension using it. this is a replacement. - InsertAction::Replace(Node::Extension( - existing_key.encoded_leftmost(cp, false), - self.storage.alloc(Stored::New(augmented_low)).into() - )) - } - } - Node::Extension(encoded, child_branch) => { - let existing_key = NibbleSlice::from_encoded(&encoded).0; - let cp = partial.common_prefix(&existing_key); - if cp == 0 { - trace!(target: "trie", "no-common-prefix, not-both-empty (exist={:?}; new={:?}): TRANSMUTE,AUGMENT", existing_key.len(), partial.len()); - - // partial isn't empty: make a branch here - // extensions may not have empty partial keys. - assert!(!existing_key.is_empty()); - let idx = existing_key.at(0) as usize; - - let mut children = empty_children(); - children[idx] = if existing_key.len() == 1 { - // direct extension, just replace. - Some(child_branch) - } else { - // more work required after branching. - let ext = Node::Extension(existing_key.mid(1).encoded(false), child_branch); - Some(self.storage.alloc(Stored::New(ext)).into()) - }; - - // continue inserting. - let branch_action = self.insert_inspector(Node::Branch(children, None), partial, value, old_val)?.unwrap_node(); - InsertAction::Replace(branch_action) - } else if cp == existing_key.len() { - trace!(target: "trie", "complete-prefix (cp={:?}): AUGMENT-AT-END", cp); - - // fully-shared prefix. - - // insert into the child node. - let (new_child, changed) = self.insert_at(child_branch, partial.mid(cp), value, old_val)?; - let new_ext = Node::Extension(existing_key.encoded(false), new_child.into()); - - // if the child branch wasn't changed, meaning this extension remains the same. - match changed { - true => InsertAction::Replace(new_ext), - false => InsertAction::Restore(new_ext), - } - } else { - trace!(target: "trie", "partially-shared-prefix (exist={:?}; new={:?}; cp={:?}): AUGMENT-AT-END", existing_key.len(), partial.len(), cp); - - // partially-shared. - let low = Node::Extension(existing_key.mid(cp).encoded(false), child_branch); - // augment the extension. this will take the cp == 0 path, creating a branch. - let augmented_low = self.insert_inspector(low, partial.mid(cp), value, old_val)?.unwrap_node(); - - // always replace, since this extension is not the one we started with. - // this is known because the partial key is only the common prefix. - InsertAction::Replace(Node::Extension( - existing_key.encoded_leftmost(cp, false), - self.storage.alloc(Stored::New(augmented_low)).into() - )) - } - } - }) - } - - /// Remove a node from the trie based on key. 
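Every leaf and extension arm above branches on how much of the existing partial key is shared with the key being inserted. A toy classifier over plain byte slices (the real code works on nibble slices) sketching the four cases that `insert_inspector` distinguishes:

fn classify(existing: &[u8], new: &[u8]) -> &'static str {
    // Length of the shared prefix, the `cp` of the code above.
    let cp = existing.iter().zip(new.iter()).take_while(|(a, b)| a == b).count();
    if cp == existing.len() && cp == new.len() {
        "same key: replace the value in place"
    } else if cp == 0 {
        "no shared prefix: turn the node into a branch"
    } else if cp == existing.len() {
        "existing key is a prefix: keep an extension and recurse below it"
    } else {
        "partially shared: split at the common prefix"
    }
}

fn main() {
    assert_eq!(classify(b"abc", b"abc"), "same key: replace the value in place");
    assert_eq!(classify(b"abc", b"xyz"), "no shared prefix: turn the node into a branch");
    assert_eq!(classify(b"ab", b"abcd"), "existing key is a prefix: keep an extension and recurse below it");
    assert_eq!(classify(b"abcd", b"abxy"), "partially shared: split at the common prefix");
}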
- fn remove_at(&mut self, handle: NodeHandle, partial: NibbleSlice, old_val: &mut Option) -> Result, H::Out, C::Error> { - let stored = match handle { - NodeHandle::InMemory(h) => self.storage.destroy(h), - NodeHandle::Hash(h) => { - let handle = self.cache(h)?; - self.storage.destroy(handle) - } - }; - - let opt = self.inspect(stored, move |trie, node| trie.remove_inspector(node, partial, old_val))?; - - Ok(opt.map(|(new, changed)| (self.storage.alloc(new), changed))) - } - - /// the removal inspector - fn remove_inspector(&mut self, node: Node, partial: NibbleSlice, old_val: &mut Option) -> Result, H::Out, C::Error> { - Ok(match (node, partial.is_empty()) { - (Node::Empty, _) => Action::Delete, - (Node::Branch(c, None), true) => Action::Restore(Node::Branch(c, None)), - (Node::Branch(children, Some(val)), true) => { - *old_val = Some(val); - // always replace since we took the value out. - Action::Replace(self.fix(Node::Branch(children, None))?) - } - (Node::Branch(mut children, value), false) => { - let idx = partial.at(0) as usize; - if let Some(child) = children[idx].take() { - trace!(target: "trie", "removing value out of branch child, partial={:?}", partial); - match self.remove_at(child, partial.mid(1), old_val)? { - Some((new, changed)) => { - children[idx] = Some(new.into()); - let branch = Node::Branch(children, value); - match changed { - // child was changed, so we were too. - true => Action::Replace(branch), - // unchanged, so we are too. - false => Action::Restore(branch), - } - } - None => { - // the child we took was deleted. - // the node may need fixing. - trace!(target: "trie", "branch child deleted, partial={:?}", partial); - Action::Replace(self.fix(Node::Branch(children, value))?) - } - } - } else { - // no change needed. - Action::Restore(Node::Branch(children, value)) - } - } - (Node::Leaf(encoded, value), _) => { - if NibbleSlice::from_encoded(&encoded).0 == partial { - // this is the node we were looking for. Let's delete it. - *old_val = Some(value); - Action::Delete - } else { - // leaf the node alone. - trace!(target: "trie", "restoring leaf wrong partial, partial={:?}, existing={:?}", partial, NibbleSlice::from_encoded(&encoded).0); - Action::Restore(Node::Leaf(encoded, value)) - } - } - (Node::Extension(encoded, child_branch), _) => { - let (cp, existing_len) = { - let existing_key = NibbleSlice::from_encoded(&encoded).0; - (existing_key.common_prefix(&partial), existing_key.len()) - }; - if cp == existing_len { - // try to remove from the child branch. - trace!(target: "trie", "removing from extension child, partial={:?}", partial); - match self.remove_at(child_branch, partial.mid(cp), old_val)? { - Some((new_child, changed)) => { - let new_child = new_child.into(); - - // if the child branch was unchanged, then the extension is too. - // otherwise, this extension may need fixing. - match changed { - true => Action::Replace(self.fix(Node::Extension(encoded, new_child))?), - false => Action::Restore(Node::Extension(encoded, new_child)), - } - } - None => { - // the whole branch got deleted. - // that means that this extension is useless. - Action::Delete - } - } - } else { - // partway through an extension -- nothing to do here. - Action::Restore(Node::Extension(encoded, child_branch)) - } - } - }) - } - - /// Given a node which may be in an _invalid state_, fix it such that it is then in a valid - /// state. 
- /// - /// _invalid state_ means: - /// - Branch node where there is only a single entry; - /// - Extension node followed by anything other than a Branch node. - fn fix(&mut self, node: Node) -> Result, H::Out, C::Error> { - match node { - Node::Branch(mut children, value) => { - // if only a single value, transmute to leaf/extension and feed through fixed. - #[derive(Debug)] - enum UsedIndex { - None, - One(u8), - Many, - }; - let mut used_index = UsedIndex::None; - for i in 0..16 { - match (children[i].is_none(), &used_index) { - (false, &UsedIndex::None) => used_index = UsedIndex::One(i as u8), - (false, &UsedIndex::One(_)) => { - used_index = UsedIndex::Many; - break; - } - _ => continue, - } - } - - match (used_index, value) { - (UsedIndex::None, None) => panic!("Branch with no subvalues. Something went wrong."), - (UsedIndex::One(a), None) => { - // only one onward node. make an extension. - let new_partial = NibbleSlice::new_offset(&[a], 1).encoded(false); - let child = children[a as usize].take().expect("used_index only set if occupied; qed"); - let new_node = Node::Extension(new_partial, child); - self.fix(new_node) - } - (UsedIndex::None, Some(value)) => { - // make a leaf. - trace!(target: "trie", "fixing: branch -> leaf"); - Ok(Node::Leaf(NibbleSlice::new(&[]).encoded(true), value)) - } - (_, value) => { - // all is well. - trace!(target: "trie", "fixing: restoring branch"); - Ok(Node::Branch(children, value)) - } - } - } - Node::Extension(partial, child) => { - let stored = match child { - NodeHandle::InMemory(h) => self.storage.destroy(h), - NodeHandle::Hash(h) => { - let handle = self.cache(h)?; - self.storage.destroy(handle) - } - }; - - let (child_node, maybe_hash) = match stored { - Stored::New(node) => (node, None), - Stored::Cached(node, hash) => (node, Some(hash)) - }; - - match child_node { - Node::Extension(sub_partial, sub_child) => { - // combine with node below. - if let Some(hash) = maybe_hash { - // delete the cached child since we are going to replace it. - self.death_row.insert(hash); - } - let partial = NibbleSlice::from_encoded(&partial).0; - let sub_partial = NibbleSlice::from_encoded(&sub_partial).0; - - let new_partial = NibbleSlice::new_composed(&partial, &sub_partial); - trace!(target: "trie", "fixing: extension combination. new_partial={:?}", new_partial); - self.fix(Node::Extension(new_partial.encoded(false), sub_child)) - } - Node::Leaf(sub_partial, value) => { - // combine with node below. - if let Some(hash) = maybe_hash { - // delete the cached child since we are going to replace it. - self.death_row.insert(hash); - } - let partial = NibbleSlice::from_encoded(&partial).0; - let sub_partial = NibbleSlice::from_encoded(&sub_partial).0; - - let new_partial = NibbleSlice::new_composed(&partial, &sub_partial); - trace!(target: "trie", "fixing: extension -> leaf. new_partial={:?}", new_partial); - Ok(Node::Leaf(new_partial.encoded(true), value)) - } - child_node => { - trace!(target: "trie", "fixing: restoring extension"); - - // reallocate the child node. - let stored = if let Some(hash) = maybe_hash { - Stored::Cached(child_node, hash) - } else { - Stored::New(child_node) - }; - - Ok(Node::Extension(partial, self.storage.alloc(stored).into())) - } - } - } - other => Ok(other), // only ext and branch need fixing. - } - } - - /// Commit the in-memory changes to disk, freeing their storage and - /// updating the state root. 
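The `commit` path just below relies on deferred deletion: while mutating, the hashes of cached nodes that were replaced or removed are collected in `death_row`, and only purged from the backing database once the trie is committed. A minimal standalone sketch of that bookkeeping, with a plain 32-byte array standing in for `H::Out` and a closure standing in for the database handle:

use std::collections::HashSet;

struct DeathRow {
    pending: HashSet<[u8; 32]>,
}

impl DeathRow {
    fn new() -> Self {
        DeathRow { pending: HashSet::new() }
    }

    // Record a node hash whose on-disk copy is now stale.
    fn mark(&mut self, hash: [u8; 32]) {
        self.pending.insert(hash);
    }

    // At commit time, remove every marked node exactly once.
    fn purge<F: FnMut(&[u8; 32])>(&mut self, mut remove: F) {
        for hash in self.pending.drain() {
            remove(&hash);
        }
    }
}

fn main() {
    let mut row = DeathRow::new();
    row.mark([1u8; 32]);
    row.mark([1u8; 32]); // marking twice is harmless: it is a set
    let mut removed = 0;
    row.purge(|_| removed += 1);
    assert_eq!(removed, 1);
}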
- pub fn commit(&mut self) { - trace!(target: "trie", "Committing trie changes to db."); - - // always kill all the nodes on death row. - trace!(target: "trie", "{:?} nodes to remove from db", self.death_row.len()); - for hash in self.death_row.drain() { - self.db.remove(&hash); - } - - let handle = match self.root_handle() { - NodeHandle::Hash(_) => return, // no changes necessary. - NodeHandle::InMemory(h) => h, - }; - - match self.storage.destroy(handle) { - Stored::New(node) => { - let encoded_root = node.into_encoded::<_, C, H>(|child| self.commit_child(child) ); - *self.root = self.db.insert(&encoded_root[..]); - self.hash_count += 1; - - trace!(target: "trie", "encoded root node: {:?}", (&encoded_root[..]).pretty()); - self.root_handle = NodeHandle::Hash(*self.root); - } - Stored::Cached(node, hash) => { - // probably won't happen, but update the root and move on. - *self.root = hash; - self.root_handle = NodeHandle::InMemory(self.storage.alloc(Stored::Cached(node, hash))); - } - } - } - - /// Commit a node by hashing it and writing it to the db. Returns a - /// `ChildReference` which in most cases carries a normal hash but for the - /// case where we can fit the actual data in the `Hasher`s output type, we - /// store the data inline. This function is used as the callback to the - /// `into_encoded` method of `Node`. - fn commit_child(&mut self, handle: NodeHandle) -> ChildReference { - match handle { - NodeHandle::Hash(hash) => ChildReference::Hash(hash), - NodeHandle::InMemory(storage_handle) => { - match self.storage.destroy(storage_handle) { - Stored::Cached(_, hash) => ChildReference::Hash(hash), - Stored::New(node) => { - let encoded = node.into_encoded::<_, C, H>(|node_handle| self.commit_child(node_handle) ); - if encoded.len() >= H::LENGTH { - let hash = self.db.insert(&encoded[..]); - self.hash_count +=1; - ChildReference::Hash(hash) - } else { - // it's a small value, so we cram it into a `H::Out` and tag with length - let mut h = H::Out::default(); - let len = encoded.len(); - h.as_mut()[..len].copy_from_slice(&encoded[..len]); - ChildReference::Inline(h, len) - } - } - } - } - } - } - - // a hack to get the root node's handle - fn root_handle(&self) -> NodeHandle { - match self.root_handle { - NodeHandle::Hash(h) => NodeHandle::Hash(h), - NodeHandle::InMemory(StorageHandle(x)) => NodeHandle::InMemory(StorageHandle(x)), - } - } -} - -impl<'a, H, C> TrieMut for TrieDBMut<'a, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn root(&mut self) -> &H::Out { - self.commit(); - self.root - } - - fn is_empty(&self) -> bool { - match self.root_handle { - NodeHandle::Hash(h) => h == C::HASHED_NULL_NODE, - NodeHandle::InMemory(ref h) => match self.storage[h] { - Node::Empty => true, - _ => false, - } - } - } - - fn get<'x, 'key>(&'x self, key: &'key [u8]) -> Result, H::Out, C::Error> - where 'x: 'key - { - self.lookup(NibbleSlice::new(key), &self.root_handle) - } - - fn insert(&mut self, key: &[u8], value: &[u8]) -> Result, H::Out, C::Error> { - if value.is_empty() { return self.remove(key) } - - let mut old_val = None; - - trace!(target: "trie", "insert: key={:?}, value={:?}", key.pretty(), value.pretty()); - - let root_handle = self.root_handle(); - let (new_handle, changed) = self.insert_at( - root_handle, - NibbleSlice::new(key), - DBValue::from_slice(value), - &mut old_val, - )?; - - trace!(target: "trie", "insert: altered trie={}", changed); - self.root_handle = NodeHandle::InMemory(new_handle); - - Ok(old_val) - } - - fn remove(&mut self, key: &[u8]) -> Result, H::Out, 
C::Error> { - trace!(target: "trie", "remove: key={:?}", key.pretty()); - - let root_handle = self.root_handle(); - let key = NibbleSlice::new(key); - let mut old_val = None; - - match self.remove_at(root_handle, key, &mut old_val)? { - Some((handle, changed)) => { - trace!(target: "trie", "remove: altered trie={}", changed); - self.root_handle = NodeHandle::InMemory(handle); - } - None => { - trace!(target: "trie", "remove: obliterated trie"); - self.root_handle = NodeHandle::Hash(C::HASHED_NULL_NODE); - *self.root = C::HASHED_NULL_NODE; - } - } - - Ok(old_val) - } -} - -impl<'a, H, C> Drop for TrieDBMut<'a, H, C> -where - H: Hasher, - C: NodeCodec -{ - fn drop(&mut self) { - self.commit(); - } -} - -#[cfg(test)] -mod tests { - use bytes::ToPretty; - use hashdb::{DBValue, Hasher, HashDB}; - use keccak_hasher::KeccakHasher; - use memorydb::MemoryDB; - use rlp::{Decodable, Encodable}; - use triehash::trie_root; - use standardmap::*; - use ethtrie::{TrieDBMut, RlpCodec, trie::{TrieMut, NodeCodec}}; - use env_logger; - use ethereum_types::H256; - - fn populate_trie<'db, H, C>(db: &'db mut HashDB, root: &'db mut H256, v: &[(Vec, Vec)]) -> TrieDBMut<'db> - where H: Hasher, H::Out: Decodable + Encodable, C: NodeCodec - { - let mut t = TrieDBMut::new(db, root); - for i in 0..v.len() { - let key: &[u8]= &v[i].0; - let val: &[u8] = &v[i].1; - t.insert(key, val).unwrap(); - } - t - } - - fn unpopulate_trie<'db>(t: &mut TrieDBMut<'db>, v: &[(Vec, Vec)]) { - for i in v { - let key: &[u8]= &i.0; - t.remove(key).unwrap(); - } - } - - #[test] - fn playpen() { - env_logger::init(); - let mut seed = H256::new(); - for test_i in 0..10 { - if test_i % 50 == 0 { - debug!("{:?} of 10000 stress tests done", test_i); - } - let x = StandardMap { - alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), - min_key: 5, - journal_key: 0, - value_mode: ValueMode::Index, - count: 100, - }.make_with(&mut seed); - - let real = trie_root(x.clone()); - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut memtrie = populate_trie::<_, RlpCodec>(&mut memdb, &mut root, &x); - - memtrie.commit(); - if *memtrie.root() != real { - println!("TRIE MISMATCH"); - println!(""); - println!("{:?} vs {:?}", memtrie.root(), real); - for i in &x { - println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); - } - } - assert_eq!(*memtrie.root(), real); - unpopulate_trie(&mut memtrie, &x); - memtrie.commit(); - if *memtrie.root() != RlpCodec::HASHED_NULL_NODE { - println!("- TRIE MISMATCH"); - println!(""); - println!("{:?} vs {:?}", memtrie.root(), real); - for i in &x { - println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); - } - } - assert_eq!(*memtrie.root(), RlpCodec::HASHED_NULL_NODE); - } - } - - #[test] - fn init() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - assert_eq!(*t.root(), RlpCodec::HASHED_NULL_NODE); - } - - #[test] - fn insert_on_empty() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ (vec![0x01u8, 0x23], vec![0x01u8, 0x23]) ])); - } - - #[test] - fn remove_to_empty() { - let big_value = b"00000000000000000000000000000000"; - - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t1 = TrieDBMut::new(&mut memdb, &mut root); - t1.insert(&[0x01, 0x23], big_value).unwrap(); - t1.insert(&[0x01, 0x34], big_value).unwrap(); - let 
mut memdb2 = MemoryDB::::new(); - let mut root2 = H256::new(); - let mut t2 = TrieDBMut::new(&mut memdb2, &mut root2); - - t2.insert(&[0x01], big_value).unwrap(); - t2.insert(&[0x01, 0x23], big_value).unwrap(); - t2.insert(&[0x01, 0x34], big_value).unwrap(); - t2.remove(&[0x01]).unwrap(); - } - - #[test] - fn insert_replace_root() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0x01u8, 0x23], &[0x23u8, 0x45]).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ (vec![0x01u8, 0x23], vec![0x23u8, 0x45]) ])); - } - - #[test] - fn insert_make_branch_root() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0x11u8, 0x23], &[0x11u8, 0x23]).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x11u8, 0x23], vec![0x11u8, 0x23]) - ])); - } - - #[test] - fn insert_into_branch_root() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap(); - t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x81u8, 0x23], vec![0x81u8, 0x23]), - (vec![0xf1u8, 0x23], vec![0xf1u8, 0x23]), - ])); - } - - #[test] - fn insert_value_into_branch_root() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[], &[0x0]).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ - (vec![], vec![0x0]), - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - ])); - } - - #[test] - fn insert_split_leaf() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0x01u8, 0x34], &[0x01u8, 0x34]).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], vec![0x01u8, 0x23]), - (vec![0x01u8, 0x34], vec![0x01u8, 0x34]), - ])); - } - - #[test] - fn insert_split_extenstion() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01, 0x23, 0x45], &[0x01]).unwrap(); - t.insert(&[0x01, 0xf3, 0x45], &[0x02]).unwrap(); - t.insert(&[0x01, 0xf3, 0xf5], &[0x03]).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01, 0x23, 0x45], vec![0x01]), - (vec![0x01, 0xf3, 0x45], vec![0x02]), - (vec![0x01, 0xf3, 0xf5], vec![0x03]), - ])); - } - - #[test] - fn insert_big_value() { - let big_value0 = b"00000000000000000000000000000000"; - let big_value1 = b"11111111111111111111111111111111"; - - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], big_value0).unwrap(); - t.insert(&[0x11u8, 0x23], big_value1).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], big_value0.to_vec()), - (vec![0x11u8, 0x23], big_value1.to_vec()) - ])); - } - - #[test] - fn insert_duplicate_value() { - let big_value = b"00000000000000000000000000000000"; - - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = 
TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], big_value).unwrap(); - t.insert(&[0x11u8, 0x23], big_value).unwrap(); - assert_eq!(*t.root(), trie_root(vec![ - (vec![0x01u8, 0x23], big_value.to_vec()), - (vec![0x11u8, 0x23], big_value.to_vec()) - ])); - } - - #[test] - fn test_at_empty() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let t = TrieDBMut::new(&mut memdb, &mut root); - assert_eq!(t.get(&[0x5]).unwrap(), None); - } - - #[test] - fn test_at_one() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x1u8, 0x23])); - t.commit(); - assert_eq!(t.get(&[0x1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x1u8, 0x23])); - } - - #[test] - fn test_at_three() { - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut memdb, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - t.insert(&[0xf1u8, 0x23], &[0xf1u8, 0x23]).unwrap(); - t.insert(&[0x81u8, 0x23], &[0x81u8, 0x23]).unwrap(); - assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23])); - assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0xf1u8, 0x23])); - assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x81u8, 0x23])); - assert_eq!(t.get(&[0x82, 0x23]).unwrap(), None); - t.commit(); - assert_eq!(t.get(&[0x01, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x01u8, 0x23])); - assert_eq!(t.get(&[0xf1, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0xf1u8, 0x23])); - assert_eq!(t.get(&[0x81, 0x23]).unwrap().unwrap(), DBValue::from_slice(&[0x81u8, 0x23])); - assert_eq!(t.get(&[0x82, 0x23]).unwrap(), None); - } - - #[test] - fn stress() { - let mut seed = H256::new(); - for _ in 0..50 { - let x = StandardMap { - alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), - min_key: 5, - journal_key: 0, - value_mode: ValueMode::Index, - count: 4, - }.make_with(&mut seed); - - let real = trie_root(x.clone()); - let mut memdb = MemoryDB::::new(); - let mut root = H256::new(); - let mut memtrie = populate_trie::<_, RlpCodec>(&mut memdb, &mut root, &x); - let mut y = x.clone(); - y.sort_by(|ref a, ref b| a.0.cmp(&b.0)); - let mut memdb2 = MemoryDB::::new(); - let mut root2 = H256::new(); - let mut memtrie_sorted = populate_trie::<_, RlpCodec>(&mut memdb2, &mut root2, &y); - if *memtrie.root() != real || *memtrie_sorted.root() != real { - println!("TRIE MISMATCH"); - println!(""); - println!("ORIGINAL... {:?}", memtrie.root()); - for i in &x { - println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); - } - println!("SORTED... 
{:?}", memtrie_sorted.root()); - for i in &y { - println!("{:?} -> {:?}", i.0.pretty(), i.1.pretty()); - } - } - assert_eq!(*memtrie.root(), real); - assert_eq!(*memtrie_sorted.root(), real); - } - } - - #[test] - fn test_trie_existing() { - let mut db = MemoryDB::::new(); - let mut root = H256::new(); - { - let mut t = TrieDBMut::new(&mut db, &mut root); - t.insert(&[0x01u8, 0x23], &[0x01u8, 0x23]).unwrap(); - } - - { - let _ = TrieDBMut::from_existing(&mut db, &mut root); - } - } - - #[test] - fn insert_empty() { - let mut seed = H256::new(); - let x = StandardMap { - alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), - min_key: 5, - journal_key: 0, - value_mode: ValueMode::Index, - count: 4, - }.make_with(&mut seed); - - let mut db = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut db, &mut root); - for &(ref key, ref value) in &x { - t.insert(key, value).unwrap(); - } - - assert_eq!(*t.root(), trie_root(x.clone())); - - for &(ref key, _) in &x { - t.insert(key, &[]).unwrap(); - } - - assert!(t.is_empty()); - assert_eq!(*t.root(), RlpCodec::HASHED_NULL_NODE); - } - - #[test] - fn return_old_values() { - let mut seed = H256::new(); - let x = StandardMap { - alphabet: Alphabet::Custom(b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_".to_vec()), - min_key: 5, - journal_key: 0, - value_mode: ValueMode::Index, - count: 4, - }.make_with(&mut seed); - - let mut db = MemoryDB::::new(); - let mut root = H256::new(); - let mut t = TrieDBMut::new(&mut db, &mut root); - for &(ref key, ref value) in &x { - assert!(t.insert(key, value).unwrap().is_none()); - assert_eq!(t.insert(key, value).unwrap(), Some(DBValue::from_slice(value))); - } - - for (key, value) in x { - assert_eq!(t.remove(&key).unwrap(), Some(DBValue::from_slice(&value))); - assert!(t.remove(&key).unwrap().is_none()); - } - } -} diff --git a/util/plain_hasher/Cargo.toml b/util/plain_hasher/Cargo.toml deleted file mode 100644 index 9b2cd5503..000000000 --- a/util/plain_hasher/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -name = "plain_hasher" -description = "Hasher for 32-bit keys." -version = "0.2.0" -authors = ["Parity Technologies "] -license = "MIT" -keywords = ["hash", "hasher"] -homepage = "https://github.com/paritytech/plain_hasher" - -[dependencies] -crunchy = "0.1.6" -ethereum-types = "0.3" -hashdb = { version = "0.2.0", path = "../hashdb" } \ No newline at end of file diff --git a/util/plain_hasher/benches/bench.rs b/util/plain_hasher/benches/bench.rs deleted file mode 100644 index cfaa95eaa..000000000 --- a/util/plain_hasher/benches/bench.rs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . 
- -#![feature(test)] - -extern crate test; -extern crate plain_hasher; - -use std::hash::Hasher; -use std::collections::hash_map::DefaultHasher; -use test::{Bencher, black_box}; -use plain_hasher::PlainHasher; - -#[bench] -fn write_plain_hasher(b: &mut Bencher) { - b.iter(|| { - let n: u8 = black_box(100); - (0..n).fold(PlainHasher::default(), |mut old, new| { - let bb = black_box([new; 32]); - old.write(&bb as &[u8]); - old - }); - }); -} - -#[bench] -fn write_default_hasher(b: &mut Bencher) { - b.iter(|| { - let n: u8 = black_box(100); - (0..n).fold(DefaultHasher::default(), |mut old, new| { - let bb = black_box([new; 32]); - old.write(&bb as &[u8]); - old - }); - }); -} diff --git a/util/plain_hasher/src/lib.rs b/util/plain_hasher/src/lib.rs deleted file mode 100644 index 4a8a10441..000000000 --- a/util/plain_hasher/src/lib.rs +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -#[macro_use] -extern crate crunchy; -extern crate ethereum_types; -extern crate hashdb; - -use ethereum_types::H256; -// use hashdb::Hasher; -use std::hash; -use std::collections::{HashMap, HashSet}; -/// Specialized version of `HashMap` with H256 keys and fast hashing function. -pub type H256FastMap = HashMap>; -/// Specialized version of `HashSet` with H256 keys and fast hashing function. -pub type H256FastSet = HashSet>; - -/// Hasher that just takes 8 bytes of the provided value. -/// May only be used for keys which are 32 bytes. -#[derive(Default)] -pub struct PlainHasher { - prefix: u64, -} - -impl hash::Hasher for PlainHasher { - #[inline] - fn finish(&self) -> u64 { - self.prefix - } - - #[inline] - #[allow(unused_assignments)] - fn write(&mut self, bytes: &[u8]) { - debug_assert!(bytes.len() == 32); - let mut bytes_ptr = bytes.as_ptr(); - let mut prefix_ptr = &mut self.prefix as *mut u64 as *mut u8; - - unroll! 
{ - for _i in 0..8 { - unsafe { - *prefix_ptr ^= (*bytes_ptr ^ *bytes_ptr.offset(8)) ^ (*bytes_ptr.offset(16) ^ *bytes_ptr.offset(24)); - bytes_ptr = bytes_ptr.offset(1); - prefix_ptr = prefix_ptr.offset(1); - } - } - } - } -} - -#[cfg(test)] -mod tests { - use std::hash::Hasher; - use super::PlainHasher; - - #[test] - fn it_works() { - let mut bytes = [32u8; 32]; - bytes[0] = 15; - let mut hasher = PlainHasher::default(); - hasher.write(&bytes); - assert_eq!(hasher.prefix, 47); - } -} diff --git a/util/rlp/Cargo.toml b/util/rlp/Cargo.toml deleted file mode 100644 index 3bde7206f..000000000 --- a/util/rlp/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -description = "Recursive-length prefix encoding, decoding, and compression" -repository = "https://github.com/paritytech/parity" -license = "MIT/Apache-2.0" -name = "rlp" -version = "0.2.1" -authors = ["Parity Technologies "] - -[dependencies] -elastic-array = "0.10" -ethereum-types = "0.3" -rustc-hex = "1.0" -byteorder = "1.0" diff --git a/util/rlp/LICENSE-APACHE2 b/util/rlp/LICENSE-APACHE2 deleted file mode 100644 index 16fe87b06..000000000 --- a/util/rlp/LICENSE-APACHE2 +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/util/rlp/LICENSE-MIT b/util/rlp/LICENSE-MIT deleted file mode 100644 index cd8fdd2b9..000000000 --- a/util/rlp/LICENSE-MIT +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2015-2017 Parity Technologies - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/util/rlp/README.md b/util/rlp/README.md deleted file mode 100644 index f4007a884..000000000 --- a/util/rlp/README.md +++ /dev/null @@ -1,8 +0,0 @@ -# RLP - -Recursive-length-prefix encoding, decoding, and compression in Rust. - -## License - -Unlike most parts of Parity, which fall under the GPLv3, this package is dual-licensed under MIT/Apache2 at the user's choice. -Find the associated license files in this directory as `LICENSE-MIT` and `LICENSE-APACHE2` respectively. diff --git a/util/rlp/benches/rlp.rs b/util/rlp/benches/rlp.rs deleted file mode 100644 index 7f2e9f645..000000000 --- a/util/rlp/benches/rlp.rs +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! benchmarking for rlp -//! should be started with: -//! ```bash -//! multirust run nightly cargo bench -//! ``` - -#![feature(test)] - -extern crate ethereum_types; -extern crate rlp; -extern crate test; - -use ethereum_types::U256; -use rlp::{RlpStream, Rlp}; -use test::Bencher; - -#[bench] -fn bench_stream_u64_value(b: &mut Bencher) { - b.iter(|| { - // u64 - let mut stream = RlpStream::new(); - stream.append(&0x1023456789abcdefu64); - let _ = stream.out(); - }); -} - -#[bench] -fn bench_decode_u64_value(b: &mut Bencher) { - b.iter(|| { - // u64 - let data = vec![0x88, 0x10, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]; - let rlp = Rlp::new(&data); - let _: u64 = rlp.as_val().unwrap(); - }); -} - -#[bench] -fn bench_stream_u256_value(b: &mut Bencher) { - b.iter(|| { - // u256 - let mut stream = RlpStream::new(); - let uint: U256 = "8090a0b0c0d0e0f00910203040506077000000000000000100000000000012f0".into(); - stream.append(&uint); - let _ = stream.out(); - }); -} - -#[bench] -fn bench_decode_u256_value(b: &mut Bencher) { - b.iter(|| { - // u256 - let data = vec![0xa0, 0x80, 0x90, 0xa0, 0xb0, 0xc0, 0xd0, 0xe0, 0xf0, 0x09, 0x10, 0x20, - 0x30, 0x40, 0x50, 0x60, 0x77, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, - 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x12, 0xf0]; - let rlp = Rlp::new(&data); - let _ : U256 = rlp.as_val().unwrap(); - }); -} - -#[bench] -fn bench_stream_nested_empty_lists(b: &mut Bencher) { - b.iter(|| { - // [ [], [[]], [ [], [[]] ] ] - let mut stream = RlpStream::new_list(3); - stream.begin_list(0); - stream.begin_list(1).begin_list(0); - stream.begin_list(2).begin_list(0).begin_list(1).begin_list(0); - let _ = stream.out(); - }); -} - -#[bench] -fn bench_decode_nested_empty_lists(b: &mut Bencher) { - b.iter(|| { - // [ [], [[]], [ [], [[]] ] ] - let data = vec![0xc7, 0xc0, 0xc1, 0xc0, 0xc3, 0xc0, 0xc1, 0xc0]; - let rlp = Rlp::new(&data); - let _v0: Vec = rlp.at(0).unwrap().as_list().unwrap(); - let _v1: Vec = rlp.at(1).unwrap().at(0).unwrap().as_list().unwrap(); - let nested_rlp = rlp.at(2).unwrap(); - let _v2a: Vec = nested_rlp.at(0).unwrap().as_list().unwrap(); - let _v2b: Vec = nested_rlp.at(1).unwrap().at(0).unwrap().as_list().unwrap(); - }); -} - -#[bench] -fn bench_stream_1000_empty_lists(b: &mut Bencher) { - b.iter(|| { - let mut stream = RlpStream::new_list(1000); - for _ in 0..1000 { - stream.begin_list(0); - } - let _ = 
stream.out(); - }); -} diff --git a/util/rlp/license-header b/util/rlp/license-header deleted file mode 100644 index 03df169c8..000000000 --- a/util/rlp/license-header +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. diff --git a/util/rlp/src/error.rs b/util/rlp/src/error.rs deleted file mode 100644 index 7aef6cfbf..000000000 --- a/util/rlp/src/error.rs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use std::fmt; -use std::error::Error as StdError; - -#[derive(Debug, PartialEq, Eq, Clone)] -/// Error concerning the RLP decoder. -pub enum DecoderError { - /// Data has additional bytes at the end of the valid RLP fragment. - RlpIsTooBig, - /// Data has too few bytes for valid RLP. - RlpIsTooShort, - /// Expect an encoded list, RLP was something else. - RlpExpectedToBeList, - /// Expect encoded data, RLP was something else. - RlpExpectedToBeData, - /// Expected a different size list. - RlpIncorrectListLen, - /// Data length number has a prefixed zero byte, invalid for numbers. - RlpDataLenWithZeroPrefix, - /// List length number has a prefixed zero byte, invalid for numbers. - RlpListLenWithZeroPrefix, - /// Non-canonical (longer than necessary) representation used for data or list. - RlpInvalidIndirection, - /// Declared length is inconsistent with data specified after. - RlpInconsistentLengthAndData, - /// Declared length is invalid and results in overflow - RlpInvalidLength, - /// Custom rlp decoding error. - Custom(&'static str), -} - -impl StdError for DecoderError { - fn description(&self) -> &str { - "builder error" - } -} - -impl fmt::Display for DecoderError { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&self, f) - } -} diff --git a/util/rlp/src/impls.rs b/util/rlp/src/impls.rs deleted file mode 100644 index 573f2c078..000000000 --- a/util/rlp/src/impls.rs +++ /dev/null @@ -1,285 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
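The unsigned-integer impls in impls.rs below reject non-canonical encodings; a small sketch showing the `RlpInvalidIndirection` variant listed above being produced for a two-byte integer whose payload starts with a zero byte:

extern crate rlp;

use rlp::{Rlp, DecoderError};

fn main() {
    // 0x82 announces a two-byte string; a canonical u16 never starts with 0x00,
    // so decoding is refused rather than silently accepting the padded form.
    let data = vec![0x82, 0x00, 0x01];
    let rlp = Rlp::new(&data);
    let res: Result<u16, DecoderError> = rlp.as_val();
    assert_eq!(res, Err(DecoderError::RlpInvalidIndirection));
}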
- -use std::{cmp, mem, str}; -use byteorder::{ByteOrder, BigEndian}; -use bigint::{U128, U256, H64, H128, H160, H256, H512, H520, Bloom}; -use traits::{Encodable, Decodable}; -use stream::RlpStream; -use {Rlp, DecoderError}; - -pub fn decode_usize(bytes: &[u8]) -> Result { - match bytes.len() { - l if l <= mem::size_of::() => { - if bytes[0] == 0 { - return Err(DecoderError::RlpInvalidIndirection); - } - let mut res = 0usize; - for i in 0..l { - let shift = (l - 1 - i) * 8; - res = res + ((bytes[i] as usize) << shift); - } - Ok(res) - } - _ => Err(DecoderError::RlpIsTooBig), - } -} - -impl Encodable for bool { - fn rlp_append(&self, s: &mut RlpStream) { - if *self { - s.encoder().encode_value(&[1]); - } else { - s.encoder().encode_value(&[0]); - } - } -} - -impl Decodable for bool { - fn decode(rlp: &Rlp) -> Result { - rlp.decoder().decode_value(|bytes| { - match bytes.len() { - 0 => Ok(false), - 1 => Ok(bytes[0] != 0), - _ => Err(DecoderError::RlpIsTooBig), - } - }) - } -} - -impl<'a> Encodable for &'a [u8] { - fn rlp_append(&self, s: &mut RlpStream) { - s.encoder().encode_value(self); - } -} - -impl Encodable for Vec { - fn rlp_append(&self, s: &mut RlpStream) { - s.encoder().encode_value(self); - } -} - -impl Decodable for Vec { - fn decode(rlp: &Rlp) -> Result { - rlp.decoder().decode_value(|bytes| { - Ok(bytes.to_vec()) - }) - } -} - -impl Encodable for Option where T: Encodable { - fn rlp_append(&self, s: &mut RlpStream) { - match *self { - None => { - s.begin_list(0); - }, - Some(ref value) => { - s.begin_list(1); - s.append(value); - } - } - } -} - -impl Decodable for Option where T: Decodable { - fn decode(rlp: &Rlp) -> Result { - let items = rlp.item_count()?; - match items { - 1 => rlp.val_at(0).map(Some), - 0 => Ok(None), - _ => Err(DecoderError::RlpIncorrectListLen), - } - } -} - -impl Encodable for u8 { - fn rlp_append(&self, s: &mut RlpStream) { - if *self != 0 { - s.encoder().encode_value(&[*self]); - } else { - s.encoder().encode_value(&[]); - } - } -} - -impl Decodable for u8 { - fn decode(rlp: &Rlp) -> Result { - rlp.decoder().decode_value(|bytes| { - match bytes.len() { - 1 if bytes[0] != 0 => Ok(bytes[0]), - 0 => Ok(0), - 1 => Err(DecoderError::RlpInvalidIndirection), - _ => Err(DecoderError::RlpIsTooBig), - } - }) - } -} - -macro_rules! impl_encodable_for_u { - ($name: ident, $func: ident, $size: expr) => { - impl Encodable for $name { - fn rlp_append(&self, s: &mut RlpStream) { - let leading_empty_bytes = self.leading_zeros() as usize / 8; - let mut buffer = [0u8; $size]; - BigEndian::$func(&mut buffer, *self); - s.encoder().encode_value(&buffer[leading_empty_bytes..]); - } - } - } -} - -macro_rules! 
impl_decodable_for_u { - ($name: ident) => { - impl Decodable for $name { - fn decode(rlp: &Rlp) -> Result { - rlp.decoder().decode_value(|bytes| { - match bytes.len() { - 0 | 1 => u8::decode(rlp).map(|v| v as $name), - l if l <= mem::size_of::<$name>() => { - if bytes[0] == 0 { - return Err(DecoderError::RlpInvalidIndirection); - } - let mut res = 0 as $name; - for i in 0..l { - let shift = (l - 1 - i) * 8; - res = res + ((bytes[i] as $name) << shift); - } - Ok(res) - } - _ => Err(DecoderError::RlpIsTooBig), - } - }) - } - } - } -} - -impl_encodable_for_u!(u16, write_u16, 2); -impl_encodable_for_u!(u32, write_u32, 4); -impl_encodable_for_u!(u64, write_u64, 8); - -impl_decodable_for_u!(u16); -impl_decodable_for_u!(u32); -impl_decodable_for_u!(u64); - -impl Encodable for usize { - fn rlp_append(&self, s: &mut RlpStream) { - (*self as u64).rlp_append(s); - } -} - -impl Decodable for usize { - fn decode(rlp: &Rlp) -> Result { - u64::decode(rlp).map(|value| value as usize) - } -} - -macro_rules! impl_encodable_for_hash { - ($name: ident) => { - impl Encodable for $name { - fn rlp_append(&self, s: &mut RlpStream) { - s.encoder().encode_value(self); - } - } - } -} - -macro_rules! impl_decodable_for_hash { - ($name: ident, $size: expr) => { - impl Decodable for $name { - fn decode(rlp: &Rlp) -> Result { - rlp.decoder().decode_value(|bytes| match bytes.len().cmp(&$size) { - cmp::Ordering::Less => Err(DecoderError::RlpIsTooShort), - cmp::Ordering::Greater => Err(DecoderError::RlpIsTooBig), - cmp::Ordering::Equal => { - let mut t = [0u8; $size]; - t.copy_from_slice(bytes); - Ok($name(t)) - } - }) - } - } - } -} - -impl_encodable_for_hash!(H64); -impl_encodable_for_hash!(H128); -impl_encodable_for_hash!(H160); -impl_encodable_for_hash!(H256); -impl_encodable_for_hash!(H512); -impl_encodable_for_hash!(H520); -impl_encodable_for_hash!(Bloom); - -impl_decodable_for_hash!(H64, 8); -impl_decodable_for_hash!(H128, 16); -impl_decodable_for_hash!(H160, 20); -impl_decodable_for_hash!(H256, 32); -impl_decodable_for_hash!(H512, 64); -impl_decodable_for_hash!(H520, 65); -impl_decodable_for_hash!(Bloom, 256); - -macro_rules! impl_encodable_for_uint { - ($name: ident, $size: expr) => { - impl Encodable for $name { - fn rlp_append(&self, s: &mut RlpStream) { - let leading_empty_bytes = $size - (self.bits() + 7) / 8; - let mut buffer = [0u8; $size]; - self.to_big_endian(&mut buffer); - s.encoder().encode_value(&buffer[leading_empty_bytes..]); - } - } - } -} - -macro_rules! 
impl_decodable_for_uint { - ($name: ident, $size: expr) => { - impl Decodable for $name { - fn decode(rlp: &Rlp) -> Result { - rlp.decoder().decode_value(|bytes| { - if !bytes.is_empty() && bytes[0] == 0 { - Err(DecoderError::RlpInvalidIndirection) - } else if bytes.len() <= $size { - Ok($name::from(bytes)) - } else { - Err(DecoderError::RlpIsTooBig) - } - }) - } - } - } -} - -impl_encodable_for_uint!(U256, 32); -impl_encodable_for_uint!(U128, 16); - -impl_decodable_for_uint!(U256, 32); -impl_decodable_for_uint!(U128, 16); - -impl<'a> Encodable for &'a str { - fn rlp_append(&self, s: &mut RlpStream) { - s.encoder().encode_value(self.as_bytes()); - } -} - -impl Encodable for String { - fn rlp_append(&self, s: &mut RlpStream) { - s.encoder().encode_value(self.as_bytes()); - } -} - -impl Decodable for String { - fn decode(rlp: &Rlp) -> Result { - rlp.decoder().decode_value(|bytes| { - match str::from_utf8(bytes) { - Ok(s) => Ok(s.to_owned()), - // consider better error type here - Err(_err) => Err(DecoderError::RlpExpectedToBeData), - } - }) - } -} diff --git a/util/rlp/src/lib.rs b/util/rlp/src/lib.rs deleted file mode 100644 index 08c36522d..000000000 --- a/util/rlp/src/lib.rs +++ /dev/null @@ -1,101 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Recursive Length Prefix serialization crate. -//! -//! Allows encoding, decoding, and view onto rlp-slice -//! -//!# What should you use when? -//! -//!### Use `encode` function when: -//! * You want to encode something inline. -//! * You do not work on big set of data. -//! * You want to encode whole data structure at once. -//! -//!### Use `decode` function when: -//! * You want to decode something inline. -//! * You do not work on big set of data. -//! * You want to decode whole rlp at once. -//! -//!### Use `RlpStream` when: -//! * You want to encode something in portions. -//! * You encode a big set of data. -//! -//!### Use `Rlp` when: -//! * You need to handle data corruption errors. -//! * You are working on input data. -//! * You want to get view onto rlp-slice. -//! * You don't want to decode whole rlp at once. - -extern crate byteorder; -extern crate ethereum_types as bigint; -extern crate elastic_array; -extern crate rustc_hex; - -mod traits; -mod error; -mod rlpin; -mod stream; -mod impls; - -use elastic_array::ElasticArray1024; -use std::borrow::Borrow; - -pub use error::DecoderError; -pub use traits::{Decodable, Encodable}; -pub use rlpin::{Rlp, RlpIterator, PayloadInfo, Prototype}; -pub use stream::RlpStream; - -/// The RLP encoded empty data (used to mean "null value"). -pub const NULL_RLP: [u8; 1] = [0x80; 1]; -/// The RLP encoded empty list. -pub const EMPTY_LIST_RLP: [u8; 1] = [0xC0; 1]; - -/// Shortcut function to decode trusted rlp -/// -/// ```rust -/// extern crate rlp; -/// -/// fn main () { -/// let data = vec![0x83, b'c', b'a', b't']; -/// let animal: String = rlp::decode(&data).expect("could not decode"); -/// assert_eq!(animal, "cat".to_owned()); -/// } -/// ``` -pub fn decode(bytes: &[u8]) -> Result where T: Decodable { - let rlp = Rlp::new(bytes); - rlp.as_val() -} - -pub fn decode_list(bytes: &[u8]) -> Vec where T: Decodable { - let rlp = Rlp::new(bytes); - rlp.as_list().expect("trusted rlp should be valid") -} - -/// Shortcut function to encode structure into rlp. 
-/// -/// ```rust -/// extern crate rlp; -/// -/// fn main () { -/// let animal = "cat"; -/// let out = rlp::encode(&animal).into_vec(); -/// assert_eq!(out, vec![0x83, b'c', b'a', b't']); -/// } -/// ``` -pub fn encode(object: &E) -> ElasticArray1024 where E: Encodable { - let mut stream = RlpStream::new(); - stream.append(object); - stream.drain() -} - -pub fn encode_list(object: &[K]) -> ElasticArray1024 where E: Encodable, K: Borrow { - let mut stream = RlpStream::new(); - stream.append_list(object); - stream.drain() -} diff --git a/util/rlp/src/rlpin.rs b/util/rlp/src/rlpin.rs deleted file mode 100644 index 23fdc452e..000000000 --- a/util/rlp/src/rlpin.rs +++ /dev/null @@ -1,405 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use std::cell::Cell; -use std::fmt; -use rustc_hex::ToHex; -use impls::decode_usize; -use {Decodable, DecoderError}; - -/// rlp offset -#[derive(Copy, Clone, Debug)] -struct OffsetCache { - index: usize, - offset: usize, -} - -impl OffsetCache { - fn new(index: usize, offset: usize) -> OffsetCache { - OffsetCache { - index: index, - offset: offset, - } - } -} - -#[derive(Debug)] -/// RLP prototype -pub enum Prototype { - /// Empty - Null, - /// Value - Data(usize), - /// List - List(usize), -} - -/// Stores basic information about item -pub struct PayloadInfo { - /// Header length in bytes - pub header_len: usize, - /// Value length in bytes - pub value_len: usize, -} - -fn calculate_payload_info(header_bytes: &[u8], len_of_len: usize) -> Result { - let header_len = 1 + len_of_len; - match header_bytes.get(1) { - Some(&0) => return Err(DecoderError::RlpDataLenWithZeroPrefix), - None => return Err(DecoderError::RlpIsTooShort), - _ => (), - } - if header_bytes.len() < header_len { return Err(DecoderError::RlpIsTooShort); } - let value_len = decode_usize(&header_bytes[1..header_len])?; - Ok(PayloadInfo::new(header_len, value_len)) -} - -impl PayloadInfo { - fn new(header_len: usize, value_len: usize) -> PayloadInfo { - PayloadInfo { - header_len: header_len, - value_len: value_len, - } - } - - /// Total size of the RLP. - pub fn total(&self) -> usize { self.header_len + self.value_len } - - /// Create a new object from the given bytes RLP. The bytes - pub fn from(header_bytes: &[u8]) -> Result { - match header_bytes.first().cloned() { - None => Err(DecoderError::RlpIsTooShort), - Some(0...0x7f) => Ok(PayloadInfo::new(0, 1)), - Some(l @ 0x80...0xb7) => Ok(PayloadInfo::new(1, l as usize - 0x80)), - Some(l @ 0xb8...0xbf) => { - let len_of_len = l as usize - 0xb7; - calculate_payload_info(header_bytes, len_of_len) - } - Some(l @ 0xc0...0xf7) => Ok(PayloadInfo::new(1, l as usize - 0xc0)), - Some(l @ 0xf8...0xff) => { - let len_of_len = l as usize - 0xf7; - calculate_payload_info(header_bytes, len_of_len) - }, - // we cant reach this place, but rust requires _ to be implemented - _ => { unreachable!(); } - } - } -} - -/// Data-oriented view onto rlp-slice. -/// -/// This is an immutable structure. No operations change it. -/// -/// Should be used in places where, error handling is required, -/// eg. 
on input -#[derive(Debug)] -pub struct Rlp<'a> { - bytes: &'a [u8], - offset_cache: Cell, - count_cache: Cell>, -} - -impl<'a> Clone for Rlp<'a> { - fn clone(&self) -> Rlp<'a> { - Rlp { - bytes: self.bytes, - offset_cache: self.offset_cache.clone(), - count_cache: self.count_cache.clone(), - } - } -} - -impl<'a> fmt::Display for Rlp<'a> { - fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { - match self.prototype() { - Ok(Prototype::Null) => write!(f, "null"), - Ok(Prototype::Data(_)) => write!(f, "\"0x{}\"", self.data().unwrap().to_hex()), - Ok(Prototype::List(len)) => { - write!(f, "[")?; - for i in 0..len-1 { - write!(f, "{}, ", self.at(i).unwrap())?; - } - write!(f, "{}", self.at(len - 1).unwrap())?; - write!(f, "]") - }, - Err(err) => write!(f, "{:?}", err) - } - } -} - -impl<'a, 'view> Rlp<'a> where 'a: 'view { - pub fn new(bytes: &'a [u8]) -> Rlp<'a> { - Rlp { - bytes: bytes, - offset_cache: Cell::new(OffsetCache::new(usize::max_value(), 0)), - count_cache: Cell::new(None) - } - } - - pub fn as_raw(&'view self) -> &'a [u8] { - self.bytes - } - - pub fn prototype(&self) -> Result { - // optimize? && return appropriate errors - if self.is_data() { - Ok(Prototype::Data(self.size())) - } else if self.is_list() { - self.item_count().map(Prototype::List) - } else { - Ok(Prototype::Null) - } - } - - pub fn payload_info(&self) -> Result { - BasicDecoder::payload_info(self.bytes) - } - - pub fn data(&'view self) -> Result<&'a [u8], DecoderError> { - let pi = BasicDecoder::payload_info(self.bytes)?; - Ok(&self.bytes[pi.header_len..(pi.header_len + pi.value_len)]) - } - - pub fn item_count(&self) -> Result { - match self.is_list() { - true => match self.count_cache.get() { - Some(c) => Ok(c), - None => { - let c = self.iter().count(); - self.count_cache.set(Some(c)); - Ok(c) - } - }, - false => Err(DecoderError::RlpExpectedToBeList), - } - } - - pub fn size(&self) -> usize { - match self.is_data() { - // TODO: No panic on malformed data, but ideally would Err on no PayloadInfo. 
- true => BasicDecoder::payload_info(self.bytes).map(|b| b.value_len).unwrap_or(0), - false => 0 - } - } - - pub fn at(&'view self, index: usize) -> Result, DecoderError> { - if !self.is_list() { - return Err(DecoderError::RlpExpectedToBeList); - } - - // move to cached position if its index is less or equal to - // current search index, otherwise move to beginning of list - let c = self.offset_cache.get(); - let (mut bytes, to_skip) = match c.index <= index { - true => (Rlp::consume(self.bytes, c.offset)?, index - c.index), - false => (self.consume_list_payload()?, index), - }; - - // skip up to x items - bytes = Rlp::consume_items(bytes, to_skip)?; - - // update the cache - self.offset_cache.set(OffsetCache::new(index, self.bytes.len() - bytes.len())); - - // construct new rlp - let found = BasicDecoder::payload_info(bytes)?; - Ok(Rlp::new(&bytes[0..found.header_len + found.value_len])) - } - - pub fn is_null(&self) -> bool { - self.bytes.len() == 0 - } - - pub fn is_empty(&self) -> bool { - !self.is_null() && (self.bytes[0] == 0xc0 || self.bytes[0] == 0x80) - } - - pub fn is_list(&self) -> bool { - !self.is_null() && self.bytes[0] >= 0xc0 - } - - pub fn is_data(&self) -> bool { - !self.is_null() && self.bytes[0] < 0xc0 - } - - pub fn is_int(&self) -> bool { - if self.is_null() { - return false; - } - - match self.bytes[0] { - 0...0x80 => true, - 0x81...0xb7 => self.bytes[1] != 0, - b @ 0xb8...0xbf => self.bytes[1 + b as usize - 0xb7] != 0, - _ => false - } - } - - pub fn iter(&'view self) -> RlpIterator<'a, 'view> { - self.into_iter() - } - - pub fn as_val(&self) -> Result where T: Decodable { - T::decode(self) - } - - pub fn as_list(&self) -> Result, DecoderError> where T: Decodable { - self.iter().map(|rlp| rlp.as_val()).collect() - } - - pub fn val_at(&self, index: usize) -> Result where T: Decodable { - self.at(index)?.as_val() - } - - pub fn list_at(&self, index: usize) -> Result, DecoderError> where T: Decodable { - self.at(index)?.as_list() - } - - pub fn decoder(&self) -> BasicDecoder { - BasicDecoder::new(self.clone()) - } - - /// consumes first found prefix - fn consume_list_payload(&self) -> Result<&'a [u8], DecoderError> { - let item = BasicDecoder::payload_info(self.bytes)?; - let bytes = Rlp::consume(self.bytes, item.header_len)?; - Ok(bytes) - } - - /// consumes fixed number of items - fn consume_items(bytes: &'a [u8], items: usize) -> Result<&'a [u8], DecoderError> { - let mut result = bytes; - for _ in 0..items { - let i = BasicDecoder::payload_info(result)?; - result = Rlp::consume(result, i.header_len + i.value_len)?; - } - Ok(result) - } - - /// consumes slice prefix of length `len` - fn consume(bytes: &'a [u8], len: usize) -> Result<&'a [u8], DecoderError> { - match bytes.len() >= len { - true => Ok(&bytes[len..]), - false => Err(DecoderError::RlpIsTooShort), - } - } -} - -/// Iterator over rlp-slice list elements. 
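The `Rlp` struct above is the lazy, error-returning view that the module documentation recommends for untrusted input. A short sketch of typical use follows; it is not part of the patch and relies only on methods defined above (`new`, `item_count`, `val_at`).

```rust
extern crate rlp;

use rlp::{DecoderError, Rlp};

fn decode_pair(bytes: &[u8]) -> Result<(String, String), DecoderError> {
    let rlp = Rlp::new(bytes);
    // item_count errors out unless the top-level item really is a list.
    if rlp.item_count()? != 2 {
        return Err(DecoderError::RlpIncorrectListLen);
    }
    // val_at walks to the i-th element (reusing the offset cache) and decodes it.
    Ok((rlp.val_at(0)?, rlp.val_at(1)?))
}

fn main() {
    // ["cat", "dog"], the same fixture used by the tests in this diff.
    let data = [0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
    assert_eq!(decode_pair(&data).unwrap(), ("cat".into(), "dog".into()));
}
```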
-pub struct RlpIterator<'a, 'view> where 'a: 'view { - rlp: &'view Rlp<'a>, - index: usize, -} - -impl<'a, 'view> IntoIterator for &'view Rlp<'a> where 'a: 'view { - type Item = Rlp<'a>; - type IntoIter = RlpIterator<'a, 'view>; - - fn into_iter(self) -> Self::IntoIter { - RlpIterator { - rlp: self, - index: 0, - } - } -} - -impl<'a, 'view> Iterator for RlpIterator<'a, 'view> { - type Item = Rlp<'a>; - - fn next(&mut self) -> Option> { - let index = self.index; - let result = self.rlp.at(index).ok(); - self.index += 1; - result - } -} - -pub struct BasicDecoder<'a> { - rlp: Rlp<'a> -} - -impl<'a> BasicDecoder<'a> { - pub fn new(rlp: Rlp<'a>) -> BasicDecoder<'a> { - BasicDecoder { - rlp: rlp - } - } - - /// Return first item info. - fn payload_info(bytes: &[u8]) -> Result { - let item = PayloadInfo::from(bytes)?; - match item.header_len.checked_add(item.value_len) { - Some(x) if x <= bytes.len() => Ok(item), - _ => Err(DecoderError::RlpIsTooShort), - } - } - - pub fn decode_value(&self, f: F) -> Result - where F: Fn(&[u8]) -> Result { - - let bytes = self.rlp.as_raw(); - - match bytes.first().cloned() { - // RLP is too short. - None => Err(DecoderError::RlpIsTooShort), - // Single byte value. - Some(l @ 0...0x7f) => Ok(f(&[l])?), - // 0-55 bytes - Some(l @ 0x80...0xb7) => { - let last_index_of = 1 + l as usize - 0x80; - if bytes.len() < last_index_of { - return Err(DecoderError::RlpInconsistentLengthAndData); - } - let d = &bytes[1..last_index_of]; - if l == 0x81 && d[0] < 0x80 { - return Err(DecoderError::RlpInvalidIndirection); - } - Ok(f(d)?) - }, - // Longer than 55 bytes. - Some(l @ 0xb8...0xbf) => { - let len_of_len = l as usize - 0xb7; - let begin_of_value = 1 as usize + len_of_len; - if bytes.len() < begin_of_value { - return Err(DecoderError::RlpInconsistentLengthAndData); - } - let len = decode_usize(&bytes[1..begin_of_value])?; - - let last_index_of_value = begin_of_value.checked_add(len) - .ok_or(DecoderError::RlpInvalidLength)?; - if bytes.len() < last_index_of_value { - return Err(DecoderError::RlpInconsistentLengthAndData); - } - Ok(f(&bytes[begin_of_value..last_index_of_value])?) - } - // We are reading value, not a list! - _ => Err(DecoderError::RlpExpectedToBeData) - } - } -} - -#[cfg(test)] -mod tests { - use {Rlp, DecoderError}; - - #[test] - fn test_rlp_display() { - use rustc_hex::FromHex; - let data = "f84d0589010efbef67941f79b2a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470".from_hex().unwrap(); - let rlp = Rlp::new(&data); - assert_eq!(format!("{}", rlp), "[\"0x05\", \"0x010efbef67941f79b2\", \"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421\", \"0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470\"]"); - } - - #[test] - fn length_overflow() { - let bs = [0xbf, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xe5]; - let rlp = Rlp::new(&bs); - let res: Result = rlp.as_val(); - assert_eq!(Err(DecoderError::RlpInvalidLength), res); - } -} diff --git a/util/rlp/src/stream.rs b/util/rlp/src/stream.rs deleted file mode 100644 index 13ccddaa7..000000000 --- a/util/rlp/src/stream.rs +++ /dev/null @@ -1,380 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
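One behaviour of `BasicDecoder::decode_value` above that is easy to miss: encodings must be canonical, so a value that could have been written in fewer bytes is rejected. A small illustration, again not part of the patch and assuming only the checks visible above:

```rust
extern crate rlp;

use rlp::DecoderError;

fn main() {
    // A byte below 0x80 is its own encoding...
    assert_eq!(rlp::decode::<u8>(&[0x05]), Ok(5u8));
    // ...so wrapping it in a one-byte string header is non-canonical and refused,
    // per the `l == 0x81 && d[0] < 0x80` check in decode_value above.
    assert_eq!(rlp::decode::<u8>(&[0x81, 0x05]), Err(DecoderError::RlpInvalidIndirection));
}
```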
- -use std::borrow::Borrow; -use byteorder::{ByteOrder, BigEndian}; -use elastic_array::{ElasticArray16, ElasticArray1024}; -use traits::Encodable; - -#[derive(Debug, Copy, Clone)] -struct ListInfo { - position: usize, - current: usize, - max: Option, -} - -impl ListInfo { - fn new(position: usize, max: Option) -> ListInfo { - ListInfo { - position: position, - current: 0, - max: max, - } - } -} - -/// Appendable rlp encoder. -pub struct RlpStream { - unfinished_lists: ElasticArray16, - buffer: ElasticArray1024, - finished_list: bool, -} - -impl Default for RlpStream { - fn default() -> Self { - RlpStream::new() - } -} - -impl RlpStream { - /// Initializes instance of empty `Stream`. - pub fn new() -> Self { - RlpStream { - unfinished_lists: ElasticArray16::new(), - buffer: ElasticArray1024::new(), - finished_list: false, - } - } - - /// Initializes the `Stream` as a list. - pub fn new_list(len: usize) -> Self { - let mut stream = RlpStream::new(); - stream.begin_list(len); - stream - } - - /// Apends null to the end of stream, chainable. - /// - /// ```rust - /// extern crate rlp; - /// use rlp::*; - /// - /// fn main () { - /// let mut stream = RlpStream::new_list(2); - /// stream.append_empty_data().append_empty_data(); - /// let out = stream.out(); - /// assert_eq!(out, vec![0xc2, 0x80, 0x80]); - /// } - /// ``` - pub fn append_empty_data(&mut self) -> &mut Self { - // self push raw item - self.buffer.push(0x80); - - // try to finish and prepend the length - self.note_appended(1); - - // return chainable self - self - } - - /// Drain the object and return the underlying ElasticArray. Panics if it is not finished. - pub fn drain(self) -> ElasticArray1024 { - match self.is_finished() { - true => self.buffer, - false => panic!() - } - } - - /// Appends raw (pre-serialised) RLP data. Use with caution. Chainable. - pub fn append_raw<'a>(&'a mut self, bytes: &[u8], item_count: usize) -> &'a mut Self { - // push raw items - self.buffer.append_slice(bytes); - - // try to finish and prepend the length - self.note_appended(item_count); - - // return chainable self - self - } - - /// Appends value to the end of stream, chainable. - /// - /// ```rust - /// extern crate rlp; - /// use rlp::*; - /// - /// fn main () { - /// let mut stream = RlpStream::new_list(2); - /// stream.append(&"cat").append(&"dog"); - /// let out = stream.out(); - /// assert_eq!(out, vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g']); - /// } - /// ``` - pub fn append<'a, E>(&'a mut self, value: &E) -> &'a mut Self where E: Encodable { - self.finished_list = false; - value.rlp_append(self); - if !self.finished_list { - self.note_appended(1); - } - self - } - - /// Appends list of values to the end of stream, chainable. - pub fn append_list<'a, E, K>(&'a mut self, values: &[K]) -> &'a mut Self where E: Encodable, K: Borrow { - self.begin_list(values.len()); - for value in values { - self.append(value.borrow()); - } - self - } - - /// Appends value to the end of stream, but do not count it as an appended item. - /// It's useful for wrapper types - pub fn append_internal<'a, E>(&'a mut self, value: &E) -> &'a mut Self where E: Encodable { - value.rlp_append(self); - self - } - - /// Declare appending the list of given size, chainable. 
- /// - /// ```rust - /// extern crate rlp; - /// use rlp::*; - /// - /// fn main () { - /// let mut stream = RlpStream::new_list(2); - /// stream.begin_list(2).append(&"cat").append(&"dog"); - /// stream.append(&""); - /// let out = stream.out(); - /// assert_eq!(out, vec![0xca, 0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g', 0x80]); - /// } - /// ``` - pub fn begin_list(&mut self, len: usize) -> &mut RlpStream { - self.finished_list = false; - match len { - 0 => { - // we may finish, if the appended list len is equal 0 - self.buffer.push(0xc0u8); - self.note_appended(1); - self.finished_list = true; - }, - _ => { - // payload is longer than 1 byte only for lists > 55 bytes - // by pushing always this 1 byte we may avoid unnecessary shift of data - self.buffer.push(0); - - let position = self.buffer.len(); - self.unfinished_lists.push(ListInfo::new(position, Some(len))); - }, - } - - // return chainable self - self - } - - /// Declare appending the list of unknown size, chainable. - pub fn begin_unbounded_list(&mut self) -> &mut RlpStream { - self.finished_list = false; - // payload is longer than 1 byte only for lists > 55 bytes - // by pushing always this 1 byte we may avoid unnecessary shift of data - self.buffer.push(0); - let position = self.buffer.len(); - self.unfinished_lists.push(ListInfo::new(position, None)); - // return chainable self - self - } - - /// Appends raw (pre-serialised) RLP data. Checks for size oveflow. - pub fn append_raw_checked<'a>(&'a mut self, bytes: &[u8], item_count: usize, max_size: usize) -> bool { - if self.estimate_size(bytes.len()) > max_size { - return false; - } - self.append_raw(bytes, item_count); - true - } - - /// Calculate total RLP size for appended payload. - pub fn estimate_size<'a>(&'a self, add: usize) -> usize { - let total_size = self.buffer.len() + add; - let mut base_size = total_size; - for list in &self.unfinished_lists[..] { - let len = total_size - list.position; - if len > 55 { - let leading_empty_bytes = (len as u64).leading_zeros() as usize / 8; - let size_bytes = 8 - leading_empty_bytes; - base_size += size_bytes; - } - } - base_size - } - - /// Returns current RLP size in bytes for the data pushed into the list. - pub fn len<'a>(&'a self) -> usize { - self.estimate_size(0) - } - - /// Clear the output stream so far. - /// - /// ```rust - /// extern crate rlp; - /// use rlp::*; - /// - /// fn main () { - /// let mut stream = RlpStream::new_list(3); - /// stream.append(&"cat"); - /// stream.clear(); - /// stream.append(&"dog"); - /// let out = stream.out(); - /// assert_eq!(out, vec![0x83, b'd', b'o', b'g']); - /// } - pub fn clear(&mut self) { - // clear bytes - self.buffer.clear(); - - // clear lists - self.unfinished_lists.clear(); - } - - /// Returns true if stream doesnt expect any more items. - /// - /// ```rust - /// extern crate rlp; - /// use rlp::*; - /// - /// fn main () { - /// let mut stream = RlpStream::new_list(2); - /// stream.append(&"cat"); - /// assert_eq!(stream.is_finished(), false); - /// stream.append(&"dog"); - /// assert_eq!(stream.is_finished(), true); - /// let out = stream.out(); - /// assert_eq!(out, vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g']); - /// } - pub fn is_finished(&self) -> bool { - self.unfinished_lists.len() == 0 - } - - /// Get raw encoded bytes - pub fn as_raw(&self) -> &[u8] { - //&self.encoder.bytes - &self.buffer - } - - /// Streams out encoded bytes. - /// - /// panic! if stream is not finished. 
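The `begin_unbounded_list` method declared above has no doc-test of its own (only `test_rlp_stream_unbounded_list` near the end of this diff exercises it). A hedged sketch of how it pairs with `complete_unbounded_list`, which is defined a little further down in this file; the snippet is not part of the patch.

```rust
extern crate rlp;

use rlp::RlpStream;

fn main() {
    let mut stream = RlpStream::new();
    // Open a list whose length is not known up front.
    stream.begin_unbounded_list();
    stream.append(&40u32);
    stream.append(&41u32);
    // The stream stays unfinished until the open list is explicitly closed.
    assert!(!stream.is_finished());
    stream.complete_unbounded_list();
    assert!(stream.is_finished());
    // A two-item list: 0xc0 + payload length 2, then the two single-byte values.
    assert_eq!(stream.out(), vec![0xc2, 0x28, 0x29]);
}
```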
- pub fn out(self) -> Vec { - match self.is_finished() { - //true => self.encoder.out().into_vec(), - true => self.buffer.into_vec(), - false => panic!() - } - } - - /// Try to finish lists - fn note_appended(&mut self, inserted_items: usize) -> () { - if self.unfinished_lists.len() == 0 { - return; - } - - let back = self.unfinished_lists.len() - 1; - let should_finish = match self.unfinished_lists.get_mut(back) { - None => false, - Some(ref mut x) => { - x.current += inserted_items; - match x.max { - Some(ref max) if x.current > *max => panic!("You cannot append more items then you expect!"), - Some(ref max) => x.current == *max, - _ => false, - } - } - }; - - if should_finish { - let x = self.unfinished_lists.pop().unwrap(); - let len = self.buffer.len() - x.position; - self.encoder().insert_list_payload(len, x.position); - self.note_appended(1); - } - self.finished_list = should_finish; - } - - pub fn encoder(&mut self) -> BasicEncoder { - BasicEncoder::new(self) - } - - /// Finalize current ubnbound list. Panics if no unbounded list has been opened. - pub fn complete_unbounded_list(&mut self) { - let list = self.unfinished_lists.pop().expect("No open list."); - if list.max.is_some() { - panic!("List type mismatch."); - } - let len = self.buffer.len() - list.position; - self.encoder().insert_list_payload(len, list.position); - self.note_appended(1); - } -} - -pub struct BasicEncoder<'a> { - buffer: &'a mut ElasticArray1024, -} - -impl<'a> BasicEncoder<'a> { - fn new(stream: &'a mut RlpStream) -> Self { - BasicEncoder { - buffer: &mut stream.buffer - } - } - - fn insert_size(&mut self, size: usize, position: usize) -> u8 { - let size = size as u32; - let leading_empty_bytes = size.leading_zeros() as usize / 8; - let size_bytes = 4 - leading_empty_bytes as u8; - let mut buffer = [0u8; 4]; - BigEndian::write_u32(&mut buffer, size); - self.buffer.insert_slice(position, &buffer[leading_empty_bytes..]); - size_bytes as u8 - } - - /// Inserts list prefix at given position - fn insert_list_payload(&mut self, len: usize, pos: usize) { - // 1 byte was already reserved for payload earlier - match len { - 0...55 => { - self.buffer[pos - 1] = 0xc0u8 + len as u8; - }, - _ => { - let inserted_bytes = self.insert_size(len, pos); - self.buffer[pos - 1] = 0xf7u8 + inserted_bytes; - } - }; - } - - /// Pushes encoded value to the end of buffer - pub fn encode_value(&mut self, value: &[u8]) { - match value.len() { - // just 0 - 0 => self.buffer.push(0x80u8), - // byte is its own encoding if < 0x80 - 1 if value[0] < 0x80 => self.buffer.push(value[0]), - // (prefix + length), followed by the string - len @ 1 ... 55 => { - self.buffer.push(0x80u8 + len as u8); - self.buffer.append_slice(value); - } - // (prefix + length of length), followed by the length, followd by the string - len => { - self.buffer.push(0); - let position = self.buffer.len(); - let inserted_bytes = self.insert_size(len, position); - self.buffer[position - 1] = 0xb7 + inserted_bytes; - self.buffer.append_slice(value); - } - } - } -} diff --git a/util/rlp/src/traits.rs b/util/rlp/src/traits.rs deleted file mode 100644 index 1596009e7..000000000 --- a/util/rlp/src/traits.rs +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! 
Common RLP traits -use elastic_array::ElasticArray1024; -use {DecoderError, Rlp, RlpStream}; - -/// RLP decodable trait -pub trait Decodable: Sized { - /// Decode a value from RLP bytes - fn decode(rlp: &Rlp) -> Result; -} - -/// Structure encodable to RLP -pub trait Encodable { - /// Append a value to the stream - fn rlp_append(&self, s: &mut RlpStream); - - /// Get rlp-encoded bytes for this instance - fn rlp_bytes(&self) -> ElasticArray1024 { - let mut s = RlpStream::new(); - self.rlp_append(&mut s); - s.drain() - } -} diff --git a/util/rlp/tests/tests.rs b/util/rlp/tests/tests.rs deleted file mode 100644 index 7aa2920c6..000000000 --- a/util/rlp/tests/tests.rs +++ /dev/null @@ -1,425 +0,0 @@ -// Copyright 2015-2017 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -extern crate ethereum_types as bigint; -extern crate rlp; - -use std::{fmt, cmp}; -use bigint::{U256, H160}; -use rlp::{Encodable, Decodable, Rlp, RlpStream, DecoderError}; - -#[test] -fn rlp_at() { - let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g']; - { - let rlp = Rlp::new(&data); - assert!(rlp.is_list()); - let animals: Vec = rlp.as_list().unwrap(); - assert_eq!(animals, vec!["cat".to_owned(), "dog".to_owned()]); - - let cat = rlp.at(0).unwrap(); - assert!(cat.is_data()); - assert_eq!(cat.as_raw(), &[0x83, b'c', b'a', b't']); - assert_eq!(cat.as_val::().unwrap(), "cat".to_owned()); - - let dog = rlp.at(1).unwrap(); - assert!(dog.is_data()); - assert_eq!(dog.as_raw(), &[0x83, b'd', b'o', b'g']); - assert_eq!(dog.as_val::().unwrap(), "dog".to_owned()); - - let cat_again = rlp.at(0).unwrap(); - assert!(cat_again.is_data()); - assert_eq!(cat_again.as_raw(), &[0x83, b'c', b'a', b't']); - assert_eq!(cat_again.as_val::().unwrap(), "cat".to_owned()); - } -} - -#[test] -fn rlp_at_err() { - let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o']; - { - let rlp = Rlp::new(&data); - assert!(rlp.is_list()); - - let cat_err = rlp.at(0).unwrap_err(); - assert_eq!(cat_err, DecoderError::RlpIsTooShort); - - let dog_err = rlp.at(1).unwrap_err(); - assert_eq!(dog_err, DecoderError::RlpIsTooShort); - } -} - -#[test] -fn rlp_iter() { - let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g']; - { - let rlp = Rlp::new(&data); - let mut iter = rlp.iter(); - - let cat = iter.next().unwrap(); - assert!(cat.is_data()); - assert_eq!(cat.as_raw(), &[0x83, b'c', b'a', b't']); - - let dog = iter.next().unwrap(); - assert!(dog.is_data()); - assert_eq!(dog.as_raw(), &[0x83, b'd', b'o', b'g']); - - let none = iter.next(); - assert!(none.is_none()); - - let cat_again = rlp.at(0).unwrap(); - assert!(cat_again.is_data()); - assert_eq!(cat_again.as_raw(), &[0x83, b'c', b'a', b't']); - } -} - -struct ETestPair(T, Vec) where T: Encodable; - -fn run_encode_tests(tests: Vec>) - where T: Encodable -{ - for t in &tests { - let res = rlp::encode(&t.0); - assert_eq!(&res[..], &t.1[..]); - } -} - -struct VETestPair(Vec, Vec) where T: Encodable; - -fn run_encode_tests_list(tests: Vec>) - where T: Encodable -{ - for t in &tests { - let res = rlp::encode_list(&t.0); - assert_eq!(&res[..], &t.1[..]); - } -} - -#[test] -fn encode_u16() { - let tests = vec![ - ETestPair(0u16, vec![0x80u8]), - ETestPair(0x100, vec![0x82, 0x01, 0x00]), - ETestPair(0xffff, vec![0x82, 0xff, 0xff]), - ]; - run_encode_tests(tests); -} - -#[test] -fn encode_u32() { - let tests = vec![ - 
ETestPair(0u32, vec![0x80u8]), - ETestPair(0x10000, vec![0x83, 0x01, 0x00, 0x00]), - ETestPair(0xffffff, vec![0x83, 0xff, 0xff, 0xff]), - ]; - run_encode_tests(tests); -} - -#[test] -fn encode_u64() { - let tests = vec![ - ETestPair(0u64, vec![0x80u8]), - ETestPair(0x1000000, vec![0x84, 0x01, 0x00, 0x00, 0x00]), - ETestPair(0xFFFFFFFF, vec![0x84, 0xff, 0xff, 0xff, 0xff]), - ]; - run_encode_tests(tests); -} - -#[test] -fn encode_u256() { - let tests = vec![ETestPair(U256::from(0u64), vec![0x80u8]), - ETestPair(U256::from(0x1000000u64), vec![0x84, 0x01, 0x00, 0x00, 0x00]), - ETestPair(U256::from(0xffffffffu64), - vec![0x84, 0xff, 0xff, 0xff, 0xff]), - ETestPair(("8090a0b0c0d0e0f00910203040506077000000000000\ - 000100000000000012f0").into(), - vec![0xa0, 0x80, 0x90, 0xa0, 0xb0, 0xc0, 0xd0, 0xe0, 0xf0, - 0x09, 0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x77, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x12, 0xf0])]; - run_encode_tests(tests); -} - -#[test] -fn encode_str() { - let tests = vec![ETestPair("cat", vec![0x83, b'c', b'a', b't']), - ETestPair("dog", vec![0x83, b'd', b'o', b'g']), - ETestPair("Marek", vec![0x85, b'M', b'a', b'r', b'e', b'k']), - ETestPair("", vec![0x80]), - ETestPair("Lorem ipsum dolor sit amet, consectetur adipisicing elit", - vec![0xb8, 0x38, b'L', b'o', b'r', b'e', b'm', b' ', b'i', - b'p', b's', b'u', b'm', b' ', b'd', b'o', b'l', b'o', - b'r', b' ', b's', b'i', b't', b' ', b'a', b'm', b'e', - b't', b',', b' ', b'c', b'o', b'n', b's', b'e', b'c', - b't', b'e', b't', b'u', b'r', b' ', b'a', b'd', b'i', - b'p', b'i', b's', b'i', b'c', b'i', b'n', b'g', b' ', - b'e', b'l', b'i', b't'])]; - run_encode_tests(tests); -} - -#[test] -fn encode_address() { - let tests = vec![ - ETestPair(H160::from("ef2d6d194084c2de36e0dabfce45d046b37d1106"), - vec![0x94, 0xef, 0x2d, 0x6d, 0x19, 0x40, 0x84, 0xc2, 0xde, - 0x36, 0xe0, 0xda, 0xbf, 0xce, 0x45, 0xd0, 0x46, - 0xb3, 0x7d, 0x11, 0x06]) - ]; - run_encode_tests(tests); -} - -/// Vec (Bytes) is treated as a single value -#[test] -fn encode_vector_u8() { - let tests = vec![ - ETestPair(vec![], vec![0x80]), - ETestPair(vec![0u8], vec![0]), - ETestPair(vec![0x15], vec![0x15]), - ETestPair(vec![0x40, 0x00], vec![0x82, 0x40, 0x00]), - ]; - run_encode_tests(tests); -} - -#[test] -fn encode_vector_u64() { - let tests = vec![ - VETestPair(vec![], vec![0xc0]), - VETestPair(vec![15u64], vec![0xc1, 0x0f]), - VETestPair(vec![1, 2, 3, 7, 0xff], vec![0xc6, 1, 2, 3, 7, 0x81, 0xff]), - VETestPair(vec![0xffffffff, 1, 2, 3, 7, 0xff], vec![0xcb, 0x84, 0xff, 0xff, 0xff, 0xff, 1, 2, 3, 7, 0x81, 0xff]), - ]; - run_encode_tests_list(tests); -} - -#[test] -fn encode_vector_str() { - let tests = vec![VETestPair(vec!["cat", "dog"], - vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'])]; - run_encode_tests_list(tests); -} - -struct DTestPair(T, Vec) where T: Decodable + fmt::Debug + cmp::Eq; - -struct VDTestPair(Vec, Vec) where T: Decodable + fmt::Debug + cmp::Eq; - -fn run_decode_tests(tests: Vec>) where T: Decodable + fmt::Debug + cmp::Eq { - for t in &tests { - let res : Result = rlp::decode(&t.1); - assert!(res.is_ok()); - let res = res.unwrap(); - assert_eq!(&res, &t.0); - } -} - -fn run_decode_tests_list(tests: Vec>) where T: Decodable + fmt::Debug + cmp::Eq { - for t in &tests { - let res: Vec = rlp::decode_list(&t.1); - assert_eq!(res, t.0); - } -} - -/// Vec (Bytes) is treated as a single value -#[test] -fn decode_vector_u8() { - let tests = vec![ - DTestPair(vec![], vec![0x80]), - DTestPair(vec![0u8], 
vec![0]), - DTestPair(vec![0x15], vec![0x15]), - DTestPair(vec![0x40, 0x00], vec![0x82, 0x40, 0x00]), - ]; - run_decode_tests(tests); -} - -#[test] -fn decode_untrusted_u8() { - let tests = vec![ - DTestPair(0x0u8, vec![0x80]), - DTestPair(0x77u8, vec![0x77]), - DTestPair(0xccu8, vec![0x81, 0xcc]), - ]; - run_decode_tests(tests); -} - -#[test] -fn decode_untrusted_u16() { - let tests = vec![ - DTestPair(0x100u16, vec![0x82, 0x01, 0x00]), - DTestPair(0xffffu16, vec![0x82, 0xff, 0xff]), - ]; - run_decode_tests(tests); -} - -#[test] -fn decode_untrusted_u32() { - let tests = vec![ - DTestPair(0x10000u32, vec![0x83, 0x01, 0x00, 0x00]), - DTestPair(0xffffffu32, vec![0x83, 0xff, 0xff, 0xff]), - ]; - run_decode_tests(tests); -} - -#[test] -fn decode_untrusted_u64() { - let tests = vec![ - DTestPair(0x1000000u64, vec![0x84, 0x01, 0x00, 0x00, 0x00]), - DTestPair(0xFFFFFFFFu64, vec![0x84, 0xff, 0xff, 0xff, 0xff]), - ]; - run_decode_tests(tests); -} - -#[test] -fn decode_untrusted_u256() { - let tests = vec![DTestPair(U256::from(0u64), vec![0x80u8]), - DTestPair(U256::from(0x1000000u64), vec![0x84, 0x01, 0x00, 0x00, 0x00]), - DTestPair(U256::from(0xffffffffu64), - vec![0x84, 0xff, 0xff, 0xff, 0xff]), - DTestPair(("8090a0b0c0d0e0f00910203040506077000000000000\ - 000100000000000012f0").into(), - vec![0xa0, 0x80, 0x90, 0xa0, 0xb0, 0xc0, 0xd0, 0xe0, 0xf0, - 0x09, 0x10, 0x20, 0x30, 0x40, 0x50, 0x60, 0x77, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x00, 0x12, 0xf0])]; - run_decode_tests(tests); -} - -#[test] -fn decode_untrusted_str() { - let tests = vec![DTestPair("cat".to_owned(), vec![0x83, b'c', b'a', b't']), - DTestPair("dog".to_owned(), vec![0x83, b'd', b'o', b'g']), - DTestPair("Marek".to_owned(), - vec![0x85, b'M', b'a', b'r', b'e', b'k']), - DTestPair("".to_owned(), vec![0x80]), - DTestPair("Lorem ipsum dolor sit amet, consectetur adipisicing elit" - .to_owned(), - vec![0xb8, 0x38, b'L', b'o', b'r', b'e', b'm', b' ', b'i', - b'p', b's', b'u', b'm', b' ', b'd', b'o', b'l', b'o', - b'r', b' ', b's', b'i', b't', b' ', b'a', b'm', b'e', - b't', b',', b' ', b'c', b'o', b'n', b's', b'e', b'c', - b't', b'e', b't', b'u', b'r', b' ', b'a', b'd', b'i', - b'p', b'i', b's', b'i', b'c', b'i', b'n', b'g', b' ', - b'e', b'l', b'i', b't'])]; - run_decode_tests(tests); -} - -#[test] -fn decode_untrusted_address() { - let tests = vec![ - DTestPair(H160::from("ef2d6d194084c2de36e0dabfce45d046b37d1106"), - vec![0x94, 0xef, 0x2d, 0x6d, 0x19, 0x40, 0x84, 0xc2, 0xde, - 0x36, 0xe0, 0xda, 0xbf, 0xce, 0x45, 0xd0, 0x46, - 0xb3, 0x7d, 0x11, 0x06]) - ]; - run_decode_tests(tests); -} - -#[test] -fn decode_untrusted_vector_u64() { - let tests = vec![ - VDTestPair(vec![], vec![0xc0]), - VDTestPair(vec![15u64], vec![0xc1, 0x0f]), - VDTestPair(vec![1, 2, 3, 7, 0xff], vec![0xc6, 1, 2, 3, 7, 0x81, 0xff]), - VDTestPair(vec![0xffffffff, 1, 2, 3, 7, 0xff], vec![0xcb, 0x84, 0xff, 0xff, 0xff, 0xff, 1, 2, 3, 7, 0x81, 0xff]), - ]; - run_decode_tests_list(tests); -} - -#[test] -fn decode_untrusted_vector_str() { - let tests = vec![VDTestPair(vec!["cat".to_owned(), "dog".to_owned()], - vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'])]; - run_decode_tests_list(tests); -} - -#[test] -fn test_rlp_data_length_check() -{ - let data = vec![0x84, b'c', b'a', b't']; - let rlp = Rlp::new(&data); - - let as_val: Result = rlp.as_val(); - assert_eq!(Err(DecoderError::RlpInconsistentLengthAndData), as_val); -} - -#[test] -fn test_rlp_long_data_length_check() -{ - let mut data: Vec = vec![0xb8, 255]; - 
for _ in 0..253 { - data.push(b'c'); - } - - let rlp = Rlp::new(&data); - - let as_val: Result = rlp.as_val(); - assert_eq!(Err(DecoderError::RlpInconsistentLengthAndData), as_val); -} - -#[test] -fn test_the_exact_long_string() -{ - let mut data: Vec = vec![0xb8, 255]; - for _ in 0..255 { - data.push(b'c'); - } - - let rlp = Rlp::new(&data); - - let as_val: Result = rlp.as_val(); - assert!(as_val.is_ok()); -} - -#[test] -fn test_rlp_2bytes_data_length_check() -{ - let mut data: Vec = vec![0xb9, 2, 255]; // 512+255 - for _ in 0..700 { - data.push(b'c'); - } - - let rlp = Rlp::new(&data); - - let as_val: Result = rlp.as_val(); - assert_eq!(Err(DecoderError::RlpInconsistentLengthAndData), as_val); -} - -#[test] -fn test_rlp_nested_empty_list_encode() { - let mut stream = RlpStream::new_list(2); - stream.append_list(&(Vec::new() as Vec)); - stream.append(&40u32); - assert_eq!(stream.drain()[..], [0xc2u8, 0xc0u8, 40u8][..]); -} - -#[test] -fn test_rlp_list_length_overflow() { - let data: Vec = vec![0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00]; - let rlp = Rlp::new(&data); - let as_val: Result = rlp.val_at(0); - assert_eq!(Err(DecoderError::RlpIsTooShort), as_val); -} - -#[test] -fn test_rlp_stream_size_limit() { - for limit in 40 .. 270 { - let item = [0u8; 1]; - let mut stream = RlpStream::new(); - while stream.append_raw_checked(&item, 1, limit) {} - assert_eq!(stream.drain().len(), limit); - } -} - -#[test] -fn test_rlp_stream_unbounded_list() { - let mut stream = RlpStream::new(); - stream.begin_unbounded_list(); - stream.append(&40u32); - stream.append(&41u32); - assert!(!stream.is_finished()); - stream.complete_unbounded_list(); - assert!(stream.is_finished()); -} diff --git a/util/rlp_compress/Cargo.toml b/util/rlp_compress/Cargo.toml index d5f85425f..c61d1f206 100644 --- a/util/rlp_compress/Cargo.toml +++ b/util/rlp_compress/Cargo.toml @@ -4,6 +4,6 @@ version = "0.1.0" authors = ["Parity Technologies "] [dependencies] -rlp = { path = "../rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } elastic-array = "0.10" lazy_static = "1.0" diff --git a/util/rlp_derive/Cargo.toml b/util/rlp_derive/Cargo.toml index bb488cc29..c71e3d245 100644 --- a/util/rlp_derive/Cargo.toml +++ b/util/rlp_derive/Cargo.toml @@ -12,4 +12,4 @@ syn = "0.13" quote = "0.5" [dev-dependencies] -rlp = { path = "../rlp" } +rlp = { git = "https://github.com/paritytech/parity-common" } diff --git a/util/trie-standardmap/Cargo.toml b/util/trie-standardmap/Cargo.toml deleted file mode 100644 index 1177f3075..000000000 --- a/util/trie-standardmap/Cargo.toml +++ /dev/null @@ -1,11 +0,0 @@ -[package] -name = "trie-standardmap" -version = "0.1.0" -authors = ["debris "] -description = "Standard test map for profiling tries" - -[dependencies] -ethcore-bytes = { path = "../bytes" } -ethereum-types = "0.3" -keccak-hash = { path = "../hash" } -rlp = { path = "../rlp" } diff --git a/util/trie-standardmap/src/lib.rs b/util/trie-standardmap/src/lib.rs deleted file mode 100644 index 51c8593ea..000000000 --- a/util/trie-standardmap/src/lib.rs +++ /dev/null @@ -1,124 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Key-value datastore with a modified Merkle tree. - -extern crate ethcore_bytes as bytes; -extern crate ethereum_types; -extern crate keccak_hash; -extern crate rlp; - -use bytes::Bytes; -use ethereum_types::H256; -use keccak_hash::keccak; -use rlp::encode; - -/// Alphabet to use when creating words for insertion into tries. -pub enum Alphabet { - /// All values are allowed in each bytes of the key. - All, - /// Only a 6 values ('a' - 'f') are chosen to compose the key. - Low, - /// Quite a few values (around 32) are chosen to compose the key. - Mid, - /// A set of bytes given is used to compose the key. - Custom(Bytes), -} - -/// Means of determining the value. -pub enum ValueMode { - /// Same as the key. - Mirror, - /// Randomly (50:50) 1 or 32 byte randomly string. - Random, - /// RLP-encoded index. - Index, -} - -/// Standard test map for profiling tries. -pub struct StandardMap { - /// The alphabet to use for keys. - pub alphabet: Alphabet, - /// Minimum size of key. - pub min_key: usize, - /// Delta size of key. - pub journal_key: usize, - /// Mode of value generation. - pub value_mode: ValueMode, - /// Number of keys. - pub count: usize, -} - -impl StandardMap { - /// Get a bunch of random bytes, at least `min_count` bytes, at most `min_count` + `journal_count` bytes. - /// `seed` is mutated pseudoramdonly and used. - fn random_bytes(min_count: usize, journal_count: usize, seed: &mut H256) -> Vec { - assert!(min_count + journal_count <= 32); - *seed = keccak(&seed); - let r = min_count + (seed[31] as usize % (journal_count + 1)); - seed[0..r].to_vec() - } - - /// Get a random value. Equal chance of being 1 byte as of 32. `seed` is mutated pseudoramdonly and used. - fn random_value(seed: &mut H256) -> Bytes { - *seed = keccak(&seed); - match seed[0] % 2 { - 1 => vec![seed[31];1], - _ => seed.to_vec(), - } - } - - /// Get a random word of, at least `min_count` bytes, at most `min_count` + `journal_count` bytes. - /// Each byte is an item from `alphabet`. `seed` is mutated pseudoramdonly and used. - fn random_word(alphabet: &[u8], min_count: usize, journal_count: usize, seed: &mut H256) -> Vec { - assert!(min_count + journal_count <= 32); - *seed = keccak(&seed); - let r = min_count + (seed[31] as usize % (journal_count + 1)); - let mut ret: Vec = Vec::with_capacity(r); - for i in 0..r { - ret.push(alphabet[seed[i] as usize % alphabet.len()]); - } - ret - } - - /// Create the standard map (set of keys and values) for the object's fields. - pub fn make(&self) -> Vec<(Bytes, Bytes)> { - self.make_with(&mut H256::new()) - } - - /// Create the standard map (set of keys and values) for the object's fields, using the given seed. 
- pub fn make_with(&self, seed: &mut H256) -> Vec<(Bytes, Bytes)> { - let low = b"abcdef"; - let mid = b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_"; - - let mut d: Vec<(Bytes, Bytes)> = Vec::new(); - for index in 0..self.count { - let k = match self.alphabet { - Alphabet::All => Self::random_bytes(self.min_key, self.journal_key, seed), - Alphabet::Low => Self::random_word(low, self.min_key, self.journal_key, seed), - Alphabet::Mid => Self::random_word(mid, self.min_key, self.journal_key, seed), - Alphabet::Custom(ref a) => Self::random_word(a, self.min_key, self.journal_key, seed), - }; - let v = match self.value_mode { - ValueMode::Mirror => k.clone(), - ValueMode::Random => Self::random_value(seed), - ValueMode::Index => encode(&index).into_vec(), - }; - d.push((k, v)) - } - d - } -} diff --git a/util/triehash-ethereum/Cargo.toml b/util/triehash-ethereum/Cargo.toml new file mode 100644 index 000000000..d5b1b118a --- /dev/null +++ b/util/triehash-ethereum/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "triehash-ethereum" +version = "0.2.0" +authors = ["Parity Technologies "] +description = "Trie-root helpers, ethereum style" +license = "GPL-3.0" + +[dependencies] +triehash = { git = "https://github.com/paritytech/parity-common" } +ethereum-types = "0.3" +keccak-hasher = { path = "../keccak-hasher" } \ No newline at end of file diff --git a/util/triehash-ethereum/src/lib.rs b/util/triehash-ethereum/src/lib.rs new file mode 100644 index 000000000..7de77473c --- /dev/null +++ b/util/triehash-ethereum/src/lib.rs @@ -0,0 +1,87 @@ +// Copyright 2015-2018 Parity Technologies (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +//! Generates Keccak-flavoured trie roots. + +extern crate ethereum_types; +extern crate keccak_hasher; +extern crate triehash; + +use ethereum_types::H256; +use keccak_hasher::KeccakHasher; + +/// Generates a trie root hash for a vector of key-value tuples +pub fn trie_root(input: I) -> H256 +where + I: IntoIterator, + K: AsRef<[u8]> + Ord, + V: AsRef<[u8]>, +{ + triehash::trie_root::(input) +} + +/// Generates a key-hashed (secure) trie root hash for a vector of key-value tuples. 
+pub fn sec_trie_root(input: I) -> H256 +where + I: IntoIterator, + K: AsRef<[u8]>, + V: AsRef<[u8]>, +{ + triehash::sec_trie_root::(input) +} + +/// Generates a trie root hash for a vector of values +pub fn ordered_trie_root(input: I) -> H256 +where + I: IntoIterator, + V: AsRef<[u8]>, +{ + triehash::ordered_trie_root::(input) +} + +#[cfg(test)] +mod tests { + use super::{trie_root, sec_trie_root, ordered_trie_root}; + use triehash; + use keccak_hasher::KeccakHasher; + + #[test] + fn simple_test() { + assert_eq!(trie_root(vec![ + (b"A", b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" as &[u8]) + ]), "d23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab".into()); + } + + #[test] + fn proxy_works() { + let input = vec![(b"A", b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" as &[u8])]; + assert_eq!( + trie_root(input.clone()), + triehash::trie_root::(input.clone()) + ); + + assert_eq!( + sec_trie_root(input.clone()), + triehash::sec_trie_root::(input.clone()) + ); + + let data = &["cake", "pie", "candy"]; + assert_eq!( + ordered_trie_root(data), + triehash::ordered_trie_root::(data) + ); + } +} \ No newline at end of file diff --git a/util/triehash/Cargo.toml b/util/triehash/Cargo.toml deleted file mode 100644 index ee42b9d82..000000000 --- a/util/triehash/Cargo.toml +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "triehash" -version = "0.1.0" -authors = ["Parity Technologies "] -description = "in memory patricia trie operations" -license = "GPL-3.0" - -[dependencies] -elastic-array = "0.10" -rlp = { version = "0.2.1", path = "../rlp" } -ethereum-types = "0.3" -keccak-hash = { version = "0.1", path = "../hash" } - -[dev-dependencies] -trie-standardmap = { path = "../trie-standardmap" } diff --git a/util/triehash/benches/triehash.rs b/util/triehash/benches/triehash.rs deleted file mode 100644 index 505ea1223..000000000 --- a/util/triehash/benches/triehash.rs +++ /dev/null @@ -1,147 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . 
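The new `triehash-ethereum` facade above keeps the Keccak-flavoured signatures that call sites in this repository already use, while the generic `triehash` implementation (its in-tree copy is deleted in the hunks around here) now comes from parity-common. A sketch of a call site after the switch, mirroring the crate's own `simple_test`; it is not part of the patch and assumes the usual hyphen-to-underscore crate name `triehash_ethereum`.

```rust
extern crate triehash_ethereum;

use triehash_ethereum::{ordered_trie_root, trie_root};

fn main() {
    // Same fixture and expected root as simple_test above.
    let root = trie_root(vec![
        (b"A", b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" as &[u8]),
    ]);
    assert_eq!(root, "d23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab".into());

    // Roots over position-keyed values (e.g. a block's transactions) use ordered_trie_root.
    let _ordered = ordered_trie_root(&["cake", "pie", "candy"]);
}
```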
- -#![feature(test)] - -extern crate ethereum_types; -extern crate keccak_hash; -extern crate test; -extern crate trie_standardmap; -extern crate triehash; - -use ethereum_types::H256; -use keccak_hash::keccak; -use test::Bencher; -use trie_standardmap::{Alphabet, ValueMode, StandardMap}; -use triehash::trie_root; - -fn random_word(alphabet: &[u8], min_count: usize, diff_count: usize, seed: &mut H256) -> Vec { - assert!(min_count + diff_count <= 32); - *seed = keccak(&seed); - let r = min_count + (seed[31] as usize % (diff_count + 1)); - let mut ret: Vec = Vec::with_capacity(r); - for i in 0..r { - ret.push(alphabet[seed[i] as usize % alphabet.len()]); - } - ret -} - -fn random_bytes(min_count: usize, diff_count: usize, seed: &mut H256) -> Vec { - assert!(min_count + diff_count <= 32); - *seed = keccak(&seed); - let r = min_count + (seed[31] as usize % (diff_count + 1)); - seed[0..r].to_vec() -} - -fn random_value(seed: &mut H256) -> Vec { - *seed = keccak(&seed); - match seed[0] % 2 { - 1 => vec![seed[31];1], - _ => seed.to_vec(), - } -} - -#[bench] -fn triehash_insertions_32_mir_1k(b: &mut Bencher) { - let st = StandardMap { - alphabet: Alphabet::All, - min_key: 32, - journal_key: 0, - value_mode: ValueMode::Mirror, - count: 1000, - }; - let d = st.make(); - b.iter(&mut ||{ - trie_root(d.clone()).clone(); - }); -} - -#[bench] -fn triehash_insertions_32_ran_1k(b: &mut Bencher) { - let st = StandardMap { - alphabet: Alphabet::All, - min_key: 32, - journal_key: 0, - value_mode: ValueMode::Random, - count: 1000, - }; - let d = st.make(); - b.iter(&mut ||{ - trie_root(d.clone()).clone(); - }); -} - -#[bench] -fn triehash_insertions_six_high(b: &mut Bencher) { - let mut d: Vec<(Vec, Vec)> = Vec::new(); - let mut seed = H256::new(); - for _ in 0..1000 { - let k = random_bytes(6, 0, &mut seed); - let v = random_value(&mut seed); - d.push((k, v)) - } - - b.iter(&||{ - trie_root(d.clone()); - }) -} - -#[bench] -fn triehash_insertions_six_mid(b: &mut Bencher) { - let alphabet = b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_"; - let mut d: Vec<(Vec, Vec)> = Vec::new(); - let mut seed = H256::new(); - for _ in 0..1000 { - let k = random_word(alphabet, 6, 0, &mut seed); - let v = random_value(&mut seed); - d.push((k, v)) - } - b.iter(||{ - trie_root(d.clone()); - }) -} - -#[bench] -fn triehash_insertions_random_mid(b: &mut Bencher) { - let alphabet = b"@QWERTYUIOPASDFGHJKLZXCVBNM[/]^_"; - let mut d: Vec<(Vec, Vec)> = Vec::new(); - let mut seed = H256::new(); - for _ in 0..1000 { - let k = random_word(alphabet, 1, 5, &mut seed); - let v = random_value(&mut seed); - d.push((k, v)) - } - - b.iter(||{ - trie_root(d.clone()); - }) -} - -#[bench] -fn triehash_insertions_six_low(b: &mut Bencher) { - let alphabet = b"abcdef"; - let mut d: Vec<(Vec, Vec)> = Vec::new(); - let mut seed = H256::new(); - for _ in 0..1000 { - let k = random_word(alphabet, 6, 0, &mut seed); - let v = random_value(&mut seed); - d.push((k, v)) - } - - b.iter(||{ - trie_root(d.clone()); - }) -} diff --git a/util/triehash/src/lib.rs b/util/triehash/src/lib.rs deleted file mode 100644 index c78ed0ca1..000000000 --- a/util/triehash/src/lib.rs +++ /dev/null @@ -1,376 +0,0 @@ -// Copyright 2015-2018 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. 
- -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -//! Generetes trie root. -//! -//! This module should be used to generate trie root hash. - -extern crate elastic_array; -extern crate ethereum_types; -extern crate keccak_hash as hash; -extern crate rlp; - -use std::collections::BTreeMap; -use std::cmp; -use elastic_array::{ElasticArray4, ElasticArray8}; -use ethereum_types::H256; -use hash::keccak; -use rlp::RlpStream; - -fn shared_prefix_len(first: &[T], second: &[T]) -> usize { - let len = cmp::min(first.len(), second.len()); - (0..len).take_while(|&i| first[i] == second[i]).count() -} - -/// Generates a trie root hash for a vector of values -/// -/// ```rust -/// extern crate triehash; -/// use triehash::ordered_trie_root; -/// -/// fn main() { -/// let v = &["doe", "reindeer"]; -/// let root = "e766d5d51b89dc39d981b41bda63248d7abce4f0225eefd023792a540bcffee3"; -/// assert_eq!(ordered_trie_root(v), root.into()); -/// } -/// ``` -pub fn ordered_trie_root(input: I) -> H256 - where I: IntoIterator, - A: AsRef<[u8]>, -{ - let gen_input: Vec<_> = input - // first put elements into btree to sort them by nibbles - // optimize it later - .into_iter() - .enumerate() - .map(|(i, slice)| (rlp::encode(&i), slice)) - .collect::>() - // then move them to a vector - .into_iter() - .map(|(k, v)| (as_nibbles(&k), v) ) - .collect(); - - gen_trie_root(&gen_input) -} - -/// Generates a trie root hash for a vector of key-values -/// -/// ```rust -/// extern crate triehash; -/// use triehash::trie_root; -/// -/// fn main() { -/// let v = vec![ -/// ("doe", "reindeer"), -/// ("dog", "puppy"), -/// ("dogglesworth", "cat"), -/// ]; -/// -/// let root = "8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3"; -/// assert_eq!(trie_root(v), root.into()); -/// } -/// ``` -pub fn trie_root(input: I) -> H256 - where I: IntoIterator, - A: AsRef<[u8]> + Ord, - B: AsRef<[u8]>, -{ - let gen_input: Vec<_> = input - // first put elements into btree to sort them and to remove duplicates - .into_iter() - .collect::>() - // then move them to a vector - .into_iter() - .map(|(k, v)| (as_nibbles(k.as_ref()), v) ) - .collect(); - - gen_trie_root(&gen_input) -} - -/// Generates a key-hashed (secure) trie root hash for a vector of key-values. -/// -/// ```rust -/// extern crate triehash; -/// use triehash::sec_trie_root; -/// -/// fn main() { -/// let v = vec![ -/// ("doe", "reindeer"), -/// ("dog", "puppy"), -/// ("dogglesworth", "cat"), -/// ]; -/// -/// let root = "d4cd937e4a4368d7931a9cf51686b7e10abb3dce38a39000fd7902a092b64585"; -/// assert_eq!(sec_trie_root(v), root.into()); -/// } -/// ``` -pub fn sec_trie_root(input: I) -> H256 - where I: IntoIterator, - A: AsRef<[u8]>, - B: AsRef<[u8]>, -{ - let gen_input: Vec<_> = input - // first put elements into btree to sort them and to remove duplicates - .into_iter() - .map(|(k, v)| (keccak(k), v)) - .collect::>() - // then move them to a vector - .into_iter() - .map(|(k, v)| (as_nibbles(&k), v) ) - .collect(); - - gen_trie_root(&gen_input) -} - -fn gen_trie_root, B: AsRef<[u8]>>(input: &[(A, B)]) -> H256 { - let mut stream = RlpStream::new(); - hash256rlp(input, 0, &mut stream); - keccak(stream.out()) -} - -/// Hex-prefix Notation. 
-///
-/// The "termination marker" and "leaf-node" specifier are completely equivalent.
-///
-/// Input values are in range `[0, 0xf]`.
-///
-/// ```markdown
-/// [0,0,1,2,3,4,5]   0x10012345 // 7 > 4
-/// [0,1,2,3,4,5]     0x00012345 // 6 > 4
-/// [1,2,3,4,5]       0x112345   // 5 > 3
-/// [0,0,1,2,3,4]     0x00001234 // 6 > 4
-/// [0,1,2,3,4]       0x101234   // 5 > 3
-/// [1,2,3,4]         0x001234   // 4 > 3
-/// [0,0,1,2,3,4,5,T] 0x30012345 // 7 > 4
-/// [0,0,1,2,3,4,T]   0x20001234 // 6 > 4
-/// [0,1,2,3,4,5,T]   0x20012345 // 6 > 4
-/// [1,2,3,4,5,T]     0x312345   // 5 > 3
-/// [1,2,3,4,T]       0x201234   // 4 > 3
-/// ```
-fn hex_prefix_encode(nibbles: &[u8], leaf: bool) -> ElasticArray4<u8> {
-	let inlen = nibbles.len();
-	let oddness_factor = inlen % 2;
-	let mut res = ElasticArray4::new();
-
-	let first_byte = {
-		let mut bits = ((inlen as u8 & 1) + (2 * leaf as u8)) << 4;
-		if oddness_factor == 1 {
-			bits += nibbles[0];
-		}
-		bits
-	};
-
-	res.push(first_byte);
-
-	let mut offset = oddness_factor;
-	while offset < inlen {
-		let byte = (nibbles[offset] << 4) + nibbles[offset + 1];
-		res.push(byte);
-		offset += 2;
-	}
-
-	res
-}
-
-/// Converts slice of bytes to nibbles.
-fn as_nibbles(bytes: &[u8]) -> ElasticArray8<u8> {
-	let mut res = ElasticArray8::new();
-	for i in 0..bytes.len() {
-		let byte = bytes[i];
-		res.push(byte >> 4);
-		res.push(byte & 0b1111);
-	}
-	res
-}
-
-fn hash256rlp<A: AsRef<[u8]>, B: AsRef<[u8]>>(input: &[(A, B)], pre_len: usize, stream: &mut RlpStream) {
-	let inlen = input.len();
-
-	// in case of empty slice, just append empty data
-	if inlen == 0 {
-		stream.append_empty_data();
-		return;
-	}
-
-	// take slices
-	let key: &[u8] = &input[0].0.as_ref();
-	let value: &[u8] = &input[0].1.as_ref();
-
-	// if the slice contains just one item, append the suffix of the key
-	// and then append value
-	if inlen == 1 {
-		stream.begin_list(2);
-		stream.append(&&*hex_prefix_encode(&key[pre_len..], true));
-		stream.append(&value);
-		return;
-	}
-
-	// get length of the longest shared prefix in slice keys
-	let shared_prefix = input.iter()
-		// skip first element
-		.skip(1)
-		// get minimum number of shared nibbles between first and each successive
-		.fold(key.len(), | acc, &(ref k, _) | {
-			cmp::min(shared_prefix_len(key, k.as_ref()), acc)
-		});
-
-	// if shared prefix is higher than current prefix append its
-	// new part of the key to the stream
-	// then recursively append suffixes of all items who had this key
-	if shared_prefix > pre_len {
-		stream.begin_list(2);
-		stream.append(&&*hex_prefix_encode(&key[pre_len..shared_prefix], false));
-		hash256aux(input, shared_prefix, stream);
-		return;
-	}
-
-	// an item for every possible nibble/suffix
-	// + 1 for data
-	stream.begin_list(17);
-
-	// if first key len is equal to prefix_len, move to next element
-	let mut begin = match pre_len == key.len() {
-		true => 1,
-		false => 0
-	};
-
-	// iterate over all possible nibbles
-	for i in 0..16 {
-		// count how many successive elements have same next nibble
-		let len = match begin < input.len() {
-			true => input[begin..].iter()
-				.take_while(| pair | pair.0.as_ref()[pre_len] == i )
-				.count(),
-			false => 0
-		};
-
-		// if at least 1 successive element has the same nibble
-		// append their suffixes
-		match len {
-			0 => { stream.append_empty_data(); },
-			_ => hash256aux(&input[begin..(begin + len)], pre_len + 1, stream)
-		}
-		begin += len;
-	}
-
-	// if first key len is equal prefix, append its value
-	match pre_len == key.len() {
-		true => { stream.append(&value); },
-		false => { stream.append_empty_data(); }
-	};
-}
-
-fn hash256aux<A: AsRef<[u8]>, B: AsRef<[u8]>>(input: &[(A, B)], pre_len: usize, stream: &mut RlpStream) {
-	let mut s = RlpStream::new();
-	hash256rlp(input, pre_len, &mut s);
-	let out = s.out();
-	match out.len() {
-		0...31 => stream.append_raw(&out, 1),
-		_ => stream.append(&keccak(out))
-	};
-}
-
-#[test]
-fn test_nibbles() {
-	let v = vec![0x31, 0x23, 0x45];
-	let e = vec![3, 1, 2, 3, 4, 5];
-	assert_eq!(as_nibbles(&v), e);
-
-	// A => 65 => 0x41 => [4, 1]
-	let v: Vec<u8> = From::from("A");
-	let e = vec![4, 1];
-	assert_eq!(as_nibbles(&v), e);
-}
-
-#[cfg(test)]
-mod tests {
-	use super::{trie_root, shared_prefix_len, hex_prefix_encode};
-
-	#[test]
-	fn test_hex_prefix_encode() {
-		let v = vec![0, 0, 1, 2, 3, 4, 5];
-		let e = vec![0x10, 0x01, 0x23, 0x45];
-		let h = hex_prefix_encode(&v, false);
-		assert_eq!(h, e);
-
-		let v = vec![0, 1, 2, 3, 4, 5];
-		let e = vec![0x00, 0x01, 0x23, 0x45];
-		let h = hex_prefix_encode(&v, false);
-		assert_eq!(h, e);
-
-		let v = vec![0, 1, 2, 3, 4, 5];
-		let e = vec![0x20, 0x01, 0x23, 0x45];
-		let h = hex_prefix_encode(&v, true);
-		assert_eq!(h, e);
-
-		let v = vec![1, 2, 3, 4, 5];
-		let e = vec![0x31, 0x23, 0x45];
-		let h = hex_prefix_encode(&v, true);
-		assert_eq!(h, e);
-
-		let v = vec![1, 2, 3, 4];
-		let e = vec![0x00, 0x12, 0x34];
-		let h = hex_prefix_encode(&v, false);
-		assert_eq!(h, e);
-
-		let v = vec![4, 1];
-		let e = vec![0x20, 0x41];
-		let h = hex_prefix_encode(&v, true);
-		assert_eq!(h, e);
-	}
-
-	#[test]
-	fn simple_test() {
-		assert_eq!(trie_root(vec![
-			(b"A", b"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" as &[u8])
-		]), "d23786fb4a010da3ce639d66d5e904a11dbc02746d1ce25029e53290cabf28ab".into());
-	}
-
-	#[test]
-	fn test_triehash_out_of_order() {
-		assert!(trie_root(vec![
-			(vec![0x01u8, 0x23], vec![0x01u8, 0x23]),
-			(vec![0x81u8, 0x23], vec![0x81u8, 0x23]),
-			(vec![0xf1u8, 0x23], vec![0xf1u8, 0x23]),
-		]) ==
-		trie_root(vec![
-			(vec![0x01u8, 0x23], vec![0x01u8, 0x23]),
-			(vec![0xf1u8, 0x23], vec![0xf1u8, 0x23]),
-			(vec![0x81u8, 0x23], vec![0x81u8, 0x23]),
-		]));
-	}
-
-	#[test]
-	fn test_shared_prefix() {
-		let a = vec![1,2,3,4,5,6];
-		let b = vec![4,2,3,4,5,6];
-		assert_eq!(shared_prefix_len(&a, &b), 0);
-	}
-
-	#[test]
-	fn test_shared_prefix2() {
-		let a = vec![1,2,3,3,5];
-		let b = vec![1,2,3];
-		assert_eq!(shared_prefix_len(&a, &b), 3);
-	}
-
-	#[test]
-	fn test_shared_prefix3() {
-		let a = vec![1,2,3,4,5,6];
-		let b = vec![1,2,3,4,5,6];
-		assert_eq!(shared_prefix_len(&a, &b), 6);
-	}
-}
diff --git a/util/version/Cargo.toml b/util/version/Cargo.toml
index c7bd4f581..44e5cde36 100644
--- a/util/version/Cargo.toml
+++ b/util/version/Cargo.toml
@@ -21,8 +21,8 @@ ropsten = { forkBlock = 10, critical = false }
 kovan = { forkBlock = 6600000, critical = false }
 
 [dependencies]
-ethcore-bytes = { path = "../bytes" }
-rlp = { path = "../rlp" }
+parity-bytes = { git = "https://github.com/paritytech/parity-common" }
+rlp = { git = "https://github.com/paritytech/parity-common" }
 target_info = "0.1"
 
 [build-dependencies]
diff --git a/util/version/src/lib.rs b/util/version/src/lib.rs
index 77fc71c70..79f11415e 100644
--- a/util/version/src/lib.rs
+++ b/util/version/src/lib.rs
@@ -17,7 +17,7 @@
 //! Parity version specific information.
 
 extern crate target_info;
-extern crate ethcore_bytes as bytes;
+extern crate parity_bytes as bytes;
 extern crate rlp;
 
 use target_info::Target;
diff --git a/whisper/Cargo.toml b/whisper/Cargo.toml
index cdc83743a..44882b4f5 100644
--- a/whisper/Cargo.toml
+++ b/whisper/Cargo.toml
@@ -9,7 +9,7 @@ bitflags = "0.9"
 byteorder = "1.0.0"
 ethereum-types = "0.3"
 ethcore-network = { path = "../util/network" }
-ethcore-crypto = { path = "../ethcore/crypto" }
+parity-crypto = { git = "https://github.com/paritytech/parity-common" }
 ethkey = { path = "../ethkey" }
 hex = "0.2"
 log = "0.3"
@@ -17,7 +17,7 @@ mem = { path = "../util/mem" }
 ordered-float = "0.5"
 parking_lot = "0.6"
 rand = "0.4"
-rlp = { path = "../util/rlp" }
+rlp = { git = "https://github.com/paritytech/parity-common" }
 serde = "1.0"
 serde_derive = "1.0"
 serde_json = "1.0"
diff --git a/whisper/src/lib.rs b/whisper/src/lib.rs
index 190169b2c..66d8d1b73 100644
--- a/whisper/src/lib.rs
+++ b/whisper/src/lib.rs
@@ -18,7 +18,7 @@
 //! interface.
 
 extern crate byteorder;
-extern crate ethcore_crypto as crypto;
+extern crate parity_crypto as crypto;
 extern crate ethcore_network as network;
 extern crate ethereum_types;
 extern crate ethkey;
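
Reviewer note, not part of the patch: the deleted `util/triehash/src/lib.rs` above documents hex-prefix notation and implements it in `hex_prefix_encode`. The sketch below is a minimal, self-contained restatement of that encoding for anyone checking the relocated crate against the old behaviour; it substitutes a plain `Vec<u8>` for the original `ElasticArray4<u8>` so it runs without the `elastic-array` dependency, and the `main` assertions are taken from the worked rows in the deleted notation table. It is illustrative only and makes no claim about the parity-common crate's internals beyond what the deleted source shows.

```rust
// Sketch of the hex-prefix ("compact") encoding from the deleted hex_prefix_encode,
// using Vec<u8> instead of ElasticArray4<u8>.
fn hex_prefix_encode(nibbles: &[u8], leaf: bool) -> Vec<u8> {
	let odd = nibbles.len() % 2;
	// High nibble of the first byte carries the flags: bit 0 = odd length, bit 1 = leaf/termination.
	let mut first = ((nibbles.len() as u8 & 1) + 2 * leaf as u8) << 4;
	if odd == 1 {
		// Odd length: the first nibble shares the flag byte.
		first += nibbles[0];
	}
	let mut out = vec![first];
	// Pack the remaining nibbles two per byte.
	let mut i = odd;
	while i < nibbles.len() {
		out.push((nibbles[i] << 4) + nibbles[i + 1]);
		i += 2;
	}
	out
}

fn main() {
	// Rows from the notation table in the deleted doc comment.
	assert_eq!(hex_prefix_encode(&[0, 0, 1, 2, 3, 4, 5], false), vec![0x10, 0x01, 0x23, 0x45]);
	assert_eq!(hex_prefix_encode(&[1, 2, 3, 4, 5], true), vec![0x31, 0x23, 0x45]);
	assert_eq!(hex_prefix_encode(&[1, 2, 3, 4], false), vec![0x00, 0x12, 0x34]);
	println!("hex-prefix examples match the table");
}
```

The `triehash` crate now pulled from parity-common is expected to encode keys the same way, which is what makes the straight dependency swap in this patch possible.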