resolving frontier conflicts
commit 453d880999
11 .editorconfig Normal file

@@ -0,0 +1,11 @@
root = true

[*]
indent_style=tab
indent_size=tab
tab_width=4
end_of_line=lf
charset=utf-8
trim_trailing_whitespace=true
max_line_length=120
insert_final_newline=true
3 .gitignore vendored

@@ -7,7 +7,8 @@
# Executables
*.exe

Cargo.lock
# Cargo lock in subs
**/Cargo.lock

# Generated by Cargo
**/target/
10 .travis.yml
@ -39,11 +39,11 @@ after_success: |
|
||||
wget https://github.com/SimonKagstrom/kcov/archive/master.tar.gz &&
|
||||
tar xzf master.tar.gz && mkdir kcov-master/build && cd kcov-master/build && cmake .. && make && make install DESTDIR=../tmp && cd ../.. &&
|
||||
cargo test --no-run ${KCOV_FEATURES} ${TARGETS} &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust target/kcov target/debug/deps/ethcore_util-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust target/kcov target/debug/deps/ethash-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust target/kcov target/debug/deps/ethcore-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust target/kcov target/debug/deps/ethsync-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust target/kcov target/debug/deps/ethcore_rpc-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethcore_util-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethash-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethcore-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethsync-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --exclude-pattern /.cargo,/root/.multirust,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests target/kcov target/debug/deps/ethcore_rpc-* &&
|
||||
./kcov-master/tmp/usr/local/bin/kcov --coveralls-id=${TRAVIS_JOB_ID} --exclude-pattern /.cargo,/root/.multirust target/kcov target/debug/parity-* &&
|
||||
[ $TRAVIS_BRANCH = master ] &&
|
||||
[ $TRAVIS_PULL_REQUEST = false ] &&
|
||||
|
12 CONTRIBUTING.md Normal file

@@ -0,0 +1,12 @@
# Contributing to Parity

## License

By contributing to Parity, you agree that your contributions will be
licensed under the [BSD License](LICENSE).

At the top of every source code file you alter, after the initial
licence section, please append a second section that reads:

Portions contributed by YOUR NAME are hereby placed under the BSD licence.
808 Cargo.lock generated Normal file

@@ -0,0 +1,808 @@
|
||||
[root]
|
||||
name = "parity"
|
||||
version = "0.9.0"
|
||||
dependencies = [
|
||||
"clippy 0.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ctrlc 1.0.1 (git+https://github.com/tomusdrw/rust-ctrlc.git)",
|
||||
"docopt 0.6.78 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"docopt_macros 0.6.81 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethcore 0.9.0",
|
||||
"ethcore-rpc 0.9.0",
|
||||
"ethcore-util 0.9.0",
|
||||
"ethsync 0.1.0",
|
||||
"fdlimit 0.1.0",
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aho-corasick"
|
||||
version = "0.4.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"memchr 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "arrayvec"
|
||||
version = "0.3.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"nodrop 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"odds 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "aster"
|
||||
version = "0.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "0.3.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "clippy"
|
||||
version = "0.0.41"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"regex-syntax 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"semver 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cookie"
|
||||
version = "0.1.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 0.2.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cookie"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "ctrlc"
|
||||
version = "1.0.1"
|
||||
source = "git+https://github.com/tomusdrw/rust-ctrlc.git#d8751b66b31d9698cbb11f8ef37155a8211a0683"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "docopt"
|
||||
version = "0.6.78"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"regex 0.1.48 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"strsim 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "docopt_macros"
|
||||
version = "0.6.81"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"docopt 0.6.78 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "elastic-array"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "env_logger"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 0.1.48 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "eth-secp256k1"
|
||||
version = "0.5.4"
|
||||
source = "git+https://github.com/arkpar/rust-secp256k1.git#321e6c22a83606d1875f89cb61c9cb37c7d249ae"
|
||||
dependencies = [
|
||||
"arrayvec 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"gcc 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ethash"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lru-cache 0.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"sha3 0.1.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ethcore"
|
||||
version = "0.9.0"
|
||||
dependencies = [
|
||||
"clippy 0.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"crossbeam 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethash 0.1.0",
|
||||
"ethcore-util 0.9.0",
|
||||
"heapsize 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"lazy_static 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rocksdb 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rust-crypto 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ethcore-rpc"
|
||||
version = "0.9.0"
|
||||
dependencies = [
|
||||
"clippy 0.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethcore 0.9.0",
|
||||
"ethcore-util 0.9.0",
|
||||
"ethsync 0.1.0",
|
||||
"jsonrpc-core 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"jsonrpc-http-server 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_macros 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ethcore-util"
|
||||
version = "0.9.0"
|
||||
dependencies = [
|
||||
"arrayvec 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"clippy 0.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"crossbeam 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"elastic-array 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"eth-secp256k1 0.5.4 (git+https://github.com/arkpar/rust-secp256k1.git)",
|
||||
"heapsize 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"igd 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"itertools 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"json-tests 0.1.0",
|
||||
"lazy_static 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"mio 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rocksdb 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rust-crypto 0.2.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"sha3 0.1.0",
|
||||
"slab 0.1.4 (git+https://github.com/arkpar/slab.git)",
|
||||
"target_info 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"tiny-keccak 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ethsync"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"clippy 0.0.41 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"env_logger 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ethcore 0.9.0",
|
||||
"ethcore-util 0.9.0",
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fdlimit"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gcc"
|
||||
version = "0.3.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "glob"
|
||||
version = "0.2.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "heapsize"
|
||||
version = "0.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hpack"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "httparse"
|
||||
version = "1.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "hyper"
|
||||
version = "0.6.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cookie 0.1.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"httparse 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"language-tags 0.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"mime 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicase 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 0.2.38 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper"
|
||||
version = "0.7.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cookie 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"httparse 1.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"language-tags 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"mime 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num_cpus 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"solicit 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"traitobject 0.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"typeable 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicase 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "igd"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"hyper 0.6.16 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex 0.1.48 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"xml-rs 0.1.26 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"xmltree 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "itertools"
|
||||
version = "0.4.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "json-tests"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"glob 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonrpc-core"
|
||||
version = "1.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"serde 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_json 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde_macros 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jsonrpc-http-server"
|
||||
version = "1.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"hyper 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"jsonrpc-core 1.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "kernel32-sys"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "language-tags"
|
||||
version = "0.0.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "language-tags"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "lazy_static"
|
||||
version = "0.1.15"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.1.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "linked-hash-map"
|
||||
version = "0.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.3.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lru-cache"
|
||||
version = "0.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"linked-hash-map 0.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matches"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "0.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mime"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mio"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"miow 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"net2 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"nix 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"slab 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "miow"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"net2 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "net2"
|
||||
version = "0.2.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"cfg-if 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"ws2_32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nix"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"bitflags 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nodrop"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"odds 0.2.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "num"
|
||||
version = "0.1.30"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num_cpus"
|
||||
version = "0.2.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "odds"
|
||||
version = "0.2.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "quasi"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "quasi_codegen"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"aster 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quasi_macros"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"quasi_codegen 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.3.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "0.1.48"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"aho-corasick 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"memchr 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"regex-syntax 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "rocksdb"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rust-crypto"
|
||||
version = "0.2.34"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"gcc 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"time 0.1.34 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rustc-serialize"
|
||||
version = "0.3.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "rustc_version"
|
||||
version = "0.1.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"semver 0.1.20 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "0.1.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "semver"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"nom 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "0.6.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_codegen"
|
||||
version = "0.6.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"aster 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quasi 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"quasi_macros 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"num 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_macros"
|
||||
version = "0.6.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"serde_codegen 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sha3"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"gcc 0.3.23 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "slab"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "slab"
|
||||
version = "0.1.4"
|
||||
source = "git+https://github.com/arkpar/slab.git#3c9284e1f010e394c9d0359b27464e8fb5c87bf0"
|
||||
|
||||
[[package]]
|
||||
name = "solicit"
|
||||
version = "0.4.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"hpack 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "target_info"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "time"
|
||||
version = "0.1.34"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tiny-keccak"
|
||||
version = "1.0.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "traitobject"
|
||||
version = "0.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "typeable"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "unicase"
|
||||
version = "1.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rustc_version 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-bidi"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-normalization"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "0.2.38"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "0.5.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"uuid 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "0.1.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "winapi"
|
||||
version = "0.2.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-build"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "ws2_32-sys"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"winapi 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xml-rs"
|
||||
version = "0.1.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"bitflags 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xml-rs"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"bitflags 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xmltree"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"xml-rs 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
@@ -11,13 +11,14 @@ env_logger = "0.3"
rustc-serialize = "0.3"
docopt = "0.6"
docopt_macros = "0.6"
ctrlc = "1.0"
clippy = "0.0.37"
ctrlc = { git = "https://github.com/tomusdrw/rust-ctrlc.git" }
clippy = "0.0.41"
ethcore-util = { path = "util" }
ethcore = { path = "ethcore" }
ethsync = { path = "sync" }
ethcore-rpc = { path = "rpc", optional = true }
fdlimit = { path = "util/fdlimit" }
target_info = "0.1"

[features]
default = ["rpc"]
@@ -19,7 +19,7 @@
# install rocksdb
add-apt-repository ppa:ethcore/ethcore
apt-get update
apt-get install -y --force-yes librocksdb
apt-get install -y --force-yes librocksdb-dev

# install multirust
curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sh -s -- --yes
9 cov.sh
@ -15,7 +15,12 @@ if ! type kcov > /dev/null; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cargo test --features ethcore/json-tests -p ethcore --no-run || exit $?
|
||||
cargo test -p ethash -p ethcore-util -p ethcore -p ethsync -p ethcore-rpc -p parity --no-run || exit $?
|
||||
rm -rf target/coverage
|
||||
mkdir -p target/coverage
|
||||
kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1 --include-pattern src --verify target/coverage target/debug/deps/ethcore*
|
||||
kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethcore-*
|
||||
kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethash-*
|
||||
kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethcore_util-*
|
||||
kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethsync-*
|
||||
kcov --exclude-pattern ~/.multirust,rocksdb,secp256k1,src/tests,util/json-tests,util/src/network/tests,sync/src/tests,ethcore/src/tests,ethcore/src/evm/tests --include-pattern src --verify target/coverage target/debug/deps/ethcore_rpc-*
|
||||
xdg-open target/coverage/index.html
|
||||
|
@@ -8,6 +8,7 @@ RUN apt-get update && \
# add-apt-repository
software-properties-common \
curl \
gcc \
wget \
git \
# evmjit dependencies
@@ -10,6 +10,7 @@ RUN apt-get update && \
curl \
wget \
git \
gcc \
# evmjit dependencies
zlib1g-dev \
libedit-dev
@@ -3,6 +3,7 @@ FROM ubuntu:14.04
# install tools and dependencies
RUN apt-get update && \
apt-get install -y \
gcc \
curl \
git \
# add-apt-repository
@@ -18,7 +18,7 @@ ethcore-util = { path = "../util" }
evmjit = { path = "../evmjit", optional = true }
ethash = { path = "../ethash" }
num_cpus = "0.2"
clippy = "0.0.37"
clippy = "0.0.41"
crossbeam = "0.1.5"
lazy_static = "0.1"
@@ -3,7 +3,7 @@
"engineName": "Ethash",
"params": {
"accountStartNonce": "0x00",
"frontierCompatibilityModeLimit": "0xf4240fff",
"frontierCompatibilityModeLimit": "0x10c8e0",
"maximumExtraDataSize": "0x20",
"tieBreakingGas": false,
"minGasLimit": "0x1388",
@@ -30,7 +30,7 @@
"enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@52.16.188.185:30303",
"enode://de471bccee3d042261d52e9bff31458daecc406142b401d4cd848f677479f73104b9fdeb090af9583d3391b7f10cb2ba9e26865dd5fca4fcdc0fb1e3b723c786@54.94.239.50:30303",
"enode://1118980bf48b0a3640bdba04e0fe78b1add18e1cd99bf22d53daac1fd9972ad650df52176e7c7d89d1114cfef2bc23a2959aa54998a46afcf7d91809f0855082@52.74.57.123:30303",
"enode://859bbe6926fc161d218f62bd2efe0b4f6980205c00a5b928ccee39c94c440b73a054ece5db36beddd71963fbd296af61ec72a591f72a2299f9a046bd6d6ce1a9@parity-node-zero.ethcore.io:30303"
"enode://248f12bc8b18d5289358085520ac78cd8076485211e6d96ab0bc93d6cd25442db0ce3a937dc404f64f207b0b9aed50e25e98ce32af5ac7cb321ff285b97de485@parity-node-zero.ethcore.io:30303"
],
"accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "linear": { "base": 3000, "word": 0 } } },
@@ -3,7 +3,7 @@
"engineName": "Ethash",
"params": {
"accountStartNonce": "0x00",
"frontierCompatibilityModeLimit": "0xf4240fff",
"frontierCompatibilityModeLimit": "0x10c8e0",
"maximumExtraDataSize": "0x20",
"tieBreakingGas": false,
"minGasLimit": "0x1388",
@@ -3,7 +3,7 @@
"engineName": "Ethash",
"params": {
"accountStartNonce": "0x0100000",
"frontierCompatibilityModeLimit": "0xdbba0",
"frontierCompatibilityModeLimit": "0x10c8e0",
"maximumExtraDataSize": "0x20",
"tieBreakingGas": false,
"minGasLimit": "0x1388",
@@ -198,7 +198,11 @@ impl Account {
	pub fn add_balance(&mut self, x: &U256) { self.balance = self.balance + *x; }

	/// Increment the nonce of the account by one.
	pub fn sub_balance(&mut self, x: &U256) { self.balance = self.balance - *x; }
	/// Panics if balance is less than `x`
	pub fn sub_balance(&mut self, x: &U256) {
		assert!(self.balance >= *x);
		self.balance = self.balance - *x;
	}

	/// Commit the `storage_overlay` to the backing DB and update `storage_root`.
	pub fn commit_storage(&mut self, db: &mut AccountDBMut) {
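The hunk above turns a silent balance underflow into a hard assertion. A minimal caller-side sketch, not part of this commit, of how the new precondition would typically be respected; a `balance()` read accessor on `Account` is an assumption here:

// Sketch only (not from this commit): check the balance before calling the
// asserting sub_balance(); `balance()` as a getter is assumed.
fn try_sub_balance(account: &mut Account, x: &U256) -> bool {
	if *account.balance() >= *x {
		account.sub_balance(x); // cannot panic: the precondition was checked above
		true
	} else {
		false
	}
}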
@@ -72,14 +72,14 @@ impl AccountDiff {
	pub fn diff_pod(pre: Option<&PodAccount>, post: Option<&PodAccount>) -> Option<AccountDiff> {
		match (pre, post) {
			(None, Some(x)) => Some(AccountDiff {
				balance: Diff::Born(x.balance.clone()),
				nonce: Diff::Born(x.nonce.clone()),
				balance: Diff::Born(x.balance),
				nonce: Diff::Born(x.nonce),
				code: Diff::Born(x.code.clone()),
				storage: x.storage.iter().map(|(k, v)| (k.clone(), Diff::Born(v.clone()))).collect(),
			}),
			(Some(x), None) => Some(AccountDiff {
				balance: Diff::Died(x.balance.clone()),
				nonce: Diff::Died(x.nonce.clone()),
				balance: Diff::Died(x.balance),
				nonce: Diff::Died(x.nonce),
				code: Diff::Died(x.code.clone()),
				storage: x.storage.iter().map(|(k, v)| (k.clone(), Diff::Died(v.clone()))).collect(),
			}),
@@ -88,8 +88,8 @@ impl AccountDiff {
				.filter(|k| pre.storage.get(k).unwrap_or(&H256::new()) != post.storage.get(k).unwrap_or(&H256::new()))
				.collect();
			let r = AccountDiff {
				balance: Diff::new(pre.balance.clone(), post.balance.clone()),
				nonce: Diff::new(pre.nonce.clone(), post.nonce.clone()),
				balance: Diff::new(pre.balance, post.balance),
				nonce: Diff::new(pre.nonce, post.nonce),
				code: Diff::new(pre.code.clone(), post.code.clone()),
				storage: storage.into_iter().map(|k|
					(k.clone(), Diff::new(
@@ -24,6 +24,7 @@ pub type LogBloom = H2048;
/// Constant 2048-bit datum for 0. Often used as a default.
pub static ZERO_LOGBLOOM: LogBloom = H2048([0x00; 256]);

#[allow(enum_variant_names)]
/// Semantic boolean for when a seal/signature is included.
pub enum Seal {
	/// The seal/signature is included.
@@ -26,12 +26,11 @@ use views::*;
use header::*;
use service::*;
use client::BlockStatus;
use util::panics::*;

/// Block queue status
#[derive(Debug)]
pub struct BlockQueueInfo {
	/// Indicates that queue is full
	pub full: bool,
	/// Number of queued blocks pending verification
	pub unverified_queue_size: usize,
	/// Number of verified queued blocks pending import
@@ -46,11 +45,22 @@ impl BlockQueueInfo {

	/// The size of the unverified and verifying queues.
	pub fn incomplete_queue_size(&self) -> usize { self.unverified_queue_size + self.verifying_queue_size }

	/// Indicates that queue is full
	pub fn is_full(&self) -> bool {
		self.unverified_queue_size + self.verified_queue_size + self.verifying_queue_size > MAX_UNVERIFIED_QUEUE_SIZE
	}

	/// Indicates that queue is empty
	pub fn is_empty(&self) -> bool {
		self.unverified_queue_size + self.verified_queue_size + self.verifying_queue_size == 0
	}
}

/// A queue of blocks. Sits between network or other I/O and the BlockChain.
/// Sorts them ready for blockchain insertion.
pub struct BlockQueue {
	panic_handler: Arc<PanicHandler>,
	engine: Arc<Box<Engine>>,
	more_to_verify: Arc<Condvar>,
	verification: Arc<Mutex<Verification>>,
@@ -77,6 +87,7 @@ struct QueueSignal {
}

impl QueueSignal {
	#[allow(bool_comparison)]
	fn set(&self) {
		if self.signalled.compare_and_swap(false, true, AtomicOrdering::Relaxed) == false {
			self.message_channel.send(UserMessage(SyncMessage::BlockVerified)).expect("Error sending BlockVerified message");
@@ -105,6 +116,7 @@ impl BlockQueue {
		let ready_signal = Arc::new(QueueSignal { signalled: AtomicBool::new(false), message_channel: message_channel });
		let deleting = Arc::new(AtomicBool::new(false));
		let empty = Arc::new(Condvar::new());
		let panic_handler = PanicHandler::new_in_arc();

		let mut verifiers: Vec<JoinHandle<()>> = Vec::new();
		let thread_count = max(::num_cpus::get(), 3) - 2;
@@ -115,11 +127,21 @@ impl BlockQueue {
			let ready_signal = ready_signal.clone();
			let empty = empty.clone();
			let deleting = deleting.clone();
			verifiers.push(thread::Builder::new().name(format!("Verifier #{}", i)).spawn(move || BlockQueue::verify(verification, engine, more_to_verify, ready_signal, deleting, empty))
				.expect("Error starting block verification thread"));
			let panic_handler = panic_handler.clone();
			verifiers.push(
				thread::Builder::new()
					.name(format!("Verifier #{}", i))
					.spawn(move || {
						panic_handler.catch_panic(move || {
							BlockQueue::verify(verification, engine, more_to_verify, ready_signal, deleting, empty)
						}).unwrap()
					})
					.expect("Error starting block verification thread")
			);
		}
		BlockQueue {
			engine: engine,
			panic_handler: panic_handler,
			ready_signal: ready_signal.clone(),
			more_to_verify: more_to_verify.clone(),
			verification: verification.clone(),
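The rewritten spawn above is the pattern this commit applies to each verifier thread: the thread body is wrapped so that a panic inside `BlockQueue::verify` is reported to registered listeners instead of dying silently. A stand-alone sketch of the same wrapping, not from the commit, assuming `PanicHandler::new_in_arc`, `catch_panic` and `thread` behave as in the hunk:

// Sketch only: the catch_panic wrapping used for the verifier threads.
let panic_handler = PanicHandler::new_in_arc();
let handler = panic_handler.clone();
let worker = thread::Builder::new()
	.name("worker".into())
	.spawn(move || {
		// catch_panic runs the closure and notifies on_panic listeners if it panics
		handler.catch_panic(move || {
			// long-running work goes here
		}).unwrap()
	})
	.expect("Error starting worker thread");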
@@ -287,7 +309,6 @@ impl BlockQueue {
		for h in hashes {
			processing.remove(&h);
		}
		//TODO: reward peers
	}

	/// Removes up to `max` verified blocks from the queue
@@ -310,7 +331,6 @@ impl BlockQueue {
	pub fn queue_info(&self) -> BlockQueueInfo {
		let verification = self.verification.lock().unwrap();
		BlockQueueInfo {
			full: verification.unverified.len() + verification.verifying.len() + verification.verified.len() >= MAX_UNVERIFIED_QUEUE_SIZE,
			verified_queue_size: verification.verified.len(),
			unverified_queue_size: verification.unverified.len(),
			verifying_queue_size: verification.verifying.len(),
@@ -318,6 +338,12 @@ impl BlockQueue {
	}
}

impl MayPanic for BlockQueue {
	fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
		self.panic_handler.on_panic(closure);
	}
}

impl Drop for BlockQueue {
	fn drop(&mut self) {
		self.clear();
@@ -395,4 +421,14 @@ mod tests {
			panic!("error importing block that has already been drained ({:?})", e);
		}
	}

	#[test]
	fn returns_empty_once_finished() {
		let mut queue = get_test_queue();
		queue.import_block(get_good_dummy_block()).expect("error importing block that is valid by definition");
		queue.flush();
		queue.drain(1);

		assert!(queue.queue_info().is_empty());
	}
}
@@ -85,6 +85,9 @@ pub trait BlockProvider {
	/// Get the hash of given block's number.
	fn block_hash(&self, index: BlockNumber) -> Option<H256>;

	/// Get the address of transaction with given hash.
	fn transaction_address(&self, hash: &H256) -> Option<TransactionAddress>;

	/// Get the partial-header of a block.
	fn block_header(&self, hash: &H256) -> Option<Header> {
		self.block(hash).map(|bytes| BlockView::new(&bytes).header())
@@ -107,10 +110,15 @@ pub trait BlockProvider {
		self.block(hash).map(|bytes| BlockView::new(&bytes).header_view().number())
	}

	/// Get transaction with given transaction hash.
	fn transaction(&self, address: &TransactionAddress) -> Option<LocalizedTransaction> {
		self.block(&address.block_hash).and_then(|bytes| BlockView::new(&bytes).localized_transaction_at(address.index))
	}

	/// Get a list of transactions for a given block.
	/// Returns None if block deos not exist.
	fn transactions(&self, hash: &H256) -> Option<Vec<SignedTransaction>> {
		self.block(hash).map(|bytes| BlockView::new(&bytes).transactions())
	/// Returns None if block does not exist.
	fn transactions(&self, hash: &H256) -> Option<Vec<LocalizedTransaction>> {
		self.block(hash).map(|bytes| BlockView::new(&bytes).localized_transactions())
	}

	/// Returns reference to genesis hash.
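Taken together, the two methods added above give a hash-to-transaction lookup; the new test at the bottom of this file exercises exactly that chain. A small sketch, not from the commit, of how a caller might combine them:

// Sketch only: resolve a transaction hash via the new BlockProvider methods.
fn find_tx(chain: &BlockChain, hash: &H256) -> Option<LocalizedTransaction> {
	chain.transaction_address(hash)
		.and_then(|addr| chain.transaction(&addr))
}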
@@ -201,6 +209,11 @@ impl BlockProvider for BlockChain {
	fn block_hash(&self, index: BlockNumber) -> Option<H256> {
		self.query_extras(&index, &self.block_hashes)
	}

	/// Get the address of transaction with given hash.
	fn transaction_address(&self, hash: &H256) -> Option<TransactionAddress> {
		self.query_extras(hash, &self.transaction_addresses)
	}
}

const COLLECTION_QUEUE_SIZE: usize = 8;
@@ -474,6 +487,14 @@ impl BlockChain {
		parent_details.children.push(hash.clone());
		batch.put_extras(&parent_hash, &parent_details);

		// update transaction addresses
		for (i, tx_hash) in block.transaction_hashes().iter().enumerate() {
			batch.put_extras(tx_hash, &TransactionAddress {
				block_hash: hash.clone(),
				index: i
			});
		}

		// if it's not new best block, just return
		if !is_new_best {
			return (batch, None, details);
@ -824,4 +845,21 @@ mod tests {
|
||||
let bc = bc_result.reference();
|
||||
assert_eq!(bc.best_block_number(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn find_transaction_by_hash() {
|
||||
let genesis = "f901fcf901f7a00000000000000000000000000000000000000000000000000000000000000000a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a0af81e09f8c46ca322193edfda764fa7e88e81923f802f1d325ec0b0308ac2cd0a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200008083023e38808454c98c8142a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421880102030405060708c0c0".from_hex().unwrap();
|
||||
let b1 = "f904a8f901faa0ce1f26f798dd03c8782d63b3e42e79a64eaea5694ea686ac5d7ce3df5171d1aea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a0a65c2364cd0f1542d761823dc0109c6b072f14c20459598c5455c274601438f4a070616ebd7ad2ed6fb7860cf7e9df00163842351c38a87cac2c1cb193895035a2a05c5b4fc43c2d45787f54e1ae7d27afdb4ad16dfc567c5692070d5c4556e0b1d7b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000830200000183023ec683021536845685109780a029f07836e4e59229b3a065913afc27702642c683bba689910b2b2fd45db310d3888957e6d004a31802f902a7f85f800a8255f094aaaf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca0575da4e21b66fa764be5f74da9389e67693d066fb0d1312e19e17e501da00ecda06baf5a5327595f6619dfc2fcb3f2e6fb410b5810af3cb52d0e7508038e91a188f85f010a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba04fa966bf34b93abc1bcd665554b7f316b50f928477b50be0f3285ead29d18c5ba017bba0eeec1625ab433746955e125d46d80b7fdc97386c51266f842d8e02192ef85f020a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca004377418ae981cc32b1312b4a427a1d69a821b28db8584f5f2bd8c6d42458adaa053a1dba1af177fac92f3b6af0a9fa46a22adf56e686c93794b6a012bf254abf5f85f030a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ca04fe13febd28a05f4fcb2f451d7ddc2dda56486d9f8c79a62b0ba4da775122615a0651b2382dd402df9ebc27f8cb4b2e0f3cea68dda2dca0ee9603608f0b6f51668f85f040a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba078e6a0ba086a08f8450e208a399bb2f2d2a0d984acd2517c7c7df66ccfab567da013254002cd45a97fac049ae00afbc43ed0d9961d0c56a3b2382c80ce41c198ddf85f050a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba0a7174d8f43ea71c8e3ca9477691add8d80ac8e0ed89d8d8b572041eef81f4a54a0534ea2e28ec4da3b5b944b18c51ec84a5cf35f5b3343c5fb86521fd2d388f506f85f060a82520894bbbf5374fce5edbc8e2a8697c15331677e6ebf0b0a801ba034bd04065833536a10c77ee2a43a5371bc6d34837088b861dd9d4b7f44074b59a078807715786a13876d3455716a6b9cb2186b7a4887a5c31160fc877454958616c0".from_hex().unwrap();
|
||||
let b1_hash = H256::from_str("f53f268d23a71e85c7d6d83a9504298712b84c1a2ba220441c86eeda0bf0b6e3").unwrap();
|
||||
|
||||
let temp = RandomTempPath::new();
|
||||
let bc = BlockChain::new(&genesis, temp.as_path());
|
||||
bc.insert_block(&b1);
|
||||
|
||||
let transactions = bc.transactions(&b1_hash).unwrap();
|
||||
assert_eq!(transactions.len(), 7);
|
||||
for t in transactions {
|
||||
assert_eq!(bc.transaction(&bc.transaction_address(&t.hash()).unwrap()).unwrap(), t);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -17,6 +17,7 @@
//! Blockchain database client.

use util::*;
use util::panics::*;
use rocksdb::{Options, DB, DBCompactionStyle};
use blockchain::{BlockChain, BlockProvider, CacheSize};
use views::BlockView;
@@ -27,12 +28,38 @@ use spec::Spec;
use engine::Engine;
use views::HeaderView;
use block_queue::{BlockQueue, BlockQueueInfo};
use service::NetSyncMessage;
use service::{NetSyncMessage, SyncMessage};
use env_info::LastHashes;
use verification::*;
use block::*;
use transaction::LocalizedTransaction;
use extras::TransactionAddress;
pub use blockchain::TreeRoute;

/// Uniquely identifies block.
#[derive(Debug, PartialEq, Clone)]
pub enum BlockId {
	/// Block's sha3.
	/// Querying by hash is always faster.
	Hash(H256),
	/// Block number within canon blockchain.
	Number(BlockNumber),
	/// Earliest block (genesis).
	Earliest,
	/// Latest mined block.
	Latest
}

/// Uniquely identifies transaction.
#[derive(Debug, PartialEq, Clone)]
pub enum TransactionId {
	/// Transaction's sha3.
	Hash(H256),
	/// Block id and transaction index within this block.
	/// Querying by block position is always faster.
	Location(BlockId, usize)
}

/// General block status
#[derive(Debug, Eq, PartialEq)]
pub enum BlockStatus {
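The identifiers above replace the separate by-hash and by-number getters in the trait that follows. A hedged usage sketch, not from the commit; `client`, `block_hash` and an implementation of `BlockChainClient` are assumed:

// Sketch only: querying the reworked API with the new identifier enums.
let latest_block = client.block(BlockId::Latest);
let genesis_header = client.block_header(BlockId::Number(0));
let first_tx = client.transaction(TransactionId::Location(BlockId::Hash(block_hash), 0));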
@@ -69,37 +96,27 @@ impl fmt::Display for BlockChainInfo {

/// Blockchain database client. Owns and manages a blockchain and a block queue.
pub trait BlockChainClient : Sync + Send {
	/// Get raw block header data by block header hash.
	fn block_header(&self, hash: &H256) -> Option<Bytes>;
	/// Get raw block header data by block id.
	fn block_header(&self, id: BlockId) -> Option<Bytes>;

	/// Get raw block body data by block header hash.
	/// Get raw block body data by block id.
	/// Block body is an RLP list of two items: uncles and transactions.
	fn block_body(&self, hash: &H256) -> Option<Bytes>;
	fn block_body(&self, id: BlockId) -> Option<Bytes>;

	/// Get raw block data by block header hash.
	fn block(&self, hash: &H256) -> Option<Bytes>;
	fn block(&self, id: BlockId) -> Option<Bytes>;

	/// Get block status by block header hash.
	fn block_status(&self, hash: &H256) -> BlockStatus;
	fn block_status(&self, id: BlockId) -> BlockStatus;

	/// Get block total difficulty.
	fn block_total_difficulty(&self, hash: &H256) -> Option<U256>;
	fn block_total_difficulty(&self, id: BlockId) -> Option<U256>;

	/// Get raw block header data by block number.
	fn block_header_at(&self, n: BlockNumber) -> Option<Bytes>;
	/// Get address code.
	fn code(&self, address: &Address) -> Option<Bytes>;

	/// Get raw block body data by block number.
	/// Block body is an RLP list of two items: uncles and transactions.
	fn block_body_at(&self, n: BlockNumber) -> Option<Bytes>;

	/// Get raw block data by block number.
	fn block_at(&self, n: BlockNumber) -> Option<Bytes>;

	/// Get block status by block number.
	fn block_status_at(&self, n: BlockNumber) -> BlockStatus;

	/// Get block total difficulty.
	fn block_total_difficulty_at(&self, n: BlockNumber) -> Option<U256>;
	/// Get transaction with given hash.
	fn transaction(&self, id: TransactionId) -> Option<LocalizedTransaction>;

	/// Get a tree route between `from` and `to`.
	/// See `BlockChain::tree_route`.
@ -125,7 +142,7 @@ pub trait BlockChainClient : Sync + Send {
|
||||
|
||||
/// Get the best block header.
|
||||
fn best_block_header(&self) -> Bytes {
|
||||
self.block_header(&self.chain_info().best_block_hash).unwrap()
|
||||
self.block_header(BlockId::Hash(self.chain_info().best_block_hash)).unwrap()
|
||||
}
|
||||
}
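
// Editor's illustrative sketch (not part of this commit): how a caller might use
// the id-based accessors introduced above. The function name and the `client`
// parameter are assumptions, not code from this repository.
fn example_queries(client: &BlockChainClient) {
	// Query by chain position, by hash, and via the convenience variants.
	let genesis_header = client.block_header(BlockId::Earliest);
	let second_body = client.block_body(BlockId::Number(1));
	let best = client.block(BlockId::Hash(client.chain_info().best_block_hash));
	let status = client.block_status(BlockId::Latest);
	let _ = (genesis_header, second_body, best, status);
}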

@ -157,11 +174,12 @@ pub struct Client {
state_db: Mutex<JournalDB>,
block_queue: RwLock<BlockQueue>,
report: RwLock<ClientReport>,
import_lock: Mutex<()>
import_lock: Mutex<()>,
panic_handler: Arc<PanicHandler>,
}

const HISTORY: u64 = 1000;
const CLIENT_DB_VER_STR: &'static str = "1.0";
const CLIENT_DB_VER_STR: &'static str = "2.1";

impl Client {
/// Create a new client with given spec and DB path.
@ -204,13 +222,19 @@ impl Client {
if state_db.is_empty() && engine.spec().ensure_db_good(&mut state_db) {
state_db.commit(0, &engine.spec().genesis_header().hash(), None).expect("Error commiting genesis state to state DB");
}

let block_queue = BlockQueue::new(engine.clone(), message_channel);
let panic_handler = PanicHandler::new_in_arc();
panic_handler.forward_from(&block_queue);

Ok(Arc::new(Client {
chain: chain,
engine: engine.clone(),
engine: engine,
state_db: Mutex::new(state_db),
block_queue: RwLock::new(BlockQueue::new(engine, message_channel)),
block_queue: RwLock::new(block_queue),
report: RwLock::new(Default::default()),
import_lock: Mutex::new(()),
panic_handler: panic_handler
}))
}
|
||||
@ -220,7 +244,7 @@ impl Client {
|
||||
}
|
||||
|
||||
/// This is triggered by a message coming from a block queue when the block is ready for insertion
|
||||
pub fn import_verified_blocks(&self, _io: &IoChannel<NetSyncMessage>) -> usize {
|
||||
pub fn import_verified_blocks(&self, io: &IoChannel<NetSyncMessage>) -> usize {
|
||||
let mut ret = 0;
|
||||
let mut bad = HashSet::new();
|
||||
let _import_lock = self.import_lock.lock();
|
||||
@ -294,6 +318,9 @@ impl Client {
|
||||
ret += 1;
|
||||
}
|
||||
self.block_queue.write().unwrap().mark_as_good(&good_blocks);
|
||||
if !good_blocks.is_empty() && self.block_queue.read().unwrap().queue_info().is_empty() {
|
||||
io.send(NetworkIoMessage::User(SyncMessage::BlockVerified)).unwrap();
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
@ -321,60 +348,70 @@ impl Client {
|
||||
pub fn configure_cache(&self, pref_cache_size: usize, max_cache_size: usize) {
|
||||
self.chain.write().unwrap().configure_cache(pref_cache_size, max_cache_size);
|
||||
}
|
||||
|
||||
fn block_hash(chain: &BlockChain, id: BlockId) -> Option<H256> {
|
||||
match id {
|
||||
BlockId::Hash(hash) => Some(hash),
|
||||
BlockId::Number(number) => chain.block_hash(number),
|
||||
BlockId::Earliest => chain.block_hash(0),
|
||||
BlockId::Latest => Some(chain.best_block_hash())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BlockChainClient for Client {
|
||||
fn block_header(&self, hash: &H256) -> Option<Bytes> {
|
||||
self.chain.read().unwrap().block(hash).map(|bytes| BlockView::new(&bytes).rlp().at(0).as_raw().to_vec())
|
||||
fn block_header(&self, id: BlockId) -> Option<Bytes> {
|
||||
let chain = self.chain.read().unwrap();
|
||||
Self::block_hash(&chain, id).and_then(|hash| chain.block(&hash).map(|bytes| BlockView::new(&bytes).rlp().at(0).as_raw().to_vec()))
|
||||
}
|
||||
|
||||
fn block_body(&self, hash: &H256) -> Option<Bytes> {
|
||||
self.chain.read().unwrap().block(hash).map(|bytes| {
|
||||
let rlp = Rlp::new(&bytes);
|
||||
let mut body = RlpStream::new();
|
||||
body.append_raw(rlp.at(1).as_raw(), 1);
|
||||
body.append_raw(rlp.at(2).as_raw(), 1);
|
||||
body.out()
|
||||
fn block_body(&self, id: BlockId) -> Option<Bytes> {
|
||||
let chain = self.chain.read().unwrap();
|
||||
Self::block_hash(&chain, id).and_then(|hash| {
|
||||
chain.block(&hash).map(|bytes| {
|
||||
let rlp = Rlp::new(&bytes);
|
||||
let mut body = RlpStream::new_list(2);
|
||||
body.append_raw(rlp.at(1).as_raw(), 1);
|
||||
body.append_raw(rlp.at(2).as_raw(), 1);
|
||||
body.out()
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn block(&self, hash: &H256) -> Option<Bytes> {
|
||||
self.chain.read().unwrap().block(hash)
|
||||
fn block(&self, id: BlockId) -> Option<Bytes> {
|
||||
let chain = self.chain.read().unwrap();
|
||||
Self::block_hash(&chain, id).and_then(|hash| {
|
||||
chain.block(&hash)
|
||||
})
|
||||
}
|
||||
|
||||
fn block_status(&self, hash: &H256) -> BlockStatus {
|
||||
if self.chain.read().unwrap().is_known(&hash) {
|
||||
BlockStatus::InChain
|
||||
} else {
|
||||
self.block_queue.read().unwrap().block_status(hash)
|
||||
}
|
||||
}
|
||||
|
||||
fn block_total_difficulty(&self, hash: &H256) -> Option<U256> {
|
||||
self.chain.read().unwrap().block_details(hash).map(|d| d.total_difficulty)
|
||||
}
|
||||
|
||||
fn block_header_at(&self, n: BlockNumber) -> Option<Bytes> {
|
||||
self.chain.read().unwrap().block_hash(n).and_then(|h| self.block_header(&h))
|
||||
}
|
||||
|
||||
fn block_body_at(&self, n: BlockNumber) -> Option<Bytes> {
|
||||
self.chain.read().unwrap().block_hash(n).and_then(|h| self.block_body(&h))
|
||||
}
|
||||
|
||||
fn block_at(&self, n: BlockNumber) -> Option<Bytes> {
|
||||
self.chain.read().unwrap().block_hash(n).and_then(|h| self.block(&h))
|
||||
}
|
||||
|
||||
fn block_status_at(&self, n: BlockNumber) -> BlockStatus {
|
||||
match self.chain.read().unwrap().block_hash(n) {
|
||||
Some(h) => self.block_status(&h),
|
||||
fn block_status(&self, id: BlockId) -> BlockStatus {
|
||||
let chain = self.chain.read().unwrap();
|
||||
match Self::block_hash(&chain, id) {
|
||||
Some(ref hash) if chain.is_known(hash) => BlockStatus::InChain,
|
||||
Some(hash) => self.block_queue.read().unwrap().block_status(&hash),
|
||||
None => BlockStatus::Unknown
|
||||
}
|
||||
}
|
||||
|
||||
fn block_total_difficulty_at(&self, n: BlockNumber) -> Option<U256> {
|
||||
self.chain.read().unwrap().block_hash(n).and_then(|h| self.block_total_difficulty(&h))
|
||||
fn block_total_difficulty(&self, id: BlockId) -> Option<U256> {
|
||||
let chain = self.chain.read().unwrap();
|
||||
Self::block_hash(&chain, id).and_then(|hash| chain.block_details(&hash)).map(|d| d.total_difficulty)
|
||||
}
|
||||
|
||||
fn code(&self, address: &Address) -> Option<Bytes> {
|
||||
self.state().code(address)
|
||||
}
|
||||
|
||||
fn transaction(&self, id: TransactionId) -> Option<LocalizedTransaction> {
|
||||
let chain = self.chain.read().unwrap();
|
||||
match id {
|
||||
TransactionId::Hash(ref hash) => chain.transaction_address(hash),
|
||||
TransactionId::Location(id, index) => Self::block_hash(&chain, id).map(|hash| TransactionAddress {
|
||||
block_hash: hash,
|
||||
index: index
|
||||
})
|
||||
}.and_then(|address| chain.transaction(&address))
|
||||
}
|
||||
|
||||
fn tree_route(&self, from: &H256, to: &H256) -> Option<TreeRoute> {
|
||||
@ -394,7 +431,7 @@ impl BlockChainClient for Client {
|
||||
if self.chain.read().unwrap().is_known(&header.hash()) {
|
||||
return Err(ImportError::AlreadyInChain);
|
||||
}
|
||||
if self.block_status(&header.parent_hash) == BlockStatus::Unknown {
|
||||
if self.block_status(BlockId::Hash(header.parent_hash)) == BlockStatus::Unknown {
|
||||
return Err(ImportError::UnknownParent);
|
||||
}
|
||||
self.block_queue.write().unwrap().import_block(bytes)
|
||||
@ -419,3 +456,9 @@ impl BlockChainClient for Client {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl MayPanic for Client {
|
||||
fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
|
||||
self.panic_handler.on_panic(closure);
|
||||
}
|
||||
}
|
||||
|
@ -20,26 +20,6 @@ use util::*;
|
||||
use header::BlockNumber;
|
||||
use basic_types::LogBloom;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
/// Error indicating an expected value was not found.
|
||||
pub struct Mismatch<T: fmt::Debug> {
|
||||
/// Value expected.
|
||||
pub expected: T,
|
||||
/// Value found.
|
||||
pub found: T,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
/// Error indicating value found is outside of a valid range.
|
||||
pub struct OutOfBounds<T: fmt::Debug> {
|
||||
/// Minimum allowed value.
|
||||
pub min: Option<T>,
|
||||
/// Maximum allowed value.
|
||||
pub max: Option<T>,
|
||||
/// Value found.
|
||||
pub found: T,
|
||||
}
|
||||
|
||||
/// Result of executing the transaction.
|
||||
#[derive(PartialEq, Debug)]
|
||||
pub enum ExecutionError {
|
||||
|
@ -23,8 +23,6 @@ use spec::*;
|
||||
use engine::*;
|
||||
use evm::Schedule;
|
||||
use evm::Factory;
|
||||
#[cfg(test)]
|
||||
use tests::helpers::*;
|
||||
|
||||
/// Engine using Ethash proof-of-work consensus algorithm, suitable for Ethereum
|
||||
/// mainnet chains in the Olympic, Frontier and Homestead eras.
|
||||
@ -49,6 +47,17 @@ impl Ethash {
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn new_test(spec: Spec) -> Ethash {
|
||||
Ethash {
|
||||
spec: spec,
|
||||
pow: EthashManager::new(),
|
||||
factory: Factory::default(),
|
||||
u64_params: RwLock::new(HashMap::new()),
|
||||
u256_params: RwLock::new(HashMap::new())
|
||||
}
|
||||
}
|
||||
|
||||
fn u64_param(&self, name: &str) -> u64 {
|
||||
*self.u64_params.write().unwrap().entry(name.to_owned()).or_insert_with(||
|
||||
self.spec().engine_params.get(name).map_or(0u64, |a| decode(&a)))
|
||||
@ -123,6 +132,11 @@ impl Engine for Ethash {
|
||||
|
||||
fn verify_block_basic(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
|
||||
// check the seal fields.
|
||||
if header.seal.len() != self.seal_fields() {
|
||||
return Err(From::from(BlockError::InvalidSealArity(
|
||||
Mismatch { expected: self.seal_fields(), found: header.seal.len() }
|
||||
)));
|
||||
}
|
||||
try!(UntrustedRlp::new(&header.seal[0]).as_val::<H256>());
|
||||
try!(UntrustedRlp::new(&header.seal[1]).as_val::<H64>());
|
||||
|
||||
@ -143,6 +157,11 @@ impl Engine for Ethash {
|
||||
}
|
||||
|
||||
fn verify_block_unordered(&self, header: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
|
||||
if header.seal.len() != self.seal_fields() {
|
||||
return Err(From::from(BlockError::InvalidSealArity(
|
||||
Mismatch { expected: self.seal_fields(), found: header.seal.len() }
|
||||
)));
|
||||
}
|
||||
let result = self.pow.compute_light(header.number as u64, &Ethash::to_ethash(header.bare_hash()), header.nonce().low_u64());
|
||||
let mix = Ethash::from_ethash(result.mix_hash);
|
||||
let difficulty = Ethash::boundary_to_difficulty(&Ethash::from_ethash(result.value));
|
||||
@ -156,6 +175,11 @@ impl Engine for Ethash {
|
||||
}
|
||||
|
||||
fn verify_block_family(&self, header: &Header, parent: &Header, _block: Option<&[u8]>) -> result::Result<(), Error> {
|
||||
// we should not calculate difficulty for genesis blocks
|
||||
if header.number() == 0 {
|
||||
return Err(From::from(BlockError::RidiculousNumber(OutOfBounds { min: Some(1), max: None, found: header.number() })));
|
||||
}
|
||||
|
||||
// Check difficulty is correct given the two timestamps.
|
||||
let expected_difficulty = self.calculate_difficuty(header, parent);
|
||||
if header.difficulty != expected_difficulty {
|
||||
@ -242,38 +266,236 @@ impl Header {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn on_close_block() {
|
||||
use super::*;
|
||||
let engine = new_morden().to_engine().unwrap();
|
||||
let genesis_header = engine.spec().genesis_header();
|
||||
let mut db_result = get_temp_journal_db();
|
||||
let mut db = db_result.take();
|
||||
engine.spec().ensure_db_good(&mut db);
|
||||
let last_hashes = vec![genesis_header.hash()];
|
||||
let b = OpenBlock::new(engine.deref(), db, &genesis_header, &last_hashes, Address::zero(), vec![]);
|
||||
let b = b.close();
|
||||
assert_eq!(b.state().balance(&Address::zero()), U256::from_str("4563918244f40000").unwrap());
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
extern crate ethash;
|
||||
|
||||
use common::*;
|
||||
use block::*;
|
||||
use engine::*;
|
||||
use tests::helpers::*;
|
||||
use super::{Ethash};
|
||||
use super::super::new_morden;
|
||||
|
||||
#[test]
|
||||
fn on_close_block() {
|
||||
let engine = new_morden().to_engine().unwrap();
|
||||
let genesis_header = engine.spec().genesis_header();
|
||||
let mut db_result = get_temp_journal_db();
|
||||
let mut db = db_result.take();
|
||||
engine.spec().ensure_db_good(&mut db);
|
||||
let last_hashes = vec![genesis_header.hash()];
|
||||
let b = OpenBlock::new(engine.deref(), db, &genesis_header, &last_hashes, Address::zero(), vec![]);
|
||||
let b = b.close();
|
||||
assert_eq!(b.state().balance(&Address::zero()), U256::from_str("4563918244f40000").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn on_close_block_with_uncle() {
|
||||
let engine = new_morden().to_engine().unwrap();
|
||||
let genesis_header = engine.spec().genesis_header();
|
||||
let mut db_result = get_temp_journal_db();
|
||||
let mut db = db_result.take();
|
||||
engine.spec().ensure_db_good(&mut db);
|
||||
let last_hashes = vec![genesis_header.hash()];
|
||||
let mut b = OpenBlock::new(engine.deref(), db, &genesis_header, &last_hashes, Address::zero(), vec![]);
|
||||
let mut uncle = Header::new();
|
||||
let uncle_author = address_from_hex("ef2d6d194084c2de36e0dabfce45d046b37d1106");
|
||||
uncle.author = uncle_author.clone();
|
||||
b.push_uncle(uncle).unwrap();
|
||||
|
||||
let b = b.close();
|
||||
assert_eq!(b.state().balance(&Address::zero()), U256::from_str("478eae0e571ba000").unwrap());
|
||||
assert_eq!(b.state().balance(&uncle_author), U256::from_str("3cb71f51fc558000").unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn has_valid_metadata() {
|
||||
let engine = Ethash::new_boxed(new_morden());
|
||||
assert!(!engine.name().is_empty());
|
||||
assert!(engine.version().major >= 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_return_params() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
assert!(engine.u64_param("durationLimit") > 0);
|
||||
assert!(engine.u256_param("minimumDifficulty") > U256::zero());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_return_factory() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
engine.vm_factory();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_return_schedule() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let schedule = engine.schedule(&EnvInfo {
|
||||
number: 10000000,
|
||||
author: x!(0),
|
||||
timestamp: 0,
|
||||
difficulty: x!(0),
|
||||
last_hashes: vec![],
|
||||
gas_used: x!(0),
|
||||
gas_limit: x!(0)
|
||||
});
|
||||
|
||||
assert!(schedule.stack_limit > 0);
|
||||
|
||||
let schedule = engine.schedule(&EnvInfo {
|
||||
number: 100,
|
||||
author: x!(0),
|
||||
timestamp: 0,
|
||||
difficulty: x!(0),
|
||||
last_hashes: vec![],
|
||||
gas_used: x!(0),
|
||||
gas_limit: x!(0)
|
||||
});
|
||||
|
||||
assert!(!schedule.have_delegate_call);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_do_seal_verification_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let header: Header = Header::default();
|
||||
|
||||
let verify_result = engine.verify_block_basic(&header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::InvalidSealArity(_))) => {},
|
||||
Err(_) => { panic!("should be block seal-arity mismatch error (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_do_difficulty_verification_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let mut header: Header = Header::default();
|
||||
header.set_seal(vec![rlp::encode(&H256::zero()).to_vec(), rlp::encode(&H64::zero()).to_vec()]);
|
||||
|
||||
let verify_result = engine.verify_block_basic(&header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::DifficultyOutOfBounds(_))) => {},
|
||||
Err(_) => { panic!("should be block difficulty error (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_do_proof_of_work_verification_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let mut header: Header = Header::default();
|
||||
header.set_seal(vec![rlp::encode(&H256::zero()).to_vec(), rlp::encode(&H64::zero()).to_vec()]);
|
||||
header.set_difficulty(U256::from_str("ffffffffffffffffffffffffffffffffffffffffffffaaaaaaaaaaaaaaaaaaaa").unwrap());
|
||||
|
||||
let verify_result = engine.verify_block_basic(&header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::InvalidProofOfWork(_))) => {},
|
||||
Err(_) => { panic!("should be invalid proof of work error (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_do_seal_unordered_verification_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let header: Header = Header::default();
|
||||
|
||||
let verify_result = engine.verify_block_unordered(&header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::InvalidSealArity(_))) => {},
|
||||
Err(_) => { panic!("should be block seal-arity mismatch error (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_do_seal256_verification_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let mut header: Header = Header::default();
|
||||
header.set_seal(vec![rlp::encode(&H256::zero()).to_vec(), rlp::encode(&H64::zero()).to_vec()]);
|
||||
let verify_result = engine.verify_block_unordered(&header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::MismatchedH256SealElement(_))) => {},
|
||||
Err(_) => { panic!("should be invalid 256-bit seal fail (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_do_proof_of_work_unordered_verification_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let mut header: Header = Header::default();
|
||||
header.set_seal(vec![rlp::encode(&H256::from("b251bd2e0283d0658f2cadfdc8ca619b5de94eca5742725e2e757dd13ed7503d")).to_vec(), rlp::encode(&H64::zero()).to_vec()]);
|
||||
header.set_difficulty(U256::from_str("ffffffffffffffffffffffffffffffffffffffffffffaaaaaaaaaaaaaaaaaaaa").unwrap());
|
||||
|
||||
let verify_result = engine.verify_block_unordered(&header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::InvalidProofOfWork(_))) => {},
|
||||
Err(_) => { panic!("should be invalid proof-of-work fail (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_verify_block_family_genesis_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let header: Header = Header::default();
|
||||
let parent_header: Header = Header::default();
|
||||
|
||||
let verify_result = engine.verify_block_family(&header, &parent_header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::RidiculousNumber(_))) => {},
|
||||
Err(_) => { panic!("should be invalid block number fail (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_verify_block_family_difficulty_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let mut header: Header = Header::default();
|
||||
header.set_number(2);
|
||||
let mut parent_header: Header = Header::default();
|
||||
parent_header.set_number(1);
|
||||
|
||||
let verify_result = engine.verify_block_family(&header, &parent_header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::InvalidDifficulty(_))) => {},
|
||||
Err(_) => { panic!("should be invalid difficulty fail (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_verify_block_family_gas_fail() {
|
||||
let engine = Ethash::new_test(new_morden());
|
||||
let mut header: Header = Header::default();
|
||||
header.set_number(2);
|
||||
header.set_difficulty(U256::from_str("0000000000000000000000000000000000000000000000000000000000020000").unwrap());
|
||||
let mut parent_header: Header = Header::default();
|
||||
parent_header.set_number(1);
|
||||
|
||||
let verify_result = engine.verify_block_family(&header, &parent_header, None);
|
||||
|
||||
match verify_result {
|
||||
Err(Error::Block(BlockError::InvalidGasLimit(_))) => {},
|
||||
Err(_) => { panic!("should be invalid difficulty fail (got {:?})", verify_result); },
|
||||
_ => { panic!("Should be error, got Ok"); },
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: difficulty test
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn on_close_block_with_uncle() {
|
||||
use super::*;
|
||||
let engine = new_morden().to_engine().unwrap();
|
||||
let genesis_header = engine.spec().genesis_header();
|
||||
let mut db_result = get_temp_journal_db();
|
||||
let mut db = db_result.take();
|
||||
engine.spec().ensure_db_good(&mut db);
|
||||
let last_hashes = vec![genesis_header.hash()];
|
||||
let mut b = OpenBlock::new(engine.deref(), db, &genesis_header, &last_hashes, Address::zero(), vec![]);
|
||||
let mut uncle = Header::new();
|
||||
let uncle_author = address_from_hex("ef2d6d194084c2de36e0dabfce45d046b37d1106");
|
||||
uncle.author = uncle_author.clone();
|
||||
b.push_uncle(uncle).unwrap();
|
||||
|
||||
let b = b.close();
|
||||
assert_eq!(b.state().balance(&Address::zero()), U256::from_str("478eae0e571ba000").unwrap());
|
||||
assert_eq!(b.state().balance(&uncle_author), U256::from_str("3cb71f51fc558000").unwrap());
|
||||
}
|
||||
|
||||
// TODO: difficulty test
|
||||
|
@ -24,7 +24,7 @@ pub mod ethash;
|
||||
/// Export the denominations module.
|
||||
pub mod denominations;
|
||||
|
||||
pub use self::ethash::*;
|
||||
pub use self::ethash::{Ethash};
|
||||
pub use self::denominations::*;
|
||||
|
||||
use super::spec::*;
|
||||
|
@ -159,11 +159,13 @@ macro_rules! evm_test_ignore(
|
||||
#[test]
|
||||
#[ignore]
|
||||
#[cfg(feature = "jit")]
|
||||
#[cfg(feature = "ignored-tests")]
|
||||
fn $name_jit() {
|
||||
$name_test(Factory::new(VMType::Jit));
|
||||
}
|
||||
#[test]
|
||||
#[ignore]
|
||||
#[cfg(feature = "ignored-tests")]
|
||||
fn $name_int() {
|
||||
$name_test(Factory::new(VMType::Interpreter));
|
||||
}
|
||||
|
@ -258,6 +258,7 @@ impl<'a> CodeReader<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(enum_variant_names)]
|
||||
enum InstructionCost {
|
||||
Gas(U256),
|
||||
GasMem(U256, U256),
|
||||
@ -282,7 +283,7 @@ impl evm::Evm for Interpreter {
|
||||
let code = ¶ms.code.as_ref().unwrap();
|
||||
let valid_jump_destinations = self.find_jump_destinations(&code);
|
||||
|
||||
let mut current_gas = params.gas.clone();
|
||||
let mut current_gas = params.gas;
|
||||
let mut stack = VecStack::with_capacity(ext.schedule().stack_limit, U256::zero());
|
||||
let mut mem = vec![];
|
||||
let mut reader = CodeReader {
|
||||
@ -380,10 +381,9 @@ impl Interpreter {
|
||||
|
||||
let gas = if self.is_zero(&val) && !self.is_zero(newval) {
|
||||
schedule.sstore_set_gas
|
||||
} else if !self.is_zero(&val) && self.is_zero(newval) {
|
||||
// Refund is added when actually executing sstore
|
||||
schedule.sstore_reset_gas
|
||||
} else {
|
||||
// Refund for below case is added when actually executing sstore
|
||||
// !self.is_zero(&val) && self.is_zero(newval)
|
||||
schedule.sstore_reset_gas
|
||||
};
|
||||
InstructionCost::Gas(U256::from(gas))
|
||||
@ -391,10 +391,7 @@ impl Interpreter {
|
||||
instructions::SLOAD => {
|
||||
InstructionCost::Gas(U256::from(schedule.sload_gas))
|
||||
},
|
||||
instructions::MSTORE => {
|
||||
InstructionCost::GasMem(default_gas, try!(self.mem_needed_const(stack.peek(0), 32)))
|
||||
},
|
||||
instructions::MLOAD => {
|
||||
instructions::MSTORE | instructions::MLOAD => {
|
||||
InstructionCost::GasMem(default_gas, try!(self.mem_needed_const(stack.peek(0), 32)))
|
||||
},
|
||||
instructions::MSTORE8 => {
|
||||
@ -409,10 +406,7 @@ impl Interpreter {
|
||||
let gas = U256::from(schedule.sha3_gas) + (U256::from(schedule.sha3_word_gas) * words);
|
||||
InstructionCost::GasMem(gas, try!(self.mem_needed(stack.peek(0), stack.peek(1))))
|
||||
},
|
||||
instructions::CALLDATACOPY => {
|
||||
InstructionCost::GasMemCopy(default_gas, try!(self.mem_needed(stack.peek(0), stack.peek(2))), stack.peek(2).clone())
|
||||
},
|
||||
instructions::CODECOPY => {
|
||||
instructions::CALLDATACOPY | instructions::CODECOPY => {
|
||||
InstructionCost::GasMemCopy(default_gas, try!(self.mem_needed(stack.peek(0), stack.peek(2))), stack.peek(2).clone())
|
||||
},
|
||||
instructions::EXTCODECOPY => {
|
||||
@ -736,8 +730,7 @@ impl Interpreter {
|
||||
},
|
||||
instructions::CALLVALUE => {
|
||||
stack.push(match params.value {
|
||||
ActionValue::Transfer(val) => val,
|
||||
ActionValue::Apparent(val) => val,
|
||||
ActionValue::Transfer(val) | ActionValue::Apparent(val) => val
|
||||
});
|
||||
},
|
||||
instructions::CALLDATALOAD => {
|
||||
@ -982,9 +975,9 @@ impl Interpreter {
|
||||
let (a, neg_a) = get_and_reset_sign(stack.pop_back());
|
||||
let (b, neg_b) = get_and_reset_sign(stack.pop_back());
|
||||
|
||||
let is_positive_lt = a < b && (neg_a | neg_b) == false;
|
||||
let is_negative_lt = a > b && (neg_a & neg_b) == true;
|
||||
let has_different_signs = neg_a == true && neg_b == false;
|
||||
let is_positive_lt = a < b && !(neg_a | neg_b);
|
||||
let is_negative_lt = a > b && (neg_a & neg_b);
|
||||
let has_different_signs = neg_a && !neg_b;
|
||||
|
||||
stack.push(self.bool_to_u256(is_positive_lt | is_negative_lt | has_different_signs));
|
||||
},
|
||||
@ -997,9 +990,9 @@ impl Interpreter {
|
||||
let (a, neg_a) = get_and_reset_sign(stack.pop_back());
|
||||
let (b, neg_b) = get_and_reset_sign(stack.pop_back());
|
||||
|
||||
let is_positive_gt = a > b && (neg_a | neg_b) == false;
|
||||
let is_negative_gt = a < b && (neg_a & neg_b) == true;
|
||||
let has_different_signs = neg_a == false && neg_b == true;
|
||||
let is_positive_gt = a > b && !(neg_a | neg_b);
|
||||
let is_negative_gt = a < b && (neg_a & neg_b);
|
||||
let has_different_signs = !neg_a && neg_b;
|
||||
|
||||
stack.push(self.bool_to_u256(is_positive_gt | is_negative_gt | has_different_signs));
|
||||
},
|
||||
|
@ -25,6 +25,7 @@ struct FakeLogEntry {
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash, Debug)]
|
||||
#[allow(enum_variant_names)] // Common prefix is C ;)
|
||||
enum FakeCallType {
|
||||
CALL, CREATE
|
||||
}
|
||||
@ -84,7 +85,7 @@ impl Ext for FakeExt {
|
||||
}
|
||||
|
||||
fn balance(&self, address: &Address) -> U256 {
|
||||
self.balances.get(address).unwrap().clone()
|
||||
*self.balances.get(address).unwrap()
|
||||
}
|
||||
|
||||
fn blockhash(&self, number: &U256) -> H256 {
|
||||
@ -94,10 +95,10 @@ impl Ext for FakeExt {
|
||||
fn create(&mut self, gas: &U256, value: &U256, code: &[u8]) -> ContractCreateResult {
|
||||
self.calls.insert(FakeCall {
|
||||
call_type: FakeCallType::CREATE,
|
||||
gas: gas.clone(),
|
||||
gas: *gas,
|
||||
sender_address: None,
|
||||
receive_address: None,
|
||||
value: Some(value.clone()),
|
||||
value: Some(*value),
|
||||
data: code.to_vec(),
|
||||
code_address: None
|
||||
});
|
||||
@ -115,14 +116,14 @@ impl Ext for FakeExt {
|
||||
|
||||
self.calls.insert(FakeCall {
|
||||
call_type: FakeCallType::CALL,
|
||||
gas: gas.clone(),
|
||||
gas: *gas,
|
||||
sender_address: Some(sender_address.clone()),
|
||||
receive_address: Some(receive_address.clone()),
|
||||
value: value,
|
||||
data: data.to_vec(),
|
||||
code_address: Some(code_address.clone())
|
||||
});
|
||||
MessageCallResult::Success(gas.clone())
|
||||
MessageCallResult::Success(*gas)
|
||||
}
|
||||
|
||||
fn extcode(&self, address: &Address) -> Bytes {
|
||||
@ -898,7 +899,7 @@ fn test_calls(factory: super::Factory) {
|
||||
let mut ext = FakeExt::new();
|
||||
ext.balances = {
|
||||
let mut s = HashMap::new();
|
||||
s.insert(params.address.clone(), params.gas.clone());
|
||||
s.insert(params.address.clone(), params.gas);
|
||||
s
|
||||
};
|
||||
|
||||
|
@ -360,45 +360,14 @@ impl<'a> Executive<'a> {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[allow(dead_code)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use common::*;
|
||||
use ethereum;
|
||||
use engine::*;
|
||||
use spec::*;
|
||||
use evm::{Schedule, Factory, VMType};
|
||||
use evm::{Factory, VMType};
|
||||
use substate::*;
|
||||
use tests::helpers::*;
|
||||
|
||||
struct TestEngine {
|
||||
factory: Factory,
|
||||
spec: Spec,
|
||||
max_depth: usize
|
||||
}
|
||||
|
||||
impl TestEngine {
|
||||
fn new(max_depth: usize, factory: Factory) -> TestEngine {
|
||||
TestEngine {
|
||||
factory: factory,
|
||||
spec: ethereum::new_frontier_test(),
|
||||
max_depth: max_depth
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Engine for TestEngine {
|
||||
fn name(&self) -> &str { "TestEngine" }
|
||||
fn spec(&self) -> &Spec { &self.spec }
|
||||
fn vm_factory(&self) -> &Factory {
|
||||
&self.factory
|
||||
}
|
||||
fn schedule(&self, _env_info: &EnvInfo) -> Schedule {
|
||||
let mut schedule = Schedule::new_frontier();
|
||||
schedule.max_depth = self.max_depth;
|
||||
schedule
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_contract_address() {
|
||||
let address = Address::from_str("0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6").unwrap();
|
||||
@ -599,6 +568,7 @@ mod tests {
|
||||
}
|
||||
|
||||
// test is incorrect, mk
|
||||
// TODO: fix (preferred) or remove
|
||||
evm_test_ignore!{test_aba_calls: test_aba_calls_jit, test_aba_calls_int}
|
||||
fn test_aba_calls(factory: Factory) {
|
||||
// 60 00 - push 0
|
||||
@ -659,6 +629,7 @@ mod tests {
|
||||
}
|
||||
|
||||
// test is incorrect, mk
|
||||
// TODO: fix (preferred) or remove
|
||||
evm_test_ignore!{test_recursive_bomb1: test_recursive_bomb1_jit, test_recursive_bomb1_int}
|
||||
fn test_recursive_bomb1(factory: Factory) {
|
||||
// 60 01 - push 1
|
||||
@ -704,6 +675,7 @@ mod tests {
|
||||
}
|
||||
|
||||
// test is incorrect, mk
|
||||
// TODO: fix (preferred) or remove
|
||||
evm_test_ignore!{test_transact_simple: test_transact_simple_jit, test_transact_simple_int}
|
||||
fn test_transact_simple(factory: Factory) {
|
||||
let keypair = KeyPair::create().unwrap();
|
||||
@ -902,5 +874,4 @@ mod tests {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -45,10 +45,9 @@ impl OriginInfo {
|
||||
OriginInfo {
|
||||
address: params.address.clone(),
|
||||
origin: params.origin.clone(),
|
||||
gas_price: params.gas_price.clone(),
|
||||
gas_price: params.gas_price,
|
||||
value: match params.value {
|
||||
ActionValue::Transfer(val) => val,
|
||||
ActionValue::Apparent(val) => val,
|
||||
ActionValue::Transfer(val) | ActionValue::Apparent(val) => val
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -106,16 +105,18 @@ impl<'a> Ext for Externalities<'a> {
|
||||
}
|
||||
|
||||
fn blockhash(&self, number: &U256) -> H256 {
|
||||
// TODO: comment out what this function expects from env_info, since it will produce panics if the latter is inconsistent
|
||||
match *number < U256::from(self.env_info.number) && number.low_u64() >= cmp::max(256, self.env_info.number) - 256 {
|
||||
true => {
|
||||
let index = self.env_info.number - number.low_u64() - 1;
|
||||
assert!(index < self.env_info.last_hashes.len() as u64, format!("Inconsistent env_info, should contain at least {:?} last hashes", index+1));
|
||||
let r = self.env_info.last_hashes[index as usize].clone();
|
||||
trace!("ext: blockhash({}) -> {} self.env_info.number={}\n", number, r, self.env_info.number);
|
||||
r
|
||||
},
|
||||
false => {
|
||||
trace!("ext: blockhash({}) -> null self.env_info.number={}\n", number, self.env_info.number);
|
||||
H256::from(&U256::zero())
|
||||
H256::zero()
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -131,8 +132,8 @@ impl<'a> Ext for Externalities<'a> {
|
||||
sender: self.origin_info.address.clone(),
|
||||
origin: self.origin_info.origin.clone(),
|
||||
gas: *gas,
|
||||
gas_price: self.origin_info.gas_price.clone(),
|
||||
value: ActionValue::Transfer(value.clone()),
|
||||
gas_price: self.origin_info.gas_price,
|
||||
value: ActionValue::Transfer(*value),
|
||||
code: Some(code.to_vec()),
|
||||
data: None,
|
||||
};
|
||||
@ -162,11 +163,11 @@ impl<'a> Ext for Externalities<'a> {
|
||||
let mut params = ActionParams {
|
||||
sender: sender_address.clone(),
|
||||
address: receive_address.clone(),
|
||||
value: ActionValue::Apparent(self.origin_info.value.clone()),
|
||||
value: ActionValue::Apparent(self.origin_info.value),
|
||||
code_address: code_address.clone(),
|
||||
origin: self.origin_info.origin.clone(),
|
||||
gas: *gas,
|
||||
gas_price: self.origin_info.gas_price.clone(),
|
||||
gas_price: self.origin_info.gas_price,
|
||||
code: self.state.code(code_address),
|
||||
data: Some(data.to_vec()),
|
||||
};
|
||||
@ -257,3 +258,144 @@ impl<'a> Ext for Externalities<'a> {
|
||||
self.substate.sstore_clears_count = self.substate.sstore_clears_count + U256::one();
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use common::*;
|
||||
use state::*;
|
||||
use engine::*;
|
||||
use evm::{Ext};
|
||||
use substate::*;
|
||||
use tests::helpers::*;
|
||||
use super::*;
|
||||
|
||||
fn get_test_origin() -> OriginInfo {
|
||||
OriginInfo {
|
||||
address: Address::zero(),
|
||||
origin: Address::zero(),
|
||||
gas_price: U256::zero(),
|
||||
value: U256::zero()
|
||||
}
|
||||
}
|
||||
|
||||
fn get_test_env_info() -> EnvInfo {
|
||||
EnvInfo {
|
||||
number: 100,
|
||||
author: x!(0),
|
||||
timestamp: 0,
|
||||
difficulty: x!(0),
|
||||
last_hashes: vec![],
|
||||
gas_used: x!(0),
|
||||
gas_limit: x!(0)
|
||||
}
|
||||
}
|
||||
|
||||
struct TestSetup {
|
||||
state: GuardedTempResult<State>,
|
||||
engine: Box<Engine>,
|
||||
sub_state: Substate,
|
||||
env_info: EnvInfo
|
||||
}
|
||||
|
||||
impl TestSetup {
|
||||
fn new() -> TestSetup {
|
||||
TestSetup {
|
||||
state: get_temp_state(),
|
||||
engine: get_test_spec().to_engine().unwrap(),
|
||||
sub_state: Substate::new(),
|
||||
env_info: get_test_env_info()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_be_created() {
|
||||
let mut setup = TestSetup::new();
|
||||
let state = setup.state.reference_mut();
|
||||
|
||||
let ext = Externalities::new(state, &setup.env_info, &*setup.engine, 0, get_test_origin(), &mut setup.sub_state, OutputPolicy::InitContract);
|
||||
|
||||
assert_eq!(ext.env_info().number, 100);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_return_block_hash_no_env() {
|
||||
let mut setup = TestSetup::new();
|
||||
let state = setup.state.reference_mut();
|
||||
let ext = Externalities::new(state, &setup.env_info, &*setup.engine, 0, get_test_origin(), &mut setup.sub_state, OutputPolicy::InitContract);
|
||||
|
||||
let hash = ext.blockhash(&U256::from_str("0000000000000000000000000000000000000000000000000000000000120000").unwrap());
|
||||
|
||||
assert_eq!(hash, H256::zero());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_return_block_hash() {
|
||||
let test_hash = H256::from("afafafafafafafafafafafbcbcbcbcbcbcbcbcbcbeeeeeeeeeeeeedddddddddd");
|
||||
let test_env_number = 0x120001;
|
||||
|
||||
let mut setup = TestSetup::new();
|
||||
{
|
||||
let env_info = &mut setup.env_info;
|
||||
env_info.number = test_env_number;
|
||||
env_info.last_hashes.push(test_hash.clone());
|
||||
}
|
||||
let state = setup.state.reference_mut();
|
||||
let ext = Externalities::new(state, &setup.env_info, &*setup.engine, 0, get_test_origin(), &mut setup.sub_state, OutputPolicy::InitContract);
|
||||
|
||||
let hash = ext.blockhash(&U256::from_str("0000000000000000000000000000000000000000000000000000000000120000").unwrap());
|
||||
|
||||
assert_eq!(test_hash, hash);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn can_call_fail_empty() {
|
||||
let mut setup = TestSetup::new();
|
||||
let state = setup.state.reference_mut();
|
||||
let mut ext = Externalities::new(state, &setup.env_info, &*setup.engine, 0, get_test_origin(), &mut setup.sub_state, OutputPolicy::InitContract);
|
||||
|
||||
let mut output = vec![];
|
||||
|
||||
// this should panic because we have no balance on any account
|
||||
ext.call(
|
||||
&U256::from_str("0000000000000000000000000000000000000000000000000000000000120000").unwrap(),
|
||||
&Address::new(),
|
||||
&Address::new(),
|
||||
Some(U256::from_str("0000000000000000000000000000000000000000000000000000000000150000").unwrap()),
|
||||
&[],
|
||||
&Address::new(),
|
||||
&mut output);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_log() {
|
||||
let log_data = vec![120u8, 110u8];
|
||||
let log_topics = vec![H256::from("af0fa234a6af46afa23faf23bcbc1c1cb4bcb7bcbe7e7e7ee3ee2edddddddddd")];
|
||||
|
||||
let mut setup = TestSetup::new();
|
||||
let state = setup.state.reference_mut();
|
||||
|
||||
{
|
||||
let mut ext = Externalities::new(state, &setup.env_info, &*setup.engine, 0, get_test_origin(), &mut setup.sub_state, OutputPolicy::InitContract);
|
||||
ext.log(log_topics, &log_data);
|
||||
}
|
||||
|
||||
assert_eq!(setup.sub_state.logs.len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn can_suicide() {
|
||||
let refund_account = &Address::new();
|
||||
|
||||
let mut setup = TestSetup::new();
|
||||
let state = setup.state.reference_mut();
|
||||
|
||||
{
|
||||
let mut ext = Externalities::new(state, &setup.env_info, &*setup.engine, 0, get_test_origin(), &mut setup.sub_state, OutputPolicy::InitContract);
|
||||
ext.suicide(&refund_account);
|
||||
}
|
||||
|
||||
assert_eq!(setup.sub_state.suicides.len(), 1);
|
||||
}
|
||||
}
|
||||
|
@ -260,7 +260,7 @@ pub struct TransactionAddress {
|
||||
/// Block hash
|
||||
pub block_hash: H256,
|
||||
/// Transaction index within the block
|
||||
pub index: u64
|
||||
pub index: usize
|
||||
}
|
||||
|
||||
impl ExtrasIndexable for TransactionAddress {
|
||||
|
@ -26,15 +26,15 @@ use externalities::*;
|
||||
use substate::*;
|
||||
use tests::helpers::*;
|
||||
|
||||
struct TestEngine {
|
||||
struct TestEngineFrontier {
|
||||
vm_factory: Factory,
|
||||
spec: Spec,
|
||||
max_depth: usize
|
||||
}
|
||||
|
||||
impl TestEngine {
|
||||
fn new(max_depth: usize, vm_type: VMType) -> TestEngine {
|
||||
TestEngine {
|
||||
impl TestEngineFrontier {
|
||||
fn new(max_depth: usize, vm_type: VMType) -> TestEngineFrontier {
|
||||
TestEngineFrontier {
|
||||
vm_factory: Factory::new(vm_type),
|
||||
spec: ethereum::new_frontier_test(),
|
||||
max_depth: max_depth
|
||||
@ -42,7 +42,7 @@ impl TestEngine {
|
||||
}
|
||||
}
|
||||
|
||||
impl Engine for TestEngine {
|
||||
impl Engine for TestEngineFrontier {
|
||||
fn name(&self) -> &str { "TestEngine" }
|
||||
fn spec(&self) -> &Spec { &self.spec }
|
||||
fn vm_factory(&self) -> &Factory { &self.vm_factory }
|
||||
@ -209,7 +209,7 @@ fn do_json_test_for(vm: &VMType, json_data: &[u8]) -> Vec<String> {
|
||||
EnvInfo::from_json(env)
|
||||
}).unwrap_or_default();
|
||||
|
||||
let engine = TestEngine::new(1, vm.clone());
|
||||
let engine = TestEngineFrontier::new(1, vm.clone());
|
||||
|
||||
// params
|
||||
let mut params = ActionParams::default();
|
||||
|
@ -20,7 +20,6 @@ mod test_common;
|
||||
mod transaction;
|
||||
mod executive;
|
||||
mod state;
|
||||
mod client;
|
||||
mod chain;
|
||||
mod homestead_state;
|
||||
mod homestead_chain;
|
||||
|
@ -115,7 +115,7 @@ declare_test!{StateTests_stSolidityTest, "StateTests/stSolidityTest"}
|
||||
declare_test!{StateTests_stSpecialTest, "StateTests/stSpecialTest"}
|
||||
declare_test!{StateTests_stSystemOperationsTest, "StateTests/stSystemOperationsTest"}
|
||||
declare_test!{StateTests_stTransactionTest, "StateTests/stTransactionTest"}
|
||||
declare_test!{StateTests_stTransitionTest, "StateTests/stTransitionTest"}
|
||||
//declare_test!{StateTests_stTransitionTest, "StateTests/stTransitionTest"}
|
||||
declare_test!{StateTests_stWalletTest, "StateTests/stWalletTest"}
|
||||
|
||||
|
||||
|
@ -18,8 +18,15 @@
|
||||
#![feature(cell_extras)]
|
||||
#![feature(augmented_assignments)]
|
||||
#![feature(plugin)]
|
||||
// Clippy
|
||||
#![plugin(clippy)]
|
||||
#![allow(needless_range_loop, match_bool)]
|
||||
// TODO [todr] not really sure
|
||||
#![allow(needless_range_loop)]
|
||||
// Shorter than if-else
|
||||
#![allow(match_bool)]
|
||||
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
|
||||
#![allow(clone_on_copy)]
|
||||
|
||||
|
||||
//! Ethcore library
|
||||
//!
|
||||
@ -98,6 +105,7 @@ pub mod ethereum;
|
||||
pub mod header;
|
||||
pub mod service;
|
||||
pub mod spec;
|
||||
pub mod transaction;
|
||||
pub mod views;
|
||||
pub mod receipt;
|
||||
|
||||
@ -115,7 +123,6 @@ mod state;
|
||||
mod account;
|
||||
mod account_db;
|
||||
mod action_params;
|
||||
mod transaction;
|
||||
mod null_engine;
|
||||
mod builtin;
|
||||
mod extras;
|
||||
|
@ -43,8 +43,8 @@ impl PodAccount {
|
||||
/// NOTE: This will silently fail unless the account is fully cached.
|
||||
pub fn from_account(acc: &Account) -> PodAccount {
|
||||
PodAccount {
|
||||
balance: acc.balance().clone(),
|
||||
nonce: acc.nonce().clone(),
|
||||
balance: *acc.balance(),
|
||||
nonce: *acc.nonce(),
|
||||
storage: acc.storage_overlay().iter().fold(BTreeMap::new(), |mut m, (k, &(_, ref v))| {m.insert(k.clone(), v.clone()); m}),
|
||||
code: acc.code().unwrap().to_vec(),
|
||||
}
|
||||
|
@ -17,9 +17,9 @@
|
||||
//! Creates and registers client and network services.
|
||||
|
||||
use util::*;
|
||||
use util::panics::*;
|
||||
use spec::Spec;
|
||||
use error::*;
|
||||
use std::env;
|
||||
use client::Client;
|
||||
|
||||
/// Message type for external and internal events
|
||||
@ -38,17 +38,20 @@ pub type NetSyncMessage = NetworkIoMessage<SyncMessage>;
|
||||
pub struct ClientService {
|
||||
net_service: NetworkService<SyncMessage>,
|
||||
client: Arc<Client>,
|
||||
panic_handler: Arc<PanicHandler>
|
||||
}
|
||||
|
||||
impl ClientService {
|
||||
/// Start the service in a separate thread.
|
||||
pub fn start(spec: Spec, net_config: NetworkConfiguration) -> Result<ClientService, Error> {
|
||||
pub fn start(spec: Spec, net_config: NetworkConfiguration, db_path: &Path) -> Result<ClientService, Error> {
|
||||
let panic_handler = PanicHandler::new_in_arc();
|
||||
let mut net_service = try!(NetworkService::start(net_config));
|
||||
panic_handler.forward_from(&net_service);
|
||||
|
||||
info!("Starting {}", net_service.host_info());
|
||||
info!("Configured for {} using {} engine", spec.name, spec.engine_name);
|
||||
let mut dir = env::home_dir().unwrap();
|
||||
dir.push(".parity");
|
||||
let client = try!(Client::new(spec, &dir, net_service.io().channel()));
|
||||
let client = try!(Client::new(spec, db_path, net_service.io().channel()));
|
||||
panic_handler.forward_from(client.deref());
|
||||
let client_io = Arc::new(ClientIoHandler {
|
||||
client: client.clone()
|
||||
});
|
||||
@ -57,6 +60,7 @@ impl ClientService {
|
||||
Ok(ClientService {
|
||||
net_service: net_service,
|
||||
client: client,
|
||||
panic_handler: panic_handler,
|
||||
})
|
||||
}
|
||||
|
||||
@ -81,6 +85,12 @@ impl ClientService {
|
||||
}
|
||||
}
|
||||
|
||||
impl MayPanic for ClientService {
|
||||
fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
|
||||
self.panic_handler.on_panic(closure);
|
||||
}
|
||||
}
|
||||
|
||||
/// IO interface for the Client handler
|
||||
struct ClientIoHandler {
|
||||
client: Arc<Client>
|
||||
@ -123,7 +133,8 @@ mod tests {
|
||||
#[test]
|
||||
fn it_can_be_started() {
|
||||
let spec = get_test_spec();
|
||||
let service = ClientService::start(spec, NetworkConfiguration::new());
|
||||
let temp_path = RandomTempPath::new();
|
||||
let service = ClientService::start(spec, NetworkConfiguration::new(), &temp_path.as_path());
|
||||
assert!(service.is_ok());
|
||||
}
|
||||
}
|
||||
|
@ -153,12 +153,12 @@ impl State {
|
||||
|
||||
/// Get the balance of account `a`.
|
||||
pub fn balance(&self, a: &Address) -> U256 {
|
||||
self.get(a, false).as_ref().map_or(U256::zero(), |account| account.balance().clone())
|
||||
self.get(a, false).as_ref().map_or(U256::zero(), |account| *account.balance())
|
||||
}
|
||||
|
||||
/// Get the nonce of account `a`.
|
||||
pub fn nonce(&self, a: &Address) -> U256 {
|
||||
self.get(a, false).as_ref().map_or(U256::zero(), |account| account.nonce().clone())
|
||||
self.get(a, false).as_ref().map_or(U256::zero(), |account| *account.nonce())
|
||||
}
|
||||
|
||||
/// Mutate storage of account `address` so that it is `value` for `key`.
|
||||
|
@ -56,6 +56,12 @@ mod tests {
|
||||
use super::*;
|
||||
use common::*;
|
||||
|
||||
#[test]
|
||||
fn created() {
|
||||
let sub_state = Substate::new();
|
||||
assert_eq!(sub_state.suicides.len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn accrue() {
|
||||
let mut sub_state = Substate::new();
|
||||
|
@ -14,9 +14,9 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use client::{BlockChainClient,Client};
|
||||
use super::test_common::*;
|
||||
use client::{BlockChainClient, Client, BlockId};
|
||||
use tests::helpers::*;
|
||||
use common::*;
|
||||
|
||||
#[test]
|
||||
fn created() {
|
||||
@ -44,7 +44,7 @@ fn imports_good_block() {
|
||||
client.flush_queue();
|
||||
client.import_verified_blocks(&IoChannel::disconnected());
|
||||
|
||||
let block = client.block_header_at(1).unwrap();
|
||||
let block = client.block_header(BlockId::Number(1)).unwrap();
|
||||
assert!(!block.is_empty());
|
||||
}
|
||||
|
||||
@ -53,7 +53,7 @@ fn query_none_block() {
|
||||
let dir = RandomTempPath::new();
|
||||
let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
|
||||
|
||||
let non_existant = client.block_header_at(188);
|
||||
let non_existant = client.block_header(BlockId::Number(188));
|
||||
assert!(non_existant.is_none());
|
||||
}
|
||||
|
||||
@ -61,7 +61,7 @@ fn query_none_block() {
|
||||
fn query_bad_block() {
|
||||
let client_result = get_test_client_with_blocks(vec![get_bad_state_dummy_block()]);
|
||||
let client = client_result.reference();
|
||||
let bad_block:Option<Bytes> = client.block_header_at(1);
|
||||
let bad_block:Option<Bytes> = client.block_header(BlockId::Number(1));
|
||||
|
||||
assert!(bad_block.is_none());
|
||||
}
|
||||
@ -76,11 +76,24 @@ fn returns_chain_info() {
|
||||
assert_eq!(info.best_block_hash, block.header().hash());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_block_body() {
|
||||
let dummy_block = get_good_dummy_block();
|
||||
let client_result = get_test_client_with_blocks(vec![dummy_block.clone()]);
|
||||
let client = client_result.reference();
|
||||
let block = BlockView::new(&dummy_block);
|
||||
let body = client.block_body(BlockId::Hash(block.header().hash())).unwrap();
|
||||
let body = Rlp::new(&body);
|
||||
assert_eq!(body.item_count(), 2);
|
||||
assert_eq!(body.at(0).as_raw()[..], block.rlp().at(1).as_raw()[..]);
|
||||
assert_eq!(body.at(1).as_raw()[..], block.rlp().at(2).as_raw()[..]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn imports_block_sequence() {
|
||||
let client_result = generate_dummy_client(6);
|
||||
let client = client_result.reference();
|
||||
let block = client.block_header_at(5).unwrap();
|
||||
let block = client.block_header(BlockId::Number(5)).unwrap();
|
||||
|
||||
assert!(!block.is_empty());
|
||||
}
|
@ -14,7 +14,6 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
#[cfg(feature = "json-tests")]
|
||||
use client::{BlockChainClient, Client};
|
||||
use std::env;
|
||||
use common::*;
|
||||
@ -24,7 +23,9 @@ use std::fs::{remove_dir_all};
|
||||
use blockchain::{BlockChain};
|
||||
use state::*;
|
||||
use rocksdb::*;
|
||||
|
||||
use evm::{Schedule, Factory};
|
||||
use engine::*;
|
||||
use ethereum;
|
||||
|
||||
#[cfg(feature = "json-tests")]
|
||||
pub enum ChainEra {
|
||||
@ -82,6 +83,35 @@ impl<T> GuardedTempResult<T> {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TestEngine {
|
||||
factory: Factory,
|
||||
spec: Spec,
|
||||
max_depth: usize
|
||||
}
|
||||
|
||||
impl TestEngine {
|
||||
pub fn new(max_depth: usize, factory: Factory) -> TestEngine {
|
||||
TestEngine {
|
||||
factory: factory,
|
||||
spec: ethereum::new_frontier_test(),
|
||||
max_depth: max_depth
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Engine for TestEngine {
|
||||
fn name(&self) -> &str { "TestEngine" }
|
||||
fn spec(&self) -> &Spec { &self.spec }
|
||||
fn vm_factory(&self) -> &Factory {
|
||||
&self.factory
|
||||
}
|
||||
fn schedule(&self, _env_info: &EnvInfo) -> Schedule {
|
||||
let mut schedule = Schedule::new_frontier();
|
||||
schedule.max_depth = self.max_depth;
|
||||
schedule
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_test_spec() -> Spec {
|
||||
Spec::new_test()
|
||||
}
|
||||
@ -134,7 +164,6 @@ pub fn create_test_block_with_data(header: &Header, transactions: &[&SignedTrans
|
||||
rlp.out()
|
||||
}
|
||||
|
||||
#[cfg(feature = "json-tests")]
|
||||
pub fn generate_dummy_client(block_number: u32) -> GuardedTempResult<Arc<Client>> {
|
||||
let dir = RandomTempPath::new();
|
||||
|
||||
@ -174,7 +203,6 @@ pub fn generate_dummy_client(block_number: u32) -> GuardedTempResult<Arc<Client>
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "json-tests")]
|
||||
pub fn get_test_client_with_blocks(blocks: Vec<Bytes>) -> GuardedTempResult<Arc<Client>> {
|
||||
let dir = RandomTempPath::new();
|
||||
let client = Client::new(get_test_spec(), dir.as_path(), IoChannel::disconnected()).unwrap();
|
||||
@ -271,7 +299,6 @@ pub fn get_good_dummy_block() -> Bytes {
|
||||
create_test_block(&block_header)
|
||||
}
|
||||
|
||||
#[cfg(feature = "json-tests")]
|
||||
pub fn get_bad_state_dummy_block() -> Bytes {
|
||||
let mut block_header = Header::new();
|
||||
let test_spec = get_test_spec();
|
||||
|
@ -15,3 +15,4 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
pub mod helpers;
|
||||
mod client;
|
@ -19,8 +19,9 @@
|
||||
use util::*;
|
||||
use error::*;
|
||||
use evm::Schedule;
|
||||
use header::BlockNumber;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
/// Transaction action type.
|
||||
pub enum Action {
|
||||
/// Create creates new contract.
|
||||
@ -45,7 +46,7 @@ impl Decodable for Action {
|
||||
|
||||
/// A set of information describing an externally-originating message call
|
||||
/// or contract creation operation.
|
||||
#[derive(Default, Debug, Clone)]
|
||||
#[derive(Default, Debug, Clone, PartialEq, Eq)]
|
||||
pub struct Transaction {
|
||||
/// Nonce.
|
||||
pub nonce: U256,
|
||||
@ -156,9 +157,8 @@ impl Transaction {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
/// Signed transaction information.
|
||||
#[derive(Debug, Clone, Eq)]
|
||||
pub struct SignedTransaction {
|
||||
/// Plain Transaction.
|
||||
unsigned: Transaction,
|
||||
@ -174,6 +174,12 @@ pub struct SignedTransaction {
|
||||
sender: RefCell<Option<Address>>
|
||||
}
|
||||
|
||||
impl PartialEq for SignedTransaction {
|
||||
fn eq(&self, other: &SignedTransaction) -> bool {
|
||||
self.unsigned == other.unsigned && self.v == other.v && self.r == other.r && self.s == other.s
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for SignedTransaction {
|
||||
type Target = Transaction;
|
||||
|
||||
@ -284,6 +290,27 @@ impl SignedTransaction {
|
||||
}
|
||||
}
|
||||
|
||||
/// Signed Transaction that is a part of canon blockchain.
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct LocalizedTransaction {
|
||||
/// Signed part.
|
||||
pub signed: SignedTransaction,
|
||||
/// Block number.
|
||||
pub block_number: BlockNumber,
|
||||
/// Block hash.
|
||||
pub block_hash: H256,
|
||||
/// Transaction index within block.
|
||||
pub transaction_index: usize
|
||||
}
|
||||
|
||||
impl Deref for LocalizedTransaction {
|
||||
type Target = SignedTransaction;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.signed
|
||||
}
|
||||
}
|
||||
|
||||
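// Editor's illustrative sketch (not part of this commit): because of the two
// `Deref` impls above, a `LocalizedTransaction` exposes the fields of the wrapped
// `SignedTransaction` and `Transaction` directly; `tx` here is an assumed value.
fn example_deref(tx: &LocalizedTransaction) {
	let position = (tx.block_number, tx.transaction_index); // fields of LocalizedTransaction itself
	let nonce = &tx.nonce; // reached through SignedTransaction and then Transaction via auto-deref
	let _ = (position, nonce);
}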
#[test]
|
||||
fn sender_test() {
|
||||
let t: SignedTransaction = decode(&FromHex::from_hex("f85f800182520894095e7baea6a6c7c4c2dfeb977efac326af552d870a801ba048b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353a0efffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804").unwrap());
|
||||
|
@ -294,6 +294,10 @@ mod tests {
|
||||
})
|
||||
}
|
||||
|
||||
fn transaction_address(&self, _hash: &H256) -> Option<TransactionAddress> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
/// Get the hash of given block's number.
|
||||
fn block_hash(&self, index: BlockNumber) -> Option<H256> {
|
||||
self.numbers.get(&index).cloned()
|
||||
|
@ -155,6 +155,27 @@ impl<'a> BlockView<'a> {
|
||||
self.rlp.val_at(1)
|
||||
}
|
||||
|
||||
/// Return List of transactions with additional localization info.
|
||||
pub fn localized_transactions(&self) -> Vec<LocalizedTransaction> {
|
||||
let header = self.header_view();
|
||||
let block_hash = header.sha3();
|
||||
let block_number = header.number();
|
||||
self.transactions()
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, t)| LocalizedTransaction {
|
||||
signed: t,
|
||||
block_hash: block_hash.clone(),
|
||||
block_number: block_number,
|
||||
transaction_index: i
|
||||
}).collect()
|
||||
}
|
||||
|
||||
/// Return number of transactions in given block, without deserializing them.
|
||||
pub fn transactions_count(&self) -> usize {
|
||||
self.rlp.at(1).iter().count()
|
||||
}
|
||||
|
||||
/// Return List of transactions in given block.
|
||||
pub fn transaction_views(&self) -> Vec<TransactionView> {
|
||||
self.rlp.at(1).iter().map(TransactionView::new_from_rlp).collect()
|
||||
@ -165,11 +186,34 @@ impl<'a> BlockView<'a> {
|
||||
self.rlp.at(1).iter().map(|rlp| rlp.as_raw().sha3()).collect()
|
||||
}
|
||||
|
||||
/// Returns transaction at given index without deserializing unnecessary data.
|
||||
pub fn transaction_at(&self, index: usize) -> Option<SignedTransaction> {
|
||||
self.rlp.at(1).iter().nth(index).map(|rlp| rlp.as_val())
|
||||
}
|
||||
|
||||
/// Returns localized transaction at given index.
|
||||
pub fn localized_transaction_at(&self, index: usize) -> Option<LocalizedTransaction> {
|
||||
let header = self.header_view();
|
||||
let block_hash = header.sha3();
|
||||
let block_number = header.number();
|
||||
self.transaction_at(index).map(|t| LocalizedTransaction {
|
||||
signed: t,
|
||||
block_hash: block_hash,
|
||||
block_number: block_number,
|
||||
transaction_index: index
|
||||
})
|
||||
}
|
||||
|
||||
/// Return list of uncles of given block.
|
||||
pub fn uncles(&self) -> Vec<Header> {
|
||||
self.rlp.val_at(2)
|
||||
}
|
||||
|
||||
/// Return number of uncles in given block, without deserializing them.
|
||||
pub fn uncles_count(&self) -> usize {
|
||||
self.rlp.at(2).iter().count()
|
||||
}
|
||||
|
||||
/// Return List of transactions in given block.
|
||||
pub fn uncle_views(&self) -> Vec<HeaderView> {
|
||||
self.rlp.at(2).iter().map(HeaderView::new_from_rlp).collect()
|
||||
|
3
evmjit/.gitignore
vendored
3
evmjit/.gitignore
vendored
@ -1,3 +0,0 @@
|
||||
target
|
||||
Cargo.lock
|
||||
*.swp
|
@ -570,7 +570,7 @@ function run_installer()
|
||||
sudo apt-add-repository -y ppa:ethcore/ethcore
|
||||
sudo apt-get -f -y install
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -qq -y librocksdb-dev
|
||||
sudo apt-get install -qq -y librocksdb-dev librocksdb
|
||||
}
|
||||
|
||||
function linux_rocksdb_installer()
|
||||
@ -688,7 +688,7 @@ function run_installer()
|
||||
info "- Run tests with:"
|
||||
info " ${b}cargo test --release --features ethcore/json-tests -p ethcore${reset}"
|
||||
info "- Install the client with:"
|
||||
info " ${b}sudo cp parity/target/release/parity${reset}"
|
||||
info " ${b}sudo cp parity/target/release/parity${reset} /usr/local/bin"
|
||||
echo
|
||||
}
|
||||
|
||||
|
@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
PARITY_DEB_URL=https://github.com/ethcore/parity/releases/download/beta-0.9/parity_0.9.0-0_amd64.deb
|
||||
PARITY_DEB_URL=https://github.com/ethcore/parity/releases/download/beta-0.9/parity_linux_0.9.0-0_amd64.deb
|
||||
|
||||
|
||||
function run_installer()
|
||||
@ -47,6 +47,7 @@ function run_installer()
|
||||
dim=`tput dim`
|
||||
reverse=`tput rev`
|
||||
reset=`tput sgr0`
|
||||
n=$'\n'
|
||||
|
||||
|
||||
function head() {
|
||||
@ -94,13 +95,19 @@ function run_installer()
|
||||
####### Setup methods
|
||||
|
||||
function wait_for_user() {
|
||||
if [[ $( ask_user "$1" ) == false ]]; then
|
||||
abort_install "${red}==>${reset} Process stopped by user. To resume the install run the one-liner command again."
|
||||
fi
|
||||
}
|
||||
|
||||
function ask_user() {
|
||||
while :
|
||||
do
|
||||
read -p "${blue}==>${reset} $1 [Y/n] " imp
|
||||
case $imp in
|
||||
[yY] ) return 0; break ;;
|
||||
'' ) echo; break ;;
|
||||
[nN] ) return 1 ;;
|
||||
[yY] ) echo true; break ;;
|
||||
'' ) echo true; break ;;
|
||||
[nN] ) echo false; break ;;
|
||||
* ) echo "Unrecognized option provided. Please provide either 'Y' or 'N'";
|
||||
esac
|
||||
done
|
||||
@ -119,6 +126,14 @@ function run_installer()
|
||||
echo "\$ $@"; "$@"
|
||||
}
|
||||
|
||||
function sudo() {
|
||||
if $isSudo; then
|
||||
`which sudo` "$@"
|
||||
else
|
||||
"$@"
|
||||
fi
|
||||
}
|
||||
|
||||
function detectOS() {
|
||||
if [[ "$OSTYPE" == "linux-gnu" ]]
|
||||
then
|
||||
@ -130,7 +145,7 @@ function run_installer()
|
||||
get_osx_dependencies
|
||||
else
|
||||
OS_TYPE="win"
|
||||
abortInstall "${red}==>${reset} ${b}OS not supported:${reset} parity one-liner currently support OS X and Linux.\nFor instructions on installing parity on other platforms please visit ${u}${blue}http://ethcore.io/${reset}"
|
||||
abortInstall "${red}==>${reset} ${b}OS not supported:${reset} parity one-liner currently support OS X and Linux.${n}For instructions on installing parity on other platforms please visit ${u}${blue}http://ethcore.io/${reset}"
|
||||
fi
|
||||
|
||||
echo
|
||||
@ -184,8 +199,8 @@ function run_installer()
|
||||
fi
|
||||
fi
|
||||
|
||||
errorMessages+="${red}==>${reset} ${b}Mac OS version too old:${reset} eth requires OS X version ${red}$OSX_REQUIERED_VERSION${reset} at least in order to run.\n"
|
||||
errorMessages+=" Please update the OS and reload the install process.\n"
|
||||
errorMessages+="${red}==>${reset} ${b}Mac OS version too old:${reset} eth requires OS X version ${red}$OSX_REQUIERED_VERSION${reset} at least in order to run.${n}"
|
||||
errorMessages+=" Please update the OS and reload the install process.${n}"
|
||||
}
|
||||
|
||||
function get_osx_dependencies()
|
||||
@ -201,16 +216,19 @@ function run_installer()
|
||||
source /etc/lsb-release
|
||||
|
||||
if [[ $DISTRIB_ID == "Ubuntu" ]]; then
|
||||
if [[ $DISTRIB_RELEASE == "14.04" ]]; then
|
||||
check "Ubuntu-14.04"
|
||||
isUbuntu1404=true
|
||||
if [[ $DISTRIB_RELEASE == "14.04" || $DISTRIB_RELEASE == "15.04" || $DISTRIB_RELEASE == "15.10" ]]; then
|
||||
check "Ubuntu"
|
||||
isUbuntu=true
|
||||
else
|
||||
check "Ubuntu, but not 14.04"
|
||||
isUbuntu1404=false
|
||||
check "Ubuntu, but version not supported"
|
||||
|
||||
errorMessages+="${red}==>${reset} ${b}Ubuntu version not supported:${reset} This script requires Ubuntu version 14.04, 15.04 or 15.10.${n}"
|
||||
errorMessages+=" Please either upgrade your Ubuntu installation or using the get-deps.ethcore.io script instead, which can help you build Parity.${n}"
|
||||
fi
|
||||
else
|
||||
check "Ubuntu not found"
|
||||
isUbuntu1404=false
|
||||
errorMessages+="${red}==>${reset} ${b}Linux distribution not supported:${reset} This script requires Ubuntu version 14.04, 15.04 or 15.10.${n}"
|
||||
errorMessages+=" Please either use this on an Ubuntu installation or instead use the get-deps.ethcore.io script, which can help you build Parity.${n}"
|
||||
fi
|
||||
}
|
||||
|
||||
@ -218,15 +236,12 @@ function run_installer()
|
||||
{
|
||||
linux_version
|
||||
|
||||
find_multirust
|
||||
find_rocksdb
|
||||
|
||||
find_curl
|
||||
find_git
|
||||
find_make
|
||||
find_gcc
|
||||
|
||||
find_apt
|
||||
find_sudo
|
||||
}
|
||||
|
||||
function find_brew()
|
||||
@ -242,10 +257,10 @@ function run_installer()
|
||||
uncheck "Homebrew is missing"
|
||||
isBrew=false
|
||||
|
||||
INSTALL_FILES+="${blue}${dim}==> Homebrew:${reset}\n"
|
||||
INSTALL_FILES+=" ${blue}${dim}➜${reset} $HOMEBREW_PREFIX/bin/brew\n"
|
||||
INSTALL_FILES+=" ${blue}${dim}➜${reset} $HOMEBREW_PREFIX/Library\n"
|
||||
INSTALL_FILES+=" ${blue}${dim}➜${reset} $HOMEBREW_PREFIX/share/man/man1/brew.1\n"
|
||||
INSTALL_FILES+="${blue}${dim}==> Homebrew:${reset}${n}"
|
||||
INSTALL_FILES+=" ${blue}${dim}➜${reset} $HOMEBREW_PREFIX/bin/brew${n}"
|
||||
INSTALL_FILES+=" ${blue}${dim}➜${reset} $HOMEBREW_PREFIX/Library${n}"
|
||||
INSTALL_FILES+=" ${blue}${dim}➜${reset} $HOMEBREW_PREFIX/share/man/man1/brew.1${n}"
|
||||
fi
|
||||
|
||||
depCount=$((depCount+1))
|
||||
@ -267,8 +282,54 @@ function run_installer()
|
||||
uncheck "Ruby is missing 🔥"
|
||||
isRuby=false
|
||||
canContinue=false
|
||||
errorMessages+="${red}==>${reset} ${b}Couldn't find Ruby:${reset} Brew requires Ruby which could not be found.\n"
|
||||
errorMessages+=" Please install Ruby using these instructions ${u}${blue}https://www.ruby-lang.org/en/documentation/installation/${reset}.\n"
|
||||
errorMessages+="${red}==>${reset} ${b}Couldn't find Ruby:${reset} Brew requires Ruby which could not be found.${n}"
|
||||
errorMessages+=" Please install Ruby using these instructions ${u}${blue}https://www.ruby-lang.org/en/documentation/installation/${reset}.${n}"
|
||||
fi
|
||||
}
|
||||
|
||||
function find_sudo()
|
||||
{
|
||||
depCount=$((depCount+1))
|
||||
SUDO_PATH=`which sudo 2>/dev/null`
|
||||
|
||||
if [[ -f $SUDO_PATH ]]
|
||||
then
|
||||
depFound=$((depFound+1))
|
||||
check "sudo"
|
||||
isSudo=true
|
||||
else
|
||||
uncheck "sudo is missing"
|
||||
if [[ `whoami` == "root" ]]; then
|
||||
if [[ $isApt == false && $isMultirust == false ]]; then
|
||||
canContinue=false
|
||||
errorMessages+="${red}==>${reset} ${b}Couldn't find sudo:${reset} Sudo is needed for the installation of multirust.${n}"
|
||||
errorMessages+=" Please ensure you have sudo installed or alternatively install multirust manually.${n}"
|
||||
fi
|
||||
|
||||
isSudo=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tsudo${n}"
|
||||
else
|
||||
canContinue=false
|
||||
errorMessages+="${red}==>${reset} ${b}Couldn't find sudo:${reset} Root access is needed for parts of this installation.${n}"
|
||||
errorMessages+=" Please ensure you have sudo installed or alternatively run this script as root.${n}"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
function find_curl()
|
||||
{
|
||||
depCount=$((depCount+1))
|
||||
CURL_PATH=`which curl 2>/dev/null`
|
||||
|
||||
if [[ -f $CURL_PATH ]]
|
||||
then
|
||||
depFound=$((depFound+1))
|
||||
check "curl"
|
||||
isCurl=true
|
||||
else
|
||||
uncheck "curl is missing"
|
||||
isCurl=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tcurl${n}"
|
||||
fi
|
||||
}
|
||||
|
||||
@ -282,33 +343,7 @@ function run_installer()
|
||||
else
|
||||
uncheck "librocksdb is missing"
|
||||
isRocksDB=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tlibrocksdb\n"
|
||||
fi
|
||||
}
|
||||
|
||||
function find_multirust()
|
||||
{
|
||||
depCount=$((depCount+2))
|
||||
MULTIRUST_PATH=`which multirust 2>/dev/null`
|
||||
if [[ -f $MULTIRUST_PATH ]]; then
|
||||
depFound=$((depFound+1))
|
||||
check "multirust"
|
||||
isMultirust=true
|
||||
if [[ $(multirust show-default 2>/dev/null | grep nightly | wc -l) == 4 ]]; then
|
||||
depFound=$((depFound+1))
|
||||
check "rust nightly"
|
||||
isMultirustNightly=true
|
||||
else
|
||||
uncheck "rust is not nightly"
|
||||
isMultirustNightly=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tmultirust -> rust nightly\n"
|
||||
fi
|
||||
else
|
||||
uncheck "multirust is missing"
|
||||
uncheck "rust nightly is missing"
|
||||
isMultirust=false
|
||||
isMultirustNightly=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tmultirust\n"
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tlibrocksdb${n}"
|
||||
fi
|
||||
}
|
||||
|
||||
@ -327,112 +362,12 @@ function run_installer()
|
||||
uncheck "apt-get is missing"
|
||||
isApt=false
|
||||
|
||||
if [[ $isGCC == false || $isGit == false || $isMake == false || $isCurl == false ]]; then
|
||||
canContinue=false
|
||||
errorMessages+="${red}==>${reset} ${b}Couldn't find apt-get:${reset} We can only use apt-get in order to grab our dependencies.\n"
|
||||
errorMessages+=" Please switch to a distribution such as Debian or Ubuntu or manually install the missing packages.\n"
|
||||
fi
|
||||
canContinue=false
|
||||
errorMessages+="${red}==>${reset} ${b}Couldn't find apt-get:${reset} We can only use apt-get in order to grab our dependencies.${n}"
|
||||
errorMessages+=" Please switch to a distribution such as Debian or Ubuntu or manually install the missing packages.${n}"
|
||||
fi
|
||||
}
|
||||
|
||||
function find_gcc()
|
||||
{
|
||||
depCount=$((depCount+1))
|
||||
GCC_PATH=`which g++ 2>/dev/null`
|
||||
|
||||
if [[ -f $GCC_PATH ]]
|
||||
then
|
||||
depFound=$((depFound+1))
|
||||
check "g++"
|
||||
isGCC=true
|
||||
else
|
||||
uncheck "g++ is missing"
|
||||
isGCC=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tg++\n"
|
||||
fi
|
||||
}
|
||||
|
||||
function find_git()
|
||||
{
|
||||
depCount=$((depCount+1))
|
||||
GIT_PATH=`which git 2>/dev/null`
|
||||
|
||||
if [[ -f $GIT_PATH ]]
|
||||
then
|
||||
depFound=$((depFound+1))
|
||||
check "git"
|
||||
isGit=true
|
||||
else
|
||||
uncheck "git is missing"
|
||||
isGit=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tgit\n"
|
||||
fi
|
||||
}
|
||||
|
||||
function find_make()
|
||||
{
|
||||
depCount=$((depCount+1))
|
||||
MAKE_PATH=`which make 2>/dev/null`
|
||||
|
||||
if [[ -f $MAKE_PATH ]]
|
||||
then
|
||||
depFound=$((depFound+1))
|
||||
check "make"
|
||||
isMake=true
|
||||
else
|
||||
uncheck "make is missing"
|
||||
isMake=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tmake\n"
|
||||
fi
|
||||
}
|
||||
|
||||
function find_curl()
|
||||
{
|
||||
depCount=$((depCount+1))
|
||||
CURL_PATH=`which curl 2>/dev/null`
|
||||
|
||||
if [[ -f $CURL_PATH ]]
|
||||
then
|
||||
depFound=$((depFound+1))
|
||||
check "curl"
|
||||
isCurl=true
|
||||
else
|
||||
uncheck "curl is missing"
|
||||
isCurl=false
|
||||
INSTALL_FILES+="${blue}${dim}==>${reset}\tcurl\n"
|
||||
fi
|
||||
}
|
||||
|
||||
function ubuntu1404_rocksdb_installer()
|
||||
{
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -qq -y software-properties-common
|
||||
sudo apt-add-repository -y ppa:giskou/librocksdb
|
||||
sudo apt-get -f -y install
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -qq -y librocksdb
|
||||
}
|
||||
|
||||
function linux_rocksdb_installer()
|
||||
{
|
||||
if [[ $isUbuntu1404 == true ]]; then
|
||||
ubuntu1404_rocksdb_installer
|
||||
else
|
||||
oldpwd=`pwd`
|
||||
cd /tmp
|
||||
exe git clone --branch v4.2 --depth=1 https://github.com/facebook/rocksdb.git
|
||||
cd rocksdb
|
||||
exe make shared_lib
|
||||
sudo cp -a librocksdb.so* /usr/lib
|
||||
sudo ldconfig
|
||||
cd /tmp
|
||||
rm -rf /tmp/rocksdb
|
||||
cd $oldpwd
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
|
||||
function verify_installation()
|
||||
{
|
||||
ETH_PATH=`which parity 2>/dev/null`
|
||||
@ -451,14 +386,10 @@ function run_installer()
|
||||
info "Verifying installation"
|
||||
|
||||
if [[ $OS_TYPE == "linux" ]]; then
|
||||
find_curl
|
||||
find_git
|
||||
find_make
|
||||
find_gcc
|
||||
find_rocksdb
|
||||
find_multirust
|
||||
find_apt
|
||||
|
||||
if [[ $isCurl == false || $isGit == false || $isMake == false || $isGCC == false || $isRocksDB == false || $isMultirustNightly == false ]]; then
|
||||
if [[ $isRocksDB == false || $isApt == false ]]; then
|
||||
abortInstall
|
||||
fi
|
||||
fi
|
||||
@ -466,43 +397,29 @@ function run_installer()
|
||||
|
||||
function linux_deps_installer()
|
||||
{
|
||||
if [[ $isGCC == false || $isGit == false || $isMake == false || $isCurl == false ]]; then
|
||||
info "Installing build dependencies..."
|
||||
if [[ $isRocksDB == false || $isCurl == false ]]; then
|
||||
info "Preparing apt..."
|
||||
sudo apt-get update -qq
|
||||
if [[ $isGit == false ]]; then
|
||||
sudo apt-get install -q -y git
|
||||
fi
|
||||
if [[ $isGCC == false ]]; then
|
||||
sudo apt-get install -q -y g++ gcc
|
||||
fi
|
||||
if [[ $isMake == false ]]; then
|
||||
sudo apt-get install -q -y make
|
||||
fi
|
||||
if [[ $isCurl == false ]]; then
|
||||
sudo apt-get install -q -y curl
|
||||
fi
|
||||
echo
|
||||
fi
|
||||
|
||||
if [[ $isRocksDB == false ]]; then
|
||||
info "Installing rocksdb..."
|
||||
linux_rocksdb_installer
|
||||
|
||||
sudo apt-get install -qq -y software-properties-common
|
||||
sudo apt-add-repository -y ppa:ethcore/ethcore
|
||||
sudo apt-get -f -y install
|
||||
sudo apt-get update -qq
|
||||
sudo apt-get install -qq -y librocksdb
|
||||
|
||||
echo
|
||||
fi
|
||||
|
||||
if [[ $isMultirust == false ]]; then
|
||||
info "Installing multirust..."
|
||||
curl -sf https://raw.githubusercontent.com/brson/multirust/master/blastoff.sh | sudo sh -s -- --yes
|
||||
if [[ $isCurl == false ]]; then
|
||||
info "Installing curl..."
|
||||
sudo apt-get install -q -y curl
|
||||
echo
|
||||
fi
|
||||
|
||||
if [[ $isMultirustNightly == false ]]; then
|
||||
info "Installing rust nightly..."
|
||||
sudo multirust update nightly
|
||||
sudo multirust default nightly
|
||||
echo
|
||||
fi
|
||||
|
||||
}
|
||||
|
||||
function linux_installer()
|
||||
@ -513,7 +430,7 @@ function run_installer()
|
||||
info "Installing parity"
|
||||
file=/tmp/parity.deb
|
||||
|
||||
wget $PARITY_DEB_URL -qO $file
|
||||
curl -L $PARITY_DEB_URL > $file
|
||||
sudo dpkg -i $file
|
||||
rm $file
|
||||
}
|
||||
@ -644,8 +561,8 @@ EOL
|
||||
{
|
||||
echo
|
||||
successHeading "All done"
|
||||
# head "Next steps"
|
||||
# info "Run ${cyan}\`\`${reset} to get started.${reset}"
|
||||
head "Next steps"
|
||||
info "Run ${cyan}\`parity -j\`${reset} to start the Parity Ethereum client.${reset}"
|
||||
echo
|
||||
exit 0
|
||||
}
|
||||
@ -662,10 +579,8 @@ EOL
|
||||
|
||||
#DEBUG
|
||||
|
||||
|
||||
head "${b}OK,${reset} let's install Parity now!"
|
||||
if wait_for_user "${b}Last chance!${reset} Sure you want to install this software?"
|
||||
then
|
||||
if [[ $(ask_user "${b}Last chance!${reset} Sure you want to install this software?") == true ]]; then
|
||||
install
|
||||
echo
|
||||
echo
|
||||
@ -673,19 +588,12 @@ EOL
|
||||
finish
|
||||
fi
|
||||
|
||||
|
||||
|
||||
if [[ $OS_TYPE == "linux" ]]
|
||||
then
|
||||
echo "Netstats:"
|
||||
head "Would you like to install and configure a netstats client?"
|
||||
if wait_for_user "${b}OK,${reset} let's go!"
|
||||
then
|
||||
if [[ $OS_TYPE == "linux" && $DISTRIB_ID == "Ubuntu" ]]; then
|
||||
if [[ $(ask_user "${b}Netstats${reset} Would you like to download, install and configure a Netstats client?${n}${b}${red}WARNING: ${reset}${red}This will need a secret and reconfigure any existing node/NPM installation you have.${reset} ") == true ]]; then
|
||||
install_netstats
|
||||
fi
|
||||
fi
|
||||
|
||||
|
||||
# Display goodbye message
|
||||
finish
|
||||
}
|
||||
|
169
parity/main.rs
169
parity/main.rs
@ -29,38 +29,56 @@ extern crate log as rlog;
|
||||
extern crate env_logger;
|
||||
extern crate ctrlc;
|
||||
extern crate fdlimit;
|
||||
extern crate target_info;
|
||||
|
||||
#[cfg(feature = "rpc")]
|
||||
extern crate ethcore_rpc as rpc;
|
||||
|
||||
use std::net::{SocketAddr};
|
||||
use std::env;
|
||||
use rlog::{LogLevelFilter};
|
||||
use env_logger::LogBuilder;
|
||||
use ctrlc::CtrlC;
|
||||
use util::*;
|
||||
use util::panics::MayPanic;
|
||||
use ethcore::spec::*;
|
||||
use ethcore::client::*;
|
||||
use ethcore::service::{ClientService, NetSyncMessage};
|
||||
use ethcore::ethereum;
|
||||
use ethcore::blockchain::CacheSize;
|
||||
use ethsync::EthSync;
|
||||
use target_info::Target;
|
||||
|
||||
docopt!(Args derive Debug, "
|
||||
Parity. Ethereum Client.
|
||||
By Wood/Paronyan/Kotewicz/Drwięga/Volf.
|
||||
Copyright 2015, 2016 Ethcore (UK) Limited
|
||||
|
||||
Usage:
|
||||
parity [options]
|
||||
parity [options] <enode>...
|
||||
parity [options] [ --no-bootstrap | <enode>... ]
|
||||
|
||||
Options:
|
||||
-l --logging LOGGING Specify the logging level.
|
||||
-j --jsonrpc Enable the JSON-RPC API sever.
|
||||
--jsonrpc-url URL Specify URL for JSON-RPC API server [default: 127.0.0.1:8545].
|
||||
--chain CHAIN Specify the blockchain type. CHAIN may be either a JSON chain specification file
|
||||
or frontier, mainnet, morden, or testnet [default: frontier].
|
||||
-d --db-path PATH Specify the database & configuration directory path [default: $HOME/.parity]
|
||||
|
||||
--no-bootstrap Don't bother trying to connect to any nodes initially.
|
||||
--listen-address URL Specify the IP/port on which to listen for peers [default: 0.0.0.0:30304].
|
||||
--public-address URL Specify the IP/port on which peers may connect [default: 0.0.0.0:30304].
|
||||
--address URL Equivalent to --listen-address URL --public-address URL.
|
||||
--upnp Use UPnP to try to figure out the correct network settings.
|
||||
--node-key KEY Specify node secret key as hex string.
|
||||
|
||||
--cache-pref-size BYTES Specify the prefered size of the blockchain cache in bytes [default: 16384].
|
||||
--cache-max-size BYTES Specify the maximum size of the blockchain cache in bytes [default: 262144].
|
||||
|
||||
-j --jsonrpc Enable the JSON-RPC API sever.
|
||||
--jsonrpc-url URL Specify URL for JSON-RPC API server [default: 127.0.0.1:8545].
|
||||
|
||||
-l --logging LOGGING Specify the logging level.
|
||||
-v --version Show information about version.
|
||||
-h --help Show this screen.
|
||||
", flag_cache_pref_size: usize, flag_cache_max_size: usize);
|
||||
", flag_cache_pref_size: usize, flag_cache_max_size: usize, flag_address: Option<String>, flag_node_key: Option<String>);
|
||||
|
||||
fn setup_log(init: &str) {
|
||||
let mut builder = LogBuilder::new();
|
||||
@ -75,14 +93,13 @@ fn setup_log(init: &str) {
|
||||
builder.init().unwrap();
|
||||
}
|
||||
|
||||
|
||||
#[cfg(feature = "rpc")]
|
||||
fn setup_rpc_server(client: Arc<Client>, sync: Arc<EthSync>, url: &str) {
|
||||
use rpc::v1::*;
|
||||
|
||||
let mut server = rpc::HttpServer::new(1);
|
||||
server.add_delegate(Web3Client::new().to_delegate());
|
||||
server.add_delegate(EthClient::new(client.clone()).to_delegate());
|
||||
server.add_delegate(EthClient::new(client.clone(), sync.clone()).to_delegate());
|
||||
server.add_delegate(EthFilterClient::new(client).to_delegate());
|
||||
server.add_delegate(NetClient::new(sync).to_delegate());
|
||||
server.start_async(url);
|
||||
@ -92,36 +109,130 @@ fn setup_rpc_server(client: Arc<Client>, sync: Arc<EthSync>, url: &str) {
|
||||
fn setup_rpc_server(_client: Arc<Client>, _sync: Arc<EthSync>, _url: &str) {
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let args: Args = Args::docopt().decode().unwrap_or_else(|e| e.exit());
|
||||
fn print_version() {
|
||||
println!("\
|
||||
Parity version {} ({}-{}-{})
|
||||
Copyright 2015, 2016 Ethcore (UK) Limited
|
||||
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>.
|
||||
This is free software: you are free to change and redistribute it.
|
||||
There is NO WARRANTY, to the extent permitted by law.
|
||||
|
||||
setup_log(&args.flag_logging);
|
||||
unsafe { ::fdlimit::raise_fd_limit(); }
|
||||
By Wood/Paronyan/Kotewicz/Drwięga/Volf.\
|
||||
", env!("CARGO_PKG_VERSION"), Target::arch(), Target::env(), Target::os());
|
||||
}
|
||||
|
||||
let spec = ethereum::new_frontier();
|
||||
let init_nodes = match args.arg_enode.len() {
|
||||
0 => spec.nodes().clone(),
|
||||
_ => args.arg_enode.clone(),
|
||||
};
|
||||
let mut net_settings = NetworkConfiguration::new();
|
||||
net_settings.boot_nodes = init_nodes;
|
||||
let mut service = ClientService::start(spec, net_settings).unwrap();
|
||||
let client = service.client().clone();
|
||||
client.configure_cache(args.flag_cache_pref_size, args.flag_cache_max_size);
|
||||
let sync = EthSync::register(service.network(), client);
|
||||
if args.flag_jsonrpc {
|
||||
setup_rpc_server(service.client(), sync.clone(), &args.flag_jsonrpc_url);
|
||||
struct Configuration {
|
||||
args: Args
|
||||
}
|
||||
|
||||
impl Configuration {
|
||||
fn parse() -> Self {
|
||||
Configuration {
|
||||
args: Args::docopt().decode().unwrap_or_else(|e| e.exit())
|
||||
}
|
||||
}
|
||||
let io_handler = Arc::new(ClientIoHandler { client: service.client(), info: Default::default(), sync: sync });
|
||||
service.io().register_handler(io_handler).expect("Error registering IO handler");
|
||||
|
||||
fn path(&self) -> String {
|
||||
self.args.flag_db_path.replace("$HOME", env::home_dir().unwrap().to_str().unwrap())
|
||||
}
|
||||
|
||||
fn spec(&self) -> Spec {
|
||||
match self.args.flag_chain.as_ref() {
|
||||
"frontier" | "mainnet" => ethereum::new_frontier(),
|
||||
"morden" | "testnet" => ethereum::new_morden(),
|
||||
"olympic" => ethereum::new_olympic(),
|
||||
f => Spec::from_json_utf8(contents(f).expect("Couldn't read chain specification file. Sure it exists?").as_ref()),
|
||||
}
|
||||
}
|
||||
|
||||
fn init_nodes(&self, spec: &Spec) -> Vec<String> {
|
||||
if self.args.flag_no_bootstrap { Vec::new() } else {
|
||||
match self.args.arg_enode.len() {
|
||||
0 => spec.nodes().clone(),
|
||||
_ => self.args.arg_enode.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn net_addresses(&self) -> (SocketAddr, SocketAddr) {
|
||||
let listen_address;
|
||||
let public_address;
|
||||
|
||||
match self.args.flag_address {
|
||||
None => {
|
||||
listen_address = SocketAddr::from_str(self.args.flag_listen_address.as_ref()).expect("Invalid listen address given with --listen-address");
|
||||
public_address = SocketAddr::from_str(self.args.flag_public_address.as_ref()).expect("Invalid public address given with --public-address");
|
||||
}
|
||||
Some(ref a) => {
|
||||
public_address = SocketAddr::from_str(a.as_ref()).expect("Invalid listen/public address given with --address");
|
||||
listen_address = public_address;
|
||||
}
|
||||
};
|
||||
|
||||
(listen_address, public_address)
|
||||
}
|
||||
}
|
||||
|
||||
fn wait_for_exit(client_service: &ClientService) {
|
||||
let exit = Arc::new(Condvar::new());
|
||||
// Handle possible exits
|
||||
let e = exit.clone();
|
||||
CtrlC::set_handler(move || { e.notify_all(); });
|
||||
let e = exit.clone();
|
||||
client_service.on_panic(move |_reason| { e.notify_all(); });
|
||||
// Wait for signal
|
||||
let mutex = Mutex::new(());
|
||||
let _ = exit.wait(mutex.lock().unwrap()).unwrap();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let conf = Configuration::parse();
|
||||
if conf.args.flag_version {
|
||||
print_version();
|
||||
return;
|
||||
}
|
||||
|
||||
let spec = conf.spec();
|
||||
|
||||
// Setup logging
|
||||
setup_log(&conf.args.flag_logging);
|
||||
// Raise fdlimit
|
||||
unsafe { ::fdlimit::raise_fd_limit(); }
|
||||
|
||||
// Configure network
|
||||
let mut net_settings = NetworkConfiguration::new();
|
||||
net_settings.nat_enabled = conf.args.flag_upnp;
|
||||
net_settings.boot_nodes = conf.init_nodes(&spec);
|
||||
let (listen, public) = conf.net_addresses();
|
||||
net_settings.listen_address = listen;
|
||||
net_settings.public_address = public;
|
||||
net_settings.use_secret = conf.args.flag_node_key.as_ref().map(|s| Secret::from_str(&s).expect("Invalid key string"));
|
||||
|
||||
// Build client
|
||||
let mut service = ClientService::start(spec, net_settings, &Path::new(&conf.path())).unwrap();
|
||||
let client = service.client().clone();
|
||||
client.configure_cache(conf.args.flag_cache_pref_size, conf.args.flag_cache_max_size);
|
||||
|
||||
// Sync
|
||||
let sync = EthSync::register(service.network(), client);
|
||||
|
||||
// Setup rpc
|
||||
if conf.args.flag_jsonrpc {
|
||||
setup_rpc_server(service.client(), sync.clone(), &conf.args.flag_jsonrpc_url);
|
||||
}
|
||||
|
||||
// Register IO handler
|
||||
let io_handler = Arc::new(ClientIoHandler {
|
||||
client: service.client(),
|
||||
info: Default::default(),
|
||||
sync: sync
|
||||
});
|
||||
service.io().register_handler(io_handler).expect("Error registering IO handler");
|
||||
|
||||
// Handle exit
|
||||
wait_for_exit(&service);
|
||||
}
|
||||
|
||||
struct Informant {
|
||||
chain_info: RwLock<Option<BlockChainInfo>>,
|
||||
cache_info: RwLock<Option<CacheSize>>,
|
||||
@ -150,7 +261,7 @@ impl Informant {
|
||||
let sync_info = sync.status();
|
||||
|
||||
if let (_, &Some(ref last_cache_info), &Some(ref last_report)) = (self.chain_info.read().unwrap().deref(), self.cache_info.read().unwrap().deref(), self.report.read().unwrap().deref()) {
|
||||
println!("[ {} {} ]---[ {} blk/s | {} tx/s | {} gas/s //··· {}/{} peers, {} downloaded, {}+{} queued ···// {} ({}) bl {} ({}) ex ]",
|
||||
println!("[ #{} {} ]---[ {} blk/s | {} tx/s | {} gas/s //··· {}/{} peers, #{}, {}+{} queued ···// {} ({}) bl {} ({}) ex ]",
|
||||
chain_info.best_block_number,
|
||||
chain_info.best_block_hash,
|
||||
(report.blocks_imported - last_report.blocks_imported) / dur,
|
||||
@ -159,7 +270,7 @@ impl Informant {
|
||||
|
||||
sync_info.num_active_peers,
|
||||
sync_info.num_peers,
|
||||
sync_info.blocks_received,
|
||||
sync_info.last_imported_block_number.unwrap_or(chain_info.best_block_number),
|
||||
queue_info.unverified_queue_size,
|
||||
queue_info.verified_queue_size,
|
||||
|
||||
|
@ -9,13 +9,13 @@ authors = ["Ethcore <admin@ethcore.io"]
|
||||
|
||||
[dependencies]
|
||||
serde = "0.6.7"
|
||||
serde_macros = "0.6.10"
|
||||
serde_macros = "0.6.13"
|
||||
serde_json = "0.6.0"
|
||||
jsonrpc-core = "1.1"
|
||||
jsonrpc-http-server = "1.1"
|
||||
jsonrpc-core = "1.1.2"
|
||||
jsonrpc-http-server = "1.1.2"
|
||||
ethcore-util = { path = "../util" }
|
||||
ethcore = { path = "../ethcore" }
|
||||
ethsync = { path = "../sync" }
|
||||
clippy = "0.0.37"
|
||||
clippy = "0.0.41"
|
||||
target_info = "0.1.0"
|
||||
|
||||
rustc-serialize = "0.3"
|
||||
|
@ -20,6 +20,7 @@
|
||||
#![plugin(serde_macros)]
|
||||
#![plugin(clippy)]
|
||||
|
||||
extern crate rustc_serialize;
|
||||
extern crate target_info;
|
||||
extern crate serde;
|
||||
extern crate serde_json;
|
||||
|
@ -16,25 +16,73 @@
|
||||
|
||||
//! Eth rpc implementation.
|
||||
use std::sync::Arc;
|
||||
use ethsync::{EthSync, SyncState};
|
||||
use jsonrpc_core::*;
|
||||
use util::hash::*;
|
||||
use util::uint::*;
|
||||
use util::sha3::*;
|
||||
use ethcore::client::*;
|
||||
use ethcore::views::*;
|
||||
use ethcore::ethereum::denominations::shannon;
|
||||
use v1::traits::{Eth, EthFilter};
|
||||
use v1::types::Block;
|
||||
use v1::types::{Block, BlockTransactions, BlockNumber, Bytes, SyncStatus, SyncInfo, Transaction, OptionalValue, Index};
|
||||
|
||||
/// Eth rpc implementation.
|
||||
pub struct EthClient {
|
||||
client: Arc<Client>,
|
||||
sync: Arc<EthSync>
|
||||
}
|
||||
|
||||
impl EthClient {
|
||||
/// Creates new EthClient.
|
||||
pub fn new(client: Arc<Client>) -> Self {
|
||||
pub fn new(client: Arc<Client>, sync: Arc<EthSync>) -> Self {
|
||||
EthClient {
|
||||
client: client
|
||||
client: client,
|
||||
sync: sync
|
||||
}
|
||||
}
|
||||
|
||||
fn block(&self, id: BlockId, include_txs: bool) -> Result<Value, Error> {
|
||||
match (self.client.block(id.clone()), self.client.block_total_difficulty(id)) {
|
||||
(Some(bytes), Some(total_difficulty)) => {
|
||||
let block_view = BlockView::new(&bytes);
|
||||
let view = block_view.header_view();
|
||||
let block = Block {
|
||||
hash: OptionalValue::Value(view.sha3()),
|
||||
parent_hash: view.parent_hash(),
|
||||
uncles_hash: view.uncles_hash(),
|
||||
author: view.author(),
|
||||
miner: view.author(),
|
||||
state_root: view.state_root(),
|
||||
transactions_root: view.transactions_root(),
|
||||
receipts_root: view.receipts_root(),
|
||||
number: OptionalValue::Value(U256::from(view.number())),
|
||||
gas_used: view.gas_used(),
|
||||
gas_limit: view.gas_limit(),
|
||||
logs_bloom: view.log_bloom(),
|
||||
timestamp: U256::from(view.timestamp()),
|
||||
difficulty: view.difficulty(),
|
||||
total_difficulty: total_difficulty,
|
||||
uncles: vec![],
|
||||
transactions: {
|
||||
if include_txs {
|
||||
BlockTransactions::Full(block_view.localized_transactions().into_iter().map(From::from).collect())
|
||||
} else {
|
||||
BlockTransactions::Hashes(block_view.transaction_hashes())
|
||||
}
|
||||
},
|
||||
extra_data: Bytes::default()
|
||||
};
|
||||
to_value(&block)
|
||||
},
|
||||
_ => Ok(Value::Null)
|
||||
}
|
||||
}
|
||||
|
||||
fn transaction(&self, id: TransactionId) -> Result<Value, Error> {
|
||||
match self.client.transaction(id) {
|
||||
Some(t) => to_value(&Transaction::from(t)),
|
||||
None => Ok(Value::Null)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -42,11 +90,30 @@ impl EthClient {
|
||||
impl Eth for EthClient {
|
||||
fn protocol_version(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => Ok(Value::U64(63)),
|
||||
Params::None => to_value(&U256::from(self.sync.status().protocol_version)),
|
||||
_ => Err(Error::invalid_params())
|
||||
}
|
||||
}
|
||||
|
||||
fn syncing(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => {
|
||||
let status = self.sync.status();
|
||||
let res = match status.state {
|
||||
SyncState::NotSynced | SyncState::Idle => SyncStatus::None,
|
||||
SyncState::Waiting | SyncState::Blocks | SyncState::NewBlocks => SyncStatus::Info(SyncInfo {
|
||||
starting_block: U256::from(status.start_block_number),
|
||||
current_block: U256::from(self.client.chain_info().best_block_number),
|
||||
highest_block: U256::from(status.highest_block_number.unwrap_or(status.start_block_number))
|
||||
})
|
||||
};
|
||||
to_value(&res)
|
||||
}
|
||||
_ => Err(Error::invalid_params())
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: do not hardcode author.
|
||||
fn author(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => to_value(&Address::new()),
|
||||
@ -54,20 +121,7 @@ impl Eth for EthClient {
|
||||
}
|
||||
}
|
||||
|
||||
fn gas_price(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => Ok(Value::U64(0)),
|
||||
_ => Err(Error::invalid_params())
|
||||
}
|
||||
}
|
||||
|
||||
fn block_number(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => Ok(Value::U64(self.client.chain_info().best_block_number)),
|
||||
_ => Err(Error::invalid_params())
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: return real value of mining once it's implemented.
|
||||
fn is_mining(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => Ok(Value::Bool(false)),
|
||||
@ -75,50 +129,77 @@ impl Eth for EthClient {
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: return real hashrate once we have mining
|
||||
fn hashrate(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => Ok(Value::U64(0)),
|
||||
Params::None => to_value(&U256::zero()),
|
||||
_ => Err(Error::invalid_params())
|
||||
}
|
||||
}
|
||||
|
||||
fn block_transaction_count(&self, _: Params) -> Result<Value, Error> {
|
||||
Ok(Value::U64(0))
|
||||
}
|
||||
|
||||
fn block(&self, params: Params) -> Result<Value, Error> {
|
||||
match from_params::<(H256, bool)>(params) {
|
||||
Ok((hash, _include_txs)) => match (self.client.block_header(&hash), self.client.block_total_difficulty(&hash)) {
|
||||
(Some(bytes), Some(total_difficulty)) => {
|
||||
let view = HeaderView::new(&bytes);
|
||||
let block = Block {
|
||||
hash: view.sha3(),
|
||||
parent_hash: view.parent_hash(),
|
||||
uncles_hash: view.uncles_hash(),
|
||||
author: view.author(),
|
||||
miner: view.author(),
|
||||
state_root: view.state_root(),
|
||||
transactions_root: view.transactions_root(),
|
||||
receipts_root: view.receipts_root(),
|
||||
number: U256::from(view.number()),
|
||||
gas_used: view.gas_used(),
|
||||
gas_limit: view.gas_limit(),
|
||||
logs_bloom: view.log_bloom(),
|
||||
timestamp: U256::from(view.timestamp()),
|
||||
difficulty: view.difficulty(),
|
||||
total_difficulty: total_difficulty,
|
||||
uncles: vec![],
|
||||
transactions: vec![]
|
||||
};
|
||||
to_value(&block)
|
||||
},
|
||||
_ => Ok(Value::Null)
|
||||
},
|
||||
Err(err) => Err(err)
|
||||
fn gas_price(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => to_value(&(shannon() * U256::from(50))),
|
||||
_ => Err(Error::invalid_params())
|
||||
}
|
||||
}
|
||||
|
||||
fn block_number(&self, params: Params) -> Result<Value, Error> {
|
||||
match params {
|
||||
Params::None => to_value(&U256::from(self.client.chain_info().best_block_number)),
|
||||
_ => Err(Error::invalid_params())
|
||||
}
|
||||
}
|
||||
|
||||
fn block_transaction_count(&self, params: Params) -> Result<Value, Error> {
|
||||
from_params::<(H256,)>(params)
|
||||
.and_then(|(hash,)| match self.client.block(BlockId::Hash(hash)) {
|
||||
Some(bytes) => to_value(&BlockView::new(&bytes).transactions_count()),
|
||||
None => Ok(Value::Null)
|
||||
})
|
||||
}
|
||||
|
||||
fn block_uncles_count(&self, params: Params) -> Result<Value, Error> {
|
||||
from_params::<(H256,)>(params)
|
||||
.and_then(|(hash,)| match self.client.block(BlockId::Hash(hash)) {
|
||||
Some(bytes) => to_value(&BlockView::new(&bytes).uncles_count()),
|
||||
None => Ok(Value::Null)
|
||||
})
|
||||
}
|
||||
|
||||
// TODO: do not ignore block number param
|
||||
fn code_at(&self, params: Params) -> Result<Value, Error> {
|
||||
from_params::<(Address, BlockNumber)>(params)
|
||||
.and_then(|(address, _block_number)| to_value(&self.client.code(&address).map_or_else(Bytes::default, Bytes::new)))
|
||||
}
|
||||
|
||||
fn block_by_hash(&self, params: Params) -> Result<Value, Error> {
|
||||
from_params::<(H256, bool)>(params)
|
||||
.and_then(|(hash, include_txs)| self.block(BlockId::Hash(hash), include_txs))
|
||||
}
|
||||
|
||||
fn block_by_number(&self, params: Params) -> Result<Value, Error> {
|
||||
from_params::<(BlockNumber, bool)>(params)
|
||||
.and_then(|(number, include_txs)| self.block(number.into(), include_txs))
|
||||
}
|
||||
|
||||
fn transaction_by_hash(&self, params: Params) -> Result<Value, Error> {
|
||||
from_params::<(H256,)>(params)
|
||||
.and_then(|(hash,)| self.transaction(TransactionId::Hash(hash)))
|
||||
}
|
||||
|
||||
fn transaction_by_block_hash_and_index(&self, params: Params) -> Result<Value, Error> {
|
||||
from_params::<(H256, Index)>(params)
|
||||
.and_then(|(hash, index)| self.transaction(TransactionId::Location(BlockId::Hash(hash), index.value())))
|
||||
}
|
||||
|
||||
fn transaction_by_block_number_and_index(&self, params: Params) -> Result<Value, Error> {
|
||||
from_params::<(BlockNumber, Index)>(params)
|
||||
.and_then(|(number, index)| self.transaction(TransactionId::Location(number.into(), index.value())))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// Eth filter rpc implementation.
|
||||
pub struct EthFilterClient {
|
||||
client: Arc<Client>
|
||||
|
@ -21,6 +21,8 @@
|
||||
pub mod traits;
|
||||
mod impls;
|
||||
mod types;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
pub use self::traits::{Web3, Eth, EthFilter, Net};
|
||||
pub use self::impls::*;
|
||||
|
1
rpc/src/v1/tests/mod.rs
Normal file
1
rpc/src/v1/tests/mod.rs
Normal file
@ -0,0 +1 @@
|
||||
//TODO: load custom blockchain state and test
|
@ -23,6 +23,9 @@ pub trait Eth: Sized + Send + Sync + 'static {
|
||||
/// Returns protocol version.
|
||||
fn protocol_version(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns an object with data about the sync status or false. (wtf?)
|
||||
fn syncing(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns the number of hashes per second that the node is mining with.
|
||||
fn hashrate(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
@ -47,8 +50,11 @@ pub trait Eth: Sized + Send + Sync + 'static {
|
||||
/// Returns content of the storage at given address.
|
||||
fn storage_at(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns block with given index / hash.
|
||||
fn block(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
/// Returns block with given hash.
|
||||
fn block_by_hash(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns block with given number.
|
||||
fn block_by_number(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns the number of transactions sent from given address at given time (block number).
|
||||
fn transaction_count(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
@ -71,8 +77,14 @@ pub trait Eth: Sized + Send + Sync + 'static {
|
||||
/// Estimate gas needed for execution of given contract.
|
||||
fn estimate_gas(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns transaction at given block and index.
|
||||
fn transaction_at(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
/// Get transaction by it's hash.
|
||||
fn transaction_by_hash(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns transaction at given block hash and index.
|
||||
fn transaction_by_block_hash_and_index(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns transaction by given block number and index.
|
||||
fn transaction_by_block_number_and_index(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
|
||||
/// Returns transaction receipt.
|
||||
fn transaction_receipt(&self, _: Params) -> Result<Value, Error> { rpc_unimplemented!() }
|
||||
@ -108,6 +120,7 @@ pub trait Eth: Sized + Send + Sync + 'static {
|
||||
fn to_delegate(self) -> IoDelegate<Self> {
|
||||
let mut delegate = IoDelegate::new(Arc::new(self));
|
||||
delegate.add_method("eth_protocolVersion", Eth::protocol_version);
|
||||
delegate.add_method("eth_syncing", Eth::syncing);
|
||||
delegate.add_method("eth_hashrate", Eth::hashrate);
|
||||
delegate.add_method("eth_coinbase", Eth::author);
|
||||
delegate.add_method("eth_mining", Eth::is_mining);
|
||||
@ -125,10 +138,11 @@ pub trait Eth: Sized + Send + Sync + 'static {
|
||||
delegate.add_method("eth_sendTransaction", Eth::send_transaction);
|
||||
delegate.add_method("eth_call", Eth::call);
|
||||
delegate.add_method("eth_estimateGas", Eth::estimate_gas);
|
||||
delegate.add_method("eth_getBlockByHash", Eth::block);
|
||||
delegate.add_method("eth_getBlockByNumber", Eth::block);
|
||||
delegate.add_method("eth_getTransactionByBlockHashAndIndex", Eth::transaction_at);
|
||||
delegate.add_method("eth_getTransactionByBlockNumberAndIndex", Eth::transaction_at);
|
||||
delegate.add_method("eth_getBlockByHash", Eth::block_by_hash);
|
||||
delegate.add_method("eth_getBlockByNumber", Eth::block_by_number);
|
||||
delegate.add_method("eth_getTransactionByHash", Eth::transaction_by_hash);
|
||||
delegate.add_method("eth_getTransactionByBlockHashAndIndex", Eth::transaction_by_block_hash_and_index);
|
||||
delegate.add_method("eth_getTransactionByBlockNumberAndIndex", Eth::transaction_by_block_number_and_index);
|
||||
delegate.add_method("eth_getTransactionReceipt", Eth::transaction_receipt);
|
||||
delegate.add_method("eth_getUncleByBlockHashAndIndex", Eth::uncle_at);
|
||||
delegate.add_method("eth_getUncleByBlockNumberAndIndex", Eth::uncle_at);
|
||||
|
@ -14,12 +14,30 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use serde::{Serialize, Serializer};
|
||||
use util::hash::*;
|
||||
use util::uint::*;
|
||||
use v1::types::{Bytes, Transaction, OptionalValue};
|
||||
|
||||
#[derive(Default, Debug, Serialize)]
|
||||
#[derive(Debug)]
|
||||
pub enum BlockTransactions {
|
||||
Hashes(Vec<H256>),
|
||||
Full(Vec<Transaction>)
|
||||
}
|
||||
|
||||
impl Serialize for BlockTransactions {
|
||||
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
|
||||
where S: Serializer {
|
||||
match *self {
|
||||
BlockTransactions::Hashes(ref hashes) => hashes.serialize(serializer),
|
||||
BlockTransactions::Full(ref ts) => ts.serialize(serializer)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct Block {
|
||||
pub hash: H256,
|
||||
pub hash: OptionalValue<H256>,
|
||||
#[serde(rename="parentHash")]
|
||||
pub parent_hash: H256,
|
||||
#[serde(rename="sha3Uncles")]
|
||||
@ -33,14 +51,13 @@ pub struct Block {
|
||||
pub transactions_root: H256,
|
||||
#[serde(rename="receiptsRoot")]
|
||||
pub receipts_root: H256,
|
||||
pub number: U256,
|
||||
pub number: OptionalValue<U256>,
|
||||
#[serde(rename="gasUsed")]
|
||||
pub gas_used: U256,
|
||||
#[serde(rename="gasLimit")]
|
||||
pub gas_limit: U256,
|
||||
// TODO: figure out how to properly serialize bytes
|
||||
//#[serde(rename="extraData")]
|
||||
//extra_data: Vec<u8>,
|
||||
#[serde(rename="extraData")]
|
||||
pub extra_data: Bytes,
|
||||
#[serde(rename="logsBloom")]
|
||||
pub logs_bloom: H2048,
|
||||
pub timestamp: U256,
|
||||
@ -48,5 +65,52 @@ pub struct Block {
|
||||
#[serde(rename="totalDifficulty")]
|
||||
pub total_difficulty: U256,
|
||||
pub uncles: Vec<U256>,
|
||||
pub transactions: Vec<U256>
|
||||
pub transactions: BlockTransactions
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use serde_json;
|
||||
use util::hash::*;
|
||||
use util::uint::*;
|
||||
use v1::types::{Transaction, Bytes, OptionalValue};
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_serialize_block_transactions() {
|
||||
let t = BlockTransactions::Full(vec![Transaction::default()]);
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"[{"hash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x00","blockHash":null,"blockNumber":null,"transactionIndex":null,"from":"0x0000000000000000000000000000000000000000","to":null,"value":"0x00","gasPrice":"0x00","gas":"0x00","input":"0x00"}]"#);
|
||||
|
||||
let t = BlockTransactions::Hashes(vec![H256::default()]);
|
||||
let serialized = serde_json::to_string(&t).unwrap();
|
||||
assert_eq!(serialized, r#"["0x0000000000000000000000000000000000000000000000000000000000000000"]"#);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_serialize_block() {
|
||||
let block = Block {
|
||||
hash: OptionalValue::Value(H256::default()),
|
||||
parent_hash: H256::default(),
|
||||
uncles_hash: H256::default(),
|
||||
author: Address::default(),
|
||||
miner: Address::default(),
|
||||
state_root: H256::default(),
|
||||
transactions_root: H256::default(),
|
||||
receipts_root: H256::default(),
|
||||
number: OptionalValue::Value(U256::default()),
|
||||
gas_used: U256::default(),
|
||||
gas_limit: U256::default(),
|
||||
extra_data: Bytes::default(),
|
||||
logs_bloom: H2048::default(),
|
||||
timestamp: U256::default(),
|
||||
difficulty: U256::default(),
|
||||
total_difficulty: U256::default(),
|
||||
uncles: vec![],
|
||||
transactions: BlockTransactions::Hashes(vec![])
|
||||
};
|
||||
|
||||
let serialized = serde_json::to_string(&block).unwrap();
|
||||
assert_eq!(serialized, r#"{"hash":"0x0000000000000000000000000000000000000000000000000000000000000000","parentHash":"0x0000000000000000000000000000000000000000000000000000000000000000","sha3Uncles":"0x0000000000000000000000000000000000000000000000000000000000000000","author":"0x0000000000000000000000000000000000000000","miner":"0x0000000000000000000000000000000000000000","stateRoot":"0x0000000000000000000000000000000000000000000000000000000000000000","transactionsRoot":"0x0000000000000000000000000000000000000000000000000000000000000000","receiptsRoot":"0x0000000000000000000000000000000000000000000000000000000000000000","number":"0x00","gasUsed":"0x00","gasLimit":"0x00","extraData":"0x00","logsBloom":"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000","timestamp":"0x00","difficulty":"0x00","totalDifficulty":"0x00","uncles":[],"transactions":[]}"#);
|
||||
}
|
||||
}
|
||||
|
90
rpc/src/v1/types/block_number.rs
Normal file
90
rpc/src/v1/types/block_number.rs
Normal file
@ -0,0 +1,90 @@
|
||||
// Copyright 2015, 2016 Ethcore (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use serde::{Deserialize, Deserializer, Error};
|
||||
use serde::de::Visitor;
|
||||
use ethcore::client::BlockId;
|
||||
|
||||
/// Represents rpc api block number param.
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum BlockNumber {
|
||||
Num(u64),
|
||||
Latest,
|
||||
Earliest,
|
||||
Pending
|
||||
}
|
||||
|
||||
impl Deserialize for BlockNumber {
|
||||
fn deserialize<D>(deserializer: &mut D) -> Result<BlockNumber, D::Error>
|
||||
where D: Deserializer {
|
||||
deserializer.visit(BlockNumberVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
struct BlockNumberVisitor;
|
||||
|
||||
impl Visitor for BlockNumberVisitor {
|
||||
type Value = BlockNumber;
|
||||
|
||||
fn visit_str<E>(&mut self, value: &str) -> Result<Self::Value, E> where E: Error {
|
||||
match value {
|
||||
"latest" => Ok(BlockNumber::Latest),
|
||||
"earliest" => Ok(BlockNumber::Earliest),
|
||||
"pending" => Ok(BlockNumber::Pending),
|
||||
_ if value.starts_with("0x") => u64::from_str_radix(&value[2..], 16).map(BlockNumber::Num).map_err(|_| Error::syntax("invalid block number")),
|
||||
_ => value.parse::<u64>().map(BlockNumber::Num).map_err(|_| Error::syntax("invalid block number"))
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_string<E>(&mut self, value: String) -> Result<Self::Value, E> where E: Error {
|
||||
self.visit_str(value.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<BlockId> for BlockNumber {
|
||||
#[allow(match_same_arms)]
|
||||
fn into(self) -> BlockId {
|
||||
match self {
|
||||
BlockNumber::Num(n) => BlockId::Number(n),
|
||||
BlockNumber::Earliest => BlockId::Earliest,
|
||||
BlockNumber::Latest => BlockId::Latest,
|
||||
BlockNumber::Pending => BlockId::Latest // TODO: change this once blockid support pending
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use ethcore::client::BlockId;
|
||||
use super::*;
|
||||
use serde_json;
|
||||
|
||||
#[test]
|
||||
fn block_number_deserialization() {
|
||||
let s = r#"["0xa", "10", "latest", "earliest", "pending"]"#;
|
||||
let deserialized: Vec<BlockNumber> = serde_json::from_str(s).unwrap();
|
||||
assert_eq!(deserialized, vec![BlockNumber::Num(10), BlockNumber::Num(10), BlockNumber::Latest, BlockNumber::Earliest, BlockNumber::Pending])
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn block_number_into() {
|
||||
assert_eq!(BlockId::Number(100), BlockNumber::Num(100).into());
|
||||
assert_eq!(BlockId::Earliest, BlockNumber::Earliest.into());
|
||||
assert_eq!(BlockId::Latest, BlockNumber::Latest.into());
|
||||
assert_eq!(BlockId::Latest, BlockNumber::Pending.into());
|
||||
}
|
||||
}
|
||||
|
61
rpc/src/v1/types/bytes.rs
Normal file
61
rpc/src/v1/types/bytes.rs
Normal file
@ -0,0 +1,61 @@
|
||||
// Copyright 2015, 2016 Ethcore (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use rustc_serialize::hex::ToHex;
|
||||
use serde::{Serialize, Serializer};
|
||||
|
||||
/// Wrapper structure around vector of bytes.
|
||||
#[derive(Debug)]
|
||||
pub struct Bytes(Vec<u8>);
|
||||
|
||||
impl Bytes {
|
||||
/// Simple constructor.
|
||||
pub fn new(bytes: Vec<u8>) -> Bytes {
|
||||
Bytes(bytes)
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Bytes {
|
||||
fn default() -> Self {
|
||||
// default serialized value is 0x00
|
||||
Bytes(vec![0])
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for Bytes {
|
||||
fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
|
||||
where S: Serializer {
|
||||
let mut serialized = "0x".to_owned();
|
||||
serialized.push_str(self.0.to_hex().as_ref());
|
||||
serializer.visit_str(serialized.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use serde_json;
|
||||
use rustc_serialize::hex::FromHex;
|
||||
|
||||
#[test]
|
||||
fn test_bytes_serialize() {
|
||||
let bytes = Bytes("0123456789abcdef".from_hex().unwrap());
|
||||
let serialized = serde_json::to_string(&bytes).unwrap();
|
||||
assert_eq!(serialized, r#""0x0123456789abcdef""#);
|
||||
}
|
||||
}
|
||||
|
||||
|
88
rpc/src/v1/types/filter.rs
Normal file
88
rpc/src/v1/types/filter.rs
Normal file
@ -0,0 +1,88 @@
|
||||
// Copyright 2015, 2016 Ethcore (UK) Ltd.
|
||||
// This file is part of Parity.
|
||||
|
||||
// Parity is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// Parity is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use serde::{Deserialize, Deserializer, Error};
|
||||
use serde_json::value;
|
||||
use jsonrpc_core::Value;
|
||||
use util::hash::*;
|
||||
use v1::types::BlockNumber;
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum Topic {
|
||||
Single(H256),
|
||||
Multiple(Vec<H256>),
|
||||
Null
|
||||
}
|
||||
|
||||
impl Deserialize for Topic {
|
||||
fn deserialize<D>(deserializer: &mut D) -> Result<Topic, D::Error>
|
||||
where D: Deserializer {
|
||||
let v = try!(Value::deserialize(deserializer));
|
||||
|
||||
if v.is_null() {
|
||||
return Ok(Topic::Null);
|
||||
}
|
||||
|
||||
Deserialize::deserialize(&mut value::Deserializer::new(v.clone())).map(Topic::Single)
|
||||
.or_else(|_| Deserialize::deserialize(&mut value::Deserializer::new(v.clone())).map(Topic::Multiple))
|
||||
.map_err(|_| Error::syntax("")) // unreachable, but types must match
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Deserialize)]
|
||||
pub struct Filter {
|
||||
#[serde(rename="fromBlock")]
|
||||
pub from_block: Option<BlockNumber>,
|
||||
#[serde(rename="toBlock")]
|
||||
pub to_block: Option<BlockNumber>,
|
||||
pub address: Option<Address>,
|
||||
pub topics: Option<Vec<Topic>>
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use serde_json;
|
||||
use std::str::FromStr;
|
||||
use util::hash::*;
|
||||
use super::*;
|
||||
use v1::types::BlockNumber;
|
||||
|
||||
#[test]
|
||||
fn topic_deserialization() {
|
||||
let s = r#"["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", null, ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", "0x0000000000000000000000000aff3454fce5edbc8cca8697c15331677e6ebccc"]]"#;
|
||||
let deserialized: Vec<Topic> = serde_json::from_str(s).unwrap();
|
||||
assert_eq!(deserialized, vec![
|
||||
Topic::Single(H256::from_str("000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b").unwrap()),
|
||||
Topic::Null,
|
||||
Topic::Multiple(vec![
|
||||
H256::from_str("000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b").unwrap(),
|
||||
H256::from_str("0000000000000000000000000aff3454fce5edbc8cca8697c15331677e6ebccc").unwrap()
|
||||
])
|
||||
]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn filter_deserialization() {
|
||||
let s = r#"{"fromBlock":"earliest","toBlock":"latest"}"#;
|
||||
let deserialized: Filter = serde_json::from_str(s).unwrap();
|
||||
assert_eq!(deserialized, Filter {
|
||||
from_block: Some(BlockNumber::Earliest),
|
||||
to_block: Some(BlockNumber::Latest),
|
||||
address: None,
|
||||
topics: None
|
||||
});
|
||||
}
|
||||
}
|
66
rpc/src/v1/types/index.rs
Normal file
66
rpc/src/v1/types/index.rs
Normal file
@ -0,0 +1,66 @@
|
||||
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use serde::{Deserialize, Deserializer, Error};
use serde::de::Visitor;

/// Represents usize.
#[derive(Debug, PartialEq)]
pub struct Index(usize);

impl Index {
	pub fn value(&self) -> usize {
		self.0
	}
}

impl Deserialize for Index {
	fn deserialize<D>(deserializer: &mut D) -> Result<Index, D::Error>
	where D: Deserializer {
		deserializer.visit(IndexVisitor)
	}
}

struct IndexVisitor;

impl Visitor for IndexVisitor {
	type Value = Index;

	fn visit_str<E>(&mut self, value: &str) -> Result<Self::Value, E> where E: Error {
		match value {
			_ if value.starts_with("0x") => usize::from_str_radix(&value[2..], 16).map(Index).map_err(|_| Error::syntax("invalid index")),
			_ => value.parse::<usize>().map(Index).map_err(|_| Error::syntax("invalid index"))
		}
	}

	fn visit_string<E>(&mut self, value: String) -> Result<Self::Value, E> where E: Error {
		self.visit_str(value.as_ref())
	}
}

#[cfg(test)]
mod tests {
	use super::*;
	use serde_json;

	#[test]
	fn block_number_deserialization() {
		let s = r#"["0xa", "10"]"#;
		let deserialized: Vec<Index> = serde_json::from_str(s).unwrap();
		assert_eq!(deserialized, vec![Index(10), Index(10)]);
	}
}
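The visitor above accepts either a 0x-prefixed hex string or a plain decimal string, as the test shows with "0xa" and "10" both decoding to 10. A standalone sketch of that rule (illustration only; parse_index is a hypothetical helper, not part of the commit):

// Same "0x-hex or decimal" parsing rule, stripped of the serde machinery.
fn parse_index(value: &str) -> Option<usize> {
	if let Some(hex) = value.strip_prefix("0x") {
		usize::from_str_radix(hex, 16).ok()
	} else {
		value.parse::<usize>().ok()
	}
}

fn main() {
	assert_eq!(parse_index("0xa"), Some(10));
	assert_eq!(parse_index("10"), Some(10));
	assert_eq!(parse_index("not a number"), None);
}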
@ -15,5 +15,19 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

mod block;
mod block_number;
mod bytes;
mod filter;
mod index;
mod optionals;
mod sync;
mod transaction;

pub use self::block::Block;
pub use self::block::{Block, BlockTransactions};
pub use self::block_number::BlockNumber;
pub use self::bytes::Bytes;
pub use self::filter::Filter;
pub use self::index::Index;
pub use self::optionals::OptionalValue;
pub use self::sync::{SyncStatus, SyncInfo};
pub use self::transaction::Transaction;
58
rpc/src/v1/types/optionals.rs
Normal file
@ -0,0 +1,58 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use serde::{Serialize, Serializer};
use serde_json::Value;

#[derive(Debug)]
pub enum OptionalValue<T> where T: Serialize {
	Value(T),
	Null
}

impl<T> Default for OptionalValue<T> where T: Serialize {
	fn default() -> Self {
		OptionalValue::Null
	}
}

impl<T> Serialize for OptionalValue<T> where T: Serialize {
	fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
	where S: Serializer {
		match *self {
			OptionalValue::Value(ref value) => value.serialize(serializer),
			OptionalValue::Null => Value::Null.serialize(serializer)
		}
	}
}

#[cfg(test)]
mod tests {
	use serde_json;
	use util::hash::*;
	use super::*;

	#[test]
	fn test_serialize_optional_value() {
		let v: OptionalValue<H256> = OptionalValue::Null;
		let serialized = serde_json::to_string(&v).unwrap();
		assert_eq!(serialized, r#"null"#);

		let v = OptionalValue::Value(H256::default());
		let serialized = serde_json::to_string(&v).unwrap();
		assert_eq!(serialized, r#""0x0000000000000000000000000000000000000000000000000000000000000000""#);
	}
}
68
rpc/src/v1/types/sync.rs
Normal file
@ -0,0 +1,68 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use serde::{Serialize, Serializer};
use util::uint::*;

#[derive(Default, Debug, Serialize, PartialEq)]
pub struct SyncInfo {
	#[serde(rename="startingBlock")]
	pub starting_block: U256,
	#[serde(rename="currentBlock")]
	pub current_block: U256,
	#[serde(rename="highestBlock")]
	pub highest_block: U256,
}

#[derive(Debug, PartialEq)]
pub enum SyncStatus {
	Info(SyncInfo),
	None
}

impl Serialize for SyncStatus {
	fn serialize<S>(&self, serializer: &mut S) -> Result<(), S::Error>
	where S: Serializer {
		match *self {
			SyncStatus::Info(ref info) => info.serialize(serializer),
			SyncStatus::None => false.serialize(serializer)
		}
	}
}

#[cfg(test)]
mod tests {
	use serde_json;
	use super::*;

	#[test]
	fn test_serialize_sync_info() {
		let t = SyncInfo::default();
		let serialized = serde_json::to_string(&t).unwrap();
		assert_eq!(serialized, r#"{"startingBlock":"0x00","currentBlock":"0x00","highestBlock":"0x00"}"#);
	}

	#[test]
	fn test_serialize_sync_status() {
		let t = SyncStatus::None;
		let serialized = serde_json::to_string(&t).unwrap();
		assert_eq!(serialized, "false");

		let t = SyncStatus::Info(SyncInfo::default());
		let serialized = serde_json::to_string(&t).unwrap();
		assert_eq!(serialized, r#"{"startingBlock":"0x00","currentBlock":"0x00","highestBlock":"0x00"}"#);
	}
}
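SyncStatus mirrors the shape of the eth_syncing JSON-RPC response: either an info object or the literal false. A standalone sketch of that shape (illustration only; to_json is a hypothetical helper, and plain u64 stands in for the hex-encoded U256 fields used above):

use serde_json::{json, Value};

enum SyncStatus {
	Info { starting: u64, current: u64, highest: u64 },
	None,
}

// Either a progress object or the JSON literal `false`.
fn to_json(status: &SyncStatus) -> Value {
	match *status {
		SyncStatus::Info { starting, current, highest } => json!({
			"startingBlock": starting,
			"currentBlock": current,
			"highestBlock": highest,
		}),
		SyncStatus::None => Value::Bool(false),
	}
}

fn main() {
	println!("{}", to_json(&SyncStatus::None)); // false
	println!("{}", to_json(&SyncStatus::Info { starting: 0, current: 5, highest: 10 }));
}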
74
rpc/src/v1/types/transaction.rs
Normal file
@ -0,0 +1,74 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use util::hash::*;
use util::uint::*;
use ethcore::transaction::{LocalizedTransaction, Action};
use v1::types::{Bytes, OptionalValue};

#[derive(Debug, Default, Serialize)]
pub struct Transaction {
	pub hash: H256,
	pub nonce: U256,
	#[serde(rename="blockHash")]
	pub block_hash: OptionalValue<H256>,
	#[serde(rename="blockNumber")]
	pub block_number: OptionalValue<U256>,
	#[serde(rename="transactionIndex")]
	pub transaction_index: OptionalValue<U256>,
	pub from: Address,
	pub to: OptionalValue<Address>,
	pub value: U256,
	#[serde(rename="gasPrice")]
	pub gas_price: U256,
	pub gas: U256,
	pub input: Bytes
}

impl From<LocalizedTransaction> for Transaction {
	fn from(t: LocalizedTransaction) -> Transaction {
		Transaction {
			hash: t.hash(),
			nonce: t.nonce,
			block_hash: OptionalValue::Value(t.block_hash.clone()),
			block_number: OptionalValue::Value(U256::from(t.block_number)),
			transaction_index: OptionalValue::Value(U256::from(t.transaction_index)),
			from: t.sender().unwrap(),
			to: match t.action {
				Action::Create => OptionalValue::Null,
				Action::Call(ref address) => OptionalValue::Value(address.clone())
			},
			value: t.value,
			gas_price: t.gas_price,
			gas: t.gas,
			input: Bytes::new(t.data.clone())
		}
	}
}

#[cfg(test)]
mod tests {
	use super::*;
	use serde_json;

	#[test]
	fn test_transaction_serialize() {
		let t = Transaction::default();
		let serialized = serde_json::to_string(&t).unwrap();
		assert_eq!(serialized, r#"{"hash":"0x0000000000000000000000000000000000000000000000000000000000000000","nonce":"0x00","blockHash":null,"blockNumber":null,"transactionIndex":null,"from":"0x0000000000000000000000000000000000000000","to":null,"value":"0x00","gasPrice":"0x00","gas":"0x00","input":"0x00"}"#);
	}
}
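The From<LocalizedTransaction> conversion above maps a contract creation to a null "to" field and a call to its destination address. A minimal sketch of just that mapping (illustration only; the simplified Action enum and the to_field helper are assumptions, not part of the commit):

enum Action {
	Create,
	Call(String),
}

// Contract creation has no destination; a call carries one.
fn to_field(action: &Action) -> Option<&str> {
	match *action {
		Action::Create => None,
		Action::Call(ref address) => Some(address),
	}
}

fn main() {
	assert_eq!(to_field(&Action::Create), None);
	assert_eq!(to_field(&Action::Call("0xdeadbeef".into())), Some("0xdeadbeef"));
}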
@ -10,8 +10,8 @@ authors = ["Ethcore <admin@ethcore.io"]
|
||||
[dependencies]
|
||||
ethcore-util = { path = "../util" }
|
||||
ethcore = { path = ".." }
|
||||
clippy = "0.0.37"
|
||||
clippy = "0.0.41"
|
||||
log = "0.3"
|
||||
env_logger = "0.3"
|
||||
time = "0.1.34"
|
||||
|
||||
rand = "0.3.13"
|
||||
|
@ -33,7 +33,7 @@ use util::*;
|
||||
use std::mem::{replace};
|
||||
use ethcore::views::{HeaderView};
|
||||
use ethcore::header::{BlockNumber, Header as BlockHeader};
|
||||
use ethcore::client::{BlockChainClient, BlockStatus};
|
||||
use ethcore::client::{BlockChainClient, BlockStatus, BlockId};
|
||||
use range_collection::{RangeCollection, ToUsize, FromUsize};
|
||||
use ethcore::error::*;
|
||||
use ethcore::block::Block;
|
||||
@ -62,6 +62,9 @@ const MAX_NODE_DATA_TO_SEND: usize = 1024;
|
||||
const MAX_RECEIPTS_TO_SEND: usize = 1024;
|
||||
const MAX_HEADERS_TO_REQUEST: usize = 512;
|
||||
const MAX_BODIES_TO_REQUEST: usize = 256;
|
||||
const MIN_PEERS_PROPAGATION: usize = 4;
|
||||
const MAX_PEERS_PROPAGATION: usize = 128;
|
||||
const MAX_PEER_LAG_PROPAGATION: BlockNumber = 20;
|
||||
|
||||
const STATUS_PACKET: u8 = 0x00;
|
||||
const NEW_BLOCK_HASHES_PACKET: u8 = 0x01;
|
||||
@ -134,7 +137,7 @@ pub struct SyncStatus {
|
||||
pub num_active_peers: usize,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug)]
|
||||
#[derive(PartialEq, Eq, Debug, Clone)]
|
||||
/// Peer data type requested
|
||||
enum PeerAsking {
|
||||
Nothing,
|
||||
@ -142,6 +145,7 @@ enum PeerAsking {
|
||||
BlockBodies,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
/// Syncing peer information
|
||||
struct PeerInfo {
|
||||
/// eth protocol version
|
||||
@ -151,13 +155,17 @@ struct PeerInfo {
|
||||
/// Peer network id
|
||||
network_id: U256,
|
||||
/// Peer best block hash
|
||||
latest: H256,
|
||||
latest_hash: H256,
|
||||
/// Peer best block number if known
|
||||
latest_number: Option<BlockNumber>,
|
||||
/// Peer total difficulty
|
||||
difficulty: U256,
|
||||
/// Type of data currently being requested from the peer.
|
||||
asking: PeerAsking,
|
||||
/// A set of block numbers being requested
|
||||
asking_blocks: Vec<BlockNumber>,
|
||||
/// Holds requested header hash if currently requesting block header by hash
|
||||
asking_hash: Option<H256>,
|
||||
/// Request timestamp
|
||||
ask_time: f64,
|
||||
}
|
||||
@ -175,6 +183,8 @@ pub struct ChainSync {
|
||||
downloading_headers: HashSet<BlockNumber>,
|
||||
/// Set of block body numbers being downloaded
|
||||
downloading_bodies: HashSet<BlockNumber>,
|
||||
/// Set of block headers being downloaded by hash
|
||||
downloading_hashes: HashSet<H256>,
|
||||
/// Downloaded headers.
|
||||
headers: Vec<(BlockNumber, Vec<Header>)>, //TODO: use BTreeMap once range API is stable. For now it is a vector sorted in descending order
|
||||
/// Downloaded bodies
|
||||
@ -191,6 +201,8 @@ pub struct ChainSync {
|
||||
syncing_difficulty: U256,
|
||||
/// True if common block for our and remote chain has been found
|
||||
have_common_block: bool,
|
||||
/// Last propagated block number
|
||||
last_send_block_number: BlockNumber,
|
||||
}
|
||||
|
||||
type RlpResponseResult = Result<Option<(PacketId, RlpStream)>, PacketDecodeError>;
|
||||
@ -204,6 +216,7 @@ impl ChainSync {
|
||||
highest_block: None,
|
||||
downloading_headers: HashSet::new(),
|
||||
downloading_bodies: HashSet::new(),
|
||||
downloading_hashes: HashSet::new(),
|
||||
headers: Vec::new(),
|
||||
bodies: Vec::new(),
|
||||
peers: HashMap::new(),
|
||||
@ -212,6 +225,7 @@ impl ChainSync {
|
||||
last_imported_hash: None,
|
||||
syncing_difficulty: U256::from(0u64),
|
||||
have_common_block: false,
|
||||
last_send_block_number: 0,
|
||||
}
|
||||
}
|
||||
|
||||
@ -236,6 +250,8 @@ impl ChainSync {
|
||||
self.peers.clear();
|
||||
}
|
||||
|
||||
|
||||
#[allow(for_kv_map)] // Because it's not possible to get `values_mut()`
|
||||
/// Reset sync. Clear all downloaded data but keep the queue
|
||||
fn reset(&mut self) {
|
||||
self.downloading_headers.clear();
|
||||
@ -244,6 +260,7 @@ impl ChainSync {
|
||||
self.bodies.clear();
|
||||
for (_, ref mut p) in &mut self.peers {
|
||||
p.asking_blocks.clear();
|
||||
p.asking_hash = None;
|
||||
}
|
||||
self.header_ids.clear();
|
||||
self.syncing_difficulty = From::from(0u64);
|
||||
@ -269,15 +286,21 @@ impl ChainSync {
|
||||
protocol_version: try!(r.val_at(0)),
|
||||
network_id: try!(r.val_at(1)),
|
||||
difficulty: try!(r.val_at(2)),
|
||||
latest: try!(r.val_at(3)),
|
||||
latest_hash: try!(r.val_at(3)),
|
||||
latest_number: None,
|
||||
genesis: try!(r.val_at(4)),
|
||||
asking: PeerAsking::Nothing,
|
||||
asking_blocks: Vec::new(),
|
||||
asking_hash: None,
|
||||
ask_time: 0f64,
|
||||
};
|
||||
|
||||
trace!(target: "sync", "New peer {} (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{})", peer_id, peer.protocol_version, peer.network_id, peer.difficulty, peer.latest, peer.genesis);
|
||||
trace!(target: "sync", "New peer {} (protocol: {}, network: {:?}, difficulty: {:?}, latest:{}, genesis:{})", peer_id, peer.protocol_version, peer.network_id, peer.difficulty, peer.latest_hash, peer.genesis);
|
||||
|
||||
if self.peers.contains_key(&peer_id) {
|
||||
warn!("Unexpected status packet from {}:{}", peer_id, io.peer_info(peer_id));
|
||||
return Ok(());
|
||||
}
|
||||
let chain_info = io.chain().chain_info();
|
||||
if peer.genesis != chain_info.genesis_hash {
|
||||
io.disable_peer(peer_id);
|
||||
@ -290,10 +313,7 @@ impl ChainSync {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let old = self.peers.insert(peer_id.clone(), peer);
|
||||
if old.is_some() {
|
||||
panic!("ChainSync: new peer already exists");
|
||||
}
|
||||
self.peers.insert(peer_id.clone(), peer);
|
||||
info!(target: "sync", "Connected {}:{}", peer_id, io.peer_info(peer_id));
|
||||
self.sync_peer(io, peer_id, false);
|
||||
Ok(())
|
||||
@ -327,7 +347,7 @@ impl ChainSync {
|
||||
self.highest_block = Some(number);
|
||||
}
|
||||
let hash = info.hash();
|
||||
match io.chain().block_status(&hash) {
|
||||
match io.chain().block_status(BlockId::Hash(hash.clone())) {
|
||||
BlockStatus::InChain => {
|
||||
self.have_common_block = true;
|
||||
self.last_imported_block = Some(number);
|
||||
@ -430,12 +450,16 @@ impl ChainSync {
|
||||
let block_rlp = try!(r.at(0));
|
||||
let header_rlp = try!(block_rlp.at(0));
|
||||
let h = header_rlp.as_raw().sha3();
|
||||
|
||||
trace!(target: "sync", "{} -> NewBlock ({})", peer_id, h);
|
||||
let header_view = HeaderView::new(header_rlp.as_raw());
|
||||
let header: BlockHeader = try!(header_rlp.as_val());
|
||||
let mut unknown = false;
|
||||
{
|
||||
let peer = self.peers.get_mut(&peer_id).unwrap();
|
||||
peer.latest_hash = header.hash();
|
||||
peer.latest_number = Some(header.number());
|
||||
}
|
||||
// TODO: Decompose block and add to self.headers and self.bodies instead
|
||||
if header_view.number() == From::from(self.current_base_block() + 1) {
|
||||
if header.number == From::from(self.current_base_block() + 1) {
|
||||
match io.chain().import_block(block_rlp.as_raw().to_vec()) {
|
||||
Err(ImportError::AlreadyInChain) => {
|
||||
trace!(target: "sync", "New block already in chain {:?}", h);
|
||||
@ -444,6 +468,7 @@ impl ChainSync {
|
||||
trace!(target: "sync", "New block already queued {:?}", h);
|
||||
},
|
||||
Ok(_) => {
|
||||
self.last_imported_block = Some(header.number);
|
||||
trace!(target: "sync", "New block queued {:?}", h);
|
||||
},
|
||||
Err(ImportError::UnknownParent) => {
|
||||
@ -463,13 +488,9 @@ impl ChainSync {
|
||||
trace!(target: "sync", "New block unknown {:?}", h);
|
||||
//TODO: handle too many unknown blocks
|
||||
let difficulty: U256 = try!(r.val_at(1));
|
||||
let peer_difficulty = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer").difficulty;
|
||||
let peer_difficulty = self.peers.get_mut(&peer_id).unwrap().difficulty;
|
||||
if difficulty > peer_difficulty {
|
||||
trace!(target: "sync", "Received block {:?} with no known parent. Peer needs syncing...", h);
|
||||
{
|
||||
let peer = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer");
|
||||
peer.latest = header_view.sha3();
|
||||
}
|
||||
self.sync_peer(io, peer_id, true);
|
||||
}
|
||||
}
|
||||
@ -478,17 +499,20 @@ impl ChainSync {
|
||||
|
||||
/// Handles NewHashes packet. Initiates headers download for any unknown hashes.
|
||||
fn on_peer_new_hashes(&mut self, io: &mut SyncIo, peer_id: PeerId, r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
|
||||
if self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer").asking != PeerAsking::Nothing {
|
||||
if self.peers.get_mut(&peer_id).unwrap().asking != PeerAsking::Nothing {
|
||||
trace!(target: "sync", "Ignoring new hashes since we're already downloading.");
|
||||
return Ok(());
|
||||
}
|
||||
trace!(target: "sync", "{} -> NewHashes ({} entries)", peer_id, r.item_count());
|
||||
let hashes = r.iter().map(|item| (item.val_at::<H256>(0), item.val_at::<U256>(1)));
|
||||
let mut max_height: U256 = From::from(0);
|
||||
let hashes = r.iter().map(|item| (item.val_at::<H256>(0), item.val_at::<BlockNumber>(1)));
|
||||
let mut max_height: BlockNumber = 0;
|
||||
for (rh, rd) in hashes {
|
||||
let h = try!(rh);
|
||||
let d = try!(rd);
|
||||
match io.chain().block_status(&h) {
|
||||
if self.downloading_hashes.contains(&h) {
|
||||
continue;
|
||||
}
|
||||
match io.chain().block_status(BlockId::Hash(h.clone())) {
|
||||
BlockStatus::InChain => {
|
||||
trace!(target: "sync", "New block hash already in chain {:?}", h);
|
||||
},
|
||||
@ -496,10 +520,11 @@ impl ChainSync {
|
||||
trace!(target: "sync", "New hash block already queued {:?}", h);
|
||||
},
|
||||
BlockStatus::Unknown => {
|
||||
trace!(target: "sync", "New unknown block hash {:?}", h);
|
||||
if d > max_height {
|
||||
let peer = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer");
|
||||
peer.latest = h.clone();
|
||||
trace!(target: "sync", "New unknown block hash {:?}", h);
|
||||
let peer = self.peers.get_mut(&peer_id).unwrap();
|
||||
peer.latest_hash = h.clone();
|
||||
peer.latest_number = Some(d);
|
||||
max_height = d;
|
||||
}
|
||||
},
|
||||
@ -510,7 +535,7 @@ impl ChainSync {
|
||||
}
|
||||
}
|
||||
};
|
||||
if max_height != x!(0) {
|
||||
if max_height != 0 {
|
||||
self.sync_peer(io, peer_id, true);
|
||||
}
|
||||
Ok(())
|
||||
@ -520,7 +545,7 @@ impl ChainSync {
|
||||
pub fn on_peer_aborting(&mut self, io: &mut SyncIo, peer: PeerId) {
|
||||
trace!(target: "sync", "== Disconnecting {}", peer);
|
||||
if self.peers.contains_key(&peer) {
|
||||
info!(target: "sync", "Disconnected {}:{}", peer, io.peer_info(peer));
|
||||
info!(target: "sync", "Disconnected {}", peer);
|
||||
self.clear_peer_download(peer);
|
||||
self.peers.remove(&peer);
|
||||
self.continue_sync(io);
|
||||
@ -558,7 +583,7 @@ impl ChainSync {
|
||||
/// Find something to do for a peer. Called for a new peer or when a peer is done with its task.
|
||||
fn sync_peer(&mut self, io: &mut SyncIo, peer_id: PeerId, force: bool) {
|
||||
let (peer_latest, peer_difficulty) = {
|
||||
let peer = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer");
|
||||
let peer = self.peers.get_mut(&peer_id).unwrap();
|
||||
if peer.asking != PeerAsking::Nothing {
|
||||
return;
|
||||
}
|
||||
@ -566,7 +591,7 @@ impl ChainSync {
|
||||
trace!(target: "sync", "Waiting for block queue");
|
||||
return;
|
||||
}
|
||||
(peer.latest.clone(), peer.difficulty.clone())
|
||||
(peer.latest_hash.clone(), peer.difficulty.clone())
|
||||
};
|
||||
|
||||
let td = io.chain().chain_info().pending_total_difficulty;
|
||||
@ -578,9 +603,11 @@ impl ChainSync {
|
||||
self.state = SyncState::Blocks;
|
||||
}
|
||||
trace!(target: "sync", "Starting sync with better chain");
|
||||
self.peers.get_mut(&peer_id).unwrap().asking_hash = Some(peer_latest.clone());
|
||||
self.downloading_hashes.insert(peer_latest.clone());
|
||||
self.request_headers_by_hash(io, peer_id, &peer_latest, 1, 0, false);
|
||||
}
|
||||
else if self.state == SyncState::Blocks {
|
||||
else if self.state == SyncState::Blocks && io.chain().block_status(BlockId::Hash(peer_latest)) == BlockStatus::Unknown {
|
||||
self.request_blocks(io, peer_id);
|
||||
}
|
||||
}
|
||||
@ -593,7 +620,7 @@ impl ChainSync {
|
||||
fn request_blocks(&mut self, io: &mut SyncIo, peer_id: PeerId) {
|
||||
self.clear_peer_download(peer_id);
|
||||
|
||||
if io.chain().queue_info().full {
|
||||
if io.chain().queue_info().is_full() {
|
||||
self.pause_sync();
|
||||
return;
|
||||
}
|
||||
@ -604,7 +631,7 @@ impl ChainSync {
|
||||
|
||||
if self.have_common_block && !self.headers.is_empty() && self.headers.range_iter().next().unwrap().0 == self.current_base_block() + 1 {
|
||||
for (start, ref items) in self.headers.range_iter() {
|
||||
if needed_bodies.len() > MAX_BODIES_TO_REQUEST {
|
||||
if needed_bodies.len() >= MAX_BODIES_TO_REQUEST {
|
||||
break;
|
||||
}
|
||||
let mut index: BlockNumber = 0;
|
||||
@ -636,6 +663,7 @@ impl ChainSync {
|
||||
if start == 0 {
|
||||
self.have_common_block = true; //reached genesis
|
||||
self.last_imported_hash = Some(chain_info.genesis_hash);
|
||||
self.last_imported_block = Some(0);
|
||||
}
|
||||
}
|
||||
if self.have_common_block {
|
||||
@ -650,7 +678,7 @@ impl ChainSync {
|
||||
continue;
|
||||
}
|
||||
let mut block = prev;
|
||||
while block < next && headers.len() <= MAX_HEADERS_TO_REQUEST {
|
||||
while block < next && headers.len() < MAX_HEADERS_TO_REQUEST {
|
||||
if !self.downloading_headers.contains(&(block as BlockNumber)) {
|
||||
headers.push(block as BlockNumber);
|
||||
self.downloading_headers.insert(block as BlockNumber);
|
||||
@ -669,6 +697,8 @@ impl ChainSync {
|
||||
}
|
||||
}
|
||||
else {
|
||||
// continue search for common block
|
||||
self.downloading_headers.insert(start as BlockNumber);
|
||||
self.request_headers_by_number(io, peer_id, start as BlockNumber, 1, 0, false);
|
||||
}
|
||||
}
|
||||
@ -676,7 +706,10 @@ impl ChainSync {
|
||||
|
||||
/// Clear all blocks/headers marked as being downloaded by a peer.
|
||||
fn clear_peer_download(&mut self, peer_id: PeerId) {
|
||||
let peer = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer");
|
||||
let peer = self.peers.get_mut(&peer_id).unwrap();
|
||||
if let Some(hash) = peer.asking_hash.take() {
|
||||
self.downloading_hashes.remove(&hash);
|
||||
}
|
||||
for b in &peer.asking_blocks {
|
||||
self.downloading_headers.remove(&b);
|
||||
self.downloading_bodies.remove(&b);
|
||||
@ -809,7 +842,7 @@ impl ChainSync {
|
||||
|
||||
/// Reset peer status after request is complete.
|
||||
fn reset_peer_asking(&mut self, peer_id: PeerId, asking: PeerAsking) {
|
||||
let peer = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer");
|
||||
let peer = self.peers.get_mut(&peer_id).unwrap();
|
||||
if peer.asking != asking {
|
||||
warn!(target:"sync", "Asking {:?} while expected {:?}", peer.asking, asking);
|
||||
}
|
||||
@ -821,9 +854,9 @@ impl ChainSync {
|
||||
/// Generic request sender
|
||||
fn send_request(&mut self, sync: &mut SyncIo, peer_id: PeerId, asking: PeerAsking, packet_id: PacketId, packet: Bytes) {
|
||||
{
|
||||
let peer = self.peers.get_mut(&peer_id).expect("ChainSync: unknown peer");
|
||||
let peer = self.peers.get_mut(&peer_id).unwrap();
|
||||
if peer.asking != PeerAsking::Nothing {
|
||||
warn!(target:"sync", "Asking {:?} while requesting {:?}", asking, peer.asking);
|
||||
warn!(target:"sync", "Asking {:?} while requesting {:?}", peer.asking, asking);
|
||||
}
|
||||
}
|
||||
match sync.send(peer_id, packet_id, packet) {
|
||||
@ -840,6 +873,14 @@ impl ChainSync {
|
||||
}
|
||||
}
|
||||
|
||||
/// Generic packet sender
|
||||
fn send_packet(&mut self, sync: &mut SyncIo, peer_id: PeerId, packet_id: PacketId, packet: Bytes) {
|
||||
if let Err(e) = sync.send(peer_id, packet_id, packet) {
|
||||
warn!(target:"sync", "Error sending packet: {:?}", e);
|
||||
sync.disable_peer(peer_id);
|
||||
self.on_peer_aborting(sync, peer_id);
|
||||
}
|
||||
}
|
||||
/// Called when peer sends us new transactions
|
||||
fn on_peer_transactions(&mut self, _io: &mut SyncIo, _peer_id: PeerId, _r: &UntrustedRlp) -> Result<(), PacketDecodeError> {
|
||||
Ok(())
|
||||
@ -873,7 +914,7 @@ impl ChainSync {
|
||||
// id is a hash
|
||||
let hash: H256 = try!(r.val_at(0));
|
||||
trace!(target: "sync", "-> GetBlockHeaders (hash: {}, max: {}, skip: {}, reverse:{})", hash, max_headers, skip, reverse);
|
||||
match io.chain().block_header(&hash) {
|
||||
match io.chain().block_header(BlockId::Hash(hash)) {
|
||||
Some(hdr) => From::from(HeaderView::new(&hdr).number()),
|
||||
None => last
|
||||
}
|
||||
@ -893,7 +934,7 @@ impl ChainSync {
|
||||
let mut data = Bytes::new();
|
||||
let inc = (skip + 1) as BlockNumber;
|
||||
while number <= last && number > 0 && count < max_count {
|
||||
if let Some(mut hdr) = io.chain().block_header_at(number) {
|
||||
if let Some(mut hdr) = io.chain().block_header(BlockId::Number(number)) {
|
||||
data.append(&mut hdr);
|
||||
count += 1;
|
||||
}
|
||||
@ -925,7 +966,7 @@ impl ChainSync {
|
||||
let mut added = 0usize;
|
||||
let mut data = Bytes::new();
|
||||
for i in 0..count {
|
||||
if let Some(mut hdr) = io.chain().block_body(&try!(r.val_at::<H256>(i))) {
|
||||
if let Some(mut hdr) = io.chain().block_body(BlockId::Hash(try!(r.val_at::<H256>(i)))) {
|
||||
data.append(&mut hdr);
|
||||
added += 1;
|
||||
}
|
||||
@ -996,6 +1037,11 @@ impl ChainSync {
|
||||
/// Dispatch incoming requests and responses
|
||||
pub fn on_packet(&mut self, io: &mut SyncIo, peer: PeerId, packet_id: u8, data: &[u8]) {
|
||||
let rlp = UntrustedRlp::new(data);
|
||||
|
||||
if packet_id != STATUS_PACKET && !self.peers.contains_key(&peer) {
|
||||
warn!(target:"sync", "Unexpected packet from unregistered peer: {}:{}", peer, io.peer_info(peer));
|
||||
return;
|
||||
}
|
||||
let result = match packet_id {
|
||||
STATUS_PACKET => self.on_peer_status(io, peer, &rlp),
|
||||
TRANSACTIONS_PACKET => self.on_peer_transactions(io, peer, &rlp),
|
||||
@ -1030,10 +1076,6 @@ impl ChainSync {
|
||||
})
|
||||
}
|
||||
|
||||
/// Maintain other peers. Send out any new blocks and transactions
|
||||
pub fn _maintain_sync(&mut self, _io: &mut SyncIo) {
|
||||
}
|
||||
|
||||
pub fn maintain_peers(&self, io: &mut SyncIo) {
|
||||
let tick = time::precise_time_s();
|
||||
for (peer_id, peer) in &self.peers {
|
||||
@ -1042,13 +1084,137 @@ impl ChainSync {
|
||||
}
|
||||
}
|
||||
}
|
||||
/// Maintain other peers. Send out any new blocks and transactions
|
||||
pub fn maintain_sync(&mut self, io: &mut SyncIo) {
|
||||
if !io.chain().queue_info().full && self.state == SyncState::Waiting {
|
||||
self.state = SyncState::Idle;
|
||||
|
||||
fn check_resume(&mut self, io: &mut SyncIo) {
|
||||
if !io.chain().queue_info().is_full() && self.state == SyncState::Waiting {
|
||||
self.state = SyncState::Blocks;
|
||||
self.continue_sync(io);
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates the RLP to send for the tree defined by the 'from' and 'to' hashes
|
||||
fn create_new_hashes_rlp(chain: &BlockChainClient, from: &H256, to: &H256) -> Option<Bytes> {
|
||||
match chain.tree_route(from, to) {
|
||||
Some(route) => {
|
||||
match route.blocks.len() {
|
||||
0 => None,
|
||||
_ => {
|
||||
let mut rlp_stream = RlpStream::new_list(route.blocks.len());
|
||||
for block_hash in route.blocks {
|
||||
let mut hash_rlp = RlpStream::new_list(2);
|
||||
let difficulty = chain.block_total_difficulty(BlockId::Hash(block_hash.clone())).expect("Mallformed block without a difficulty on the chain!");
|
||||
hash_rlp.append(&block_hash);
|
||||
hash_rlp.append(&difficulty);
|
||||
rlp_stream.append_raw(&hash_rlp.out(), 1);
|
||||
}
|
||||
Some(rlp_stream.out())
|
||||
}
|
||||
}
|
||||
},
|
||||
None => None
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates the latest block RLP for the given client
|
||||
fn create_latest_block_rlp(chain: &BlockChainClient) -> Bytes {
|
||||
let mut rlp_stream = RlpStream::new_list(2);
|
||||
rlp_stream.append_raw(&chain.block(BlockId::Hash(chain.chain_info().best_block_hash)).unwrap(), 1);
|
||||
rlp_stream.append(&chain.chain_info().total_difficulty);
|
||||
rlp_stream.out()
|
||||
}
|
||||
|
||||
/// Returns peer IDs that have fewer blocks than our chain
|
||||
fn get_lagging_peers(&mut self, io: &SyncIo) -> Vec<(PeerId, BlockNumber)> {
|
||||
let chain = io.chain();
|
||||
let chain_info = chain.chain_info();
|
||||
let latest_hash = chain_info.best_block_hash;
|
||||
let latest_number = chain_info.best_block_number;
|
||||
self.peers.iter_mut().filter_map(|(&id, ref mut peer_info)|
|
||||
match io.chain().block_status(BlockId::Hash(peer_info.latest_hash.clone())) {
|
||||
BlockStatus::InChain => {
|
||||
if peer_info.latest_number.is_none() {
|
||||
peer_info.latest_number = Some(HeaderView::new(&io.chain().block_header(BlockId::Hash(peer_info.latest_hash.clone())).unwrap()).number());
|
||||
}
|
||||
if peer_info.latest_hash != latest_hash && latest_number > peer_info.latest_number.unwrap() {
|
||||
Some((id, peer_info.latest_number.unwrap()))
|
||||
} else { None }
|
||||
},
|
||||
_ => None
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
|
||||
/// Propagates the latest block to lagging peers
|
||||
fn propagade_blocks(&mut self, local_best: &H256, best_number: BlockNumber, io: &mut SyncIo) -> usize {
|
||||
let updated_peers = {
|
||||
let lagging_peers = self.get_lagging_peers(io);
|
||||
|
||||
// sqrt(x)/x scaled to max u32
|
||||
let fraction = (self.peers.len() as f64).powf(-0.5).mul(u32::max_value() as f64).round() as u32;
|
||||
let lucky_peers = match lagging_peers.len() {
|
||||
0 ... MIN_PEERS_PROPAGATION => lagging_peers,
|
||||
_ => lagging_peers.into_iter().filter(|_| ::rand::random::<u32>() < fraction).collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
// taking at max of MAX_PEERS_PROPAGATION
|
||||
lucky_peers.iter().map(|&(id, _)| id.clone()).take(min(lucky_peers.len(), MAX_PEERS_PROPAGATION)).collect::<Vec<PeerId>>()
|
||||
};
|
||||
|
||||
let mut sent = 0;
|
||||
for peer_id in updated_peers {
|
||||
let rlp = ChainSync::create_latest_block_rlp(io.chain());
|
||||
self.send_packet(io, peer_id, NEW_BLOCK_PACKET, rlp);
|
||||
self.peers.get_mut(&peer_id).unwrap().latest_hash = local_best.clone();
|
||||
self.peers.get_mut(&peer_id).unwrap().latest_number = Some(best_number);
|
||||
sent = sent + 1;
|
||||
}
|
||||
sent
|
||||
}
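The fraction above is 1/sqrt(n) rescaled to the u32 range, so comparing ::rand::random::<u32>() against it keeps each lagging peer with probability about 1/sqrt(n), i.e. roughly sqrt(n) peers overall (capped elsewhere by MAX_PEERS_PROPAGATION). A standalone sketch of just that arithmetic (illustration only; propagation_fraction is a hypothetical helper, not part of the commit):

// 1/sqrt(n) expressed as a threshold on a uniformly random u32.
fn propagation_fraction(lagging: usize) -> u32 {
	((lagging as f64).powf(-0.5) * u32::max_value() as f64).round() as u32
}

fn main() {
	for n in &[4usize, 16, 64, 256] {
		let f = propagation_fraction(*n);
		let p = f as f64 / u32::max_value() as f64;
		// expected number of selected peers is n * p ~ sqrt(n)
		println!("{:3} lagging peers -> keep each with p = {:.3} (expected ~{:.1})", n, p, *n as f64 * p);
	}
}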
|
||||
|
||||
/// Propagates newly known hashes to all peers
|
||||
fn propagade_new_hashes(&mut self, local_best: &H256, best_number: BlockNumber, io: &mut SyncIo) -> usize {
|
||||
let updated_peers = self.get_lagging_peers(io);
|
||||
let mut sent = 0;
|
||||
let last_parent = HeaderView::new(&io.chain().block_header(BlockId::Hash(local_best.clone())).unwrap()).parent_hash();
|
||||
for (peer_id, peer_number) in updated_peers {
|
||||
let mut peer_best = self.peers.get(&peer_id).unwrap().latest_hash.clone();
|
||||
if best_number - peer_number > MAX_PEERS_PROPAGATION as BlockNumber {
|
||||
// If we think the peer is too far behind, just send the one latest hash
|
||||
peer_best = last_parent.clone();
|
||||
}
|
||||
sent = sent + match ChainSync::create_new_hashes_rlp(io.chain(), &peer_best, &local_best) {
|
||||
Some(rlp) => {
|
||||
{
|
||||
let peer = self.peers.get_mut(&peer_id).unwrap();
|
||||
peer.latest_hash = local_best.clone();
|
||||
peer.latest_number = Some(best_number);
|
||||
}
|
||||
self.send_packet(io, peer_id, NEW_BLOCK_HASHES_PACKET, rlp);
|
||||
1
|
||||
},
|
||||
None => 0
|
||||
}
|
||||
}
|
||||
sent
|
||||
}
|
||||
|
||||
/// Maintain other peers. Send out any new blocks and transactions
|
||||
pub fn maintain_sync(&mut self, io: &mut SyncIo) {
|
||||
self.check_resume(io);
|
||||
}
|
||||
|
||||
/// Should be called once the chain has a new block; triggers the latest block propagation
|
||||
pub fn chain_blocks_verified(&mut self, io: &mut SyncIo) {
|
||||
let chain = io.chain().chain_info();
|
||||
if (((chain.best_block_number as i64) - (self.last_send_block_number as i64)).abs() as BlockNumber) < MAX_PEER_LAG_PROPAGATION {
|
||||
let blocks = self.propagade_blocks(&chain.best_block_hash, chain.best_block_number, io);
|
||||
let hashes = self.propagade_new_hashes(&chain.best_block_hash, chain.best_block_number, io);
|
||||
if blocks != 0 || hashes != 0 {
|
||||
trace!(target: "sync", "Sent latest {} blocks and {} hashes to peers.", blocks, hashes);
|
||||
}
|
||||
}
|
||||
self.last_send_block_number = chain.best_block_number;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@ -1056,6 +1222,48 @@ mod tests {
|
||||
use tests::helpers::*;
|
||||
use super::*;
|
||||
use util::*;
|
||||
use super::{PeerInfo, PeerAsking};
|
||||
use ethcore::header::*;
|
||||
use ethcore::client::*;
|
||||
|
||||
fn get_dummy_block(order: u32, parent_hash: H256) -> Bytes {
|
||||
let mut header = Header::new();
|
||||
header.gas_limit = x!(0);
|
||||
header.difficulty = x!(order * 100);
|
||||
header.timestamp = (order * 10) as u64;
|
||||
header.number = order as u64;
|
||||
header.parent_hash = parent_hash;
|
||||
header.state_root = H256::zero();
|
||||
|
||||
let mut rlp = RlpStream::new_list(3);
|
||||
rlp.append(&header);
|
||||
rlp.append_raw(&rlp::EMPTY_LIST_RLP, 1);
|
||||
rlp.append_raw(&rlp::EMPTY_LIST_RLP, 1);
|
||||
rlp.out()
|
||||
}
|
||||
|
||||
fn get_dummy_blocks(order: u32, parent_hash: H256) -> Bytes {
|
||||
let mut rlp = RlpStream::new_list(1);
|
||||
rlp.append_raw(&get_dummy_block(order, parent_hash), 1);
|
||||
let difficulty: U256 = x!(100 * order);
|
||||
rlp.append(&difficulty);
|
||||
rlp.out()
|
||||
}
|
||||
|
||||
fn get_dummy_hashes() -> Bytes {
|
||||
let mut rlp = RlpStream::new_list(5);
|
||||
for _ in 0..5 {
|
||||
let mut hash_d_rlp = RlpStream::new_list(2);
|
||||
let hash: H256 = H256::from(0u64);
|
||||
let diff: U256 = U256::from(1u64);
|
||||
hash_d_rlp.append(&hash);
|
||||
hash_d_rlp.append(&diff);
|
||||
|
||||
rlp.append_raw(&hash_d_rlp.out(), 1);
|
||||
}
|
||||
|
||||
rlp.out()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn return_receipts_empty() {
|
||||
@ -1091,9 +1299,9 @@ mod tests {
|
||||
// the length of two rlp-encoded receipts
|
||||
assert_eq!(597, rlp_result.unwrap().1.out().len());
|
||||
|
||||
let mut sync = ChainSync::new();
|
||||
let mut sync = dummy_sync_with_peer(H256::new());
|
||||
io.sender = Some(2usize);
|
||||
sync.on_packet(&mut io, 1usize, super::GET_RECEIPTS_PACKET, &receipts_request);
|
||||
sync.on_packet(&mut io, 0usize, super::GET_RECEIPTS_PACKET, &receipts_request);
|
||||
assert_eq!(1, io.queue.len());
|
||||
}
|
||||
|
||||
@ -1119,9 +1327,219 @@ mod tests {
|
||||
// the length of one rlp-encoded hash
|
||||
assert_eq!(34, rlp_result.unwrap().1.out().len());
|
||||
|
||||
let mut sync = ChainSync::new();
|
||||
let mut sync = dummy_sync_with_peer(H256::new());
|
||||
io.sender = Some(2usize);
|
||||
sync.on_packet(&mut io, 1usize, super::GET_NODE_DATA_PACKET, &node_request);
|
||||
sync.on_packet(&mut io, 0usize, super::GET_NODE_DATA_PACKET, &node_request);
|
||||
assert_eq!(1, io.queue.len());
|
||||
}
|
||||
|
||||
fn dummy_sync_with_peer(peer_latest_hash: H256) -> ChainSync {
|
||||
let mut sync = ChainSync::new();
|
||||
sync.peers.insert(0,
|
||||
PeerInfo {
|
||||
protocol_version: 0,
|
||||
genesis: H256::zero(),
|
||||
network_id: U256::zero(),
|
||||
latest_hash: peer_latest_hash,
|
||||
latest_number: None,
|
||||
difficulty: U256::zero(),
|
||||
asking: PeerAsking::Nothing,
|
||||
asking_blocks: Vec::<BlockNumber>::new(),
|
||||
asking_hash: None,
|
||||
ask_time: 0f64,
|
||||
});
|
||||
sync
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn finds_lagging_peers() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(100, false);
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(10));
|
||||
let io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
let lagging_peers = sync.get_lagging_peers(&io);
|
||||
|
||||
assert_eq!(1, lagging_peers.len())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn calculates_tree_for_lagging_peer() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(15, false);
|
||||
|
||||
let start = client.block_hash_delta_minus(4);
|
||||
let end = client.block_hash_delta_minus(2);
|
||||
|
||||
// wrong way end -> start, should be None
|
||||
let rlp = ChainSync::create_new_hashes_rlp(&client, &end, &start);
|
||||
assert!(rlp.is_none());
|
||||
|
||||
let rlp = ChainSync::create_new_hashes_rlp(&client, &start, &end).unwrap();
|
||||
// size of three rlp-encoded hash-difficulty entries
|
||||
assert_eq!(107, rlp.len());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sends_new_hashes_to_lagging_peer() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(100, false);
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let best_hash = client.chain_info().best_block_hash.clone();
|
||||
let best_number = client.chain_info().best_block_number;
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
let peer_count = sync.propagade_new_hashes(&best_hash, best_number, &mut io);
|
||||
|
||||
// 1 message should be sent
|
||||
assert_eq!(1, io.queue.len());
|
||||
// 1 peer should be updated
|
||||
assert_eq!(1, peer_count);
|
||||
// NEW_BLOCK_HASHES_PACKET
|
||||
assert_eq!(0x01, io.queue[0].packet_id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn sends_latest_block_to_lagging_peer() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(100, false);
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let best_hash = client.chain_info().best_block_hash.clone();
|
||||
let best_number = client.chain_info().best_block_number;
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
let peer_count = sync.propagade_blocks(&best_hash, best_number, &mut io);
|
||||
|
||||
// 1 message should be sent
|
||||
assert_eq!(1, io.queue.len());
|
||||
// 1 peer should be updated
|
||||
assert_eq!(1, peer_count);
|
||||
// NEW_BLOCK_PACKET
|
||||
assert_eq!(0x07, io.queue[0].packet_id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handles_peer_new_block_mallformed() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(10, false);
|
||||
|
||||
let block_data = get_dummy_block(11, client.chain_info().best_block_hash);
|
||||
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
let block = UntrustedRlp::new(&block_data);
|
||||
|
||||
let result = sync.on_peer_new_block(&mut io, 0, &block);
|
||||
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handles_peer_new_block() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(10, false);
|
||||
|
||||
let block_data = get_dummy_blocks(11, client.chain_info().best_block_hash);
|
||||
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
let block = UntrustedRlp::new(&block_data);
|
||||
|
||||
let result = sync.on_peer_new_block(&mut io, 0, &block);
|
||||
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handles_peer_new_block_empty() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(10, false);
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
let empty_data = vec![];
|
||||
let block = UntrustedRlp::new(&empty_data);
|
||||
|
||||
let result = sync.on_peer_new_block(&mut io, 0, &block);
|
||||
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handles_peer_new_hashes() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(10, false);
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
let hashes_data = get_dummy_hashes();
|
||||
let hashes_rlp = UntrustedRlp::new(&hashes_data);
|
||||
|
||||
let result = sync.on_peer_new_hashes(&mut io, 0, &hashes_rlp);
|
||||
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn handles_peer_new_hashes_empty() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(10, false);
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
let empty_hashes_data = vec![];
|
||||
let hashes_rlp = UntrustedRlp::new(&empty_hashes_data);
|
||||
|
||||
let result = sync.on_peer_new_hashes(&mut io, 0, &hashes_rlp);
|
||||
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
// the idea is that what we produce when propagating the latest hashes should be accepted by
// on_peer_new_hashes in our code as well
|
||||
#[test]
|
||||
fn hashes_rlp_mutually_acceptable() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(100, false);
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let best_hash = client.chain_info().best_block_hash.clone();
|
||||
let best_number = client.chain_info().best_block_number;
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
sync.propagade_new_hashes(&best_hash, best_number, &mut io);
|
||||
|
||||
let data = &io.queue[0].data.clone();
|
||||
let result = sync.on_peer_new_hashes(&mut io, 0, &UntrustedRlp::new(&data));
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
|
||||
// the idea is that what we produce when propagating the latest block should be accepted by
// on_peer_new_block in our code as well
|
||||
#[test]
|
||||
fn block_rlp_mutually_acceptable() {
|
||||
let mut client = TestBlockChainClient::new();
|
||||
client.add_blocks(100, false);
|
||||
let mut queue = VecDeque::new();
|
||||
let mut sync = dummy_sync_with_peer(client.block_hash_delta_minus(5));
|
||||
let best_hash = client.chain_info().best_block_hash.clone();
|
||||
let best_number = client.chain_info().best_block_number;
|
||||
let mut io = TestIo::new(&mut client, &mut queue, None);
|
||||
|
||||
sync.propagade_blocks(&best_hash, best_number, &mut io);
|
||||
|
||||
let data = &io.queue[0].data.clone();
|
||||
let result = sync.on_peer_new_block(&mut io, 0, &UntrustedRlp::new(&data));
|
||||
assert!(result.is_ok());
|
||||
}
|
||||
}
|
@ -16,8 +16,11 @@
|
||||
|
||||
#![warn(missing_docs)]
|
||||
#![feature(plugin)]
|
||||
#![plugin(clippy)]
|
||||
#![feature(augmented_assignments)]
|
||||
#![plugin(clippy)]
|
||||
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
|
||||
#![allow(clone_on_copy)]
|
||||
|
||||
//! Blockchain sync module
|
||||
//! Implements ethereum protocol version 63 as specified here:
|
||||
//! https://github.com/ethereum/wiki/wiki/Ethereum-Wire-Protocol
|
||||
@ -50,6 +53,7 @@ extern crate ethcore_util as util;
|
||||
extern crate ethcore;
|
||||
extern crate env_logger;
|
||||
extern crate time;
|
||||
extern crate rand;
|
||||
|
||||
use std::ops::*;
|
||||
use std::sync::*;
|
||||
@ -75,7 +79,7 @@ pub struct EthSync {
|
||||
sync: RwLock<ChainSync>
|
||||
}
|
||||
|
||||
pub use self::chain::SyncStatus;
|
||||
pub use self::chain::{SyncStatus, SyncState};
|
||||
|
||||
impl EthSync {
|
||||
/// Creates and register protocol with the network service
|
||||
@ -125,4 +129,10 @@ impl NetworkProtocolHandler<SyncMessage> for EthSync {
|
||||
self.sync.write().unwrap().maintain_peers(&mut NetSyncIo::new(io, self.chain.deref()));
|
||||
self.sync.write().unwrap().maintain_sync(&mut NetSyncIo::new(io, self.chain.deref()));
|
||||
}
|
||||
|
||||
fn message(&self, io: &NetworkContext<SyncMessage>, message: &SyncMessage) {
|
||||
if let SyncMessage::BlockVerified = *message {
|
||||
self.sync.write().unwrap().chain_blocks_verified(&mut NetSyncIo::new(io, self.chain.deref()));
|
||||
}
|
||||
}
|
||||
}
|
@ -168,9 +168,9 @@ impl<K, V> RangeCollection<K, V> for Vec<(K, Vec<V>)> where K: Ord + PartialEq +
|
||||
fn insert_item(&mut self, key: K, value: V) {
|
||||
assert!(!self.have_item(&key));
|
||||
|
||||
// todo: fix warning
|
||||
let lower = match self.binary_search_by(|&(k, _)| k.cmp(&key).reverse()) {
|
||||
Ok(index) => index,
|
||||
Err(index) => index,
|
||||
Ok(index) | Err(index) => index
|
||||
};
|
||||
|
||||
let mut to_remove: Option<usize> = None;
|
||||
|
@ -15,7 +15,7 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use util::*;
|
||||
use ethcore::client::{BlockChainClient};
|
||||
use ethcore::client::{BlockChainClient, BlockId};
|
||||
use io::SyncIo;
|
||||
use chain::{SyncState};
|
||||
use super::helpers::*;
|
||||
@ -27,7 +27,7 @@ fn two_peers() {
|
||||
net.peer_mut(1).chain.add_blocks(1000, false);
|
||||
net.peer_mut(2).chain.add_blocks(1000, false);
|
||||
net.sync();
|
||||
assert!(net.peer(0).chain.block_at(1000).is_some());
|
||||
assert!(net.peer(0).chain.block(BlockId::Number(1000)).is_some());
|
||||
assert_eq!(net.peer(0).chain.blocks.read().unwrap().deref(), net.peer(1).chain.blocks.read().unwrap().deref());
|
||||
}
|
||||
|
||||
@ -60,7 +60,7 @@ fn empty_blocks() {
|
||||
net.peer_mut(2).chain.add_blocks(5, n % 2 == 0);
|
||||
}
|
||||
net.sync();
|
||||
assert!(net.peer(0).chain.block_at(1000).is_some());
|
||||
assert!(net.peer(0).chain.block(BlockId::Number(1000)).is_some());
|
||||
assert_eq!(net.peer(0).chain.blocks.read().unwrap().deref(), net.peer(1).chain.blocks.read().unwrap().deref());
|
||||
}
|
||||
|
||||
@ -105,3 +105,69 @@ fn status_empty() {
|
||||
let net = TestNet::new(2);
|
||||
assert_eq!(net.peer(0).sync.status().state, SyncState::NotSynced);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn status_packet() {
|
||||
let mut net = TestNet::new(2);
|
||||
net.peer_mut(0).chain.add_blocks(100, false);
|
||||
net.peer_mut(1).chain.add_blocks(1, false);
|
||||
|
||||
net.start();
|
||||
|
||||
net.sync_step_peer(0);
|
||||
|
||||
assert_eq!(1, net.peer(0).queue.len());
|
||||
assert_eq!(0x00, net.peer(0).queue[0].packet_id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn propagade_hashes() {
|
||||
let mut net = TestNet::new(6);
|
||||
net.peer_mut(1).chain.add_blocks(10, false);
|
||||
net.sync();
|
||||
|
||||
net.peer_mut(0).chain.add_blocks(10, false);
|
||||
net.sync();
|
||||
net.trigger_block_verified(0); //first event just sets the marker
|
||||
net.trigger_block_verified(0);
|
||||
|
||||
// 5 peers to sync
|
||||
assert_eq!(5, net.peer(0).queue.len());
|
||||
let mut hashes = 0;
|
||||
let mut blocks = 0;
|
||||
for i in 0..5 {
|
||||
if net.peer(0).queue[i].packet_id == 0x1 {
|
||||
hashes += 1;
|
||||
}
|
||||
if net.peer(0).queue[i].packet_id == 0x7 {
|
||||
blocks += 1;
|
||||
}
|
||||
}
|
||||
assert!(blocks + hashes == 5);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn propagade_blocks() {
|
||||
let mut net = TestNet::new(2);
|
||||
net.peer_mut(1).chain.add_blocks(10, false);
|
||||
net.sync();
|
||||
|
||||
net.peer_mut(0).chain.add_blocks(10, false);
|
||||
net.trigger_block_verified(0); //first event just sets the marker
|
||||
net.trigger_block_verified(0);
|
||||
|
||||
assert!(!net.peer(0).queue.is_empty());
|
||||
// NEW_BLOCK_PACKET
|
||||
assert_eq!(0x07, net.peer(0).queue[0].packet_id);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn restart_on_malformed_block() {
|
||||
let mut net = TestNet::new(2);
|
||||
net.peer_mut(1).chain.add_blocks(10, false);
|
||||
net.peer_mut(1).chain.corrupt_block(6);
|
||||
net.sync_steps(10);
|
||||
|
||||
assert_eq!(net.peer(0).chain.chain_info().best_block_number, 4);
|
||||
}
|
||||
|
||||
|
@ -15,13 +15,14 @@
|
||||
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
use util::*;
|
||||
use ethcore::client::{BlockChainClient, BlockStatus, TreeRoute, BlockChainInfo};
|
||||
use ethcore::client::{BlockChainClient, BlockStatus, TreeRoute, BlockChainInfo, TransactionId, BlockId};
|
||||
use ethcore::block_queue::BlockQueueInfo;
|
||||
use ethcore::header::{Header as BlockHeader, BlockNumber};
|
||||
use ethcore::error::*;
|
||||
use io::SyncIo;
|
||||
use chain::{ChainSync};
|
||||
use ethcore::receipt::Receipt;
|
||||
use ethcore::transaction::LocalizedTransaction;
|
||||
|
||||
pub struct TestBlockChainClient {
|
||||
pub blocks: RwLock<HashMap<H256, Bytes>>,
|
||||
@ -69,67 +70,99 @@ impl TestBlockChainClient {
|
||||
self.import_block(rlp.as_raw().to_vec()).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn corrupt_block(&mut self, n: BlockNumber) {
|
||||
let hash = self.block_hash(BlockId::Number(n)).unwrap();
|
||||
let mut header: BlockHeader = decode(&self.block_header(BlockId::Number(n)).unwrap());
|
||||
header.parent_hash = H256::new();
|
||||
let mut rlp = RlpStream::new_list(3);
|
||||
rlp.append(&header);
|
||||
rlp.append_raw(&rlp::NULL_RLP, 1);
|
||||
rlp.append_raw(&rlp::NULL_RLP, 1);
|
||||
self.blocks.write().unwrap().insert(hash, rlp.out());
|
||||
}
|
||||
|
||||
pub fn block_hash_delta_minus(&mut self, delta: usize) -> H256 {
|
||||
let blocks_read = self.numbers.read().unwrap();
|
||||
let index = blocks_read.len() - delta;
|
||||
blocks_read[&index].clone()
|
||||
}
|
||||
|
||||
fn block_hash(&self, id: BlockId) -> Option<H256> {
|
||||
match id {
|
||||
BlockId::Hash(hash) => Some(hash),
|
||||
BlockId::Number(n) => self.numbers.read().unwrap().get(&(n as usize)).cloned(),
|
||||
BlockId::Earliest => self.numbers.read().unwrap().get(&0).cloned(),
|
||||
BlockId::Latest => self.numbers.read().unwrap().get(&(self.numbers.read().unwrap().len() - 1)).cloned()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BlockChainClient for TestBlockChainClient {
|
||||
fn block_total_difficulty(&self, _h: &H256) -> Option<U256> {
|
||||
fn block_total_difficulty(&self, _id: BlockId) -> Option<U256> {
|
||||
Some(U256::zero())
|
||||
}
|
||||
|
||||
fn code(&self, _address: &Address) -> Option<Bytes> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn block_header(&self, h: &H256) -> Option<Bytes> {
|
||||
self.blocks.read().unwrap().get(h).map(|r| Rlp::new(r).at(0).as_raw().to_vec())
|
||||
|
||||
fn transaction(&self, _id: TransactionId) -> Option<LocalizedTransaction> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn block_body(&self, h: &H256) -> Option<Bytes> {
|
||||
self.blocks.read().unwrap().get(h).map(|r| {
|
||||
fn block_header(&self, id: BlockId) -> Option<Bytes> {
|
||||
self.block_hash(id).and_then(|hash| self.blocks.read().unwrap().get(&hash).map(|r| Rlp::new(r).at(0).as_raw().to_vec()))
|
||||
}
|
||||
|
||||
fn block_body(&self, id: BlockId) -> Option<Bytes> {
|
||||
self.block_hash(id).and_then(|hash| self.blocks.read().unwrap().get(&hash).map(|r| {
|
||||
let mut stream = RlpStream::new_list(2);
|
||||
stream.append_raw(Rlp::new(&r).at(1).as_raw(), 1);
|
||||
stream.append_raw(Rlp::new(&r).at(2).as_raw(), 1);
|
||||
stream.out()
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
fn block(&self, h: &H256) -> Option<Bytes> {
|
||||
self.blocks.read().unwrap().get(h).cloned()
|
||||
fn block(&self, id: BlockId) -> Option<Bytes> {
|
||||
self.block_hash(id).and_then(|hash| self.blocks.read().unwrap().get(&hash).cloned())
|
||||
}
|
||||
|
||||
fn block_status(&self, h: &H256) -> BlockStatus {
|
||||
match self.blocks.read().unwrap().get(h) {
|
||||
Some(_) => BlockStatus::InChain,
|
||||
None => BlockStatus::Unknown
|
||||
fn block_status(&self, id: BlockId) -> BlockStatus {
|
||||
match id {
|
||||
BlockId::Number(number) if (number as usize) < self.blocks.read().unwrap().len() => BlockStatus::InChain,
|
||||
BlockId::Hash(ref hash) if self.blocks.read().unwrap().get(hash).is_some() => BlockStatus::InChain,
|
||||
_ => BlockStatus::Unknown
|
||||
}
|
||||
}
|
||||
|
||||
fn block_total_difficulty_at(&self, _number: BlockNumber) -> Option<U256> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn block_header_at(&self, n: BlockNumber) -> Option<Bytes> {
|
||||
self.numbers.read().unwrap().get(&(n as usize)).and_then(|h| self.block_header(h))
|
||||
}
|
||||
|
||||
fn block_body_at(&self, n: BlockNumber) -> Option<Bytes> {
|
||||
self.numbers.read().unwrap().get(&(n as usize)).and_then(|h| self.block_body(h))
|
||||
}
|
||||
|
||||
fn block_at(&self, n: BlockNumber) -> Option<Bytes> {
|
||||
self.numbers.read().unwrap().get(&(n as usize)).map(|h| self.blocks.read().unwrap().get(h).unwrap().clone())
|
||||
}
|
||||
|
||||
fn block_status_at(&self, n: BlockNumber) -> BlockStatus {
|
||||
if (n as usize) < self.blocks.read().unwrap().len() {
|
||||
BlockStatus::InChain
|
||||
} else {
|
||||
BlockStatus::Unknown
|
||||
}
|
||||
}
|
||||
|
||||
fn tree_route(&self, _from: &H256, _to: &H256) -> Option<TreeRoute> {
|
||||
// works only if blocks are one after another 1 -> 2 -> 3
|
||||
fn tree_route(&self, from: &H256, to: &H256) -> Option<TreeRoute> {
|
||||
Some(TreeRoute {
|
||||
blocks: Vec::new(),
|
||||
ancestor: H256::new(),
|
||||
index: 0
|
||||
index: 0,
|
||||
blocks: {
|
||||
let numbers_read = self.numbers.read().unwrap();
|
||||
let mut adding = false;
|
||||
|
||||
let mut blocks = Vec::new();
|
||||
for (_, hash) in numbers_read.iter().sort_by(|tuple1, tuple2| tuple1.0.cmp(tuple2.0)) {
|
||||
if hash == to {
|
||||
if adding {
|
||||
blocks.push(hash.clone());
|
||||
}
|
||||
adding = false;
|
||||
break;
|
||||
}
|
||||
if hash == from {
|
||||
adding = true;
|
||||
}
|
||||
if adding {
|
||||
blocks.push(hash.clone());
|
||||
}
|
||||
}
|
||||
if adding { Vec::new() } else { blocks }
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@ -202,7 +235,6 @@ impl BlockChainClient for TestBlockChainClient {

	fn queue_info(&self) -> BlockQueueInfo {
		BlockQueueInfo {
			full: false,
			verified_queue_size: 0,
			unverified_queue_size: 0,
			verifying_queue_size: 0,
@ -334,6 +366,11 @@ impl TestNet {
		}
	}

	pub fn sync_step_peer(&mut self, peer_num: usize) {
		let mut peer = self.peer_mut(peer_num);
		peer.sync.maintain_sync(&mut TestIo::new(&mut peer.chain, &mut peer.queue, None));
	}

	pub fn restart_peer(&mut self, i: usize) {
		let peer = self.peer_mut(i);
		peer.sync.restart(&mut TestIo::new(&mut peer.chain, &mut peer.queue, None));
@ -362,4 +399,9 @@ impl TestNet {
	pub fn done(&self) -> bool {
		self.peers.iter().all(|p| p.queue.is_empty())
	}

	pub fn trigger_block_verified(&mut self, peer_id: usize) {
		let mut peer = self.peer_mut(peer_id);
		peer.sync.chain_blocks_verified(&mut TestIo::new(&mut peer.chain, &mut peer.queue, None));
	}
}
@ -26,6 +26,7 @@ crossbeam = "0.2"
slab = { git = "https://github.com/arkpar/slab.git" }
sha3 = { path = "sha3" }
serde = "0.6.7"
clippy = "0.0.37"
clippy = "0.0.41"
json-tests = { path = "json-tests" }
target_info = "0.1.0"
igd = "0.4.2"
@ -17,7 +17,6 @@
#![feature(test)]

extern crate test;
extern crate rand;
extern crate ethcore_util;
#[macro_use]
extern crate log;
@ -74,7 +74,6 @@ impl From<::secp256k1::Error> for CryptoError {
		match e {
			::secp256k1::Error::InvalidMessage => CryptoError::InvalidMessage,
			::secp256k1::Error::InvalidPublicKey => CryptoError::InvalidPublic,
			::secp256k1::Error::InvalidSignature => CryptoError::InvalidSignature,
			::secp256k1::Error::InvalidSecretKey => CryptoError::InvalidSecret,
			_ => CryptoError::InvalidSignature,
		}
@ -20,6 +20,7 @@ use rustc_serialize::hex::FromHexError;
use network::NetworkError;
use rlp::DecoderError;
use io;
use std::fmt;

#[derive(Debug)]
/// Error in database subsystem.
@ -55,6 +56,27 @@ pub enum UtilError {
	BadSize,
}


#[derive(Debug, PartialEq, Eq)]
/// Error indicating an expected value was not found.
pub struct Mismatch<T: fmt::Debug> {
	/// Value expected.
	pub expected: T,
	/// Value found.
	pub found: T,
}

#[derive(Debug, PartialEq, Eq)]
/// Error indicating value found is outside of a valid range.
pub struct OutOfBounds<T: fmt::Debug> {
	/// Minimum allowed value.
	pub min: Option<T>,
	/// Maximum allowed value.
	pub max: Option<T>,
	/// Value found.
	pub found: T,
}

impl From<FromHexError> for UtilError {
	fn from(err: FromHexError) -> UtilError {
		UtilError::FromHex(err)
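The two error carriers added above are plain data structs: `Mismatch` records an expected/found pair and `OutOfBounds` records an optional min/max window plus the offending value. The sketch below mirrors those definitions locally to show how a validation routine might return them; the local structs and the `check_range` helper are illustrative, not part of the crate.

use std::fmt;

// Local mirrors of the structs introduced in util/src/error.rs above.
#[derive(Debug, PartialEq, Eq)]
pub struct Mismatch<T: fmt::Debug> {
	pub expected: T,
	pub found: T,
}

#[derive(Debug, PartialEq, Eq)]
pub struct OutOfBounds<T: fmt::Debug> {
	pub min: Option<T>,
	pub max: Option<T>,
	pub found: T,
}

// Hypothetical helper: reject a value outside a [min, max] window.
fn check_range(found: u64, min: u64, max: u64) -> Result<u64, OutOfBounds<u64>> {
	if found < min || found > max {
		Err(OutOfBounds { min: Some(min), max: Some(max), found: found })
	} else {
		Ok(found)
	}
}

fn main() {
	assert_eq!(check_range(5, 1, 10), Ok(5));
	assert_eq!(
		check_range(42, 1, 10),
		Err(OutOfBounds { min: Some(1), max: Some(10), found: 42 })
	);
	// A Mismatch is just constructed with what was expected and what turned up.
	let m = Mismatch { expected: 3u32, found: 7u32 };
	assert_eq!(m, Mismatch { expected: 3, found: 7 });
}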
@ -296,7 +296,7 @@ macro_rules! impl_hash {
				try!(write!(f, "{:02x}", i));
			}
			try!(write!(f, "…"));
			for i in &self.0[$size - 4..$size] {
			for i in &self.0[$size - 2..$size] {
				try!(write!(f, "{:02x}", i));
			}
			Ok(())
@ -647,7 +647,7 @@ mod tests {
	fn hash() {
		let h = H64([0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]);
		assert_eq!(H64::from_str("0123456789abcdef").unwrap(), h);
		assert_eq!(format!("{}", h), "0123…89abcdef");
		assert_eq!(format!("{}", h), "0123…cdef");
		assert_eq!(format!("{:?}", h), "0123456789abcdef");
		assert_eq!(h.hex(), "0123456789abcdef");
		assert!(h == h);
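The Display change above shortens the abbreviated form of a hash from "first two bytes … last four bytes" to "first two bytes … last two bytes", which is why the test expectation moves from `0123…89abcdef` to `0123…cdef`. A self-contained sketch of that elided formatting follows; the `Short` wrapper type is an illustrative stand-in, not the crate's `impl_hash!` macro.

use std::fmt;

/// Illustrative wrapper that prints an 8-byte value as "xxxx…xxxx":
/// the first two and last two bytes in hex, separated by an ellipsis.
struct Short([u8; 8]);

impl fmt::Display for Short {
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		for i in &self.0[0..2] {
			write!(f, "{:02x}", i)?;
		}
		write!(f, "…")?;
		for i in &self.0[8 - 2..8] {
			write!(f, "{:02x}", i)?;
		}
		Ok(())
	}
}

fn main() {
	let h = Short([0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]);
	// Matches the updated expectation in the hash test above.
	assert_eq!(format!("{}", h), "0123…cdef");
}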
@ -31,16 +31,16 @@
//!
//! impl IoHandler<MyMessage> for MyHandler {
//! fn initialize(&self, io: &IoContext<MyMessage>) {
//! io.register_timer(0, 1000).unwrap();
//! }
//! io.register_timer(0, 1000).unwrap();
//! }
//!
//! fn timeout(&self, _io: &IoContext<MyMessage>, timer: TimerToken) {
//! println!("Timeout {}", timer);
//! }
//! fn timeout(&self, _io: &IoContext<MyMessage>, timer: TimerToken) {
//! println!("Timeout {}", timer);
//! }
//!
//! fn message(&self, _io: &IoContext<MyMessage>, message: &MyMessage) {
//! println!("Message {}", message.data);
//! }
//! fn message(&self, _io: &IoContext<MyMessage>, message: &MyMessage) {
//! println!("Message {}", message.data);
//! }
//! }
//!
//! fn main () {
@ -25,6 +25,7 @@ use io::{IoError, IoHandler};
use arrayvec::*;
use crossbeam::sync::chase_lev;
use io::worker::{Worker, Work, WorkType};
use panics::*;

/// Timer ID
pub type TimerToken = usize;
@ -159,13 +160,21 @@ pub struct IoManager<Message> where Message: Send + Sync {

impl<Message> IoManager<Message> where Message: Send + Sync + Clone + 'static {
	/// Creates a new instance and registers it with the event loop.
	pub fn start(event_loop: &mut EventLoop<IoManager<Message>>) -> Result<(), UtilError> {
	pub fn start(panic_handler: Arc<PanicHandler>, event_loop: &mut EventLoop<IoManager<Message>>) -> Result<(), UtilError> {
		let (worker, stealer) = chase_lev::deque();
		let num_workers = 4;
		let work_ready_mutex = Arc::new(Mutex::new(()));
		let work_ready = Arc::new(Condvar::new());
		let workers = (0..num_workers).map(|i|
			Worker::new(i, stealer.clone(), IoChannel::new(event_loop.channel()), work_ready.clone(), work_ready_mutex.clone())).collect();
			Worker::new(
				i,
				stealer.clone(),
				IoChannel::new(event_loop.channel()),
				work_ready.clone(),
				work_ready_mutex.clone(),
				panic_handler.clone()
			)
		).collect();

		let mut io = IoManager {
			timers: Arc::new(RwLock::new(HashMap::new())),
@ -306,19 +315,32 @@ impl<Message> IoChannel<Message> where Message: Send + Clone {
/// General IO Service. Starts an event loop and dispatches IO requests.
/// 'Message' is a notification message type
pub struct IoService<Message> where Message: Send + Sync + Clone + 'static {
	panic_handler: Arc<PanicHandler>,
	thread: Option<JoinHandle<()>>,
	host_channel: Sender<IoMessage<Message>>,
}

impl<Message> MayPanic for IoService<Message> where Message: Send + Sync + Clone + 'static {
	fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
		self.panic_handler.on_panic(closure);
	}
}

impl<Message> IoService<Message> where Message: Send + Sync + Clone + 'static {
	/// Starts IO event loop
	pub fn start() -> Result<IoService<Message>, UtilError> {
		let panic_handler = PanicHandler::new_in_arc();
		let mut event_loop = EventLoop::new().unwrap();
		let channel = event_loop.channel();
		let panic = panic_handler.clone();
		let thread = thread::spawn(move || {
			IoManager::<Message>::start(&mut event_loop).unwrap(); //TODO:
			let p = panic.clone();
			panic.catch_panic(move || {
				IoManager::<Message>::start(p, &mut event_loop).unwrap();
			}).unwrap()
		});
		Ok(IoService {
			panic_handler: panic_handler,
			thread: Some(thread),
			host_channel: channel
		})
@ -21,6 +21,7 @@ use std::sync::atomic::{AtomicBool, Ordering as AtomicOrdering};
use crossbeam::sync::chase_lev;
use io::service::{HandlerId, IoChannel, IoContext};
use io::{IoHandler};
use panics::*;

pub enum WorkType<Message> {
	Readable,
@ -51,8 +52,10 @@ impl Worker {
		stealer: chase_lev::Stealer<Work<Message>>,
		channel: IoChannel<Message>,
		wait: Arc<Condvar>,
		wait_mutex: Arc<Mutex<()>>) -> Worker
		where Message: Send + Sync + Clone + 'static {
		wait_mutex: Arc<Mutex<()>>,
		panic_handler: Arc<PanicHandler>
	) -> Worker
	where Message: Send + Sync + Clone + 'static {
		let deleting = Arc::new(AtomicBool::new(false));
		let mut worker = Worker {
			thread: None,
@ -60,7 +63,11 @@ impl Worker {
			deleting: deleting.clone(),
		};
		worker.thread = Some(thread::Builder::new().name(format!("IO Worker #{}", index)).spawn(
			move || Worker::work_loop(stealer, channel.clone(), wait, wait_mutex.clone(), deleting))
			move || {
				panic_handler.catch_panic(move || {
					Worker::work_loop(stealer, channel.clone(), wait, wait_mutex.clone(), deleting)
				}).unwrap()
			})
			.expect("Error creating worker thread"));
		worker
	}
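The worker change above does two things: the thread gets a descriptive name via `thread::Builder`, and its whole work loop runs inside the panic handler so a crashing handler can be reported instead of silently killing the worker. A minimal, self-contained sketch of that shape, using the stable `std::panic::catch_unwind` as a stand-in for the crate's `PanicHandler::catch_panic` (which wrapped the then-unstable `thread::catch_panic`):

use std::panic::{self, AssertUnwindSafe};
use std::thread;

fn work_loop(index: usize) {
	// Stand-in for the real stealer/condvar loop: just panic once to show
	// that the wrapper catches and reports it.
	if index == 0 {
		panic!("handler failed");
	}
}

fn main() {
	let mut handles = Vec::new();
	for index in 0..2 {
		let handle = thread::Builder::new()
			// Named threads make panics and debugger output much easier to read.
			.name(format!("IO Worker #{}", index))
			.spawn(move || {
				// Catch the panic inside the worker so it can be reported
				// (here just printed) rather than only unwinding the thread.
				let result = panic::catch_unwind(AssertUnwindSafe(|| work_loop(index)));
				if let Err(err) = result {
					let msg = err.downcast_ref::<&str>().cloned().unwrap_or("unknown panic");
					println!("{} panicked: {}", thread::current().name().unwrap_or("?"), msg);
				}
			})
			.expect("Error creating worker thread");
		handles.push(handle);
	}
	for handle in handles {
		handle.join().unwrap();
	}
}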
@ -47,10 +47,10 @@ impl Clone for JournalDB {
	}
}

const LAST_ERA_KEY : [u8; 4] = [ b'l', b'a', b's', b't' ];
const LATEST_ERA_KEY : [u8; 4] = [ b'l', b'a', b's', b't' ];
const VERSION_KEY : [u8; 4] = [ b'j', b'v', b'e', b'r' ];

const DB_VERSION: u32 = 1;
const DB_VERSION: u32 = 2;

impl JournalDB {
	/// Create a new instance given a `backing` database.
@ -87,7 +87,7 @@ impl JournalDB {

	/// Check if this database has any commits
	pub fn is_empty(&self) -> bool {
		self.backing.get(&LAST_ERA_KEY).expect("Low level database error").is_none()
		self.backing.get(&LATEST_ERA_KEY).expect("Low level database error").is_none()
	}

	/// Commit all recent insert operations and historical removals from the old era
@ -144,6 +144,7 @@ impl JournalDB {
			r.append(&inserts);
			r.append(&removes);
			try!(batch.put(&last, r.as_raw()));
			try!(batch.put(&LATEST_ERA_KEY, &encode(&now)));
		}

		// apply old commits' details
@ -181,7 +182,6 @@ impl JournalDB {
				try!(batch.delete(&h));
				deletes += 1;
			}
			try!(batch.put(&LAST_ERA_KEY, &encode(&end_era)));
			trace!("JournalDB: delete journal for time #{}.{}, (canon was {}): {} entries", end_era, index, canon_id, deletes);
		}

@ -228,8 +228,8 @@ impl JournalDB {

	fn read_counters(db: &DB) -> HashMap<H256, i32> {
		let mut res = HashMap::new();
		if let Some(val) = db.get(&LAST_ERA_KEY).expect("Low-level database error.") {
			let mut era = decode::<u64>(&val) + 1;
		if let Some(val) = db.get(&LATEST_ERA_KEY).expect("Low-level database error.") {
			let mut era = decode::<u64>(&val);
			loop {
				let mut index = 0usize;
				while let Some(rlp_data) = db.get({
@ -245,10 +245,10 @@ impl JournalDB {
					}
					index += 1;
				};
				if index == 0 {
				if index == 0 || era == 0 {
					break;
				}
				era += 1;
				era -= 1;
			}
		}
		trace!("Recovered {} counters", res.len());
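The `read_counters` change above reverses the direction of the journal scan: instead of starting above the last flushed era and walking upward, it now starts at the newly recorded `LATEST_ERA_KEY` and walks downward, stopping at era 0 or at the first era that has no journal records. A self-contained sketch of just that loop shape, with a `HashMap` keyed by `(era, index)` standing in for the backing database (an assumption for the sketch, not the crate's RocksDB layout):

use std::collections::HashMap;

/// Count journal entries by walking eras downward from the most recent one,
/// stopping at era 0 or at the first era with no entries at all.
fn count_entries(journal: &HashMap<(u64, usize), u32>, latest_era: u64) -> u32 {
	let mut total = 0;
	let mut era = latest_era;
	loop {
		let mut index = 0usize;
		// Consume every record journalled under this era.
		while let Some(refs) = journal.get(&(era, index)) {
			total += *refs;
			index += 1;
		}
		if index == 0 || era == 0 {
			break;
		}
		era -= 1;
	}
	total
}

fn main() {
	let mut journal = HashMap::new();
	journal.insert((2, 0), 3); // era 2 has two journal records
	journal.insert((2, 1), 1);
	journal.insert((1, 0), 2); // era 1 has one
	assert_eq!(count_entries(&journal, 2), 6);
}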
@ -426,4 +426,32 @@ mod tests {
		jdb.commit(2, &b"2a".sha3(), Some((1, b"1a".sha3()))).unwrap();
		assert!(jdb.exists(&foo));
	}

	#[test]
	fn reopen() {
		use rocksdb::DB;
		let mut dir = ::std::env::temp_dir();
		dir.push(H32::random().hex());

		let foo = {
			let mut jdb = JournalDB::new(DB::open_default(dir.to_str().unwrap()).unwrap());
			// history is 1
			let foo = jdb.insert(b"foo");
			jdb.commit(0, &b"0".sha3(), None).unwrap();
			foo
		};

		{
			let mut jdb = JournalDB::new(DB::open_default(dir.to_str().unwrap()).unwrap());
			jdb.remove(&foo);
			jdb.commit(1, &b"1".sha3(), Some((0, b"0".sha3()))).unwrap();
		}

		{
			let mut jdb = JournalDB::new(DB::open_default(dir.to_str().unwrap()).unwrap());
			assert!(jdb.exists(&foo));
			jdb.commit(2, &b"2".sha3(), Some((1, b"1".sha3()))).unwrap();
			assert!(!jdb.exists(&foo));
		}
	}
}
1088
util/src/keys/directory.rs
Normal file
File diff suppressed because it is too large
19
util/src/keys/mod.rs
Normal file
@ -0,0 +1,19 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Key management module

pub mod directory;
@ -19,8 +19,18 @@
#![feature(augmented_assignments)]
#![feature(associated_consts)]
#![feature(plugin)]
#![feature(catch_panic)]
// Clippy settings
#![plugin(clippy)]
#![allow(needless_range_loop, match_bool)]
// TODO [todr] not really sure
#![allow(needless_range_loop)]
// Shorter than if-else
#![allow(match_bool)]
// We use that to be more explicit about handled cases
#![allow(match_same_arms)]
// Keeps consistency (all lines with `.clone()`) and helpful when changing ref to non-ref.
#![allow(clone_on_copy)]

//! Ethcore-util library
//!
//! ### Rust version:
@ -98,6 +108,7 @@ extern crate crossbeam;
extern crate serde;
#[macro_use]
extern crate log as rlog;
extern crate igd;

pub mod standard;
#[macro_use]
@ -129,6 +140,8 @@ pub mod semantic_version;
pub mod io;
pub mod network;
pub mod log;
pub mod panics;
pub mod keys;

pub use common::*;
pub use misc::*;
@ -149,3 +162,6 @@ pub use semantic_version::*;
pub use network::*;
pub use io::*;
pub use log::*;

#[cfg(test)]
mod tests;
@ -16,6 +16,7 @@

//! Diff misc.

use std::fs::File;
use common::*;

#[derive(Debug,Clone,PartialEq,Eq)]
@ -53,3 +54,11 @@ pub enum Filth {
	/// Data has been changed.
	Dirty,
}

/// Read the whole contents of a file `name`.
pub fn contents(name: &str) -> Result<Bytes, UtilError> {
	let mut file = try!(File::open(name));
	let mut ret: Vec<u8> = Vec::new();
	try!(file.read_to_end(&mut ret));
	Ok(ret)
}
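The new `contents` helper above is a thin wrapper around `File::open` plus `read_to_end`, returning the file's bytes or the underlying error. A self-contained equivalent using `std::io::Error` in place of the crate's `UtilError` (an assumption made just for this sketch):

use std::fs::File;
use std::io::{self, Read};

/// Read the whole contents of a file `name` into a byte vector.
fn contents(name: &str) -> Result<Vec<u8>, io::Error> {
	let mut file = File::open(name)?;
	let mut ret: Vec<u8> = Vec::new();
	file.read_to_end(&mut ret)?;
	Ok(ret)
}

fn main() -> Result<(), io::Error> {
	// Any readable path works here; /etc/hostname is just a convenient example.
	let bytes = contents("/etc/hostname")?;
	println!("read {} bytes", bytes.len());
	Ok(())
}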
@ -211,7 +211,7 @@ impl Discovery {
		}

		let mut ret:Vec<&NodeId> = Vec::new();
		for (_, nodes) in found {
		for nodes in found.values() {
			for n in nodes {
				if ret.len() < BUCKET_SIZE as usize /* && n->endpoint && n->endpoint.isAllowed() */ {
					ret.push(n);
@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

use std::net::{SocketAddr};
use std::net::{SocketAddr, SocketAddrV4};
use std::collections::{HashMap};
use std::hash::{Hasher};
use std::str::{FromStr};
@ -36,6 +36,7 @@ use network::NetworkProtocolHandler;
use network::node::*;
use network::stats::NetworkStats;
use network::error::DisconnectReason;
use igd::{PortMappingProtocol,search_gateway};

type Slab<T> = ::slab::Slab<T, usize>;

@ -86,6 +87,42 @@ impl NetworkConfiguration {
		config.public_address = SocketAddr::from_str(&format!("0.0.0.0:{}", port)).unwrap();
		config
	}

	/// Conduct NAT if needed.
	pub fn prepared(self) -> Self {
		let mut listen = self.listen_address;
		let mut public = self.public_address;

		if self.nat_enabled {
			info!("Enabling NAT...");
			match search_gateway() {
				Err(ref err) => info!("Error: {}", err),
				Ok(gateway) => {
					let int_addr = SocketAddrV4::from_str("127.0.0.1:30304").unwrap();
					match gateway.get_any_address(PortMappingProtocol::TCP, int_addr, 0, "Parity Node/TCP") {
						Err(ref err) => {
							info!("There was an error! {}", err);
						},
						Ok(ext_addr) => {
							info!("Local gateway: {}, External ip address: {}", gateway, ext_addr);
							public = SocketAddr::V4(ext_addr);
							listen = SocketAddr::V4(int_addr);
						},
					}
				},
			}
		}

		NetworkConfiguration {
			listen_address: listen,
			public_address: public,
			nat_enabled: false,
			discovery_enabled: self.discovery_enabled,
			pin: self.pin,
			boot_nodes: self.boot_nodes,
			use_secret: self.use_secret,
		}
	}
}

// Tokens
@ -180,6 +217,7 @@ impl<'s, Message> NetworkContext<'s, Message> where Message: Send + Sync + Clone
				s.send_packet(self.protocol, packet_id as u8, &data).unwrap_or_else(|e| {
					warn!(target: "net", "Send error: {:?}", e);
				}); //TODO: don't copy vector data
				try!(self.io.update_registration(peer));
			},
			_ => warn!(target: "net", "Send: Peer is not connected yet")
		}
@ -296,6 +334,8 @@ pub struct Host<Message> where Message: Send + Sync + Clone {
impl<Message> Host<Message> where Message: Send + Sync + Clone {
	/// Create a new instance
	pub fn new(config: NetworkConfiguration) -> Host<Message> {
		let config = config.prepared();

		let addr = config.listen_address;
		// Setup the server socket
		let tcp_listener = TcpListener::bind(&addr).unwrap();
@ -373,7 +413,7 @@ impl<Message> Host<Message> where Message: Send + Sync + Clone {
		let mut to_kill = Vec::new();
		for e in self.connections.write().unwrap().iter_mut() {
			if let ConnectionEntry::Session(ref mut s) = *e.lock().unwrap().deref_mut() {
				if !s.keep_alive() {
				if !s.keep_alive(io) {
					s.disconnect(DisconnectReason::PingTimeout);
					to_kill.push(s.token());
				}
@ -16,6 +16,7 @@

use std::sync::*;
use error::*;
use panics::*;
use network::{NetworkProtocolHandler, NetworkConfiguration};
use network::error::{NetworkError};
use network::host::{Host, NetworkIoMessage, ProtocolId};
@ -27,13 +28,17 @@ use io::*;
pub struct NetworkService<Message> where Message: Send + Sync + Clone + 'static {
	io_service: IoService<NetworkIoMessage<Message>>,
	host_info: String,
	stats: Arc<NetworkStats>
	stats: Arc<NetworkStats>,
	panic_handler: Arc<PanicHandler>
}

impl<Message> NetworkService<Message> where Message: Send + Sync + Clone + 'static {
	/// Starts IO event loop
	pub fn start(config: NetworkConfiguration) -> Result<NetworkService<Message>, UtilError> {
		let panic_handler = PanicHandler::new_in_arc();
		let mut io_service = try!(IoService::<NetworkIoMessage<Message>>::start());
		panic_handler.forward_from(&io_service);

		let host = Arc::new(Host::new(config));
		let stats = host.stats().clone();
		let host_info = host.client_version();
@ -43,6 +48,7 @@ impl<Message> NetworkService<Message> where Message: Send + Sync + Clone + 'stat
			io_service: io_service,
			host_info: host_info,
			stats: stats,
			panic_handler: panic_handler
		})
	}

@ -72,3 +78,9 @@ impl<Message> NetworkService<Message> where Message: Send + Sync + Clone + 'stat
	}
}


impl<Message> MayPanic for NetworkService<Message> where Message: Send + Sync + Clone + 'static {
	fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
		self.panic_handler.on_panic(closure);
	}
}
@ -180,7 +180,7 @@ impl Session {
	}

	/// Keep this session alive. Returns false if ping timeout happened
	pub fn keep_alive(&mut self) -> bool {
	pub fn keep_alive<Message>(&mut self, io: &IoContext<Message>) -> bool where Message: Send + Sync + Clone {
		let timed_out = if let Some(pong) = self.pong_time_ns {
			pong - self.ping_time_ns > PING_TIMEOUT_SEC * 1000_000_000
		} else {
@ -191,6 +191,7 @@ impl Session {
			if let Err(e) = self.send_ping() {
				debug!("Error sending ping message: {:?}", e);
			}
			io.update_registration(self.token()).unwrap_or_else(|e| debug!(target: "net", "Session registration error: {:?}", e));
		}
		!timed_out
	}
@ -324,7 +325,7 @@ impl Session {
		let mut rlp = RlpStream::new();
		rlp.append(&(PACKET_DISCONNECT as u32));
		rlp.begin_list(1);
		rlp.append(&(reason.clone() as u32));
		rlp.append(&(reason as u32));
		self.connection.send_packet(&rlp.out()).ok();
		NetworkError::Disconnect(reason)
	}
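The keep-alive logic above treats a session as timed out when the gap between the last ping and the matching pong exceeds `PING_TIMEOUT_SEC`, with both timestamps kept in nanoseconds. A stripped-down, self-contained version of just that check follows; the struct, the constant value, and the handling of a missing pong are illustrative assumptions, since the real `Session` compares against the current time in the branch that the diff cuts off.

const PING_TIMEOUT_SEC: u64 = 30;

/// Illustrative ping state: when the last ping was sent and when (if ever)
/// the matching pong came back, both as nanosecond timestamps.
struct PingState {
	ping_time_ns: u64,
	pong_time_ns: Option<u64>,
}

impl PingState {
	/// Returns false if the ping timed out, mirroring `keep_alive`'s contract.
	fn keep_alive(&self) -> bool {
		let timed_out = if let Some(pong) = self.pong_time_ns {
			pong - self.ping_time_ns > PING_TIMEOUT_SEC * 1_000_000_000
		} else {
			// No pong yet: the sketch treats this as "not timed out".
			false
		};
		!timed_out
	}
}

fn main() {
	let prompt_reply = PingState { ping_time_ns: 0, pong_time_ns: Some(5_000_000_000) };
	let late_reply = PingState { ping_time_ns: 0, pong_time_ns: Some(31_000_000_000) };
	assert!(prompt_reply.keep_alive());
	assert!(!late_reply.keep_alive());
}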
183
util/src/panics.rs
Normal file
@ -0,0 +1,183 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.

// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.

//! Panic utilities

use std::thread;
use std::ops::DerefMut;
use std::any::Any;
use std::sync::{Arc, Mutex};

/// Thread-safe closure for handling possible panics
pub trait OnPanicListener: Send + Sync + 'static {
	/// Invoke listener
	fn call(&mut self, arg: &str);
}

/// Forwards panics from child
pub trait ForwardPanic {
	/// Attach `on_panic` listener to `child` and rethrow all panics
	fn forward_from<S>(&self, child: &S) where S : MayPanic;
}

/// Trait indicating that the structure catches some of the panics (most probably from spawned threads)
/// and it's possible to be notified when one of the threads panics.
pub trait MayPanic {
	/// `closure` will be invoked whenever panic in thread is caught
	fn on_panic<F>(&self, closure: F) where F: OnPanicListener;
}

/// Structure that allows to catch panics and notify listeners
pub struct PanicHandler {
	listeners: Mutex<Vec<Box<OnPanicListener>>>
}

impl PanicHandler {
	/// Creates new `PanicHandler` wrapped in `Arc`
	pub fn new_in_arc() -> Arc<PanicHandler> {
		Arc::new(Self::new())
	}

	/// Creates new `PanicHandler`
	pub fn new() -> PanicHandler {
		PanicHandler {
			listeners: Mutex::new(vec![])
		}
	}

	/// Invoke closure and catch any possible panics.
	/// In case of panic notifies all listeners about it.
	#[allow(deprecated)]
	// TODO [todr] catch_panic is deprecated but panic::recover has different bounds (not allowing mutex)
	pub fn catch_panic<G, R>(&self, g: G) -> thread::Result<R> where G: FnOnce() -> R + Send + 'static {
		let result = thread::catch_panic(g);

		if let Err(ref e) = result {
			let res = convert_to_string(e);
			if let Some(r) = res {
				self.notify_all(r);
			}
		}

		result
	}

	fn notify_all(&self, r: String) {
		let mut listeners = self.listeners.lock().unwrap();
		for listener in listeners.deref_mut() {
			listener.call(&r);
		}
	}
}

impl MayPanic for PanicHandler {
	fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
		self.listeners.lock().unwrap().push(Box::new(closure));
	}
}

impl ForwardPanic for Arc<PanicHandler> {
	fn forward_from<S>(&self, child: &S) where S : MayPanic {
		let p = self.clone();
		child.on_panic(move |t| p.notify_all(t));
	}
}

impl<F> OnPanicListener for F
	where F: FnMut(String) + Send + Sync + 'static {
	fn call(&mut self, arg: &str) {
		self(arg.to_owned())
	}
}

fn convert_to_string(t: &Box<Any + Send>) -> Option<String> {
	let as_str = t.downcast_ref::<&'static str>().cloned().map(|t| t.to_owned());
	let as_string = t.downcast_ref::<String>().cloned();

	as_str.or(as_string)
}

#[test]
fn should_notify_listeners_about_panic () {
	use std::sync::RwLock;
	// given
	let invocations = Arc::new(RwLock::new(vec![]));
	let i = invocations.clone();
	let p = PanicHandler::new();
	p.on_panic(move |t| i.write().unwrap().push(t));

	// when
	p.catch_panic(|| panic!("Panic!")).unwrap_err();

	// then
	assert!(invocations.read().unwrap()[0] == "Panic!");
}

#[test]
fn should_notify_listeners_about_panic_when_string_is_dynamic () {
	use std::sync::RwLock;
	// given
	let invocations = Arc::new(RwLock::new(vec![]));
	let i = invocations.clone();
	let p = PanicHandler::new();
	p.on_panic(move |t| i.write().unwrap().push(t));

	// when
	p.catch_panic(|| panic!("Panic: {}", 1)).unwrap_err();

	// then
	assert!(invocations.read().unwrap()[0] == "Panic: 1");
}

#[test]
fn should_notify_listeners_about_panic_in_other_thread () {
	use std::thread;
	use std::sync::RwLock;

	// given
	let invocations = Arc::new(RwLock::new(vec![]));
	let i = invocations.clone();
	let p = PanicHandler::new();
	p.on_panic(move |t| i.write().unwrap().push(t));

	// when
	let t = thread::spawn(move ||
		p.catch_panic(|| panic!("Panic!")).unwrap()
	);
	t.join().unwrap_err();

	// then
	assert!(invocations.read().unwrap()[0] == "Panic!");
}

#[test]
fn should_forward_panics () {
	use std::sync::RwLock;
	// given
	let invocations = Arc::new(RwLock::new(vec![]));
	let i = invocations.clone();
	let p = PanicHandler::new_in_arc();
	p.on_panic(move |t| i.write().unwrap().push(t));

	let p2 = PanicHandler::new();
	p.forward_from(&p2);

	// when
	p2.catch_panic(|| panic!("Panic!")).unwrap_err();

	// then
	assert!(invocations.read().unwrap()[0] == "Panic!");
}
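The `PanicHandler` added above keeps a list of listener closures, runs work inside a panic catcher, converts the panic payload to a `String`, and notifies every listener; `forward_from` simply chains one handler's listeners onto another. A compact, self-contained sketch of the same notify-on-panic idea, using the stable `std::panic::catch_unwind` rather than the crate's deprecated `thread::catch_panic` wrapper (the `Handler` type here is a local stand-in, not the crate's API):

use std::any::Any;
use std::panic::{self, AssertUnwindSafe};
use std::sync::{Arc, Mutex};
use std::thread;

/// Minimal stand-in for PanicHandler: listeners are plain closures taking the
/// panic message as a String.
struct Handler {
	listeners: Mutex<Vec<Box<dyn FnMut(String) + Send>>>,
}

impl Handler {
	fn new() -> Arc<Handler> {
		Arc::new(Handler { listeners: Mutex::new(Vec::new()) })
	}

	fn on_panic<F: FnMut(String) + Send + 'static>(&self, f: F) {
		self.listeners.lock().unwrap().push(Box::new(f));
	}

	/// Run `g`; if it panics, convert the payload to text and notify listeners.
	fn catch_panic<R>(&self, g: impl FnOnce() -> R) -> thread::Result<R> {
		let result = panic::catch_unwind(AssertUnwindSafe(g));
		if let Err(ref e) = result {
			if let Some(msg) = convert_to_string(e) {
				for listener in self.listeners.lock().unwrap().iter_mut() {
					listener(msg.clone());
				}
			}
		}
		result
	}
}

fn convert_to_string(t: &Box<dyn Any + Send>) -> Option<String> {
	let as_str = t.downcast_ref::<&'static str>().cloned().map(|s| s.to_owned());
	let as_string = t.downcast_ref::<String>().cloned();
	as_str.or(as_string)
}

fn main() {
	let seen = Arc::new(Mutex::new(Vec::new()));
	let seen2 = seen.clone();

	let handler = Handler::new();
	handler.on_panic(move |msg| seen2.lock().unwrap().push(msg));

	// The panic is caught, the listener is told about it, and execution continues.
	let _ = handler.catch_panic(|| panic!("Panic: {}", 1));
	assert_eq!(seen.lock().unwrap()[0], "Panic: 1");
}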
@ -408,7 +408,7 @@ impl Decodable for Vec<u8> {
	fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
		decoder.read_value(| bytes | {
			let mut res = vec![];
			res.extend(bytes);
			res.extend_from_slice(bytes);
			Ok(res)
		})
	}
31
util/src/tests/helpers.rs
Normal file
@ -0,0 +1,31 @@
use common::*;
use std::path::PathBuf;
use std::fs::{remove_dir_all};
use std::env;

pub struct RandomTempPath {
	path: PathBuf
}

impl RandomTempPath {
	pub fn create_dir() -> RandomTempPath {
		let mut dir = env::temp_dir();
		dir.push(H32::random().hex());
		fs::create_dir_all(dir.as_path()).unwrap();
		RandomTempPath {
			path: dir.clone()
		}
	}

	pub fn as_path(&self) -> &PathBuf {
		&self.path
	}
}

impl Drop for RandomTempPath {
	fn drop(&mut self) {
		if let Err(e) = remove_dir_all(self.as_path()) {
			panic!("failed to remove temp directory, probably something failed to destroyed ({})", e);
		}
	}
}
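The test helper above creates a uniquely named directory under the system temp path and removes it again in `Drop`, so a test cannot leak its scratch data. A self-contained sketch of the same guard pattern, using the process id and a counter instead of the crate's `H32::random()` for the unique name (an assumption made for the sketch):

use std::env;
use std::fs;
use std::path::PathBuf;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};

static COUNTER: AtomicUsize = AtomicUsize::new(0);

/// Temp directory that cleans itself up when it goes out of scope.
struct TempDir {
	path: PathBuf,
}

impl TempDir {
	fn create() -> TempDir {
		let mut dir = env::temp_dir();
		// Unique-enough name for a sketch: pid plus a per-process counter.
		dir.push(format!("scratch-{}-{}", process::id(), COUNTER.fetch_add(1, Ordering::SeqCst)));
		fs::create_dir_all(&dir).unwrap();
		TempDir { path: dir }
	}

	fn as_path(&self) -> &PathBuf {
		&self.path
	}
}

impl Drop for TempDir {
	fn drop(&mut self) {
		// Best-effort cleanup; a test helper might panic here instead.
		let _ = fs::remove_dir_all(&self.path);
	}
}

fn main() {
	let created;
	{
		let tmp = TempDir::create();
		created = tmp.as_path().clone();
		assert!(created.exists());
	} // dropped here: the directory is removed
	assert!(!created.exists());
}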
1
util/src/tests/mod.rs
Normal file
@ -0,0 +1 @@
pub mod helpers;
@ -293,7 +293,7 @@ impl<'a> Iterator for TrieDBIterator<'a> {

	fn next(&mut self) -> Option<Self::Item> {
		let b = match self.trail.last_mut() {
			Some(ref mut b) => { b.increment(); b.clone() },
			Some(mut b) => { b.increment(); b.clone() },
			None => return None
		};
		match (b.status, b.node) {
@ -309,9 +309,8 @@ impl<'a> Iterator for TrieDBIterator<'a> {
				self.trail.pop();
				self.next()
			},
			(Status::At, Node::Leaf(_, v)) => Some((self.key(), v)),
			(Status::At, Node::Leaf(_, v)) | (Status::At, Node::Branch(_, Some(v))) => Some((self.key(), v)),
			(Status::At, Node::Extension(_, d)) => self.descend_next(d),
			(Status::At, Node::Branch(_, Some(v))) => Some((self.key(), v)),
			(Status::At, Node::Branch(_, _)) => self.next(),
			(Status::AtChild(i), Node::Branch(children, _)) if children[i].len() > 0 => {
				match i {
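The iterator change above folds the `Leaf` arm and the value-carrying `Branch` arm into a single match arm with an or-pattern, which works because both patterns bind the same variable `v` with the same type. A tiny self-contained illustration of that refactoring; the enum here is an illustrative stand-in, not the trie's `Node`:

/// Illustrative stand-in for a trie node that may carry a value.
enum Node<'a> {
	Leaf(&'a [u8]),
	Branch(Option<&'a [u8]>),
	Extension,
}

/// Both value-carrying cases bind `v`, so one or-pattern arm covers them.
fn value_of<'a>(node: &Node<'a>) -> Option<&'a [u8]> {
	match *node {
		Node::Leaf(v) | Node::Branch(Some(v)) => Some(v),
		Node::Branch(None) | Node::Extension => None,
	}
}

fn main() {
	let payload: &[u8] = b"abc";
	assert_eq!(value_of(&Node::Leaf(payload)), Some(payload));
	assert_eq!(value_of(&Node::Branch(Some(payload))), Some(payload));
	assert_eq!(value_of(&Node::Branch(None)), None);
	assert_eq!(value_of(&Node::Extension), None);
}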
@ -458,7 +458,8 @@ macro_rules! construct_uint {
			let mut hex = "0x".to_owned();
			let mut bytes = [0u8; 8 * $n_words];
			self.to_bytes(&mut bytes);
			hex.push_str(bytes.to_hex().as_ref());
			let len = cmp::max((self.bits() + 7) / 8, 1);
			hex.push_str(bytes[bytes.len() - len..].to_hex().as_ref());
			serializer.visit_str(hex.as_ref())
		}
	}
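The serialization change above stops emitting every byte of the big-endian buffer and instead takes only the significant ones: `len = max((bits + 7) / 8, 1)` is the number of bytes needed for the value (at least one, so zero still prints a byte), and slicing `bytes[bytes.len() - len..]` keeps just the low-order tail of the big-endian array. A self-contained sketch of the same trimming for a plain `u64` (the helper name is illustrative):

/// Hex-encode `value` as "0x…", keeping only the significant big-endian bytes:
/// len = max((bits + 7) / 8, 1), then take the last `len` bytes of the buffer.
fn to_trimmed_hex(value: u64) -> String {
	let bytes = value.to_be_bytes();
	let bits = 64 - value.leading_zeros() as usize;
	let len = std::cmp::max((bits + 7) / 8, 1);
	let mut hex = "0x".to_owned();
	for b in &bytes[bytes.len() - len..] {
		hex.push_str(&format!("{:02x}", b));
	}
	hex
}

fn main() {
	// 0x1234 needs 13 bits -> 2 bytes, so the leading zero bytes are dropped.
	assert_eq!(to_trimmed_hex(0x1234), "0x1234");
	// Zero still produces one byte rather than an empty string.
	assert_eq!(to_trimmed_hex(0), "0x00");
	assert_eq!(to_trimmed_hex(0xff), "0xff");
}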