Merge branch 'master' into ui-2

commit 72f3576c78

Cargo.lock (generated) 61
@@ -858,6 +858,11 @@ dependencies = [
 "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "getopts"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "glob"
 version = "0.2.11"
@@ -1043,7 +1048,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
 name = "jsonrpc-core"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1055,7 +1060,7 @@ dependencies = [
 [[package]]
 name = "jsonrpc-http-server"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "hyper 0.10.0-a.0 (git+https://github.com/paritytech/hyper)",
 "jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@@ -1068,7 +1073,7 @@ dependencies = [
 [[package]]
 name = "jsonrpc-ipc-server"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "bytes 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@@ -1081,7 +1086,7 @@ dependencies = [
 [[package]]
 name = "jsonrpc-macros"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
 "jsonrpc-pubsub 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@@ -1091,7 +1096,7 @@ dependencies = [
 [[package]]
 name = "jsonrpc-minihttp-server"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
 "jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@@ -1105,7 +1110,7 @@ dependencies = [
 [[package]]
 name = "jsonrpc-pubsub"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
 "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1115,7 +1120,7 @@ dependencies = [
 [[package]]
 name = "jsonrpc-server-utils"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "globset 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@@ -1127,7 +1132,7 @@ dependencies = [
 [[package]]
 name = "jsonrpc-tcp-server"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "bytes 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@@ -1141,7 +1146,7 @@ dependencies = [
 [[package]]
 name = "jsonrpc-ws-server"
 version = "7.0.0"
-source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#87db29043826f152cce171351fa34fada287764d"
+source = "git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7#d8dae528860d7fe976a8e3ec060a792e2496573b"
 dependencies = [
 "jsonrpc-core 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
 "jsonrpc-server-utils 7.0.0 (git+https://github.com/paritytech/jsonrpc.git?branch=parity-1.7)",
@@ -1212,6 +1217,16 @@ name = "linked-hash-map"
 version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "local-encoding"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "skeptic 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "log"
 version = "0.3.7"
@@ -1818,7 +1833,7 @@ dependencies = [
 [[package]]
 name = "parity-ui-precompiled"
 version = "1.4.0"
-source = "git+https://github.com/paritytech/js-precompiled.git#259741a8ff42f0d4cd0a2c60cc7fab4f0b55aa4f"
+source = "git+https://github.com/paritytech/js-precompiled.git#6bc749402a9dd05ce41f97a4cc4f1ee48512e6df"
 dependencies = [
 "parity-dapps-glue 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1971,6 +1986,14 @@ dependencies = [
 "primal-estimate 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "pulldown-cmark"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "quasi"
 version = "0.32.0"
@@ -2115,16 +2138,17 @@ dependencies = [
 [[package]]
 name = "rocksdb"
 version = "0.4.5"
-source = "git+https://github.com/paritytech/rust-rocksdb#a5526297746b51a33abfeb58a0540e54dedcdcb9"
+source = "git+https://github.com/paritytech/rust-rocksdb#4364caec4dd5da1a1d78c39276774ee65bf55c7d"
 dependencies = [
 "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
+ "local-encoding 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "rocksdb-sys 0.3.0 (git+https://github.com/paritytech/rust-rocksdb)",
 ]
 
 [[package]]
 name = "rocksdb-sys"
 version = "0.3.0"
-source = "git+https://github.com/paritytech/rust-rocksdb#a5526297746b51a33abfeb58a0540e54dedcdcb9"
+source = "git+https://github.com/paritytech/rust-rocksdb#4364caec4dd5da1a1d78c39276774ee65bf55c7d"
 dependencies = [
 "gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)",
 "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2378,6 +2402,15 @@ name = "siphasher"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "skeptic"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "pulldown-cmark 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "tempdir 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "slab"
 version = "0.2.0"
@@ -2921,6 +2954,7 @@ dependencies = [
 "checksum futures-cpupool 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "bb982bb25cd8fa5da6a8eb3a460354c984ff1113da82bcb4f0b0862b5795db82"
 "checksum gcc 0.3.43 (registry+https://github.com/rust-lang/crates.io-index)" = "c07c758b972368e703a562686adb39125707cc1ef3399da8c019fc6c2498a75d"
 "checksum gdi32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0912515a8ff24ba900422ecda800b52f4016a56251922d397c576bf92c690518"
+"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
 "checksum glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "8be18de09a56b60ed0edf84bc9df007e30040691af7acd1c41874faac5895bfb"
 "checksum globset 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90d069fe6beb9be359ef505650b3f73228c5591a3c4b1f32be2f4f44459ffa3a"
 "checksum hamming 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "65043da274378d68241eb9a8f8f8aa54e349136f7b8e12f63e3ef44043cc30e1"
@@ -2957,6 +2991,7 @@ dependencies = [
 "checksum libusb-sys 0.2.3 (git+https://github.com/paritytech/libusb-sys)" = "<none>"
 "checksum linked-hash-map 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "bda158e0dabeb97ee8a401f4d17e479d6b891a14de0bba79d5cc2d4d325b5e48"
 "checksum linked-hash-map 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6d262045c5b87c0861b3f004610afd0e2c851e2908d08b6c870cbb9d5f494ecd"
+"checksum local-encoding 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e1ceb20f39ff7ae42f3ff9795f3986b1daad821caaa1e1732a0944103a5a1a66"
 "checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
 "checksum lru-cache 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "656fa4dfcb02bcf1063c592ba3ff6a5303ee1f2afe98c8a889e8b1a77c6dfdb7"
 "checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e"
@@ -3011,6 +3046,7 @@ dependencies = [
 "checksum primal-check 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "647c81b67bb9551a7b88d0bcd785ac35b7d0bf4b2f358683d7c2375d04daec51"
 "checksum primal-estimate 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "56ea4531dde757b56906493c8604641da14607bf9cdaa80fb9c9cabd2429f8d5"
 "checksum primal-sieve 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7aa73fd87e5984a00bdb4c1b14d3d5d6d0bad01b2caaaf924c16ab7260ac946c"
+"checksum pulldown-cmark 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8361e81576d2e02643b04950e487ec172b687180da65c731c03cf336784e6c07"
 "checksum quasi 0.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "18c45c4854d6d1cf5d531db97c75880feb91c958b0720f4ec1057135fec358b3"
 "checksum quasi_codegen 0.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "51b9e25fa23c044c1803f43ca59c98dac608976dd04ce799411edd58ece776d4"
 "checksum quasi_macros 0.32.0 (registry+https://github.com/rust-lang/crates.io-index)" = "29cec87bc2816766d7e4168302d505dd06b0a825aed41b00633d296e922e02dd"
@@ -3055,6 +3091,7 @@ dependencies = [
 "checksum sha1 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "cc30b1e1e8c40c121ca33b86c23308a090d19974ef001b4bf6e61fd1a0fb095c"
 "checksum shell32-sys 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "72f20b8f3c060374edb8046591ba28f62448c369ccbdc7b02075103fb3a9e38d"
 "checksum siphasher 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5c44e42fa187b5a8782489cf7740cc27c3125806be2bf33563cf5e02e9533fcd"
+"checksum skeptic 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "24ebf8a06f5f8bae61ae5bbc7af7aac4ef6907ae975130faba1199e5fe82256a"
 "checksum slab 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "6dbdd334bd28d328dad1c41b0ea662517883d8880d8533895ef96c8003dec9c4"
 "checksum slab 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "17b4fcaed89ab08ef143da37bc52adbcc04d4a69014f4c1208d6b51f0c47bc23"
 "checksum smallvec 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "fcc8d19212aacecf95e4a7a2179b26f7aeb9732a915cf01f05b0d3e044865410"
@@ -95,6 +95,9 @@ secretstore = ["ethcore-secretstore"]
 path = "parity/main.rs"
 name = "parity"
 
+[profile.debug]
+panic = "abort"
+
 [profile.release]
 debug = false
 lto = false
@@ -71,7 +71,7 @@ use std::path::PathBuf;
 use std::sync::Arc;
 use std::collections::HashMap;
 
-use jsonrpc_http_server::{self as http, hyper};
+use jsonrpc_http_server::{self as http, hyper, Origin};
 
 use fetch::Fetch;
 use parity_reactor::Remote;
@@ -90,12 +90,12 @@ impl<F> SyncStatus for F where F: Fn() -> bool + Send + Sync {
 
 /// Validates Web Proxy tokens
 pub trait WebProxyTokens: Send + Sync {
-	/// Should return true if token is a valid web proxy access token.
-	fn is_web_proxy_token_valid(&self, token: &str) -> bool;
+	/// Should return a domain allowed to be accessed by this token or `None` if the token is not valid
+	fn domain(&self, token: &str) -> Option<Origin>;
 }
 
-impl<F> WebProxyTokens for F where F: Fn(String) -> bool + Send + Sync {
-	fn is_web_proxy_token_valid(&self, token: &str) -> bool { self(token.to_owned()) }
+impl<F> WebProxyTokens for F where F: Fn(String) -> Option<Origin> + Send + Sync {
+	fn domain(&self, token: &str) -> Option<Origin> { self(token.to_owned()) }
 }
 
 /// Current supported endpoints.
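Note: under the new trait, a token lookup resolves to the single origin the token was issued for instead of a plain boolean, and the blanket impl means any `Fn(String) -> Option<Origin>` closure acts as a `WebProxyTokens` implementation. A minimal sketch of that shape, using `String` as a stand-in for `jsonrpc_http_server::Origin` (which the real code builds via `.into()`):

```rust
/// Illustrative only: a validator that maps a known token to its allowed domain.
/// The real `Origin` type is assumed here to be string-convertible.
fn make_validator(web_token: &'static str, domain: &'static str) -> impl Fn(String) -> Option<String> {
    move |token: String| if token == web_token { Some(domain.to_owned()) } else { None }
}

fn main() {
    let domain_for = make_validator("token", "https://parity.io");
    // A valid token yields the domain it was generated for; anything else yields None.
    assert_eq!(domain_for("token".to_owned()), Some("https://parity.io".to_owned()));
    assert_eq!(domain_for("bad".to_owned()), None);
}
```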
@@ -312,7 +312,7 @@ fn should_encode_and_decode_base32() {
 #[test]
 fn should_stream_web_content() {
 	// given
-	let (server, fetch) = serve_with_fetch("token");
+	let (server, fetch) = serve_with_fetch("token", "https://parity.io");
 
 	// when
 	let response = request(server,
@@ -335,7 +335,7 @@ fn should_stream_web_content() {
 #[test]
 fn should_support_base32_encoded_web_urls() {
 	// given
-	let (server, fetch) = serve_with_fetch("token");
+	let (server, fetch) = serve_with_fetch("token", "https://parity.io");
 
 	// when
 	let response = request(server,
@@ -358,7 +358,7 @@ fn should_support_base32_encoded_web_urls() {
 #[test]
 fn should_correctly_handle_long_label_when_splitted() {
 	// given
-	let (server, fetch) = serve_with_fetch("xolrg9fePeQyKLnL");
+	let (server, fetch) = serve_with_fetch("xolrg9fePeQyKLnL", "https://contribution.melonport.com");
 
 	// when
 	let response = request(server,
@@ -382,7 +382,7 @@ fn should_correctly_handle_long_label_when_splitted() {
 #[test]
 fn should_support_base32_encoded_web_urls_as_path() {
 	// given
-	let (server, fetch) = serve_with_fetch("token");
+	let (server, fetch) = serve_with_fetch("token", "https://parity.io");
 
 	// when
 	let response = request(server,
@@ -402,10 +402,32 @@ fn should_support_base32_encoded_web_urls_as_path() {
 	fetch.assert_no_more_requests();
 }
 
+#[test]
+fn should_return_error_on_non_whitelisted_domain() {
+	// given
+	let (server, fetch) = serve_with_fetch("token", "https://ethcore.io");
+
+	// when
+	let response = request(server,
+		"\
+			GET / HTTP/1.1\r\n\
+			Host: EHQPPSBE5DM78X3GECX2YBVGC5S6JX3S5SMPY.web.web3.site\r\n\
+			Connection: close\r\n\
+			\r\n\
+		"
+	);
+
+	// then
+	response.assert_status("HTTP/1.1 400 Bad Request");
+	assert_security_headers_for_embed(&response.headers);
+
+	fetch.assert_no_more_requests();
+}
+
 #[test]
 fn should_return_error_on_invalid_token() {
 	// given
-	let (server, fetch) = serve_with_fetch("test");
+	let (server, fetch) = serve_with_fetch("test", "https://parity.io");
 
 	// when
 	let response = request(server,
@@ -427,7 +449,7 @@ fn should_return_error_on_invalid_token() {
 #[test]
 fn should_return_error_on_invalid_protocol() {
 	// given
-	let (server, fetch) = serve_with_fetch("token");
+	let (server, fetch) = serve_with_fetch("token", "ftp://parity.io");
 
 	// when
 	let response = request(server,
@@ -449,7 +471,7 @@ fn should_return_error_on_invalid_protocol() {
 #[test]
 fn should_disallow_non_get_requests() {
 	// given
-	let (server, fetch) = serve_with_fetch("token");
+	let (server, fetch) = serve_with_fetch("token", "https://parity.io");
 
 	// when
 	let response = request(server,
@@ -474,7 +496,7 @@ fn should_disallow_non_get_requests() {
 #[test]
 fn should_fix_absolute_requests_based_on_referer() {
 	// given
-	let (server, fetch) = serve_with_fetch("token");
+	let (server, fetch) = serve_with_fetch("token", "https://parity.io");
 
 	// when
 	let response = request(server,
@@ -497,7 +519,7 @@ fn should_fix_absolute_requests_based_on_referer() {
 #[test]
 fn should_fix_absolute_requests_based_on_referer_in_url() {
 	// given
-	let (server, fetch) = serve_with_fetch("token");
+	let (server, fetch) = serve_with_fetch("token", "https://parity.io");
 
 	// when
 	let response = request(server,
@@ -100,13 +100,15 @@ pub fn serve_with_registrar_and_fetch_and_threads(multi_threaded: bool) -> (Serv
 	(server, fetch, reg)
 }
 
-pub fn serve_with_fetch(web_token: &'static str) -> (Server, FakeFetch) {
+pub fn serve_with_fetch(web_token: &'static str, domain: &'static str) -> (Server, FakeFetch) {
 	let fetch = FakeFetch::default();
 	let f = fetch.clone();
 	let (server, _) = init_server(move |builder| {
 		builder
 			.fetch(f.clone())
-			.web_proxy_tokens(Arc::new(move |token| &token == web_token))
+			.web_proxy_tokens(Arc::new(move |token| {
+				if &token == web_token { Some(domain.into()) } else { None }
+			}))
 	}, Default::default(), Remote::new_sync());
 
 	(server, fetch)
@@ -147,7 +149,7 @@ impl ServerBuilder {
 			dapps_path: dapps_path.as_ref().to_owned(),
 			registrar: registrar,
 			sync_status: Arc::new(|| false),
-			web_proxy_tokens: Arc::new(|_| false),
+			web_proxy_tokens: Arc::new(|_| None),
 			signer_address: None,
 			allowed_hosts: DomainsValidation::Disabled,
 			remote: remote,
@@ -133,14 +133,14 @@ impl<F: Fetch> WebHandler<F> {
 		let target_url = token_it.next();
 
 		// Check if token supplied in URL is correct.
-		match token {
-			Some(token) if self.web_proxy_tokens.is_web_proxy_token_valid(token) => {},
+		let domain = match token.and_then(|token| self.web_proxy_tokens.domain(token)) {
+			Some(domain) => domain,
 			_ => {
 				return Err(State::Error(ContentHandler::error(
 					StatusCode::BadRequest, "Invalid Access Token", "Invalid or old web proxy access token supplied.", Some("Try refreshing the page."), self.embeddable_on.clone()
 				)));
 			}
-		}
+		};
 
 		// Validate protocol
 		let mut target_url = match target_url {
@@ -152,6 +152,12 @@ impl<F: Fetch> WebHandler<F> {
 			}
 		};
 
+		if !target_url.starts_with(&*domain) {
+			return Err(State::Error(ContentHandler::error(
+				StatusCode::BadRequest, "Invalid Domain", "Dapp attempted to access invalid domain.", Some(&target_url), self.embeddable_on.clone(),
+			)));
+		}
+
 		if !target_url.ends_with("/") {
 			target_url = format!("{}/", target_url);
 		}
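Note: taken together, the handler change means a token now resolves to the origin it was issued for, and the requested target URL must start with that origin before anything is fetched. A rough standalone sketch of that validation path, with simplified hypothetical types in place of the real `Origin`, `ContentHandler` and `State`:

```rust
/// Illustrative reduction of the new checks in the web proxy handler.
fn validate_request(
    token: Option<&str>,
    target_url: &str,
    lookup_domain: impl Fn(&str) -> Option<String>,
) -> Result<String, &'static str> {
    // 1. The token must map to a domain (previously this was only a boolean check).
    let domain = match token.and_then(|t| lookup_domain(t)) {
        Some(domain) => domain,
        None => return Err("Invalid or old web proxy access token supplied."),
    };

    // 2. The requested URL must live under the domain the token was issued for.
    if !target_url.starts_with(&domain) {
        return Err("Dapp attempted to access invalid domain.");
    }

    // 3. Normalise the trailing slash, as the handler does before fetching.
    let mut target_url = target_url.to_owned();
    if !target_url.ends_with('/') {
        target_url.push('/');
    }
    Ok(target_url)
}

fn main() {
    let lookup = |t: &str| if t == "token" { Some("https://parity.io".to_owned()) } else { None };
    assert!(validate_request(Some("token"), "https://parity.io/page", &lookup).is_ok());
    assert!(validate_request(Some("token"), "https://ethcore.io/page", &lookup).is_err());
    assert!(validate_request(Some("bad"), "https://parity.io/page", &lookup).is_err());
}
```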
@@ -141,7 +141,6 @@ pub struct Client {
 	block_queue: BlockQueue,
 	report: RwLock<ClientReport>,
 	import_lock: Mutex<()>,
-	panic_handler: Arc<PanicHandler>,
 	verifier: Box<Verifier>,
 	miner: Arc<Miner>,
 	sleep_state: Mutex<SleepState>,
@@ -213,8 +212,6 @@ impl Client {
 		let engine = spec.engine.clone();
 
 		let block_queue = BlockQueue::new(config.queue.clone(), engine.clone(), message_channel.clone(), config.verifier_type.verifying_seal());
-		let panic_handler = PanicHandler::new_in_arc();
-		panic_handler.forward_from(&block_queue);
 
 		let awake = match config.mode { Mode::Dark(..) | Mode::Off => false, _ => true };
 
@@ -234,7 +231,6 @@ impl Client {
 			block_queue: block_queue,
 			report: RwLock::new(Default::default()),
 			import_lock: Mutex::new(()),
-			panic_handler: panic_handler,
 			miner: miner,
 			io_channel: Mutex::new(message_channel),
 			notify: RwLock::new(Vec::new()),
@@ -1723,12 +1719,6 @@ impl EngineClient for Client {
 	}
 }
 
-impl MayPanic for Client {
-	fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
-		self.panic_handler.on_panic(closure);
-	}
-}
-
 impl ProvingBlockChainClient for Client {
 	fn prove_storage(&self, key1: H256, key2: H256, id: BlockId) -> Option<(Vec<Bytes>, H256)> {
 		self.state_at(id)
@@ -319,14 +319,20 @@ impl Engine for AuthorityRound {
 	/// This operation is synchronous and may (quite reasonably) not be available, in which `false` will
 	/// be returned.
 	fn generate_seal(&self, block: &ExecutedBlock) -> Seal {
+		// first check to avoid generating signature most of the time
+		// (but there's still a race to the `compare_and_swap`)
 		if self.proposed.load(AtomicOrdering::SeqCst) { return Seal::None; }
 
 		let header = block.header();
 		let step = self.step.load();
 		if self.is_step_proposer(header.parent_hash(), step, header.author()) {
 			if let Ok(signature) = self.signer.sign(header.bare_hash()) {
 				trace!(target: "engine", "generate_seal: Issuing a block for step {}.", step);
-				self.proposed.store(true, AtomicOrdering::SeqCst);
+				// only issue the seal if we were the first to reach the compare_and_swap.
+				if !self.proposed.compare_and_swap(false, true, AtomicOrdering::SeqCst) {
 					return Seal::Regular(vec![encode(&step).to_vec(), encode(&(&H520::from(signature) as &[u8])).to_vec()]);
+				}
 			} else {
 				warn!(target: "engine", "generate_seal: FAIL: Accounts secret key unavailable.");
 			}
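Note: the switch from `store` to `compare_and_swap` closes a window where two threads could both pass the initial `load` and each issue a seal; now only the thread that flips the flag first returns one. A minimal self-contained illustration of the pattern (not the engine code itself):

```rust
use std::sync::atomic::{AtomicBool, Ordering};

/// Returns true only for the single caller that wins the race to set the flag.
fn try_propose(proposed: &AtomicBool) -> bool {
    // Cheap early exit: most callers see the flag already set.
    if proposed.load(Ordering::SeqCst) {
        return false;
    }
    // compare_and_swap returns the previous value; `false` means we flipped it first.
    // (Newer Rust prefers compare_exchange, but the pattern is the same.)
    !proposed.compare_and_swap(false, true, Ordering::SeqCst)
}

fn main() {
    let proposed = AtomicBool::new(false);
    assert!(try_propose(&proposed));  // first caller wins and may issue the seal
    assert!(!try_propose(&proposed)); // later callers back off
}
```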
@@ -56,7 +56,6 @@ pub struct ClientService {
 	io_service: Arc<IoService<ClientIoMessage>>,
 	client: Arc<Client>,
 	snapshot: Arc<SnapshotService>,
-	panic_handler: Arc<PanicHandler>,
 	database: Arc<Database>,
 	_stop_guard: ::devtools::StopGuard,
 }
@@ -72,9 +71,7 @@ impl ClientService {
 		miner: Arc<Miner>,
 		) -> Result<ClientService, Error>
 	{
-		let panic_handler = PanicHandler::new_in_arc();
 		let io_service = IoService::<ClientIoMessage>::start()?;
-		panic_handler.forward_from(&io_service);
 
 		info!("Configured for {} using {} engine", Colour::White.bold().paint(spec.name.clone()), Colour::Yellow.bold().paint(spec.engine.name()));
 
@@ -109,7 +106,6 @@ impl ClientService {
 		};
 		let snapshot = Arc::new(SnapshotService::new(snapshot_params)?);
 
-		panic_handler.forward_from(&*client);
 		let client_io = Arc::new(ClientIoHandler {
 			client: client.clone(),
 			snapshot: snapshot.clone(),
@@ -125,7 +121,6 @@ impl ClientService {
 			io_service: Arc::new(io_service),
 			client: client,
 			snapshot: snapshot,
-			panic_handler: panic_handler,
 			database: db,
 			_stop_guard: stop_guard,
 		})
@@ -160,12 +155,6 @@ impl ClientService {
 	pub fn db(&self) -> Arc<KeyValueDB> { self.database.clone() }
 }
 
-impl MayPanic for ClientService {
-	fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
-		self.panic_handler.on_panic(closure);
-	}
-}
-
 /// IO interface for the Client handler
 struct ClientIoHandler {
 	client: Arc<Client>,
@@ -125,7 +125,6 @@ struct Sizes {
 /// A queue of items to be verified. Sits between network or other I/O and the `BlockChain`.
 /// Keeps them in the same order as inserted, minus invalid items.
 pub struct VerificationQueue<K: Kind> {
-	panic_handler: Arc<PanicHandler>,
 	engine: Arc<Engine>,
 	more_to_verify: Arc<SCondvar>,
 	verification: Arc<Verification<K>>,
@@ -221,7 +220,6 @@ impl<K: Kind> VerificationQueue<K> {
 			message_channel: Mutex::new(message_channel),
 		});
 		let empty = Arc::new(SCondvar::new());
-		let panic_handler = PanicHandler::new_in_arc();
 		let scale_verifiers = config.verifier_settings.scale_verifiers;
 
 		let num_cpus = ::num_cpus::get();
@@ -236,7 +234,6 @@ impl<K: Kind> VerificationQueue<K> {
 		for i in 0..max_verifiers {
 			debug!(target: "verification", "Adding verification thread #{}", i);
 
-			let panic_handler = panic_handler.clone();
 			let verification = verification.clone();
 			let engine = engine.clone();
 			let wait = more_to_verify.clone();
@@ -247,7 +244,6 @@ impl<K: Kind> VerificationQueue<K> {
 			let handle = thread::Builder::new()
 				.name(format!("Verifier #{}", i))
 				.spawn(move || {
-					panic_handler.catch_panic(move || {
 					VerificationQueue::verify(
 						verification,
 						engine,
@@ -257,7 +253,6 @@ impl<K: Kind> VerificationQueue<K> {
 						state,
 						i,
 					)
-					}).unwrap()
 				})
 				.expect("Failed to create verifier thread.");
 			verifier_handles.push(handle);
@@ -265,7 +260,6 @@ impl<K: Kind> VerificationQueue<K> {
 
 		VerificationQueue {
 			engine: engine,
-			panic_handler: panic_handler,
 			ready_signal: ready_signal,
 			more_to_verify: more_to_verify,
 			verification: verification,
@@ -692,12 +686,6 @@ impl<K: Kind> VerificationQueue<K> {
 	}
 }
 
-impl<K: Kind> MayPanic for VerificationQueue<K> {
-	fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
-		self.panic_handler.on_panic(closure);
-	}
-}
-
 impl<K: Kind> Drop for VerificationQueue<K> {
 	fn drop(&mut self) {
 		trace!(target: "shutdown", "[VerificationQueue] Closing...");
@@ -1,6 +1,6 @@
 {
   "name": "parity.js",
-  "version": "1.7.92",
+  "version": "1.7.94",
   "main": "release/index.js",
   "jsnext:main": "src/index.js",
   "author": "Parity Team <admin@parity.io>",
@@ -102,6 +102,7 @@
   "css-loader": "0.26.1",
   "ejs-loader": "0.3.0",
   "ejsify": "1.0.0",
+  "empty-module": "0.0.2",
   "enzyme": "2.7.1",
   "eslint": "3.16.1",
   "eslint-config-semistandard": "7.0.0",
@@ -24,10 +24,17 @@ import Api from './api';
 describe('api/Api', () => {
   describe('interface', () => {
     const api = new Api(new Api.Provider.Http(TEST_HTTP_URL, -1));
+    const ignored = [
+      'eth_subscribe', 'eth_unsubscribe',
+      'parity_subscribe', 'parity_unsubscribe',
+      'signer_subscribePending', 'signer_unsubscribePending'
+    ];
 
     Object.keys(ethereumRpc).sort().forEach((endpoint) => {
       describe(endpoint, () => {
-        Object.keys(ethereumRpc[endpoint]).sort().forEach((method) => {
+        Object.keys(ethereumRpc[endpoint]).sort()
+          .filter(method => ignored.indexOf(method) !== -1)
+          .forEach((method) => {
           endpointTest(api, endpoint, method);
         });
       });
@@ -14,4 +14,8 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
+if (process.env.NODE_ENV !== 'test') {
+  process.browser = true;
+}
+
 export LocalAccountsMiddleware from './localAccountsMiddleware';
@@ -42,9 +42,9 @@ export default class Signer {
       .send('signer_generateAuthorizationToken');
   }
 
-  generateWebProxyAccessToken () {
+  generateWebProxyAccessToken (domain) {
     return this._provider
-      .send('signer_generateWebProxyAccessToken');
+      .execute('signer_generateWebProxyAccessToken', domain);
   }
 
   rejectRequest (requestId) {
@@ -17,6 +17,8 @@
 import { Address, BlockNumber, Data, Hash, Quantity, CallRequest, TransactionRequest } from '../types';
 import { withPreamble, fromDecimal, withComment, Dummy } from '../helpers';
 
+const SUBDOC_PUBSUB = 'pubsub';
+
 export default withPreamble(`
 
 ## The default block parameter
@@ -1192,5 +1194,60 @@ The following options are possible for the \`defaultBlock\` parameter:
       type: Boolean,
       desc: 'whether the call was successful'
     }
+  },
+
+  // Pub-Sub
+  subscribe: {
+    subdoc: SUBDOC_PUBSUB,
+    desc: `
+Starts a subscription (on WebSockets / IPC / TCP transports) to a particular event. For every event that
+matches the subscription a JSON-RPC notification with event details and subscription ID will be sent to a client.
+
+An example notification received by subscribing to \`newHeads\` event:
+\`\`\`
+{"jsonrpc":"2.0","method":"eth_subscription","params":{"subscription":"0x416d77337e24399d","result":{"difficulty":"0xd9263f42a87",<...>,
+"uncles":[]}}}
+\`\`\`
+
+You can unsubscribe using \`eth_unsubscribe\` RPC method. Subscriptions are also tied to a transport
+connection, disconnecting causes all subscriptions to be canceled.
+    `,
+    params: [
+      {
+        type: String,
+        desc: 'Subscription type: one of `newHeads`, `logs`',
+        example: 'newHeads'
+      },
+      {
+        type: Object,
+        desc: `
+Subscription type-specific parameters. It must be left empty for
+\`newHeads\` and must contain filter object for \`logs\`.
+        `,
+        example: {
+          fromBlock: 'latest',
+          toBlock: 'latest'
+        }
+      }
+    ],
+    returns: {
+      type: String,
+      desc: 'Assigned subscription ID',
+      example: '0x416d77337e24399d'
+    }
+  },
+  unsubscribe: {
+    subdoc: SUBDOC_PUBSUB,
+    desc: 'Unsubscribes from a subscription.',
+    params: [{
+      type: String,
+      desc: 'Subscription ID',
+      example: '0x416d77337e24399d'
+    }],
+    returns: {
+      type: Boolean,
+      desc: 'whether the call was successful',
+      example: true
+    }
   }
 });
@@ -26,6 +26,7 @@ const SECTION_VAULT = 'Account Vaults';
 
 const SUBDOC_SET = 'set';
 const SUBDOC_ACCOUNTS = 'accounts';
+const SUBDOC_PUBSUB = 'pubsub';
 
 export default {
   accountsInfo: {
@@ -2005,6 +2006,54 @@ export default {
       desc: 'Base58 encoded CID',
      example: 'QmSbFjqjd6nFwNHqsBCC7SK8GShGcayLUEtysJjNGhZAnC'
     }
+  },
+
+  // Pub-Sub
+  subscribe: {
+    subdoc: SUBDOC_PUBSUB,
+    desc: `
+Starts a subscription (on WebSockets / IPC / TCP transports) to results of calling some other RPC method.
+For every change in returned value of that RPC call a JSON-RPC notification with result and subscription ID will be sent to a client.
+
+An example notification received by subscribing to \`eth_accounts\` RPC method:
+\`\`\`
+{"jsonrpc":"2.0","method":"parity_subscription","params":{"subscription":"0x416d77337e24399d","result":["0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826"]}}
+\`\`\`
+
+You can unsubscribe using \`parity_unsubscribe\` RPC method. Subscriptions are also tied to a transport
+connection, disconnecting causes all subscriptions to be canceled.
+    `,
+    params: [
+      {
+        type: String,
+        desc: 'RPC method name',
+        example: 'eth_getBalance'
+      },
+      {
+        type: Array,
+        desc: 'Parameters passed to RPC method. (Optional, defaults to no parameters)',
+        example: ['0xcd2a3d9f938e13cd947ec05abc7fe734df8dd826', 'latest']
+      }
+    ],
+    returns: {
+      type: String,
+      desc: 'Assigned subscription ID',
+      example: '0x416d77337e24399d'
+    }
+  },
+  unsubscribe: {
+    subdoc: SUBDOC_PUBSUB,
+    desc: 'Unsubscribes from a subscription.',
+    params: [{
+      type: String,
+      desc: 'Subscription ID',
+      example: '0x416d77337e24399d'
+    }],
+    returns: {
+      type: Boolean,
+      desc: 'whether the call was successful',
+      example: true
+    }
   }
 
 };
|
@ -30,7 +30,11 @@ export default {
|
|||||||
|
|
||||||
generateWebProxyAccessToken: {
|
generateWebProxyAccessToken: {
|
||||||
desc: 'Generates a new web proxy access token.',
|
desc: 'Generates a new web proxy access token.',
|
||||||
params: [],
|
params: [{
|
||||||
|
type: String,
|
||||||
|
desc: 'Domain for which the token is valid. Only requests to this domain will be allowed.',
|
||||||
|
example: 'https://parity.io'
|
||||||
|
}],
|
||||||
returns: {
|
returns: {
|
||||||
type: String,
|
type: String,
|
||||||
desc: 'The new web proxy access token.',
|
desc: 'The new web proxy access token.',
|
||||||
@ -194,5 +198,40 @@ export default {
|
|||||||
desc: '`true` when enabled, `false` when disabled.',
|
desc: '`true` when enabled, `false` when disabled.',
|
||||||
example: true
|
example: true
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
// Pub-Sub
|
||||||
|
subscribePending: {
|
||||||
|
desc: `
|
||||||
|
Starts a subscription for transactions in the confirmation queue.
|
||||||
|
Each event contains all transactions currently in the queue.
|
||||||
|
|
||||||
|
An example notification received by subscribing to this event:
|
||||||
|
\`\`\`
|
||||||
|
{"jsonrpc":"2.0","method":"signer_pending","params":{"subscription":"0x416d77337e24399d","result":[]}}
|
||||||
|
\`\`\`
|
||||||
|
|
||||||
|
You can unsubscribe using \`signer_unsubscribePending\` RPC method. Subscriptions are also tied to a transport
|
||||||
|
connection, disconnecting causes all subscriptions to be canceled.
|
||||||
|
`,
|
||||||
|
params: [],
|
||||||
|
returns: {
|
||||||
|
type: String,
|
||||||
|
desc: 'Assigned subscription ID',
|
||||||
|
example: '0x416d77337e24399d'
|
||||||
|
}
|
||||||
|
},
|
||||||
|
unsubscribePending: {
|
||||||
|
desc: 'Unsubscribes from pending transactions subscription.',
|
||||||
|
params: [{
|
||||||
|
type: String,
|
||||||
|
desc: 'Subscription ID',
|
||||||
|
example: '0x416d77337e24399d'
|
||||||
|
}],
|
||||||
|
returns: {
|
||||||
|
type: Boolean,
|
||||||
|
desc: 'whether the call was successful',
|
||||||
|
example: true
|
||||||
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@@ -422,7 +422,6 @@ export default class TransferStore {
     const tokenBalance = this.getTokenBalance();
     const { eth, token } = this.getValues(gasTotal);
 
-    let totalEth = gasTotal;
     let totalError = null;
     let valueError = null;
 
@@ -446,7 +445,7 @@ export default class TransferStore {
     this.totalError = totalError;
     this.valueError = valueError;
     this.gasStore.setErrorTotal(totalError);
-    this.gasStore.setEthValue(totalEth);
+    this.gasStore.setEthValue(eth.sub(gasTotal));
 
     this.total = this.api.util.fromWei(eth).toFixed();
 
@@ -59,16 +59,18 @@ export default class Store {
   }
 
   @action gotoUrl = (_url) => {
-    transaction(() => {
     let url = (_url || this.nextUrl).trim().replace(/\/+$/, '');
 
     if (!hasProtocol.test(url)) {
       url = `https://${url}`;
     }
 
+    return this.generateToken(url).then(() => {
+      transaction(() => {
        this.setNextUrl(url);
        this.setCurrentUrl(this.nextUrl);
       });
+    });
   }
 
   @action reload = () => {
@@ -134,11 +136,11 @@ export default class Store {
     this.nextUrl = url;
   }
 
-  generateToken = () => {
+  generateToken = (_url) => {
    this.setToken(null);
 
    return this._api.signer
-      .generateWebProxyAccessToken()
+      .generateWebProxyAccessToken(_url)
      .then((token) => {
        this.setToken(token);
      })
@@ -62,17 +62,18 @@ describe('views/Web/Store', () => {
   describe('gotoUrl', () => {
     it('uses the nextUrl when none specified', () => {
       store.setNextUrl('https://parity.io');
-      store.gotoUrl();
 
+      return store.gotoUrl().then(() => {
        expect(store.currentUrl).to.equal('https://parity.io');
       });
+    });
 
     it('adds https when no protocol', () => {
-      store.gotoUrl('google.com');
-
+      return store.gotoUrl('google.com').then(() => {
        expect(store.currentUrl).to.equal('https://google.com');
       });
     });
+  });
 
   describe('restoreUrl', () => {
     it('sets the nextUrl to the currentUrl', () => {
@@ -37,7 +37,6 @@ export default class Web extends Component {
 
   componentDidMount () {
     this.store.gotoUrl(this.props.params.url);
-    return this.store.generateToken();
   }
 
   componentWillReceiveProps (props) {
@@ -149,7 +149,8 @@ module.exports = {
       '~': path.resolve(__dirname, '../src'),
       '@parity/wordlist': path.resolve(__dirname, '../node_modules/@parity/wordlist'),
       '@parity': path.resolve(__dirname, '../src'),
-      'keythereum': path.resolve(__dirname, '../node_modules/keythereum/dist/keythereum')
+      'keythereum': path.resolve(__dirname, '../node_modules/keythereum/dist/keythereum'),
+      'vertx': 'empty-module'
     },
     modules: [
       path.join(__dirname, '../node_modules')
@@ -158,6 +159,10 @@ module.exports = {
     unsafeCache: true
   },
 
+  node: {
+    fs: 'empty'
+  },
+
   plugins: (function () {
     const DappsHTMLInjection = []
       .concat(DAPPS_BUILTIN, DAPPS_VIEWS)
@ -46,10 +46,16 @@ module.exports = {
|
|||||||
'~': path.resolve(__dirname, '../src'),
|
'~': path.resolve(__dirname, '../src'),
|
||||||
'@parity/wordlist': path.resolve(__dirname, '../node_modules/@parity/wordlist'),
|
'@parity/wordlist': path.resolve(__dirname, '../node_modules/@parity/wordlist'),
|
||||||
'@parity': path.resolve(__dirname, '../src'),
|
'@parity': path.resolve(__dirname, '../src'),
|
||||||
'keythereum': path.resolve(__dirname, '../node_modules/keythereum/dist/keythereum')
|
'keythereum': 'empty-module',
|
||||||
|
'secp256k1': 'empty-module',
|
||||||
|
'vertx': 'empty-module'
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
|
node: {
|
||||||
|
fs: 'empty'
|
||||||
|
},
|
||||||
|
|
||||||
module: {
|
module: {
|
||||||
rules: [
|
rules: [
|
||||||
rulesParity,
|
rulesParity,
|
||||||
|
@ -70,14 +70,17 @@ module.exports = {
|
|||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
node: {
|
||||||
|
fs: 'empty'
|
||||||
|
},
|
||||||
resolve: {
|
resolve: {
|
||||||
alias: {
|
alias: {
|
||||||
'~': path.resolve(__dirname, '../src'),
|
'~': path.resolve(__dirname, '../src'),
|
||||||
'@parity/wordlist': path.resolve(__dirname, '../node_modules/@parity/wordlist'),
|
'@parity/wordlist': path.resolve(__dirname, '../node_modules/@parity/wordlist'),
|
||||||
'@parity': path.resolve(__dirname, '../src'),
|
'@parity': path.resolve(__dirname, '../src'),
|
||||||
'secp256k1': path.resolve(__dirname, '../node_modules/secp256k1/js'),
|
'keythereum': 'empty-module',
|
||||||
'keythereum': path.resolve(__dirname, '../node_modules/keythereum/dist/keythereum')
|
'secp256k1': 'empty-module',
|
||||||
|
'vertx': 'empty-module'
|
||||||
},
|
},
|
||||||
modules: [
|
modules: [
|
||||||
path.resolve('./src'),
|
path.resolve('./src'),
|
||||||
|
@@ -21,7 +21,6 @@ use std::time::{Instant, Duration};
 use std::thread::sleep;
 use std::sync::Arc;
 use rustc_serialize::hex::FromHex;
-use io::{PanicHandler, ForwardPanic};
 use util::{ToPretty, U256, H256, Address, Hashable};
 use rlp::PayloadInfo;
 use ethcore::service::ClientService;

@@ -148,9 +147,6 @@ pub fn execute(cmd: BlockchainCmd) -> Result<(), String> {
 fn execute_import(cmd: ImportBlockchain) -> Result<(), String> {
   let timer = Instant::now();

-  // Setup panic handler
-  let panic_handler = PanicHandler::new_in_arc();
-
   // load spec file
   let spec = cmd.spec.spec()?;

@@ -219,7 +215,6 @@ fn execute_import(cmd: ImportBlockchain) -> Result<(), String> {
   // free up the spec in memory.
   drop(spec);

-  panic_handler.forward_from(&service);
   let client = service.client();

   let mut instream: Box<io::Read> = match cmd.file_path {

@@ -390,7 +385,6 @@ fn start_client(
 }

 fn execute_export(cmd: ExportBlockchain) -> Result<(), String> {
-  // Setup panic handler
   let service = start_client(
     cmd.dirs,
     cmd.spec,

@@ -403,10 +397,8 @@ fn execute_export(cmd: ExportBlockchain) -> Result<(), String> {
     cmd.wal,
     cmd.cache_config
   )?;
-  let panic_handler = PanicHandler::new_in_arc();
   let format = cmd.format.unwrap_or_default();

-  panic_handler.forward_from(&service);
   let client = service.client();

   let mut out: Box<io::Write> = match cmd.file_path {

@@ -433,7 +425,6 @@ fn execute_export(cmd: ExportBlockchain) -> Result<(), String> {
 }

 fn execute_export_state(cmd: ExportState) -> Result<(), String> {
-  // Setup panic handler
   let service = start_client(
     cmd.dirs,
     cmd.spec,

@@ -447,9 +438,6 @@ fn execute_export_state(cmd: ExportState) -> Result<(), String> {
     cmd.cache_config
   )?;

-  let panic_handler = PanicHandler::new_in_arc();
-
-  panic_handler.forward_from(&service);
   let client = service.client();

   let mut out: Box<io::Write> = match cmd.file_path {
@@ -232,7 +232,7 @@ mod server {
 ) -> Result<Middleware, String> {
   let signer = deps.signer;
   let parity_remote = parity_reactor::Remote::new(deps.remote.clone());
-  let web_proxy_tokens = Arc::new(move |token| signer.is_valid_web_proxy_access_token(&token));
+  let web_proxy_tokens = Arc::new(move |token| signer.web_proxy_access_token_domain(&token));

   Ok(parity_dapps::Middleware::dapps(
     parity_remote,
@@ -21,7 +21,6 @@ use fdlimit::raise_fd_limit;
 use parity_rpc::{NetworkSettings, informant, is_major_importing};
 use ethsync::NetworkConfiguration;
 use util::{Colour, version, Mutex, Condvar};
-use io::{MayPanic, ForwardPanic, PanicHandler};
 use ethcore_logger::{Config as LogConfig, RotatingLogger};
 use ethcore::miner::{StratumOptions, Stratum};
 use ethcore::client::{Client, Mode, DatabaseCompactionProfile, VMType, BlockChainClient};

@@ -44,7 +43,7 @@ use params::{
 };
 use helpers::{to_client_config, execute_upgrades, passwords_from_files};
 use upgrade::upgrade_key_location;
-use dir::Directories;
+use dir::{Directories, DatabaseDirectories};
 use cache::CacheConfig;
 use user_defaults::UserDefaults;
 use dapps;

@@ -168,8 +167,6 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
   use ethsync::{LightSyncParams, LightSync, ManageNetwork};
   use util::RwLock;

-  let panic_handler = PanicHandler::new_in_arc();
-
   // load spec
   let spec = cmd.spec.spec()?;

@@ -196,7 +193,9 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
   // create dirs used by parity
   cmd.dirs.create_dirs(cmd.dapps_conf.enabled, cmd.ui_conf.enabled, cmd.secretstore_conf.enabled)?;

-  info!("Starting {}", Colour::White.bold().paint(version()));
+  //print out running parity environment
+  print_running_environment(&spec.name, &cmd.dirs, &db_dirs, &cmd.dapps_conf);
+
   info!("Running in experimental {} mode.", Colour::Blue.bold().paint("Light Client"));

   // TODO: configurable cache size.

@@ -332,7 +331,7 @@ fn execute_light(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) ->
   });

   // wait for ctrl-c.
-  Ok(wait_for_exit(panic_handler, None, None, can_restart))
+  Ok(wait_for_exit(None, None, can_restart))
 }

 pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> Result<(bool, Option<String>), String> {

@@ -352,9 +351,6 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
     return execute_light(cmd, can_restart, logger);
   }

-  // set up panic handler
-  let panic_handler = PanicHandler::new_in_arc();
-
   // load spec
   let spec = cmd.spec.spec()?;

@@ -402,8 +398,10 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
     daemonize(pid_file)?;
   }

+  //print out running parity environment
+  print_running_environment(&spec.name, &cmd.dirs, &db_dirs, &cmd.dapps_conf);
+
   // display info about used pruning algorithm
-  info!("Starting {}", Colour::White.bold().paint(version()));
   info!("State DB configuration: {}{}{}",
     Colour::White.bold().paint(algorithm.as_str()),
     match fat_db {

@@ -517,9 +515,6 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
   // drop the spec to free up genesis state.
   drop(spec);

-  // forward panics from service
-  panic_handler.forward_from(&service);
-
   // take handle to client
   let client = service.client();
   let snapshot_service = service.snapshot_service();

@@ -731,7 +726,7 @@ pub fn execute(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>) -> R
   }

   // Handle exit
-  let restart = wait_for_exit(panic_handler, Some(updater), Some(client), can_restart);
+  let restart = wait_for_exit(Some(updater), Some(client), can_restart);

   info!("Finishing work, please wait...");

@@ -767,6 +762,13 @@ fn daemonize(_pid_file: String) -> Result<(), String> {
   Err("daemon is no supported on windows".into())
 }

+fn print_running_environment(spec_name: &String, dirs: &Directories, db_dirs: &DatabaseDirectories, dapps_conf: &dapps::Configuration) {
+  info!("Starting {}", Colour::White.bold().paint(version()));
+  info!("Keys path {}", Colour::White.bold().paint(dirs.keys_path(spec_name).to_string_lossy().into_owned()));
+  info!("DB path {}", Colour::White.bold().paint(db_dirs.db_root_path().to_string_lossy().into_owned()));
+  info!("Path to dapps {}", Colour::White.bold().paint(dapps_conf.dapps_path.to_string_lossy().into_owned()));
+}
+
 fn prepare_account_provider(spec: &SpecType, dirs: &Directories, data_dir: &str, cfg: AccountsConfig, passwords: &[String]) -> Result<AccountProvider, String> {
   use ethcore::ethstore::EthStore;
   use ethcore::ethstore::dir::RootDiskDirectory;

@@ -836,7 +838,6 @@ fn build_create_account_hint(spec: &SpecType, keys: &str) -> String {
 }

 fn wait_for_exit(
-  panic_handler: Arc<PanicHandler>,
   updater: Option<Arc<Updater>>,
   client: Option<Arc<Client>>,
   can_restart: bool

@@ -847,10 +848,6 @@ fn wait_for_exit(
   let e = exit.clone();
   CtrlC::set_handler(move || { e.1.notify_all(); });

-  // Handle panics
-  let e = exit.clone();
-  panic_handler.on_panic(move |_reason| { e.1.notify_all(); });
-
   if can_restart {
     if let Some(updater) = updater {
       // Handle updater wanting to restart us
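For context on the `wait_for_exit` hunks above: with the panic listener removed, the exit path reduces to a Ctrl-C handler notifying a condition variable that the main thread waits on. A minimal, self-contained sketch of that pattern using only the standard library (the real code uses Parity's `util::{Mutex, Condvar}` and its `CtrlC` wrapper, which are not reproduced here):

```rust
use std::sync::{Arc, Condvar, Mutex};

// Simplified stand-in for the exit handle built in `wait_for_exit`:
// a flag plus a condition variable that any shutdown source can notify.
struct ExitHandle {
    exit: Mutex<bool>,
    cond: Condvar,
}

impl ExitHandle {
    fn new() -> Arc<Self> {
        Arc::new(ExitHandle { exit: Mutex::new(false), cond: Condvar::new() })
    }

    // Called from e.g. a Ctrl-C handler or an updater that wants a restart.
    fn notify(&self) {
        *self.exit.lock().unwrap() = true;
        self.cond.notify_all();
    }

    // Block the calling thread until some shutdown source fires.
    fn wait(&self) {
        let mut exited = self.exit.lock().unwrap();
        while !*exited {
            exited = self.cond.wait(exited).unwrap();
        }
    }
}

fn main() {
    let handle = ExitHandle::new();
    let notifier = handle.clone();
    // In Parity this closure would be registered with `CtrlC::set_handler`;
    // here a plain thread stands in for the signal source.
    std::thread::spawn(move || notifier.notify());
    handle.wait();
    println!("Finishing work, please wait...");
}
```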
@@ -35,8 +35,6 @@ use dir::Directories;
 use user_defaults::UserDefaults;
 use fdlimit;

-use io::PanicHandler;
-
 /// Kinds of snapshot commands.
 #[derive(Debug, PartialEq, Clone, Copy)]
 pub enum Kind {

@@ -133,10 +131,7 @@ fn restore_using<R: SnapshotReader>(snapshot: Arc<SnapshotService>, reader: &R,

 impl SnapshotCommand {
   // shared portion of snapshot commands: start the client service
-  fn start_service(self) -> Result<(ClientService, Arc<PanicHandler>), String> {
-    // Setup panic handler
-    let panic_handler = PanicHandler::new_in_arc();
-
+  fn start_service(self) -> Result<ClientService, String> {
     // load spec file
     let spec = self.spec.spec()?;

@@ -196,12 +191,12 @@ impl SnapshotCommand {
       Arc::new(Miner::with_spec(&spec))
     ).map_err(|e| format!("Client service error: {:?}", e))?;

-    Ok((service, panic_handler))
+    Ok(service)
   }
   /// restore from a snapshot
   pub fn restore(self) -> Result<(), String> {
     let file = self.file_path.clone();
-    let (service, _panic_handler) = self.start_service()?;
+    let service = self.start_service()?;

     warn!("Snapshot restoration is experimental and the format may be subject to change.");
     warn!("On encountering an unexpected error, please ensure that you have a recent snapshot.");

@@ -236,7 +231,7 @@ impl SnapshotCommand {
     let file_path = self.file_path.clone().ok_or("No file path provided.".to_owned())?;
     let file_path: PathBuf = file_path.into();
     let block_at = self.block_at;
-    let (service, _panic_handler) = self.start_service()?;
+    let service = self.start_service()?;

     warn!("Snapshots are currently experimental. File formats may be subject to change.");
@@ -162,7 +162,7 @@ impl LightFetch {

   let (sync, on_demand, client) = (self.sync.clone(), self.on_demand.clone(), self.client.clone());
   let req: CallRequestHelper = req.into();
-  let id = num.0.into();
+  let id = num.unwrap_or_default().into();

   let from = req.from.unwrap_or(Address::zero());
   let nonce_fut = match req.nonce {
@@ -16,6 +16,7 @@

 use std::sync::Arc;
 use std::ops::Deref;
+use http::Origin;
 use util::Mutex;
 use transient_hashmap::TransientHashMap;

@@ -29,7 +30,7 @@ const TOKEN_LIFETIME_SECS: u32 = 3600;
 pub struct SignerService {
   is_enabled: bool,
   queue: Arc<ConfirmationsQueue>,
-  web_proxy_tokens: Mutex<TransientHashMap<String, ()>>,
+  web_proxy_tokens: Mutex<TransientHashMap<String, Origin>>,
   generate_new_token: Box<Fn() -> Result<String, String> + Send + Sync + 'static>,
 }

@@ -46,16 +47,16 @@ impl SignerService {
   }

   /// Checks if the token is valid web proxy access token.
-  pub fn is_valid_web_proxy_access_token(&self, token: &String) -> bool {
-    self.web_proxy_tokens.lock().contains_key(&token)
+  pub fn web_proxy_access_token_domain(&self, token: &String) -> Option<Origin> {
+    self.web_proxy_tokens.lock().get(token).cloned()
   }

   /// Generates a new web proxy access token.
-  pub fn generate_web_proxy_access_token(&self) -> String {
+  pub fn generate_web_proxy_access_token(&self, domain: Origin) -> String {
     let token = random_string(16);
     let mut tokens = self.web_proxy_tokens.lock();
     tokens.prune();
-    tokens.insert(token.clone(), ());
+    tokens.insert(token.clone(), domain);
     token
   }
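The `SignerService` hunks above turn the web proxy token set into a token-to-domain map, so the dapps middleware can learn which origin a proxy token was issued for instead of getting a bare yes/no answer. A rough sketch of the same idea, with a plain `HashMap` standing in for `TransientHashMap` and a `String` standing in for `http::Origin` (both simplifications, not the actual types):

```rust
use std::collections::HashMap;
use std::sync::Mutex;

// Token registry keyed by token, storing the domain the token was issued for.
struct WebProxyTokens {
    tokens: Mutex<HashMap<String, String>>,
}

impl WebProxyTokens {
    fn new() -> Self {
        WebProxyTokens { tokens: Mutex::new(HashMap::new()) }
    }

    // Before the change: the map stored `()` and lookup answered `contains_key`.
    // After: the map stores the domain and hands it back on lookup.
    fn generate(&self, token: String, domain: String) -> String {
        self.tokens.lock().unwrap().insert(token.clone(), domain);
        token
    }

    fn domain(&self, token: &str) -> Option<String> {
        self.tokens.lock().unwrap().get(token).cloned()
    }
}

fn main() {
    let registry = WebProxyTokens::new();
    let token = registry.generate("abc123".into(), "https://google.com".into());
    assert_eq!(registry.domain(&token).as_deref(), Some("https://google.com"));
    assert_eq!(registry.domain("unknown"), None);
}
```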
@@ -361,7 +361,7 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
   fn balance(&self, address: RpcH160, num: Trailing<BlockNumber>) -> BoxFuture<RpcU256, Error> {
     let address = address.into();

-    let res = match num.0.clone() {
+    let res = match num.unwrap_or_default() {
       BlockNumber::Pending => {
         match self.miner.balance(&*self.client, &address) {
           Some(balance) => Ok(balance.into()),

@@ -384,7 +384,7 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
     let address: Address = RpcH160::into(address);
     let position: U256 = RpcU256::into(pos);

-    let res = match num.0.clone() {
+    let res = match num.unwrap_or_default() {
       BlockNumber::Pending => {
         match self.miner.storage_at(&*self.client, &address, &H256::from(position)) {
           Some(s) => Ok(s.into()),

@@ -406,7 +406,7 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
   fn transaction_count(&self, address: RpcH160, num: Trailing<BlockNumber>) -> BoxFuture<RpcU256, Error> {
     let address: Address = RpcH160::into(address);

-    let res = match num.0.clone() {
+    let res = match num.unwrap_or_default() {
       BlockNumber::Pending if self.options.pending_nonce_from_queue => {
         let nonce = self.miner.last_nonce(&address)
           .map(|n| n + 1.into())

@@ -468,7 +468,7 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
   fn code_at(&self, address: RpcH160, num: Trailing<BlockNumber>) -> BoxFuture<Bytes, Error> {
     let address: Address = RpcH160::into(address);

-    let res = match num.0.clone() {
+    let res = match num.unwrap_or_default() {
       BlockNumber::Pending => {
         match self.miner.code(&*self.client, &address) {
           Some(code) => Ok(code.map_or_else(Bytes::default, Bytes::new)),

@@ -553,7 +553,7 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
   }

   fn work(&self, no_new_work_timeout: Trailing<u64>) -> Result<Work, Error> {
-    let no_new_work_timeout = no_new_work_timeout.0;
+    let no_new_work_timeout = no_new_work_timeout.unwrap_or_default();

     // check if we're still syncing and return empty strings in that case
     {

@@ -638,7 +638,7 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
       Err(e) => return future::err(e).boxed(),
     };

-    let result = match num.0 {
+    let result = match num.unwrap_or_default() {
       BlockNumber::Pending => self.miner.call(&*self.client, &signed, Default::default()),
       num => self.client.call(&signed, num.into(), Default::default()),
     };

@@ -655,7 +655,7 @@ impl<C, SN: ?Sized, S: ?Sized, M, EM> Eth for EthClient<C, SN, S, M, EM> where
       Ok(signed) => signed,
       Err(e) => return future::err(e).boxed(),
     };
-    future::done(self.client.estimate_gas(&signed, num.0.into())
+    future::done(self.client.estimate_gas(&signed, num.unwrap_or_default().into())
       .map(Into::into)
       .map_err(errors::from_call_error)
     ).boxed()
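The repeated `num.0` → `num.unwrap_or_default()` edits in these RPC hunks track a change in jsonrpc-macros where `Trailing<T>` stops being read as a plain newtype field and is handled like an optional trailing parameter. The upstream definition is not shown in this diff; the following is only an illustrative stand-in that mirrors the methods used here (`unwrap_or_default`, `unwrap_or`), so the intent of the edits is visible in isolation:

```rust
// Illustrative stand-in for jsonrpc-macros' Trailing<T>; not the real definition.
// The real type also converts into Option<T>, as the eth_pubsub hunk shows.
#[derive(Debug)]
struct Trailing<T>(Option<T>);

impl<T> Trailing<T> {
    fn unwrap_or(self, default: T) -> T {
        self.0.unwrap_or(default)
    }
}

impl<T: Default> Trailing<T> {
    fn unwrap_or_default(self) -> T {
        self.0.unwrap_or_default()
    }
}

#[derive(Debug, PartialEq)]
enum BlockNumber {
    Latest,
    Num(u64),
}

impl Default for BlockNumber {
    fn default() -> Self {
        BlockNumber::Latest
    }
}

fn main() {
    // An omitted trailing argument falls back to the default (`Latest`),
    // which is what `num.unwrap_or_default()` expresses in the RPC handlers.
    let omitted: Trailing<BlockNumber> = Trailing(None);
    assert_eq!(omitted.unwrap_or_default(), BlockNumber::Latest);

    let given = Trailing(Some(BlockNumber::Num(42)));
    assert_eq!(given.unwrap_or(BlockNumber::Latest), BlockNumber::Num(42));
}
```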
@@ -144,8 +144,9 @@ impl<C: Send + Sync + 'static> EthPubSub for EthPubSubClient<C> {
     kind: pubsub::Kind,
     params: Trailing<pubsub::Params>,
   ) {
-    match (kind, params.0) {
-      (pubsub::Kind::NewHeads, pubsub::Params::None) => {
+    let params: Option<pubsub::Params> = params.into();
+    match (kind, params) {
+      (pubsub::Kind::NewHeads, None) => {
        self.heads_subscribers.lock().push(subscriber)
      },
      _ => {
@@ -270,7 +270,7 @@ impl Eth for EthClient {
   }

   fn balance(&self, address: RpcH160, num: Trailing<BlockNumber>) -> BoxFuture<RpcU256, Error> {
-    self.fetcher().account(address.into(), num.0.into())
+    self.fetcher().account(address.into(), num.unwrap_or_default().into())
       .map(|acc| acc.map_or(0.into(), |a| a.balance).into()).boxed()
   }

@@ -287,7 +287,7 @@ impl Eth for EthClient {
   }

   fn transaction_count(&self, address: RpcH160, num: Trailing<BlockNumber>) -> BoxFuture<RpcU256, Error> {
-    self.fetcher().account(address.into(), num.0.into())
+    self.fetcher().account(address.into(), num.unwrap_or_default().into())
       .map(|acc| acc.map_or(0.into(), |a| a.nonce).into()).boxed()
   }
@@ -98,7 +98,7 @@ impl Parity for ParityClient {
   type Metadata = Metadata;

   fn accounts_info(&self, dapp: Trailing<DappId>) -> Result<BTreeMap<H160, AccountInfo>, Error> {
-    let dapp = dapp.0;
+    let dapp = dapp.unwrap_or_default();

     let store = &self.accounts;
     let dapp_accounts = store

@@ -383,7 +383,7 @@ impl Parity for ParityClient {
       }
     };

-    self.fetcher().header(number.0.into()).map(from_encoded).boxed()
+    self.fetcher().header(number.unwrap_or_default().into()).map(from_encoded).boxed()
   }

   fn ipfs_cid(&self, content: Bytes) -> Result<String, Error> {
@@ -124,7 +124,7 @@ impl<C, M, S: ?Sized, U> Parity for ParityClient<C, M, S, U> where
   type Metadata = Metadata;

   fn accounts_info(&self, dapp: Trailing<DappId>) -> Result<BTreeMap<H160, AccountInfo>, Error> {
-    let dapp = dapp.0;
+    let dapp = dapp.unwrap_or_default();

     let store = self.account_provider()?;
     let dapp_accounts = store

@@ -272,13 +272,13 @@ impl<C, M, S: ?Sized, U> Parity for ParityClient<C, M, S, U> where

   fn list_accounts(&self, count: u64, after: Option<H160>, block_number: Trailing<BlockNumber>) -> Result<Option<Vec<H160>>, Error> {
     Ok(self.client
-      .list_accounts(block_number.0.into(), after.map(Into::into).as_ref(), count)
+      .list_accounts(block_number.unwrap_or_default().into(), after.map(Into::into).as_ref(), count)
       .map(|a| a.into_iter().map(Into::into).collect()))
   }

   fn list_storage_keys(&self, address: H160, count: u64, after: Option<H256>, block_number: Trailing<BlockNumber>) -> Result<Option<Vec<H256>>, Error> {
     Ok(self.client
-      .list_storage(block_number.0.into(), &address.into(), after.map(Into::into).as_ref(), count)
+      .list_storage(block_number.unwrap_or_default().into(), &address.into(), after.map(Into::into).as_ref(), count)
       .map(|a| a.into_iter().map(Into::into).collect()))
   }

@@ -394,7 +394,7 @@ impl<C, M, S: ?Sized, U> Parity for ParityClient<C, M, S, U> where
   fn block_header(&self, number: Trailing<BlockNumber>) -> BoxFuture<RichHeader, Error> {
     const EXTRA_INFO_PROOF: &'static str = "Object exists in in blockchain (fetched earlier), extra_info is always available if object exists; qed";

-    let id: BlockId = number.0.into();
+    let id: BlockId = number.unwrap_or_default().into();
     let encoded = match self.client.block_header(id.clone()) {
       Some(encoded) => encoded,
       None => return future::err(errors::unknown_block()).boxed(),
@@ -22,6 +22,7 @@ use util::RwLock;

 use futures::{self, BoxFuture, Future, Stream, Sink};
 use jsonrpc_core::{self as core, Error, MetaIoHandler};
+use jsonrpc_macros::Trailing;
 use jsonrpc_macros::pubsub::Subscriber;
 use jsonrpc_pubsub::SubscriptionId;
 use tokio_timer;

@@ -74,7 +75,8 @@ impl PubSubClient<core::NoopMiddleware> {
 impl<S: core::Middleware<Metadata>> PubSub for PubSubClient<S> {
   type Metadata = Metadata;

-  fn parity_subscribe(&self, mut meta: Metadata, subscriber: Subscriber<core::Value>, method: String, params: core::Params) {
+  fn parity_subscribe(&self, mut meta: Metadata, subscriber: Subscriber<core::Value>, method: String, params: Trailing<core::Params>) {
+    let params = params.unwrap_or(core::Params::Array(vec![]));
     // Make sure to get rid of PubSub session otherwise it will never be dropped.
     meta.session = None;
@@ -245,8 +245,8 @@ impl<D: Dispatcher + 'static> Signer for SignerClient<D> {
       .map_err(|e| errors::token(e))
   }

-  fn generate_web_proxy_token(&self) -> Result<String, Error> {
-    Ok(self.signer.generate_web_proxy_access_token())
+  fn generate_web_proxy_token(&self, domain: String) -> Result<String, Error> {
+    Ok(self.signer.generate_web_proxy_access_token(domain.into()))
   }

   fn subscribe_pending(&self, _meta: Self::Metadata, sub: Subscriber<Vec<ConfirmationRequest>>) {
@@ -80,7 +80,7 @@ impl<C, M> Traces for TracesClient<C, M> where C: MiningBlockChainClient + 'stat
   }

   fn call(&self, request: CallRequest, flags: Vec<String>, block: Trailing<BlockNumber>) -> Result<TraceResults, Error> {
-    let block = block.0;
+    let block = block.unwrap_or_default();

     let request = CallRequest::into(request);
     let signed = fake_sign::sign_call(&self.client, &self.miner, request)?;

@@ -91,7 +91,7 @@ impl<C, M> Traces for TracesClient<C, M> where C: MiningBlockChainClient + 'stat
   }

   fn raw_transaction(&self, raw_transaction: Bytes, flags: Vec<String>, block: Trailing<BlockNumber>) -> Result<TraceResults, Error> {
-    let block = block.0;
+    let block = block.unwrap_or_default();

     let tx = UntrustedRlp::new(&raw_transaction.into_vec()).as_val().map_err(|e| errors::invalid_params("Transaction is not valid RLP", e))?;
     let signed = SignedTransaction::new(tx).map_err(errors::from_transaction_error)?;
@@ -18,6 +18,7 @@

 use jsonrpc_core::{Error, Value, Params};
 use jsonrpc_pubsub::SubscriptionId;
+use jsonrpc_macros::Trailing;
 use jsonrpc_macros::pubsub::Subscriber;
 use futures::BoxFuture;

@@ -29,7 +30,7 @@ build_rpc_trait! {
   #[pubsub(name = "parity_subscription")] {
     /// Subscribe to changes of any RPC method in Parity.
     #[rpc(name = "parity_subscribe")]
-    fn parity_subscribe(&self, Self::Metadata, Subscriber<Value>, String, Params);
+    fn parity_subscribe(&self, Self::Metadata, Subscriber<Value>, String, Trailing<Params>);

     /// Unsubscribe from existing Parity subscription.
     #[rpc(name = "parity_unsubscribe")]
@@ -51,9 +51,9 @@ build_rpc_trait! {
     #[rpc(name = "signer_generateAuthorizationToken")]
     fn generate_token(&self) -> Result<String, Error>;

-    /// Generates new web proxy access token.
+    /// Generates new web proxy access token for particular domain.
     #[rpc(name = "signer_generateWebProxyAccessToken")]
-    fn generate_web_proxy_token(&self) -> Result<String, Error>;
+    fn generate_web_proxy_token(&self, String) -> Result<String, Error>;

     #[pubsub(name = "signer_pending")] {
       /// Subscribe to new pending requests on signer interface.
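Following the trait change above, `signer_generateWebProxyAccessToken` takes the target domain as its single string argument. A hedged sketch of what a request and response could look like over the wire, built with `serde_json`; the method name and the single string parameter come from the trait definition, while the JSON-RPC 2.0 envelope and the example token value are assumptions for illustration only:

```rust
use serde_json::json;

fn main() {
    // Hypothetical request: params carries only the domain the token is for.
    let request = json!({
        "jsonrpc": "2.0",
        "method": "signer_generateWebProxyAccessToken",
        "params": ["https://google.com"],
        "id": 1
    });
    println!("{}", request);

    // A successful response would carry the freshly generated token string.
    let response = json!({
        "jsonrpc": "2.0",
        "result": "example-token-value",
        "id": 1
    });
    println!("{}", response);
}
```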
@@ -66,7 +66,6 @@ extern crate parking_lot;

 mod service;
 mod worker;
-mod panics;

 use mio::{Token};
 use mio::deprecated::{EventLoop, NotifyError};

@@ -137,7 +136,6 @@ pub use service::IoService;
 pub use service::IoChannel;
 pub use service::IoManager;
 pub use service::TOKENS_PER_HANDLER;
-pub use panics::{PanicHandler, MayPanic, OnPanicListener, ForwardPanic};

 #[cfg(test)]
 mod tests {

@@ -171,5 +169,4 @@ mod tests {
     let service = IoService::<MyMessage>::start().expect("Error creating network service");
     service.register_handler(Arc::new(MyHandler)).unwrap();
   }
-
 }
@@ -1,191 +0,0 @@
-// Copyright 2015-2017 Parity Technologies (UK) Ltd.
-// This file is part of Parity.
-
-// Parity is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// Parity is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-
-// You should have received a copy of the GNU General Public License
-// along with Parity. If not, see <http://www.gnu.org/licenses/>.
-
-//! Panic utilities
-
-use std::thread;
-use std::sync::Arc;
-use std::default::Default;
-
-use parking_lot::Mutex;
-
-/// Thread-safe closure for handling possible panics
-pub trait OnPanicListener: Send + Sync + 'static {
-  /// Invoke listener
-  fn call(&mut self, arg: &str);
-}
-
-/// Forwards panics from child
-pub trait ForwardPanic {
-  /// Attach `on_panic` listener to `child` and rethrow all panics
-  fn forward_from<S>(&self, child: &S) where S : MayPanic;
-}
-
-/// Trait indicating that the structure catches some of the panics (most probably from spawned threads)
-/// and it's possbile to be notified when one of the threads panics.
-pub trait MayPanic {
-  /// `closure` will be invoked whenever panic in thread is caught
-  fn on_panic<F>(&self, closure: F) where F: OnPanicListener;
-}
-
-struct PanicGuard<'a> {
-  handler: &'a PanicHandler,
-}
-
-impl<'a> Drop for PanicGuard<'a> {
-  fn drop(&mut self) {
-    if thread::panicking() {
-      self.handler.notify_all("Panic!".to_owned());
-    }
-  }
-}
-
-/// Structure that allows to catch panics and notify listeners
-pub struct PanicHandler {
-  listeners: Mutex<Vec<Box<OnPanicListener>>>
-}
-
-impl Default for PanicHandler {
-  fn default() -> Self {
-    PanicHandler::new()
-  }
-}
-
-impl PanicHandler {
-  /// Creates new `PanicHandler` wrapped in `Arc`
-  pub fn new_in_arc() -> Arc<Self> {
-    Arc::new(Self::new())
-  }
-
-  /// Creates new `PanicHandler`
-  pub fn new() -> Self {
-    PanicHandler {
-      listeners: Mutex::new(vec![])
-    }
-  }
-
-  /// Invoke closure and catch any possible panics.
-  /// In case of panic notifies all listeners about it.
-  #[cfg_attr(feature="dev", allow(deprecated))]
-  pub fn catch_panic<G, R>(&self, g: G) -> thread::Result<R> where G: FnOnce() -> R + Send + 'static {
-    let _guard = PanicGuard { handler: self };
-    let result = g();
-    Ok(result)
-  }
-
-  /// Notifies all listeners in case there is a panic.
-  /// You should use `catch_panic` instead of calling this method explicitly.
-  pub fn notify_all(&self, r: String) {
-    let mut listeners = self.listeners.lock();
-    for mut listener in &mut **listeners {
-      listener.call(&r);
-    }
-  }
-}
-
-impl MayPanic for PanicHandler {
-  fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
-    self.listeners.lock().push(Box::new(closure));
-  }
-}
-
-impl ForwardPanic for Arc<PanicHandler> {
-  fn forward_from<S>(&self, child: &S) where S : MayPanic {
-    let p = self.clone();
-    child.on_panic(move |t| p.notify_all(t));
-  }
-}
-
-impl<F> OnPanicListener for F
-  where F: FnMut(String) + Send + Sync + 'static {
-  fn call(&mut self, arg: &str) {
-    self(arg.to_owned())
-  }
-}
-
-#[test]
-#[ignore] // panic forwarding doesnt work on the same thread in beta
-fn should_notify_listeners_about_panic () {
-  use parking_lot::RwLock;
-  // given
-  let invocations = Arc::new(RwLock::new(vec![]));
-  let i = invocations.clone();
-  let p = PanicHandler::new();
-  p.on_panic(move |t| i.write().push(t));
-
-  // when
-  p.catch_panic(|| panic!("Panic!")).unwrap_err();
-
-  // then
-  assert!(invocations.read()[0] == "Panic!");
-}
-
-#[test]
-#[ignore] // panic forwarding doesnt work on the same thread in beta
-fn should_notify_listeners_about_panic_when_string_is_dynamic () {
-  use parking_lot::RwLock;
-  // given
-  let invocations = Arc::new(RwLock::new(vec![]));
-  let i = invocations.clone();
-  let p = PanicHandler::new();
-  p.on_panic(move |t| i.write().push(t));
-
-  // when
-  p.catch_panic(|| panic!("Panic: {}", 1)).unwrap_err();
-
-  // then
-  assert!(invocations.read()[0] == "Panic: 1");
-}
-
-#[test]
-fn should_notify_listeners_about_panic_in_other_thread () {
-  use std::thread;
-  use parking_lot::RwLock;
-  // given
-  let invocations = Arc::new(RwLock::new(vec![]));
-  let i = invocations.clone();
-  let p = PanicHandler::new();
-  p.on_panic(move |t| i.write().push(t));
-
-  // when
-  let t = thread::spawn(move ||
-    p.catch_panic(|| panic!("Panic!")).unwrap()
-  );
-  t.join().unwrap_err();
-
-  // then
-  assert!(invocations.read()[0] == "Panic!");
-}
-
-#[test]
-#[ignore] // panic forwarding doesnt work on the same thread in beta
-fn should_forward_panics () {
-  use parking_lot::RwLock;
-  // given
-  let invocations = Arc::new(RwLock::new(vec![]));
-  let i = invocations.clone();
-  let p = PanicHandler::new_in_arc();
-  p.on_panic(move |t| i.write().push(t));
-
-  let p2 = PanicHandler::new();
-  p.forward_from(&p2);
-
-  // when
-  p2.catch_panic(|| panic!("Panic!")).unwrap_err();
-
-  // then
-  assert!(invocations.read()[0] == "Panic!");
-}
@@ -24,7 +24,6 @@ use crossbeam::sync::chase_lev;
 use slab::Slab;
 use {IoError, IoHandler};
 use worker::{Worker, Work, WorkType};
-use panics::*;
 use parking_lot::{RwLock, Mutex};
 use std::sync::{Condvar as SCondvar, Mutex as SMutex};
 use std::time::Duration;

@@ -191,7 +190,6 @@ pub struct IoManager<Message> where Message: Send + Sync {
 impl<Message> IoManager<Message> where Message: Send + Sync + Clone + 'static {
   /// Creates a new instance and registers it with the event loop.
   pub fn start(
-    panic_handler: Arc<PanicHandler>,
     event_loop: &mut EventLoop<IoManager<Message>>,
     handlers: Arc<RwLock<Slab<Arc<IoHandler<Message>>, HandlerId>>>
   ) -> Result<(), IoError> {

@@ -206,7 +204,6 @@ impl<Message> IoManager<Message> where Message: Send + Sync + Clone + 'static {
       IoChannel::new(event_loop.channel(), Arc::downgrade(&handlers)),
       work_ready.clone(),
       work_ready_mutex.clone(),
-      panic_handler.clone(),
     )
   ).collect();
@@ -417,37 +414,24 @@ impl<Message> IoChannel<Message> where Message: Send + Clone + Sync + 'static {
 /// General IO Service. Starts an event loop and dispatches IO requests.
 /// 'Message' is a notification message type
 pub struct IoService<Message> where Message: Send + Sync + Clone + 'static {
-  panic_handler: Arc<PanicHandler>,
   thread: Mutex<Option<JoinHandle<()>>>,
   host_channel: Mutex<Sender<IoMessage<Message>>>,
   handlers: Arc<RwLock<Slab<Arc<IoHandler<Message>>, HandlerId>>>,
 }

-impl<Message> MayPanic for IoService<Message> where Message: Send + Sync + Clone + 'static {
-  fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
-    self.panic_handler.on_panic(closure);
-  }
-}
-
 impl<Message> IoService<Message> where Message: Send + Sync + Clone + 'static {
   /// Starts IO event loop
   pub fn start() -> Result<IoService<Message>, IoError> {
-    let panic_handler = PanicHandler::new_in_arc();
     let mut config = EventLoopBuilder::new();
     config.messages_per_tick(1024);
     let mut event_loop = config.build().expect("Error creating event loop");
     let channel = event_loop.channel();
-    let panic = panic_handler.clone();
     let handlers = Arc::new(RwLock::new(Slab::new(MAX_HANDLERS)));
     let h = handlers.clone();
     let thread = thread::spawn(move || {
-      let p = panic.clone();
-      panic.catch_panic(move || {
-        IoManager::<Message>::start(p, &mut event_loop, h).expect("Error starting IO service");
-      }).expect("Error starting panic handler")
+      IoManager::<Message>::start(&mut event_loop, h).expect("Error starting IO service");
     });
     Ok(IoService {
-      panic_handler: panic_handler,
       thread: Mutex::new(Some(thread)),
       host_channel: Mutex::new(channel),
       handlers: handlers,
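With `PanicHandler` gone from `IoService`, a panic inside the event-loop thread is no longer forwarded to registered listeners; the remaining standard-library behaviour is that the panic stays with the spawned thread and surfaces as an `Err` when its `JoinHandle` is joined. A small sketch of that behaviour, using plain `std::thread` and nothing Parity-specific:

```rust
use std::thread;

fn main() {
    // The spawned closure stands in for the IoManager event loop.
    let handle = thread::spawn(|| {
        panic!("event loop failed");
    });

    // Without a panic-forwarding handler, the panic is observed only here:
    // `join` returns Err carrying the panic payload.
    match handle.join() {
        Ok(()) => println!("event loop exited cleanly"),
        Err(payload) => {
            let msg = payload
                .downcast_ref::<&str>()
                .copied()
                .unwrap_or("unknown panic");
            println!("event loop panicked: {}", msg);
        }
    }
}
```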
@@ -20,7 +20,6 @@ use std::sync::atomic::{AtomicBool, Ordering as AtomicOrdering};
 use crossbeam::sync::chase_lev;
 use service::{HandlerId, IoChannel, IoContext};
 use IoHandler;
-use panics::*;
 use std::cell::Cell;

 use std::sync::{Condvar as SCondvar, Mutex as SMutex};

@@ -65,7 +64,6 @@ impl Worker {
     channel: IoChannel<Message>,
     wait: Arc<SCondvar>,
     wait_mutex: Arc<SMutex<()>>,
-    panic_handler: Arc<PanicHandler>
   ) -> Worker
     where Message: Send + Sync + Clone + 'static {
     let deleting = Arc::new(AtomicBool::new(false));

@@ -78,9 +76,7 @@ impl Worker {
     worker.thread = Some(thread::Builder::new().stack_size(STACK_SIZE).name(format!("IO Worker #{}", index)).spawn(
       move || {
         LOCAL_STACK_SIZE.with(|val| val.set(STACK_SIZE));
-        panic_handler.catch_panic(move || {
         Worker::work_loop(stealer, channel.clone(), wait, wait_mutex.clone(), deleting)
-        }).expect("Error starting panic handler")
       })
       .expect("Error creating worker thread"));
     worker
@@ -46,7 +46,6 @@ pub struct NetworkService {
   host_info: String,
   host: RwLock<Option<Arc<Host>>>,
   stats: Arc<NetworkStats>,
-  panic_handler: Arc<PanicHandler>,
   host_handler: Arc<HostHandler>,
   config: NetworkConfiguration,
 }

@@ -55,9 +54,7 @@ impl NetworkService {
   /// Starts IO event loop
   pub fn new(config: NetworkConfiguration) -> Result<NetworkService, NetworkError> {
     let host_handler = Arc::new(HostHandler { public_url: RwLock::new(None) });
-    let panic_handler = PanicHandler::new_in_arc();
     let io_service = IoService::<NetworkIoMessage>::start()?;
-    panic_handler.forward_from(&io_service);

     let stats = Arc::new(NetworkStats::new());
     let host_info = Host::client_version();

@@ -65,7 +62,6 @@ impl NetworkService {
     io_service: io_service,
     host_info: host_info,
     stats: stats,
-    panic_handler: panic_handler,
     host: RwLock::new(None),
     config: config,
     host_handler: host_handler,

@@ -192,9 +188,3 @@ impl NetworkService {
     host.as_ref().map(|ref host| host.with_context_eval(protocol, &io, action))
   }
 }
-
-impl MayPanic for NetworkService {
-  fn on_panic<F>(&self, closure: F) where F: OnPanicListener {
-    self.panic_handler.on_panic(closure);
-  }
-}