Fix slow balances (#6471)

* Update token updates

* Update token info fetching

* Update logger

* Minor fixes to updates and notifications for balances

* Use Pubsub

* Fix timeout.

* Use pubsub for status.

* Fix signer subscription.

* Process tokens in chunks.

* Fix tokens loaded by chunks

* Linting

* Dispatch tokens asap

* Fix chunks processing.

* Better filter options

* Parallel log fetching.

* Fix signer polling.

* Fix initial block query.

* Token balances updates : the right(er) way

* Better tokens info fetching

* Fixes in token data fetching

* Only fetch what's needed (tokens)

* Fix linting issues

* Revert "Transaction permissioning (#6441)"

This reverts commit eed0e8b03a.

* Revert "Revert "Transaction permissioning (#6441)""

This reverts commit 8f96415e58dde652e5828706eb2639d43416f448.

* Update wasm-tests.

* Fixing balances fetching

* Fix requests tracking in UI

* Fix request watching

* Update the Logger

* PR Grumbles Fixes

* PR Grumbles fixes

* Linting...
Author: Nicolas Gotchac, 2017-09-10 18:03:35 +02:00 (committed by Gav Wood)
Parent: ee14a3fb31
Commit: f1a050366f
51 changed files with 1819 additions and 857 deletions

Cargo.lock (generated)

@ -295,6 +295,15 @@ dependencies = [
"unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "coco"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"either 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"scopeguard 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "common-types" name = "common-types"
version = "0.1.0" version = "0.1.0"
@ -395,14 +404,6 @@ dependencies = [
"winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "deque"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "difference" name = "difference"
version = "1.0.0" version = "1.0.0"
@ -544,6 +545,7 @@ dependencies = [
"parking_lot 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "parking_lot 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"price-info 1.7.0", "price-info 1.7.0",
"rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)", "rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rlp 0.2.0", "rlp 0.2.0",
"rlp_derive 0.1.0", "rlp_derive 0.1.0",
"rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)", "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
@ -1048,7 +1050,7 @@ name = "gcc"
version = "0.3.51" version = "0.3.51"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"rayon 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
@ -2433,18 +2435,27 @@ dependencies = [
[[package]] [[package]]
name = "rayon" name = "rayon"
version = "0.7.0" version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"rayon-core 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", "rayon-core 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rayon"
version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rayon-core 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]] [[package]]
name = "rayon-core" name = "rayon-core"
version = "1.0.0" version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [ dependencies = [
"deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "coco 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"futures 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -2496,7 +2507,7 @@ dependencies = [
"gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.51 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", "lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.21 (registry+https://github.com/rust-lang/crates.io-index)",
"rayon 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"untrusted 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "untrusted 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -2647,6 +2658,11 @@ name = "scoped-tls"
version = "0.1.0" version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "scopeguard"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]] [[package]]
name = "secur32-sys" name = "secur32-sys"
version = "0.2.0" version = "0.2.0"
@ -3388,6 +3404,7 @@ dependencies = [
"checksum clap 2.24.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6b8f69e518f967224e628896b54e41ff6acfb4dcfefc5076325c36525dac900f" "checksum clap 2.24.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6b8f69e518f967224e628896b54e41ff6acfb4dcfefc5076325c36525dac900f"
"checksum clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "5b4fabf979ddf6419a313c1c0ada4a5b95cfd2049c56e8418d622d27b4b6ff32" "checksum clippy 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "5b4fabf979ddf6419a313c1c0ada4a5b95cfd2049c56e8418d622d27b4b6ff32"
"checksum clippy_lints 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "ce96ec05bfe018a0d5d43da115e54850ea2217981ff0f2e462780ab9d594651a" "checksum clippy_lints 0.0.103 (registry+https://github.com/rust-lang/crates.io-index)" = "ce96ec05bfe018a0d5d43da115e54850ea2217981ff0f2e462780ab9d594651a"
"checksum coco 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c06169f5beb7e31c7c67ebf5540b8b472d23e3eade3b2ec7d1f5b504a85f91bd"
"checksum conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "78ff10625fd0ac447827aa30ea8b861fead473bb60aeb73af6c1c58caf0d1299" "checksum conv 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "78ff10625fd0ac447827aa30ea8b861fead473bb60aeb73af6c1c58caf0d1299"
"checksum cookie 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d53b80dde876f47f03cda35303e368a79b91c70b0d65ecba5fd5280944a08591" "checksum cookie 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d53b80dde876f47f03cda35303e368a79b91c70b0d65ecba5fd5280944a08591"
"checksum core-foundation 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "20a6d0448d3a99d977ae4a2aa5a98d886a923e863e81ad9ff814645b6feb3bbd" "checksum core-foundation 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "20a6d0448d3a99d977ae4a2aa5a98d886a923e863e81ad9ff814645b6feb3bbd"
@ -3399,7 +3416,6 @@ dependencies = [
"checksum custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ef8ae57c4978a2acd8b869ce6b9ca1dfe817bff704c220209fdef2c0b75a01b9" "checksum custom_derive 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "ef8ae57c4978a2acd8b869ce6b9ca1dfe817bff704c220209fdef2c0b75a01b9"
"checksum daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "271ec51b7e0bee92f0d04601422c73eb76ececf197026711c97ad25038a010cf" "checksum daemonize 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "271ec51b7e0bee92f0d04601422c73eb76ececf197026711c97ad25038a010cf"
"checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850" "checksum dbghelp-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97590ba53bcb8ac28279161ca943a924d1fd4a8fb3fa63302591647c4fc5b850"
"checksum deque 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "1614659040e711785ed8ea24219140654da1729f3ec8a47a9719d041112fe7bf"
"checksum difference 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b3304d19798a8e067e48d8e69b2c37f0b5e9b4e462504ad9e27e9f3fce02bba8" "checksum difference 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b3304d19798a8e067e48d8e69b2c37f0b5e9b4e462504ad9e27e9f3fce02bba8"
"checksum docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3b5b93718f8b3e5544fcc914c43de828ca6c6ace23e0332c6080a2977b49787a" "checksum docopt 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3b5b93718f8b3e5544fcc914c43de828ca6c6ace23e0332c6080a2977b49787a"
"checksum dtoa 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5edd69c67b2f8e0911629b7e6b8a34cb3956613cd7c6e6414966dee349c2db4f" "checksum dtoa 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5edd69c67b2f8e0911629b7e6b8a34cb3956613cd7c6e6414966dee349c2db4f"
@ -3524,8 +3540,9 @@ dependencies = [
"checksum quine-mc_cluskey 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6683b0e23d80813b1a535841f0048c1537d3f86d63c999e8373b39a9b0eb74a" "checksum quine-mc_cluskey 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a6683b0e23d80813b1a535841f0048c1537d3f86d63c999e8373b39a9b0eb74a"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
"checksum rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "2791d88c6defac799c3f20d74f094ca33b9332612d9aef9078519c82e4fe04a5" "checksum rand 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "2791d88c6defac799c3f20d74f094ca33b9332612d9aef9078519c82e4fe04a5"
"checksum rayon 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8c83adcb08e5b922e804fe1918142b422602ef11f2fd670b0b52218cb5984a20" "checksum rayon 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a77c51c07654ddd93f6cb543c7a849863b03abc7e82591afda6dc8ad4ac3ac4a"
"checksum rayon-core 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "767d91bacddf07d442fe39257bf04fd95897d1c47c545d009f6beb03efd038f8" "checksum rayon 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)" = "b614fe08b6665cb9a231d07ac1364b0ef3cb3698f1239ee0c4c3a88a524f54c8"
"checksum rayon-core 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7febc28567082c345f10cddc3612c6ea020fc3297a1977d472cf9fdb73e6e493"
"checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01" "checksum regex 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4278c17d0f6d62dfef0ab00028feb45bd7d2102843f80763474eeb1be8a10c01"
"checksum regex-syntax 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "841591b1e05609a643e3b4d0045fce04f701daba7151ddcd3ad47b080693d5a9" "checksum regex-syntax 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "841591b1e05609a643e3b4d0045fce04f701daba7151ddcd3ad47b080693d5a9"
"checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457" "checksum regex-syntax 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9191b1f57603095f105d317e375d19b1c9c5c3185ea9633a99a6dcbed04457"
@ -3544,6 +3561,7 @@ dependencies = [
"checksum rustc_version 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1e114e275f7c9b5d50bb52b28f9aac1921209f02aa6077c8b255e21eefaf8ffa" "checksum rustc_version 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1e114e275f7c9b5d50bb52b28f9aac1921209f02aa6077c8b255e21eefaf8ffa"
"checksum schannel 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4e45ac5e9e4698c1c138d2972bedcd90b81fe1efeba805449d2bdd54512de5f9" "checksum schannel 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "4e45ac5e9e4698c1c138d2972bedcd90b81fe1efeba805449d2bdd54512de5f9"
"checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d" "checksum scoped-tls 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f417c22df063e9450888a7561788e9bd46d3bb3c1466435b4eccb903807f147d"
"checksum scopeguard 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "c79eb2c3ac4bc2507cda80e7f3ac5b88bd8eae4c0914d5663e6a8933994be918"
"checksum secur32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f412dfa83308d893101dd59c10d6fda8283465976c28c287c5c855bf8d216bc" "checksum secur32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3f412dfa83308d893101dd59c10d6fda8283465976c28c287c5c855bf8d216bc"
"checksum security-framework 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "42ddf098d78d0b64564b23ee6345d07573e7d10e52ad86875d89ddf5f8378a02" "checksum security-framework 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "42ddf098d78d0b64564b23ee6345d07573e7d10e52ad86875d89ddf5f8378a02"
"checksum security-framework-sys 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "5bacdada57ea62022500c457c8571c17dfb5e6240b7c8eac5916ffa8c7138a55" "checksum security-framework-sys 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "5bacdada57ea62022500c457c8571c17dfb5e6240b7c8eac5916ffa8c7138a55"


@ -49,6 +49,7 @@ num = "0.1"
num_cpus = "1.2" num_cpus = "1.2"
parking_lot = "0.4" parking_lot = "0.4"
price-info = { path = "../price-info" } price-info = { path = "../price-info" }
rayon = "0.8"
rand = "0.3" rand = "0.3"
rlp = { path = "../util/rlp" } rlp = { path = "../util/rlp" }
rlp_derive = { path = "../util/rlp_derive" } rlp_derive = { path = "../util/rlp_derive" }


@ -44,6 +44,7 @@ use db::{self, Writable, Readable, CacheUpdatePolicy};
use cache_manager::CacheManager; use cache_manager::CacheManager;
use encoded; use encoded;
use engines::epoch::{Transition as EpochTransition, PendingTransition as PendingEpochTransition}; use engines::epoch::{Transition as EpochTransition, PendingTransition as PendingEpochTransition};
use rayon::prelude::*;
use ansi_term::Colour; use ansi_term::Colour;
const LOG_BLOOMS_LEVELS: usize = 3; const LOG_BLOOMS_LEVELS: usize = 3;
@ -152,7 +153,7 @@ pub trait BlockProvider {
/// Returns logs matching given filter. /// Returns logs matching given filter.
fn logs<F>(&self, blocks: Vec<BlockNumber>, matches: F, limit: Option<usize>) -> Vec<LocalizedLogEntry> fn logs<F>(&self, blocks: Vec<BlockNumber>, matches: F, limit: Option<usize>) -> Vec<LocalizedLogEntry>
where F: Fn(&LogEntry) -> bool, Self: Sized; where F: Fn(&LogEntry) -> bool + Send + Sync, Self: Sized;
} }
macro_rules! otry { macro_rules! otry {
@ -363,50 +364,56 @@ impl BlockProvider for BlockChain {
} }
fn logs<F>(&self, mut blocks: Vec<BlockNumber>, matches: F, limit: Option<usize>) -> Vec<LocalizedLogEntry> fn logs<F>(&self, mut blocks: Vec<BlockNumber>, matches: F, limit: Option<usize>) -> Vec<LocalizedLogEntry>
where F: Fn(&LogEntry) -> bool, Self: Sized { where F: Fn(&LogEntry) -> bool + Send + Sync, Self: Sized {
// sort in reverse order // sort in reverse order
blocks.sort_by(|a, b| b.cmp(a)); blocks.sort_by(|a, b| b.cmp(a));
let mut log_index = 0; let mut logs = blocks
let mut logs = blocks.into_iter() .chunks(128)
.filter_map(|number| self.block_hash(number).map(|hash| (number, hash))) .flat_map(move |blocks_chunk| {
.filter_map(|(number, hash)| self.block_receipts(&hash).map(|r| (number, hash, r.receipts))) blocks_chunk.into_par_iter()
.filter_map(|(number, hash, receipts)| self.block_body(&hash).map(|ref b| (number, hash, receipts, b.transaction_hashes()))) .filter_map(|number| self.block_hash(*number).map(|hash| (*number, hash)))
.flat_map(|(number, hash, mut receipts, mut hashes)| { .filter_map(|(number, hash)| self.block_receipts(&hash).map(|r| (number, hash, r.receipts)))
if receipts.len() != hashes.len() { .filter_map(|(number, hash, receipts)| self.block_body(&hash).map(|ref b| (number, hash, receipts, b.transaction_hashes())))
warn!("Block {} ({}) has different number of receipts ({}) to transactions ({}). Database corrupt?", number, hash, receipts.len(), hashes.len()); .flat_map(|(number, hash, mut receipts, mut hashes)| {
assert!(false); if receipts.len() != hashes.len() {
} warn!("Block {} ({}) has different number of receipts ({}) to transactions ({}). Database corrupt?", number, hash, receipts.len(), hashes.len());
log_index = receipts.iter().fold(0, |sum, receipt| sum + receipt.logs.len()); assert!(false);
}
let mut log_index = receipts.iter().fold(0, |sum, receipt| sum + receipt.logs.len());
let receipts_len = receipts.len(); let receipts_len = receipts.len();
hashes.reverse(); hashes.reverse();
receipts.reverse(); receipts.reverse();
receipts.into_iter() receipts.into_iter()
.map(|receipt| receipt.logs) .map(|receipt| receipt.logs)
.zip(hashes) .zip(hashes)
.enumerate()
.flat_map(move |(index, (mut logs, tx_hash))| {
let current_log_index = log_index;
let no_of_logs = logs.len();
log_index -= no_of_logs;
logs.reverse();
logs.into_iter()
.enumerate() .enumerate()
.map(move |(i, log)| LocalizedLogEntry { .flat_map(move |(index, (mut logs, tx_hash))| {
entry: log, let current_log_index = log_index;
block_hash: hash, let no_of_logs = logs.len();
block_number: number, log_index -= no_of_logs;
transaction_hash: tx_hash,
// iterating in reverse order logs.reverse();
transaction_index: receipts_len - index - 1, logs.into_iter()
transaction_log_index: no_of_logs - i - 1, .enumerate()
log_index: current_log_index - i - 1, .map(move |(i, log)| LocalizedLogEntry {
entry: log,
block_hash: hash,
block_number: number,
transaction_hash: tx_hash,
// iterating in reverse order
transaction_index: receipts_len - index - 1,
transaction_log_index: no_of_logs - i - 1,
log_index: current_log_index - i - 1,
})
}) })
.filter(|log_entry| matches(&log_entry.entry))
.take(limit.unwrap_or(::std::usize::MAX))
.collect::<Vec<_>>()
}) })
.collect::<Vec<_>>()
}) })
.filter(|log_entry| matches(&log_entry.entry))
.take(limit.unwrap_or(::std::usize::MAX)) .take(limit.unwrap_or(::std::usize::MAX))
.collect::<Vec<LocalizedLogEntry>>(); .collect::<Vec<LocalizedLogEntry>>();
logs.reverse(); logs.reverse();


@ -918,7 +918,7 @@ impl Client {
_ => {}, _ => {},
} }
let block_number = match self.block_number(id.clone()) { let block_number = match self.block_number(id) {
Some(num) => num, Some(num) => num,
None => return None, None => return None,
}; };
@ -1155,6 +1155,16 @@ impl Client {
(false, false) => call(state, env_info, engine, state_diff, t, TransactOptions::with_no_tracing()), (false, false) => call(state, env_info, engine, state_diff, t, TransactOptions::with_no_tracing()),
} }
} }
fn block_number_ref(&self, id: &BlockId) -> Option<BlockNumber> {
match *id {
BlockId::Number(number) => Some(number),
BlockId::Hash(ref hash) => self.chain.read().block_number(hash),
BlockId::Earliest => Some(0),
BlockId::Latest => Some(self.chain.read().best_block_number()),
BlockId::Pending => Some(self.chain.read().best_block_number() + 1),
}
}
} }
impl snapshot::DatabaseRestore for Client { impl snapshot::DatabaseRestore for Client {
@ -1364,13 +1374,7 @@ impl BlockChainClient for Client {
} }
fn block_number(&self, id: BlockId) -> Option<BlockNumber> { fn block_number(&self, id: BlockId) -> Option<BlockNumber> {
match id { self.block_number_ref(&id)
BlockId::Number(number) => Some(number),
BlockId::Hash(ref hash) => self.chain.read().block_number(hash),
BlockId::Earliest => Some(0),
BlockId::Latest => Some(self.chain.read().best_block_number()),
BlockId::Pending => Some(self.chain.read().best_block_number() + 1),
}
} }
fn block_body(&self, id: BlockId) -> Option<encoded::Body> { fn block_body(&self, id: BlockId) -> Option<encoded::Body> {
@ -1651,16 +1655,17 @@ impl BlockChainClient for Client {
self.engine.additional_params().into_iter().collect() self.engine.additional_params().into_iter().collect()
} }
fn blocks_with_bloom(&self, bloom: &H2048, from_block: BlockId, to_block: BlockId) -> Option<Vec<BlockNumber>> {
match (self.block_number(from_block), self.block_number(to_block)) {
(Some(from), Some(to)) => Some(self.chain.read().blocks_with_bloom(bloom, from, to)),
_ => None
}
}
fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry> { fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry> {
let (from, to) = match (self.block_number_ref(&filter.from_block), self.block_number_ref(&filter.to_block)) {
(Some(from), Some(to)) => (from, to),
_ => return Vec::new(),
};
let chain = self.chain.read();
let blocks = filter.bloom_possibilities().iter() let blocks = filter.bloom_possibilities().iter()
.filter_map(|bloom| self.blocks_with_bloom(bloom, filter.from_block.clone(), filter.to_block.clone())) .map(move |bloom| {
chain.blocks_with_bloom(bloom, from, to)
})
.flat_map(|m| m) .flat_map(|m| m)
// remove duplicate elements // remove duplicate elements
.collect::<HashSet<u64>>() .collect::<HashSet<u64>>()


@ -24,7 +24,7 @@ use itertools::Itertools;
use rustc_hex::FromHex; use rustc_hex::FromHex;
use hash::keccak; use hash::keccak;
use bigint::prelude::U256; use bigint::prelude::U256;
use bigint::hash::{H256, H2048}; use bigint::hash::H256;
use parking_lot::RwLock; use parking_lot::RwLock;
use util::*; use util::*;
use rlp::*; use rlp::*;
@ -508,10 +508,6 @@ impl BlockChainClient for TestBlockChainClient {
self.receipts.read().get(&id).cloned() self.receipts.read().get(&id).cloned()
} }
fn blocks_with_bloom(&self, _bloom: &H2048, _from_block: BlockId, _to_block: BlockId) -> Option<Vec<BlockNumber>> {
unimplemented!();
}
fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry> { fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry> {
let mut logs = self.logs.read().clone(); let mut logs = self.logs.read().clone();
let len = logs.len(); let len = logs.len();


@ -35,7 +35,7 @@ use transaction::{LocalizedTransaction, PendingTransaction, SignedTransaction};
use verification::queue::QueueInfo as BlockQueueInfo; use verification::queue::QueueInfo as BlockQueueInfo;
use bigint::prelude::U256; use bigint::prelude::U256;
use bigint::hash::{H256, H2048}; use bigint::hash::H256;
use util::{Address, Bytes}; use util::{Address, Bytes};
use util::hashdb::DBValue; use util::hashdb::DBValue;
@ -181,9 +181,6 @@ pub trait BlockChainClient : Sync + Send {
/// Get the best block header. /// Get the best block header.
fn best_block_header(&self) -> encoded::Header; fn best_block_header(&self) -> encoded::Header;
/// Returns numbers of blocks containing given bloom.
fn blocks_with_bloom(&self, bloom: &H2048, from_block: BlockId, to_block: BlockId) -> Option<Vec<BlockNumber>>;
/// Returns logs matching given filter. /// Returns logs matching given filter.
fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry>; fn logs(&self, filter: Filter) -> Vec<LocalizedLogEntry>;


@ -102,6 +102,7 @@ extern crate num;
extern crate parking_lot; extern crate parking_lot;
extern crate price_info; extern crate price_info;
extern crate rand; extern crate rand;
extern crate rayon;
extern crate rlp; extern crate rlp;
extern crate hash; extern crate hash;
extern crate heapsize; extern crate heapsize;


@ -392,14 +392,13 @@ mod tests {
self.numbers.get(&index).cloned() self.numbers.get(&index).cloned()
} }
fn blocks_with_bloom(&self, _bloom: &H2048, _from_block: BlockNumber, _to_block: BlockNumber) -> Vec<BlockNumber> {
unimplemented!()
}
fn block_receipts(&self, _hash: &H256) -> Option<BlockReceipts> { fn block_receipts(&self, _hash: &H256) -> Option<BlockReceipts> {
unimplemented!() unimplemented!()
} }
fn blocks_with_bloom(&self, _bloom: &H2048, _from_block: BlockNumber, _to_block: BlockNumber) -> Vec<BlockNumber> {
unimplemented!()
}
fn logs<F>(&self, _blocks: Vec<BlockNumber>, _matches: F, _limit: Option<usize>) -> Vec<LocalizedLogEntry> fn logs<F>(&self, _blocks: Vec<BlockNumber>, _matches: F, _limit: Option<usize>) -> Vec<LocalizedLogEntry>
where F: Fn(&LogEntry) -> bool, Self: Sized { where F: Fn(&LogEntry) -> bool, Self: Sized {


@ -71,10 +71,15 @@ export default class Api extends EventEmitter {
} }
} }
get isPubSub () {
return !!this._pubsub;
}
get pubsub () { get pubsub () {
if (!this._pubsub) { if (!this.isPubSub) {
throw Error('Pubsub is only available with a subscribing-supported transport injected!'); throw Error('Pubsub is only available with a subscribing-supported transport injected!');
} }
return this._pubsub; return this._pubsub;
} }


@ -25,7 +25,7 @@ export default class Eth extends PubsubBase {
} }
newHeads (callback) { newHeads (callback) {
return this.addListener('eth', 'newHeads', callback); return this.addListener('eth', 'newHeads', callback, null);
} }
logs (callback) { logs (callback) {


@ -267,7 +267,7 @@ export default class Parity extends PubsubBase {
// parity accounts API (only secure API or configured to be exposed) // parity accounts API (only secure API or configured to be exposed)
allAccountsInfo (callback) { allAccountsInfo (callback) {
return this._addListener(this._api, 'parity_allAccountsInfo', (error, data) => { return this.addListener(this._api, 'parity_allAccountsInfo', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outAccountInfo(data)); : callback(null, outAccountInfo(data));
@ -275,7 +275,7 @@ export default class Parity extends PubsubBase {
} }
getDappAddresses (callback, dappId) { getDappAddresses (callback, dappId) {
return this._addListener(this._api, 'parity_getDappAddresses', (error, data) => { return this.addListener(this._api, 'parity_getDappAddresses', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outAddresses(data)); : callback(null, outAddresses(data));
@ -283,7 +283,7 @@ export default class Parity extends PubsubBase {
} }
getDappDefaultAddress (callback, dappId) { getDappDefaultAddress (callback, dappId) {
return this._addListener(this._api, 'parity_getDappDefaultAddress', (error, data) => { return this.addListener(this._api, 'parity_getDappDefaultAddress', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outAddress(data)); : callback(null, outAddress(data));
@ -291,7 +291,7 @@ export default class Parity extends PubsubBase {
} }
getNewDappsAddresses (callback) { getNewDappsAddresses (callback) {
return this._addListener(this._api, 'parity_getDappDefaultAddress', (error, addresses) => { return this.addListener(this._api, 'parity_getDappDefaultAddress', (error, addresses) => {
error error
? callback(error) ? callback(error)
: callback(null, addresses ? addresses.map(outAddress) : null); : callback(null, addresses ? addresses.map(outAddress) : null);
@ -299,7 +299,7 @@ export default class Parity extends PubsubBase {
} }
getNewDappsDefaultAddress (callback) { getNewDappsDefaultAddress (callback) {
return this._addListener(this._api, 'parity_getNewDappsDefaultAddress', (error, data) => { return this.addListener(this._api, 'parity_getNewDappsDefaultAddress', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outAddress(data)); : callback(null, outAddress(data));
@ -307,7 +307,7 @@ export default class Parity extends PubsubBase {
} }
listRecentDapps (callback) { listRecentDapps (callback) {
return this._addListener(this._api, 'parity_listRecentDapps', (error, data) => { return this.addListener(this._api, 'parity_listRecentDapps', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outRecentDapps(data)); : callback(null, outRecentDapps(data));
@ -315,7 +315,7 @@ export default class Parity extends PubsubBase {
} }
listGethAccounts (callback) { listGethAccounts (callback) {
return this._addListener(this._api, 'parity_listGethAccounts', (error, data) => { return this.addListener(this._api, 'parity_listGethAccounts', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outAddresses(data)); : callback(null, outAddresses(data));
@ -323,15 +323,15 @@ export default class Parity extends PubsubBase {
} }
listVaults (callback) { listVaults (callback) {
return this._addListener(this._api, 'parity_listVaults', callback); return this.addListener(this._api, 'parity_listVaults', callback);
} }
listOpenedVaults (callback) { listOpenedVaults (callback) {
return this._addListener(this._api, 'parity_listOpenedVaults', callback); return this.addListener(this._api, 'parity_listOpenedVaults', callback);
} }
getVaultMeta (callback, vaultName) { getVaultMeta (callback, vaultName) {
return this._addListener(this._api, 'parity_getVaultMeta', (error, data) => { return this.addListener(this._api, 'parity_getVaultMeta', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outVaultMeta(data)); : callback(null, outVaultMeta(data));
@ -339,7 +339,7 @@ export default class Parity extends PubsubBase {
} }
deriveAddressHash (callback, address, password, hash, shouldSave) { deriveAddressHash (callback, address, password, hash, shouldSave) {
return this._addListener(this._api, 'parity_deriveAddressHash', (error, data) => { return this.addListener(this._api, 'parity_deriveAddressHash', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outAddress(data)); : callback(null, outAddress(data));
@ -347,10 +347,18 @@ export default class Parity extends PubsubBase {
} }
deriveAddressIndex (callback, address, password, index, shouldSave) { deriveAddressIndex (callback, address, password, index, shouldSave) {
return this._addListener(this._api, 'parity_deriveAddressIndex', (error, data) => { return this.addListener(this._api, 'parity_deriveAddressIndex', (error, data) => {
error error
? callback(error) ? callback(error)
: callback(null, outAddress(data)); : callback(null, outAddress(data));
}, [inAddress(address), password, inDeriveIndex(index), !!shouldSave]); }, [inAddress(address), password, inDeriveIndex(index), !!shouldSave]);
} }
nodeHealth (callback) {
return this.addListener(this._api, 'parity_nodeHealth', (error, data) => {
error
? callback(error)
: callback(null, data);
});
}
} }
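For context, a minimal consumer-side sketch of the renamed addListener-based bindings and the new nodeHealth subscription; the connected api instance is an assumption, the method names come from the class above.

  api.pubsub.parity
    .nodeHealth((error, health) => {
      if (error) {
        return console.warn('parity_nodeHealth', error);
      }
      console.log('node health updated', health);
    })
    .then((subscriptionId) => {
      // keep the id around for a later api.pubsub.unsubscribe([subscriptionId])
    });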


@ -16,6 +16,7 @@
import Eth from './eth'; import Eth from './eth';
import Parity from './parity'; import Parity from './parity';
import Signer from './signer';
import Net from './net'; import Net from './net';
import { isFunction } from '../util/types'; import { isFunction } from '../util/types';
@ -29,6 +30,7 @@ export default class Pubsub {
this._eth = new Eth(transport); this._eth = new Eth(transport);
this._net = new Net(transport); this._net = new Net(transport);
this._parity = new Parity(transport); this._parity = new Parity(transport);
this._signer = new Signer(transport);
} }
get net () { get net () {
@ -43,8 +45,35 @@ export default class Pubsub {
return this._parity; return this._parity;
} }
get signer () {
return this._signer;
}
unsubscribe (subscriptionIds) { unsubscribe (subscriptionIds) {
// subscriptions are namespace independent. Thus we can simply removeListener from any. // subscriptions are namespace independent. Thus we can simply removeListener from any.
return this._parity.removeListener(subscriptionIds); return this._parity.removeListener(subscriptionIds);
} }
subscribeAndGetResult (f, callback) {
return new Promise((resolve, reject) => {
let isFirst = true;
let onSubscription = (error, data) => {
const p1 = error ? Promise.reject(error) : Promise.resolve(data);
const p2 = p1.then(callback);
if (isFirst) {
isFirst = false;
p2
.then(resolve)
.catch(reject);
}
};
try {
f.call(this, onSubscription).catch(reject);
} catch (err) {
reject(err);
}
});
}
} }
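A rough usage sketch of the new subscribeAndGetResult helper: the returned promise resolves with the first value delivered by the underlying subscription (or rejects on the first error), while the callback keeps firing for every later update. The api instance here is assumed; the parity.defaultAccount subscription mirrors how the personal provider uses the helper further down.

  api.pubsub
    .subscribeAndGetResult(
      (callback) => api.pubsub.parity.defaultAccount(callback),
      (defaultAccount) => {
        console.log('defaultAccount changed to', defaultAccount);
        return defaultAccount;
      }
    )
    .then((firstValue) => {
      // resolves exactly once, with the first value received
      console.log('initial defaultAccount', firstValue);
    });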


@ -20,11 +20,12 @@ export default class PubsubBase {
this._transport = transport; this._transport = transport;
} }
addListener (module, eventName, callback, eventParams) { addListener (module, eventName, callback, eventParams = []) {
return eventParams if (eventName) {
? this._transport.subscribe(module, callback, eventName, eventParams) return this._transport.subscribe(module, callback, eventParams ? [eventName, eventParams] : [eventName]);
: this._transport.subscribe(module, callback, eventName, []); }
// this._transport.subscribe(module, callback, eventName); After Patch from tomac is merged to master! => eth_subscribe does not support empty array as params
return this._transport.subscribe(module, callback, eventParams);
} }
removeListener (subscriptionIds) { removeListener (subscriptionIds) {


@ -0,0 +1,16 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
export default from './signer';


@ -0,0 +1,37 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import PubsubBase from '../pubsubBase';
import { outSignerRequest } from '../../format/output';
export default class Net extends PubsubBase {
constructor (transport) {
super(transport);
this._api = {
subscribe: 'signer_subscribePending',
unsubscribe: 'signer_unsubscribePending',
subscription: 'signer_pending'
};
}
pendingRequests (callback) {
return this.addListener(this._api, null, (error, data) => {
error
? callback(error)
: callback(null, data.map(outSignerRequest));
});
}
}
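A sketch of listening for pending signer requests through the new signer pubsub namespace; it assumes the transport carries a signer token so that signer_subscribePending is allowed.

  api.pubsub.signer
    .pendingRequests((error, requests) => {
      if (error) {
        return console.warn('signer_subscribePending', error);
      }
      console.log(requests.length, 'request(s) waiting for confirmation');
    });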


@ -44,6 +44,15 @@ export default class Parity {
.execute('parity_addReservedPeer', enode); .execute('parity_addReservedPeer', enode);
} }
call (requests, blockNumber = 'latest') {
return this._transport
.execute(
'parity_call',
requests.map((options) => inOptions(options)),
inBlockNumber(blockNumber)
);
}
chainStatus () { chainStatus () {
return this._transport return this._transport
.execute('parity_chainStatus') .execute('parity_chainStatus')
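The new parity_call binding batches several read-only calls into one RPC round-trip, which cuts request overhead when many contracts are queried. A sketch, assuming the node returns one hex-encoded result per request in the same order; the address and call data below are placeholders.

  const token = '0x...';        // placeholder: token contract address
  const callData = '0x...';     // placeholder: ABI-encoded balanceOf(holder) call

  api.parity
    .call([
      { to: token, data: callData },
      { to: token, data: callData }
    ], 'latest')
    .then((results) => {
      // results[i] corresponds to requests[i]
      console.log(results);
    });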


@ -24,6 +24,13 @@ export default class Eth {
this._lastBlock = new BigNumber(-1); this._lastBlock = new BigNumber(-1);
this._pollTimerId = null; this._pollTimerId = null;
// Try to restart subscription if transport is closed
this._api.transport.on('close', () => {
if (this.isStarted) {
this.start();
}
});
} }
get isStarted () { get isStarted () {
@ -33,31 +40,56 @@ export default class Eth {
start () { start () {
this._started = true; this._started = true;
return this._blockNumber(); if (this._api.isPubSub) {
return Promise.all([
this._pollBlockNumber(false),
this._api.pubsub
.subscribeAndGetResult(
callback => this._api.pubsub.eth.newHeads(callback),
() => {
return this._api.eth
.blockNumber()
.then(blockNumber => {
this.updateBlock(blockNumber);
return blockNumber;
});
}
)
]);
}
// fallback to polling
return this._pollBlockNumber(true);
} }
_blockNumber = () => { _pollBlockNumber = (doTimeout) => {
const nextTimeout = (timeout = 1000) => { const nextTimeout = (timeout = 1000, forceTimeout = doTimeout) => {
this._pollTimerId = setTimeout(() => { if (forceTimeout) {
this._blockNumber(); this._pollTimerId = setTimeout(() => {
}, timeout); this._pollBlockNumber(doTimeout);
}, timeout);
}
}; };
if (!this._api.transport.isConnected) { if (!this._api.transport.isConnected) {
nextTimeout(500); nextTimeout(500, true);
return; return;
} }
return this._api.eth return this._api.eth
.blockNumber() .blockNumber()
.then((blockNumber) => { .then((blockNumber) => {
if (!blockNumber.eq(this._lastBlock)) { this.updateBlock(blockNumber);
this._lastBlock = blockNumber;
this._updateSubscriptions('eth_blockNumber', null, blockNumber);
}
nextTimeout(); nextTimeout();
}) })
.catch(() => nextTimeout()); .catch(() => nextTimeout());
} }
updateBlock (blockNumber) {
if (!blockNumber.eq(this._lastBlock)) {
this._lastBlock = blockNumber;
this._updateSubscriptions('eth_blockNumber', null, blockNumber);
}
}
} }
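Consumers are unaffected by whether updates arrive over the newHeads pubsub channel or the polling fallback; they keep subscribing through the high-level manager and receive a BigNumber for each new best block. A minimal sketch, with the api instance assumed:

  api.subscribe('eth_blockNumber', (error, blockNumber) => {
    if (error) {
      return;
    }
    console.log('best block:', blockNumber.toFormat());
  });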


@ -29,7 +29,8 @@ function stubApi (blockNumber) {
return { return {
_calls, _calls,
transport: { transport: {
isConnected: true isConnected: true,
on: () => {}
}, },
eth: { eth: {
blockNumber: () => { blockNumber: () => {


@ -23,6 +23,13 @@ export default class Personal {
this._lastDefaultAccount = '0x0'; this._lastDefaultAccount = '0x0';
this._pollTimerId = null; this._pollTimerId = null;
// Try to restart subscription if transport is closed
this._api.transport.on('close', () => {
if (this.isStarted) {
this.start();
}
});
} }
get isStarted () { get isStarted () {
@ -32,20 +39,42 @@ export default class Personal {
start () { start () {
this._started = true; this._started = true;
let defaultAccount = null;
if (this._api.isPubSub) {
defaultAccount = this._api.pubsub
.subscribeAndGetResult(
callback => this._api.pubsub.parity.defaultAccount(callback),
(defaultAccount) => {
this.updateDefaultAccount(defaultAccount);
return defaultAccount;
}
);
} else {
defaultAccount = this._defaultAccount();
}
return Promise.all([ return Promise.all([
this._defaultAccount(), defaultAccount,
this._listAccounts(), this._listAccounts(),
this._accountsInfo(), this._accountsInfo(),
this._loggingSubscribe() this._loggingSubscribe()
]); ]);
} }
updateDefaultAccount (defaultAccount) {
if (this._lastDefaultAccount !== defaultAccount) {
this._lastDefaultAccount = defaultAccount;
this._updateSubscriptions('parity_defaultAccount', null, defaultAccount);
}
}
// FIXME: Because of the different API instances, the "wait for valid changes" approach // FIXME: Because of the different API instances, the "wait for valid changes" approach
// doesn't work. Since the defaultAccount is critical to operation, we poll in exactly // doesn't work. Since the defaultAccount is critical to operation, we poll in exactly
// same way we do in ../eth (ala eth_blockNumber) and update. This should be moved // same way we do in ../eth (ala eth_blockNumber) and update. This should be moved
// to pub-sub as it becomes available // to pub-sub as it becomes available
_defaultAccount = (timerDisabled = false) => { _defaultAccount = (timerDisabled = false) => {
const nextTimeout = (timeout = 1000) => { const nextTimeout = (timeout = 3000) => {
if (!timerDisabled) { if (!timerDisabled) {
this._pollTimerId = setTimeout(() => { this._pollTimerId = setTimeout(() => {
this._defaultAccount(); this._defaultAccount();
@ -61,11 +90,7 @@ export default class Personal {
return this._api.parity return this._api.parity
.defaultAccount() .defaultAccount()
.then((defaultAccount) => { .then((defaultAccount) => {
if (this._lastDefaultAccount !== defaultAccount) { this.updateDefaultAccount(defaultAccount);
this._lastDefaultAccount = defaultAccount;
this._updateSubscriptions('parity_defaultAccount', null, defaultAccount);
}
nextTimeout(); nextTimeout();
}) })
.catch(() => nextTimeout()); .catch(() => nextTimeout());


@ -22,6 +22,13 @@ export default class Signer {
this._api = api; this._api = api;
this._updateSubscriptions = updateSubscriptions; this._updateSubscriptions = updateSubscriptions;
this._started = false; this._started = false;
// Try to restart subscription if transport is closed
this._api.transport.on('close', () => {
if (this.isStarted) {
this.start();
}
});
} }
get isStarted () { get isStarted () {
@ -31,30 +38,50 @@ export default class Signer {
start () { start () {
this._started = true; this._started = true;
if (this._api.isPubSub) {
const subscription = this._api.pubsub
.subscribeAndGetResult(
callback => this._api.pubsub.signer.pendingRequests(callback),
requests => {
this.updateSubscriptions(requests);
return requests;
}
);
return Promise.all([
this._listRequests(false),
subscription
]);
}
return Promise.all([ return Promise.all([
this._listRequests(true), this._listRequests(true),
this._loggingSubscribe() this._loggingSubscribe()
]); ]);
} }
updateSubscriptions (requests) {
return this._updateSubscriptions('signer_requestsToConfirm', null, requests);
}
_listRequests = (doTimeout) => { _listRequests = (doTimeout) => {
const nextTimeout = (timeout = 1000) => { const nextTimeout = (timeout = 1000, forceTimeout = doTimeout) => {
if (doTimeout) { if (forceTimeout) {
setTimeout(() => { setTimeout(() => {
this._listRequests(true); this._listRequests(doTimeout);
}, timeout); }, timeout);
} }
}; };
if (!this._api.transport.isConnected) { if (!this._api.transport.isConnected) {
nextTimeout(500); nextTimeout(500, true);
return; return;
} }
return this._api.signer return this._api.signer
.requestsToConfirm() .requestsToConfirm()
.then((requests) => { .then((requests) => {
this._updateSubscriptions('signer_requestsToConfirm', null, requests); this.updateSubscriptions(requests);
nextTimeout(); nextTimeout();
}) })
.catch(() => nextTimeout()); .catch(() => nextTimeout());


@ -15,7 +15,11 @@
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
import EventEmitter from 'eventemitter3'; import EventEmitter from 'eventemitter3';
import { Logging } from '../subscriptions'; import { Logging } from '../subscriptions';
import logger from './logger';
const LOGGER_ENABLED = process.env.NODE_ENV !== 'production';
export default class JsonRpcBase extends EventEmitter { export default class JsonRpcBase extends EventEmitter {
constructor () { constructor () {
@ -75,6 +79,14 @@ export default class JsonRpcBase extends EventEmitter {
} }
execute (method, ...params) { execute (method, ...params) {
let start;
let logId;
if (LOGGER_ENABLED) {
start = Date.now();
logId = logger.log({ method, params });
}
return this._middlewareList.then((middlewareList) => { return this._middlewareList.then((middlewareList) => {
for (const middleware of middlewareList) { for (const middleware of middlewareList) {
const res = middleware.handle(method, params); const res = middleware.handle(method, params);
@ -93,7 +105,18 @@ export default class JsonRpcBase extends EventEmitter {
} }
} }
return this._execute(method, params); const result = this._execute(method, params);
if (!LOGGER_ENABLED) {
return result;
}
return result
.then((result) => {
logger.set(logId, { result, time: Date.now() - start });
return result;
});
}); });
} }


@ -0,0 +1,150 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import MethodDecodingStore from '~/ui/MethodDecoding/methodDecodingStore';
const LOGGER_ENABLED = process.env.NODE_ENV !== 'production';
let logger;
if (LOGGER_ENABLED) {
class Logger {
_logs = {};
_id = 0;
log ({ method, params }) {
const logId = this._id++;
this._logs[logId] = { method, params, date: Date.now() };
return logId;
}
set (logId, data) {
this._logs[logId] = {
...this._logs[logId],
...data
};
}
static sorter (logA, logB) {
return logA.date - logB.date;
}
get calls () {
const calls = this.methods['eth_call'] || [];
const decoding = MethodDecodingStore.get(window.secureApi);
const contracts = {};
const progress = Math.round(calls.length / 20);
return calls
.reduce((promise, call, index) => {
const { data, to } = call.params[0];
contracts[to] = contracts[to] || [];
return promise
.then(() => decoding.lookup(null, { data, to }))
.then((lookup) => {
if (!lookup.name) {
contracts[to].push(data);
return;
}
const inputs = lookup.inputs.map((input) => {
if (/bytes/.test(input.type)) {
return '0x' + input.value.map((v) => v.toString(16).padStart(2, 0)).join('');
}
return input.value;
});
const called = `${lookup.name}(${inputs.join(', ')})`;
contracts[to].push(called);
if (index % progress === 0) {
console.warn(`progress: ${Math.round(100 * index / calls.length)}%`);
}
});
}, Promise.resolve())
.then(() => {
return Object.keys(contracts)
.map((address) => {
const count = contracts[address].length;
return {
count,
calls: contracts[address],
to: address
};
})
.sort((cA, cB) => cB.count - cA.count);
});
}
get logs () {
return Object.values(this._logs).sort(Logger.sorter);
}
get methods () {
return this.logs.reduce((methods, log) => {
methods[log.method] = methods[log.method] || [];
methods[log.method].push(log);
return methods;
}, {});
}
get stats () {
const logs = this.logs;
const methods = this.methods;
const start = logs[0].date;
const end = logs[logs.length - 1].date;
// Duration in seconds
const duration = (end - start) / 1000;
const speed = logs.length / duration;
const sortedMethods = Object.keys(methods)
.map((method) => {
const methodLogs = methods[method].sort(Logger.sorter);
const methodSpeed = methodLogs.length / duration;
return {
speed: methodSpeed,
count: methodLogs.length,
logs: methodLogs,
method
};
})
.sort((mA, mB) => mB.count - mA.count);
return {
methods: sortedMethods,
speed
};
}
}
logger = new Logger();
if (window) {
window._logger = logger;
}
}
export default logger;
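Since the logger is only created outside production builds and is attached to window._logger, its aggregates can be read straight from the browser console. A sketch of that, assuming a development build is loaded:

  const { speed, methods } = window._logger.stats;

  console.log(speed.toFixed(1), 'RPC calls per second');
  methods
    .slice(0, 5)
    .forEach(({ method, count }) => console.log(method, count));

  // decoded eth_call breakdown grouped by target contract (returns a Promise)
  window._logger.calls.then((contracts) => console.table(contracts));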


@ -29,7 +29,7 @@ export default class Ws extends JsonRpcBase {
this._url = url; this._url = url;
this._token = token; this._token = token;
this._messages = {}; this._messages = {};
this._subscriptions = { 'eth_subscription': [], 'parity_subscription': [], 'shh_subscription': [] }; this._subscriptions = {};
this._sessionHash = null; this._sessionHash = null;
this._connecting = false; this._connecting = false;
@ -209,6 +209,7 @@ export default class Ws extends JsonRpcBase {
// initial pubsub ACK // initial pubsub ACK
if (id && msg.subscription) { if (id && msg.subscription) {
// save subscription to map subId -> messageId // save subscription to map subId -> messageId
this._subscriptions[msg.subscription] = this._subscriptions[msg.subscription] || {};
this._subscriptions[msg.subscription][res] = id; this._subscriptions[msg.subscription][res] = id;
// resolve promise with messageId because subId's can collide (eth/parity) // resolve promise with messageId because subId's can collide (eth/parity)
msg.resolve(id); msg.resolve(id);
@ -223,7 +224,7 @@ export default class Ws extends JsonRpcBase {
} }
// pubsub format // pubsub format
if (method.includes('subscription')) { if (this._subscriptions[method]) {
const messageId = this._messages[this._subscriptions[method][params.subscription]]; const messageId = this._messages[this._subscriptions[method][params.subscription]];
if (messageId) { if (messageId) {
@ -302,6 +303,16 @@ export default class Ws extends JsonRpcBase {
} }
_methodsFromApi (api) { _methodsFromApi (api) {
if (api.subscription) {
const { subscribe, unsubscribe, subscription } = api;
return {
method: subscribe,
uMethod: unsubscribe,
subscription
};
}
const method = `${api}_subscribe`; const method = `${api}_subscribe`;
const uMethod = `${api}_unsubscribe`; const uMethod = `${api}_unsubscribe`;
const subscription = `${api}_subscription`; const subscription = `${api}_subscription`;
@ -309,7 +320,7 @@ export default class Ws extends JsonRpcBase {
return { method, uMethod, subscription }; return { method, uMethod, subscription };
} }
subscribe (api, callback, ...params) { subscribe (api, callback, params) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const id = this.id; const id = this.id;
const { method, uMethod, subscription } = this._methodsFromApi(api); const { method, uMethod, subscription } = this._methodsFromApi(api);


@ -36,7 +36,6 @@ import muiTheme from '~/ui/Theme';
import MainApplication from './main'; import MainApplication from './main';
import { loadSender, patchApi } from '~/util/tx'; import { loadSender, patchApi } from '~/util/tx';
import { setApi } from '~/redux/providers/apiActions';
import './environment'; import './environment';
@ -69,9 +68,6 @@ ContractInstances.create(api);
const store = initStore(api, hashHistory); const store = initStore(api, hashHistory);
store.dispatch({ type: 'initAll', api });
store.dispatch(setApi(api));
window.secureApi = api; window.secureApi = api;
ReactDOM.render( ReactDOM.render(


@ -31,6 +31,10 @@ export default class HardwareStore {
this._pollId = null; this._pollId = null;
this._pollScan(); this._pollScan();
this._subscribeParity();
this._api.transport.on('close', () => {
this._subscribeParity();
});
} }
isConnected (address) { isConnected (address) {
@ -78,26 +82,30 @@ export default class HardwareStore {
}); });
} }
scanParity () { _subscribeParity () {
return this._api.parity const onError = error => {
.hardwareAccountsInfo() console.warn('HardwareStore::scanParity', error);
.then((hwInfo) => {
Object
.keys(hwInfo)
.forEach((address) => {
const info = hwInfo[address];
info.address = address; return {};
info.via = 'parity'; };
});
return hwInfo; return this._api.pubsub
}) .subscribeAndGetResult(
.catch((error) => { callback => this._api.pubsub.parity.hardwareAccountsInfo(callback),
console.warn('HardwareStore::scanParity', error); hwInfo => {
Object
.keys(hwInfo)
.forEach((address) => {
const info = hwInfo[address];
return {}; info.address = address;
}); info.via = 'parity';
});
this.setWallets(hwInfo);
return hwInfo;
},
onError
).catch(onError);
} }
scan () { scan () {
@ -107,14 +115,10 @@ export default class HardwareStore {
// is done, different results will be retrieved via Parity vs. the browser APIs // is done, different results will be retrieved via Parity vs. the browser APIs
// (latter is Chrome-only, needs the browser app enabled on a Ledger, former is // (latter is Chrome-only, needs the browser app enabled on a Ledger, former is
// not intended as a network call, i.e. hw wallet is with the user) // not intended as a network call, i.e. hw wallet is with the user)
return Promise return this.scanLedger()
.all([ .then((ledgerAccounts) => {
this.scanParity(),
this.scanLedger()
])
.then(([hwAccounts, ledgerAccounts]) => {
transaction(() => { transaction(() => {
this.setWallets(Object.assign({}, hwAccounts, ledgerAccounts)); this.setWallets(Object.assign({}, ledgerAccounts));
this.setScanning(false); this.setScanning(false);
}); });
}); });


@ -133,8 +133,8 @@ export default class TransferStore {
} }
@action handleClose = () => { @action handleClose = () => {
this.stage = 0;
this.onClose(); this.onClose();
this.stage = 0;
} }
@action onUpdateDetails = (type, value) => { @action onUpdateDetails = (type, value) => {
@ -169,7 +169,6 @@ export default class TransferStore {
} }
@action onSend = () => { @action onSend = () => {
this.onNext();
this.sending = true; this.sending = true;
this this


@ -192,7 +192,7 @@ class Transfer extends Component {
renderDialogActions () { renderDialogActions () {
const { account } = this.props; const { account } = this.props;
const { extras, sending, stage } = this.store; const { extras, sending, stage, isValid } = this.store;
const cancelBtn = ( const cancelBtn = (
<Button <Button
@ -236,7 +236,7 @@ class Transfer extends Component {
); );
const sendBtn = ( const sendBtn = (
<Button <Button
disabled={ !this.store.isValid || sending } disabled={ !isValid || sending }
icon={ icon={
<IdentityIcon <IdentityIcon
address={ account.address } address={ account.address }


@ -16,7 +16,7 @@
import { handleActions } from 'redux-actions'; import { handleActions } from 'redux-actions';
const initialState = {}; const initialState = null;
export default handleActions({ export default handleActions({
setApi (state, action) { setApi (state, action) {


@ -16,12 +16,11 @@
import { throttle } from 'lodash'; import { throttle } from 'lodash';
import { fetchBalances, fetchTokensBalances, queryTokensFilter } from './balancesActions'; import { LOG_KEYS, getLogger } from '~/config';
import { loadTokens, fetchTokens } from './tokensActions';
import { padRight } from '~/api/util/format';
import Contracts from '~/contracts'; import { fetchBalances, queryTokensFilter, updateTokensFilter } from './balancesActions';
const log = getLogger(LOG_KEYS.Balances);
let instance = null; let instance = null;
export default class Balances { export default class Balances {
@ -29,40 +28,20 @@ export default class Balances {
this._api = api; this._api = api;
this._store = store; this._store = store;
this._tokenreg = null; this._apiSubs = [];
this._tokenregSID = null;
this._tokenMetaSID = null;
this._blockNumberSID = null; // Throttled `_fetchEthBalances` function
this._accountsInfoSID = null;
// Throtthled load tokens (no more than once
// every minute)
this.loadTokens = throttle(
this._loadTokens,
60 * 1000,
{ leading: true, trailing: true }
);
// Throttled `_fetchBalances` function
// that gets called max once every 40s // that gets called max once every 40s
this.longThrottledFetch = throttle( this.longThrottledFetch = throttle(
this._fetchBalances, this._fetchEthBalances,
40 * 1000, 40 * 1000,
{ leading: false, trailing: true } { leading: true, trailing: false }
); );
this.shortThrottledFetch = throttle( this.shortThrottledFetch = throttle(
this._fetchBalances, this._fetchEthBalances,
2 * 1000, 2 * 1000,
{ leading: false, trailing: true } { leading: true, trailing: false }
);
// Fetch all tokens every 2 minutes
this.throttledTokensFetch = throttle(
this._fetchTokens,
2 * 60 * 1000,
{ leading: false, trailing: true }
); );
// Unsubscribe previous instance if it exists // Unsubscribe previous instance if it exists
@ -71,17 +50,19 @@ export default class Balances {
} }
} }
static get (store = {}) { static get (store) {
if (!instance && store) { if (!instance && store) {
const { api } = store.getState(); return Balances.init(store);
} else if (!instance) {
return Balances.instantiate(store, api); throw new Error('The Balances Provider has not been initialized yet');
} }
return instance; return instance;
} }
static instantiate (store, api) { static init (store) {
const { api } = store.getState();
if (!instance) { if (!instance) {
instance = new Balances(store, api); instance = new Balances(store, api);
} }
@ -91,15 +72,13 @@ export default class Balances {
static start () { static start () {
if (!instance) { if (!instance) {
return Promise.reject('BalancesProvider has not been intiated yet'); return Promise.reject('BalancesProvider has not been initiated yet');
} }
const self = instance; const self = instance;
// Unsubscribe from previous subscriptions // Unsubscribe from previous subscriptions
return Balances return Balances.stop()
.stop()
.then(() => self.loadTokens())
.then(() => { .then(() => {
const promises = [ const promises = [
self.subscribeBlockNumber(), self.subscribeBlockNumber(),
@ -107,7 +86,8 @@ export default class Balances {
]; ];
return Promise.all(promises); return Promise.all(promises);
}); })
.then(() => self.fetchEthBalances());
} }
static stop () { static stop () {
@ -116,71 +96,35 @@ export default class Balances {
} }
const self = instance; const self = instance;
const promises = []; const promises = self._apiSubs.map((subId) => self._api.unsubscribe(subId));
if (self._blockNumberSID) { return Promise.all(promises)
const p = self._api .then(() => {
.unsubscribe(self._blockNumberSID) self._apiSubs = [];
.then(() => { });
self._blockNumberSID = null;
});
promises.push(p);
}
if (self._accountsInfoSID) {
const p = self._api
.unsubscribe(self._accountsInfoSID)
.then(() => {
self._accountsInfoSID = null;
});
promises.push(p);
}
// Unsubscribe without adding the promises
// to the result, since it would have to wait for a
// reconnection to resolve if the Node is disconnected
if (self._tokenreg) {
if (self._tokenregSID) {
const tokenregSID = self._tokenregSID;
self._tokenreg
.unsubscribe(tokenregSID)
.then(() => {
if (self._tokenregSID === tokenregSID) {
self._tokenregSID = null;
}
});
}
if (self._tokenMetaSID) {
const tokenMetaSID = self._tokenMetaSID;
self._tokenreg
.unsubscribe(tokenMetaSID)
.then(() => {
if (self._tokenMetaSID === tokenMetaSID) {
self._tokenMetaSID = null;
}
});
}
}
return Promise.all(promises);
} }
subscribeAccountsInfo () { subscribeAccountsInfo () {
// Don't trigger the balances updates on first call (when the
// subscriptions are setup)
let firstcall = true;
return this._api return this._api
.subscribe('parity_allAccountsInfo', (error, accountsInfo) => { .subscribe('parity_allAccountsInfo', (error, accountsInfo) => {
if (error) { if (error) {
return console.warn('balances::subscribeAccountsInfo', error);
}
if (firstcall) {
firstcall = false;
return; return;
} }
this.fetchAllBalances(); this._store.dispatch(updateTokensFilter());
this.fetchEthBalances();
}) })
.then((accountsInfoSID) => { .then((subId) => {
this._accountsInfoSID = accountsInfoSID; this._apiSubs.push(subId);
}) })
.catch((error) => { .catch((error) => {
console.warn('_subscribeAccountsInfo', error); console.warn('_subscribeAccountsInfo', error);
@ -188,161 +132,57 @@ export default class Balances {
} }
subscribeBlockNumber () { subscribeBlockNumber () {
// Don't trigger the balances updates on first call (when the
// subscriptions are setup)
let firstcall = true;
return this._api return this._api
.subscribe('eth_blockNumber', (error) => { .subscribe('eth_blockNumber', (error, block) => {
if (error) { if (error) {
return console.warn('_subscribeBlockNumber', error); return console.warn('balances::subscribeBlockNumber', error);
}
if (firstcall) {
firstcall = false;
return;
} }
this._store.dispatch(queryTokensFilter()); this._store.dispatch(queryTokensFilter());
return this.fetchAllBalances(); return this.fetchEthBalances();
}) })
.then((blockNumberSID) => { .then((subId) => {
this._blockNumberSID = blockNumberSID; this._apiSubs.push(subId);
}) })
.catch((error) => { .catch((error) => {
console.warn('_subscribeBlockNumber', error); console.warn('_subscribeBlockNumber', error);
}); });
} }
fetchAllBalances (options = {}) { fetchEthBalances (options = {}) {
// If it's a network change, reload the tokens log.debug('fetching eth balances (throttled)...');
// ( and then fetch the tokens balances ) and fetch
// the accounts balances
if (options.changedNetwork) {
this.loadTokens({ skipNotifications: true });
this.loadTokens.flush();
this.fetchBalances({
force: true,
skipNotifications: true
});
return;
}
this.fetchTokensBalances(options);
this.fetchBalances(options);
}
fetchTokensBalances (options) {
const { skipNotifications = false, force = false } = options;
this.throttledTokensFetch(skipNotifications);
if (force) {
this.throttledTokensFetch.flush();
}
}
fetchBalances (options) {
const { skipNotifications = false, force = false } = options;
const { syncing } = this._store.getState().nodeStatus; const { syncing } = this._store.getState().nodeStatus;
if (options.force) {
return this._fetchEthBalances();
}
// If syncing, only retrieve balances once every // If syncing, only retrieve balances once every
// few seconds // few seconds
if (syncing || syncing === null) { if (syncing || syncing === null) {
this.shortThrottledFetch.cancel(); this.shortThrottledFetch.cancel();
this.longThrottledFetch(skipNotifications); return this.longThrottledFetch();
if (force) {
this.longThrottledFetch.flush();
}
return;
} }
this.longThrottledFetch.cancel(); this.longThrottledFetch.cancel();
this.shortThrottledFetch(skipNotifications); return this.shortThrottledFetch();
if (force) {
this.shortThrottledFetch.flush();
}
} }
_fetchBalances (skipNotifications = false) { _fetchEthBalances (skipNotifications = false) {
this._store.dispatch(fetchBalances(null, skipNotifications)); log.debug('fetching eth balances (real)...');
}
_fetchTokens (skipNotifications = false) { const { dispatch, getState } = this._store;
this._store.dispatch(fetchTokensBalances(null, null, skipNotifications));
}
getTokenRegistry () { return fetchBalances(null, skipNotifications)(dispatch, getState);
return Contracts.get().tokenReg.getContract();
}
_loadTokens (options = {}) {
return this
.getTokenRegistry()
.then((tokenreg) => {
this._tokenreg = tokenreg;
this._store.dispatch(loadTokens(options));
return this.attachToTokens(tokenreg);
})
.catch((error) => {
console.warn('balances::loadTokens', error);
});
}
attachToTokens (tokenreg) {
return Promise
.all([
this.attachToTokenMetaChange(tokenreg),
this.attachToNewToken(tokenreg)
]);
}
attachToNewToken (tokenreg) {
if (this._tokenregSID) {
return Promise.resolve();
}
return tokenreg.instance.Registered
.subscribe({
fromBlock: 0,
toBlock: 'latest',
skipInitFetch: true
}, (error, logs) => {
if (error) {
return console.error('balances::attachToNewToken', 'failed to attach to tokenreg Registered', error.toString(), error.stack);
}
this.handleTokensLogs(logs);
})
.then((tokenregSID) => {
this._tokenregSID = tokenregSID;
});
}
attachToTokenMetaChange (tokenreg) {
if (this._tokenMetaSID) {
return Promise.resolve();
}
return tokenreg.instance.MetaChanged
.subscribe({
fromBlock: 0,
toBlock: 'latest',
topics: [ null, padRight(this._api.util.asciiToHex('IMG'), 32) ],
skipInitFetch: true
}, (error, logs) => {
if (error) {
return console.error('balances::attachToTokenMetaChange', 'failed to attach to tokenreg MetaChanged', error.toString(), error.stack);
}
this.handleTokensLogs(logs);
})
.then((tokenMetaSID) => {
this._tokenMetaSID = tokenMetaSID;
});
}
handleTokensLogs (logs) {
const tokenIds = logs.map((log) => log.params.id.value.toNumber());
this._store.dispatch(fetchTokens(tokenIds));
} }
} }
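
// Illustrative sketch (not part of the diff above) of the lodash throttling
// the Balances provider relies on. With { leading: true, trailing: false }
// the first call in a window runs immediately and every later call inside the
// same window is dropped, so a burst of block notifications triggers at most
// one fetch per window. `fakeFetch` and the 5s window are stand-ins.
import { throttle } from 'lodash';

const fakeFetch = () => console.log('fetching eth balances at', Date.now());
const throttledFetch = throttle(fakeFetch, 5 * 1000, { leading: true, trailing: false });

// Simulate a burst of new-block notifications: only the first call fetches.
throttledFetch();
throttledFetch();
throttledFetch();

// `.cancel()` drops any pending invocation; fetchEthBalances uses it when
// switching between the long (syncing) and short (synced) throttles.
throttledFetch.cancel();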


@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { uniq, isEqual } from 'lodash'; import { difference, uniq } from 'lodash';
import { push } from 'react-router-redux'; import { push } from 'react-router-redux';
import { notifyTransaction } from '~/util/notifications'; import { notifyTransaction } from '~/util/notifications';
@ -22,11 +22,16 @@ import { ETH_TOKEN, fetchAccountsBalances } from '~/util/tokens';
import { LOG_KEYS, getLogger } from '~/config'; import { LOG_KEYS, getLogger } from '~/config';
import { sha3 } from '~/api/util/sha3'; import { sha3 } from '~/api/util/sha3';
import { fetchTokens } from './tokensActions';
const TRANSFER_SIGNATURE = sha3('Transfer(address,address,uint256)'); const TRANSFER_SIGNATURE = sha3('Transfer(address,address,uint256)');
const log = getLogger(LOG_KEYS.Balances); const log = getLogger(LOG_KEYS.Balances);
let tokensFilter = {}; let tokensFilter = {
tokenAddresses: [],
addresses: []
};
function _setBalances (balances) { function _setBalances (balances) {
return { return {
@ -63,13 +68,10 @@ function setBalances (updates, skipNotifications = false) {
dispatch(notifyBalanceChange(who, prevTokenValue, nextTokenValue, token)); dispatch(notifyBalanceChange(who, prevTokenValue, nextTokenValue, token));
} }
// Add the token if it's native ETH or if it has a value nextBalances[who] = {
if (token.native || nextTokenValue.gt(0)) { ...(nextBalances[who] || {}),
nextBalances[who] = { [tokenId]: nextTokenValue
...(nextBalances[who] || {}), };
[tokenId]: nextTokenValue
};
}
}); });
}); });
@ -100,41 +102,92 @@ function notifyBalanceChange (who, fromValue, toValue, token) {
} }
// TODO: fetch txCount when needed // TODO: fetch txCount when needed
export function fetchBalances (_addresses, skipNotifications = false) { export function fetchBalances (addresses, skipNotifications = false) {
return fetchTokensBalances(_addresses, [ ETH_TOKEN ], skipNotifications); return (dispatch, getState) => {
const { personal } = getState();
const { visibleAccounts, accounts } = personal;
const addressesToFetch = addresses || uniq(visibleAccounts.concat(Object.keys(accounts)));
const updates = addressesToFetch.reduce((updates, who) => {
updates[who] = [ ETH_TOKEN.id ];
return updates;
}, {});
return fetchTokensBalances(updates, skipNotifications)(dispatch, getState);
};
} }
export function updateTokensFilter (_addresses, _tokens, options = {}) { export function updateTokensFilter (options = {}) {
return (dispatch, getState) => { return (dispatch, getState) => {
const { api, personal, tokens } = getState(); const { api, personal, tokens } = getState();
const { visibleAccounts, accounts } = personal; const { visibleAccounts, accounts } = personal;
const addressesToFetch = uniq(visibleAccounts.concat(Object.keys(accounts))); const addresses = uniq(visibleAccounts.concat(Object.keys(accounts)));
const addresses = uniq(_addresses || addressesToFetch || []).sort(); const tokensToUpdate = Object.values(tokens);
const tokensAddressMap = Object.values(tokens).reduce((map, token) => {
map[token.address] = token;
return map;
}, {});
const tokensToUpdate = _tokens || Object.values(tokens);
const tokenAddresses = tokensToUpdate const tokenAddresses = tokensToUpdate
.map((t) => t.address) .map((t) => t.address)
.filter((address) => address) .filter((address) => address && !/^(0x)?0*$/.test(address));
.sort();
// Token Addresses that are not in the current filter
const newTokenAddresses = difference(tokenAddresses, tokensFilter.tokenAddresses);
// Addresses that are not in the current filter (omit those
// that the filter includes)
const newAddresses = difference(addresses, tokensFilter.addresses);
if (tokensFilter.filterFromId || tokensFilter.filterToId) { if (tokensFilter.filterFromId || tokensFilter.filterToId) {
// Has the tokens addresses changed (eg. a network change)
const sameTokens = isEqual(tokenAddresses, tokensFilter.tokenAddresses);
// Addresses that are not in the current filter (omit those
// that the filter includes)
const newAddresses = addresses.filter((address) => !tokensFilter.addresses.includes(address));
// If no new addresses and the same tokens, don't change the filter // If no new addresses and the same tokens, don't change the filter
if (sameTokens && newAddresses.length === 0) { if (newTokenAddresses.length === 0 && newAddresses.length === 0) {
log.debug('no need to update token filter', addresses, tokenAddresses, tokensFilter); log.debug('no need to update token filter', addresses, tokenAddresses, tokensFilter);
return queryTokensFilter(tokensFilter)(dispatch, getState); return;
} }
} }
log.debug('updating the token filter', addresses, tokenAddresses);
const promises = []; const promises = [];
const updates = {};
const allTokenIds = tokensToUpdate.map((token) => token.id);
const newTokenIds = newTokenAddresses.map((address) => tokensAddressMap[address].id);
newAddresses.forEach((newAddress) => {
updates[newAddress] = allTokenIds;
});
difference(addresses, newAddresses).forEach((oldAddress) => {
updates[oldAddress] = newTokenIds;
});
log.debug('updating the token filter', addresses, tokenAddresses);
const topicsFrom = [ TRANSFER_SIGNATURE, addresses, null ];
const topicsTo = [ TRANSFER_SIGNATURE, null, addresses ];
const filterOptions = {
fromBlock: 'latest',
toBlock: 'latest',
address: tokenAddresses
};
const optionsFrom = {
...filterOptions,
topics: topicsFrom
};
const optionsTo = {
...filterOptions,
topics: topicsTo
};
promises.push(
api.eth.newFilter(optionsFrom),
api.eth.newFilter(optionsTo)
);
if (tokensFilter.filterFromId) { if (tokensFilter.filterFromId) {
promises.push(api.eth.uninstallFilter(tokensFilter.filterFromId)); promises.push(api.eth.uninstallFilter(tokensFilter.filterFromId));
@ -144,48 +197,16 @@ export function updateTokensFilter (_addresses, _tokens, options = {}) {
promises.push(api.eth.uninstallFilter(tokensFilter.filterToId)); promises.push(api.eth.uninstallFilter(tokensFilter.filterToId));
} }
Promise return Promise.all(promises)
.all([
api.eth.blockNumber()
].concat(promises))
.then(([ block ]) => {
const topicsFrom = [ TRANSFER_SIGNATURE, addresses, null ];
const topicsTo = [ TRANSFER_SIGNATURE, null, addresses ];
const filterOptions = {
fromBlock: block,
toBlock: 'pending',
address: tokenAddresses
};
const optionsFrom = {
...filterOptions,
topics: topicsFrom
};
const optionsTo = {
...filterOptions,
topics: topicsTo
};
const newFilters = Promise.all([
api.eth.newFilter(optionsFrom),
api.eth.newFilter(optionsTo)
]);
return newFilters;
})
.then(([ filterFromId, filterToId ]) => { .then(([ filterFromId, filterToId ]) => {
const nextTokensFilter = { const nextTokensFilter = {
filterFromId, filterToId, filterFromId, filterToId,
addresses, tokenAddresses addresses, tokenAddresses
}; };
const { skipNotifications } = options;
tokensFilter = nextTokensFilter; tokensFilter = nextTokensFilter;
fetchTokensBalances(addresses, tokensToUpdate, skipNotifications)(dispatch, getState);
}) })
.then(() => fetchTokensBalances(updates)(dispatch, getState))
.catch((error) => { .catch((error) => {
console.warn('balances::updateTokensFilter', error); console.warn('balances::updateTokensFilter', error);
}); });
@ -194,12 +215,7 @@ export function updateTokensFilter (_addresses, _tokens, options = {}) {
export function queryTokensFilter () { export function queryTokensFilter () {
return (dispatch, getState) => { return (dispatch, getState) => {
const { api, personal, tokens } = getState(); const { api } = getState();
const { visibleAccounts, accounts } = personal;
const allAddresses = visibleAccounts.concat(Object.keys(accounts));
const addressesToFetch = uniq(allAddresses);
const lcAddresses = addressesToFetch.map((a) => a.toLowerCase());
Promise Promise
.all([ .all([
@ -207,67 +223,107 @@ export function queryTokensFilter () {
api.eth.getFilterChanges(tokensFilter.filterToId) api.eth.getFilterChanges(tokensFilter.filterToId)
]) ])
.then(([ logsFrom, logsTo ]) => { .then(([ logsFrom, logsTo ]) => {
const addresses = []; const logs = [].concat(logsFrom, logsTo);
const tokenAddresses = [];
const logs = logsFrom.concat(logsTo);
if (logs.length > 0) { if (logs.length === 0) {
return;
} else {
log.debug('got tokens filter logs', logs); log.debug('got tokens filter logs', logs);
} }
const { personal, tokens } = getState();
const { visibleAccounts, accounts } = personal;
const addressesToFetch = uniq(visibleAccounts.concat(Object.keys(accounts)));
const lcAddresses = addressesToFetch.map((a) => a.toLowerCase());
const lcTokensMap = Object.values(tokens).reduce((map, token) => {
map[token.address.toLowerCase()] = token;
return map;
});
// The keys are the account addresses,
// and the value is an Array of the token ids
// to update
const updates = {};
logs logs
.forEach((log) => { .forEach((log, index) => {
const tokenAddress = log.address; const tokenAddress = log.address.toLowerCase();
const token = lcTokensMap[tokenAddress];
const fromAddress = '0x' + log.topics[1].slice(-40); // logs = [ ...logsFrom, ...logsTo ]
const toAddress = '0x' + log.topics[2].slice(-40); const topicIdx = index < logsFrom.length ? 1 : 2;
const address = ('0x' + log.topics[topicIdx].slice(-40)).toLowerCase();
const addressIndex = lcAddresses.indexOf(address);
const fromAddressIndex = lcAddresses.indexOf(fromAddress); if (addressIndex > -1) {
const toAddressIndex = lcAddresses.indexOf(toAddress); const who = addressesToFetch[addressIndex];
if (fromAddressIndex > -1) { updates[who] = [].concat(updates[who] || [], token.id);
addresses.push(addressesToFetch[fromAddressIndex]);
} }
if (toAddressIndex > -1) {
addresses.push(addressesToFetch[toAddressIndex]);
}
tokenAddresses.push(tokenAddress);
}); });
if (addresses.length === 0) { // No accounts to update
if (Object.keys(updates).length === 0) {
return; return;
} }
const tokensToUpdate = Object.values(tokens) Object.keys(updates).forEach((who) => {
.filter((t) => tokenAddresses.includes(t.address)); // Keep non-empty token addresses
updates[who] = uniq(updates[who]);
});
fetchTokensBalances(uniq(addresses), tokensToUpdate)(dispatch, getState); fetchTokensBalances(updates)(dispatch, getState);
}); });
}; };
} }
export function fetchTokensBalances (_addresses = null, _tokens = null, skipNotifications = false) { export function fetchTokensBalances (updates, skipNotifications = false) {
return (dispatch, getState) => { return (dispatch, getState) => {
const { api, personal, tokens } = getState(); const { api, personal, tokens } = getState();
const { visibleAccounts, accounts } = personal;
const allTokens = Object.values(tokens); const allTokens = Object.values(tokens);
const addressesToFetch = uniq(visibleAccounts.concat(Object.keys(accounts))); if (!updates) {
const addresses = _addresses || addressesToFetch; const { visibleAccounts, accounts } = personal;
const tokensToUpdate = _tokens || allTokens; const addressesToFetch = uniq(visibleAccounts.concat(Object.keys(accounts)));
if (addresses.length === 0) { updates = addressesToFetch.reduce((updates, who) => {
return Promise.resolve(); updates[who] = allTokens.map((token) => token.id);
return updates;
}, {});
} }
const updates = addresses.reduce((updates, who) => { let start = Date.now();
updates[who] = tokensToUpdate.map((token) => token.id);
return updates;
}, {});
return fetchAccountsBalances(api, allTokens, updates) return fetchAccountsBalances(api, allTokens, updates)
.then((balances) => {
log.debug('got tokens balances', balances, updates, `(took ${Date.now() - start}ms)`);
// Tokens info might not be fetched yet (to not load
// tokens we don't care about)
const tokenIdsToFetch = Object.values(balances)
.reduce((tokenIds, balance) => {
const nextTokenIds = Object.keys(balance)
.filter((tokenId) => balance[tokenId].gt(0));
return tokenIds.concat(nextTokenIds);
}, []);
const tokenIndexesToFetch = uniq(tokenIdsToFetch)
.filter((tokenId) => tokens[tokenId] && tokens[tokenId].index && !tokens[tokenId].fetched)
.map((tokenId) => tokens[tokenId].index);
if (tokenIndexesToFetch.length === 0) {
return balances;
}
start = Date.now();
return fetchTokens(tokenIndexesToFetch)(dispatch, getState)
.then(() => log.debug('token indexes fetched', tokenIndexesToFetch, `(took ${Date.now() - start}ms)`))
.then(() => balances);
})
.then((balances) => { .then((balances) => {
dispatch(setBalances(balances, skipNotifications)); dispatch(setBalances(balances, skipNotifications));
}) })
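
// Illustrative sketch (not part of the diff above) of the two Transfer-event
// filters installed by updateTokensFilter. The helper name
// `buildTransferFilters` is hypothetical; `addresses` are the tracked accounts
// and `tokenAddresses` the known token contracts.
import { sha3 } from '~/api/util/sha3';

const TRANSFER_SIGNATURE = sha3('Transfer(address,address,uint256)');

function buildTransferFilters (addresses, tokenAddresses) {
  const base = {
    fromBlock: 'latest',
    toBlock: 'latest',
    address: tokenAddresses // only logs emitted by known token contracts
  };

  return [
    { ...base, topics: [ TRANSFER_SIGNATURE, addresses, null ] }, // tracked account is the sender
    { ...base, topics: [ TRANSFER_SIGNATURE, null, addresses ] }  // tracked account is the recipient
  ];
}

// Usage sketch: install both filters and keep their ids, then poll them with
// eth_getFilterChanges as queryTokensFilter does.
// const [ optionsFrom, optionsTo ] = buildTransferFilters(addresses, tokenAddresses);
// Promise.all([ api.eth.newFilter(optionsFrom), api.eth.newFilter(optionsTo) ]);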


@ -38,7 +38,7 @@ const updatableFilter = (api, onFilter) => {
filter = (filter || Promise.resolve()) filter = (filter || Promise.resolve())
.then(() => api.eth.newFilter({ .then(() => api.eth.newFilter({
fromBlock: 0, fromBlock: 'latest',
toBlock: 'latest', toBlock: 'latest',
address, address,
topics topics


@ -18,6 +18,7 @@ export Balances from './balances';
export Personal from './personal'; export Personal from './personal';
export Signer from './signer'; export Signer from './signer';
export Status from './status'; export Status from './status';
export Tokens from './tokens';
export apiReducer from './apiReducer'; export apiReducer from './apiReducer';
export balancesReducer from './balancesReducer'; export balancesReducer from './balancesReducer';


@ -16,37 +16,117 @@
import { personalAccountsInfo } from './personalActions'; import { personalAccountsInfo } from './personalActions';
let instance;
export default class Personal { export default class Personal {
constructor (store, api) { constructor (store, api) {
this._api = api; this._api = api;
this._store = store; this._store = store;
} }
start () { static get (store) {
this._removeDeleted(); if (!instance && store) {
this._subscribeAccountsInfo(); return Personal.init(store);
}
return instance;
}
static init (store) {
const { api } = store.getState();
if (!instance) {
instance = new Personal(store, api);
} else if (!instance) {
throw new Error('The Personal Provider has not been initialized yet');
}
return instance;
}
static start () {
const self = instance;
return Personal.stop()
.then(() => Promise.all([
self._removeDeleted(),
self._subscribeAccountsInfo()
]));
}
static stop () {
if (!instance) {
return Promise.resolve();
}
const self = instance;
return self._unsubscribeAccountsInfo();
} }
_subscribeAccountsInfo () { _subscribeAccountsInfo () {
this._api let resolved = false;
.subscribe('parity_allAccountsInfo', (error, accountsInfo) => {
if (error) {
console.error('parity_allAccountsInfo', error);
return;
}
// Add the address to each accounts // The Promise will be resolved when the first
Object.keys(accountsInfo) // accounts are loaded
.forEach((address) => { return new Promise((resolve, reject) => {
accountsInfo[address].address = address; this._api
}); .subscribe('parity_allAccountsInfo', (error, accountsInfo) => {
if (error) {
console.error('parity_allAccountsInfo', error);
this._store.dispatch(personalAccountsInfo(accountsInfo)); if (!resolved) {
}); resolved = true;
return reject(error);
}
return;
}
// Add the address to each accounts
Object.keys(accountsInfo)
.forEach((address) => {
accountsInfo[address].address = address;
});
const { dispatch, getState } = this._store;
personalAccountsInfo(accountsInfo)(dispatch, getState)
.then(() => {
if (!resolved) {
resolved = true;
return resolve();
}
})
.catch((error) => {
if (!resolved) {
resolved = true;
return reject(error);
}
});
})
.then((subId) => {
this.subscriptionId = subId;
});
});
}
_unsubscribeAccountsInfo () {
// Unsubscribe from any previous
// subscriptions
if (this.subscriptionId) {
return this._api
.unsubscribe(this.subscriptionId)
.then(() => {
this.subscriptionId = null;
});
}
return Promise.resolve();
} }
_removeDeleted () { _removeDeleted () {
this._api.parity return this._api.parity
.allAccountsInfo() .allAccountsInfo()
.then((accountsInfo) => { .then((accountsInfo) => {
return Promise.all( return Promise.all(

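// Minimal sketch (not part of the diff above) of the "resolve on first event"
// pattern _subscribeAccountsInfo uses, so Personal.start() only settles once
// accounts are actually loaded. `api.subscribe` is assumed to invoke the
// callback repeatedly and to resolve with a subscription id, as elsewhere in
// this codebase; `subscribeOnce` is a hypothetical helper.
function subscribeOnce (api, method, onData, onSubscribed) {
  let settled = false;

  return new Promise((resolve, reject) => {
    api
      .subscribe(method, (error, data) => {
        if (error) {
          if (!settled) {
            settled = true;
            reject(error);
          }
          return;
        }

        onData(data);

        if (!settled) {
          settled = true;
          resolve();
        }
      })
      .then(onSubscribed); // receives the subscription id for a later unsubscribe
  });
}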

@ -17,6 +17,7 @@
import { isEqual, intersection } from 'lodash'; import { isEqual, intersection } from 'lodash';
import BalancesProvider from './balances'; import BalancesProvider from './balances';
import TokensProvider from './tokens';
import { updateTokensFilter } from './balancesActions'; import { updateTokensFilter } from './balancesActions';
import { attachWallets } from './walletActions'; import { attachWallets } from './walletActions';
@ -70,7 +71,7 @@ export function personalAccountsInfo (accountsInfo) {
return WalletsUtils.fetchOwners(walletContract.at(wallet.address)); return WalletsUtils.fetchOwners(walletContract.at(wallet.address));
}); });
Promise return Promise
.all(_fetchOwners) .all(_fetchOwners)
.then((walletsOwners) => { .then((walletsOwners) => {
return Object return Object
@ -135,10 +136,6 @@ export function personalAccountsInfo (accountsInfo) {
hardware hardware
})); }));
dispatch(attachWallets(wallets)); dispatch(attachWallets(wallets));
BalancesProvider.get().fetchAllBalances({
force: true
});
}) })
.catch((error) => { .catch((error) => {
console.warn('personalAccountsInfo', error); console.warn('personalAccountsInfo', error);
@ -176,12 +173,17 @@ export function setVisibleAccounts (addresses) {
return; return;
} }
// Update the Tokens filter to take into account the new const promises = [];
// addresses
dispatch(updateTokensFilter());
BalancesProvider.get().fetchBalances({ // Update the Tokens filter to take into account the new
force: true // addresses if it is not loading (it fetches the
}); // balances automatically after loading)
if (!TokensProvider.get().loading) {
promises.push(updateTokensFilter()(dispatch, getState));
}
promises.push(BalancesProvider.get().fetchEthBalances({ force: true }));
return Promise.all(promises);
}; };
} }


@ -23,12 +23,19 @@ import SavedRequests from '~/views/Application/Requests/savedRequests';
const savedRequests = new SavedRequests(); const savedRequests = new SavedRequests();
export const init = (api) => (dispatch) => { export const init = (api) => (dispatch) => {
api.subscribe('parity_postTransaction', (error, request) => { api.subscribe('signer_requestsToConfirm', (error, pending) => {
if (error) { if (error) {
return console.error(error); return;
} }
dispatch(watchRequest(request)); const requests = pending
.filter((p) => p.payload && p.payload.sendTransaction)
.map((p) => ({
requestId: '0x' + p.id.toString(16),
transaction: p.payload.sendTransaction
}));
requests.forEach((request) => dispatch(watchRequest(request)));
}); });
api.once('connected', () => { api.once('connected', () => {

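// Worked example (not part of the diff above) of the pending-request mapping.
// `pending` mimics the shape delivered by signer_requestsToConfirm, with `id`
// a BigNumber-like value; the concrete values are made up.
const pending = [
  {
    id: { toString: (radix) => (27).toString(radix) },
    payload: { sendTransaction: { from: '0x0000000000000000000000000000000000000001', value: '0x0' } }
  },
  {
    id: { toString: (radix) => (28).toString(radix) },
    payload: { sign: {} } // ignored: not a sendTransaction
  }
];

const requests = pending
  .filter((p) => p.payload && p.payload.sendTransaction)
  .map((p) => ({
    requestId: '0x' + p.id.toString(16), // 27 -> '0x1b'
    transaction: p.payload.sendTransaction
  }));

// requests === [ { requestId: '0x1b', transaction: pending[0].payload.sendTransaction } ]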

@ -19,7 +19,6 @@ import { isEqual } from 'lodash';
import { LOG_KEYS, getLogger } from '~/config'; import { LOG_KEYS, getLogger } from '~/config';
import UpgradeStore from '~/modals/UpgradeParity/store'; import UpgradeStore from '~/modals/UpgradeParity/store';
import BalancesProvider from './balances';
import { statusBlockNumber, statusCollection } from './statusActions'; import { statusBlockNumber, statusCollection } from './statusActions';
const log = getLogger(LOG_KEYS.Signer); const log = getLogger(LOG_KEYS.Signer);
@ -31,7 +30,6 @@ const STATUS_BAD = 'bad';
export default class Status { export default class Status {
_apiStatus = {}; _apiStatus = {};
_status = {};
_longStatus = {}; _longStatus = {};
_minerSettings = {}; _minerSettings = {};
_timeoutIds = {}; _timeoutIds = {};
@ -43,19 +41,12 @@ export default class Status {
this._store = store; this._store = store;
this._upgradeStore = UpgradeStore.get(api); this._upgradeStore = UpgradeStore.get(api);
// On connecting, stop all subscriptions
api.on('connecting', this.stop, this);
// On connected, start the subscriptions
api.on('connected', this.start, this);
// On disconnected, stop all subscriptions
api.on('disconnected', this.stop, this);
this.updateApiStatus(); this.updateApiStatus();
} }
static instantiate (store, api) { static init (store) {
const { api } = store.getState();
if (!instance) { if (!instance) {
instance = new Status(store, api); instance = new Status(store, api);
} }
@ -63,59 +54,61 @@ export default class Status {
return instance; return instance;
} }
static get () { static get (store) {
if (!instance) { if (!instance && store) {
return Status.init(store);
} else if (!instance) {
throw new Error('The Status Provider has not been initialized yet'); throw new Error('The Status Provider has not been initialized yet');
} }
return instance; return instance;
} }
start () { static start () {
const self = instance;
log.debug('status::start'); log.debug('status::start');
Promise const promises = [
.all([ self._subscribeBlockNumber(),
this._subscribeBlockNumber(), self._subscribeNetPeers(),
self._subscribeEthSyncing(),
self._subscribeNodeHealth(),
self._pollLongStatus(),
self._pollApiStatus()
];
this._pollLongStatus(), return Status.stop()
this._pollStatus() .then(() => Promise.all(promises));
])
.then(() => {
return BalancesProvider.start();
});
} }
stop () { static stop () {
log.debug('status::stop'); if (!instance) {
return Promise.resolve();
const promises = [];
if (this._blockNumberSubscriptionId) {
const promise = this._api
.unsubscribe(this._blockNumberSubscriptionId)
.then(() => {
this._blockNumberSubscriptionId = null;
});
promises.push(promise);
} }
Object.values(this._timeoutIds).forEach((timeoutId) => { const self = instance;
clearTimeout(timeoutId);
});
const promise = BalancesProvider.stop(); log.debug('status::stop');
promises.push(promise); self._clearTimeouts();
return Promise.all(promises) return self._unsubscribeBlockNumber()
.then(() => true)
.catch((error) => { .catch((error) => {
console.error('status::stop', error); console.error('status::stop', error);
return true;
}) })
.then(() => this.updateApiStatus()); .then(() => self.updateApiStatus());
}
getApiStatus = () => {
const { isConnected, isConnecting, needsToken, secureToken } = this._api;
return {
isConnected,
isConnecting,
needsToken,
secureToken
};
} }
updateApiStatus () { updateApiStatus () {
@ -129,6 +122,33 @@ export default class Status {
} }
} }
_clearTimeouts () {
Object.values(this._timeoutIds).forEach((timeoutId) => {
clearTimeout(timeoutId);
});
}
_overallStatus (health) {
const allWithTime = [health.peers, health.sync, health.time].filter(x => x);
const all = [health.peers, health.sync].filter(x => x);
const statuses = all.map(x => x.status);
const bad = statuses.find(x => x === STATUS_BAD);
const needsAttention = statuses.find(x => x === STATUS_WARN);
const message = allWithTime.map(x => x.message).filter(x => x);
if (all.length) {
return {
status: bad || needsAttention || STATUS_OK,
message
};
}
return {
status: STATUS_BAD,
message: ['Unable to fetch node health.']
};
}
_subscribeBlockNumber = () => { _subscribeBlockNumber = () => {
return this._api return this._api
.subscribe('eth_blockNumber', (error, blockNumber) => { .subscribe('eth_blockNumber', (error, blockNumber) => {
@ -159,92 +179,74 @@ export default class Status {
}); });
} }
_pollTraceMode = () => { _subscribeEthSyncing = () => {
return this._api.trace return this._api.pubsub
.block() .eth
.then(blockTraces => { .syncing((error, syncing) => {
// Assumes not in Trace Mode if no transactions if (error) {
// in latest block... return;
return blockTraces.length > 0; }
})
.catch(() => false); this._store.dispatch(statusCollection({ syncing }));
});
} }
getApiStatus = () => { _subscribeNetPeers = () => {
const { isConnected, isConnecting, needsToken, secureToken } = this._api; return this._api.pubsub
.parity
return { .netPeers((error, netPeers) => {
isConnected, if (error || !netPeers) {
isConnecting, return;
needsToken, }
secureToken this._store.dispatch(statusCollection({ netPeers }));
}; });
} }
_pollStatus = () => { _subscribeNodeHealth = () => {
const nextTimeout = (timeout = 1000) => { return this._api.pubsub
if (this._timeoutIds.status) { .parity
clearTimeout(this._timeoutIds.status); .nodeHealth((error, health) => {
} if (error || !health) {
return;
this._timeoutIds.status = setTimeout(() => this._pollStatus(), timeout); }
};
this.updateApiStatus();
if (!this._api.isConnected) {
nextTimeout(250);
return Promise.resolve();
}
const statusPromises = [
this._api.eth.syncing(),
this._api.parity.netPeers(),
this._api.parity.nodeHealth()
];
return Promise
.all(statusPromises)
.then(([ syncing, netPeers, health ]) => {
const status = { netPeers, syncing, health };
health.overall = this._overallStatus(health); health.overall = this._overallStatus(health);
health.peers = health.peers || {}; health.peers = health.peers || {};
health.sync = health.sync || {}; health.sync = health.sync || {};
health.time = health.time || {}; health.time = health.time || {};
if (!isEqual(status, this._status)) { this._store.dispatch(statusCollection({ health }));
this._store.dispatch(statusCollection(status));
this._status = status;
}
})
.catch((error) => {
console.error('_pollStatus', error);
})
.then(() => {
nextTimeout();
}); });
} }
_overallStatus = (health) => { _unsubscribeBlockNumber () {
const allWithTime = [health.peers, health.sync, health.time].filter(x => x); if (this._blockNumberSubscriptionId) {
const all = [health.peers, health.sync].filter(x => x); return this._api
const statuses = all.map(x => x.status); .unsubscribe(this._blockNumberSubscriptionId)
const bad = statuses.find(x => x === STATUS_BAD); .then(() => {
const needsAttention = statuses.find(x => x === STATUS_WARN); this._blockNumberSubscriptionId = null;
const message = allWithTime.map(x => x.message).filter(x => x); });
if (all.length) {
return {
status: bad || needsAttention || STATUS_OK,
message
};
} }
return { return Promise.resolve();
status: STATUS_BAD, }
message: ['Unable to fetch node health.']
_pollApiStatus = () => {
const nextTimeout = (timeout = 1000) => {
if (this._timeoutIds.status) {
clearTimeout(this._timeoutIds.status);
}
this._timeoutIds.status = setTimeout(() => this._pollApiStatus(), timeout);
}; };
this.updateApiStatus();
if (!this._api.isConnected) {
nextTimeout(250);
} else {
nextTimeout();
}
} }
/** /**
@ -259,7 +261,7 @@ export default class Status {
} }
const { nodeKindFull } = this._store.getState().nodeStatus; const { nodeKindFull } = this._store.getState().nodeStatus;
const defaultTimeout = (nodeKindFull === false ? 240 : 30) * 1000; const defaultTimeout = (nodeKindFull === false ? 240 : 60) * 1000;
const nextTimeout = (timeout = defaultTimeout) => { const nextTimeout = (timeout = defaultTimeout) => {
if (this._timeoutIds.longStatus) { if (this._timeoutIds.longStatus) {
@ -271,7 +273,6 @@ export default class Status {
const statusPromises = [ const statusPromises = [
this._api.parity.nodeKind(), this._api.parity.nodeKind(),
this._api.parity.netPeers(),
this._api.web3.clientVersion(), this._api.web3.clientVersion(),
this._api.net.version(), this._api.net.version(),
this._api.parity.netChain() this._api.parity.netChain()
@ -283,7 +284,7 @@ export default class Status {
return Promise return Promise
.all(statusPromises) .all(statusPromises)
.then(([nodeKind, netPeers, clientVersion, netVersion, netChain]) => { .then(([nodeKind, clientVersion, netVersion, netChain]) => {
const isTest = [ const isTest = [
'2', // morden '2', // morden
'3', // ropsten, '3', // ropsten,
@ -298,7 +299,6 @@ export default class Status {
const longStatus = { const longStatus = {
nodeKind, nodeKind,
nodeKindFull, nodeKindFull,
netPeers,
clientVersion, clientVersion,
netChain, netChain,
netVersion, netVersion,
@ -310,11 +310,12 @@ export default class Status {
this._longStatus = longStatus; this._longStatus = longStatus;
} }
}) })
.then(() => {
nextTimeout();
})
.catch((error) => { .catch((error) => {
console.error('_pollLongStatus', error); console.error('_pollLongStatus', error);
}) nextTimeout(30000);
.then(() => {
nextTimeout(60000);
}); });
} }
} }
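
// Worked example (not part of the diff above) for _overallStatus. Only
// STATUS_BAD ('bad') is visible in this hunk; the values of STATUS_OK and
// STATUS_WARN below are assumptions, and the health values are made up.
const STATUS_OK = 'ok'; // assumed value
const STATUS_WARN = 'needsAttention'; // assumed value
const STATUS_BAD = 'bad';

const health = {
  peers: { status: STATUS_OK, message: '' },
  sync: { status: STATUS_WARN, message: 'Node is still syncing' },
  time: { status: STATUS_OK, message: 'Clock drift is small' }
};

// _overallStatus(health) returns
//   { status: STATUS_WARN, message: [ 'Node is still syncing', 'Clock drift is small' ] }
// because no category is STATUS_BAD, one is STATUS_WARN, and empty messages
// are filtered out. With no peers/sync data at all it falls back to
//   { status: STATUS_BAD, message: [ 'Unable to fetch node health.' ] }.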


@ -0,0 +1,161 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { updateTokensFilter } from './balancesActions';
import { loadTokens, fetchTokens } from './tokensActions';
import { padRight } from '~/api/util/format';
import Contracts from '~/contracts';
let instance = null;
export default class Tokens {
constructor (store, api) {
this._api = api;
this._store = store;
this._tokenreg = null;
this._tokenregSubs = [];
this._loading = false;
}
get loading () {
return this._loading;
}
static get (store) {
if (!instance && store) {
return Tokens.init(store);
} else if (!instance) {
throw new Error('The Tokens Provider has not been initialized yet');
}
return instance;
}
static init (store) {
const { api } = store.getState();
if (!instance) {
instance = new Tokens(store, api);
}
return instance;
}
static start () {
if (!instance) {
return Promise.reject('Tokens Provider has not been initiated yet');
}
const self = instance;
self._loading = true;
// Unsubscribe from previous subscriptions
return Tokens.stop()
.then(() => self.loadTokens())
.then(() => {
self._loading = false;
});
}
static stop () {
if (!instance) {
return Promise.resolve();
}
const self = instance;
// Unsubscribe without adding the promises
// to the result, since it would have to wait for a
// reconnection to resolve if the Node is disconnected
if (self._tokenreg) {
const tokenregPromises = self._tokenregSubs
.map((tokenregSID) => self._tokenreg.unsubscribe(tokenregSID));
Promise.all(tokenregPromises)
.then(() => {
self._tokenregSubs = [];
});
}
return Promise.resolve();
}
attachToTokensEvents (tokenreg) {
const metaTopics = [ null, padRight(this._api.util.asciiToHex('IMG'), 32) ];
return Promise
.all([
this._attachToTokenregEvents(tokenreg, 'Registered'),
this._attachToTokenregEvents(tokenreg, 'MetaChanged', metaTopics)
]);
}
getTokenRegistry () {
return Contracts.get().tokenReg.getContract();
}
loadTokens (options = {}) {
const { dispatch, getState } = this._store;
return this
.getTokenRegistry()
.then((tokenreg) => {
this._tokenreg = tokenreg;
return loadTokens(options)(dispatch, getState);
})
.then(() => updateTokensFilter()(dispatch, getState))
.then(() => this.attachToTokensEvents(this._tokenreg))
.catch((error) => {
console.warn('balances::loadTokens', error);
});
}
_attachToTokenregEvents (tokenreg, event, topics = []) {
if (this._tokenregSID) {
return Promise.resolve();
}
return tokenreg.instance[event]
.subscribe({
fromBlock: 'latest',
toBlock: 'latest',
topics: topics,
skipInitFetch: true
}, (error, logs) => {
if (error) {
return console.error('balances::attachToNewToken', 'failed to attach to tokenreg Registered', error.toString(), error.stack);
}
this._handleTokensLogs(logs);
})
.then((tokenregSID) => {
this._tokenregSubs.push(tokenregSID);
});
}
_handleTokensLogs (logs) {
const { dispatch, getState } = this._store;
const tokenIds = logs.map((log) => log.params.id.value.toNumber());
return fetchTokens(tokenIds)(dispatch, getState)
.then(() => updateTokensFilter()(dispatch, getState));
}
}
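
// Illustrative sketch (not part of the diff above) of the topic filter that
// attachToTokensEvents builds for MetaChanged. The helper name
// `buildMetaTopics` is hypothetical; the event abstraction supplies the
// signature topic itself, so the two entries here are the indexed token id
// (left open) and the indexed meta key, restricted to 'IMG'.
import { padRight } from '~/api/util/format';

function buildMetaTopics (api) {
  const imgKey = padRight(api.util.asciiToHex('IMG'), 32);

  return [ null, imgKey ]; // any token id, only 'IMG' meta changes
}

// Usage sketch, mirroring _attachToTokenregEvents:
// tokenreg.instance.MetaChanged.subscribe(
//   { fromBlock: 'latest', toBlock: 'latest', topics: buildMetaTopics(api), skipInitFetch: true },
//   (error, logs) => { /* dispatch fetchTokens for the affected token ids */ }
// );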


@ -14,56 +14,225 @@
// You should have received a copy of the GNU General Public License // You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>. // along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { uniq } from 'lodash'; import { chunk, uniq } from 'lodash';
import store from 'store';
import Contracts from '~/contracts'; import Contracts from '~/contracts';
import { LOG_KEYS, getLogger } from '~/config'; import { LOG_KEYS, getLogger } from '~/config';
import { fetchTokenIds, fetchTokenInfo } from '~/util/tokens'; import { fetchTokenIds, fetchTokensBasics, fetchTokensInfo, fetchTokensImages } from '~/util/tokens';
import { updateTokensFilter } from './balancesActions';
import { setAddressImage } from './imagesActions'; import { setAddressImage } from './imagesActions';
const TOKENS_CACHE_LS_KEY_PREFIX = '_parity::tokens::';
const log = getLogger(LOG_KEYS.Balances); const log = getLogger(LOG_KEYS.Balances);
export function setTokens (tokens) { function _setTokens (tokens) {
return { return {
type: 'setTokens', type: 'setTokens',
tokens tokens
}; };
} }
export function setTokens (nextTokens) {
return (dispatch, getState) => {
const { nodeStatus, tokens: prevTokens } = getState();
const { tokenReg } = Contracts.get();
const tokens = {
...prevTokens,
...nextTokens
};
return tokenReg.getContract()
.then((tokenRegContract) => {
const lsKey = TOKENS_CACHE_LS_KEY_PREFIX + nodeStatus.netChain;
store.set(lsKey, {
tokenreg: tokenRegContract.address,
tokens
});
})
.catch((error) => {
console.error(error);
})
.then(() => {
dispatch(_setTokens(nextTokens));
});
};
}
function loadCachedTokens (tokenRegContract) {
return (dispatch, getState) => {
const { nodeStatus } = getState();
const lsKey = TOKENS_CACHE_LS_KEY_PREFIX + nodeStatus.netChain;
const cached = store.get(lsKey);
if (cached) {
// Check if we have data from the right contract
if (cached.tokenreg === tokenRegContract.address && cached.tokens) {
log.debug('found cached tokens', cached.tokens);
dispatch(_setTokens(cached.tokens));
// Fetch all the tokens images on load
// (it's the only thing that might have changed)
const tokenIndexes = Object.values(cached.tokens)
.filter((t) => t && t.fetched)
.map((t) => t.index);
fetchTokensData(tokenRegContract, tokenIndexes)(dispatch, getState);
} else {
store.remove(lsKey);
}
}
};
}
export function loadTokens (options = {}) { export function loadTokens (options = {}) {
log.debug('loading tokens', Object.keys(options).length ? options : ''); log.debug('loading tokens', Object.keys(options).length ? options : '');
return (dispatch, getState) => { return (dispatch, getState) => {
const { tokenReg } = Contracts.get(); const { tokenReg } = Contracts.get();
tokenReg.getInstance() return tokenReg.getContract()
.then((tokenRegInstance) => { .then((tokenRegContract) => {
return fetchTokenIds(tokenRegInstance); loadCachedTokens(tokenRegContract)(dispatch, getState);
return fetchTokenIds(tokenRegContract.instance);
}) })
.then((tokenIndexes) => dispatch(fetchTokens(tokenIndexes, options))) .then((tokenIndexes) => loadTokensBasics(tokenIndexes, options)(dispatch, getState))
.catch((error) => { .catch((error) => {
console.warn('tokens::loadTokens', error); console.warn('tokens::loadTokens', error);
}); });
}; };
} }
export function fetchTokens (_tokenIndexes, options = {}) { export function loadTokensBasics (_tokenIndexes, options) {
const tokenIndexes = uniq(_tokenIndexes || []); const limit = 64;
return (dispatch, getState) => {
const { api, tokens } = getState();
const { tokenReg } = Contracts.get();
const nextTokens = {};
const prevTokensIndexes = Object.values(tokens).map((t) => t.index);
// Only fetch tokens we don't have yet
const tokenIndexes = _tokenIndexes.filter((tokenIndex) => {
return !prevTokensIndexes.includes(tokenIndex);
});
const count = tokenIndexes.length;
log.debug('loading basic tokens', tokenIndexes);
if (count === 0) {
return Promise.resolve();
}
return tokenReg.getContract()
.then((tokenRegContract) => {
let promise = Promise.resolve();
for (let start = 0; start < count; start += limit) {
promise = promise
.then(() => fetchTokensBasics(api, tokenRegContract, start, limit))
.then((results) => {
results
.forEach((token) => {
nextTokens[token.id] = token;
});
});
}
return promise;
})
.then(() => {
log.debug('fetched tokens basic info', nextTokens);
dispatch(setTokens(nextTokens));
})
.catch((error) => {
console.warn('tokens::fetchTokens', error);
});
};
}
export function fetchTokens (_tokenIndexes, options = {}) {
const tokenIndexes = uniq(_tokenIndexes || []);
const tokenChunks = chunk(tokenIndexes, 64);
return (dispatch, getState) => { return (dispatch, getState) => {
const { api, images } = getState();
const { tokenReg } = Contracts.get(); const { tokenReg } = Contracts.get();
return tokenReg.getInstance() return tokenReg.getContract()
.then((tokenRegInstance) => { .then((tokenRegContract) => {
const promises = tokenIndexes.map((id) => fetchTokenInfo(api, tokenRegInstance, id)); let promise = Promise.resolve();
return Promise.all(promises); tokenChunks.forEach((tokenChunk) => {
promise = promise
.then(() => fetchTokensData(tokenRegContract, tokenChunk)(dispatch, getState));
});
return promise;
}) })
.then((results) => { .then(() => {
const tokens = results log.debug('fetched token', getState().tokens);
})
.catch((error) => {
console.warn('tokens::fetchTokens', error);
});
};
}
/**
* Split the given token indexes between those for which
* we already have some info, and thus just need to fetch
* the image, and those for which we don't have anything and
* need to fetch all the info.
*/
function fetchTokensData (tokenRegContract, tokenIndexes) {
return (dispatch, getState) => {
const { api, tokens, images } = getState();
const allTokens = Object.values(tokens);
const tokensIndexesMap = allTokens
.reduce((map, token) => {
map[token.index] = token;
return map;
}, {});
const fetchedTokenIndexes = allTokens
.filter((token) => token.fetched)
.map((token) => token.index);
const fullIndexes = [];
const partialIndexes = [];
tokenIndexes.forEach((tokenIndex) => {
if (fetchedTokenIndexes.includes(tokenIndex)) {
partialIndexes.push(tokenIndex);
} else {
fullIndexes.push(tokenIndex);
}
});
log.debug('need to fully fetch', fullIndexes);
log.debug('need to partially fetch', partialIndexes);
const fullPromise = fetchTokensInfo(api, tokenRegContract, fullIndexes);
const partialPromise = fetchTokensImages(api, tokenRegContract, partialIndexes)
.then((imagesResult) => {
return imagesResult.map((image, index) => {
const tokenIndex = partialIndexes[index];
const token = tokensIndexesMap[tokenIndex];
return { ...token, image };
});
});
return Promise.all([ fullPromise, partialPromise ])
.then(([ fullResults, partialResults ]) => {
log.debug('fetched', { fullResults, partialResults });
return [].concat(fullResults, partialResults)
.reduce((tokens, token) => { .reduce((tokens, token) => {
const { id, image, address } = token; const { id, image, address } = token;
@ -75,14 +244,9 @@ export function fetchTokens (_tokenIndexes, options = {}) {
tokens[id] = token; tokens[id] = token;
return tokens; return tokens;
}, {}); }, {});
log.debug('fetched token', tokens);
dispatch(setTokens(tokens));
dispatch(updateTokensFilter(null, null, options));
}) })
.catch((error) => { .then((tokens) => {
console.warn('tokens::fetchTokens', error); dispatch(setTokens(tokens));
}); });
}; };
} }
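
// Minimal sketch (not part of the diff above) of the chunked, sequential
// fetching pattern used by loadTokensBasics and fetchTokens. `fetchChunk`
// stands in for fetchTokensBasics / fetchTokensData and is assumed to resolve
// with an array of token objects carrying an `id`.
import { chunk } from 'lodash';

function fetchInChunks (tokenIndexes, fetchChunk, chunkSize = 64) {
  const results = {};

  return chunk(tokenIndexes, chunkSize)
    .reduce((promise, tokenChunk) => {
      // each chunk waits for the previous one, so at most one batched
      // request is in flight at a time
      return promise
        .then(() => fetchChunk(tokenChunk))
        .then((tokens) => {
          tokens.forEach((token) => {
            results[token.id] = token;
          });
        });
    }, Promise.resolve())
    .then(() => results);
}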


@ -25,10 +25,15 @@ const initialState = {
export default handleActions({ export default handleActions({
setTokens (state, action) { setTokens (state, action) {
const { tokens } = action; const { tokens } = action;
const nextTokens = { ...state };
return { Object.keys(tokens).forEach((tokenId) => {
...state, nextTokens[tokenId] = {
...tokens ...(nextTokens[tokenId]),
}; ...tokens[tokenId]
};
});
return nextTokens;
} }
}, initialState); }, initialState);
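
// Worked example (not part of the diff above) of the merge behaviour of the
// setTokens reducer. All values are made up.
const state = {
  '0xaabbccdd': { id: '0xaabbccdd', tag: 'GAV', fetched: true }
};
const action = {
  type: 'setTokens',
  tokens: {
    '0xaabbccdd': { image: '/api/content/1234' }, // partial update
    '0x11223344': { id: '0x11223344', tag: 'DOGE' } // brand new token
  }
};

// The reducer now yields
// {
//   '0xaabbccdd': { id: '0xaabbccdd', tag: 'GAV', fetched: true, image: '/api/content/1234' },
//   '0x11223344': { id: '0x11223344', tag: 'DOGE' }
// }
// instead of dropping the previously fetched fields of '0xaabbccdd'.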


@ -22,12 +22,14 @@ import initReducers from './reducers';
import { load as loadWallet } from './providers/walletActions'; import { load as loadWallet } from './providers/walletActions';
import { init as initRequests } from './providers/requestsActions'; import { init as initRequests } from './providers/requestsActions';
import { setupWorker } from './providers/workerWrapper'; import { setupWorker } from './providers/workerWrapper';
import { setApi } from './providers/apiActions';
import { import {
Balances as BalancesProvider, Balances as BalancesProvider,
Personal as PersonalProvider, Personal as PersonalProvider,
Signer as SignerProvider, Signer as SignerProvider,
Status as StatusProvider Status as StatusProvider,
Tokens as TokensProvider
} from './providers'; } from './providers';
const storeCreation = window.devToolsExtension const storeCreation = window.devToolsExtension
@ -39,14 +41,56 @@ export default function (api, browserHistory, forEmbed = false) {
const middleware = initMiddleware(api, browserHistory, forEmbed); const middleware = initMiddleware(api, browserHistory, forEmbed);
const store = applyMiddleware(...middleware)(storeCreation)(reducers); const store = applyMiddleware(...middleware)(storeCreation)(reducers);
BalancesProvider.instantiate(store, api); // Add the `api` to the Redux Store
StatusProvider.instantiate(store, api); store.dispatch({ type: 'initAll', api });
new PersonalProvider(store, api).start(); store.dispatch(setApi(api));
// Initialise the Store Providers
BalancesProvider.init(store);
PersonalProvider.init(store);
StatusProvider.init(store);
TokensProvider.init(store);
new SignerProvider(store, api).start(); new SignerProvider(store, api).start();
store.dispatch(loadWallet(api)); store.dispatch(loadWallet(api));
store.dispatch(initRequests(api)); store.dispatch(initRequests(api));
setupWorker(store); setupWorker(store);
const start = () => {
return Promise.resolve()
.then(() => console.log('starting Status Provider...'))
.then(() => StatusProvider.start())
.then(() => console.log('started Status Provider'))
.then(() => console.log('starting Personal Provider...'))
.then(() => PersonalProvider.start())
.then(() => console.log('started Personal Provider'))
.then(() => console.log('starting Balances Provider...'))
.then(() => BalancesProvider.start())
.then(() => console.log('started Balances Provider'))
.then(() => console.log('starting Tokens Provider...'))
.then(() => TokensProvider.start())
.then(() => console.log('started Tokens Provider'));
};
const stop = () => {
return StatusProvider.stop()
.then(() => PersonalProvider.stop())
.then(() => TokensProvider.stop())
.then(() => BalancesProvider.stop());
};
// On connecting, stop all subscriptions
api.on('connecting', stop);
// On connected, start the subscriptions
api.on('connected', start);
// On disconnected, stop all subscriptions
api.on('disconnected', stop);
return store; return store;
} }


@ -50,6 +50,20 @@ export class Balance extends Component {
} }
let body = Object.keys(balance) let body = Object.keys(balance)
.sort((tokenIdA, tokenIdB) => {
const tokenA = tokens[tokenIdA];
const tokenB = tokens[tokenIdB];
if (tokenA.native) {
return -1;
}
if (tokenB.native) {
return 1;
}
return (tokenA.name || tokenA.tag || '').localeCompare(tokenB.name || tokenB.tag || '');
})
.map((tokenId) => { .map((tokenId) => {
const token = tokens[tokenId]; const token = tokens[tokenId];
const balanceValue = balance[tokenId]; const balanceValue = balance[tokenId];
@ -73,7 +87,8 @@ export class Balance extends Component {
decimals = 1; decimals = 1;
} }
const value = new BigNumber(balanceValue).div(bnf).toFormat(decimals); const rawValue = new BigNumber(balanceValue).div(bnf);
const value = rawValue.toFormat(decimals);
const classNames = [styles.balance]; const classNames = [styles.balance];
let details = null; let details = null;
@ -85,7 +100,7 @@ export class Balance extends Component {
className={ styles.value } className={ styles.value }
key='value' key='value'
> >
<span title={ value }> <span title={ `${rawValue.toFormat()} ${token.tag}` }>
{ value } { value }
</span> </span>
</div>, </div>,

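// Worked example (not part of the diff above) of the balance ordering
// introduced here: the native ETH entry is pinned first, the rest is sorted
// alphabetically by name (falling back to tag). Token values are made up.
const tokens = {
  eth: { native: true, tag: 'ETH' },
  bee: { name: 'BeeToken', tag: 'BEE' },
  ant: { name: 'Aragon', tag: 'ANT' }
};
const balance = { bee: '1', eth: '2', ant: '3' };

const ordered = Object.keys(balance).sort((tokenIdA, tokenIdB) => {
  const tokenA = tokens[tokenIdA];
  const tokenB = tokens[tokenIdB];

  if (tokenA.native) {
    return -1;
  }
  if (tokenB.native) {
    return 1;
  }

  return (tokenA.name || tokenA.tag || '').localeCompare(tokenB.name || tokenB.tag || '');
});

// ordered === [ 'eth', 'ant', 'bee' ]  (ETH first, then Aragon, then BeeToken)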

@ -23,7 +23,7 @@ import Api from '~/api';
import TxRow from './txRow'; import TxRow from './txRow';
const api = new Api({ execute: sinon.stub() }); const api = new Api({ execute: sinon.stub(), on: sinon.stub() });
const STORE = { const STORE = {
dispatch: sinon.stub(), dispatch: sinon.stub(),


@ -22,7 +22,7 @@ import Api from '~/api';
import TxList from './txList'; import TxList from './txList';
const api = new Api({ execute: sinon.stub() }); const api = new Api({ execute: sinon.stub(), on: sinon.stub() });
const STORE = { const STORE = {
dispatch: sinon.stub(), dispatch: sinon.stub(),


@ -1,133 +0,0 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { range } from 'lodash';
import BigNumber from 'bignumber.js';
import { hashToImageUrl } from '~/redux/util';
import { sha3 } from '~/api/util/sha3';
import imagesEthereum from '~/../assets/images/contracts/ethereum-black-64x64.png';
const BALANCEOF_SIGNATURE = sha3('balanceOf(address)');
const ADDRESS_PADDING = range(24).map(() => '0').join('');
export const ETH_TOKEN = {
address: '',
format: new BigNumber(10).pow(18),
id: sha3('eth_native_token').slice(0, 10),
image: imagesEthereum,
name: 'Ethereum',
native: true,
tag: 'ETH'
};
export function fetchTokenIds (tokenregInstance) {
return tokenregInstance.tokenCount
.call()
.then((numTokens) => {
const tokenIndexes = range(numTokens.toNumber());
return tokenIndexes;
});
}
export function fetchTokenInfo (api, tokenregInstace, tokenIndex) {
return Promise
.all([
tokenregInstace.token.call({}, [tokenIndex]),
tokenregInstace.meta.call({}, [tokenIndex, 'IMG'])
])
.then(([ tokenData, image ]) => {
const [ address, tag, format, name ] = tokenData;
const token = {
format: format.toString(),
index: tokenIndex,
image: hashToImageUrl(image),
id: sha3(address + tokenIndex).slice(0, 10),
address,
name,
tag
};
return token;
});
}
/**
* `updates` should be in the shape:
* {
* [ who ]: [ tokenId ] // Array of tokens to update
* }
*
* Returns a Promise resolved with the balances in the shape:
* {
* [ who ]: { [ tokenId ]: BigNumber } // The balances of `who`
* }
*/
export function fetchAccountsBalances (api, tokens, updates) {
const addresses = Object.keys(updates);
const promises = addresses
.map((who) => {
const tokensIds = updates[who];
const tokensToUpdate = tokensIds.map((tokenId) => tokens.find((t) => t.id === tokenId));
return fetchAccountBalances(api, tokensToUpdate, who);
});
return Promise.all(promises)
.then((results) => {
return results.reduce((balances, accountBalances, index) => {
balances[addresses[index]] = accountBalances;
return balances;
}, {});
});
}
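
// Worked example (not part of the diff above) of the `updates` input and the
// resolved shape of fetchAccountsBalances. The addresses and the '0x12345678'
// token id are made up; ETH_TOKEN is the constant defined at the top of this
// file, and the resolved values are BigNumber instances.
const updates = {
  '0x0000000000000000000000000000000000000001': [ ETH_TOKEN.id, '0x12345678' ],
  '0x0000000000000000000000000000000000000002': [ ETH_TOKEN.id ]
};

// fetchAccountsBalances(api, tokens, updates) resolves with:
// {
//   '0x0000000000000000000000000000000000000001': {
//     [ETH_TOKEN.id]: <BigNumber: wei balance>,
//     '0x12345678': <BigNumber: token balance>
//   },
//   '0x0000000000000000000000000000000000000002': {
//     [ETH_TOKEN.id]: <BigNumber: wei balance>
//   }
// }
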
/**
* Returns a Promise resolved with the balances in the shape:
* {
* [ tokenId ]: BigNumber // Token balance value
* }
*/
export function fetchAccountBalances (api, tokens, who) {
const calldata = '0x' + BALANCEOF_SIGNATURE.slice(2, 10) + ADDRESS_PADDING + who.slice(2);
const promises = tokens.map((token) => fetchTokenBalance(api, token, { who, calldata }));
return Promise.all(promises)
.then((results) => {
return results.reduce((balances, value, index) => {
const token = tokens[index];
balances[token.id] = value;
return balances;
}, {});
});
}
export function fetchTokenBalance (api, token, { who, calldata }) {
if (token.native) {
return api.eth.getBalance(who);
}
return api.eth
.call({ data: calldata, to: token.address })
.then((result) => {
const cleanResult = result.replace(/^0x/, '');
return new BigNumber(`0x${cleanResult || 0}`);
});
}
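
// Worked example (not part of the diff above) of the balanceOf calldata
// assembled by fetchTokenBalance. The account address is made up; sha3 and
// range mirror the imports at the top of this file.
import { range } from 'lodash';
import { sha3 } from '~/api/util/sha3';

const BALANCEOF_SIGNATURE = sha3('balanceOf(address)'); // first 4 bytes: 0x70a08231
const ADDRESS_PADDING = range(24).map(() => '0').join(''); // 24 zero chars = 12 zero bytes
const who = '0x0000000000000000000000000000000000000001';

const calldata = '0x' +
  BALANCEOF_SIGNATURE.slice(2, 10) + // function selector (4 bytes)
  ADDRESS_PADDING + // left-pads the 20-byte address to a 32-byte word
  who.slice(2); // the address itself, without the 0x prefix

// calldata is '0x' + 8 + 24 + 40 hex chars: a selector plus one 32-byte
// argument, exactly what eth_call against an ERC20 token expects.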


@ -0,0 +1,23 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
// build from : https://raw.githubusercontent.com/paritytech/contracts/4c8501e908166aab7ff4d2ebb05db61b5d017024/TokenCalls.sol
// metadata (include build version and options):
// {"compiler":{"version":"0.4.16+commit.d7661dd9"},"language":"Solidity","output":{"abi":[{"inputs":[{"name":"tokenRegAddress","type":"address"},{"name":"start","type":"uint256"},{"name":"limit","type":"uint256"}],"payable":false,"stateMutability":"nonpayable","type":"constructor"}],"devdoc":{"methods":{}},"userdoc":{"methods":{}}},"settings":{"compilationTarget":{"":"Tokens"},"libraries":{},"optimizer":{"enabled":true,"runs":200},"remappings":[]},"sources":{"":{"keccak256":"0x4790e490f418d1a5884c27ffe9684914dab2d55bd1d23b99cff7aa2ca289e2d3","urls":["bzzr://bb200beae6849f1f5bb97b36c57cd493be52877ec0b55ee9969fa5f8159cf37b"]}},"version":1}
// {"compiler":{"version":"0.4.16+commit.d7661dd9"},"language":"Solidity","output":{"abi":[{"inputs":[{"name":"who","type":"address[]"},{"name":"tokens","type":"address[]"}],"payable":false,"stateMutability":"nonpayable","type":"constructor"}],"devdoc":{"methods":{}},"userdoc":{"methods":{}}},"settings":{"compilationTarget":{"":"TokensBalances"},"libraries":{},"optimizer":{"enabled":true,"runs":200},"remappings":[]},"sources":{"":{"keccak256":"0x4790e490f418d1a5884c27ffe9684914dab2d55bd1d23b99cff7aa2ca289e2d3","urls":["bzzr://bb200beae6849f1f5bb97b36c57cd493be52877ec0b55ee9969fa5f8159cf37b"]}},"version":1}
export const tokenAddresses = '0x6060604052341561000f57600080fd5b6040516060806102528339810160405280805191906020018051919060200180519150505b6000806000806100426101fc565b600088955085600160a060020a0316639f181b5e6000604051602001526040518163ffffffff167c0100000000000000000000000000000000000000000000000000000000028152600401602060405180830381600087803b15156100a657600080fd5b6102c65a03f115156100b757600080fd5b50505060405180519550508688018890116100ce57fe5b8785116100de57600093506100f6565b8487890111156100f25787850393506100f6565b8693505b5b83602002602001925060405191508282016040528382528790505b8388018110156101ea5785600160a060020a031663044215c682600060405160a001526040517c010000000000000000000000000000000000000000000000000000000063ffffffff8416028152600481019190915260240160a060405180830381600087803b151561018457600080fd5b6102c65a03f1151561019557600080fd5b50505060405180519060200180519060200180519060200180519060200180515086935050508a84039050815181106101ca57fe5b600160a060020a039092166020928302909101909101525b600101610112565b8282f35b50505050505050505061020e565b60206040519081016040526000815290565b60368061021c6000396000f30060606040525b600080fd00a165627a7a72305820a9a09f013393cf3c6398ce0f8175073fe363b6f594f9bd569261d0bb94aa84d40029';
export const tokensBalances = '0x6060604052341561000f57600080fd5b60405161018b38038061018b8339810160405280805182019190602001805190910190505b6000806000610041610135565b60008060008060008060008c518c51029a506020808c020199507f70a0823100000000000000000000000000000000000000000000000000000000985060405197508988016040528a8852604051965060248701604052888752879550866004019450600093505b8c5184101561011f57600092505b8b51831015610113578c84815181106100cc57fe5b9060200190602002015191508b83815181106100e457fe5b90602001906020020151905060208601955081855260208660248960008561fffff1505b6001909201916100b7565b5b6001909301926100a9565b8988f35b50505050505050505050505050610147565b60206040519081016040526000815290565b6036806101556000396000f30060606040525b600080fd00a165627a7a723058203cfc17c394936aa87b7db79e4f082a7cfdcefef54acd3124d17525b56c92e7950029';
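
// Illustration (not part of the original diff): these blobs are creation bytecode for
// constructor-only helpers; each constructor computes its result and returns it
// directly, so nothing is ever deployed. They are imported as './bytecodes' by
// js/src/util/tokens/index.js below and used roughly like this (`api` is assumed to
// be a connected Parity js API instance):
function exampleCallConstructorHelper (api, bytecode, encodedConstructorArgs) {
  // eth_call with `data` only (no `to` address) executes the constructor in the EVM
  // and yields whatever it returns, without sending a transaction.
  return api.eth.call({ data: bytecode + encodedConstructorArgs });
}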

js/src/util/tokens/index.js (new file)

@@ -0,0 +1,283 @@
// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { range } from 'lodash';
import BigNumber from 'bignumber.js';
import { hashToImageUrl } from '~/redux/util';
import { sha3 } from '~/api/util/sha3';
import imagesEthereum from '~/../assets/images/contracts/ethereum-black-64x64.png';
import {
tokenAddresses as tokenAddressesBytecode,
tokensBalances as tokensBalancesBytecode
} from './bytecodes';
export const ETH_TOKEN = {
address: '',
format: new BigNumber(10).pow(18),
id: getTokenId('eth_native_token'),
image: imagesEthereum,
name: 'Ethereum',
native: true,
tag: 'ETH'
};
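// Note (editor's addition): `format` is the display divisor (10^18 wei per ETH), and
// the synthetic id lets ETH flow through the same balance maps as registry tokens;
// the fetchers below special-case it via eth_getBalance instead of a token call.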
export function fetchTokenIds (tokenregInstance) {
return tokenregInstance.tokenCount
.call()
.then((numTokens) => {
const tokenIndexes = range(numTokens.toNumber());
return tokenIndexes;
});
}
export function fetchTokensBasics (api, tokenReg, start = 0, limit = 100) {
const tokenAddressesCallData = encode(
api,
[ 'address', 'uint', 'uint' ],
[ tokenReg.address, start, limit ]
);
return api.eth
.call({ data: tokenAddressesBytecode + tokenAddressesCallData })
.then((result) => {
const tokenAddresses = decodeArray(api, 'address[]', result);
return tokenAddresses.map((tokenAddress, index) => {
const tokenIndex = start + index;
return {
address: tokenAddress,
id: getTokenId(tokenAddress, tokenIndex),
index: tokenIndex,
fetched: false
};
});
});
}
export function fetchTokensInfo (api, tokenReg, tokenIndexes) {
const requests = tokenIndexes.map((tokenIndex) => {
const tokenCalldata = tokenReg.getCallData(tokenReg.instance.token, {}, [tokenIndex]);
return { to: tokenReg.address, data: tokenCalldata };
});
const calls = requests.map((req) => api.eth.call(req));
const imagesPromise = fetchTokensImages(api, tokenReg, tokenIndexes);
return Promise.all(calls)
.then((results) => {
return imagesPromise.then((images) => [ results, images ]);
})
.then(([ results, images ]) => {
return results.map((rawTokenData, index) => {
const tokenIndex = tokenIndexes[index];
const tokenData = tokenReg.instance.token
.decodeOutput(rawTokenData)
.map((t) => t.value);
const [ address, tag, format, name ] = tokenData;
const image = images[index];
const token = {
address,
id: getTokenId(address, tokenIndex),
index: tokenIndex,
format: format.toString(),
image,
name,
tag,
fetched: true
};
return token;
});
});
}
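
// Illustrative sketch (not part of the original diff): fetchTokensBasics and
// fetchTokensInfo are meant to be chained: grab lightweight address/id stubs first,
// then hydrate the indexes that are still `fetched: false`. Assumes `api` and a
// `tokenReg` contract wrapper are available; the chunk size of 100 is arbitrary.
function exampleLoadTokens (api, tokenReg) {
  return fetchTokensBasics(api, tokenReg, 0, 100)
    .then((basics) => {
      const missingIndexes = basics
        .filter((token) => !token.fetched)
        .map((token) => token.index);

      return fetchTokensInfo(api, tokenReg, missingIndexes);
    });
}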
export function fetchTokensImages (api, tokenReg, tokenIndexes) {
const requests = tokenIndexes.map((tokenIndex) => {
const metaCalldata = tokenReg.getCallData(tokenReg.instance.meta, {}, [tokenIndex, 'IMG']);
return { to: tokenReg.address, data: metaCalldata };
});
const calls = requests.map((req) => api.eth.call(req));
return Promise.all(calls)
.then((results) => {
return results.map((rawImage) => {
const image = tokenReg.instance.meta.decodeOutput(rawImage)[0].value;
return hashToImageUrl(image);
});
});
}
/**
* `updates` should be in the shape:
* {
* [ who ]: [ tokenId ] // Array of token ids to update
* }
*
* Returns a Promise resolved with the balances in the shape:
* {
* [ who ]: { [ tokenId ]: BigNumber } // The balances of `who`
* }
*/
export function fetchAccountsBalances (api, tokens, updates) {
const accountAddresses = Object.keys(updates);
// Updates for the ETH balances
const ethUpdates = accountAddresses
.filter((accountAddress) => {
return updates[accountAddress].find((tokenId) => tokenId === ETH_TOKEN.id);
})
.reduce((nextUpdates, accountAddress) => {
nextUpdates[accountAddress] = [ETH_TOKEN.id];
return nextUpdates;
}, {});
// Updates for Tokens balances
const tokenUpdates = Object.keys(updates)
.reduce((nextUpdates, accountAddress) => {
const tokenIds = updates[accountAddress].filter((tokenId) => tokenId !== ETH_TOKEN.id);
if (tokenIds.length > 0) {
nextUpdates[accountAddress] = tokenIds;
}
return nextUpdates;
}, {});
let ethBalances = {};
let tokensBalances = {};
const ethPromise = fetchEthBalances(api, Object.keys(ethUpdates))
.then((_ethBalances) => {
ethBalances = _ethBalances;
});
const tokenPromise = Object.keys(tokenUpdates)
.reduce((tokenPromise, accountAddress) => {
const tokenIds = tokenUpdates[accountAddress];
const updateTokens = tokens
.filter((t) => tokenIds.includes(t.id));
return tokenPromise
.then(() => fetchTokensBalances(api, updateTokens, [ accountAddress ]))
.then((balances) => {
tokensBalances[accountAddress] = balances[accountAddress];
});
}, Promise.resolve());
return Promise.all([ ethPromise, tokenPromise ])
.then(() => {
const balances = Object.assign({}, tokensBalances);
Object.keys(ethBalances).forEach((accountAddress) => {
if (!balances[accountAddress]) {
balances[accountAddress] = {};
}
balances[accountAddress] = Object.assign(
{},
balances[accountAddress],
ethBalances[accountAddress]
);
});
return balances;
});
}
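
// Illustration (not part of the original diff): the shapes a caller passes in and
// gets back. The address below is made up; `api` and `tokens` come from application
// state, and `tokens` is assumed to be non-empty.
function exampleBalancesQuery (api, tokens) {
  const who = '0x0000000000000000000000000000000000000001';
  const updates = { [who]: [ ETH_TOKEN.id, tokens[0].id ] };

  return fetchAccountsBalances(api, tokens, updates)
    .then((balances) => {
      // balances[who][ETH_TOKEN.id] is the ETH balance in wei (BigNumber);
      // balances[who][tokens[0].id] is the raw token balance, not divided by `format`.
      return balances;
    });
}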
function fetchEthBalances (api, accountAddresses) {
const promises = accountAddresses
.map((accountAddress) => api.eth.getBalance(accountAddress));
return Promise.all(promises)
.then((balancesArray) => {
return balancesArray.reduce((balances, balance, index) => {
balances[accountAddresses[index]] = {
[ETH_TOKEN.id]: balance
};
return balances;
}, {});
});
}
function fetchTokensBalances (api, tokens, accountAddresses) {
const tokenAddresses = tokens.map((t) => t.address);
const tokensBalancesCallData = encode(
api,
[ 'address[]', 'address[]' ],
[ accountAddresses, tokenAddresses ]
);
return api.eth
.call({ data: tokensBalancesBytecode + tokensBalancesCallData })
.then((result) => {
const rawBalances = decodeArray(api, 'uint[]', result);
const balances = {};
accountAddresses.forEach((accountAddress, accountIndex) => {
const balance = {};
const preIndex = accountIndex * tokenAddresses.length;
tokenAddresses.forEach((tokenAddress, tokenIndex) => {
const index = preIndex + tokenIndex;
const token = tokens[tokenIndex];
balance[token.id] = rawBalances[index];
});
balances[accountAddress] = balance;
});
return balances;
});
}
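// Note (editor's addition): the helper contract returns one flat uint[] in row-major
// order, accounts first then tokens. With 2 accounts and 3 tokens, rawBalances[0..2]
// belong to account 0 and rawBalances[3..5] to account 1, which is exactly what
// `preIndex = accountIndex * tokenAddresses.length` walks above.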
function getTokenId (...args) {
return sha3(args.join('')).slice(0, 10);
}
function encode (api, types, values) {
return api.util.abiEncode(
null,
types,
values
).replace('0x', '');
}
function decodeArray (api, type, data) {
return api.util
.abiDecode(
[type],
[
'0x',
(32).toString(16).padStart(64, 0),
data.replace('0x', '')
].join('')
)[0]
.map((t) => t.value);
}
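
// Note (editor's addition): the helper calls return the array words without the
// leading offset word of a standard ABI-encoded dynamic return, so decodeArray
// fabricates one (a single 32-byte word holding 0x20) before handing the data to
// abiDecode. Roughly:
//
//   const offsetWord = (32).toString(16).padStart(64, '0');
//   api.util.abiDecode([ 'uint[]' ], '0x' + offsetWord + rawReturnData);
//
// which lets abiDecode parse the length word and elements that follow as usual.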
@@ -57,6 +57,11 @@ class Requests extends Component {
   renderRequest (request, extras = {}) {
     const { show, transaction } = request;
+
+    if (!transaction) {
+      return null;
+    }
+
     const state = this.getTransactionState(request);
     const displayedTransaction = { ...transaction };
@@ -156,9 +161,9 @@ class Requests extends Component {
     return (
       <FormattedMessage
         id='requests.status.transactionMined'
-        defaultMessage='Transaction mined at block #{blockNumber} ({blockHeight} blocks ago)'
+        defaultMessage='Transaction mined at block #{blockNumber} ({blockHeight} confirmations)'
         values={ {
-          blockHeight: +request.blockHeight,
+          blockHeight: (+request.blockHeight || 0).toString(),
           blockNumber: +transactionReceipt.blockNumber
         } }
       />
@@ -35,9 +35,14 @@ export default class Dapp extends Component {
   }
 
   componentWillMount () {
+    this.isInactive = false;
     return this.loadApp();
   }
 
+  componentWillUnmount () {
+    this.isInactive = true;
+  }
+
   render () {
     const { id, timestamp } = this.props;
     const { dapp } = this.state;
@@ -83,6 +88,10 @@ export default class Dapp extends Component {
     return store
       .loadApp(id)
       .then((dapp) => {
+        if (this.isInactive) {
+          return;
+        }
+
         this.setState({ dapp });
       });
   }
@@ -24,10 +24,8 @@ export default class AccountStore {
   constructor (api) {
     this._api = api;
 
-    this.loadDefaultAccount()
+    this.subscribeDefaultAccount()
       .then(() => this.loadAccounts());
-
-    this.subscribeDefaultAccount();
   }
 
   @action setAccounts = (accounts) => {
@@ -60,12 +58,6 @@ export default class AccountStore {
     });
   }
 
-  loadDefaultAccount () {
-    return this._api.parity
-      .defaultAccount()
-      .then((address) => this.setDefaultAccount(address));
-  }
-
   loadAccounts () {
     this.setLoading(true);
@@ -48,22 +48,27 @@ function createReduxStore () {
 function render () {
   reduxStore = createReduxStore();
   signerStore = createSignerStore();
+  const context = {
+    store: reduxStore,
+    api: {
+      transport: {
+        on: sinon.stub()
+      },
+      pubsub: {
+        subscribeAndGetResult: sinon.stub().returns(Promise.resolve(1))
+      },
+      util: {
+        sha3: (x) => x,
+        hexToBytes: (x) => x,
+        asciiToHex: (x) => x
+      }
+    }
+  };
 
   component = shallow(
     <SignRequest signerStore={ signerStore } />,
-    {
-      context: {
-        store: reduxStore,
-        api: {
-          util: {
-            sha3: (x) => x,
-            hexToBytes: (x) => x,
-            asciiToHex: (x) => x
-          }
-        }
-      }
-    }
-  ).find('SignRequest').shallow();
+    { context }
+  ).find('SignRequest').shallow({ context });
 
   return component;
 }
@@ -211,30 +211,35 @@ impl<T: Filterable + Send + Sync + 'static> EthFilter for T {
     }
 
     fn filter_logs(&self, index: Index) -> BoxFuture<Vec<Log>, Error> {
-        let mut polls = self.polls().lock();
-        match polls.poll(&index.value()) {
-            Some(&PollFilter::Logs(ref _block_number, ref _previous_log, ref filter)) => {
-                let include_pending = filter.to_block == Some(BlockNumber::Pending);
-                let filter: EthcoreFilter = filter.clone().into();
-
-                // fetch pending logs.
-                let pending = if include_pending {
-                    let best_block = self.best_block_number();
-                    self.pending_logs(best_block, &filter)
-                } else {
-                    Vec::new()
-                };
-
-                // retrieve logs asynchronously, appending pending logs.
-                let limit = filter.limit;
-                self.logs(filter)
-                    .map(move |mut logs| { logs.extend(pending); logs })
-                    .map(move |logs| limit_logs(logs, limit))
-                    .boxed()
-            },
-            // just empty array
-            _ => future::ok(Vec::new()).boxed()
-        }
+        let filter = {
+            let mut polls = self.polls().lock();
+
+            match polls.poll(&index.value()) {
+                Some(&PollFilter::Logs(ref _block_number, ref _previous_log, ref filter)) => filter.clone(),
+                // just empty array
+                _ => return future::ok(Vec::new()).boxed(),
+            }
+        };
+
+        let include_pending = filter.to_block == Some(BlockNumber::Pending);
+        let filter: EthcoreFilter = filter.into();
+
+        // fetch pending logs.
+        let pending = if include_pending {
+            let best_block = self.best_block_number();
+            self.pending_logs(best_block, &filter)
+        } else {
+            Vec::new()
+        };
+
+        // retrieve logs asynchronously, appending pending logs.
+        let limit = filter.limit;
+        let logs = self.logs(filter);
+        let res = logs
+            .map(move |mut logs| { logs.extend(pending); logs })
+            .map(move |logs| limit_logs(logs, limit))
+            .boxed();
+        res
     }
 
     fn uninstall_filter(&self, index: Index) -> Result<bool, Error> {