Backporting to beta (#3525)

* v1.4.4

* Fixing tests, fixing refreshing precompiled (#3483)

* Fixing tests, fixing refreshing precompiled

* Commit only js and Cargo.lock

* Moving commands to the right place

* Adding js content as well

* Wallet names shouldn't include the address.

(Actually wallet files shouldn't contain it either, but we'll
leave that for a later PR).

* sudo -c is not supported on Mac

* Add trace_{call, rawTransaction, replayTransaction}

* Remove dangling only

* Check for sanity

* Better Errors Snackbar in UI #3473

* Sync bandwidth optimization

* Updated sync algo description

* Check transaction signature when adding to the queue

* Limit sync reorg to 20 blocks (#3519)

* Limit sync reorg

* Fixed tests

* Updated the European warp bootnode addresses (#3528)

* Fix dapp account selection (#3399)

* Fix GHH accounts (filter non-accounts)

* SignatureReg handle undefined ABI names gracefully

* SignatureReg fix accounts (filter non-accounts)

* TokenReg fix accounts (filter non-accounts)

* Registry fix accounts (filter non-accounts)

* Remove addresses, display non-refundable warning (#3403)

* Use Contract owner for unregistering Token #3440

* Make tokenreg dapp fast again (#3474)

* Using proper TokenReg Instance in TokenReg dApp #3371

* Remove unnecessary logs in tokenreg dapp

* Improved Redux management in TokenReg dApp #3371

* Fixing linting


Former-commit-id: 3e0d033eaf789cfdf517f4a97effc500f1f9263b
Arkadiy Paronyan 2016-11-18 20:50:20 +01:00 committed by GitHub
parent 2077793b4f
commit de23d7a2d7
52 changed files with 675 additions and 353 deletions

Cargo.lock (generated)
View File

@ -1,6 +1,6 @@
[root]
name = "parity"
version = "1.4.3"
version = "1.4.4"
dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"clippy 0.0.96 (registry+https://github.com/rust-lang/crates.io-index)",
@ -21,7 +21,7 @@ dependencies = [
"ethcore-rpc 1.4.0",
"ethcore-signer 1.4.0",
"ethcore-stratum 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"ethsync 1.4.0",
"fdlimit 0.1.0",
"hyper 0.9.10 (registry+https://github.com/rust-lang/crates.io-index)",
@ -289,7 +289,7 @@ dependencies = [
"ethcore-ipc 1.4.0",
"ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-nano 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"ethjson 0.1.0",
"ethkey 0.2.0",
"ethstore 0.1.0",
@ -336,7 +336,7 @@ dependencies = [
"ethabi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0",
"ethcore-rpc 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"fetch 0.1.0",
"hyper 0.9.4 (git+https://github.com/ethcore/hyper)",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
@ -381,7 +381,7 @@ name = "ethcore-ipc"
version = "1.4.0"
dependencies = [
"ethcore-devtools 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
@ -428,7 +428,7 @@ dependencies = [
"ethcore-ipc 1.4.0",
"ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-nano 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"nanomsg 0.5.1 (git+https://github.com/ethcore/nanomsg.rs.git)",
"semver 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
@ -439,7 +439,7 @@ name = "ethcore-logger"
version = "1.4.0"
dependencies = [
"env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"isatty 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
@ -455,7 +455,7 @@ dependencies = [
"bytes 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"ethcore-devtools 1.4.0",
"ethcore-io 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"ethcrypto 0.1.0",
"ethkey 0.2.0",
"igd 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -482,7 +482,7 @@ dependencies = [
"ethcore-devtools 1.4.0",
"ethcore-io 1.4.0",
"ethcore-ipc 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"ethcrypto 0.1.0",
"ethjson 0.1.0",
"ethkey 0.2.0",
@ -511,7 +511,7 @@ dependencies = [
"ethcore-devtools 1.4.0",
"ethcore-io 1.4.0",
"ethcore-rpc 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@ -530,7 +530,7 @@ dependencies = [
"ethcore-ipc 1.4.0",
"ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-nano 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"json-tcp-server 0.1.0 (git+https://github.com/ethcore/json-tcp-server)",
"jsonrpc-core 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
@ -541,7 +541,7 @@ dependencies = [
[[package]]
name = "ethcore-util"
version = "1.4.3"
version = "1.4.4"
dependencies = [
"ansi_term 0.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"arrayvec 0.3.16 (registry+https://github.com/rust-lang/crates.io-index)",
@ -590,7 +590,7 @@ dependencies = [
name = "ethjson"
version = "0.1.0"
dependencies = [
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_codegen 0.8.4 (registry+https://github.com/rust-lang/crates.io-index)",
@ -643,7 +643,7 @@ dependencies = [
"ethcore-ipc-codegen 1.4.0",
"ethcore-ipc-nano 1.4.0",
"ethcore-network 1.4.0",
"ethcore-util 1.4.3",
"ethcore-util 1.4.4",
"heapsize 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)",

View File

@ -1,7 +1,7 @@
[package]
description = "Ethcore client."
name = "parity"
version = "1.4.3"
version = "1.4.4"
license = "GPL-3.0"
authors = ["Ethcore <admin@ethcore.io>"]
build = "build.rs"

View File

@ -182,8 +182,8 @@
"enode://89d5dc2a81e574c19d0465f497c1af96732d1b61a41de89c2a37f35707689ac416529fae1038809852b235c2d30fd325abdc57c122feeefbeaaf802cc7e9580d@45.55.33.62:30303",
"enode://605e04a43b1156966b3a3b66b980c87b7f18522f7f712035f84576016be909a2798a438b2b17b1a8c58db314d88539a77419ca4be36148c086900fba487c9d39@188.166.255.12:30303",
"enode://016b20125f447a3b203a3cae953b2ede8ffe51290c071e7599294be84317635730c397b8ff74404d6be412d539ee5bb5c3c700618723d3b53958c92bd33eaa82@159.203.210.80:30303",
"enode://01f76fa0561eca2b9a7e224378dd854278735f1449793c46ad0c4e79e8775d080c21dcc455be391e90a98153c3b05dcc8935c8440de7b56fe6d67251e33f4e3c@10.6.6.117:30303",
"enode://fe11ef89fc5ac9da358fc160857855f25bbf9e332c79b9ca7089330c02b728b2349988c6062f10982041702110745e203d26975a6b34bcc97144f9fe439034e8@10.1.72.117:30303"
"enode://01f76fa0561eca2b9a7e224378dd854278735f1449793c46ad0c4e79e8775d080c21dcc455be391e90a98153c3b05dcc8935c8440de7b56fe6d67251e33f4e3c@51.15.42.252:30303",
"enode://8d91c8137890d29110b9463882f17ae4e279cd2c90cf56573187ed1c8546fca5f590a9f05e9f108eb1bd91767ed01ede4daad9e001b61727885eaa246ddb39c2@163.172.171.38:30303"
],
"accounts": {
"0000000000000000000000000000000000000001": { "builtin": { "name": "ecrecover", "pricing": { "linear": { "base": 3000, "word": 0 } } } },

View File

@ -21,6 +21,7 @@ use util::*;
use util::using_queue::{UsingQueue, GetAction};
use account_provider::AccountProvider;
use views::{BlockView, HeaderView};
use header::Header;
use state::{State, CleanupMode};
use client::{MiningBlockChainClient, Executive, Executed, EnvInfo, TransactOptions, BlockID, CallAnalytics};
use executive::contract_address;
@ -564,7 +565,16 @@ impl Miner {
let schedule = chain.latest_schedule();
let gas_required = |tx: &SignedTransaction| tx.gas_required(&schedule).into();
let best_block_header: Header = ::rlp::decode(&chain.best_block_header());
transactions.into_iter()
.filter(|tx| match self.engine.verify_transaction_basic(tx, &best_block_header) {
Ok(()) => true,
Err(e) => {
debug!(target: "miner", "Rejected tx {:?} with invalid signature: {:?}", tx.hash(), e);
false
}
}
)
.map(|tx| match origin {
TransactionOrigin::Local | TransactionOrigin::RetractedBlock => {
transaction_queue.add(tx, origin, &fetch_account, &gas_required)

View File

@ -34,16 +34,43 @@ pub const KEY_LENGTH: usize = 32;
pub const KEY_ITERATIONS: usize = 10240;
pub const KEY_LENGTH_AES: usize = KEY_LENGTH / 2;
#[derive(PartialEq, Debug)]
pub enum ScryptError {
// log(N) < r / 16
InvalidN,
// p <= (2^31-1 * 32)/(128 * r)
InvalidP,
}
impl fmt::Display for ScryptError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let s = match *self {
ScryptError::InvalidN => "Invalid N argument of the scrypt encryption" ,
ScryptError::InvalidP => "Invalid p argument of the scrypt encryption",
};
write!(f, "{}", s)
}
}
#[derive(PartialEq, Debug)]
pub enum Error {
Secp(SecpError),
Scrypt(ScryptError),
InvalidMessage,
}
impl From<ScryptError> for Error {
fn from(err: ScryptError) -> Self {
Error::Scrypt(err)
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let s = match *self {
Error::Secp(ref err) => err.to_string(),
Error::Scrypt(ref err) => err.to_string(),
Error::InvalidMessage => "Invalid message".into(),
};
@ -80,13 +107,23 @@ pub fn derive_key_iterations(password: &str, salt: &[u8; 32], c: u32) -> (Vec<u8
(derived_right_bits.to_vec(), derived_left_bits.to_vec())
}
pub fn derive_key_scrypt(password: &str, salt: &[u8; 32], n: u32, p: u32, r: u32) -> (Vec<u8>, Vec<u8>) {
pub fn derive_key_scrypt(password: &str, salt: &[u8; 32], n: u32, p: u32, r: u32) -> Result<(Vec<u8>, Vec<u8>), Error> {
// sanity checks
let log_n = (32 - n.leading_zeros() - 1) as u8;
if log_n as u32 >= r * 16 {
return Err(Error::Scrypt(ScryptError::InvalidN));
}
if p as u64 > ((u32::max_value() as u64 - 1) * 32)/(128 * (r as u64)) {
return Err(Error::Scrypt(ScryptError::InvalidP));
}
let mut derived_key = vec![0u8; KEY_LENGTH];
let scrypt_params = ScryptParams::new(n.trailing_zeros() as u8, r, p);
let scrypt_params = ScryptParams::new(log_n, r, p);
scrypt(password.as_bytes(), salt, &scrypt_params, &mut derived_key);
let derived_right_bits = &derived_key[0..KEY_LENGTH_AES];
let derived_left_bits = &derived_key[KEY_LENGTH_AES..KEY_LENGTH];
(derived_right_bits.to_vec(), derived_left_bits.to_vec())
Ok((derived_right_bits.to_vec(), derived_left_bits.to_vec()))
}
pub fn derive_mac(derived_left_bits: &[u8], cipher_text: &[u8]) -> Vec<u8> {
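
As a quick worked check of the new bounds (numbers are illustrative, following the code above): with the common parameters n = 262144 (2^18), r = 8 and p = 1, log_n is 18, which is below r * 16 = 128, and the p limit of ((2^32 - 2) * 32) / (128 * 8), roughly 1.3e8, is comfortably satisfied, so derivation proceeds. With r = 1, any n of 65536 (2^16) or more is rejected as InvalidN, since log_n reaches 16 = r * 16.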

View File

@ -113,7 +113,7 @@ impl Crypto {
let (derived_left_bits, derived_right_bits) = match self.kdf {
Kdf::Pbkdf2(ref params) => crypto::derive_key_iterations(password, &params.salt, params.c),
Kdf::Scrypt(ref params) => crypto::derive_key_scrypt(password, &params.salt, params.n, params.p, params.r),
Kdf::Scrypt(ref params) => try!(crypto::derive_key_scrypt(password, &params.salt, params.n, params.p, params.r)),
};
let mac = crypto::derive_mac(&derived_right_bits, &self.ciphertext).keccak256();

View File

@ -20,6 +20,7 @@ use std::collections::HashMap;
use time;
use ethkey::Address;
use {json, SafeAccount, Error};
use json::UUID;
use super::KeyDirectory;
const IGNORED_FILES: &'static [&'static str] = &["thumbs.db", "address_book.json"];
@ -112,7 +113,7 @@ impl KeyDirectory for DiskDirectory {
// build file path
let filename = account.filename.as_ref().cloned().unwrap_or_else(|| {
let timestamp = time::strftime("%Y-%m-%dT%H-%M-%S", &time::now_utc()).expect("Time-format string is valid.");
format!("UTC--{}Z--{:?}", timestamp, account.address)
format!("UTC--{}Z--{}", timestamp, UUID::from(account.id))
});
// update account filename

View File

@ -68,11 +68,13 @@ if [ "$BRANCH" == "master" ]; then
fi
echo "*** Updating cargo parity-ui-precompiled#$PRECOMPILED_HASH"
git submodule update
cargo update -p parity-ui-precompiled
# --precise "$PRECOMPILED_HASH"
echo "*** Committing updated files"
git add .
git add js
git add Cargo.lock
git commit -m "[ci skip] js-precompiled $UTCDATE"
git push origin HEAD:refs/heads/$BRANCH 2>$GITLOG

View File

@ -162,3 +162,11 @@ export function inTraceFilter (filterObject) {
return filterObject;
}
export function inTraceType (whatTrace) {
if (isString(whatTrace)) {
return [whatTrace];
}
return whatTrace;
}

View File

@ -16,7 +16,7 @@
import BigNumber from 'bignumber.js';
import { inAddress, inBlockNumber, inData, inFilter, inHex, inNumber10, inNumber16, inOptions } from './input';
import { inAddress, inBlockNumber, inData, inFilter, inHex, inNumber10, inNumber16, inOptions, inTraceType } from './input';
import { isAddress } from '../../../test/types';
describe('api/format/input', () => {
@ -242,4 +242,16 @@ describe('api/format/input', () => {
});
});
});
describe('inTraceType', () => {
it('returns array of types as is', () => {
const types = ['vmTrace', 'trace', 'stateDiff'];
expect(inTraceType(types)).to.deep.equal(types);
});
it('formats single string type into array', () => {
const type = 'vmTrace';
expect(inTraceType(type)).to.deep.equal([type]);
});
});
});

View File

@ -254,3 +254,25 @@ export function outTrace (trace) {
return trace;
}
export function outTraces (traces) {
if (traces) {
return traces.map(outTrace);
}
return traces;
}
export function outTraceReplay (trace) {
if (trace) {
Object.keys(trace).forEach((key) => {
switch (key) {
case 'trace':
trace[key] = outTraces(trace[key]);
break;
}
});
}
return trace;
}
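
A small illustration of the object shape outTraceReplay expects (layout assumed from the code above rather than from actual RPC output):

// only the trace array is re-mapped through outTraces; all other keys pass through unchanged
const replay = { output: '0x', trace: [], vmTrace: null, stateDiff: null };
outTraceReplay(replay); // same object back, with replay.trace run through outTraces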

View File

@ -20,15 +20,25 @@ describe('ethapi.trace', () => {
const ethapi = createHttpApi();
describe('block', () => {
it('returns the latest block', () => {
return ethapi.trace.block().then((block) => {
expect(block).to.be.ok;
it('returns the latest block traces', () => {
return ethapi.trace.block().then((traces) => {
expect(traces).to.be.ok;
});
});
it('returns a specified block', () => {
return ethapi.trace.block('0x65432').then((block) => {
expect(block).to.be.ok;
it('returns traces for a specified block', () => {
return ethapi.trace.block('0x65432').then((traces) => {
expect(traces).to.be.ok;
});
});
});
describe('replayTransaction', () => {
it('returns traces for a specific transaction', () => {
return ethapi.eth.getBlockByNumber().then((latestBlock) => {
return ethapi.trace.replayTransaction(latestBlock.transactions[0]).then((traces) => {
expect(traces).to.be.ok;
});
});
});
});

View File

@ -14,35 +14,53 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { inBlockNumber, inHex, inNumber16, inTraceFilter } from '../../format/input';
import { outTrace } from '../../format/output';
import { inBlockNumber, inData, inHex, inNumber16, inOptions, inTraceFilter, inTraceType } from '../../format/input';
import { outTraces, outTraceReplay } from '../../format/output';
export default class Trace {
constructor (transport) {
this._transport = transport;
}
block (blockNumber = 'latest') {
return this._transport
.execute('trace_block', inBlockNumber(blockNumber))
.then(outTraces);
}
call (options, blockNumber = 'latest', whatTrace = ['trace']) {
return this._transport
.execute('trace_call', inOptions(options), inBlockNumber(blockNumber), inTraceType(whatTrace))
.then(outTraceReplay);
}
filter (filterObj) {
return this._transport
.execute('trace_filter', inTraceFilter(filterObj))
.then(traces => traces.map(trace => outTrace(trace)));
.then(outTraces);
}
get (txHash, position) {
return this._transport
.execute('trace_get', inHex(txHash), inNumber16(position))
.then(trace => outTrace(trace));
.then(outTraces);
}
rawTransaction (data, whatTrace = ['trace']) {
return this._transport
.execute('trace_rawTransaction', inData(data), inTraceType(whatTrace))
.then(outTraceReplay);
}
replayTransaction (txHash, whatTrace = ['trace']) {
return this._transport
.execute('trace_replayTransaction', txHash, inTraceType(whatTrace))
.then(outTraceReplay);
}
transaction (txHash) {
return this._transport
.execute('trace_transaction', inHex(txHash))
.then(traces => traces.map(trace => outTrace(trace)));
}
block (blockNumber = 'latest') {
return this._transport
.execute('trace_block', inBlockNumber(blockNumber))
.then(traces => traces.map(trace => outTrace(trace)));
.then(outTraces);
}
}
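
For orientation, a minimal usage sketch of the methods added above, assuming an attached api object that exposes this class as api.trace (the transaction hash and call options are placeholders):

const txHash = '0x...'; // placeholder transaction hash

// replay an existing transaction, asking for call traces and state diffs
api.trace
  .replayTransaction(txHash, ['trace', 'stateDiff'])
  .then((replay) => console.log(replay.trace, replay.stateDiff));

// trace a call without submitting it
api.trace
  .call({ from: '0x...', to: '0x...', data: '0x' }, 'latest', ['vmTrace'])
  .then((replay) => console.log(replay.vmTrace));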

View File

@ -18,6 +18,7 @@ import DappReg from './dappreg';
import Registry from './registry';
import SignatureReg from './signaturereg';
import TokenReg from './tokenreg';
import GithubHint from './githubhint';
let instance = null;
@ -30,6 +31,7 @@ export default class Contracts {
this._dappreg = new DappReg(api, this._registry);
this._signaturereg = new SignatureReg(api, this._registry);
this._tokenreg = new TokenReg(api, this._registry);
this._githubhint = new GithubHint(api, this._registry);
}
get registry () {
@ -48,6 +50,10 @@ export default class Contracts {
return this._tokenreg;
}
get githubHint () {
return this._githubhint;
}
static create (api) {
return new Contracts(api);
}

View File

@ -0,0 +1,32 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
export default class GithubHint {
constructor (api, registry) {
this._api = api;
this._registry = registry;
this.getInstance();
}
getContract () {
return this._registry.getContract('githubhint');
}
getInstance () {
return this.getContract().instance;
}
}

View File

@ -42,7 +42,7 @@ export default class Registry {
});
}
getContractInstance (_name) {
getContract (_name) {
const name = _name.toLowerCase();
return new Promise((resolve, reject) => {
@ -54,13 +54,19 @@ export default class Registry {
this
.lookupAddress(name)
.then((address) => {
this._contracts[name] = this._api.newContract(abis[name], address).instance;
this._contracts[name] = this._api.newContract(abis[name], address);
resolve(this._contracts[name]);
})
.catch(reject);
});
}
getContractInstance (_name) {
return this
.getContract(_name)
.then((contract) => contract.instance);
}
lookupAddress (_name) {
const name = _name.toLowerCase();
const sha3 = this._api.util.sha3(name);
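
A short sketch of how the split accessors differ for callers, given an instance of this Registry wrapper as registry (the 'tokenreg' lookup and tokenCount call follow the TokenReg wrapper elsewhere in this commit):

registry.getContract('tokenreg').then((contract) => {
  console.log(contract.address);       // the full contract object
});

registry.getContractInstance('tokenreg').then((instance) => {
  return instance.tokenCount.call();   // just the instance, ready for calls
});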

View File

@ -22,8 +22,12 @@ export default class TokenReg {
this.getInstance();
}
getContract () {
return this._registry.getContract('tokenreg');
}
getInstance () {
return this._registry.getContractInstance('tokenreg');
return this.getContract().instance;
}
tokenCount () {

View File

@ -28,26 +28,26 @@ export function attachInterface () {
return Promise
.all([
registry.getAddress.call({}, [api.util.sha3('githubhint'), 'A']),
api.eth.accounts(),
api.parity.accounts()
]);
})
.then(([address, addresses, accountsInfo]) => {
accountsInfo = accountsInfo || {};
.then(([address, accountsInfo]) => {
console.log(`githubhint was found at ${address}`);
const contract = api.newContract(abis.githubhint, address);
const accounts = addresses.reduce((obj, address) => {
const info = accountsInfo[address] || {};
const accounts = Object
.keys(accountsInfo)
.filter((address) => accountsInfo[address].uuid)
.reduce((obj, address) => {
const account = accountsInfo[address];
return Object.assign(obj, {
[address]: {
address,
name: info.name,
uuid: info.uuid
}
});
}, {});
return Object.assign(obj, {
[address]: {
address,
name: account.name
}
});
}, {});
const fromAddress = Object.keys(accounts)[0];
return {

View File

@ -49,3 +49,15 @@
padding-bottom: 0 !important;
}
}
.warning {
background: #f80;
bottom: 0;
color: #fff;
left: 0;
opacity: 1;
padding: 1.5em;
position: fixed;
right: 50%;
z-index: 100;
}

View File

@ -53,6 +53,7 @@ export default class Application extends Component {
};
render () {
const { api } = window.parity;
const {
actions,
accounts, contacts,
@ -60,9 +61,11 @@ export default class Application extends Component {
lookup,
events
} = this.props;
let warning = null;
return (
<div>
{ warning }
<div className={ styles.header }>
<h1>RΞgistry</h1>
<Accounts { ...accounts } actions={ actions.accounts } />
@ -70,13 +73,11 @@ export default class Application extends Component {
{ contract && fee ? (
<div>
<Lookup { ...lookup } accounts={ accounts.all } contacts={ contacts } actions={ actions.lookup } />
{ this.renderActions() }
<Events { ...events } accounts={ accounts.all } contacts={ contacts } actions={ actions.events } />
<p className={ styles.address }>
The Registry is provided by the contract at <code>{ contract.address }.</code>
</p>
<div className={ styles.warning }>
WARNING: The name registry is experimental. Please ensure that you understand the risks, benefits & consequences of registering a name before doing so. A non-refundable fee of { api.util.fromWei(fee).toFormat(3) }<small>ETH</small> is required for all registrations.
</div>
</div>
) : (
<CircularProgress size={ 60 } />

View File

@ -19,18 +19,16 @@ import { api } from '../parity';
export const set = (addresses) => ({ type: 'addresses set', addresses });
export const fetch = () => (dispatch) => {
return Promise
.all([
api.eth.accounts(),
api.parity.accounts()
])
.then(([ accounts, data ]) => {
data = data || {};
const addresses = Object.keys(data)
.filter((address) => data[address] && !data[address].meta.deleted)
return api.parity
.accounts()
.then((accountsInfo) => {
const addresses = Object
.keys(accountsInfo)
.filter((address) => accountsInfo[address] && !accountsInfo[address].meta.deleted)
.map((address) => ({
...data[address], address,
isAccount: accounts.includes(address)
...accountsInfo[address],
address,
isAccount: !!accountsInfo[address].uuid
}));
dispatch(set(addresses));
})
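
The same filtering idea, condensed: parity_accounts returns local accounts as well as plain address-book entries, and (as the filters in this commit assume) only real accounts carry a uuid. The sample data below is illustrative:

const accountsInfo = {
  '0x1111111111111111111111111111111111111111': { name: 'Local', uuid: 'uuid-1', meta: {} },
  '0x2222222222222222222222222222222222222222': { name: 'Watched', meta: {} }
};

const addresses = Object
  .keys(accountsInfo)
  .filter((address) => accountsInfo[address] && !accountsInfo[address].meta.deleted)
  .map((address) => ({
    ...accountsInfo[address],
    address,
    isAccount: !!accountsInfo[address].uuid
  }));
// the first entry ends up with isAccount: true, the second with isAccount: false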

View File

@ -146,7 +146,7 @@ export default class Import extends Component {
}
sortFunctions = (a, b) => {
return a.name.localeCompare(b.name);
return (a.name || '').localeCompare(b.name || '');
}
countFunctions () {

View File

@ -49,26 +49,26 @@ export function attachInterface (callback) {
return Promise
.all([
registry.getAddress.call({}, [api.util.sha3('signaturereg'), 'A']),
api.eth.accounts(),
api.parity.accounts()
]);
})
.then(([address, addresses, accountsInfo]) => {
accountsInfo = accountsInfo || {};
.then(([address, accountsInfo]) => {
console.log(`signaturereg was found at ${address}`);
const contract = api.newContract(abis.signaturereg, address);
const accounts = addresses.reduce((obj, address) => {
const info = accountsInfo[address] || {};
const accounts = Object
.keys(accountsInfo)
.filter((address) => accountsInfo[address].uuid)
.reduce((obj, address) => {
const info = accountsInfo[address] || {};
return Object.assign(obj, {
[address]: {
address,
name: info.name || 'Unnamed',
uuid: info.uuid
}
});
}, {});
return Object.assign(obj, {
[address]: {
address,
name: info.name || 'Unnamed'
}
});
}, {});
const fromAddress = Object.keys(accounts)[0];
return {

View File

@ -35,22 +35,17 @@ export const setSelectedAccount = (address) => ({
});
export const loadAccounts = () => (dispatch) => {
Promise
.all([
api.eth.accounts(),
api.parity.accounts()
])
.then(([ accounts, accountsInfo ]) => {
accountsInfo = accountsInfo || {};
const accountsList = accounts
.map(address => ({
api.parity
.accounts()
.then((accountsInfo) => {
const accountsList = Object
.keys(accountsInfo)
.filter((address) => accountsInfo[address].uuid)
.map((address) => ({
...accountsInfo[address],
address
}));
console.log('accounts', accountsList);
dispatch(setAccounts(accountsList));
dispatch(setAccountsInfo(accountsInfo));
dispatch(setSelectedAccount(accountsList[0].address));

View File

@ -42,12 +42,9 @@ export default class QueryAction extends Component {
onClose: PropTypes.func.isRequired,
handleQueryToken: PropTypes.func.isRequired,
handleQueryMetaLookup: PropTypes.func.isRequired,
data: PropTypes.object,
notFound: PropTypes.bool,
metaLoading: PropTypes.bool,
metaData: PropTypes.object
notFound: PropTypes.bool
}
state = initState;
@ -131,11 +128,8 @@ export default class QueryAction extends Component {
return (
<Token
fullWidth
handleMetaLookup={ this.props.handleQueryMetaLookup }
isMetaLoading={ this.props.metaLoading }
meta={ this.props.metaData }
{ ...data }
/>
tla={ data.tla }
/>
);
}

View File

@ -16,8 +16,6 @@
import { getTokenTotalSupply } from '../utils';
const { sha3, bytesToHex } = window.parity.api.util;
export const SET_REGISTER_SENDING = 'SET_REGISTER_SENDING';
export const setRegisterSending = (isSending) => ({
type: SET_REGISTER_SENDING,
@ -41,8 +39,6 @@ export const registerCompleted = () => ({
});
export const registerToken = (tokenData) => (dispatch, getState) => {
console.log('registering token', tokenData);
const state = getState();
const contractInstance = state.status.contract.instance;
const fee = state.status.contract.fee;
@ -83,8 +79,6 @@ export const registerToken = (tokenData) => (dispatch, getState) => {
})
.then((gasEstimate) => {
options.gas = gasEstimate.mul(1.2).toFixed(0);
console.log(`transfer: gas estimated as ${gasEstimate.toFixed(0)} setting to ${options.gas}`);
return contractInstance.register.postTransaction(options, values);
})
.then((result) => {
@ -183,34 +177,3 @@ export const queryToken = (key, query) => (dispatch, getState) => {
dispatch(setQueryLoading(false));
});
};
export const queryTokenMeta = (id, query) => (dispatch, getState) => {
console.log('loading token meta', query);
const state = getState();
const contractInstance = state.status.contract.instance;
const key = sha3(query);
const startDate = Date.now();
dispatch(setQueryMetaLoading(true));
contractInstance
.meta
.call({}, [ id, key ])
.then((value) => {
const meta = {
key, query,
value: value.find(v => v !== 0) ? bytesToHex(value) : null
};
dispatch(setQueryMeta(meta));
setTimeout(() => {
dispatch(setQueryMetaLoading(false));
}, 500 - (Date.now() - startDate));
})
.catch((e) => {
console.error('load meta query error', e);
});
};

View File

@ -37,7 +37,6 @@ export default class Actions extends Component {
handleQueryToken: PropTypes.func.isRequired,
handleQueryClose: PropTypes.func.isRequired,
handleQueryMetaLookup: PropTypes.func.isRequired,
query: PropTypes.object.isRequired
};
@ -82,7 +81,6 @@ export default class Actions extends Component {
show={ this.state.show[ QUERY_ACTION ] }
onClose={ this.onQueryClose }
handleQueryToken={ this.props.handleQueryToken }
handleQueryMetaLookup={ this.props.handleQueryMetaLookup }
{ ...this.props.query } />
</div>
);

View File

@ -19,7 +19,7 @@ import { connect } from 'react-redux';
import Actions from './component';
import { registerToken, registerReset, queryToken, queryReset, queryTokenMeta } from './actions';
import { registerToken, registerReset, queryToken, queryReset } from './actions';
class TokensContainer extends Component {
@ -49,9 +49,6 @@ const mapDispatchToProps = (dispatch) => {
},
handleQueryClose: () => {
dispatch(queryReset());
},
handleQueryMetaLookup: (id, query) => {
dispatch(queryTokenMeta(id, query));
}
};
};

View File

@ -19,4 +19,17 @@
display: flex;
flex-direction: column;
align-items: center;
padding-bottom: 10em;
}
.warning {
background: #f80;
bottom: 0;
color: #fff;
left: 0;
opacity: 1;
padding: 1.5em;
position: fixed;
right: 50%;
z-index: 100;
}

View File

@ -17,6 +17,8 @@
import React, { Component, PropTypes } from 'react';
import getMuiTheme from 'material-ui/styles/getMuiTheme';
import { api } from '../parity';
import Loading from '../Loading';
import Status from '../Status';
import Tokens from '../Tokens';
@ -59,6 +61,9 @@ export default class Application extends Component {
<Actions />
<Tokens />
<div className={ styles.warning }>
WARNING: The token registry is experimental. Please ensure that you understand the steps, risks, benefits & consequences of registering a token before doing so. A non-refundable fee of { api.util.fromWei(contract.fee).toFormat(3) }<small>ETH</small> is required for all registrations.
</div>
</div>
);
}

View File

@ -14,11 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import {
registry as registryAbi,
tokenreg as tokenregAbi,
githubhint as githubhintAbi
} from '../../../contracts/abi';
import Contracts from '../../../contracts';
import { loadToken, setTokenPending, deleteToken, setTokenData } from '../Tokens/actions';
@ -34,43 +30,31 @@ export const FIND_CONTRACT = 'FIND_CONTRACT';
export const loadContract = () => (dispatch) => {
dispatch(setLoading(true));
api.parity
.registryAddress()
.then((registryAddress) => {
console.log(`registry found at ${registryAddress}`);
const registry = api.newContract(registryAbi, registryAddress).instance;
return Promise.all([
registry.getAddress.call({}, [api.util.sha3('tokenreg'), 'A']),
registry.getAddress.call({}, [api.util.sha3('githubhint'), 'A'])
]);
})
.then(([ tokenregAddress, githubhintAddress ]) => {
console.log(`tokenreg was found at ${tokenregAddress}`);
const tokenregContract = api
.newContract(tokenregAbi, tokenregAddress);
const githubhintContract = api
.newContract(githubhintAbi, githubhintAddress);
const { tokenReg, githubHint } = new Contracts(api);
return Promise
.all([
tokenReg.getContract(),
githubHint.getContract()
])
.then(([ tokenRegContract, githubHintContract ]) => {
dispatch(setContractDetails({
address: tokenregAddress,
instance: tokenregContract.instance,
raw: tokenregContract
address: tokenRegContract.address,
instance: tokenRegContract.instance,
raw: tokenRegContract
}));
dispatch(setGithubhintDetails({
address: githubhintAddress,
instance: githubhintContract.instance,
raw: githubhintContract
address: githubHintContract.address,
instance: githubHintContract.instance,
raw: githubHintContract
}));
dispatch(loadContractDetails());
dispatch(subscribeEvents());
})
.catch((error) => {
console.error('loadContract error', error);
throw error;
});
};
@ -78,7 +62,7 @@ export const LOAD_CONTRACT_DETAILS = 'LOAD_CONTRACT_DETAILS';
export const loadContractDetails = () => (dispatch, getState) => {
const state = getState();
const instance = state.status.contract.instance;
const { instance } = state.status.contract;
Promise
.all([
@ -87,8 +71,6 @@ export const loadContractDetails = () => (dispatch, getState) => {
instance.fee.call()
])
.then(([accounts, owner, fee]) => {
console.log(`owner as ${owner}, fee set at ${fee.toFormat()}`);
const isOwner = accounts.filter(a => a === owner).length > 0;
dispatch(setContractDetails({
@ -119,14 +101,14 @@ export const setGithubhintDetails = (details) => ({
export const subscribeEvents = () => (dispatch, getState) => {
const state = getState();
const contract = state.status.contract.raw;
const { raw } = state.status.contract;
const previousSubscriptionId = state.status.subscriptionId;
if (previousSubscriptionId) {
contract.unsubscribe(previousSubscriptionId);
raw.unsubscribe(previousSubscriptionId);
}
contract
raw
.subscribe(null, {
fromBlock: 'latest',
toBlock: 'pending',
@ -187,7 +169,7 @@ export const subscribeEvents = () => (dispatch, getState) => {
));
}
console.log('new log event', log);
console.warn('unknown log event', log);
});
})
.then((subscriptionId) => {

View File

@ -25,17 +25,15 @@ const initialState = {
isLoading: true,
subscriptionId: null,
contract: {
addres: null,
address: null,
instance: null,
raw: null,
owner: null,
isOwner: false,
fee: null
},
githubhint: {
address: null,
instance: null,
raw: null
instance: null
}
};

View File

@ -31,6 +31,12 @@
.title {
font-size: 3rem;
font-weight: 300;
margin-top: 0;
margin: 0;
text-transform: uppercase;
}
.byline {
font-size: 1.25em;
opacity: 0.75;
margin: 0 0 1.75em 0;
}

View File

@ -29,17 +29,12 @@ export default class Status extends Component {
};
render () {
const { address, fee } = this.props;
const { fee } = this.props;
return (
<div className={ styles.status }>
<h1 className={ styles.title }>Token Registry</h1>
<Chip
isAddress
value={ address }
label='Address' />
<h3 className={ styles.byline }>A global registry of all recognised tokens on the network</h3>
<Chip
isAddress={ false }
value={ api.util.fromWei(fee).toFixed(3) + 'ETH' }

View File

@ -14,4 +14,4 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
export default from './token';
export default from './tokenContainer';

View File

@ -57,15 +57,28 @@ export default class Token extends Component {
isLoading: PropTypes.bool,
isPending: PropTypes.bool,
isTokenOwner: PropTypes.bool.isRequired,
isContractOwner: PropTypes.bool.isRequired,
isContractOwner: PropTypes.bool,
fullWidth: PropTypes.bool
};
state = {
metaKeyIndex: 0
static defaultProps = {
isContractOwner: false
};
state = {
metaKeyIndex: 0,
showMeta: false
};
shouldComponentUpdate (nextProps) {
if (nextProps.isLoading && this.props.isLoading) {
return false;
}
return true;
}
render () {
const { isLoading, fullWidth } = this.props;
@ -237,7 +250,12 @@ export default class Token extends Component {
}
renderMeta (meta) {
const isMetaLoading = this.props.isMetaLoading;
const { isMetaLoading } = this.props;
const { showMeta } = this.state;
if (!showMeta) {
return null;
}
if (isMetaLoading) {
return (<div>
@ -331,6 +349,7 @@ export default class Token extends Component {
const key = metaDataKeys[keyIndex].value;
const index = this.props.index;
this.setState({ showMeta: true });
this.props.handleMetaLookup(index, key);
}

View File

@ -0,0 +1,73 @@
// Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux';
import Token from './token';
import { queryTokenMeta, unregisterToken, addTokenMeta } from '../actions';
class TokenContainer extends Component {
static propTypes = {
handleMetaLookup: PropTypes.func.isRequired,
handleUnregister: PropTypes.func.isRequired,
handleAddMeta: PropTypes.func.isRequired,
tla: PropTypes.string.isRequired
};
render () {
return (
<Token
{ ...this.props }
/>
);
}
}
const mapStateToProps = (_, initProps) => {
const { tla } = initProps;
return (state) => {
const { isOwner } = state.status.contract;
const { tokens } = state.tokens;
const token = tokens.find((t) => t.tla === tla);
return { ...token, isContractOwner: isOwner };
};
};
const mapDispatchToProps = (dispatch) => {
return {
handleMetaLookup: (index, query) => {
dispatch(queryTokenMeta(index, query));
},
handleUnregister: (index) => {
dispatch(unregisterToken(index));
},
handleAddMeta: (index, key, value) => {
dispatch(addTokenMeta(index, key, value));
}
};
};
export default connect(
mapStateToProps,
mapDispatchToProps
)(TokenContainer);

View File

@ -67,8 +67,6 @@ export const deleteToken = (index) => ({
});
export const loadTokens = () => (dispatch, getState) => {
console.log('loading tokens...');
const state = getState();
const contractInstance = state.status.contract.instance;
@ -79,7 +77,6 @@ export const loadTokens = () => (dispatch, getState) => {
.call()
.then((count) => {
const tokenCount = parseInt(count);
console.log(`token count: ${tokenCount}`);
dispatch(setTokenCount(tokenCount));
for (let i = 0; i < tokenCount; i++) {
@ -94,8 +91,6 @@ export const loadTokens = () => (dispatch, getState) => {
};
export const loadToken = (index) => (dispatch, getState) => {
console.log('loading token', index);
const state = getState();
const contractInstance = state.status.contract.instance;
@ -144,7 +139,7 @@ export const loadToken = (index) => (dispatch, getState) => {
}
data.totalSupply = data.totalSupply.toNumber();
console.log(`token loaded: #${index}`, data);
dispatch(setTokenData(index, data));
dispatch(setTokenLoading(index, false));
})
@ -159,8 +154,6 @@ export const loadToken = (index) => (dispatch, getState) => {
};
export const queryTokenMeta = (index, query) => (dispatch, getState) => {
console.log('loading token meta', index, query);
const state = getState();
const contractInstance = state.status.contract.instance;
@ -176,7 +169,6 @@ export const queryTokenMeta = (index, query) => (dispatch, getState) => {
value: value.find(v => v !== 0) ? bytesToHex(value) : null
};
console.log(`token meta loaded: #${index}`, value);
dispatch(setTokenMeta(index, meta));
setTimeout(() => {
@ -189,8 +181,6 @@ export const queryTokenMeta = (index, query) => (dispatch, getState) => {
};
export const addTokenMeta = (index, key, value) => (dispatch, getState) => {
console.log('add token meta', index, key, value);
const state = getState();
const contractInstance = state.status.contract.instance;
const token = state.tokens.tokens.find(t => t.index === index);
@ -203,8 +193,6 @@ export const addTokenMeta = (index, key, value) => (dispatch, getState) => {
.estimateGas(options, values)
.then((gasEstimate) => {
options.gas = gasEstimate.mul(1.2).toFixed(0);
console.log(`addTokenMeta: gas estimated as ${gasEstimate.toFixed(0)} setting to ${options.gas}`);
return contractInstance.setMeta.postTransaction(options, values);
})
.catch((e) => {
@ -213,8 +201,6 @@ export const addTokenMeta = (index, key, value) => (dispatch, getState) => {
};
export const addGithubhintURL = (from, key, url) => (dispatch, getState) => {
console.log('add githubhint url', key, url);
const state = getState();
const contractInstance = state.status.githubhint.instance;
@ -227,8 +213,6 @@ export const addGithubhintURL = (from, key, url) => (dispatch, getState) => {
.estimateGas(options, values)
.then((gasEstimate) => {
options.gas = gasEstimate.mul(1.2).toFixed(0);
console.log(`transfer: gas estimated as ${gasEstimate.toFixed(0)} setting to ${options.gas}`);
return contractInstance.hintURL.postTransaction(options, values);
})
.catch((e) => {
@ -237,24 +221,20 @@ export const addGithubhintURL = (from, key, url) => (dispatch, getState) => {
};
export const unregisterToken = (index) => (dispatch, getState) => {
console.log('unregistering token', index);
const state = getState();
const contractInstance = state.status.contract.instance;
const { contract } = getState().status;
const { instance, owner } = contract;
const values = [ index ];
const options = {
from: state.accounts.selected.address
from: owner
};
contractInstance
instance
.unregister
.estimateGas(options, values)
.then((gasEstimate) => {
options.gas = gasEstimate.mul(1.2).toFixed(0);
console.log(`transfer: gas estimated as ${gasEstimate.toFixed(0)} setting to ${options.gas}`);
return contractInstance.unregister.postTransaction(options, values);
return instance.unregister.postTransaction(options, values);
})
.catch((e) => {
console.error(`unregisterToken #${index} error`, e);

View File

@ -19,16 +19,13 @@ import { connect } from 'react-redux';
import Tokens from './tokens';
import { loadTokens, queryTokenMeta, unregisterToken, addTokenMeta } from './actions';
import { loadTokens } from './actions';
class TokensContainer extends Component {
static propTypes = {
isOwner: PropTypes.bool,
isLoading: PropTypes.bool,
tokens: PropTypes.array,
tokenCount: PropTypes.number,
onLoadTokens: PropTypes.func,
accounts: PropTypes.array
onLoadTokens: PropTypes.func
};
componentDidMount () {
@ -36,7 +33,6 @@ class TokensContainer extends Component {
}
render () {
console.log(this.props);
return (
<Tokens
{ ...this.props }
@ -46,30 +42,19 @@ class TokensContainer extends Component {
}
const mapStateToProps = (state) => {
const { list } = state.accounts;
const { isLoading, tokens, tokenCount } = state.tokens;
const { isLoading, tokens } = state.tokens;
const { isOwner } = state.status.contract;
const filteredTokens = tokens
.filter((token) => token && token.tla)
.map((token) => ({ tla: token.tla, owner: token.owner }));
return { isLoading, tokens, tokenCount, isOwner, accounts: list };
return { isLoading, tokens: filteredTokens };
};
const mapDispatchToProps = (dispatch) => {
return {
onLoadTokens: () => {
dispatch(loadTokens());
},
handleMetaLookup: (index, query) => {
dispatch(queryTokenMeta(index, query));
},
handleUnregister: (index) => {
dispatch(unregisterToken(index));
},
handleAddMeta: (index, key, value) => {
dispatch(addTokenMeta(index, key, value));
}
};
};

View File

@ -23,13 +23,8 @@ import styles from './tokens.css';
export default class Tokens extends Component {
static propTypes = {
handleAddMeta: PropTypes.func.isRequired,
handleUnregister: PropTypes.func.isRequired,
handleMetaLookup: PropTypes.func.isRequired,
isOwner: PropTypes.bool.isRequired,
isLoading: PropTypes.bool.isRequired,
tokens: PropTypes.array,
accounts: PropTypes.array
tokens: PropTypes.array
};
render () {
@ -45,24 +40,12 @@ export default class Tokens extends Component {
}
renderTokens (tokens) {
const { accounts, isOwner } = this.props;
return tokens.map((token, index) => {
if (!token || !token.tla) {
return null;
}
const isTokenOwner = !!accounts.find((account) => account.address === token.owner);
return tokens.map((token) => {
return (
<Token
{ ...token }
handleUnregister={ this.props.handleUnregister }
handleMetaLookup={ this.props.handleMetaLookup }
handleAddMeta={ this.props.handleAddMeta }
key={ index }
isTokenOwner={ isTokenOwner }
isContractOwner={ isOwner } />
key={ token.tla }
tla={ token.tla }
/>
);
});
}

View File

@ -14,9 +14,45 @@
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
import { BlockNumber, Hash, Integer } from '../types';
import { BlockNumber, Data, Hash, Integer } from '../types';
export default {
block: {
desc: 'Returns traces created at given block',
params: [
{
type: BlockNumber,
desc: 'Integer block number, or \'latest\' for the last mined block or \'pending\', \'earliest\' for not yet mined transactions'
}
],
returns: {
type: Array,
desc: 'Block traces'
}
},
call: {
desc: 'Returns traces for a specific call',
params: [
{
type: Object,
desc: 'Call options'
},
{
type: BlockNumber,
desc: 'The blockNumber'
},
{
type: Array,
desc: 'Type of trace, one or more of \'vmTrace\', \'trace\' and/or \'stateDiff\''
}
],
returns: {
type: Array,
desc: 'Block traces'
}
},
filter: {
desc: 'Returns traces matching given filter',
params: [
@ -49,6 +85,42 @@ export default {
}
},
rawTransaction: {
desc: 'Traces a call to eth_sendRawTransaction without making the call, returning the traces',
params: [
{
type: Data,
desc: 'Transaction data'
},
{
type: Array,
desc: 'Type of trace, one or more of \'vmTrace\', \'trace\' and/or \'stateDiff\''
}
],
returns: {
type: Array,
desc: 'Block traces'
}
},
replayTransaction: {
desc: 'Replays a transaction, returning the traces',
params: [
{
type: Hash,
desc: 'Transaction hash'
},
{
type: Array,
desc: 'Type of trace, one or more of \'vmTrace\', \'trace\' and/or \'stateDiff\''
}
],
returns: {
type: Array,
desc: 'Block traces'
}
},
transaction: {
desc: 'Returns all traces of given transaction',
params: [
@ -61,19 +133,5 @@ export default {
type: Array,
desc: 'Traces of given transaction'
}
},
block: {
desc: 'Returns traces created at given block',
params: [
{
type: BlockNumber,
desc: 'Integer block number, or \'latest\' for the last mined block or \'pending\', \'earliest\' for not yet mined transactions'
}
],
returns: {
type: Array,
desc: 'Block traces'
}
}
};

View File

@ -17,4 +17,10 @@
.container {
z-index: 10101 !important;
button {
color: white !important;
margin: 0 !important;
margin-right: -16px !important;
}
}

View File

@ -23,9 +23,12 @@ import { closeErrors } from './actions';
import styles from './errors.css';
const ERROR_REGEX = /-(\d+): (.+)$/;
class Errors extends Component {
static propTypes = {
message: PropTypes.string,
error: PropTypes.object,
visible: PropTypes.bool,
onCloseErrors: PropTypes.func
};
@ -37,22 +40,60 @@ class Errors extends Component {
return null;
}
const text = this.getErrorMessage();
return (
<Snackbar
open
className={ styles.container }
message={ message }
autoHideDuration={ 5000 }
onRequestClose={ onCloseErrors } />
open
action='close'
autoHideDuration={ 60000 }
message={ text }
onActionTouchTap={ onCloseErrors }
onRequestClose={ this.onRequestClose }
bodyStyle={ {
whiteSpace: 'pre-line',
height: 'auto'
} }
contentStyle={ {
display: 'flex',
flexDirection: 'row',
lineHeight: '1.5em',
padding: '0.75em 0',
alignItems: 'center'
} }
/>
);
}
getErrorMessage = () => {
const { message, error } = this.props;
if (!error.text && !ERROR_REGEX.test(message)) {
return message;
}
const matches = ERROR_REGEX.exec(message);
const code = error.code || parseInt(matches[1]) * -1;
const text = error.text || matches[2];
return `[${code}] ${text}`;
}
onRequestClose = (reason) => {
if (reason === 'timeout') {
this.props.onCloseErrors();
}
}
}
function mapStateToProps (state) {
const { message, visible } = state.errors;
const { message, error, visible } = state.errors;
return {
message,
error,
visible
};
}
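
For illustration, how getErrorMessage decomposes a typical RPC error string when no structured error object is attached (the sample message is made up):

const ERROR_REGEX = /-(\d+): (.+)$/;
const message = 'Error: -32601: Method not found';
const matches = ERROR_REGEX.exec(message);
const code = parseInt(matches[1]) * -1;   // -32601
const text = matches[2];                  // 'Method not found'
console.log(`[${code}] ${text}`);         // prints "[-32601] Method not found"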

View File

@ -19,7 +19,8 @@ function newError (state, action) {
return Object.assign({}, state, {
visible: true,
message: error.message
message: error.message,
error
});
}

View File

@ -578,7 +578,7 @@
<key>OVERWRITE_PERMISSIONS</key>
<false/>
<key>VERSION</key>
<string>1.4.3</string>
<string>1.4.4</string>
</dict>
<key>UUID</key>
<string>2DCD5B81-7BAF-4DA1-9251-6274B089FD36</string>

View File

@ -14,7 +14,7 @@ To temporarily disable Parity Wallet (and stop Parity) use:
To completely uninstall Parity Wallet use:
sudo -c /usr/local/libexec/uninstall-parity.sh
sudo /usr/local/libexec/uninstall-parity.sh
Parity is distributed under the terms of the GPL.
Parity is distributed under the terms of the GPL.

View File

@ -10,7 +10,7 @@
!define DESCRIPTION "Fast, light, robust Ethereum implementation"
!define VERSIONMAJOR 1
!define VERSIONMINOR 4
!define VERSIONBUILD 3
!define VERSIONBUILD 4
!define ARGS "--warp"
!define FIRST_START_ARGS "ui --warp --mode=passive"

View File

@ -33,6 +33,8 @@ const MAX_BODIES_TO_REQUEST: usize = 64;
const MAX_RECEPITS_TO_REQUEST: usize = 128;
const SUBCHAIN_SIZE: u64 = 256;
const MAX_ROUND_PARENTS: usize = 32;
const MAX_PARALLEL_SUBCHAIN_DOWNLOAD: usize = 5;
const MAX_REORG_BLOCKS: u64 = 20;
#[derive(Copy, Clone, Eq, PartialEq, Debug)]
/// Downloader state
@ -62,6 +64,14 @@ pub enum BlockRequest {
},
}
/// Indicates sync action
pub enum DownloadAction {
/// Do nothing
None,
/// Reset downloads for all peers
Reset
}
#[derive(Eq, PartialEq, Debug)]
pub enum BlockDownloaderImportError {
/// Imported data is rejected as invalid.
@ -175,11 +185,11 @@ impl BlockDownloader {
}
/// Add new block headers.
pub fn import_headers(&mut self, io: &mut SyncIo, r: &UntrustedRlp, expected_hash: Option<H256>) -> Result<(), BlockDownloaderImportError> {
pub fn import_headers(&mut self, io: &mut SyncIo, r: &UntrustedRlp, expected_hash: Option<H256>) -> Result<DownloadAction, BlockDownloaderImportError> {
let item_count = r.item_count();
if self.state == State::Idle {
trace!(target: "sync", "Ignored unexpected block headers");
return Ok(())
return Ok(DownloadAction::None)
}
if item_count == 0 && (self.state == State::Blocks) {
return Err(BlockDownloaderImportError::Invalid);
@ -188,6 +198,7 @@ impl BlockDownloader {
let mut headers = Vec::new();
let mut hashes = Vec::new();
let mut valid_response = item_count == 0; //empty response is valid
let mut any_known = false;
for i in 0..item_count {
let info: BlockHeader = try!(r.val_at(i).map_err(|e| {
trace!(target: "sync", "Error decoding block header RLP: {:?}", e);
@ -200,6 +211,7 @@ impl BlockDownloader {
valid_response = expected == info.hash()
}
}
any_known = any_known || self.blocks.contains_head(&info.hash());
if self.blocks.contains(&info.hash()) {
trace!(target: "sync", "Skipping existing block header {} ({:?})", number, info.hash());
continue;
@ -245,17 +257,23 @@ impl BlockDownloader {
trace!(target: "sync", "Received {} subchain heads, proceeding to download", headers.len());
self.blocks.reset_to(hashes);
self.state = State::Blocks;
return Ok(DownloadAction::Reset);
}
},
State::Blocks => {
let count = headers.len();
// At least one of the headers must advance the subchain. Otherwise they are all useless.
if count == 0 || !any_known {
trace!(target: "sync", "No useful headers");
return Err(BlockDownloaderImportError::Useless);
}
self.blocks.insert_headers(headers);
trace!(target: "sync", "Inserted {} headers", count);
},
_ => trace!(target: "sync", "Unexpected headers({})", headers.len()),
}
Ok(())
Ok(DownloadAction::None)
}
/// Called by peer once it has new block bodies
@ -324,14 +342,21 @@ impl BlockDownloader {
self.last_imported_hash = p.clone();
trace!(target: "sync", "Searching common header from the last round {} ({})", self.last_imported_block, self.last_imported_hash);
} else {
match io.chain().block_hash(BlockID::Number(self.last_imported_block - 1)) {
Some(h) => {
self.last_imported_block -= 1;
self.last_imported_hash = h;
trace!(target: "sync", "Searching common header in the blockchain {} ({})", self.last_imported_block, self.last_imported_hash);
}
None => {
debug!(target: "sync", "Could not revert to previous block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
let best = io.chain().chain_info().best_block_number;
if best > self.last_imported_block && best - self.last_imported_block > MAX_REORG_BLOCKS {
debug!(target: "sync", "Could not revert to previous ancient block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
self.reset();
} else {
match io.chain().block_hash(BlockID::Number(self.last_imported_block - 1)) {
Some(h) => {
self.last_imported_block -= 1;
self.last_imported_hash = h;
trace!(target: "sync", "Searching common header in the blockchain {} ({})", self.last_imported_block, self.last_imported_hash);
}
None => {
debug!(target: "sync", "Could not revert to previous block, last: {} ({})", self.last_imported_block, self.last_imported_hash);
self.reset();
}
}
}
}
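
To put a number on the new limit (illustrative figures, reading the condition above): with MAX_REORG_BLOCKS = 20, if the chain's best block is 1,000,000 and last_imported_block has walked back to 999,950, the gap of 50 exceeds 20, so the downloader resets instead of stepping back block by block in search of a common ancestor.
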
@ -342,22 +367,26 @@ impl BlockDownloader {
}
/// Find some headers or blocks to download for a peer.
pub fn request_blocks(&mut self, io: &mut SyncIo) -> Option<BlockRequest> {
pub fn request_blocks(&mut self, io: &mut SyncIo, num_active_peers: usize) -> Option<BlockRequest> {
match self.state {
State::Idle => {
self.start_sync_round(io);
return self.request_blocks(io);
if self.state == State::ChainHead {
return self.request_blocks(io, num_active_peers);
}
},
State::ChainHead => {
// Request subchain headers
trace!(target: "sync", "Starting sync with better chain");
// Request MAX_HEADERS_TO_REQUEST - 2 headers apart so that
// MAX_HEADERS_TO_REQUEST would include headers for neighbouring subchains
return Some(BlockRequest::Headers {
start: self.last_imported_hash.clone(),
count: SUBCHAIN_SIZE,
skip: (MAX_HEADERS_TO_REQUEST - 2) as u64,
});
if num_active_peers < MAX_PARALLEL_SUBCHAIN_DOWNLOAD {
// Request subchain headers
trace!(target: "sync", "Starting sync with better chain");
// Request MAX_HEADERS_TO_REQUEST - 2 headers apart so that
// MAX_HEADERS_TO_REQUEST would include headers for neighbouring subchains
return Some(BlockRequest::Headers {
start: self.last_imported_hash.clone(),
count: SUBCHAIN_SIZE,
skip: (MAX_HEADERS_TO_REQUEST - 2) as u64,
});
}
},
State::Blocks => {
// check to see if we need to download any block bodies first

View File

@ -301,11 +301,16 @@ impl BlockCollection {
self.heads.len() == 0 || (self.heads.len() == 1 && self.head.map_or(false, |h| h == self.heads[0]))
}
/// Chech is collection contains a block header.
/// Check if collection contains a block header.
pub fn contains(&self, hash: &H256) -> bool {
self.blocks.contains_key(hash)
}
/// Check if collection contains a block header.
pub fn contains_head(&self, hash: &H256) -> bool {
self.heads.contains(hash)
}
/// Return used heap size.
pub fn heap_size(&self) -> usize {
self.heads.heap_size_of_children()

View File

@ -37,7 +37,7 @@
/// Workflow for `ChainHead` state.
/// In this state we try to get subchain headers with a single `GetBlockHeaders` request.
/// On `NewPeer` / On `Restart`:
/// If peer's total difficulty is higher, request N/M headers with interval M+1 starting from l
/// If peer's total difficulty is higher and there are less than 5 peers downloading, request N/M headers with interval M+1 starting from l
/// On `BlockHeaders(R)`:
/// If R is empty:
/// If l is equal to genesis block hash or l is more than 1000 blocks behind our best hash:
@ -49,8 +49,8 @@
/// Else
/// Set S to R, set s to `Blocks`.
///
///
/// All other messages are ignored.
///
/// Workflow for `Blocks` state.
/// In this state we download block headers and bodies from multiple peers.
/// On `NewPeer` / On `Restart`:
@ -62,7 +62,9 @@
///
/// On `BlockHeaders(R)`:
/// If R is empty remove current peer from P and restart.
/// Validate received headers. For each header find a parent in H or R or the blockchain. Restart if there is a block with unknown parent.
/// Validate received headers:
/// For each header find a parent in H or R or the blockchain. Restart if there is a block with unknown parent.
/// Find at least one header from the received list in S. Restart if there is none.
/// Go to `CollectBlocks`.
///
/// On `BlockBodies(R)`:
@ -98,7 +100,7 @@ use ethcore::snapshot::{ManifestData, RestorationStatus};
use sync_io::SyncIo;
use time;
use super::SyncConfig;
use block_sync::{BlockDownloader, BlockRequest, BlockDownloaderImportError as DownloaderImportError};
use block_sync::{BlockDownloader, BlockRequest, BlockDownloaderImportError as DownloaderImportError, DownloadAction};
use snapshot::{Snapshot, ChunkType};
use rand::{thread_rng, Rng};
use api::{PeerInfo as PeerInfoDigest, WARP_SYNC_PROTOCOL_ID};
@ -306,6 +308,15 @@ impl PeerInfo {
fn is_allowed(&self) -> bool {
self.confirmation != ForkConfirmation::Unconfirmed && !self.expired
}
fn reset_asking(&mut self) {
self.asking_blocks.clear();
self.asking_hash = None;
// mark any pending requests as expired
if self.asking != PeerAsking::Nothing && self.is_allowed() {
self.expired = true;
}
}
}
/// Blockchain sync handler.
@ -425,12 +436,7 @@ impl ChainSync {
}
for (_, ref mut p) in &mut self.peers {
if p.block_set != Some(BlockSet::OldBlocks) {
p.asking_blocks.clear();
p.asking_hash = None;
// mark any pending requests as expired
if p.asking != PeerAsking::Nothing && p.is_allowed() {
p.expired = true;
}
p.reset_asking();
}
}
self.state = SyncState::Idle;
@ -641,8 +647,9 @@ impl ChainSync {
self.clear_peer_download(peer_id);
let expected_hash = self.peers.get(&peer_id).and_then(|p| p.asking_hash);
let allowed = self.peers.get(&peer_id).map(|p| p.is_allowed()).unwrap_or(false);
let block_set = self.peers.get(&peer_id).and_then(|p| p.block_set).unwrap_or(BlockSet::NewBlocks);
if !self.reset_peer_asking(peer_id, PeerAsking::BlockHeaders) || expected_hash.is_none() {
if !self.reset_peer_asking(peer_id, PeerAsking::BlockHeaders) || expected_hash.is_none() || !allowed {
trace!(target: "sync", "{}: Ignored unexpected headers, expected_hash = {:?}", peer_id, expected_hash);
self.continue_sync(io);
return Ok(());
@ -687,7 +694,15 @@ impl ChainSync {
self.continue_sync(io);
return Ok(());
},
Ok(()) => (),
Ok(DownloadAction::Reset) => {
// mark all outstanding requests as expired
trace!("Resetting downloads for {:?}", block_set);
for (_, ref mut p) in self.peers.iter_mut().filter(|&(_, ref p)| p.block_set == Some(block_set)) {
p.reset_asking();
}
}
Ok(DownloadAction::None) => {},
}
self.collect_blocks(io, block_set);
@ -979,7 +994,7 @@ impl ChainSync {
return Ok(());
}
self.clear_peer_download(peer_id);
if !self.reset_peer_asking(peer_id, PeerAsking::SnapshotData) || self.state != SyncState::SnapshotData {
if !self.reset_peer_asking(peer_id, PeerAsking::SnapshotData) || (self.state != SyncState::SnapshotData && self.state != SyncState::SnapshotWaiting) {
trace!(target: "sync", "{}: Ignored unexpected snapshot data", peer_id);
self.continue_sync(io);
return Ok(());
@ -1111,6 +1126,7 @@ impl ChainSync {
};
let chain_info = io.chain().chain_info();
let syncing_difficulty = chain_info.pending_total_difficulty;
let num_active_peers = self.peers.values().filter(|p| p.asking != PeerAsking::Nothing).count();
let higher_difficulty = peer_difficulty.map_or(true, |pd| pd > syncing_difficulty);
if force || self.state == SyncState::NewBlocks || higher_difficulty || self.old_blocks.is_some() {
@ -1128,7 +1144,8 @@ impl ChainSync {
let have_latest = io.chain().block_status(BlockID::Hash(peer_latest)) != BlockStatus::Unknown;
if !have_latest && (higher_difficulty || force || self.state == SyncState::NewBlocks) {
// check if got new blocks to download
if let Some(request) = self.new_blocks.request_blocks(io) {
trace!(target: "sync", "Syncing with {}, force={}, td={:?}, our td={}, state={:?}", peer_id, force, peer_difficulty, syncing_difficulty, self.state);
if let Some(request) = self.new_blocks.request_blocks(io, num_active_peers) {
self.request_blocks(io, peer_id, request, BlockSet::NewBlocks);
if self.state == SyncState::Idle {
self.state = SyncState::Blocks;
@ -1137,7 +1154,7 @@ impl ChainSync {
}
}
if let Some(request) = self.old_blocks.as_mut().and_then(|d| d.request_blocks(io)) {
if let Some(request) = self.old_blocks.as_mut().and_then(|d| d.request_blocks(io, num_active_peers)) {
self.request_blocks(io, peer_id, request, BlockSet::OldBlocks);
return;
}

View File

@ -79,14 +79,14 @@ fn empty_blocks() {
fn forked() {
::env_logger::init().ok();
let mut net = TestNet::new(3);
net.peer_mut(0).chain.add_blocks(300, EachBlockWith::Uncle);
net.peer_mut(1).chain.add_blocks(300, EachBlockWith::Uncle);
net.peer_mut(2).chain.add_blocks(300, EachBlockWith::Uncle);
net.peer_mut(0).chain.add_blocks(100, EachBlockWith::Nothing); //fork
net.peer_mut(1).chain.add_blocks(200, EachBlockWith::Uncle);
net.peer_mut(2).chain.add_blocks(200, EachBlockWith::Uncle);
net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Uncle); //fork between 1 and 2
net.peer_mut(2).chain.add_blocks(10, EachBlockWith::Nothing);
net.peer_mut(0).chain.add_blocks(30, EachBlockWith::Uncle);
net.peer_mut(1).chain.add_blocks(30, EachBlockWith::Uncle);
net.peer_mut(2).chain.add_blocks(30, EachBlockWith::Uncle);
net.peer_mut(0).chain.add_blocks(10, EachBlockWith::Nothing); //fork
net.peer_mut(1).chain.add_blocks(20, EachBlockWith::Uncle);
net.peer_mut(2).chain.add_blocks(20, EachBlockWith::Uncle);
net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Uncle); //fork between 1 and 2
net.peer_mut(2).chain.add_blocks(1, EachBlockWith::Nothing);
// peer 1 has the best chain of 601 blocks
let peer1_chain = net.peer(1).chain.numbers.read().clone();
net.sync();
@ -102,12 +102,12 @@ fn forked_with_misbehaving_peer() {
let mut net = TestNet::new(3);
// peer 0 is on a totally different chain with higher total difficulty
net.peer_mut(0).chain = TestBlockChainClient::new_with_extra_data(b"fork".to_vec());
net.peer_mut(0).chain.add_blocks(500, EachBlockWith::Nothing);
net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Nothing);
net.peer_mut(2).chain.add_blocks(100, EachBlockWith::Nothing);
net.peer_mut(0).chain.add_blocks(50, EachBlockWith::Nothing);
net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Nothing);
net.peer_mut(2).chain.add_blocks(10, EachBlockWith::Nothing);
net.peer_mut(1).chain.add_blocks(100, EachBlockWith::Nothing);
net.peer_mut(2).chain.add_blocks(200, EachBlockWith::Uncle);
net.peer_mut(1).chain.add_blocks(10, EachBlockWith::Nothing);
net.peer_mut(2).chain.add_blocks(20, EachBlockWith::Uncle);
// peer 1 should sync to peer 2, others should not change
let peer0_chain = net.peer(0).chain.numbers.read().clone();
let peer2_chain = net.peer(2).chain.numbers.read().clone();

View File

@ -3,7 +3,7 @@ description = "Ethcore utility library"
homepage = "http://ethcore.io"
license = "GPL-3.0"
name = "ethcore-util"
version = "1.4.3"
version = "1.4.4"
authors = ["Ethcore <admin@ethcore.io>"]
build = "build.rs"