Merge pull request #3465 from ethcore/backporting

Backports to beta

Former-commit-id: b44486946a2f1e3188f4181f1022423b0f5ab4e6
Gav Wood 2016-11-16 11:21:43 +08:00 committed by GitHub
commit 7a15ea448d
18 changed files with 344 additions and 165 deletions

View File

@@ -131,7 +131,7 @@ impl<A: Authorization + 'static> server::Handler<HttpStream> for Router<A> {
             StatusCode::NotFound,
             "404 Not Found",
             "Your homepage is not available when Trusted Signer is disabled.",
-            Some("You can still access dapps by writing a correct address, though. Re-enabled Signer to get your homepage back."),
+            Some("You can still access dapps by writing a correct address, though. Re-enable Signer to get your homepage back."),
             self.signer_address.clone(),
         ))
     }

View File

@@ -14,6 +14,7 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
+use std::thread;
 use std::time::Duration;
 use std::io::{Read, Write};
 use std::str::{self, Lines};
@@ -42,8 +43,28 @@ pub fn read_block(lines: &mut Lines, all: bool) -> String {
     block
 }
 
+fn connect(address: &SocketAddr) -> TcpStream {
+    let mut retries = 0;
+    let mut last_error = None;
+    while retries < 10 {
+        retries += 1;
+        let res = TcpStream::connect(address);
+        match res {
+            Ok(stream) => {
+                return stream;
+            },
+            Err(e) => {
+                last_error = Some(e);
+                thread::sleep(Duration::from_millis(retries * 10));
+            }
+        }
+    }
+    panic!("Unable to connect to the server. Last error: {:?}", last_error);
+}
+
 pub fn request(address: &SocketAddr, request: &str) -> Response {
-    let mut req = TcpStream::connect(address).unwrap();
+    let mut req = connect(address);
     req.set_read_timeout(Some(Duration::from_secs(1))).unwrap();
     req.write_all(request.as_bytes()).unwrap();
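The new `connect` helper retries the TCP connection with a linearly growing sleep instead of relying on a fixed start-up delay in the tests (the Signer test change further down drops exactly such a `thread::sleep`). A minimal, generalised sketch of the same retry idea; `retry_with_backoff` and the address used are illustrative, not part of the Parity codebase:

use std::net::TcpStream;
use std::thread;
use std::time::Duration;

/// Retry `op` up to `max_attempts` times, sleeping slightly longer after each failure.
/// Returns the first success, or the last error once the attempts are exhausted.
fn retry_with_backoff<T, E, F>(max_attempts: u64, mut op: F) -> Result<T, E>
    where F: FnMut() -> Result<T, E>
{
    let mut attempt = 0;
    loop {
        attempt += 1;
        match op() {
            Ok(value) => return Ok(value),
            Err(e) if attempt >= max_attempts => return Err(e),
            Err(_) => thread::sleep(Duration::from_millis(attempt * 10)),
        }
    }
}

fn main() {
    // Example: keep trying to reach a server that may still be starting up.
    let stream = retry_with_backoff(10, || TcpStream::connect("127.0.0.1:8180"));
    println!("connected: {}", stream.is_ok());
}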

View File

@@ -19,11 +19,11 @@
 use account_db::{AccountDB, AccountDBMut};
 use snapshot::Error;
 
-use util::{U256, FixedHash, H256, Bytes, HashDB, DBValue, SHA3_EMPTY, SHA3_NULL_RLP};
+use util::{U256, FixedHash, H256, Bytes, HashDB, SHA3_EMPTY, SHA3_NULL_RLP};
 use util::trie::{TrieDB, Trie};
 use rlp::{Rlp, RlpStream, Stream, UntrustedRlp, View};
 
-use std::collections::{HashMap, HashSet};
+use std::collections::HashSet;
 
 // An empty account -- these are replaced with RLP null data for a space optimization.
 const ACC_EMPTY: Account = Account {
@@ -150,7 +150,6 @@ impl Account {
     pub fn from_fat_rlp(
         acct_db: &mut AccountDBMut,
         rlp: UntrustedRlp,
-        code_map: &HashMap<H256, Bytes>,
     ) -> Result<(Self, Option<Bytes>), Error> {
 
         use util::{TrieDBMut, TrieMut};
@@ -177,9 +176,6 @@ impl Account {
             }
             CodeState::Hash => {
                 let code_hash = try!(rlp.val_at(3));
-                if let Some(code) = code_map.get(&code_hash) {
-                    acct_db.emplace(code_hash.clone(), DBValue::from_slice(code));
-                }
 
                 (code_hash, None)
             }
@@ -229,7 +225,7 @@ mod tests {
     use util::{Address, FixedHash, H256, HashDB, DBValue};
     use rlp::{UntrustedRlp, View};
 
-    use std::collections::{HashSet, HashMap};
+    use std::collections::HashSet;
 
     use super::{ACC_EMPTY, Account};
@@ -250,7 +246,7 @@ mod tests {
         let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap();
         let fat_rlp = UntrustedRlp::new(&fat_rlp);
-        assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, &Default::default()).unwrap().0, account);
+        assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap().0, account);
     }
 
     #[test]
@@ -275,7 +271,7 @@ mod tests {
         let fat_rlp = account.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &addr), &mut Default::default()).unwrap();
         let fat_rlp = UntrustedRlp::new(&fat_rlp);
-        assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp, &Default::default()).unwrap().0, account);
+        assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr), fat_rlp).unwrap().0, account);
     }
 
     #[test]
@@ -318,12 +314,11 @@ mod tests {
         let fat_rlp1 = UntrustedRlp::new(&fat_rlp1);
         let fat_rlp2 = UntrustedRlp::new(&fat_rlp2);
 
-        let code_map = HashMap::new();
-        let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2, &code_map).unwrap();
+        let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr2), fat_rlp2).unwrap();
         assert!(maybe_code.is_none());
         assert_eq!(acc, account2);
 
-        let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1, &code_map).unwrap();
+        let (acc, maybe_code) = Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &addr1), fat_rlp1).unwrap();
         assert_eq!(maybe_code, Some(b"this is definitely code".to_vec()));
         assert_eq!(acc, account1);
     }
@@ -332,9 +327,8 @@ mod tests {
     fn encoding_empty_acc() {
         let mut db = get_temp_state_db();
         let mut used_code = HashSet::new();
-        let code_map = HashMap::new();
 
         assert_eq!(ACC_EMPTY.to_fat_rlp(&AccountDB::new(db.as_hashdb(), &Address::default()), &mut used_code).unwrap(), ::rlp::NULL_RLP.to_vec());
-        assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &Address::default()), UntrustedRlp::new(&::rlp::NULL_RLP), &code_map).unwrap(), (ACC_EMPTY, None));
+        assert_eq!(Account::from_fat_rlp(&mut AccountDBMut::new(db.as_hashdb_mut(), &Address::default()), UntrustedRlp::new(&::rlp::NULL_RLP)).unwrap(), (ACC_EMPTY, None));
     }
 }

View File

@@ -389,7 +389,7 @@ pub fn chunk_state<'a>(db: &HashDB, root: &H256, writer: &Mutex<SnapshotWriter +
 pub struct StateRebuilder {
     db: Box<JournalDB>,
     state_root: H256,
-    code_map: HashMap<H256, Bytes>, // maps code hashes to code itself.
+    known_code: HashMap<H256, H256>, // code hashes mapped to first account with this code.
     missing_code: HashMap<H256, Vec<H256>>, // maps code hashes to lists of accounts missing that code.
     bloom: Bloom,
 }
@@ -400,7 +400,7 @@ impl StateRebuilder {
         StateRebuilder {
             db: journaldb::new(db.clone(), pruning, ::db::COL_STATE),
             state_root: SHA3_NULL_RLP,
-            code_map: HashMap::new(),
+            known_code: HashMap::new(),
             missing_code: HashMap::new(),
             bloom: StateDB::load_bloom(&*db),
         }
@@ -419,24 +419,26 @@ impl StateRebuilder {
         let chunk_size = account_fat_rlps.len() / ::num_cpus::get() + 1;
 
         // new code contained within this chunk.
-        let mut chunk_code = HashMap::new();
+        let mut chunk_code = Vec::new();
 
         for (account_chunk, out_pairs_chunk) in account_fat_rlps.chunks(chunk_size).zip(pairs.chunks_mut(chunk_size)) {
-            let code_map = &self.code_map;
-            let status = try!(rebuild_accounts(self.db.as_hashdb_mut(), account_chunk, out_pairs_chunk, code_map));
+            let status = try!(rebuild_accounts(self.db.as_hashdb_mut(), account_chunk, out_pairs_chunk, &self.known_code));
             chunk_code.extend(status.new_code);
+
+            // update missing code.
             for (addr_hash, code_hash) in status.missing_code {
                 self.missing_code.entry(code_hash).or_insert_with(Vec::new).push(addr_hash);
             }
         }
 
         // patch up all missing code. must be done after collecting all new missing code entries.
-        for (code_hash, code) in chunk_code {
+        for (code_hash, code, first_with) in chunk_code {
             for addr_hash in self.missing_code.remove(&code_hash).unwrap_or_else(Vec::new) {
                 let mut db = AccountDBMut::from_hash(self.db.as_hashdb_mut(), addr_hash);
                 db.emplace(code_hash, DBValue::from_slice(&code));
             }
-            self.code_map.insert(code_hash, code);
+            self.known_code.insert(code_hash, first_with);
         }
 
         let backing = self.db.backing().clone();
@@ -482,7 +484,8 @@ impl StateRebuilder {
 #[derive(Default)]
 struct RebuiltStatus {
-    new_code: Vec<(H256, Bytes)>, // new code that's become available.
+    // new code that's become available. (code_hash, code, addr_hash)
+    new_code: Vec<(H256, Bytes, H256)>,
     missing_code: Vec<(H256, H256)>, // accounts that are missing code.
 }
@@ -492,9 +495,8 @@ fn rebuild_accounts(
     db: &mut HashDB,
     account_chunk: &[&[u8]],
     out_chunk: &mut [(H256, Bytes)],
-    code_map: &HashMap<H256, Bytes>
-) -> Result<RebuiltStatus, ::error::Error>
-{
+    known_code: &HashMap<H256, H256>,
+) -> Result<RebuiltStatus, ::error::Error> {
     let mut status = RebuiltStatus::default();
     for (account_pair, out) in account_chunk.into_iter().zip(out_chunk) {
         let account_rlp = UntrustedRlp::new(account_pair);
@@ -503,17 +505,33 @@ fn rebuild_accounts(
         let fat_rlp = try!(account_rlp.at(1));
         let thin_rlp = {
-            let mut acct_db = AccountDBMut::from_hash(db, hash);
 
             // fill out the storage trie and code while decoding.
-            let (acc, maybe_code) = try!(Account::from_fat_rlp(&mut acct_db, fat_rlp, code_map));
+            let (acc, maybe_code) = {
+                let mut acct_db = AccountDBMut::from_hash(db, hash);
+                try!(Account::from_fat_rlp(&mut acct_db, fat_rlp))
+            };
 
             let code_hash = acc.code_hash().clone();
             match maybe_code {
-                Some(code) => status.new_code.push((code_hash, code)),
+                // new inline code
+                Some(code) => status.new_code.push((code_hash, code, hash)),
                 None => {
-                    if code_hash != ::util::SHA3_EMPTY && !code_map.contains_key(&code_hash) {
-                        status.missing_code.push((hash, code_hash));
+                    if code_hash != ::util::SHA3_EMPTY {
+                        // see if this code has already been included inline
+                        match known_code.get(&code_hash) {
+                            Some(&first_with) => {
+                                // if so, load it from the database.
+                                let code = try!(AccountDB::from_hash(db, first_with)
+                                    .get(&code_hash)
+                                    .ok_or_else(|| Error::MissingCode(vec![first_with])));
+
+                                // and write it again under a different mangled key
+                                AccountDBMut::from_hash(db, hash).emplace(code_hash, code);
+                            }
+                            // if not, queue it up to be filled later
+                            None => status.missing_code.push((hash, code_hash)),
+                        }
                     }
                 }
             }
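Net effect of the rebuilder changes above: instead of keeping every contract-code blob in memory (`code_map: HashMap<H256, Bytes>`), the rebuilder now only remembers which account first carried each code hash (`known_code: HashMap<H256, H256>`); later accounts that reference the same hash copy the bytes back out of the database, and anything still unknown is queued in `missing_code`. A rough sketch of that decision flow with simplified stand-in types — `classify_code`, the `H256`/`Bytes` aliases and `EMPTY_CODE_HASH` are illustrative, not the actual Parity API:

use std::collections::HashMap;

type H256 = [u8; 32]; // stand-in for util::H256
type Bytes = Vec<u8>;

const EMPTY_CODE_HASH: H256 = [0u8; 32]; // placeholder for SHA3_EMPTY

/// Decide what to do with one decoded account's code, mirroring rebuild_accounts above:
/// inline code is recorded once, already-seen hashes are copied from the first account
/// that carried them, and everything else is queued as missing.
fn classify_code(
    addr_hash: H256,
    code_hash: H256,
    inline_code: Option<Bytes>,
    known_code: &HashMap<H256, H256>,        // code hash -> first account with that code
    new_code: &mut Vec<(H256, Bytes, H256)>, // (code hash, code, first account)
    missing_code: &mut Vec<(H256, H256)>,    // (account, code hash)
) {
    match inline_code {
        // the chunk shipped the code inline: remember it and who carried it
        Some(code) => new_code.push((code_hash, code, addr_hash)),
        // no code at all
        None if code_hash == EMPTY_CODE_HASH => {}
        None => match known_code.get(&code_hash) {
            // an earlier account already supplied this code: the real implementation
            // reads it from that account's AccountDB and emplaces it for this one
            Some(_first_with) => { /* copy the bytes out of the database here */ }
            // nobody has it yet: mark this account as still waiting for the code
            None => missing_code.push((addr_hash, code_hash)),
        },
    }
}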

View File

@@ -17,6 +17,7 @@
 //! State snapshotting tests.
 
 use snapshot::{chunk_state, Progress, StateRebuilder};
+use snapshot::account::Account;
 use snapshot::io::{PackedReader, PackedWriter, SnapshotReader, SnapshotWriter};
 use super::helpers::{compare_dbs, StateProducer};
@@ -28,6 +29,8 @@ use util::memorydb::MemoryDB;
 use util::Mutex;
 use devtools::RandomTempPath;
+use util::sha3::SHA3_NULL_RLP;
+
 use std::sync::Arc;
 
 #[test]
@@ -82,3 +85,54 @@ fn snap_and_restore() {
     compare_dbs(&old_db, new_db.as_hashdb());
 }
+
+#[test]
+fn get_code_from_prev_chunk() {
+    use std::collections::HashSet;
+
+    use rlp::{RlpStream, Stream};
+    use util::{HashDB, H256, FixedHash, U256, Hashable};
+    use account_db::{AccountDBMut, AccountDB};
+
+    let code = b"this is definitely code";
+    let mut used_code = HashSet::new();
+    let mut acc_stream = RlpStream::new_list(4);
+    acc_stream.append(&U256::default())
+        .append(&U256::default())
+        .append(&SHA3_NULL_RLP)
+        .append(&code.sha3());
+
+    let (h1, h2) = (H256::random(), H256::random());
+
+    // two accounts with the same code, one per chunk.
+    // first one will have code inlined,
+    // second will just have its hash.
+    let thin_rlp = acc_stream.out();
+    let acc1 = Account::from_thin_rlp(&thin_rlp);
+    let acc2 = Account::from_thin_rlp(&thin_rlp);
+
+    let mut make_chunk = |acc: Account, hash| {
+        let mut db = MemoryDB::new();
+        AccountDBMut::from_hash(&mut db, hash).insert(&code[..]);
+
+        let fat_rlp = acc.to_fat_rlp(&AccountDB::from_hash(&db, hash), &mut used_code).unwrap();
+
+        let mut stream = RlpStream::new_list(1);
+        stream.begin_list(2).append(&hash).append_raw(&fat_rlp, 1);
+        stream.out()
+    };
+
+    let chunk1 = make_chunk(acc1, h1);
+    let chunk2 = make_chunk(acc2, h2);
+
+    let db_path = RandomTempPath::create_dir();
+    let db_cfg = DatabaseConfig::with_columns(::db::NUM_COLUMNS);
+    let new_db = Arc::new(Database::open(&db_cfg, &db_path.to_string_lossy()).unwrap());
+
+    let mut rebuilder = StateRebuilder::new(new_db, Algorithm::Archive);
+
+    rebuilder.feed(&chunk1).unwrap();
+    rebuilder.feed(&chunk2).unwrap();
+
+    rebuilder.check_missing().unwrap();
+}

View File

@@ -70,7 +70,8 @@ export default class AccountSelector extends Component {
   static propTypes = {
     list: PropTypes.array.isRequired,
     selected: PropTypes.object.isRequired,
-    handleSetSelected: PropTypes.func.isRequired
+    handleSetSelected: PropTypes.func.isRequired,
+    onAccountChange: PropTypes.func
   };
 
   state = {
@@ -85,7 +86,8 @@ export default class AccountSelector extends Component {
         nestedItems={ nestedAccounts }
         open={ this.state.open }
         onSelectAccount={ this.onToggleOpen }
-        autoGenerateNestedIndicator={ false } />
+        autoGenerateNestedIndicator={ false }
+        nestedListStyle={ { maxHeight: '14em', overflow: 'auto' } } />
     );
 
     return (
@@ -110,6 +112,10 @@ export default class AccountSelector extends Component {
   onToggleOpen = () => {
     this.setState({ open: !this.state.open });
+
+    if (typeof this.props.onAccountChange === 'function') {
+      this.props.onAccountChange();
+    }
   }
 
   onSelectAccount = (address) => {

View File

@@ -21,7 +21,7 @@ import { Dialog, FlatButton } from 'material-ui';
 
 import AccountSelector from '../../Accounts/AccountSelector';
 import InputText from '../../Inputs/Text';
-import { TOKEN_ADDRESS_TYPE, TLA_TYPE, UINT_TYPE, STRING_TYPE } from '../../Inputs/validation';
+import { TOKEN_ADDRESS_TYPE, TLA_TYPE, DECIMAL_TYPE, STRING_TYPE } from '../../Inputs/validation';
 
 import styles from '../actions.css';
@@ -41,11 +41,11 @@ const initState = {
     floatingLabelText: 'Token TLA',
     hintText: 'The token short name (3 characters)'
   },
-  base: {
+  decimals: {
     ...defaultField,
-    type: UINT_TYPE,
-    floatingLabelText: 'Token Base',
-    hintText: 'The token precision'
+    type: DECIMAL_TYPE,
+    floatingLabelText: 'Token Decimals',
+    hintText: 'The number of decimals (0-18)'
   },
   name: {
     ...defaultField,
@@ -81,6 +81,7 @@ export default class RegisterAction extends Component {
         className={ styles.dialog }
         onRequestClose={ this.onClose }
         actions={ this.renderActions() }
+        ref='dialog'
         autoScrollBodyContent
       >
         { this.renderContent() }
@@ -149,7 +150,9 @@ export default class RegisterAction extends Component {
   renderForm () {
     return (
       <div>
-        <AccountSelector />
+        <AccountSelector
+          onAccountChange={ this.onAccountChange }
+        />
         { this.renderInputs() }
       </div>
     );
@@ -175,6 +178,11 @@ export default class RegisterAction extends Component {
     });
   }
 
+  onAccountChange = () => {
+    const { dialog } = this.refs;
+
+    dialog.forceUpdate();
+  }
+
   onChange (fieldKey, valid, value) {
     const { fields } = this.state;
     const field = fields[fieldKey];

View File

@@ -47,7 +47,8 @@ export const registerToken = (tokenData) => (dispatch, getState) => {
   const contractInstance = state.status.contract.instance;
   const fee = state.status.contract.fee;
 
-  const { address, base, name, tla } = tokenData;
+  const { address, decimals, name, tla } = tokenData;
+  const base = Math.pow(10, decimals);
 
   dispatch(setRegisterSending(true));
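For reference: the registration form now collects a decimal count, and the action derives the `base` value used further down from it, while the Token component later in this commit converts the stored base back for display. The two changes are the same relation read in both directions:

    base = 10^decimals        (e.g. decimals = 18 gives base = 1000000000000000000)
    decimals = log10(base)    (which is what the Math.log10(base) "Decimals" chip below shows)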

View File

@@ -32,6 +32,7 @@ export const SIMPLE_TOKEN_ADDRESS_TYPE = 'SIMPLE_TOKEN_ADDRESS_TYPE';
 export const TLA_TYPE = 'TLA_TYPE';
 export const SIMPLE_TLA_TYPE = 'SIMPLE_TLA_TYPE';
 export const UINT_TYPE = 'UINT_TYPE';
+export const DECIMAL_TYPE = 'DECIMAL_TYPE';
 export const STRING_TYPE = 'STRING_TYPE';
 export const HEX_TYPE = 'HEX_TYPE';
 export const URL_TYPE = 'URL_TYPE';
@@ -39,6 +40,7 @@ export const URL_TYPE = 'URL_TYPE';
 export const ERRORS = {
   invalidTLA: 'The TLA should be 3 characters long',
   invalidUint: 'Please enter a non-negative integer',
+  invalidDecimal: 'Please enter a value between 0 and 18',
   invalidString: 'Please enter at least a character',
   invalidAccount: 'Please select an account to transact with',
   invalidRecipient: 'Please select an account to send to',
@@ -75,7 +77,7 @@ const validateTokenAddress = (address, contract, simple) => {
   return getTokenTotalSupply(address)
     .then(balance => {
-      if (balance === null) {
+      if (balance === null || balance.equals(0)) {
         return {
           error: ERRORS.invalidTokenAddress,
           valid: false
@@ -152,6 +154,21 @@ const validateUint = (uint) => {
   };
 };
 
+const validateDecimal = (decimal) => {
+  if (!/^\d+$/.test(decimal) || parseInt(decimal) < 0 || parseInt(decimal) > 18) {
+    return {
+      error: ERRORS.invalidDecimal,
+      valid: false
+    };
+  }
+
+  return {
+    value: parseInt(decimal),
+    error: null,
+    valid: true
+  };
+};
+
 const validateString = (string) => {
   if (string.toString().length === 0) {
     return {
@@ -204,6 +221,7 @@ export const validate = (value, type, contract) => {
   if (type === TLA_TYPE) return validateTLA(value, contract);
   if (type === SIMPLE_TLA_TYPE) return validateTLA(value, contract, true);
   if (type === UINT_TYPE) return validateUint(value);
+  if (type === DECIMAL_TYPE) return validateDecimal(value);
   if (type === STRING_TYPE) return validateString(value);
   if (type === HEX_TYPE) return validateHex(value);
   if (type === URL_TYPE) return validateURL(value);

View File

@@ -57,6 +57,7 @@ export default class Token extends Component {
     isLoading: PropTypes.bool,
     isPending: PropTypes.bool,
     isTokenOwner: PropTypes.bool.isRequired,
+    isContractOwner: PropTypes.bool.isRequired,
     fullWidth: PropTypes.bool
   };
@@ -151,8 +152,8 @@
     if (!base || base < 0) return null;
 
     return (
       <Chip
-        value={ base.toString() }
-        label='Base' />
+        value={ Math.log10(base).toString() }
+        label='Decimals' />
     );
   }
@@ -220,7 +221,7 @@
   }
 
   renderUnregister () {
-    if (!this.props.isTokenOwner) {
+    if (!this.props.isContractOwner) {
       return null;
     }

View File

@@ -45,7 +45,7 @@ export default class Tokens extends Component {
   }
 
   renderTokens (tokens) {
-    const { accounts } = this.props;
+    const { accounts, isOwner } = this.props;
 
     return tokens.map((token, index) => {
       if (!token || !token.tla) {
@@ -61,7 +61,8 @@
           handleMetaLookup={ this.props.handleMetaLookup }
           handleAddMeta={ this.props.handleAddMeta }
           key={ index }
-          isTokenOwner={ isTokenOwner } />
+          isTokenOwner={ isTokenOwner }
+          isContractOwner={ isOwner } />
       );
     });
   }

View File

@@ -314,7 +314,7 @@ export default class Transfer extends Component {
     }
 
     const token = balance.tokens.find((balance) => balance.token.tag === tag).token;
-    const s = new BigNumber(num).mul(token.format || 1).toString();
+    const s = new BigNumber(num).mul(token.format || 1).toFixed();
 
     if (s.indexOf('.') !== -1) {
       return ERRORS.invalidDecimals;
@@ -516,6 +516,13 @@
   }
 
   recalculateGas = () => {
+    if (!this.isValid()) {
+      this.setState({
+        gas: '0'
+      }, this.recalculate);
+      return;
+    }
+
     (this.state.isEth
       ? this._estimateGasEth()
       : this._estimateGasToken()
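A note on the toString() to toFixed() change in the first hunk: with stock bignumber.js defaults (not anything specific to this codebase), toString() switches to exponential notation for very small or very large results, so a genuinely fractional amount such as 1e-7 contains no '.' and would slip past the decimal check, while a whole amount rendered as "1.5e+21" contains a '.' and would be rejected. toFixed() always returns plain decimal digits, so the indexOf('.') test only fires when the converted amount really has a fractional part.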

View File

@@ -0,0 +1,43 @@
+[
+  {
+    "id": "0xf9f2d620c2e08f83e45555247146c62185e4ab7cf82a4b9002a265a0d020348f",
+    "url": "basiccoin",
+    "name": "Token Deployment",
+    "description": "Deploy new basic tokens that you are able to send around",
+    "author": "Parity Team <admin@ethcore.io>",
+    "version": "1.0.0"
+  },
+  {
+    "id": "0xd1adaede68d344519025e2ff574650cd99d3830fe6d274c7a7843cdc00e17938",
+    "url": "registry",
+    "name": "Registry",
+    "description": "A global registry of addresses on the network",
+    "author": "Parity Team <admin@ethcore.io>",
+    "version": "1.0.0"
+  },
+  {
+    "id": "0x0a8048117e51e964628d0f2d26342b3cd915248b59bcce2721e1d05f5cfa2208",
+    "url": "tokenreg",
+    "name": "Token Registry",
+    "description": "A registry of transactable tokens on the network",
+    "author": "Parity Team <admin@ethcore.io>",
+    "version": "1.0.0"
+  },
+  {
+    "id": "0xf49089046f53f5d2e5f3513c1c32f5ff57d986e46309a42d2b249070e4e72c46",
+    "url": "signaturereg",
+    "name": "Method Registry",
+    "description": "A registry of method signatures for lookups on transactions",
+    "author": "Parity Team <admin@ethcore.io>",
+    "version": "1.0.0"
+  },
+  {
+    "id": "0x058740ee9a5a3fb9f1cfa10752baec87e09cc45cd7027fd54708271aca300c75",
+    "url": "githubhint",
+    "name": "GitHub Hint",
+    "description": "A mapping of GitHub URLs to hashes for use in contracts as references",
+    "author": "Parity Team <admin@ethcore.io>",
+    "version": "1.0.0",
+    "secure": true
+  }
+]

View File

@@ -14,69 +14,39 @@
 // You should have received a copy of the GNU General Public License
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
-import { action, computed, observable } from 'mobx';
+import { action, computed, observable, transaction } from 'mobx';
 
 import Contracts from '../../contracts';
 import { hashToImageUrl } from '../../redux/util';
 
-const builtinApps = [
-  {
-    id: '0xf9f2d620c2e08f83e45555247146c62185e4ab7cf82a4b9002a265a0d020348f',
-    url: 'basiccoin',
-    name: 'Token Deployment',
-    description: 'Deploy new basic tokens that you are able to send around',
-    author: 'Parity Team <admin@ethcore.io>',
-    version: '1.0.0'
-  },
-  {
-    id: '0xd1adaede68d344519025e2ff574650cd99d3830fe6d274c7a7843cdc00e17938',
-    url: 'registry',
-    name: 'Registry',
-    description: 'A global registry of addresses on the network',
-    author: 'Parity Team <admin@ethcore.io>',
-    version: '1.0.0'
-  },
-  {
-    id: '0x0a8048117e51e964628d0f2d26342b3cd915248b59bcce2721e1d05f5cfa2208',
-    url: 'tokenreg',
-    name: 'Token Registry',
-    description: 'A registry of transactable tokens on the network',
-    author: 'Parity Team <admin@ethcore.io>',
-    version: '1.0.0'
-  },
-  {
-    id: '0xf49089046f53f5d2e5f3513c1c32f5ff57d986e46309a42d2b249070e4e72c46',
-    url: 'signaturereg',
-    name: 'Method Registry',
-    description: 'A registry of method signatures for lookups on transactions',
-    author: 'Parity Team <admin@ethcore.io>',
-    version: '1.0.0'
-  },
-  {
-    id: '0x058740ee9a5a3fb9f1cfa10752baec87e09cc45cd7027fd54708271aca300c75',
-    url: 'githubhint',
-    name: 'GitHub Hint',
-    description: 'A mapping of GitHub URLs to hashes for use in contracts as references',
-    author: 'Parity Team <admin@ethcore.io>',
-    version: '1.0.0',
-    secure: true
-  }
-];
+import builtinApps from './builtin.json';
+
+const LS_KEY_HIDDEN = 'hiddenApps';
+const LS_KEY_EXTERNAL = 'externalApps';
 
 export default class DappsStore {
   @observable apps = [];
-  @observable hidden = [];
+  @observable externalApps = [];
+  @observable hiddenApps = [];
   @observable modalOpen = false;
 
   constructor (api) {
     this._api = api;
 
     this._readHiddenApps();
-    this._fetch();
+    this._readExternalApps();
+
+    this._fetchBuiltinApps();
+    this._fetchLocalApps();
+    this._fetchRegistryApps();
   }
 
   @computed get visible () {
-    return this.apps.filter((app) => !this.hidden.includes(app.id));
+    return this.apps
+      .filter((app) => {
+        return this.externalApps.includes(app.id) || !this.hiddenApps.includes(app.id);
+      })
+      .sort((a, b) => a.name.localeCompare(b.name));
   }
 
   @action openModal = () => {
@@ -88,12 +58,12 @@ export default class DappsStore {
   }
 
   @action hideApp = (id) => {
-    this.hidden = this.hidden.concat(id);
+    this.hiddenApps = this.hiddenApps.concat(id);
     this._writeHiddenApps();
   }
 
   @action showApp = (id) => {
-    this.hidden = this.hidden.filter((_id) => _id !== id);
+    this.hiddenApps = this.hiddenApps.filter((_id) => _id !== id);
     this._writeHiddenApps();
   }
@@ -103,25 +73,48 @@ export default class DappsStore {
       : '';
   }
 
-  _fetch () {
-    Promise
-      .all([
-        this._fetchLocal(),
-        this._fetchRegistry()
-      ])
-      .then(([localApps, registryApps]) => {
-        this.apps = []
-          .concat(localApps)
-          .concat(registryApps)
-          .filter((app) => app.id)
-          .sort((a, b) => (a.name || '').localeCompare(b.name || ''));
-      })
-      .catch((error) => {
-        console.warn('DappStore:fetch', error);
-      });
-  }
-
-  _fetchRegistry () {
+  _fetchBuiltinApps () {
+    const { dappReg } = Contracts.get();
+
+    return Promise
+      .all(builtinApps.map((app) => dappReg.getImage(app.id)))
+      .then((imageIds) => {
+        transaction(() => {
+          builtinApps.forEach((app, index) => {
+            app.type = 'builtin';
+            app.image = hashToImageUrl(imageIds[index]);
+            this.apps.push(app);
+          });
+        });
+      });
+  }
+
+  _fetchLocalApps () {
+    return fetch(`${this._getHost()}/api/apps`)
+      .then((response) => {
+        return response.ok
+          ? response.json()
+          : [];
+      })
+      .then((apps) => {
+        return apps
+          .map((app) => {
+            app.type = 'local';
+            return app;
+          })
+          .filter((app) => app.id && !['ui'].includes(app.id));
+      })
+      .then((apps) => {
+        transaction(() => {
+          (apps || []).forEach((app) => this.apps.push(app));
+        });
+      })
+      .catch((error) => {
+        console.warn('DappsStore:fetchLocal', error);
+      });
+  }
+
+  _fetchRegistryApps () {
     const { dappReg } = Contracts.get();
 
     return dappReg
@@ -137,9 +130,9 @@ export default class DappsStore {
         return Promise.all(promises);
       })
       .then((appsInfo) => {
-        const appIds = appsInfo.map(([appId, owner]) => {
-          return this._api.util.bytesToHex(appId);
-        });
+        const appIds = appsInfo
+          .map(([appId, owner]) => this._api.util.bytesToHex(appId))
+          .filter((appId) => !builtinApps.find((app) => app.id === appId));
 
         return Promise
           .all([
@@ -149,27 +142,21 @@ export default class DappsStore {
          ])
          .then(([imageIds, contentIds, manifestIds]) => {
            return appIds.map((appId, index) => {
-              const app = builtinApps.find((ba) => ba.id === appId) || {
+              const app = {
                 id: appId,
+                image: hashToImageUrl(imageIds[index]),
                 contentHash: this._api.util.bytesToHex(contentIds[index]).substr(2),
                 manifestHash: this._api.util.bytesToHex(manifestIds[index]).substr(2),
                 type: 'network'
               };
 
-              app.image = hashToImageUrl(imageIds[index]);
-              app.type = app.type || 'builtin';
-
               return app;
            });
          });
       })
       .then((apps) => {
         return Promise
-          .all(apps.map((app) => {
-            return app.manifestHash
-              ? this._fetchManifest(app.manifestHash)
-              : null;
-          }))
+          .all(apps.map((app) => this._fetchManifest(app.manifestHash)))
           .then((manifests) => {
            return apps.map((app, index) => {
              const manifest = manifests[index];
@@ -177,7 +164,7 @@ export default class DappsStore {
              if (manifest) {
                app.manifestHash = null;
                Object.keys(manifest)
-                  .filter((key) => key !== 'id')
+                  .filter((key) => ['author', 'description', 'name', 'version'].includes(key))
                  .forEach((key) => {
                    app[key] = manifest[key];
                  });
@@ -192,6 +179,11 @@ export default class DappsStore {
            });
          });
       })
+      .then((apps) => {
+        transaction(() => {
+          (apps || []).forEach((app) => this.apps.push(app));
+        });
+      })
       .catch((error) => {
         console.warn('DappsStore:fetchRegistry', error);
       });
@@ -210,39 +202,43 @@ export default class DappsStore {
      });
   }
 
-  _fetchLocal () {
-    return fetch(`${this._getHost()}/api/apps`)
-      .then((response) => {
-        return response.ok
-          ? response.json()
-          : [];
-      })
-      .then((localApps) => {
-        return localApps
-          .filter((app) => app && app.id && !['ui'].includes(app.id))
-          .map((app) => {
-            app.type = 'local';
-            return app;
-          });
-      })
-      .catch((error) => {
-        console.warn('DappsStore:fetchLocal', error);
-      });
-  }
-
   _readHiddenApps () {
-    const stored = localStorage.getItem('hiddenApps');
+    const stored = localStorage.getItem(LS_KEY_HIDDEN);
 
    if (stored) {
      try {
-        this.hidden = JSON.parse(stored);
+        this.hiddenApps = JSON.parse(stored);
      } catch (error) {
        console.warn('DappsStore:readHiddenApps', error);
      }
    }
   }
 
+  _readExternalApps () {
+    const stored = localStorage.getItem(LS_KEY_EXTERNAL);
+
+    if (stored) {
+      try {
+        this.externalApps = JSON.parse(stored);
+      } catch (error) {
+        console.warn('DappsStore:readExternalApps', error);
+      }
+    }
+  }
+
+  _writeExternalApps () {
+    try {
+      localStorage.setItem(LS_KEY_EXTERNAL, JSON.stringify(this.externalApps));
+    } catch (error) {
+      console.error('DappsStore:writeExternalApps', error);
+    }
+  }
+
   _writeHiddenApps () {
-    localStorage.setItem('hiddenApps', JSON.stringify(this.hidden));
+    try {
+      localStorage.setItem(LS_KEY_HIDDEN, JSON.stringify(this.hiddenApps));
+    } catch (error) {
+      console.error('DappsStore:writeHiddenApps', error);
+    }
   }
 }

View File

@@ -15,9 +15,8 @@
 // along with Parity. If not, see <http://www.gnu.org/licenses/>.
 
 use std::{io, env};
-use std::io::{Write, Read, BufReader, BufRead};
+use std::io::{Write, BufReader, BufRead};
 use std::time::Duration;
-use std::path::Path;
 use std::fs::File;
 use util::{clean_0x, U256, Uint, Address, path, CompactionProfile};
 use util::journaldb::Algorithm;
@@ -299,13 +298,11 @@ pub fn password_prompt() -> Result<String, String> {
 }
 
 /// Read a password from password file.
-pub fn password_from_file<P>(path: P) -> Result<String, String> where P: AsRef<Path> {
-    let mut file = try!(File::open(path).map_err(|_| "Unable to open password file."));
-    let mut file_content = String::new();
-    match file.read_to_string(&mut file_content) {
-        Ok(_) => Ok(file_content.trim().into()),
-        Err(_) => Err("Unable to read password file.".into()),
-    }
+pub fn password_from_file(path: String) -> Result<String, String> {
+    let passwords = try!(passwords_from_files(vec![path]));
+    // use only first password from the file
+    passwords.get(0).map(String::to_owned)
+        .ok_or_else(|| "Password file seems to be empty.".to_owned())
 }
 
 /// Reads passwords from files. Treats each line as a separate password.
@@ -314,7 +311,8 @@ pub fn passwords_from_files(files: Vec<String>) -> Result<Vec<String>, String> {
         let file = try!(File::open(filename).map_err(|_| format!("{} Unable to read password file. Ensure it exists and permissions are correct.", filename)));
         let reader = BufReader::new(&file);
         let lines = reader.lines()
-            .map(|l| l.unwrap())
+            .filter_map(|l| l.ok())
+            .map(|pwd| pwd.trim().to_owned())
            .collect::<Vec<String>>();
        Ok(lines)
    }).collect::<Result<Vec<Vec<String>>, String>>();
@@ -418,7 +416,20 @@ mod tests {
        let path = RandomTempPath::new();
        let mut file = File::create(path.as_path()).unwrap();
        file.write_all(b"a bc ").unwrap();
-        assert_eq!(password_from_file(path).unwrap().as_bytes(), b"a bc");
+        assert_eq!(password_from_file(path.as_str().into()).unwrap().as_bytes(), b"a bc");
+    }
+
+    #[test]
+    fn test_password_multiline() {
+        let path = RandomTempPath::new();
+        let mut file = File::create(path.as_path()).unwrap();
+        file.write_all(br#" password with trailing whitespace
+those passwords should be
+ignored
+but the first password is trimmed
+"#).unwrap();
+        assert_eq!(&password_from_file(path.as_str().into()).unwrap(), "password with trailing whitespace");
    }
 
    #[test]

View File

@@ -16,7 +16,7 @@
 use std::ops::{Deref, DerefMut};
 use std::thread;
-use std::time::{self, Duration};
+use std::time;
 use std::sync::Arc;
 use devtools::{http_client, RandomTempPath};
 use rpc::ConfirmationsQueue;
@@ -50,7 +50,6 @@ pub fn serve() -> (Server, usize, GuardedAuthCodes) {
     let builder = ServerBuilder::new(queue, path.to_path_buf());
     let port = 35000 + rand::random::<usize>() % 10000;
     let res = builder.start(format!("127.0.0.1:{}", port).parse().unwrap()).unwrap();
-    thread::sleep(Duration::from_millis(25));
 
     (res, port, GuardedAuthCodes {
         authcodes: AuthCodes::from_file(&path).unwrap(),

View File

@@ -17,10 +17,10 @@
 //! Database of byte-slices keyed to their Keccak hash.
 
 use hash::*;
 use std::collections::HashMap;
-use elastic_array::ElasticArray256;
+use elastic_array::ElasticArray128;
 
 /// `HashDB` value type.
-pub type DBValue = ElasticArray256<u8>;
+pub type DBValue = ElasticArray128<u8>;
 
 /// Trait modelling datastore keyed by a 32-byte Keccak hash.
 pub trait HashDB: AsHashDB + Send + Sync {

View File

@@ -464,6 +464,7 @@ impl Database {
                 try!(db.write_opt(batch, &self.write_opts));
                 for column in self.flushing.write().iter_mut() {
                     column.clear();
+                    column.shrink_to_fit();
                 }
                 Ok(())
             },
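The added shrink_to_fit() call matters because clear() alone keeps a collection's allocation around, so each flushing column would keep holding its peak memory between flushes. A tiny self-contained sketch of the distinction, using a Vec for simplicity (the column overlays here are a different collection type, but the same clear-versus-shrink behaviour applies):

fn main() {
    // Fill a buffer, then observe what clear() and shrink_to_fit() do to its capacity.
    let mut buf: Vec<u8> = Vec::with_capacity(1024 * 1024);
    buf.resize(1024 * 1024, 0);

    buf.clear();
    assert_eq!(buf.len(), 0);
    assert!(buf.capacity() >= 1024 * 1024); // clear() keeps the ~1 MiB allocation

    buf.shrink_to_fit();
    assert!(buf.capacity() < 1024); // shrink_to_fit() hands the memory back
}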