Implement hardcoded sync in the light client (#8075)
* Implement hardcoded sync
* Fix concerns
* Remove artifact
* Fix cli tests
* Fix compilation
* Update hardcoded sync block
* Don't use any data fetch for the light service
This commit is contained in:
parent dbc4d85f0a · commit 04931618ed
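How the pieces introduced by this diff fit together: the chain spec can now carry a hardcodedSync block (an RLP-encoded header, its total difficulty, and a list of CHT roots), the light client's Config gains a no_hardcoded_sync switch, and HeaderChain::new takes a HardcodedSync value deciding whether a fresh database gets seeded with that hardcoded block. A condensed sketch using the names from the hunks below; the surrounding glue, imports, and module paths are illustrative approximations, not an excerpt of the actual client code:

    // Sketch only: HeaderChain, HardcodedSync and Config are defined in the hunks below.
    fn open_header_chain(
    	db: Arc<KeyValueDB>,
    	col: Option<u32>,
    	spec: &Spec,
    	cache: Arc<Mutex<Cache>>,
    	config: &Config,
    ) -> Result<HeaderChain, Error> {
    	// `--no-hardcoded-sync` (Config::no_hardcoded_sync) turns the feature off entirely;
    	// otherwise a fresh database is seeded from `spec.hardcoded_sync`, when present.
    	let allow = if config.no_hardcoded_sync { HardcodedSync::Deny } else { HardcodedSync::Allow };
    	HeaderChain::new(db, col, spec, cache, allow)
    }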
@@ -31,11 +31,11 @@ use std::sync::Arc;
 use cht;
 
 use ethcore::block_status::BlockStatus;
-use ethcore::error::{BlockImportError, BlockError};
+use ethcore::error::{Error, BlockImportError, BlockError};
 use ethcore::encoded;
 use ethcore::header::Header;
 use ethcore::ids::BlockId;
-use ethcore::spec::Spec;
+use ethcore::spec::{Spec, SpecHardcodedSync};
 use ethcore::engines::epoch::{
 	Transition as EpochTransition,
 	PendingTransition as PendingEpochTransition
@@ -45,7 +45,7 @@ use rlp::{Encodable, Decodable, DecoderError, RlpStream, Rlp, UntrustedRlp};
 use heapsize::HeapSizeOf;
 use ethereum_types::{H256, H264, U256};
 use plain_hasher::H256FastMap;
-use kvdb::{self, DBTransaction, KeyValueDB};
+use kvdb::{DBTransaction, KeyValueDB};
 
 use cache::Cache;
 use parking_lot::{Mutex, RwLock};
@@ -180,6 +180,12 @@ pub struct PendingChanges {
 	best_block: Option<BlockDescriptor>, // new best block.
 }
 
+/// Whether or not the hardcoded sync feature is allowed.
+pub enum HardcodedSync {
+	Allow,
+	Deny,
+}
+
 /// Header chain. See module docs for more details.
 pub struct HeaderChain {
 	genesis_header: encoded::Header, // special-case the genesis.
@@ -198,7 +204,8 @@ impl HeaderChain {
 		col: Option<u32>,
 		spec: &Spec,
 		cache: Arc<Mutex<Cache>>,
-	) -> Result<Self, kvdb::Error> {
+		allow_hs: HardcodedSync,
+	) -> Result<Self, Error> {
 		let mut live_epoch_proofs = ::std::collections::HashMap::default();
 
 		let genesis = ::rlp::encode(&spec.genesis_header()).into_vec();
@@ -240,7 +247,7 @@ impl HeaderChain {
 			let best_block = {
 				let era = match candidates.get(&best_number) {
 					Some(era) => era,
-					None => return Err("Database corrupt: highest block referenced but no data.".into()),
+					None => return Err(Error::Database("Database corrupt: highest block referenced but no data.".into())),
 				};
 
 				let best = &era.candidates[0];
@@ -260,8 +267,9 @@ impl HeaderChain {
 				col: col,
 				cache: cache,
 			}
+
 		} else {
-			HeaderChain {
+			let chain = HeaderChain {
 				genesis_header: encoded::Header::new(genesis),
 				best_block: RwLock::new(BlockDescriptor {
 					hash: decoded_header.hash(),
@@ -270,15 +278,49 @@ impl HeaderChain {
 				}),
 				candidates: RwLock::new(BTreeMap::new()),
 				live_epoch_proofs: RwLock::new(live_epoch_proofs),
-				db: db,
+				db: db.clone(),
 				col: col,
 				cache: cache,
-			}
+			};
+
+			// insert the hardcoded sync into the database.
+			if let (&Some(ref hardcoded_sync), HardcodedSync::Allow) = (&spec.hardcoded_sync, allow_hs) {
+				let mut batch = db.transaction();
+
+				// insert the hardcoded CHT roots into the database.
+				for (cht_num, cht_root) in hardcoded_sync.chts.iter().enumerate() {
+					batch.put(col, cht_key(cht_num as u64).as_bytes(), &::rlp::encode(cht_root));
+				}
+
+				let decoded_header = hardcoded_sync.header.decode();
+				let decoded_header_num = decoded_header.number();
+
+				// write the block in the DB.
+				info!(target: "chain", "Inserting hardcoded block #{} in chain",
+					  decoded_header_num);
+				let pending = chain.insert_with_td(&mut batch, decoded_header,
+												hardcoded_sync.total_difficulty, None)?;
+
+				// check that we have enough hardcoded CHT roots. avoids panicking later.
+				let cht_num = cht::block_to_cht_number(decoded_header_num - 1)
+					.expect("specs provided a hardcoded block with height 0");
+				if cht_num >= hardcoded_sync.chts.len() as u64 {
+					warn!(target: "chain", "specs didn't provide enough CHT roots for its \
+											hardcoded block ; falling back to non-hardcoded sync \
+											mode");
+				} else {
+					db.write_buffered(batch);
+					chain.apply_pending(pending);
+				}
+			}
+
+			chain
 		};
 
 		// instantiate genesis epoch data if it doesn't exist.
 		if let None = chain.db.get(col, LAST_CANONICAL_TRANSITION)? {
-			let genesis_data = spec.genesis_epoch_data()?;
+			let genesis_data = spec.genesis_epoch_data()
+				.map_err(|s| Error::Database(s.into()))?;
 
 			{
 				let mut batch = chain.db.transaction();
@@ -304,6 +346,29 @@ impl HeaderChain {
 		transaction: &mut DBTransaction,
 		header: Header,
 		transition_proof: Option<Vec<u8>>,
+	) -> Result<PendingChanges, BlockImportError> {
+		self.insert_inner(transaction, header, None, transition_proof)
+	}
+
+	/// Insert a pre-verified header, with a known total difficulty. Similary to `insert`.
+	///
+	/// This blindly trusts that the data given to it is sensible.
+	pub fn insert_with_td(
+		&self,
+		transaction: &mut DBTransaction,
+		header: Header,
+		total_difficulty: U256,
+		transition_proof: Option<Vec<u8>>,
+	) -> Result<PendingChanges, BlockImportError> {
+		self.insert_inner(transaction, header, Some(total_difficulty), transition_proof)
+	}
+
+	fn insert_inner(
+		&self,
+		transaction: &mut DBTransaction,
+		header: Header,
+		total_difficulty: Option<U256>,
+		transition_proof: Option<Vec<u8>>,
 	) -> Result<PendingChanges, BlockImportError> {
 		let hash = header.hash();
 		let number = header.number();
@@ -321,7 +386,10 @@ impl HeaderChain {
 		// hold candidates the whole time to guard import order.
 		let mut candidates = self.candidates.write();
 
-		// find parent details.
+		// find total difficulty.
+		let total_difficulty = match total_difficulty {
+			Some(td) => td,
+			None => {
 				let parent_td =
 					if number == 1 {
 						self.genesis_header.difficulty()
@@ -333,7 +401,9 @@ impl HeaderChain {
 						.map_err(BlockImportError::Block)?
 					};
 
-		let total_difficulty = parent_td + *header.difficulty();
+				parent_td + *header.difficulty()
+			},
+		};
 
 		// insert headers and candidates entries and write era to disk.
 		{
@@ -479,6 +549,65 @@ impl HeaderChain {
 		Ok(pending)
 	}
 
+	/// Generates the specifications for hardcoded sync. This is typically only called manually
+	/// from time to time by a Parity developer in order to update the chain specifications.
+	///
+	/// Returns `None` if we are at the genesis block, or if an error happens .
+	pub fn read_hardcoded_sync(&self) -> Result<Option<SpecHardcodedSync>, Error> {
+		let mut chts = Vec::new();
+		let mut cht_num = 0;
+
+		loop {
+			let cht = match self.cht_root(cht_num) {
+				Some(cht) => cht,
+				None if cht_num != 0 => {
+					// end of the iteration
+					let h_num = 1 + cht_num as u64 * cht::SIZE;
+					let header = if let Some(header) = self.block_header(BlockId::Number(h_num)) {
+						header
+					} else {
+						let msg = format!("header of block #{} not found in DB ; database in an \
+											inconsistent state", h_num);
+						return Err(Error::Database(msg.into()));
+					};
+
+					let decoded = header.decode();
+
+					let entry: Entry = {
+						let bytes = self.db.get(self.col, era_key(h_num).as_bytes())?
+							.ok_or_else(|| {
+								let msg = format!("entry for era #{} not found in DB ; database \
+													in an inconsistent state", h_num);
+								Error::Database(msg.into())
+							})?;
+						::rlp::decode(&bytes)
+					};
+
+					let total_difficulty = entry.candidates.iter()
+						.find(|c| c.hash == decoded.hash())
+						.ok_or_else(|| {
+							let msg = "no candidate matching block found in DB ; database in an \
+										inconsistent state";
+							Error::Database(msg.into())
+						})?
+						.total_difficulty;
+
+					break Ok(Some(SpecHardcodedSync {
+						header: header,
+						total_difficulty: total_difficulty,
+						chts: chts,
+					}));
+				},
+				None => {
+					break Ok(None);
+				},
+			};
+
+			chts.push(cht);
+			cht_num += 1;
+		}
+	}
+
 	/// Apply pending changes from a previous `insert` operation.
 	/// Must be done before the next `insert` call.
 	pub fn apply_pending(&self, pending: PendingChanges) {
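read_hardcoded_sync above is the generation half of the feature: a developer with a fully synced light database dumps the accumulated CHT roots plus the first header of the following era, to be pasted back into the chain spec. A minimal sketch of that workflow, assuming the SpecHardcodedSync::to_json helper added further down in this diff and a serde_json dependency (the JSON-side type derives Serialize); error handling and scope are illustrative:

    // Sketch: regenerate the `hardcodedSync` spec block from an existing HeaderChain.
    fn dump_hardcoded_sync(chain: &HeaderChain) -> Result<Option<String>, Error> {
    	let sync = chain.read_hardcoded_sync()?; // `None` if the chain is still at genesis.
    	Ok(sync.map(|hs| {
    		let json = hs.to_json(); // ethjson::spec::HardcodedSync, introduced below.
    		serde_json::to_string_pretty(&json).expect("HardcodedSync derives Serialize")
    	}))
    }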
@@ -721,7 +850,7 @@ impl<'a> Iterator for AncestryIter<'a> {
 
 #[cfg(test)]
 mod tests {
-	use super::HeaderChain;
+	use super::{HeaderChain, HardcodedSync};
 	use std::sync::Arc;
 
 	use ethereum_types::U256;
@@ -747,7 +876,7 @@ mod tests {
 
 		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::from_secs(6 * 3600))));
 
-		let chain = HeaderChain::new(db.clone(), None, &spec, cache).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &spec, cache, HardcodedSync::Allow).unwrap();
 
 		let mut parent_hash = genesis_header.hash();
 		let mut rolling_timestamp = genesis_header.timestamp();
@@ -780,7 +909,7 @@ mod tests {
 		let db = make_db();
 		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::from_secs(6 * 3600))));
 
-		let chain = HeaderChain::new(db.clone(), None, &spec, cache).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &spec, cache, HardcodedSync::Allow).unwrap();
 
 		let mut parent_hash = genesis_header.hash();
 		let mut rolling_timestamp = genesis_header.timestamp();
@@ -862,7 +991,7 @@ mod tests {
 		let db = make_db();
 		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::from_secs(6 * 3600))));
 
-		let chain = HeaderChain::new(db.clone(), None, &spec, cache).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &spec, cache, HardcodedSync::Allow).unwrap();
 
 		assert!(chain.block_header(BlockId::Earliest).is_some());
 		assert!(chain.block_header(BlockId::Latest).is_some());
@@ -876,7 +1005,8 @@ mod tests {
 		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::from_secs(6 * 3600))));
 
 		{
-			let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
+			let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone(),
+										HardcodedSync::Allow).unwrap();
 			let mut parent_hash = genesis_header.hash();
 			let mut rolling_timestamp = genesis_header.timestamp();
 			for i in 1..10000 {
@@ -896,7 +1026,8 @@ mod tests {
 			}
 		}
 
-		let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone(),
+									HardcodedSync::Allow).unwrap();
 		assert!(chain.block_header(BlockId::Number(10)).is_none());
 		assert!(chain.block_header(BlockId::Number(9000)).is_some());
 		assert!(chain.cht_root(2).is_some());
@@ -912,7 +1043,8 @@ mod tests {
 		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::from_secs(6 * 3600))));
 
 		{
-			let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
+			let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone(),
+										HardcodedSync::Allow).unwrap();
 			let mut parent_hash = genesis_header.hash();
 			let mut rolling_timestamp = genesis_header.timestamp();
 
@@ -954,7 +1086,8 @@ mod tests {
 		}
 
 		// after restoration, non-canonical eras should still be loaded.
-		let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone(),
+									HardcodedSync::Allow).unwrap();
 		assert_eq!(chain.block_header(BlockId::Latest).unwrap().number(), 10);
 		assert!(chain.candidates.read().get(&100).is_some())
 	}
@@ -966,7 +1099,8 @@ mod tests {
 		let db = make_db();
 		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::from_secs(6 * 3600))));
 
-		let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone()).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &spec, cache.clone(),
+									HardcodedSync::Allow).unwrap();
 
 		assert!(chain.block_header(BlockId::Earliest).is_some());
 		assert!(chain.block_header(BlockId::Number(0)).is_some());
@@ -980,7 +1114,7 @@ mod tests {
 		let db = make_db();
 		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::from_secs(6 * 3600))));
 
-		let chain = HeaderChain::new(db.clone(), None, &spec, cache).unwrap();
+		let chain = HeaderChain::new(db.clone(), None, &spec, cache, HardcodedSync::Allow).unwrap();
 
 		let mut parent_hash = genesis_header.hash();
 		let mut rolling_timestamp = genesis_header.timestamp();
@@ -1038,4 +1172,45 @@ mod tests {
 		assert!(chain.live_epoch_proofs.read().is_empty());
 		assert_eq!(chain.epoch_transition_for(parent_hash).unwrap().1, vec![1, 2, 3, 4]);
 	}
+
+	#[test]
+	fn hardcoded_sync_gen() {
+		let spec = Spec::new_test();
+		let genesis_header = spec.genesis_header();
+		let db = make_db();
+
+		let cache = Arc::new(Mutex::new(Cache::new(Default::default(), Duration::from_secs(6 * 3600))));
+
+		let chain = HeaderChain::new(db.clone(), None, &spec, cache, HardcodedSync::Allow).unwrap();
+
+		let mut parent_hash = genesis_header.hash();
+		let mut rolling_timestamp = genesis_header.timestamp();
+		let mut total_difficulty = *genesis_header.difficulty();
+		let h_num = 3 * ::cht::SIZE + 1;
+		for i in 1..10000 {
+			let mut header = Header::new();
+			header.set_parent_hash(parent_hash);
+			header.set_number(i);
+			header.set_timestamp(rolling_timestamp);
+			let diff = *genesis_header.difficulty() * i as u32;
+			header.set_difficulty(diff);
+			if i <= h_num {
+				total_difficulty = total_difficulty + diff;
+			}
+			parent_hash = header.hash();
+
+			let mut tx = db.transaction();
+			let pending = chain.insert(&mut tx, header, None).unwrap();
+			db.write(tx).unwrap();
+			chain.apply_pending(pending);
+
+			rolling_timestamp += 10;
+		}
+
+		let hardcoded_sync = chain.read_hardcoded_sync().unwrap().unwrap();
+		assert_eq!(hardcoded_sync.chts.len(), 3);
+		assert_eq!(hardcoded_sync.total_difficulty, total_difficulty);
+		let decoded: Header = hardcoded_sync.header.decode();
+		assert_eq!(decoded.number(), h_num);
+	}
 }
@@ -22,22 +22,22 @@ use ethcore::block_status::BlockStatus;
 use ethcore::client::{ClientReport, EnvInfo, ClientIoMessage};
 use ethcore::engines::{epoch, EthEngine, EpochChange, EpochTransition, Proof};
 use ethcore::machine::EthereumMachine;
-use ethcore::error::BlockImportError;
+use ethcore::error::{Error, BlockImportError};
 use ethcore::ids::BlockId;
 use ethcore::header::{BlockNumber, Header};
 use ethcore::verification::queue::{self, HeaderQueue};
 use ethcore::blockchain_info::BlockChainInfo;
-use ethcore::spec::Spec;
+use ethcore::spec::{Spec, SpecHardcodedSync};
 use ethcore::encoded;
 use io::IoChannel;
 use parking_lot::{Mutex, RwLock};
 use ethereum_types::{H256, U256};
 use futures::{IntoFuture, Future};
 
-use kvdb::{self, KeyValueDB};
+use kvdb::KeyValueDB;
 
 use self::fetch::ChainDataFetcher;
-use self::header_chain::{AncestryIter, HeaderChain};
+use self::header_chain::{AncestryIter, HeaderChain, HardcodedSync};
 
 use cache::Cache;
 
@@ -59,6 +59,8 @@ pub struct Config {
 	pub verify_full: bool,
 	/// Should it check the seal of blocks?
 	pub check_seal: bool,
+	/// Disable hardcoded sync.
+	pub no_hardcoded_sync: bool,
 }
 
 impl Default for Config {
@@ -68,6 +70,7 @@ impl Default for Config {
 			chain_column: None,
 			verify_full: true,
 			check_seal: true,
+			no_hardcoded_sync: false,
 		}
 	}
 }
@@ -175,11 +178,14 @@ impl<T: ChainDataFetcher> Client<T> {
 		fetcher: T,
 		io_channel: IoChannel<ClientIoMessage>,
 		cache: Arc<Mutex<Cache>>
-	) -> Result<Self, kvdb::Error> {
+	) -> Result<Self, Error> {
 		Ok(Client {
 			queue: HeaderQueue::new(config.queue, spec.engine.clone(), io_channel, config.check_seal),
 			engine: spec.engine.clone(),
-			chain: HeaderChain::new(db.clone(), chain_col, &spec, cache)?,
+			chain: {
+				let hs_cfg = if config.no_hardcoded_sync { HardcodedSync::Deny } else { HardcodedSync::Allow };
+				HeaderChain::new(db.clone(), chain_col, &spec, cache, hs_cfg)?
+			},
 			report: RwLock::new(ClientReport::default()),
 			import_lock: Mutex::new(()),
 			db: db,
@@ -189,6 +195,14 @@ impl<T: ChainDataFetcher> Client<T> {
 		})
 	}
 
+	/// Generates the specifications for hardcoded sync. This is typically only called manually
+	/// from time to time by a Parity developer in order to update the chain specifications.
+	///
+	/// Returns `None` if we are at the genesis block.
+	pub fn read_hardcoded_sync(&self) -> Result<Option<SpecHardcodedSync>, Error> {
+		self.chain.read_hardcoded_sync()
+	}
+
 	/// Adds a new `LightChainNotify` listener.
 	pub fn add_listener(&self, listener: Weak<LightChainNotify>) {
 		self.listeners.write().push(listener);
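For callers of the light Client, the only new knob is Config::no_hardcoded_sync; Client::new maps it onto HardcodedSync::Allow or Deny as shown above. A hedged usage sketch (the struct-update syntax assumes the remaining Config fields stay public and keep their Default values, as in the hunks above):

    // Sketch: a light-client Config that opts out of hardcoded sync,
    // mirroring what the `--no-hardcoded-sync` CLI flag (added further down) does.
    fn light_config_without_hardcoded_sync() -> Config {
    	Config {
    		no_hardcoded_sync: true,
    		..Config::default()
    	}
    }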
@@ -22,9 +22,10 @@ use std::sync::Arc;
 
 use ethcore::client::ClientIoMessage;
 use ethcore::db;
+use ethcore::error::Error as CoreError;
 use ethcore::spec::Spec;
 use io::{IoContext, IoError, IoHandler, IoService};
-use kvdb::{self, KeyValueDB};
+use kvdb::KeyValueDB;
 
 use cache::Cache;
 use parking_lot::Mutex;
@@ -34,16 +35,23 @@ use super::{ChainDataFetcher, Client, Config as ClientConfig};
 /// Errors on service initialization.
 #[derive(Debug)]
 pub enum Error {
-	/// Database error.
-	Database(kvdb::Error),
+	/// Core error.
+	Core(CoreError),
 	/// I/O service error.
 	Io(IoError),
 }
 
+impl From<CoreError> for Error {
+	#[inline]
+	fn from(err: CoreError) -> Error {
+		Error::Core(err)
+	}
+}
+
 impl fmt::Display for Error {
 	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 		match *self {
-			Error::Database(ref msg) => write!(f, "Database error: {}", msg),
+			Error::Core(ref msg) => write!(f, "Core error: {}", msg),
 			Error::Io(ref err) => write!(f, "I/O service error: {}", err),
 		}
 	}
@@ -67,7 +75,7 @@ impl<T: ChainDataFetcher> Service<T> {
 			fetcher,
 			io_service.channel(),
 			cache,
-		).map_err(Error::Database)?);
+		)?);
 
 		io_service.register_handler(Arc::new(ImportBlocks(client.clone()))).map_err(Error::Io)?;
 		spec.engine.register_client(Arc::downgrade(&client) as _);
[File diff suppressed because it is too large]
@@ -21,4 +21,4 @@ mod seal;
 mod spec;
 
 pub use self::genesis::Genesis;
-pub use self::spec::{Spec, SpecParams, CommonParams, OptimizeFor};
+pub use self::spec::{Spec, SpecHardcodedSync, SpecParams, CommonParams, OptimizeFor};
@@ -28,10 +28,11 @@ use hash::{KECCAK_NULL_RLP, keccak};
 use memorydb::MemoryDB;
 use parking_lot::RwLock;
 use rlp::{Rlp, RlpStream};
-use rustc_hex::FromHex;
+use rustc_hex::{FromHex, ToHex};
 use vm::{EnvInfo, CallType, ActionValue, ActionParams, ParamsType};
 
 use builtin::Builtin;
+use encoded;
 use engines::{EthEngine, NullEngine, InstantSeal, BasicAuthority, AuthorityRound, Tendermint, DEFAULT_BLOCKHASH_CONTRACT};
 use error::Error;
 use executive::Executive;
@@ -319,6 +320,9 @@ pub struct Spec {
 	/// Each seal field, expressed as RLP, concatenated.
 	pub seal_rlp: Bytes,
 
+	/// Hardcoded synchronization. Allows the light client to immediately jump to a specific block.
+	pub hardcoded_sync: Option<SpecHardcodedSync>,
+
 	/// Contract constructors to be executed on genesis.
 	constructors: Vec<(Address, Bytes)>,
 
@@ -347,6 +351,7 @@ impl Clone for Spec {
 			timestamp: self.timestamp.clone(),
 			extra_data: self.extra_data.clone(),
 			seal_rlp: self.seal_rlp.clone(),
+			hardcoded_sync: self.hardcoded_sync.clone(),
 			constructors: self.constructors.clone(),
 			state_root_memo: RwLock::new(*self.state_root_memo.read()),
 			genesis_state: self.genesis_state.clone(),
@@ -354,6 +359,45 @@ impl Clone for Spec {
 	}
 }
 
+/// Part of `Spec`. Describes the hardcoded synchronization parameters.
+pub struct SpecHardcodedSync {
+	/// Header of the block to jump to for hardcoded sync, and total difficulty.
+	pub header: encoded::Header,
+	/// Total difficulty of the block to jump to.
+	pub total_difficulty: U256,
+	/// List of hardcoded CHTs, in order. If `hardcoded_sync` is set, the CHTs should include the
+	/// header of `hardcoded_sync`.
+	pub chts: Vec<H256>,
+}
+
+impl SpecHardcodedSync {
+	/// Turns this specifications back into JSON. Useful for pretty printing.
+	pub fn to_json(self) -> ethjson::spec::HardcodedSync {
+		self.into()
+	}
+}
+
+#[cfg(test)]
+impl Clone for SpecHardcodedSync {
+	fn clone(&self) -> SpecHardcodedSync {
+		SpecHardcodedSync {
+			header: self.header.clone(),
+			total_difficulty: self.total_difficulty.clone(),
+			chts: self.chts.clone(),
+		}
+	}
+}
+
+impl From<SpecHardcodedSync> for ethjson::spec::HardcodedSync {
+	fn from(sync: SpecHardcodedSync) -> ethjson::spec::HardcodedSync {
+		ethjson::spec::HardcodedSync {
+			header: sync.header.into_inner().to_hex(),
+			total_difficulty: ethjson::uint::Uint(sync.total_difficulty),
+			chts: sync.chts.into_iter().map(Into::into).collect(),
+		}
+	}
+}
+
 fn load_machine_from(s: ethjson::spec::Spec) -> EthereumMachine {
 	let builtins = s.accounts.builtins().into_iter().map(|p| (p.0.into(), From::from(p.1))).collect();
 	let params = CommonParams::from(s.params);
@@ -372,6 +416,23 @@ fn load_from(spec_params: SpecParams, s: ethjson::spec::Spec) -> Result<Spec, Er
 	let GenericSeal(seal_rlp) = g.seal.into();
 	let params = CommonParams::from(s.params);
 
+	let hardcoded_sync = if let Some(ref hs) = s.hardcoded_sync {
+		if let Ok(header) = hs.header.from_hex() {
+			Some(SpecHardcodedSync {
+				header: encoded::Header::new(header),
+				total_difficulty: hs.total_difficulty.into(),
+				chts: s.hardcoded_sync
+					.as_ref()
+					.map(|s| s.chts.iter().map(|c| c.clone().into()).collect())
+					.unwrap_or(Vec::new()),
+			})
+		} else {
+			None
+		}
+	} else {
+		None
+	};
+
 	let mut s = Spec {
 		name: s.name.clone().into(),
 		engine: Spec::engine(spec_params, s.engine, params, builtins),
@@ -387,6 +448,7 @@ fn load_from(spec_params: SpecParams, s: ethjson::spec::Spec) -> Result<Spec, Er
 		timestamp: g.timestamp,
 		extra_data: g.extra_data,
 		seal_rlp: seal_rlp,
+		hardcoded_sync: hardcoded_sync,
 		constructors: s.accounts
 			.constructors()
 			.into_iter()
json/src/spec/hardcoded_sync.rs — new file (63 lines)
@@ -0,0 +1,63 @@
+// Copyright 2018 Parity Technologies (UK) Ltd.
+// This file is part of Parity.
+
+// Parity is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// Parity is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with Parity.  If not, see <http://www.gnu.org/licenses/>.
+
+//! Spec hardcoded synchronization deserialization for the light client.
+
+use hash::H256;
+use uint::Uint;
+
+/// Spec hardcoded sync.
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
+pub struct HardcodedSync {
+	/// Hexadecimal of the RLP encoding of the header of the block to start synchronization from.
+	pub header: String,
+	/// Total difficulty including the block of `header`.
+	#[serde(rename="totalDifficulty")]
+	pub total_difficulty: Uint,
+	/// Ordered trie roots of blocks before and including `header`.
+	#[serde(rename="CHTs")]
+	pub chts: Vec<H256>,
+}
+
+#[cfg(test)]
+mod tests {
+	use serde_json;
+	use uint::Uint;
+	use ethereum_types::{U256, H256 as Eth256};
+	use hash::H256;
+	use spec::hardcoded_sync::HardcodedSync;
+
+	#[test]
+	fn hardcoded_sync_deserialization() {
+		let s = r#"{
+			"header": "f901f9a0d405da4e66f1445d455195229624e133f5baafe72b5cf7b3c36c12c8146e98b7a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a05fb2b4bfdef7b314451cb138a534d225c922fc0e5fbe25e451142732c3e25c25a088d2ec6b9860aae1a2c3b299f72b6a5d70d7f7ba4722c78f2c49ba96273c2158a007c6fdfa8eea7e86b81f5b0fc0f78f90cc19f4aa60d323151e0cac660199e9a1b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008302008003832fefba82524d84568e932a80a0a0349d8c3df71f1a48a9df7d03fd5f14aeee7d91332c009ecaff0a71ead405bd88ab4e252a7e8c2a23",
+			"totalDifficulty": "0x400000000",
+			"CHTs": [
+				"0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa",
+				"0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
+			]
+		}"#;
+		let deserialized: HardcodedSync = serde_json::from_str(s).unwrap();
+		assert_eq!(deserialized, HardcodedSync {
+			header: String::from("f901f9a0d405da4e66f1445d455195229624e133f5baafe72b5cf7b3c36c12c8146e98b7a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a05fb2b4bfdef7b314451cb138a534d225c922fc0e5fbe25e451142732c3e25c25a088d2ec6b9860aae1a2c3b299f72b6a5d70d7f7ba4722c78f2c49ba96273c2158a007c6fdfa8eea7e86b81f5b0fc0f78f90cc19f4aa60d323151e0cac660199e9a1b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008302008003832fefba82524d84568e932a80a0a0349d8c3df71f1a48a9df7d03fd5f14aeee7d91332c009ecaff0a71ead405bd88ab4e252a7e8c2a23"),
+			total_difficulty: Uint(U256::from(0x400000000u64)),
+			chts: vec![
+				H256(Eth256::from("0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa")),
+				H256(Eth256::from("0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544")),
+			]
+		});
+	}
+}
@@ -30,6 +30,7 @@ pub mod basic_authority;
 pub mod authority_round;
 pub mod tendermint;
 pub mod null_engine;
+pub mod hardcoded_sync;
 
 pub use self::account::Account;
 pub use self::builtin::{Builtin, Pricing, Linear};
@@ -45,3 +46,4 @@ pub use self::basic_authority::{BasicAuthority, BasicAuthorityParams};
 pub use self::authority_round::{AuthorityRound, AuthorityRoundParams};
 pub use self::tendermint::{Tendermint, TendermintParams};
 pub use self::null_engine::{NullEngine, NullEngineParams};
+pub use self::hardcoded_sync::HardcodedSync;
@@ -19,7 +19,7 @@
 use std::io::Read;
 use serde_json;
 use serde_json::Error;
-use spec::{Params, Genesis, Engine, State};
+use spec::{Params, Genesis, Engine, State, HardcodedSync};
 
 /// Spec deserialization.
 #[derive(Debug, PartialEq, Deserialize)]
@@ -39,6 +39,9 @@ pub struct Spec {
 	pub accounts: State,
 	/// Boot nodes.
 	pub nodes: Option<Vec<String>>,
+	/// Hardcoded synchronization for the light client.
+	#[serde(rename="hardcodedSync")]
+	pub hardcoded_sync: Option<HardcodedSync>,
 }
 
 impl Spec {
@@ -104,6 +107,14 @@ mod tests {
 		"0000000000000000000000000000000000000003": { "balance": "1", "nonce": "1048576", "builtin": { "name": "ripemd160", "pricing": { "linear": { "base": 600, "word": 120 } } } },
 		"0000000000000000000000000000000000000004": { "balance": "1", "nonce": "1048576", "builtin": { "name": "identity", "pricing": { "linear": { "base": 15, "word": 3 } } } },
 		"102e61f5d8f9bc71d0ad4a084df4e65e05ce0e1c": { "balance": "1606938044258990275541962092341162602522202993782792835301376", "nonce": "1048576" }
-	}
+		},
+		"hardcodedSync": {
+			"header": "f901f9a0d405da4e66f1445d455195229624e133f5baafe72b5cf7b3c36c12c8146e98b7a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347948888f1f195afa192cfee860698584c030f4c9db1a05fb2b4bfdef7b314451cb138a534d225c922fc0e5fbe25e451142732c3e25c25a088d2ec6b9860aae1a2c3b299f72b6a5d70d7f7ba4722c78f2c49ba96273c2158a007c6fdfa8eea7e86b81f5b0fc0f78f90cc19f4aa60d323151e0cac660199e9a1b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008302008003832fefba82524d84568e932a80a0a0349d8c3df71f1a48a9df7d03fd5f14aeee7d91332c009ecaff0a71ead405bd88ab4e252a7e8c2a23",
+			"totalDifficulty": "0x400000000",
+			"CHTs": [
+				"0x11bbe8db4e347b4e8c937c1c8370e4b5ed33adb3db69cbdb7a38e1e50b1b82fa",
+				"0xd7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"
+			]
+		}
 	}"#;
 		let _deserialized: Spec = serde_json::from_str(s).unwrap();
@@ -18,7 +18,7 @@
 
 use std::fmt;
 use std::str::FromStr;
-use serde::{Deserialize, Deserializer};
+use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use serde::de::{Error, Visitor, Unexpected};
 use ethereum_types::U256;
 
@@ -50,6 +50,13 @@ impl Into<u8> for Uint {
 	}
 }
 
+impl Serialize for Uint {
+	fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+		where S: Serializer {
+		self.0.to_string().serialize(serializer)
+	}
+}
+
 impl<'a> Deserialize<'a> for Uint {
 	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
 		where D: Deserializer<'a> {
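The new `Serialize` impl writes the wrapped `U256` out through its `Display` form, i.e. as a decimal string rather than hex. A standalone sketch of that pattern, assuming only ethereum_types, serde, and serde_json; the `DecimalUint` newtype below is illustrative, not the crate's `Uint`.

use ethereum_types::U256;
use serde::{Serialize, Serializer};

struct DecimalUint(U256);

impl Serialize for DecimalUint {
	fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
		where S: Serializer {
		// U256's Display is decimal, so 0x100000 serializes as the string "1048576".
		self.0.to_string().serialize(serializer)
	}
}

fn main() {
	let nonce = DecimalUint(U256::from(1_048_576u64));
	let json = serde_json::to_string(&nonce).expect("serializing a string cannot fail");
	assert_eq!(json, "\"1048576\"");
	println!("{}", json);
}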
@@ -201,6 +201,7 @@ fn execute_import_light(cmd: ImportBlockchain) -> Result<(), String> {
 		chain_column: ::ethcore::db::COL_LIGHT_CHAIN,
 		verify_full: true,
 		check_seal: cmd.check_seal,
+		no_hardcoded_sync: true,
 	};
 
 	config.queue.max_mem_use = cmd.cache_config.queue() as usize * 1024 * 1024;
@@ -231,6 +231,11 @@ usage! {
 				"Clean the database",
 			}
 		}
+
+		CMD cmd_export_hardcoded_sync
+		{
+			"Export the hardcoded sync JSON file from the existing light client database",
+		}
 	}
 	{
 		// Global flags and arguments
@@ -251,6 +256,10 @@ usage! {
 			"--light",
 			"Experimental: run in light client mode. Light clients synchronize a bare minimum of data and fetch necessary data on-demand from the network. Much lower in storage, potentially higher in bandwidth. Has no effect with subcommands.",
 
+			FLAG flag_no_hardcoded_sync: (bool) = false, or |c: &Config| c.parity.as_ref()?.no_hardcoded_sync,
+			"--no-hardcoded-sync",
+			"By default, if there is no existing database the light client will automatically jump to a block hardcoded in the chain's specification. This flag disables that behaviour.",
+
 			FLAG flag_force_direct: (bool) = false, or |_| None,
 			"--force-direct",
 			"Run the originally installed version of Parity, ignoring any updates that have since been installed.",
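The `or |c: &Config| …` part of the new FLAG supplies the fallback consulted when the flag is absent from the command line: it reads `no_hardcoded_sync` from the config file's `parity` section (the `Operating` struct below), returning None when that section is missing, in which case the declared default `false` applies. A standalone sketch of that Option-chaining pattern; the two structs here are simplified stand-ins for the generated config types.

struct ParitySection { no_hardcoded_sync: Option<bool> }
struct Config { parity: Option<ParitySection> }

// Mirrors the closure `|c: &Config| c.parity.as_ref()?.no_hardcoded_sync`:
// `?` short-circuits to None when the section is absent.
fn no_hardcoded_sync_default(c: &Config) -> Option<bool> {
	c.parity.as_ref()?.no_hardcoded_sync
}

fn main() {
	let with_section = Config { parity: Some(ParitySection { no_hardcoded_sync: Some(true) }) };
	let without_section = Config { parity: None };
	assert_eq!(no_hardcoded_sync_default(&with_section), Some(true));
	assert_eq!(no_hardcoded_sync_default(&without_section), None);
	println!("config fallback resolves as sketched");
}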
@@ -1006,6 +1015,7 @@ struct Operating {
 	identity: Option<String>,
 	light: Option<bool>,
 	no_persistent_txqueue: Option<bool>,
+	no_hardcoded_sync: Option<bool>,
 }
 
 #[derive(Default, Debug, PartialEq, Deserialize)]
@@ -1409,6 +1419,7 @@ mod tests {
 			cmd_tools_hash: false,
 			cmd_db: false,
 			cmd_db_kill: false,
+			cmd_export_hardcoded_sync: false,
 
 			// Arguments
 			arg_daemon_pid_file: None,
@@ -1443,6 +1454,7 @@ mod tests {
 			arg_keys_path: "$HOME/.parity/keys".into(),
 			arg_identity: "".into(),
 			flag_light: false,
+			flag_no_hardcoded_sync: false,
 			flag_no_persistent_txqueue: false,
 			flag_force_direct: false,
 
@@ -1697,6 +1709,7 @@ mod tests {
 				keys_path: None,
 				identity: None,
 				light: None,
+				no_hardcoded_sync: None,
 				no_persistent_txqueue: None,
 			}),
 			account: Some(Account {
@@ -49,6 +49,7 @@ use secretstore::{NodeSecretKey, Configuration as SecretStoreConfiguration, Cont
 use updater::{UpdatePolicy, UpdateFilter, ReleaseTrack};
 use run::RunCmd;
 use blockchain::{BlockchainCmd, ImportBlockchain, ExportBlockchain, KillBlockchain, ExportState, DataFormat};
+use export_hardcoded_sync::ExportHsyncCmd;
 use presale::ImportWallet;
 use account::{AccountCmd, NewAccount, ListAccounts, ImportAccounts, ImportFromGethAccounts};
 use snapshot::{self, SnapshotCommand};
@@ -79,6 +80,7 @@ pub enum Cmd {
 	},
 	Snapshot(SnapshotCommand),
 	Hash(Option<String>),
+	ExportHardcodedSync(ExportHsyncCmd),
 }
 
 pub struct Execute {
@@ -317,6 +319,16 @@ impl Configuration {
 				block_at: to_block_id("latest")?, // unimportant.
 			};
 			Cmd::Snapshot(restore_cmd)
+		} else if self.args.cmd_export_hardcoded_sync {
+			let export_hs_cmd = ExportHsyncCmd {
+				cache_config: cache_config,
+				dirs: dirs,
+				spec: spec,
+				pruning: pruning,
+				compaction: compaction,
+				wal: wal,
+			};
+			Cmd::ExportHardcodedSync(export_hs_cmd)
 		} else {
 			let daemon = if self.args.cmd_daemon {
 				Some(self.args.arg_daemon_pid_file.clone().expect("CLI argument is required; qed"))
@@ -375,6 +387,7 @@ impl Configuration {
 				light: self.args.flag_light,
 				no_persistent_txqueue: self.args.flag_no_persistent_txqueue,
 				whisper: whisper_config,
+				no_hardcoded_sync: self.args.flag_no_hardcoded_sync,
 			};
 			Cmd::Run(run_cmd)
 		};
@@ -1422,6 +1435,7 @@ mod tests {
 			verifier_settings: Default::default(),
 			serve_light: true,
 			light: false,
+			no_hardcoded_sync: false,
 			no_persistent_txqueue: false,
 			whisper: Default::default(),
 		};
parity/export_hardcoded_sync.rs (new file, 117 lines)
@@ -0,0 +1,117 @@
+// Copyright 2018 Parity Technologies (UK) Ltd.
+// This file is part of Parity.
+
+// Parity is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// Parity is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with Parity.  If not, see <http://www.gnu.org/licenses/>.
+
+use std::sync::Arc;
+use std::time::Duration;
+
+use ethcore::client::DatabaseCompactionProfile;
+use ethcore::db::NUM_COLUMNS;
+use ethcore::spec::{SpecParams, OptimizeFor};
+use kvdb_rocksdb::{Database, DatabaseConfig};
+use light::client::fetch::Unavailable as UnavailableDataFetcher;
+use light::Cache as LightDataCache;
+
+use params::{SpecType, Pruning};
+use helpers::execute_upgrades;
+use dir::Directories;
+use cache::CacheConfig;
+use user_defaults::UserDefaults;
+
+// Number of minutes before a given gas price corpus should expire.
+// Light client only.
+const GAS_CORPUS_EXPIRATION_MINUTES: u64 = 60 * 6;
+
+#[derive(Debug, PartialEq)]
+pub struct ExportHsyncCmd {
+	pub cache_config: CacheConfig,
+	pub dirs: Directories,
+	pub spec: SpecType,
+	pub pruning: Pruning,
+	pub compaction: DatabaseCompactionProfile,
+	pub wal: bool,
+}
+
+pub fn execute(cmd: ExportHsyncCmd) -> Result<String, String> {
+	use light::client as light_client;
+	use parking_lot::Mutex;
+
+	// load spec
+	let spec = cmd.spec.spec(SpecParams::new(cmd.dirs.cache.as_ref(), OptimizeFor::Memory))?;
+
+	// load genesis hash
+	let genesis_hash = spec.genesis_header().hash();
+
+	// database paths
+	let db_dirs = cmd.dirs.database(genesis_hash, cmd.spec.legacy_fork_name(), spec.data_dir.clone());
+
+	// user defaults path
+	let user_defaults_path = db_dirs.user_defaults_path();
+
+	// load user defaults
+	let user_defaults = UserDefaults::load(&user_defaults_path)?;
+
+	// select pruning algorithm
+	let algorithm = cmd.pruning.to_algorithm(&user_defaults);
+
+	let compaction = cmd.compaction.compaction_profile(db_dirs.db_root_path().as_path());
+
+	// execute upgrades
+	execute_upgrades(&cmd.dirs.base, &db_dirs, algorithm, compaction.clone())?;
+
+	// create dirs used by parity
+	cmd.dirs.create_dirs(false, false, false)?;
+
+	// TODO: configurable cache size.
+	let cache = LightDataCache::new(Default::default(), Duration::from_secs(60 * GAS_CORPUS_EXPIRATION_MINUTES));
+	let cache = Arc::new(Mutex::new(cache));
+
+	// start client and create transaction queue.
+	let mut config = light_client::Config {
+		queue: Default::default(),
+		chain_column: ::ethcore::db::COL_LIGHT_CHAIN,
+		verify_full: true,
+		check_seal: true,
+		no_hardcoded_sync: true,
+	};
+
+	config.queue.max_mem_use = cmd.cache_config.queue() as usize * 1024 * 1024;
+
+	// initialize database.
+	let db = {
+		let db_config = DatabaseConfig {
+			memory_budget: Some(cmd.cache_config.blockchain() as usize * 1024 * 1024),
+			compaction: compaction,
+			wal: cmd.wal,
+			.. DatabaseConfig::with_columns(NUM_COLUMNS)
+		};
+
+		Arc::new(Database::open(
+			&db_config,
+			&db_dirs.client_path(algorithm).to_str().expect("DB path could not be converted to string.")
+		).map_err(|e| format!("Error opening database: {}", e))?)
+	};
+
+	let service = light_client::Service::start(config, &spec, UnavailableDataFetcher, db, cache)
+		.map_err(|e| format!("Error starting light client: {}", e))?;
+
+	let hs = service.client().read_hardcoded_sync()
+		.map_err(|e| format!("Error reading hardcoded sync: {}", e))?;
+	if let Some(hs) = hs {
+		Ok(::serde_json::to_string_pretty(&hs.to_json()).expect("generated JSON is always valid"))
+	} else {
+		Err("Error: cannot generate hardcoded sync because the database is empty.".into())
+	}
+}
@@ -106,6 +106,7 @@ mod cache;
 mod cli;
 mod configuration;
 mod dapps;
+mod export_hardcoded_sync;
 mod ipfs;
 mod deprecated;
 mod helpers;
@@ -175,6 +176,7 @@ fn execute(command: Execute, can_restart: bool) -> Result<PostExecutionAction, S
 		Cmd::SignerList { port, authfile } => rpc_cli::signer_list(port, authfile).map(|s| PostExecutionAction::Print(s)),
 		Cmd::SignerReject { id, port, authfile } => rpc_cli::signer_reject(id, port, authfile).map(|s| PostExecutionAction::Print(s)),
 		Cmd::Snapshot(snapshot_cmd) => snapshot::execute(snapshot_cmd).map(|s| PostExecutionAction::Print(s)),
+		Cmd::ExportHardcodedSync(export_hs_cmd) => export_hardcoded_sync::execute(export_hs_cmd).map(|s| PostExecutionAction::Print(s)),
 	}
 }
@@ -130,7 +130,8 @@ pub struct RunCmd {
 	pub serve_light: bool,
 	pub light: bool,
 	pub no_persistent_txqueue: bool,
-	pub whisper: ::whisper::Config
+	pub whisper: ::whisper::Config,
+	pub no_hardcoded_sync: bool,
 }
 
 pub fn open_ui(ws_conf: &rpc::WsConfiguration, ui_conf: &rpc::UiConfiguration, logger_config: &LogConfig) -> Result<(), String> {
@@ -226,6 +227,7 @@ fn execute_light_impl(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger
 		chain_column: ::ethcore::db::COL_LIGHT_CHAIN,
 		verify_full: true,
 		check_seal: cmd.check_seal,
+		no_hardcoded_sync: cmd.no_hardcoded_sync,
 	};
 
 	config.queue.max_mem_use = cmd.cache_config.queue() as usize * 1024 * 1024;
@@ -567,6 +569,11 @@ pub fn execute_impl(cmd: RunCmd, can_restart: bool, logger: Arc<RotatingLogger>)
 		}
 	}
 
+	// display warning if using --no-hardcoded-sync
+	if cmd.no_hardcoded_sync {
+		warn!("The --no-hardcoded-sync flag has no effect if you don't use --light");
+	}
+
 	// create client config
 	let mut client_config = to_client_config(
 		&cmd.cache_config,