parent cf10450108
commit 568dc33a02
@@ -18,9 +18,9 @@ linux-stable:
   stage: build
   image: parity/rust:gitlab-ci
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - rustup default stable
@@ -36,9 +36,9 @@ linux-stable-debian:
   stage: build
   image: parity/rust-debian:gitlab-ci
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - export LIBSSL="libssl1.1 (>=1.1.0)"
@@ -53,9 +53,9 @@ linux-centos:
   stage: build
   image: parity/rust-centos:gitlab-ci
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - scripts/gitlab-build.sh x86_64-unknown-centos-gnu x86_64-unknown-linux-gnu x86_64 gcc g++
@@ -69,9 +69,9 @@ linux-i686:
   stage: build
   image: parity/rust-i686:gitlab-ci
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - scripts/gitlab-build.sh i686-unknown-linux-gnu i686-unknown-linux-gnu i386 gcc g++
@@ -85,9 +85,9 @@ linux-armv7:
   stage: build
   image: parity/rust-armv7:gitlab-ci
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - scripts/gitlab-build.sh armv7-unknown-linux-gnueabihf armv7-unknown-linux-gnueabihf armhf arm-linux-gnueabihf-gcc arm-linux-gnueabihf-g++
@@ -101,9 +101,9 @@ linux-arm:
   stage: build
   image: parity/rust-arm:gitlab-ci
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - scripts/gitlab-build.sh arm-unknown-linux-gnueabihf arm-unknown-linux-gnueabihf armhf arm-linux-gnueabihf-gcc arm-linux-gnueabihf-g++
@@ -117,9 +117,9 @@ linux-aarch64:
   stage: build
   image: parity/rust-arm64:gitlab-ci
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - scripts/gitlab-build.sh aarch64-unknown-linux-gnu aarch64-unknown-linux-gnu arm64 aarch64-linux-gnu-gcc aarch64-linux-gnu-g++
@@ -149,9 +149,9 @@ linux-snap:
 darwin:
   stage: build
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - scripts/gitlab-build.sh x86_64-apple-darwin x86_64-apple-darwin macos gcc g++
@@ -167,9 +167,9 @@ windows:
   untracked: true
   stage: build
   only:
-    - stable
     - beta
     - tags
+    - stable
     - triggers
   script:
     - sh scripts/gitlab-build.sh x86_64-pc-windows-msvc x86_64-pc-windows-msvc installer "" "" ""
@@ -182,8 +182,6 @@ windows:
 docker-build:
   stage: build
   only:
-    - stable
-    - beta
     - tags
     - triggers
   before_script:
@@ -246,8 +244,8 @@ js-release:
   stage: js-build
   only:
     - master
-    - stable
     - beta
+    - stable
     - tags
     - triggers
   image: parity/rust:gitlab-ci

Cargo.lock (generated): 5 changed lines
@@ -2671,7 +2671,7 @@ dependencies = [
 [[package]]
 name = "rocksdb"
 version = "0.4.5"
-source = "git+https://github.com/paritytech/rust-rocksdb#ecf06adf3148ab10f6f7686b724498382ff4f36e"
+source = "git+https://github.com/paritytech/rust-rocksdb#7adec2311d31387a832b0ef051472cdef906b480"
 dependencies = [
  "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
  "local-encoding 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -2681,11 +2681,10 @@ dependencies = [
 [[package]]
 name = "rocksdb-sys"
 version = "0.3.0"
-source = "git+https://github.com/paritytech/rust-rocksdb#ecf06adf3148ab10f6f7686b724498382ff4f36e"
+source = "git+https://github.com/paritytech/rust-rocksdb#7adec2311d31387a832b0ef051472cdef906b480"
 dependencies = [
  "cc 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.31 (registry+https://github.com/rust-lang/crates.io-index)",
- "local-encoding 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "snappy-sys 0.1.0 (git+https://github.com/paritytech/rust-snappy)",
 ]
 

@@ -47,8 +47,6 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
 
 	// Content Security Policy headers
 	headers.set_raw("Content-Security-Policy", String::new()
-		// Restrict everything to the same origin by default.
-		+ "default-src 'self';"
 		// Allow connecting to WS servers and HTTP(S) servers.
 		// We could be more restrictive and allow only RPC server URL.
 		+ "connect-src http: https: ws: wss:;"
@@ -66,9 +64,7 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
 		+ "style-src 'self' 'unsafe-inline' data: blob: https:;"
 		// Allow fonts from data: and HTTPS.
 		+ "font-src 'self' data: https:;"
-		// Disallow objects
-		+ "object-src 'none';"
-		// Allow scripts
+		// Allow inline scripts and scripts eval (webpack/jsconsole)
 		+ {
 			let script_src = embeddable_on.as_ref()
 				.map(|e| e.extra_script_src.iter()
@@ -76,16 +72,18 @@ pub fn add_security_headers(headers: &mut header::Headers, embeddable_on: Embedd
 				.join(" ")
 			).unwrap_or_default();
 			&format!(
-				"script-src 'self' {};",
+				"script-src 'self' 'unsafe-inline' 'unsafe-eval' {};",
 				script_src
 			)
 		}
 		// Same restrictions as script-src with additional
 		// blob: that is required for camera access (worker)
-		+ "worker-src 'self' https: blob:;"
+		+ "worker-src 'self' 'unsafe-inline' 'unsafe-eval' https: blob:;"
+		// Restrict everything else to the same origin.
+		+ "default-src 'self';"
 		// Run in sandbox mode (although it's not fully safe since we allow same-origin and script)
 		+ "sandbox allow-same-origin allow-forms allow-modals allow-popups allow-presentation allow-scripts;"
-		// Disallow submitting forms from any dapps
+		// Disallow subitting forms from any dapps
 		+ "form-action 'none';"
 		// Never allow mixed content
 		+ "block-all-mixed-content;"

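Note (not part of the commit): a rough standalone Rust sketch of the Content-Security-Policy value that the relaxed policy above builds, limited to the directives visible in these hunks; extra_script_src stands in for the embedder-specific entries appended at runtime. script-src and worker-src regain 'unsafe-inline'/'unsafe-eval', and the default-src fallback moves to the end of the string.

// Sketch only: approximates the header value assembled by the code in the hunks above.
fn approximate_csp(extra_script_src: &[&str]) -> String {
    String::new()
        + "connect-src http: https: ws: wss:;"
        + "style-src 'self' 'unsafe-inline' data: blob: https:;"
        + "font-src 'self' data: https:;"
        // Inline scripts and eval are allowed again (webpack/jsconsole).
        + &format!("script-src 'self' 'unsafe-inline' 'unsafe-eval' {};", extra_script_src.join(" "))
        + "worker-src 'self' 'unsafe-inline' 'unsafe-eval' https: blob:;"
        // Everything not covered above falls back to the same origin.
        + "default-src 'self';"
        + "sandbox allow-same-origin allow-forms allow-modals allow-popups allow-presentation allow-scripts;"
        + "form-action 'none';"
        + "block-all-mixed-content;"
}

fn main() {
    println!("{}", approximate_csp(&["https://example.com"]));
}
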
@@ -44,7 +44,8 @@ use bigint::hash::{H256, H520};
 use semantic_version::SemanticVersion;
 use parking_lot::{Mutex, RwLock};
 use unexpected::{Mismatch, OutOfBounds};
-use util::Address;
+use util::*;
+use bytes::Bytes;
 
 mod finality;
 
@@ -290,11 +291,9 @@ struct EpochVerifier {
 
 impl super::EpochVerifier<EthereumMachine> for EpochVerifier {
 	fn verify_light(&self, header: &Header) -> Result<(), Error> {
-		// Validate the timestamp
-		verify_timestamp(&*self.step, header_step(header)?)?;
 		// always check the seal since it's fast.
 		// nothing heavier to do.
-		verify_external(header, &self.subchain_validators)
+		verify_external(header, &self.subchain_validators, &*self.step, |_| {})
 	}
 
 	fn check_finality_proof(&self, proof: &[u8]) -> Option<Vec<H256>> {
@@ -318,7 +317,7 @@ impl super::EpochVerifier<EthereumMachine> for EpochVerifier {
 		//
 		// `verify_external` checks that signature is correct and author == signer.
 		if header.seal().len() != 2 { return None }
-		otry!(verify_external(header, &self.subchain_validators).ok());
+		otry!(verify_external(header, &self.subchain_validators, &*self.step, |_| {}).ok());
 
 		let newly_finalized = otry!(finality_checker.push_hash(header.hash(), header.author().clone()).ok());
 		finalized.extend(newly_finalized);
@@ -328,6 +327,16 @@ impl super::EpochVerifier<EthereumMachine> for EpochVerifier {
 	}
 }
 
+// Report misbehavior
+#[derive(Debug)]
+#[allow(dead_code)]
+enum Report {
+	// Malicious behavior
+	Malicious(Address, BlockNumber, Bytes),
+	// benign misbehavior
+	Benign(Address, BlockNumber),
+}
+
 fn header_step(header: &Header) -> Result<usize, ::rlp::DecoderError> {
 	UntrustedRlp::new(&header.seal().get(0).expect("was either checked with verify_block_basic or is genesis; has 2 fields; qed (Make sure the spec file has a correct genesis seal)")).as_val()
 }
@@ -346,35 +355,34 @@ fn is_step_proposer(validators: &ValidatorSet, bh: &H256, step: usize, address:
 	step_proposer(validators, bh, step) == *address
 }
 
-fn verify_timestamp(step: &Step, header_step: usize) -> Result<(), BlockError> {
-	match step.check_future(header_step) {
-		Err(None) => {
-			trace!(target: "engine", "verify_timestamp: block from the future");
-			Err(BlockError::InvalidSeal.into())
-		},
-		Err(Some(oob)) => {
-			// NOTE This error might be returned only in early stage of verification (Stage 1).
-			// Returning it further won't recover the sync process.
-			trace!(target: "engine", "verify_timestamp: block too early");
-			Err(BlockError::TemporarilyInvalid(oob).into())
-		},
-		Ok(_) => Ok(()),
-	}
-}
-
-fn verify_external(header: &Header, validators: &ValidatorSet) -> Result<(), Error> {
+fn verify_external<F: Fn(Report)>(header: &Header, validators: &ValidatorSet, step: &Step, report: F)
+	-> Result<(), Error>
+{
 	let header_step = header_step(header)?;
 
-	let proposer_signature = header_signature(header)?;
-	let correct_proposer = validators.get(header.parent_hash(), header_step);
-	let is_invalid_proposer = *header.author() != correct_proposer ||
-		!verify_address(&correct_proposer, &proposer_signature, &header.bare_hash())?;
+	match step.check_future(header_step) {
+		Err(None) => {
+			trace!(target: "engine", "verify_block_external: block from the future");
+			report(Report::Benign(*header.author(), header.number()));
+			return Err(BlockError::InvalidSeal.into())
+		},
+		Err(Some(oob)) => {
+			trace!(target: "engine", "verify_block_external: block too early");
+			return Err(BlockError::TemporarilyInvalid(oob).into())
+		},
+		Ok(_) => {
+			let proposer_signature = header_signature(header)?;
+			let correct_proposer = validators.get(header.parent_hash(), header_step);
+			let is_invalid_proposer = *header.author() != correct_proposer ||
+				!verify_address(&correct_proposer, &proposer_signature, &header.bare_hash())?;
 
-	if is_invalid_proposer {
-		trace!(target: "engine", "verify_block_external: bad proposer for step: {}", header_step);
-		Err(EngineError::NotProposer(Mismatch { expected: correct_proposer, found: header.author().clone() }))?
-	} else {
-		Ok(())
+			if is_invalid_proposer {
+				trace!(target: "engine", "verify_block_external: bad proposer for step: {}", header_step);
+				Err(EngineError::NotProposer(Mismatch { expected: correct_proposer, found: header.author().clone() }))?
+			} else {
+				Ok(())
+			}
+		}
 	}
 }
 
@@ -647,38 +655,26 @@ impl Engine<EthereumMachine> for AuthorityRound {
 	/// Check the number of seal fields.
 	fn verify_block_basic(&self, header: &Header) -> Result<(), Error> {
 		if header.number() >= self.validate_score_transition && *header.difficulty() >= U256::from(U128::max_value()) {
-			return Err(From::from(BlockError::DifficultyOutOfBounds(
+			Err(From::from(BlockError::DifficultyOutOfBounds(
 				OutOfBounds { min: None, max: Some(U256::from(U128::max_value())), found: *header.difficulty() }
-			)));
-		}
-		// TODO [ToDr] Should this go from epoch manager?
-		// If yes then probably benign reporting needs to be moved further in the verification.
-		let set_number = header.number();
-
-		match verify_timestamp(&*self.step, header_step(header)?) {
-			Err(BlockError::InvalidSeal) => {
-				self.validators.report_benign(header.author(), set_number, header.number());
-				Err(BlockError::InvalidSeal.into())
-			}
-			Err(e) => Err(e.into()),
-			Ok(()) => Ok(()),
+			)))
+		} else {
+			Ok(())
 		}
 	}
 
 	/// Do the step and gas limit validation.
 	fn verify_block_family(&self, header: &Header, parent: &Header) -> Result<(), Error> {
 		let step = header_step(header)?;
 
 		let parent_step = header_step(parent)?;
-		// TODO [ToDr] Should this go from epoch manager?
-		let set_number = header.number();
 
 		// Ensure header is from the step after parent.
 		if step == parent_step
 			|| (header.number() >= self.validate_step_transition && step <= parent_step) {
 			trace!(target: "engine", "Multiple blocks proposed for step {}.", parent_step);
 
-			self.validators.report_malicious(header.author(), set_number, header.number(), Default::default());
+			self.validators.report_malicious(header.author(), header.number(), header.number(), Default::default());
 			Err(EngineError::DoubleVote(header.author().clone()))?;
 		}
 
@@ -691,7 +687,7 @@ impl Engine<EthereumMachine> for AuthorityRound {
 			let skipped_primary = step_proposer(&*self.validators, &parent.hash(), s);
 			// Do not report this signer.
 			if skipped_primary != me {
-				self.validators.report_benign(&skipped_primary, set_number, header.number());
+				self.validators.report_benign(&skipped_primary, header.number(), header.number());
 			}
 			// Stop reporting once validators start repeating.
 			if !reported.insert(skipped_primary) { break; }
@@ -706,8 +702,9 @@ impl Engine<EthereumMachine> for AuthorityRound {
 		// fetch correct validator set for current epoch, taking into account
 		// finality of previous transitions.
 		let active_set;
-		let validators = if self.immediate_transitions {
-			&*self.validators
+
+		let (validators, set_number) = if self.immediate_transitions {
+			(&*self.validators, header.number())
 		} else {
 			// get correct validator set for epoch.
 			let client = match self.client.read().as_ref().and_then(|weak| weak.upgrade()) {
@@ -725,12 +722,21 @@ impl Engine<EthereumMachine> for AuthorityRound {
 			}
 
 			active_set = epoch_manager.validators().clone();
-			&active_set as &_
+			(&active_set as &_, epoch_manager.epoch_transition_number)
+		};
+
+		// always report with "self.validators" so that the report actually gets
+		// to the contract.
+		let report = |report| match report {
+			Report::Benign(address, block_number) =>
+				self.validators.report_benign(&address, set_number, block_number),
+			Report::Malicious(address, block_number, proof) =>
+				self.validators.report_malicious(&address, set_number, block_number, proof),
 		};
 
 		// verify signature against fixed list, but reports should go to the
 		// contract itself.
-		verify_external(header, validators)
+		verify_external(header, validators, &*self.step, report)
 	}
 
 	fn genesis_epoch_data(&self, header: &Header, call: &Call) -> Result<Vec<u8>, String> {
@@ -1053,7 +1059,8 @@ mod tests {
 		assert!(engine.verify_block_family(&header, &parent_header).is_ok());
 		assert!(engine.verify_block_external(&header).is_ok());
 		header.set_seal(vec![encode(&5usize).into_vec(), encode(&(&*signature as &[u8])).into_vec()]);
-		assert!(engine.verify_block_basic(&header).is_err());
+		assert!(engine.verify_block_family(&header, &parent_header).is_ok());
+		assert!(engine.verify_block_external(&header).is_err());
 	}
 
 	#[test]
@@ -1193,4 +1200,3 @@ mod tests {
 		AuthorityRound::new(params, machine).unwrap();
 	}
 }
-

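Aside (not parity code): a minimal, self-contained sketch of the callback pattern restored in the AuthorityRound hunks above. Verification stays a free function and hands misbehavior back to the caller through a Fn(Report) closure, so the caller decides which validator set and epoch the report is charged against; all names below are illustrative.

#[derive(Debug)]
#[allow(dead_code)] // mirrors the restored Report enum; not every variant is constructed here
enum Report {
    Benign(u64),    // block number of the benign misbehavior
    Malicious(u64), // block number of the malicious behavior
}

// A step must strictly follow its parent's step; otherwise report and fail.
fn verify_step<F: Fn(Report)>(step: u64, parent_step: u64, report: F) -> Result<(), String> {
    if step <= parent_step {
        report(Report::Malicious(step));
        return Err(format!("step {} is not after parent step {}", step, parent_step));
    }
    Ok(())
}

fn main() {
    // The caller supplies the reporting behaviour; here it is just logged.
    let result = verify_step(3, 3, |r| println!("would report: {:?}", r));
    assert!(result.is_err());
}
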
@@ -192,7 +192,7 @@ mod tests {
 		header.set_number(2);
 		header.set_parent_hash(client.chain_info().best_block_hash);
 		// `reportBenign` when the designated proposer releases block from the future (bad clock).
-		assert!(client.engine().verify_block_basic(&header).is_err());
+		assert!(client.engine().verify_block_external(&header).is_err());
 		// Seal a block.
 		client.engine().step();
 		assert_eq!(client.chain_info().best_block_number, 1);

@@ -1406,7 +1406,7 @@ mod tests {
 	}
 
 	#[test]
-	fn should_trace_delegatecall_properly() {
+	fn should_not_trace_delegatecall() {
 		init_log();
 
 		let mut state = get_temp_state();
@@ -1426,7 +1426,7 @@ mod tests {
 		}.sign(&secret(), None);
 
 		state.init_code(&0xa.into(), FromHex::from_hex("6000600060006000600b618000f4").unwrap()).unwrap();
-		state.init_code(&0xb.into(), FromHex::from_hex("60056000526001601ff3").unwrap()).unwrap();
+		state.init_code(&0xb.into(), FromHex::from_hex("6000").unwrap()).unwrap();
 		let result = state.apply(&info, &machine, &t, true).unwrap();
 
 		let expected_trace = vec![FlatTrace {
@@ -1441,23 +1441,23 @@ mod tests {
 				call_type: CallType::Call,
 			}),
 			result: trace::Res::Call(trace::CallResult {
-				gas_used: U256::from(736), // in post-eip150
+				gas_used: U256::from(721), // in post-eip150
 				output: vec![]
 			}),
 		}, FlatTrace {
 			trace_address: vec![0].into_iter().collect(),
 			subtraces: 0,
 			action: trace::Action::Call(trace::Call {
-				from: 0xa.into(),
-				to: 0xb.into(),
+				from: "9cce34f7ab185c7aba1b7c8140d620b4bda941d6".into(),
+				to: 0xa.into(),
 				value: 0.into(),
 				gas: 32768.into(),
 				input: vec![],
 				call_type: CallType::DelegateCall,
 			}),
 			result: trace::Res::Call(trace::CallResult {
-				gas_used: 18.into(),
-				output: vec![5],
+				gas_used: 3.into(),
+				output: vec![],
 			}),
 		}];
 

@@ -74,23 +74,13 @@ pub struct Call {
 
 impl From<ActionParams> for Call {
 	fn from(p: ActionParams) -> Self {
-		match p.call_type {
-			CallType::DelegateCall => Call {
-				from: p.address,
-				to: p.code_address,
-				value: p.value.value(),
-				gas: p.gas,
-				input: p.data.unwrap_or_else(Vec::new),
-				call_type: p.call_type,
-			},
-			_ => Call {
-				from: p.sender,
-				to: p.address,
-				value: p.value.value(),
-				gas: p.gas,
-				input: p.data.unwrap_or_else(Vec::new),
-				call_type: p.call_type,
-			},
+		Call {
+			from: p.sender,
+			to: p.address,
+			value: p.value.value(),
+			gas: p.gas,
+			input: p.data.unwrap_or_else(Vec::new),
+			call_type: p.call_type,
 		}
 	}
 }

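For context, a standalone sketch (simplified types, not the ethcore trace API) of the attribution change above: with the DelegateCall special case removed, a trace Call is always built as sender to address, rather than address to code_address, which matches the adjusted expectations in the should_not_trace_delegatecall test above.

#[derive(Clone, Copy, Debug, PartialEq)]
enum CallType { Call, DelegateCall }

// Stand-in for ActionParams with only the fields the conversion touches.
struct ActionParams { sender: u64, address: u64, code_address: u64, call_type: CallType }

#[derive(Debug)]
struct Call { from: u64, to: u64, call_type: CallType }

impl From<ActionParams> for Call {
    fn from(p: ActionParams) -> Self {
        // No DelegateCall branch any more: always attribute sender -> address.
        let _ = p.code_address; // the delegated-to code address no longer appears in the trace
        Call { from: p.sender, to: p.address, call_type: p.call_type }
    }
}

fn main() {
    let p = ActionParams { sender: 0xbeef, address: 0xa, code_address: 0xb, call_type: CallType::DelegateCall };
    println!("{:?}", Call::from(p)); // attributed sender -> storage context, not -> code address
}
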
@@ -65,15 +65,15 @@ export default function (api, browserHistory, forEmbed = false) {
     .then(() => console.log('v1: started Status Provider'))
 
     .then(() => console.log('v1: starting Personal Provider...'))
-    .then(() => withTimeoutForLight('personal', PersonalProvider.start(), store))
+    .then(() => PersonalProvider.start())
     .then(() => console.log('v1: started Personal Provider'))
 
     .then(() => console.log('v1: starting Balances Provider...'))
-    .then(() => withTimeoutForLight('balances', BalancesProvider.start(), store))
+    .then(() => BalancesProvider.start())
     .then(() => console.log('v1: started Balances Provider'))
 
     .then(() => console.log('v1: starting Tokens Provider...'))
-    .then(() => withTimeoutForLight('tokens', TokensProvider.start(), store))
+    .then(() => TokensProvider.start())
     .then(() => console.log('v1: started Tokens Provider'));
 };
 
@@ -97,39 +97,3 @@ export default function (api, browserHistory, forEmbed = false) {
 
   return store;
 }
-
-function withTimeoutForLight (id, promise, store) {
-  const { nodeKind } = store.getState().nodeStatus;
-  const isLightNode = nodeKind.capability !== 'full';
-
-  if (!isLightNode) {
-    // make sure that no values are passed
-    return promise.then(() => {});
-  }
-
-  return new Promise((resolve, reject) => {
-    let isResolved = false;
-    const doResolve = () => {
-      if (!isResolved) {
-        isResolved = true;
-        resolve();
-      }
-    };
-    const timeout = setTimeout(() => {
-      console.warn(`Resolving ${id} by timeout.`);
-      doResolve();
-    }, 1000);
-
-    promise
-      .then(() => {
-        clearTimeout(timeout);
-        doResolve();
-      })
-      .catch(err => {
-        clearTimeout(timeout);
-        if (!isResolved) {
-          reject(err);
-        }
-      });
-  });
-}

js/package-lock.json (generated): 1504 changed lines; diff suppressed because it is too large
@@ -170,6 +170,7 @@
     "redux": "3.7.2",
     "semantic-ui-react": "0.77.0",
     "solc": "ngotchac/solc-js",
-    "store": "1.3.20"
+    "store": "1.3.20",
+    "web3": "1.0.0-beta.26"
   }
 }

@@ -26,14 +26,11 @@
 .list {
   margin: 0 !important;
   padding: 1em 1em !important;
-  background-color: white;
-
+  background-color: #f5f5f5;
 }
 
-.accountsList {
-  background-color: #f5f5f5;
-  height: 300px;
-  overflow-y: auto;
+.isDefault {
+  background-color: white;
 }
 
 .hasOtherAccounts {

@@ -68,14 +68,14 @@ class DefaultAccount extends Component {
       }
       content={
         <div>
-          <List relaxed='very' selection className={ [styles.list, allAccounts.length > 1 && styles.hasOtherAccounts].join(' ') }>
+          <List relaxed='very' selection className={ [styles.list, styles.isDefault, allAccounts.length > 1 && styles.hasOtherAccounts].join(' ') }>
             <AccountItem
               isDefault
               account={ defaultAccount }
             />
           </List>
           {allAccounts.length > 1 &&
-            <List relaxed='very' selection className={ [styles.list, styles.accountsList].join(' ') } divided>
+            <List relaxed='very' selection className={ styles.list } divided>
               {allAccounts
                 .filter(({ address }) => address !== defaultAddress)
                 .map(account => (

@@ -16,6 +16,7 @@
 
 import Api from '@parity/api';
 import qs from 'query-string';
+import Web3 from 'web3';
 
 function initProvider () {
   const path = window.location.pathname.split('/');
@@ -47,9 +48,24 @@ function initProvider () {
 }
 
 function initWeb3 (ethereum) {
-  const currentProvider = new Api.Provider.SendAsync(ethereum);
+  // FIXME: Use standard provider for web3
+  const provider = new Api.Provider.SendAsync(ethereum);
+  const web3 = new Web3(provider);
 
-  window.web3 = { currentProvider };
+  if (!web3.currentProvider) {
+    web3.currentProvider = provider;
+  }
+
+  // set default account
+  web3.eth.getAccounts((error, accounts) => {
+    if (error || !accounts || !accounts[0]) {
+      return;
+    }
+
+    web3.eth.defaultAccount = accounts[0];
+  });
+
+  window.web3 = web3;
 }
 
 function initParity (ethereum) {

@@ -9,7 +9,7 @@
 !define COMPANYNAME "Parity Technologies"
 !define DESCRIPTION "Fast, light, robust Ethereum implementation"
 !define VERSIONMAJOR 1
-!define VERSIONMINOR 9
+!define VERSIONMINOR 10
 !define VERSIONBUILD 0
 !define ARGS ""
 !define FIRST_START_ARGS "--mode=passive ui"

@@ -586,12 +586,7 @@ impl Configuration {
 		let mut extra_embed = dev_ui.clone();
 		match self.ui_hosts() {
 			// In case host validation is disabled allow all frame ancestors
-			None => {
-				// NOTE Chrome does not seem to support "*:<port>"
-				// we use `http(s)://*:<port>` instead.
-				extra_embed.push(("http://*".to_owned(), ui_port));
-				extra_embed.push(("https://*".to_owned(), ui_port));
-			},
+			None => extra_embed.push(("*".to_owned(), ui_port)),
 			Some(hosts) => extra_embed.extend(hosts.into_iter().filter_map(|host| {
 				let mut it = host.split(":");
 				let host = it.next();

@@ -147,30 +147,19 @@ impl LightFetch {
 			Err(e) => return Box::new(future::err(e)),
 		};
 
-
-		self.send_requests(reqs, |res|
-			extract_header(&res, header_ref)
-				.expect("these responses correspond to requests that header_ref belongs to \
-					therefore it will not fail; qed")
-		)
-	}
-
-	/// Helper for getting contract code at a given block.
-	pub fn code(&self, address: Address, id: BlockId) -> BoxFuture<Vec<u8>> {
-		let mut reqs = Vec::new();
-		let header_ref = match self.make_header_requests(id, &mut reqs) {
-			Ok(r) => r,
-			Err(e) => return Box::new(future::err(e)),
-		};
-
-		reqs.push(request::Account { header: header_ref.clone(), address: address }.into());
-		let account_idx = reqs.len() - 1;
-		reqs.push(request::Code { header: header_ref, code_hash: Field::back_ref(account_idx, 0) }.into());
-
-		self.send_requests(reqs, |mut res| match res.pop() {
-			Some(OnDemandResponse::Code(code)) => code,
-			_ => panic!("responses correspond directly with requests in amount and type; qed"),
-		})
+		let maybe_future = self.sync.with_context(move |ctx| {
+			Box::new(self.on_demand.request_raw(ctx, reqs)
+				.expect("all back-references known to be valid; qed")
+				.map(|res| extract_header(&res, header_ref)
+					.expect("these responses correspond to requests that header_ref belongs to. \
+						therefore it will not fail; qed"))
+				.map_err(errors::on_demand_cancel))
+		});
+
+		match maybe_future {
+			Some(recv) => recv,
+			None => Box::new(future::err(errors::network_disabled()))
+		}
 	}
 
 	/// Helper for getting account info at a given block.
@@ -184,10 +173,20 @@ impl LightFetch {
 
 		reqs.push(request::Account { header: header_ref, address: address }.into());
 
-		self.send_requests(reqs, |mut res|match res.pop() {
-			Some(OnDemandResponse::Account(acc)) => acc,
-			_ => panic!("responses correspond directly with requests in amount and type; qed"),
-		})
+		let maybe_future = self.sync.with_context(move |ctx| {
+			Box::new(self.on_demand.request_raw(ctx, reqs)
+				.expect("all back-references known to be valid; qed")
+				.map(|mut res| match res.pop() {
+					Some(OnDemandResponse::Account(acc)) => acc,
+					_ => panic!("responses correspond directly with requests in amount and type; qed"),
+				})
+				.map_err(errors::on_demand_cancel))
+		});
+
+		match maybe_future {
+			Some(recv) => recv,
+			None => Box::new(future::err(errors::network_disabled()))
+		}
 	}
 
 	/// Helper for getting proved execution.
@@ -278,10 +277,20 @@ impl LightFetch {
 
 		reqs.push(request::Body(header_ref).into());
 
-		self.send_requests(reqs, |mut res| match res.pop() {
-			Some(OnDemandResponse::Body(b)) => b,
-			_ => panic!("responses correspond directly with requests in amount and type; qed"),
-		})
+		let maybe_future = self.sync.with_context(move |ctx| {
+			Box::new(self.on_demand.request_raw(ctx, reqs)
+				.expect(NO_INVALID_BACK_REFS)
+				.map(|mut res| match res.pop() {
+					Some(OnDemandResponse::Body(b)) => b,
+					_ => panic!("responses correspond directly with requests in amount and type; qed"),
+				})
+				.map_err(errors::on_demand_cancel))
+		});
+
+		match maybe_future {
+			Some(recv) => recv,
+			None => Box::new(future::err(errors::network_disabled()))
+		}
 	}
 
 	/// Get the block receipts. Fails on unknown block ID.
@@ -294,10 +303,20 @@ impl LightFetch {
 
 		reqs.push(request::BlockReceipts(header_ref).into());
 
-		self.send_requests(reqs, |mut res| match res.pop() {
-			Some(OnDemandResponse::Receipts(b)) => b,
-			_ => panic!("responses correspond directly with requests in amount and type; qed"),
-		})
+		let maybe_future = self.sync.with_context(move |ctx| {
+			Box::new(self.on_demand.request_raw(ctx, reqs)
+				.expect(NO_INVALID_BACK_REFS)
+				.map(|mut res| match res.pop() {
+					Some(OnDemandResponse::Receipts(b)) => b,
+					_ => panic!("responses correspond directly with requests in amount and type; qed"),
+				})
+				.map_err(errors::on_demand_cancel))
+		});
+
+		match maybe_future {
+			Some(recv) => recv,
+			None => Box::new(future::err(errors::network_disabled()))
+		}
 	}
 
 	/// Get transaction logs
@@ -414,23 +433,6 @@ impl LightFetch {
 			Either::B(extract_transaction)
 		}))
 	}
-
-	fn send_requests<T, F>(&self, reqs: Vec<OnDemandRequest>, parse_response: F) -> BoxFuture<T> where
-		F: FnOnce(Vec<OnDemandResponse>) -> T + Send + 'static,
-		T: Send + 'static,
-	{
-		let maybe_future = self.sync.with_context(move |ctx| {
-			Box::new(self.on_demand.request_raw(ctx, reqs)
-				.expect(NO_INVALID_BACK_REFS)
-				.map(parse_response)
-				.map_err(errors::on_demand_cancel))
-		});
-
-		match maybe_future {
-			Some(recv) => recv,
-			None => Box::new(future::err(errors::network_disabled()))
-		}
-	}
 }
 
 #[derive(Clone)]

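Aside: a simplified, self-contained sketch (plain std types rather than the parity-rpc/futures API) of the send_requests helper that the hunk above deletes and inlines at each call site. The shape is: run the request batch inside a sync context, let the caller turn the raw responses into its result type, and fall back to a "network disabled" error when no context is available.

#[derive(Debug)]
struct Response(String);

// `ctx == None` models `sync.with_context` yielding nothing (light client not connected).
fn send_requests<T, F>(ctx: Option<()>, reqs: Vec<String>, parse_response: F) -> Result<T, String>
where
    F: FnOnce(Vec<Response>) -> T,
{
    match ctx {
        // Pretend every request produced a response and hand them to the caller's closure.
        Some(()) => Ok(parse_response(reqs.into_iter().map(Response).collect())),
        None => Err("network disabled".to_owned()),
    }
}

fn main() {
    let header = send_requests(Some(()), vec!["header #1".to_owned()], |mut res| res.pop());
    println!("{:?}", header);
}
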
@@ -349,8 +349,8 @@ impl<T: LightChainClient + 'static> Eth for EthClient<T> {
 		}))
 	}
 
-	fn code_at(&self, address: RpcH160, num: Trailing<BlockNumber>) -> BoxFuture<Bytes> {
-		Box::new(self.fetcher().code(address.into(), num.unwrap_or_default().into()).map(Into::into))
+	fn code_at(&self, _address: RpcH160, _num: Trailing<BlockNumber>) -> BoxFuture<Bytes> {
+		Box::new(future::err(errors::unimplemented(None)))
 	}
 
 	fn send_raw_transaction(&self, raw: Bytes) -> Result<RpcH256> {

@@ -208,12 +208,7 @@ impl Parity for ParityClient {
 	}
 
 	fn registry_address(&self) -> Result<Option<H160>> {
-		let reg = self.light_dispatch.client.engine().params().registrar;
-		if reg == Default::default() {
-			Ok(None)
-		} else {
-			Ok(Some(reg.into()))
-		}
+		Err(errors::light_unimplemented(None))
 	}
 
 	fn rpc_settings(&self) -> Result<RpcSettings> {

@@ -1,100 +0,0 @@
-#!/bin/bash
-#ARGUMENT test for RUST, JS, COVERAGE or JS_RELEASE
-set -e # fail on any error
-set -u # treat unset variables as error
-
-if [[ "$CI_COMMIT_REF_NAME" = "beta" || "$CI_COMMIT_REF_NAME" = "stable" ]]; then
-  export GIT_COMPARE=$CI_COMMIT_REF_NAME;
-else
-  export GIT_COMPARE=master;
-fi
-export JS_FILES_MODIFIED="$(git --no-pager diff --name-only $GIT_COMPARE...$CI_COMMIT_SHA | grep ^js/ | wc -l)"
-export JS_OLD_FILES_MODIFIED="$(git --no-pager diff --name-only $GIT_COMPARE...$CI_COMMIT_SHA | grep ^js-old/ | wc -l)"
-export RUST_FILES_MODIFIED="$(git --no-pager diff --name-only $GIT_COMPARE...$CI_COMMIT_SHA | grep -v -e ^js -e ^\\. -e ^LICENSE -e ^README.md -e ^test.sh -e ^windows/ -e ^scripts/ -e ^mac/ -e ^nsis/ | wc -l)"
-
-echo "RUST_FILES_MODIFIED: $RUST_FILES_MODIFIED"
-echo "JS_FILES_MODIFIED: $JS_FILES_MODIFIED"
-echo "JS_OLD_FILES_MODIFIED: $JS_OLD_FILES_MODIFIED"
-TEST_SWITCH=$1
-rust_test () {
-  git submodule update --init --recursive
-  rustup show
-  if [[ "${RUST_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping Rust tests since no Rust files modified.";
-    else ./test.sh;
-  fi
-  if [[ "$CI_COMMIT_REF_NAME" == "nightly" ]];
-    then sh scripts/aura-test.sh;
-  fi
-}
-js_test () {
-  git submodule update --init --recursive
-  if [[ "${JS_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping JS deps install since no JS files modified.";
-    else ./js/scripts/install-deps.sh;
-  fi
-  if [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping JS (old) deps install since no JS files modified.";
-    else ./js-old/scripts/install-deps.sh;
-  fi
-  if [[ "${JS_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping JS lint since no JS files modified.";
-    else ./js/scripts/lint.sh && ./js/scripts/test.sh && ./js/scripts/build.sh;
-  fi
-  if [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping JS (old) lint since no JS files modified.";
-    else ./js-old/scripts/lint.sh && ./js-old/scripts/test.sh && ./js-old/scripts/build.sh;
-  fi
-}
-js_release () {
-  rustup default stable
-  if [[ "${JS_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping JS deps install since no JS files modified.";
-    else echo "install JS deps---------------"&&./js/scripts/install-deps.sh&&echo "done----------------";
-  fi
-  if [[ "${JS_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping JS rebuild since no JS files modified.";
-    else echo "build JS--------------"&&./js/scripts/build.sh&&echo "Puch JS precompiled-----------------"&&./js/scripts/push-precompiled.sh&&echo "done----------------";
-  fi
-  if [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping JS (old) deps install since no JS files modified.";
-    else echo "install JS (old) deps---------------"&&./js-old/scripts/install-deps.sh&&echo "done----------------";
-  fi
-  if [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping JS (old) rebuild since no JS files modified.";
-    else echo "build JS (old)--------------"&&./js-old/scripts/build.sh&&echo "Puch JS (old) precompiled-----------------"&&./js-old/scripts/push-precompiled.sh&&echo "done----------------";
-  fi
-  if [[ "${JS_FILES_MODIFIED}" == "0" ]] && [[ "${JS_OLD_FILES_MODIFIED}" == "0" ]];
-    then echo "Skipping Cargo update since no JS files modified.";
-    else echo "push cargo---------"&&./js/scripts/push-cargo.sh&&echo "done----------------";
-  fi
-}
-coverage_test () {
-  git submodule update --init --recursive
-  rm -rf target/*
-  rm -rf js/.coverage
-  scripts/cov.sh
-}
-case $TEST_SWITCH in
-  stable )
-    rustup default stable
-    rust_test
-    ;;
-  beta)
-    rustup default beta
-    rust_test
-    ;;
-  nightly)
-    rustup default nightly
-    rust_test
-    ;;
-  js-test)
-    js_test
-    ;;
-  js-release)
-    js_release
-    ;;
-  test-coverage)
-    coverage_test
-    ;;
-esac

@@ -17,6 +17,6 @@ apps:
 
 parts:
   parity:
-    source: ..
+    source: .
     plugin: rust
     build-packages: [g++, libudev-dev, libssl-dev, make, pkg-config]

@@ -170,18 +170,7 @@ pub struct AttachedProtocol {
 }
 
 impl AttachedProtocol {
-	fn register(&self, network: &NetworkService) {
-		let res = network.register_protocol(
-			self.handler.clone(),
-			self.protocol_id,
-			self.packet_count,
-			self.versions
-		);
-
-		if let Err(e) = res {
-			warn!(target: "sync", "Error attaching protocol {:?}: {:?}", self.protocol_id, e);
-		}
-	}
+	fn register(&self, _network: &NetworkService) {}
 }
 
 /// EthSync initialization parameters.

@@ -522,10 +522,6 @@ impl BlockDownloader {
 					trace!(target: "sync", "Unknown new block parent, restarting sync");
 					break;
 				},
-				Err(BlockImportError::Block(BlockError::TemporarilyInvalid(_))) => {
-					debug!(target: "sync", "Block temporarily invalid, restarting sync");
-					break;
-				},
 				Err(e) => {
 					debug!(target: "sync", "Bad block {:?} : {:?}", h, e);
 					bad = true;

@@ -32,7 +32,7 @@ use std::cmp;
 use std::collections::HashMap;
 use std::marker::PhantomData;
 use std::path::{PathBuf, Path};
-use std::{fs, io, mem, result};
+use std::{mem, fs, io};
 
 use parking_lot::{Mutex, MutexGuard, RwLock};
 use rocksdb::{
@@ -257,25 +257,7 @@ pub struct Database {
 	flushing_lock: Mutex<bool>,
 }
 
-#[inline]
-fn check_for_corruption<T, P: AsRef<Path>>(path: P, res: result::Result<T, String>) -> result::Result<T, String> {
-	if let Err(ref s) = res {
-		if s.starts_with("Corruption:") {
-			warn!("DB corrupted: {}. Repair will be triggered on next restart", s);
-			let _ = fs::File::create(path.as_ref().join(Database::CORRUPTION_FILE_NAME));
-		}
-	}
-
-	res
-}
-
-fn is_corrupted(s: &str) -> bool {
-	s.starts_with("Corruption:") || s.starts_with("Invalid argument: You have to open all column families")
-}
-
 impl Database {
-	const CORRUPTION_FILE_NAME: &'static str = "CORRUPTED";
-
 	/// Open database with default settings.
 	pub fn open_default(path: &str) -> Result<Database> {
 		Database::open(&DatabaseConfig::default(), path)
@@ -305,14 +287,6 @@ impl Database {
 			block_opts.set_cache(cache);
 		}
 
-		// attempt database repair if it has been previously marked as corrupted
-		let db_corrupted = Path::new(path).join(Database::CORRUPTION_FILE_NAME);
-		if db_corrupted.exists() {
-			warn!("DB has been previously marked as corrupted, attempting repair");
-			DB::repair(&opts, path)?;
-			fs::remove_file(db_corrupted)?;
-		}
-
 		let columns = config.columns.unwrap_or(0) as usize;
 
 		let mut cf_options = Vec::with_capacity(columns);
@@ -332,11 +306,12 @@ impl Database {
 
 		let mut cfs: Vec<Column> = Vec::new();
 		let db = match config.columns {
-			Some(_) => {
+			Some(columns) => {
 				match DB::open_cf(&opts, path, &cfnames, &cf_options) {
 					Ok(db) => {
 						cfs = cfnames.iter().map(|n| db.cf_handle(n)
 							.expect("rocksdb opens a cf_handle for each cfname; qed")).collect();
+						assert!(cfs.len() == columns as usize);
 						Ok(db)
 					}
 					Err(_) => {
@@ -346,7 +321,7 @@ impl Database {
 						cfs = cfnames.iter().enumerate().map(|(i, n)| db.create_cf(n, &cf_options[i])).collect::<::std::result::Result<_, _>>()?;
 						Ok(db)
 					},
-					err => err,
+					err @ Err(_) => err,
 				}
 			}
 		}
@@ -356,18 +331,14 @@ impl Database {
 
 		let db = match db {
 			Ok(db) => db,
-			Err(ref s) if is_corrupted(s) => {
-				warn!("DB corrupted: {}, attempting repair", s);
+			Err(ref s) if s.starts_with("Corruption:") => {
+				info!("{}", s);
+				info!("Attempting DB repair for {}", path);
 				DB::repair(&opts, path)?;
 
 				match cfnames.is_empty() {
 					true => DB::open(&opts, path)?,
-					false => {
-						let db = DB::open_cf(&opts, path, &cfnames, &cf_options)?;
-						cfs = cfnames.iter().map(|n| db.cf_handle(n)
-							.expect("rocksdb opens a cf_handle for each cfname; qed")).collect();
-						db
-					},
+					false => DB::open_cf(&opts, path, &cfnames, &cf_options)?
 				}
 			},
 			Err(s) => { return Err(s.into()); }
@@ -454,11 +425,7 @@ impl Database {
 					}
 				}
 			}
-
-			check_for_corruption(
-				&self.path,
-				db.write_opt(batch, &self.write_opts))?;
-
+			db.write_opt(batch, &self.write_opts)?;
 			for column in self.flushing.write().iter_mut() {
 				column.clear();
 				column.shrink_to_fit();
@@ -504,10 +471,7 @@ impl Database {
 					},
 				}
 			}
-
-				check_for_corruption(
-					&self.path,
-					db.write_opt(batch, &self.write_opts)).map_err(Into::into)
+				db.write_opt(batch, &self.write_opts).map_err(Into::into)
 			},
 			None => Err("Database is closed".into())
 		}

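For reference, a standalone sketch (std only; the marker file name is copied from the removed code, the path is illustrative) of the corruption-marker pattern that the kvdb-rocksdb hunks above remove: when a write fails with a "Corruption:" error, drop a marker file next to the database so that a repair is attempted on the next start, instead of repairing only at open time.

use std::fs;
use std::path::Path;

const CORRUPTION_FILE_NAME: &str = "CORRUPTED";

// Mark the database directory if the error string reports corruption, then pass the result on.
fn check_for_corruption<T>(path: &Path, res: Result<T, String>) -> Result<T, String> {
    if let Err(ref s) = res {
        if s.starts_with("Corruption:") {
            let _ = fs::File::create(path.join(CORRUPTION_FILE_NAME));
        }
    }
    res
}

// On startup, the previously written marker would be checked roughly like this.
fn needs_repair(path: &Path) -> bool {
    path.join(CORRUPTION_FILE_NAME).exists()
}

fn main() {
    let dir = std::env::temp_dir();
    let res: Result<(), String> = check_for_corruption(&dir, Err("Corruption: bad block".into()));
    println!("write result: {:?}, needs repair: {}", res, needs_repair(&dir));
}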