diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 233aa0be9..0c9a256e3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -166,27 +166,30 @@ linux-armv7: - export CXX=arm-linux-gnueabihf-g++ - export HOST_CC=gcc - export HOST_CXX=g++ + - export PLATFORM=armv7-unknown-linux-gnueabihf - rm -rf .cargo - mkdir -p .cargo - - echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config + - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build -j $(nproc) --target armv7-unknown-linux-gnueabihf --features final --release $CARGOFLAGS - - arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity - - md5sum target/armv7-unknown-linux-gnueabihf/release/parity > parity.md5 + - cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS + - arm-linux-gnueabihf-strip target/$PLATFORM/release/parity + - export SHA3=$(rhash --sha3-256 ~/Core/parity/target/release/parity -p %h) + - md5sum target/$PLATFORM/release/parity > parity.md5 - sh scripts/deb-build.sh armhf - - cp target/armv7-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity + - cp target/$PLATFORM/release/parity deb/usr/bin/parity - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - dpkg-deb -b deb "parity_"$VER"_armhf.deb" - md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5" - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi - - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity --body target/armv7-unknown-linux-gnueabihf/release/parity - - aws s3api put-object --bucket $S3_BUCKET --key 
$CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity.md5 --body parity.md5 - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" + - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -208,27 +211,30 @@ linux-arm: - export CXX=arm-linux-gnueabihf-g++ - export HOST_CC=gcc - export HOST_CXX=g++ + - export PLATFORM=arm-unknown-linux-gnueabihf - rm -rf .cargo - mkdir -p .cargo - - echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config + - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build -j $(nproc) --target arm-unknown-linux-gnueabihf --features final --release $CARGOFLAGS - - arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity - - md5sum target/arm-unknown-linux-gnueabihf/release/parity > parity.md5 + - cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS + - arm-linux-gnueabihf-strip target/$PLATFORM/release/parity + - export SHA3=$(rhash 
--sha3-256 ~/Core/parity/target/release/parity -p %h) + - md5sum target/$PLATFORM/release/parity > parity.md5 - sh scripts/deb-build.sh armhf - - cp target/arm-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity + - cp target/$PLATFORM/release/parity deb/usr/bin/parity - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - dpkg-deb -b deb "parity_"$VER"_armhf.deb" - md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5" - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi - - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity --body target/arm-unknown-linux-gnueabihf/release/parity - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity.md5 --body parity.md5 - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" + - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body 
"parity_"$VER"_armhf.deb.md5" + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -250,20 +256,23 @@ linux-armv6: - export CXX=arm-linux-gnueabi-g++ - export HOST_CC=gcc - export HOST_CXX=g++ + - export PLATFORM=arm-unknown-linux-gnueabi - rm -rf .cargo - mkdir -p .cargo - - echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config + - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"arm-linux-gnueabi-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build -j $(nproc) --target arm-unknown-linux-gnueabi --features final --release $CARGOFLAGS - - arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity - - md5sum target/arm-unknown-linux-gnueabi/release/parity > parity.md5 + - cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS + - arm-linux-gnueabi-strip target/$PLATFORM/release/parity + - export SHA3=$(rhash --sha3-256 ~/Core/parity/target/release/parity -p %h) + - md5sum target/$PLATFORM/release/parity > parity.md5 - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi - - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity --body target/arm-unknown-linux-gnueabi/release/parity - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity.md5 --body parity.md5 + - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity + - aws s3api put-object --bucket $S3_BUCKET --key 
$CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -285,26 +294,29 @@ linux-aarch64: - export CXX=aarch64-linux-gnu-g++ - export HOST_CC=gcc - export HOST_CXX=g++ + - export PLATFORM=aarch64-unknown-linux-gnu - rm -rf .cargo - mkdir -p .cargo - - echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config + - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build -j $(nproc) --target aarch64-unknown-linux-gnu --features final --release $CARGOFLAGS - - aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity - - md5sum target/aarch64-unknown-linux-gnu/release/parity > parity.md5 + - cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS + - aarch64-linux-gnu-strip target/$PLATFORM/release/parity + - export SHA3=$(rhash --sha3-256 ~/Core/parity/target/release/parity -p %h) + - md5sum target/$PLATFORM/release/parity > parity.md5 - sh scripts/deb-build.sh arm64 - - cp target/aarch64-unknown-linux-gnu/release/parity deb/usr/bin/parity + - cp target/$PLATFORM/release/parity deb/usr/bin/parity - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - dpkg-deb -b deb "parity_"$VER"_arm64.deb" - md5sum "parity_"$VER"_arm64.deb" > "parity_"$VER"_arm64.deb.md5" - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi - - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity.md5 --body parity.md5 - - aws s3api put-object 
--bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb" - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5" + - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb" + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5" + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm diff --git a/Cargo.lock b/Cargo.lock index 5e8f3315e..ac1da1e37 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1368,7 +1368,7 @@ dependencies = [ [[package]] name = "parity-ui-precompiled" version = "1.4.0" -source = "git+https://github.com/ethcore/js-precompiled.git#e3e33f97c0f3b3d788a859b5bd10f5ca1ee45871" +source = "git+https://github.com/ethcore/js-precompiled.git#c8eb24c13e6fa57bf3b85b16209d281d89b31cbf" dependencies = [ "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/appveyor.yml b/appveyor.yml index ca477b997..e76e37d60 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -6,7 +6,7 @@ environment: certpass: secure: 0BgXJqxq9Ei34/hZ7121FQ== keyfile: C:\users\appveyor\Certificates.p12 - RUSTFLAGS: -Zorbit=off -D warnings + RUSTFLAGS: -D warnings branches: only: diff --git a/ethcore/light/src/client/mod.rs b/ethcore/light/src/client/mod.rs index 13e2fe0a1..16b4547df 100644 --- a/ethcore/light/src/client/mod.rs +++ b/ethcore/light/src/client/mod.rs @@ -215,31 +215,31 @@ impl Provider for Client { 
None } - fn block_headers(&self, _req: request::Headers) -> Vec { + fn block_header(&self, id: BlockId) -> Option { + self.chain.get_header(id) + } + + fn block_body(&self, _id: BlockId) -> Option { + None + } + + fn block_receipts(&self, _hash: &H256) -> Option { + None + } + + fn state_proof(&self, _req: request::StateProof) -> Vec { Vec::new() } - fn block_bodies(&self, _req: request::Bodies) -> Vec { + fn contract_code(&self, _req: request::ContractCode) -> Bytes { Vec::new() } - fn receipts(&self, _req: request::Receipts) -> Vec { - Vec::new() - } - - fn proofs(&self, _req: request::StateProofs) -> Vec { - Vec::new() - } - - fn contract_code(&self, _req: request::ContractCodes) -> Vec { - Vec::new() - } - - fn header_proofs(&self, _req: request::HeaderProofs) -> Vec { - Vec::new() + fn header_proof(&self, _req: request::HeaderProof) -> Option<(Bytes, Vec)> { + None } fn ready_transactions(&self) -> Vec { - Client::ready_transactions(self) + Vec::new() } } diff --git a/ethcore/light/src/net/mod.rs b/ethcore/light/src/net/mod.rs index 766cd9c4b..e27ba8612 100644 --- a/ethcore/light/src/net/mod.rs +++ b/ethcore/light/src/net/mod.rs @@ -264,7 +264,7 @@ impl LightProtocol { /// Check the maximum amount of requests of a specific type /// which a peer would be able to serve. Returns zero if the /// peer is unknown or has no buffer flow parameters. 
- pub fn max_requests(&self, peer: PeerId, kind: request::Kind) -> usize { + fn max_requests(&self, peer: PeerId, kind: request::Kind) -> usize { self.peers.read().get(&peer).and_then(|peer| { let mut peer = peer.lock(); let idle = peer.idle; @@ -989,7 +989,7 @@ impl LightProtocol { let max_cost = try!(peer.deduct_max(&self.flow_params, request::Kind::Codes, req.code_requests.len())); - let response = self.provider.contract_code(req); + let response = self.provider.contract_codes(req); let response_len = response.iter().filter(|x| !x.is_empty()).count(); let actual_cost = self.flow_params.compute_cost(request::Kind::Codes, response_len); assert!(max_cost >= actual_cost, "Actual cost exceeded maximum computed cost."); diff --git a/ethcore/light/src/net/tests/mod.rs b/ethcore/light/src/net/tests/mod.rs index 64d53d9c8..0cfc8cac7 100644 --- a/ethcore/light/src/net/tests/mod.rs +++ b/ethcore/light/src/net/tests/mod.rs @@ -94,79 +94,36 @@ impl Provider for TestProvider { None } - fn block_headers(&self, req: request::Headers) -> Vec { - use request::HashOrNumber; - use ethcore::views::HeaderView; + fn block_header(&self, id: BlockId) -> Option { + self.0.client.block_header(id) + } - let best_num = self.chain_info().best_block_number; - let start_num = match req.start { - HashOrNumber::Number(start_num) => start_num, - HashOrNumber::Hash(hash) => match self.0.client.block_header(BlockId::Hash(hash)) { - None => { - return Vec::new(); - } - Some(header) => { - let num = HeaderView::new(&header).number(); - if req.max == 1 || self.0.client.block_hash(BlockId::Number(num)) != Some(hash) { - // Non-canonical header or single header requested. 
- return vec![header]; - } + fn block_body(&self, id: BlockId) -> Option { + self.0.client.block_body(id) + } - num - } + fn block_receipts(&self, hash: &H256) -> Option { + self.0.client.block_receipts(&hash) + } + + fn state_proof(&self, req: request::StateProof) -> Vec { + match req.key2 { + Some(_) => vec![::util::sha3::SHA3_NULL_RLP.to_vec()], + None => { + // sort of a leaf node + let mut stream = RlpStream::new_list(2); + stream.append(&req.key1).append_empty_data(); + vec![stream.out()] } - }; - - (0u64..req.max as u64) - .map(|x: u64| x.saturating_mul(req.skip + 1)) - .take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num >= *x }) - .map(|x| if req.reverse { start_num - x } else { start_num + x }) - .map(|x| self.0.client.block_header(BlockId::Number(x))) - .take_while(|x| x.is_some()) - .flat_map(|x| x) - .collect() + } } - fn block_bodies(&self, req: request::Bodies) -> Vec { - req.block_hashes.into_iter() - .map(|hash| self.0.client.block_body(BlockId::Hash(hash))) - .map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) - .collect() + fn contract_code(&self, req: request::ContractCode) -> Bytes { + req.account_key.iter().chain(req.account_key.iter()).cloned().collect() } - fn receipts(&self, req: request::Receipts) -> Vec { - req.block_hashes.into_iter() - .map(|hash| self.0.client.block_receipts(&hash)) - .map(|receipts| receipts.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) - .collect() - } - - fn proofs(&self, req: request::StateProofs) -> Vec { - req.requests.into_iter() - .map(|req| { - match req.key2 { - Some(_) => ::util::sha3::SHA3_NULL_RLP.to_vec(), - None => { - // sort of a leaf node - let mut stream = RlpStream::new_list(2); - stream.append(&req.key1).append_empty_data(); - stream.out() - } - } - }) - .collect() - } - - fn contract_code(&self, req: request::ContractCodes) -> Vec { - req.code_requests.into_iter() - .map(|req| { - req.account_key.iter().chain(req.account_key.iter()).cloned().collect() 
- }) - .collect() - } - - fn header_proofs(&self, req: request::HeaderProofs) -> Vec { - req.requests.into_iter().map(|_| ::rlp::EMPTY_LIST_RLP.to_vec()).collect() + fn header_proof(&self, _req: request::HeaderProof) -> Option<(Bytes, Vec)> { + None } fn ready_transactions(&self) -> Vec { @@ -455,8 +412,8 @@ fn get_state_proofs() { let request_body = encode_request(&request, req_id); let response = { let proofs = vec![ - { let mut stream = RlpStream::new_list(2); stream.append(&key1).append_empty_data(); stream.out() }, - ::util::sha3::SHA3_NULL_RLP.to_vec(), + { let mut stream = RlpStream::new_list(2); stream.append(&key1).append_empty_data(); vec![stream.out()] }, + vec![::util::sha3::SHA3_NULL_RLP.to_vec()], ]; let new_buf = *flow_params.limit() - flow_params.compute_cost(request::Kind::StateProofs, 2); @@ -465,7 +422,10 @@ fn get_state_proofs() { response_stream.append(&req_id).append(&new_buf).begin_list(2); for proof in proofs { - response_stream.append_raw(&proof, 1); + response_stream.begin_list(proof.len()); + for node in proof { + response_stream.append_raw(&node, 1); + } } response_stream.out() diff --git a/ethcore/light/src/provider.rs b/ethcore/light/src/provider.rs index 1f9bbf8aa..afc5294fa 100644 --- a/ethcore/light/src/provider.rs +++ b/ethcore/light/src/provider.rs @@ -52,31 +52,139 @@ pub trait Provider: Send + Sync { /// /// The returned vector may have any length in the range [0, `max`], but the /// results within must adhere to the `skip` and `reverse` parameters. 
- fn block_headers(&self, req: request::Headers) -> Vec; + fn block_headers(&self, req: request::Headers) -> Vec { + use request::HashOrNumber; + use ethcore::views::HeaderView; + + if req.max == 0 { return Vec::new() } + + let best_num = self.chain_info().best_block_number; + let start_num = match req.start { + HashOrNumber::Number(start_num) => start_num, + HashOrNumber::Hash(hash) => match self.block_header(BlockId::Hash(hash)) { + None => { + trace!(target: "les_provider", "Unknown block hash {} requested", hash); + return Vec::new(); + } + Some(header) => { + let num = HeaderView::new(&header).number(); + let canon_hash = self.block_header(BlockId::Number(num)) + .map(|h| HeaderView::new(&h).hash()); + + if req.max == 1 || canon_hash != Some(hash) { + // Non-canonical header or single header requested. + return vec![header]; + } + + num + } + } + }; + + (0u64..req.max as u64) + .map(|x: u64| x.saturating_mul(req.skip + 1)) + .take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num >= *x }) + .map(|x| if req.reverse { start_num - x } else { start_num + x }) + .map(|x| self.block_header(BlockId::Number(x))) + .take_while(|x| x.is_some()) + .flat_map(|x| x) + .collect() + } + + /// Get a block header by id. + fn block_header(&self, id: BlockId) -> Option; /// Provide as many as possible of the requested blocks (minus the headers) encoded /// in RLP format. - fn block_bodies(&self, req: request::Bodies) -> Vec; + fn block_bodies(&self, req: request::Bodies) -> Vec { + req.block_hashes.into_iter() + .map(|hash| self.block_body(BlockId::Hash(hash))) + .map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) + .collect() + } + + /// Get a block body by id. + fn block_body(&self, id: BlockId) -> Option; /// Provide the receipts as many as possible of the requested blocks. /// Returns a vector of RLP-encoded lists of receipts. 
- fn receipts(&self, req: request::Receipts) -> Vec; + fn receipts(&self, req: request::Receipts) -> Vec { + req.block_hashes.into_iter() + .map(|hash| self.block_receipts(&hash)) + .map(|receipts| receipts.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) + .collect() + } + + /// Get a block's receipts as an RLP-encoded list by block hash. + fn block_receipts(&self, hash: &H256) -> Option; /// Provide a set of merkle proofs, as requested. Each request is a /// block hash and request parameters. /// /// Returns a vector of RLP-encoded lists satisfying the requests. - fn proofs(&self, req: request::StateProofs) -> Vec; + fn proofs(&self, req: request::StateProofs) -> Vec { + use rlp::{RlpStream, Stream}; + + let mut results = Vec::with_capacity(req.requests.len()); + + for request in req.requests { + let proof = self.state_proof(request); + + let mut stream = RlpStream::new_list(proof.len()); + for node in proof { + stream.append_raw(&node, 1); + } + + results.push(stream.out()); + } + + results + } + + /// Get a state proof from a request. Each proof should be a vector + /// of rlp-encoded trie nodes, in ascending order by distance from the root. + fn state_proof(&self, req: request::StateProof) -> Vec; /// Provide contract code for the specified (block_hash, account_hash) pairs. /// Each item in the resulting vector is either the raw bytecode or empty. - fn contract_code(&self, req: request::ContractCodes) -> Vec; + fn contract_codes(&self, req: request::ContractCodes) -> Vec { + req.code_requests.into_iter() + .map(|req| self.contract_code(req)) + .collect() + } - /// Provide header proofs from the Canonical Hash Tries as well as the headers + /// Get contract code by request. Either the raw bytecode or empty. + fn contract_code(&self, req: request::ContractCode) -> Bytes; + + /// Provide header proofs from the Canonical Hash Tries as well as the headers /// they correspond to -- each element in the returned vector is a 2-tuple. 
- /// The first element is a block header and the second a merkle proof of + /// The first element is a block header and the second a merkle proof of /// the header in a requested CHT. - fn header_proofs(&self, req: request::HeaderProofs) -> Vec; + fn header_proofs(&self, req: request::HeaderProofs) -> Vec { + use rlp::{self, RlpStream, Stream}; + + req.requests.into_iter() + .map(|req| self.header_proof(req)) + .map(|maybe_proof| match maybe_proof { + None => rlp::EMPTY_LIST_RLP.to_vec(), + Some((header, proof)) => { + let mut stream = RlpStream::new_list(2); + stream.append_raw(&header, 1).begin_list(proof.len()); + + for node in proof { + stream.append_raw(&node, 1); + } + + stream.out() + } + }) + .collect() + } + + /// Provide a header proof from a given Canonical Hash Trie as well as the + /// corresponding header. The first element is the block header and the + /// second is a merkle proof of the CHT. + fn header_proof(&self, req: request::HeaderProof) -> Option<(Bytes, Vec)>; /// Provide pending transactions. fn ready_transactions(&self) -> Vec; @@ -96,86 +204,31 @@ impl Provider for T { Some(self.pruning_info().earliest_state) } - fn block_headers(&self, req: request::Headers) -> Vec { - use request::HashOrNumber; - use ethcore::views::HeaderView; - - let best_num = self.chain_info().best_block_number; - let start_num = match req.start { - HashOrNumber::Number(start_num) => start_num, - HashOrNumber::Hash(hash) => match self.block_header(BlockId::Hash(hash)) { - None => { - trace!(target: "les_provider", "Unknown block hash {} requested", hash); - return Vec::new(); - } - Some(header) => { - let num = HeaderView::new(&header).number(); - if req.max == 1 || self.block_hash(BlockId::Number(num)) != Some(hash) { - // Non-canonical header or single header requested. 
- return vec![header]; - } - - num - } - } - }; - - (0u64..req.max as u64) - .map(|x: u64| x.saturating_mul(req.skip + 1)) - .take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num >= *x }) - .map(|x| if req.reverse { start_num - x } else { start_num + x }) - .map(|x| self.block_header(BlockId::Number(x))) - .take_while(|x| x.is_some()) - .flat_map(|x| x) - .collect() + fn block_header(&self, id: BlockId) -> Option { + BlockChainClient::block_header(self, id) } - fn block_bodies(&self, req: request::Bodies) -> Vec { - req.block_hashes.into_iter() - .map(|hash| self.block_body(BlockId::Hash(hash))) - .map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) - .collect() + fn block_body(&self, id: BlockId) -> Option { + BlockChainClient::block_body(self, id) } - fn receipts(&self, req: request::Receipts) -> Vec { - req.block_hashes.into_iter() - .map(|hash| self.block_receipts(&hash)) - .map(|receipts| receipts.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) - .collect() + fn block_receipts(&self, hash: &H256) -> Option { + BlockChainClient::block_receipts(self, hash) } - fn proofs(&self, req: request::StateProofs) -> Vec { - use rlp::{RlpStream, Stream}; - - let mut results = Vec::with_capacity(req.requests.len()); - - for request in req.requests { - let proof = match request.key2 { - Some(key2) => self.prove_storage(request.key1, key2, request.from_level, BlockId::Hash(request.block)), - None => self.prove_account(request.key1, request.from_level, BlockId::Hash(request.block)), - }; - - let mut stream = RlpStream::new_list(proof.len()); - for node in proof { - stream.append_raw(&node, 1); - } - - results.push(stream.out()); + fn state_proof(&self, req: request::StateProof) -> Vec { + match req.key2 { + Some(key2) => self.prove_storage(req.key1, key2, req.from_level, BlockId::Hash(req.block)), + None => self.prove_account(req.key1, req.from_level, BlockId::Hash(req.block)), } - - results } - fn contract_code(&self, req: 
request::ContractCodes) -> Vec { - req.code_requests.into_iter() - .map(|req| { - self.code_by_hash(req.account_key, BlockId::Hash(req.block_hash)) - }) - .collect() + fn contract_code(&self, req: request::ContractCode) -> Bytes { + self.code_by_hash(req.account_key, BlockId::Hash(req.block_hash)) } - fn header_proofs(&self, req: request::HeaderProofs) -> Vec { - req.requests.into_iter().map(|_| ::rlp::EMPTY_LIST_RLP.to_vec()).collect() + fn header_proof(&self, _req: request::HeaderProof) -> Option<(Bytes, Vec)> { + None } fn ready_transactions(&self) -> Vec { diff --git a/ethcore/src/verification/queue/mod.rs b/ethcore/src/verification/queue/mod.rs index 4f1e18a20..673275102 100644 --- a/ethcore/src/verification/queue/mod.rs +++ b/ethcore/src/verification/queue/mod.rs @@ -137,7 +137,7 @@ pub struct VerificationQueue { max_queue_size: usize, max_mem_use: usize, scale_verifiers: bool, - verifier_handles: Vec>, + verifier_handles: Vec>, state: Arc<(Mutex, Condvar)>, } @@ -225,8 +225,8 @@ impl VerificationQueue { let num_cpus = ::num_cpus::get(); let max_verifiers = min(num_cpus, MAX_VERIFIERS); - let default_amount = max(1, min(max_verifiers, config.verifier_settings.num_verifiers)); - let state = Arc::new((Mutex::new(State::Work(default_amount)), Condvar::new())); + let default_amount = max(1, min(max_verifiers, config.verifier_settings.num_verifiers)); + let state = Arc::new((Mutex::new(State::Work(default_amount)), Condvar::new())); let mut verifier_handles = Vec::with_capacity(max_verifiers); debug!(target: "verification", "Allocating {} verifiers, {} initially active", max_verifiers, default_amount); @@ -248,11 +248,11 @@ impl VerificationQueue { .spawn(move || { panic_handler.catch_panic(move || { VerificationQueue::verify( - verification, - engine, - wait, - ready, - empty, + verification, + engine, + wait, + ready, + empty, state, i, ) @@ -299,11 +299,11 @@ impl VerificationQueue { debug!(target: "verification", "verifier {} sleeping", id); state.1.wait(&mut 
cur_state); - debug!(target: "verification", "verifier {} waking up", id); + debug!(target: "verification", "verifier {} waking up", id); } - if let State::Exit = *cur_state { - debug!(target: "verification", "verifier {} exiting", id); + if let State::Exit = *cur_state { + debug!(target: "verification", "verifier {} exiting", id); break; } } @@ -326,7 +326,7 @@ impl VerificationQueue { } if let State::Exit = *state.0.lock() { - debug!(target: "verification", "verifier {} exiting", id); + debug!(target: "verification", "verifier {} exiting", id); return; } } @@ -687,8 +687,12 @@ impl Drop for VerificationQueue { *self.state.0.lock() = State::Exit; self.state.1.notify_all(); - // wake up all threads waiting for more work. - self.more_to_verify.notify_all(); + // acquire this lock to force threads to reach the waiting point + // if they're in-between the exit check and the more_to_verify wait. + { + let _more = self.verification.more_to_verify.lock().unwrap(); + self.more_to_verify.notify_all(); + } // wait for all verifier threads to join. for thread in self.verifier_handles.drain(..) { @@ -817,7 +821,7 @@ mod tests { fn readjust_verifiers() { let queue = get_test_queue(true); - // put all the verifiers to sleep to ensure + // put all the verifiers to sleep to ensure // the test isn't timing sensitive. 
*queue.state.0.lock() = State::Work(0); diff --git a/js/package.json b/js/package.json index de52d0d0f..c7ab53a2e 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "parity.js", - "version": "0.2.130", + "version": "0.2.131", "main": "release/index.js", "jsnext:main": "src/index.js", "author": "Parity Team ", @@ -117,6 +117,7 @@ "react-hot-loader": "3.0.0-beta.6", "react-intl-aggregate-webpack-plugin": "0.0.1", "rucksack-css": "0.9.1", + "serviceworker-webpack-plugin": "0.1.7", "sinon": "1.17.6", "sinon-as-promised": "4.0.2", "sinon-chai": "2.8.0", @@ -156,6 +157,7 @@ "mobx-react-devtools": "4.2.10", "moment": "2.17.0", "phoneformat.js": "1.0.3", + "promise-worker": "1.1.1", "push.js": "0.0.11", "qs": "6.3.0", "react": "15.4.1", @@ -182,7 +184,6 @@ "valid-url": "1.0.9", "validator": "6.2.0", "web3": "0.17.0-beta", - "whatwg-fetch": "2.0.1", - "worker-loader": "0.7.1" + "whatwg-fetch": "2.0.1" } } diff --git a/js/src/redux/providers/compilerActions.js b/js/src/redux/providers/compilerActions.js index c3b3a9bdd..d638c03a2 100644 --- a/js/src/redux/providers/compilerActions.js +++ b/js/src/redux/providers/compilerActions.js @@ -14,7 +14,26 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . 
-import CompilerWorker from 'worker-loader!./compilerWorker.js'; +import PromiseWorker from 'promise-worker'; +import runtime from 'serviceworker-webpack-plugin/lib/runtime'; + +let workerRegistration; + +// Setup the Service Worker +if ('serviceWorker' in navigator) { + workerRegistration = runtime + .register() + .then(() => navigator.serviceWorker.ready) + .then((registration) => { + const _worker = registration.active; + _worker.controller = registration.active; + const worker = new PromiseWorker(_worker); + + return worker; + }); +} else { + workerRegistration = Promise.reject('Service Worker is not available in your browser.'); +} export function setWorker (worker) { return { @@ -23,6 +42,13 @@ export function setWorker (worker) { }; } +export function setError (error) { + return { + type: 'setError', + error + }; +} + export function setupWorker () { return (dispatch, getState) => { const state = getState(); @@ -31,7 +57,13 @@ export function setupWorker () { return; } - const worker = new CompilerWorker(); - dispatch(setWorker(worker)); + workerRegistration + .then((worker) => { + dispatch(setWorker(worker)); + }) + .catch((error) => { + console.error('sw', error); + dispatch(setWorker(null)); + }); }; } diff --git a/js/src/redux/providers/compilerReducer.js b/js/src/redux/providers/compilerReducer.js index 7163ac7a5..e23bf3b16 100644 --- a/js/src/redux/providers/compilerReducer.js +++ b/js/src/redux/providers/compilerReducer.js @@ -17,13 +17,18 @@ import { handleActions } from 'redux-actions'; const initialState = { - worker: null + worker: undefined, + error: null }; export default handleActions({ setWorker (state, action) { const { worker } = action; - return Object.assign({}, state, { worker }); + }, + + setError (state, action) { + const { error } = action; + return Object.assign({}, state, { error }); } }, initialState); diff --git a/js/src/redux/providers/compilerWorker.js b/js/src/redux/providers/compilerWorker.js deleted file mode 100644 index 
60a07355f..000000000 --- a/js/src/redux/providers/compilerWorker.js +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright 2015, 2016 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -import solc from 'solc/browser-wrapper'; -import { isWebUri } from 'valid-url'; - -self.solcVersions = {}; -self.files = {}; -self.lastCompile = { - sourcecode: '', - result: '', - version: '' -}; - -// eslint-disable-next-line no-undef -onmessage = (event) => { - const message = JSON.parse(event.data); - - switch (message.action) { - case 'compile': - compile(message.data); - break; - case 'load': - load(message.data); - break; - case 'setFiles': - setFiles(message.data); - break; - case 'close': - close(); - break; - } -}; - -function setFiles (files) { - const prevFiles = self.files; - const nextFiles = files.reduce((obj, file) => { - obj[file.name] = file.sourcecode; - return obj; - }, {}); - - self.files = { - ...prevFiles, - ...nextFiles - }; -} - -function findImports (path) { - if (self.files[path]) { - if (self.files[path].error) { - return { error: self.files[path].error }; - } - - return { contents: self.files[path] }; - } - - if (isWebUri(path)) { - console.log('[worker] fetching', path); - - fetch(path) - .then((r) => r.text()) - .then((c) => { - console.log('[worker]', 'got content at ' + path); - self.files[path] = c; - - postMessage(JSON.stringify({ - event: 
'try-again' - })); - }) - .catch((e) => { - console.error('[worker]', 'fetching', path, e); - self.files[path] = { error: e }; - }); - - return { error: '__parity_tryAgain' }; - } - - console.log(`[worker] path ${path} not found...`); - return { error: 'File not found' }; -} - -function compile (data, optimized = 1) { - const { sourcecode, build } = data; - const { longVersion } = build; - - if (self.lastCompile.sourcecode === sourcecode && self.lastCompile.longVersion === longVersion) { - return postMessage(JSON.stringify({ - event: 'compiled', - data: self.lastCompile.result - })); - } - - fetchSolc(build) - .then((compiler) => { - const input = { - '': sourcecode - }; - - const compiled = compiler.compile({ sources: input }, optimized, findImports); - - self.lastCompile = { - version: longVersion, result: compiled, - sourcecode - }; - - postMessage(JSON.stringify({ - event: 'compiled', - data: compiled - })); - }); -} - -function load (build) { - postMessage(JSON.stringify({ - event: 'loading', - data: true - })); - - fetchSolc(build) - .then(() => { - postMessage(JSON.stringify({ - event: 'loading', - data: false - })); - }) - .catch(() => { - postMessage(JSON.stringify({ - event: 'loading', - data: false - })); - }); -} - -function fetchSolc (build) { - const { path, longVersion } = build; - - if (self.solcVersions[path]) { - return Promise.resolve(self.solcVersions[path]); - } - - const URL = `https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/${path}`; - console.log(`[worker] fetching solc-bin ${longVersion} at ${URL}`); - - return fetch(URL) - .then((r) => r.text()) - .then((code) => { - const solcCode = code.replace(/^var Module;/, 'var Module=self.__solcModule;'); - self.__solcModule = {}; - - console.log(`[worker] evaluating ${longVersion}`); - - // eslint-disable-next-line no-eval - eval(solcCode); - - console.log(`[worker] done evaluating ${longVersion}`); - - const compiler = solc(self.__solcModule); - self.solcVersions[path] = compiler; 
- return compiler; - }) - .catch((e) => { - console.error('fetching solc', e); - }); -} diff --git a/js/src/serviceWorker.js b/js/src/serviceWorker.js new file mode 100644 index 000000000..c558a57cf --- /dev/null +++ b/js/src/serviceWorker.js @@ -0,0 +1,144 @@ +// Copyright 2015, 2016 Parity Technologies (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +import registerPromiseWorker from 'promise-worker/register'; +import SolidityUtils from '~/util/solidity'; + +const CACHE_NAME = 'parity-cache-v1'; + +registerPromiseWorker((msg) => { + return handleMessage(msg); +}); + +self.addEventListener('install', (event) => { + event.waitUntil(self.skipWaiting()); +}); + +self.addEventListener('activate', (event) => { + event.waitUntil(self.clients.claim()); +}); + +self.addEventListener('fetch', (event) => { + const { url } = event.request; + + if (/raw.githubusercontent.com\/ethereum\/solc-bin(.+)list\.json$/.test(url)) { + // Return the cached version, but still update it in background + return event.respondWith(cachedFetcher(event.request, true)); + } + + if (/raw.githubusercontent.com\/ethereum\/solc-bin(.+)soljson(.+)\.js$/.test(url)) { + return event.respondWith(cachedFetcher(event.request)); + } +}); + +self.solc = {}; +self.files = {}; + +function cachedFetcher (request, update = false) { + return caches + .match(request) + .then((response) => { + // Return cached response if 
exists and no + // updates needed + if (response && !update) { + return response; + } + + const fetcher = fetch(request.clone()) + .then((response) => { + // Check if we received a valid response + if (!response || response.status !== 200) { + return response; + } + + return caches + .open(CACHE_NAME) + .then((cache) => { + cache.put(request, response.clone()); + return response; + }); + }); + + // Cache hit - return response + // Still want to perform the fetch (update) + if (response) { + return response; + } + + return fetcher; + }); +} + +function handleMessage (message) { + switch (message.action) { + case 'compile': + return compile(message.data); + + case 'load': + return getCompiler(message.data).then(() => 'ok'); + + case 'setFiles': + return setFiles(message.data); + + default: + console.warn(`unknown action "${message.action}"`); + return null; + } +} + +function compile (data) { + const { build } = data; + + return getCompiler(build) + .then((compiler) => { + return SolidityUtils.compile(data, compiler); + }); +} + +function setFiles (files) { + const prevFiles = self.files; + const nextFiles = files.reduce((obj, file) => { + obj[file.name] = file.sourcecode; + return obj; + }, {}); + + self.files = { + ...prevFiles, + ...nextFiles + }; + + return 'ok'; +} + +function getCompiler (build) { + const { longVersion } = build; + + const fetcher = (url) => { + const request = new Request(url); + return cachedFetcher(request); + }; + + if (!self.solc[longVersion]) { + self.solc[longVersion] = SolidityUtils + .getCompiler(build, fetcher) + .then((compiler) => { + self.solc[longVersion] = compiler; + return compiler; + }); + } + + return self.solc[longVersion]; +} diff --git a/js/src/util/solidity.js b/js/src/util/solidity.js new file mode 100644 index 000000000..d4c9686d5 --- /dev/null +++ b/js/src/util/solidity.js @@ -0,0 +1,89 @@ +// Copyright 2015, 2016 Parity Technologies (UK) Ltd. +// This file is part of Parity. 
+ +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +import solc from 'solc/browser-wrapper'; + +export default class SolidityUtils { + + static compile (data, compiler) { + const { sourcecode, build, optimize, files } = data; + + const start = Date.now(); + console.log('[solidity] compiling...'); + + const input = { + '': sourcecode + }; + + const findFiles = (path) => { + const file = files.find((f) => f.name === path); + + if (file) { + return { contents: file.sourcecode }; + } else { + return { error: 'File not found' }; + } + }; + + const compiled = compiler.compile({ sources: input }, optimize ? 1 : 0, findFiles); + + const time = Math.round((Date.now() - start) / 100) / 10; + console.log(`[solidity] done compiling in ${time}s`); + + compiled.version = build.longVersion; + compiled.sourcecode = sourcecode; + + return compiled; + } + + static getCompiler (build, _fetcher) { + const { longVersion, path } = build; + + const URL = `https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/${path}`; + + const fetcher = typeof _fetcher === 'function' + ? _fetcher + : (url) => fetch(url); + + const isWorker = typeof window !== 'object'; + + return fetcher(URL) + .then((r) => r.text()) + .then((code) => { + // `window` for main thread, `self` for workers + const _self = isWorker ? self : window; + _self.Module = {}; + + const solcCode = code.replace('var Module;', `var Module=${isWorker ? 
'self' : 'window'}.Module;`); + + console.log(`[solidity] evaluating ${longVersion}`); + + try { + // eslint-disable-next-line no-eval + eval(solcCode); + } catch (e) { + return Promise.reject(e); + } + + console.log(`[solidity] done evaluating ${longVersion}`); + + const compiler = solc(_self.Module); + delete _self.Module; + + return compiler; + }); + } +} diff --git a/js/src/views/WriteContract/writeContract.css b/js/src/views/WriteContract/writeContract.css index 2502c4060..c5cefcf7a 100644 --- a/js/src/views/WriteContract/writeContract.css +++ b/js/src/views/WriteContract/writeContract.css @@ -26,6 +26,16 @@ color: #ccc; } +.toggles { + display: flex; + flex-direction: row; + margin: 1em 0 0; + + > * { + flex: 1; + } +} + .container { padding: 1em 0; display: flex; @@ -45,6 +55,14 @@ } } +.error { + background-color: rgba(200, 0, 0, 0.25); + padding: 1em 0.5em; + margin-top: -0.5em; + font-family: monospace; + font-size: 0.9em; +} + .mainEditor { &:global(.ace-solarized-dark) { background-color: rgba(0, 0, 0, 0.5); @@ -87,13 +105,13 @@ display: flex; flex-direction: column; margin-right: 0.5em; - .panel { background-color: rgba(0, 0, 0, 0.5); padding: 1em; flex: 1; display: flex; flex-direction: column; + box-sizing: border-box; } .compilation { diff --git a/js/src/views/WriteContract/writeContract.js b/js/src/views/WriteContract/writeContract.js index 31c4dd244..c95c09c04 100644 --- a/js/src/views/WriteContract/writeContract.js +++ b/js/src/views/WriteContract/writeContract.js @@ -16,7 +16,7 @@ import React, { PropTypes, Component } from 'react'; import { observer } from 'mobx-react'; -import { MenuItem } from 'material-ui'; +import { MenuItem, Toggle } from 'material-ui'; import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import CircularProgress from 'material-ui/CircularProgress'; @@ -42,10 +42,11 @@ class WriteContract extends Component { static propTypes = { accounts: PropTypes.object.isRequired, setupWorker: 
PropTypes.func.isRequired, - worker: PropTypes.object + worker: PropTypes.object, + workerError: PropTypes.any }; - store = new WriteContractStore(); + store = WriteContractStore.get(); state = { resizing: false, @@ -56,23 +57,32 @@ class WriteContract extends Component { const { setupWorker, worker } = this.props; setupWorker(); - if (worker) { - this.store.setCompiler(worker); + if (worker !== undefined) { + this.store.setWorker(worker); } } componentDidMount () { this.store.setEditor(this.refs.editor); + if (this.props.workerError) { + this.store.setWorkerError(this.props.workerError); + } + // Wait for editor to be loaded window.setTimeout(() => { this.store.resizeEditor(); }, 2000); } + // Set the worker if not set before (eg. first page loading) componentWillReceiveProps (nextProps) { - if (!this.props.worker && nextProps.worker) { - this.store.setCompiler(nextProps.worker); + if (this.props.worker === undefined && nextProps.worker !== undefined) { + this.store.setWorker(nextProps.worker); + } + + if (this.props.workerError !== nextProps.workerError) { + this.store.setWorkerError(nextProps.workerError); } } @@ -217,7 +227,18 @@ class WriteContract extends Component { } renderParameters () { - const { compiling, contract, selectedBuild, loading } = this.store; + const { compiling, contract, selectedBuild, loading, workerError } = this.store; + + if (workerError) { + return ( +
+
+

Unfortunately, an error occurred...

+
{ workerError.toString() }
+
+
+ ); + } if (selectedBuild < 0) { return ( @@ -262,6 +283,24 @@ class WriteContract extends Component { : null } +
+
+ +
+
+ +
+
{ this.renderSolidityVersions() } { this.renderCompilation() } @@ -485,8 +524,8 @@ class WriteContract extends Component { function mapStateToProps (state) { const { accounts } = state.personal; - const { worker } = state.compiler; - return { accounts, worker }; + const { worker, error } = state.compiler; + return { accounts, worker, workerError: error }; } function mapDispatchToProps (dispatch) { diff --git a/js/src/views/WriteContract/writeContractStore.js b/js/src/views/WriteContract/writeContractStore.js index dd1985466..141569af2 100644 --- a/js/src/views/WriteContract/writeContractStore.js +++ b/js/src/views/WriteContract/writeContractStore.js @@ -14,10 +14,13 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . -import { action, observable } from 'mobx'; +import { action, observable, transaction } from 'mobx'; import store from 'store'; import { debounce } from 'lodash'; +import { sha3 } from '~/api/util/sha3'; +import SolidityUtils from '~/util/solidity'; + const WRITE_CONTRACT_STORE_KEY = '_parity::writeContractStore'; const SNIPPETS = { @@ -43,6 +46,8 @@ const SNIPPETS = { } }; +let instance = null; + export default class WriteContractStore { @observable sourcecode = ''; @@ -61,6 +66,9 @@ export default class WriteContractStore { @observable builds = []; @observable selectedBuild = -1; + @observable autocompile = false; + @observable optimize = false; + @observable showDeployModal = false; @observable showSaveModal = false; @observable showLoadModal = false; @@ -68,45 +76,55 @@ export default class WriteContractStore { @observable savedContracts = {}; @observable selectedContract = {}; + @observable workerError = null; + + loadingSolidity = false; + lastCompilation = {}; snippets = SNIPPETS; + worker = undefined; + + useWorker = true; + solc = {}; constructor () { - this.reloadContracts(); - this.fetchSolidityVersions(); - this.debouncedCompile = debounce(this.handleCompile, 1000); } + static get () { 
+ if (!instance) { + instance = new WriteContractStore(); + } + + return instance; + } + + @action setWorkerError (error) { + this.workerError = error; + } + @action setEditor (editor) { this.editor = editor; } - @action setCompiler (compiler) { - this.compiler = compiler; + @action setWorker (worker) { + if (this.worker !== undefined) { + return; + } - this.compiler.onmessage = (event) => { - const message = JSON.parse(event.data); + this.worker = worker; - switch (message.event) { - case 'compiled': - this.parseCompiled(message.data); - break; - case 'loading': - this.parseLoading(message.data); - break; - case 'try-again': - this.handleCompile(); - break; - } - }; + this + .fetchSolidityVersions() + .then(() => this.reloadContracts()); } fetchSolidityVersions () { - fetch('https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/list.json') + return fetch('https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/list.json') .then((r) => r.json()) .then((data) => { const { builds, releases, latestRelease } = data; let latestIndex = -1; + let promise = Promise.resolve(); this.builds = builds.reverse().map((build, index) => { if (releases[build.version] === build.path) { @@ -114,7 +132,7 @@ export default class WriteContractStore { if (build.version === latestRelease) { build.latest = true; - this.loadSolidityVersion(build); + promise = promise.then(() => this.loadSolidityVersion(build)); latestIndex = index; } } @@ -123,29 +141,93 @@ export default class WriteContractStore { }); this.selectedBuild = latestIndex; + return promise; + }) + .catch((error) => { + this.setWorkerError(error); }); } - @action closeWorker = () => { - this.compiler.postMessage(JSON.stringify({ - action: 'close' - })); - } - @action handleImport = (sourcecode) => { this.reloadContracts(-1, sourcecode); } @action handleSelectBuild = (_, index, value) => { this.selectedBuild = value; - this.loadSolidityVersion(this.builds[value]); + return this + 
.loadSolidityVersion(this.builds[value]) + .then(() => this.handleCompile()); + } + + getCompiler (build) { + const { longVersion } = build; + + if (!this.solc[longVersion]) { + this.solc[longVersion] = SolidityUtils + .getCompiler(build) + .then((compiler) => { + this.solc[longVersion] = compiler; + return compiler; + }) + .catch((error) => { + this.setWorkerError(error); + }); + } + + return Promise.resolve(this.solc[longVersion]); } @action loadSolidityVersion = (build) => { - this.compiler.postMessage(JSON.stringify({ - action: 'load', - data: build - })); + if (this.worker === undefined) { + return; + } else if (this.worker === null) { + this.useWorker = false; + } + + if (this.loadingSolidity) { + return this.loadingSolidity; + } + + this.loading = true; + + if (this.useWorker) { + this.loadingSolidity = this.worker + .postMessage({ + action: 'load', + data: build + }) + .then((result) => { + if (result !== 'ok') { + throw new Error('error while loading solidity: ' + result); + } + + this.loadingSolidity = false; + this.loading = false; + }) + .catch((error) => { + console.warn('error while loading solidity', error); + this.useWorker = false; + this.loadingSolidity = null; + + return this.loadSolidityVersion(build); + }); + } else { + this.loadingSolidity = this + .getCompiler(build) + .then(() => { + this.loadingSolidity = false; + this.loading = false; + + return 'ok'; + }) + .catch((error) => { + this.setWorkerError(error); + this.loadingSolidity = false; + this.loading = false; + }); + } + + return this.loadingSolidity; } @action handleOpenDeployModal = () => { @@ -177,23 +259,120 @@ export default class WriteContractStore { this.contract = this.contracts[Object.keys(this.contracts)[value]]; } + compile = (data) => { + if (this.useWorker) { + return this.worker.postMessage({ + action: 'compile', + data + }); + } + + return new Promise((resolve, reject) => { + window.setTimeout(() => { + this + .getCompiler(data.build) + .then((compiler) => { + return 
SolidityUtils.compile(data, compiler); + }) + .then(resolve) + .catch(reject); + }, 0); + }); + } + @action handleCompile = () => { - this.compiled = false; - this.compiling = true; + transaction(() => { + this.compiled = false; + this.compiling = true; + }); const build = this.builds[this.selectedBuild]; + const version = build.longVersion; + const sourcecode = this.sourcecode.replace(/\n+/g, '\n').replace(/\s(\s+)/g, ' '); + const hash = sha3(JSON.stringify({ version, sourcecode, optimize: this.optimize })); - if (this.compiler && typeof this.compiler.postMessage === 'function') { - this.sendFilesToWorker(); + let promise = Promise.resolve(null); - this.compiler.postMessage(JSON.stringify({ - action: 'compile', - data: { - sourcecode: this.sourcecode, - build: build - } - })); + if (hash === this.lastCompilation.hash) { + promise = new Promise((resolve) => { + window.setTimeout(() => { + resolve(this.lastCompilation); + }, 500); + }); + } else { + promise = this + .compile({ + sourcecode: sourcecode, + build: build, + optimize: this.optimize, + files: this.files + }) + .then((data) => { + const result = this.parseCompiled(data); + + this.lastCompilation = { + result: result, + date: new Date(), + version: data.version, + hash + }; + + return this.lastCompilation; + }) + .catch((error) => { + this.setWorkerError(error); + }); } + + return promise.then((data = null) => { + if (data) { + const { + contract, contractIndex, + annotations, contracts, errors + } = data.result; + + this.contract = contract; + this.contractIndex = contractIndex; + + this.annotations = annotations; + this.contracts = contracts; + this.errors = errors; + } + + this.compiled = true; + this.compiling = false; + }); + } + + @action handleAutocompileToggle = () => { + this.autocompile = !this.autocompile; + } + + @action handleOptimizeToggle = () => { + this.optimize = !this.optimize; + } + + @action parseCompiled = (data) => { + const { contracts } = data; + + const { errors = [] } = data; + 
const errorAnnotations = this.parseErrors(errors); + const formalAnnotations = this.parseErrors(data.formal && data.formal.errors, true); + + const annotations = [].concat( + errorAnnotations, + formalAnnotations + ); + + const contractKeys = Object.keys(contracts || {}); + + const contract = contractKeys.length ? contracts[contractKeys[0]] : null; + const contractIndex = contractKeys.length ? 0 : -1; + + return { + contract, contractIndex, + contracts, errors, annotations + }; } parseErrors = (data, formal = false) => { @@ -220,43 +399,6 @@ export default class WriteContractStore { }); } - @action parseCompiled = (data) => { - const { contracts } = data; - - const { errors = [] } = data; - const errorAnnotations = this.parseErrors(errors); - const formalAnnotations = this.parseErrors(data.formal && data.formal.errors, true); - - const annotations = [].concat( - errorAnnotations, - formalAnnotations - ); - - if (annotations.findIndex((a) => /__parity_tryAgain/.test(a.text)) > -1) { - return; - } - - const contractKeys = Object.keys(contracts || {}); - - this.contract = contractKeys.length ? contracts[contractKeys[0]] : null; - this.contractIndex = contractKeys.length ? 
0 : -1; - - this.contracts = contracts; - this.errors = errors; - this.annotations = annotations; - - this.compiled = true; - this.compiling = false; - } - - @action parseLoading = (isLoading) => { - this.loading = isLoading; - - if (!isLoading) { - this.handleCompile(); - } - } - @action handleEditSourcecode = (value, compile = false) => { this.sourcecode = value; @@ -268,7 +410,7 @@ export default class WriteContractStore { if (compile) { this.handleCompile(); - } else { + } else if (this.autocompile) { this.debouncedCompile(); } } @@ -327,8 +469,9 @@ export default class WriteContractStore { current: this.sourcecode }); - this.handleCompile(); this.resizeEditor(); + + return this.handleCompile(); } @action handleLoadContract = (contract) => { @@ -363,16 +506,13 @@ export default class WriteContractStore { } catch (e) {} } - sendFilesToWorker = () => { + get files () { const files = [].concat( Object.values(this.snippets), Object.values(this.savedContracts) ); - this.compiler.postMessage(JSON.stringify({ - action: 'setFiles', - data: files - })); + return files; } } diff --git a/js/webpack/app.js b/js/webpack/app.js index cf38ec99c..df801533c 100644 --- a/js/webpack/app.js +++ b/js/webpack/app.js @@ -22,6 +22,7 @@ const WebpackErrorNotificationPlugin = require('webpack-error-notification'); const CopyWebpackPlugin = require('copy-webpack-plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const ExtractTextPlugin = require('extract-text-webpack-plugin'); +const ServiceWorkerWebpackPlugin = require('serviceworker-webpack-plugin'); const Shared = require('./shared'); const DAPPS = require('../src/dapps'); @@ -50,7 +51,7 @@ module.exports = { rules: [ { test: /\.js$/, - exclude: /node_modules/, + exclude: /(node_modules)/, // use: [ 'happypack/loader?id=js' ] use: isProd ? 
['babel-loader'] : [ 'babel-loader?cacheDirectory=true' @@ -136,7 +137,18 @@ module.exports = { }, plugins: (function () { - const plugins = Shared.getPlugins().concat([ + const DappsHTMLInjection = DAPPS.map((dapp) => { + return new HtmlWebpackPlugin({ + title: dapp.title, + filename: dapp.name + '.html', + template: './dapps/index.ejs', + favicon: FAVICON, + secure: dapp.secure, + chunks: [ isProd ? null : 'commons', dapp.name ] + }); + }); + + const plugins = Shared.getPlugins().concat( new CopyWebpackPlugin([{ from: './error_pages.css', to: 'styles.css' }], {}), new WebpackErrorNotificationPlugin(), @@ -151,17 +163,14 @@ module.exports = { template: './index.ejs', favicon: FAVICON, chunks: [ isProd ? null : 'commons', 'index' ] - }) - ], DAPPS.map((dapp) => { - return new HtmlWebpackPlugin({ - title: dapp.title, - filename: dapp.name + '.html', - template: './dapps/index.ejs', - favicon: FAVICON, - secure: dapp.secure, - chunks: [ isProd ? null : 'commons', dapp.name ] - }); - })); + }), + + new ServiceWorkerWebpackPlugin({ + entry: path.join(__dirname, '../src/serviceWorker.js') + }), + + DappsHTMLInjection + ); if (!isProd) { const DEST_I18N = path.join(__dirname, '..', DEST, 'i18n'); diff --git a/parity/upgrade.rs b/parity/upgrade.rs index 94d0adfe4..2963c3d05 100644 --- a/parity/upgrade.rs +++ b/parity/upgrade.rs @@ -139,9 +139,7 @@ fn file_exists(path: &Path) -> bool { } pub fn upgrade_key_location(from: &PathBuf, to: &PathBuf) { - let mut parent = to.clone(); - parent.pop(); - match fs::create_dir_all(&parent).and_then(|()| fs::read_dir(from)) { + match fs::create_dir_all(&to).and_then(|()| fs::read_dir(from)) { Ok(entries) => { let files: Vec<_> = entries.filter_map(|f| f.ok().and_then(|f| if f.file_type().ok().map_or(false, |f| f.is_file()) { f.file_name().to_str().map(|s| s.to_owned()) } else { None })).collect(); let mut num: usize = 0; @@ -165,7 +163,7 @@ pub fn upgrade_key_location(from: &PathBuf, to: &PathBuf) { } }, Err(e) => { - warn!("Error 
moving keys from {:?} to {:?}: {:?}", from, to, e); + debug!("Error moving keys from {:?} to {:?}: {:?}", from, to, e); } } }