From 365ab60fedc07eda86d01334f5f64dae8295025c Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Thu, 15 Dec 2016 19:06:05 +0100 Subject: [PATCH 01/16] Use ServiceWorker for Contract compilation --- js/package.json | 1 + js/src/redux/providers/compilerActions.js | 57 ++++- js/src/redux/providers/compilerReducer.js | 9 +- js/src/serviceWorker.js | 185 +++++++++++++++ js/src/views/WriteContract/writeContract.css | 10 +- js/src/views/WriteContract/writeContract.js | 35 ++- .../views/WriteContract/writeContractStore.js | 219 +++++++++++------- js/webpack/app.js | 35 +-- 8 files changed, 442 insertions(+), 109 deletions(-) create mode 100644 js/src/serviceWorker.js diff --git a/js/package.json b/js/package.json index 615562adc..c43561799 100644 --- a/js/package.json +++ b/js/package.json @@ -117,6 +117,7 @@ "react-hot-loader": "3.0.0-beta.6", "react-intl-aggregate-webpack-plugin": "0.0.1", "rucksack-css": "0.9.1", + "serviceworker-webpack-plugin": "0.1.7", "sinon": "1.17.6", "sinon-as-promised": "4.0.2", "sinon-chai": "2.8.0", diff --git a/js/src/redux/providers/compilerActions.js b/js/src/redux/providers/compilerActions.js index c3b3a9bdd..b679830d2 100644 --- a/js/src/redux/providers/compilerActions.js +++ b/js/src/redux/providers/compilerActions.js @@ -14,7 +14,45 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . -import CompilerWorker from 'worker-loader!./compilerWorker.js'; +import PromiseWorker from 'promise-worker'; +import runtime from 'serviceworker-webpack-plugin/lib/runtime'; + +let workerRegistration; + +// Setup the Service Worker +if ('serviceWorker' in navigator) { + workerRegistration = runtime + .register() + .then(() => { + console.log('registering service worker'); + + if (navigator.serviceWorker.controller) { + // already active and controlling this page + return navigator.serviceWorker; + } + // wait for a new service worker to control this page + return new Promise((resolve, reject) => { + try { + const onControllerChange = () => { + navigator.serviceWorker.removeEventListener('controllerchange', onControllerChange); + resolve(navigator.serviceWorker); + }; + + navigator.serviceWorker.addEventListener('controllerchange', onControllerChange); + } catch (error) { + reject(error); + } + }); + }) + .then((_worker) => { + const worker = new PromiseWorker(_worker); + + console.log('registered service worker'); + return worker; + }); +} else { + workerRegistration = Promise.reject('Service Worker is not available in your browser.'); +} export function setWorker (worker) { return { @@ -23,6 +61,13 @@ export function setWorker (worker) { }; } +export function setError (error) { + return { + type: 'setError', + error + }; +} + export function setupWorker () { return (dispatch, getState) => { const state = getState(); @@ -31,7 +76,13 @@ export function setupWorker () { return; } - const worker = new CompilerWorker(); - dispatch(setWorker(worker)); + workerRegistration + .then((worker) => { + dispatch(setWorker(worker)); + }) + .catch((error) => { + console.error('sw', error); + dispatch(setError(error)); + }); }; } diff --git a/js/src/redux/providers/compilerReducer.js b/js/src/redux/providers/compilerReducer.js index 7163ac7a5..7470f0751 100644 --- a/js/src/redux/providers/compilerReducer.js +++ b/js/src/redux/providers/compilerReducer.js @@ -17,13 +17,18 @@ import { handleActions } from 'redux-actions'; const initialState = { - worker: null + worker: null, + error: null }; export default handleActions({ setWorker (state, 
action) { const { worker } = action; - return Object.assign({}, state, { worker }); + }, + + setError (state, action) { + const { error } = action; + return Object.assign({}, state, { error }); } }, initialState); diff --git a/js/src/serviceWorker.js b/js/src/serviceWorker.js new file mode 100644 index 000000000..0488c2b61 --- /dev/null +++ b/js/src/serviceWorker.js @@ -0,0 +1,185 @@ +// Copyright 2015, 2016 Parity Technologies (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +import solc from 'solc/browser-wrapper'; +import { isWebUri } from 'valid-url'; +import registerPromiseWorker from 'promise-worker/register'; + +const CACHE_NAME = 'parity-cache-v1'; + +registerPromiseWorker((msg) => { + return handleMessage(msg); +}); + +self.addEventListener('install', (event) => { + event.waitUntil(self.skipWaiting()); +}); + +self.addEventListener('activate', (event) => { + event.waitUntil(self.clients.claim()); +}); + +self.solcVersions = {}; +self.files = {}; + +function handleMessage (message) { + switch (message.action) { + case 'compile': + return compile(message.data); + + case 'load': + return load(message.data); + + case 'setFiles': + return setFiles(message.data); + + default: + console.warn(`unknown action "${message.action}"`); + return null; + } +} + +function setFiles (files) { + const prevFiles = self.files; + const nextFiles = files.reduce((obj, file) => { + obj[file.name] = file.sourcecode; + return obj; + }, {}); + + self.files = { + ...prevFiles, + ...nextFiles + }; + + return 'ok'; +} + +// @todo re-implement find imports (with ASYNC fetch) +// function findImports (path) { +// if (self.files[path]) { +// if (self.files[path].error) { +// return Promise.reject(self.files[path].error); +// } + +// return Promise.resolve(self.files[path]); +// } + +// if (isWebUri(path)) { +// console.log('[sw] fetching', path); + +// return fetch(path) +// .then((r) => r.text()) +// .then((c) => { +// console.log('[sw]', 'got content at ' + path); +// self.files[path] = c; +// return c; +// }) +// .catch((e) => { +// console.error('[sw]', 'fetching', path, e); +// self.files[path] = { error: e }; +// throw e; +// }); +// } + +// console.log(`[sw] path ${path} not found...`); +// return Promise.reject('File not found'); +// } + +function compile (data, optimized = 1) { + const { sourcecode, build } = data; + + return fetchSolidity(build) + .then((compiler) => { + const start = Date.now(); + console.log('[sw] compiling...'); + + const input = { + '': sourcecode + }; + + const compiled = compiler.compile({ sources: input }, optimized); + + const time = Math.round((Date.now() - start) / 100) / 10; + console.log(`[sw] done compiling in ${time}s`); + + compiled.version = build.longVersion; + + return compiled; + }); +} + +function load (build) { + return fetchSolidity(build) + .then(() => 'ok'); +} + +function fetchSolc (build) { + const { path, longVersion } = build; + const URL = 
`https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/${path}`; + + return caches + .match(URL) + .then((response) => { + if (response) { + return response; + } + + console.log(`[sw] fetching solc-bin ${longVersion} at ${URL}`); + + return fetch(URL) + .then((response) => { + if (!response || response.status !== 200 || response.type !== 'basic') { + return response; + } + + const responseToCache = response.clone(); + + caches.open(CACHE_NAME) + .then((cache) => { + cache.put(URL, responseToCache); + }); + + return response; + }); + }); +} + +function fetchSolidity (build) { + const { path, longVersion } = build; + + if (self.solcVersions[path]) { + return Promise.resolve(self.solcVersions[path]); + } + + return fetchSolc(build) + .then((r) => r.text()) + .then((code) => { + const solcCode = code.replace(/^var Module;/, 'var Module=self.__solcModule;'); + self.__solcModule = {}; + + console.log(`[sw] evaluating ${longVersion}`); + + // eslint-disable-next-line no-eval + eval(solcCode); + + console.log(`[sw] done evaluating ${longVersion}`); + + const compiler = solc(self.__solcModule); + self.solcVersions[path] = compiler; + + return compiler; + }); +} diff --git a/js/src/views/WriteContract/writeContract.css b/js/src/views/WriteContract/writeContract.css index 2502c4060..feed8616e 100644 --- a/js/src/views/WriteContract/writeContract.css +++ b/js/src/views/WriteContract/writeContract.css @@ -45,6 +45,14 @@ } } +.error { + background-color: rgba(200, 0, 0, 0.25); + padding: 1em 0.5em; + margin-top: -0.5em; + font-family: monospace; + font-size: 0.9em; +} + .mainEditor { &:global(.ace-solarized-dark) { background-color: rgba(0, 0, 0, 0.5); @@ -87,13 +95,13 @@ display: flex; flex-direction: column; margin-right: 0.5em; - .panel { background-color: rgba(0, 0, 0, 0.5); padding: 1em; flex: 1; display: flex; flex-direction: column; + box-sizing: border-box; } .compilation { diff --git a/js/src/views/WriteContract/writeContract.js b/js/src/views/WriteContract/writeContract.js index 31c4dd244..f6a03df9e 100644 --- a/js/src/views/WriteContract/writeContract.js +++ b/js/src/views/WriteContract/writeContract.js @@ -42,10 +42,11 @@ class WriteContract extends Component { static propTypes = { accounts: PropTypes.object.isRequired, setupWorker: PropTypes.func.isRequired, - worker: PropTypes.object + worker: PropTypes.object, + workerError: PropTypes.any }; - store = new WriteContractStore(); + store = WriteContractStore.get(); state = { resizing: false, @@ -57,22 +58,31 @@ class WriteContract extends Component { setupWorker(); if (worker) { - this.store.setCompiler(worker); + this.store.setWorker(worker); } } componentDidMount () { this.store.setEditor(this.refs.editor); + if (this.props.workerError) { + this.store.setWorkerError(this.props.workerError); + } + // Wait for editor to be loaded window.setTimeout(() => { this.store.resizeEditor(); }, 2000); } + // Set the worker if not set before (eg. first page loading) componentWillReceiveProps (nextProps) { if (!this.props.worker && nextProps.worker) { - this.store.setCompiler(nextProps.worker); + this.store.setWorker(nextProps.worker); + } + + if (this.props.workerError !== nextProps.workerError) { + this.store.setWorkerError(nextProps.workerError); } } @@ -217,7 +227,18 @@ class WriteContract extends Component { } renderParameters () { - const { compiling, contract, selectedBuild, loading } = this.store; + const { compiling, contract, selectedBuild, loading, workerError } = this.store; + + if (workerError) { + return ( +
+        <div className={ styles.panel }>
+          <div className={ styles.centeredMessage }>
+            <p>Unfortunately, an error occurred...</p>
+            <div className={ styles.error }>
+              { workerError }
+            </div>
+          </div>
+        </div>
+ ); + } if (selectedBuild < 0) { return ( @@ -485,8 +506,8 @@ class WriteContract extends Component { function mapStateToProps (state) { const { accounts } = state.personal; - const { worker } = state.compiler; - return { accounts, worker }; + const { worker, error } = state.compiler; + return { accounts, worker, workerError: error }; } function mapDispatchToProps (dispatch) { diff --git a/js/src/views/WriteContract/writeContractStore.js b/js/src/views/WriteContract/writeContractStore.js index dd1985466..9f81a63e5 100644 --- a/js/src/views/WriteContract/writeContractStore.js +++ b/js/src/views/WriteContract/writeContractStore.js @@ -18,6 +18,8 @@ import { action, observable } from 'mobx'; import store from 'store'; import { debounce } from 'lodash'; +import { sha3 } from '~/api/util/sha3'; + const WRITE_CONTRACT_STORE_KEY = '_parity::writeContractStore'; const SNIPPETS = { @@ -43,6 +45,8 @@ const SNIPPETS = { } }; +let instance = null; + export default class WriteContractStore { @observable sourcecode = ''; @@ -68,45 +72,47 @@ export default class WriteContractStore { @observable savedContracts = {}; @observable selectedContract = {}; + @observable workerError = null; + + lastCompilation = {}; snippets = SNIPPETS; + worker = null; constructor () { - this.reloadContracts(); - this.fetchSolidityVersions(); - this.debouncedCompile = debounce(this.handleCompile, 1000); } + static get () { + if (!instance) { + instance = new WriteContractStore(); + } + + return instance; + } + + @action setWorkerError (error) { + this.workerError = error; + } + @action setEditor (editor) { this.editor = editor; } - @action setCompiler (compiler) { - this.compiler = compiler; + @action setWorker (worker) { + this.worker = worker; - this.compiler.onmessage = (event) => { - const message = JSON.parse(event.data); - - switch (message.event) { - case 'compiled': - this.parseCompiled(message.data); - break; - case 'loading': - this.parseLoading(message.data); - break; - case 'try-again': - this.handleCompile(); - break; - } - }; + this + .fetchSolidityVersions() + .then(() => this.reloadContracts()); } fetchSolidityVersions () { - fetch('https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/list.json') + return fetch('https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/list.json') .then((r) => r.json()) .then((data) => { const { builds, releases, latestRelease } = data; let latestIndex = -1; + let promise = Promise.resolve(); this.builds = builds.reverse().map((build, index) => { if (releases[build.version] === build.path) { @@ -114,7 +120,7 @@ export default class WriteContractStore { if (build.version === latestRelease) { build.latest = true; - this.loadSolidityVersion(build); + promise = promise.then(() => this.loadSolidityVersion(build)); latestIndex = index; } } @@ -123,29 +129,40 @@ export default class WriteContractStore { }); this.selectedBuild = latestIndex; + return promise; }); } - @action closeWorker = () => { - this.compiler.postMessage(JSON.stringify({ - action: 'close' - })); - } - @action handleImport = (sourcecode) => { this.reloadContracts(-1, sourcecode); } @action handleSelectBuild = (_, index, value) => { this.selectedBuild = value; - this.loadSolidityVersion(this.builds[value]); + return this.loadSolidityVersion(this.builds[value]); } @action loadSolidityVersion = (build) => { - this.compiler.postMessage(JSON.stringify({ - action: 'load', - data: build - })); + if (!this.worker) { + return; + } + + return this.worker + .postMessage({ + action: 'load', + data: build + }) 
+ .then((result) => { + if (result !== 'ok') { + this.setWorkerError(result); + } + }) + .catch((error) => { + this.setWorkerError(error); + }) + .then(() => { + this.loading = false; + }); } @action handleOpenDeployModal = () => { @@ -177,23 +194,94 @@ export default class WriteContractStore { this.contract = this.contracts[Object.keys(this.contracts)[value]]; } - @action handleCompile = () => { + @action handleCompile = (loadFiles = false) => { this.compiled = false; this.compiling = true; const build = this.builds[this.selectedBuild]; + const version = build.longVersion; + const sourcecode = this.sourcecode.replace(/\n+/g, '\n').replace(/\s(\s+)/g, ' '); + const hash = sha3(JSON.stringify({ version, sourcecode })); - if (this.compiler && typeof this.compiler.postMessage === 'function') { - this.sendFilesToWorker(); + let promise = Promise.resolve(null); - this.compiler.postMessage(JSON.stringify({ - action: 'compile', - data: { - sourcecode: this.sourcecode, - build: build - } - })); + if (hash === this.lastCompilation.hash) { + promise = new Promise((resolve) => { + window.setTimeout(() => { + resolve(this.lastCompilation); + }, 500); + }); + } else if (this.worker) { + promise = loadFiles + ? this.sendFilesToWorker() + : Promise.resolve(); + + promise = promise + .then(() => { + return this.worker.postMessage({ + action: 'compile', + data: { + sourcecode: sourcecode, + build: build + } + }); + }) + .then((data) => { + const result = this.parseCompiled(data); + + this.lastCompilation = { + result: result, + date: new Date(), + version: data.version, + hash + }; + + return this.lastCompilation; + }) + .catch((error) => { + this.setWorkerError(error); + }); } + + return promise.then((data = {}) => { + const { + contract, contractIndex, + annotations, contracts, errors + } = data.result; + + this.contract = contract; + this.contractIndex = contractIndex; + + this.annotations = annotations; + this.contracts = contracts; + this.errors = errors; + + this.compiled = true; + this.compiling = false; + }); + } + + @action parseCompiled = (data) => { + const { contracts } = data; + + const { errors = [] } = data; + const errorAnnotations = this.parseErrors(errors); + const formalAnnotations = this.parseErrors(data.formal && data.formal.errors, true); + + const annotations = [].concat( + errorAnnotations, + formalAnnotations + ); + + const contractKeys = Object.keys(contracts || {}); + + const contract = contractKeys.length ? contracts[contractKeys[0]] : null; + const contractIndex = contractKeys.length ? 0 : -1; + + return { + contract, contractIndex, + contracts, errors, annotations + }; } parseErrors = (data, formal = false) => { @@ -220,43 +308,6 @@ export default class WriteContractStore { }); } - @action parseCompiled = (data) => { - const { contracts } = data; - - const { errors = [] } = data; - const errorAnnotations = this.parseErrors(errors); - const formalAnnotations = this.parseErrors(data.formal && data.formal.errors, true); - - const annotations = [].concat( - errorAnnotations, - formalAnnotations - ); - - if (annotations.findIndex((a) => /__parity_tryAgain/.test(a.text)) > -1) { - return; - } - - const contractKeys = Object.keys(contracts || {}); - - this.contract = contractKeys.length ? contracts[contractKeys[0]] : null; - this.contractIndex = contractKeys.length ? 
0 : -1; - - this.contracts = contracts; - this.errors = errors; - this.annotations = annotations; - - this.compiled = true; - this.compiling = false; - } - - @action parseLoading = (isLoading) => { - this.loading = isLoading; - - if (!isLoading) { - this.handleCompile(); - } - } - @action handleEditSourcecode = (value, compile = false) => { this.sourcecode = value; @@ -327,8 +378,10 @@ export default class WriteContractStore { current: this.sourcecode }); - this.handleCompile(); this.resizeEditor(); + + // Send the new files to the Worker and compile + return this.handleCompile(true); } @action handleLoadContract = (contract) => { @@ -369,10 +422,10 @@ export default class WriteContractStore { Object.values(this.savedContracts) ); - this.compiler.postMessage(JSON.stringify({ + return this.worker.postMessage({ action: 'setFiles', data: files - })); + }); } } diff --git a/js/webpack/app.js b/js/webpack/app.js index cf38ec99c..a2ff20ced 100644 --- a/js/webpack/app.js +++ b/js/webpack/app.js @@ -22,6 +22,7 @@ const WebpackErrorNotificationPlugin = require('webpack-error-notification'); const CopyWebpackPlugin = require('copy-webpack-plugin'); const HtmlWebpackPlugin = require('html-webpack-plugin'); const ExtractTextPlugin = require('extract-text-webpack-plugin'); +const ServiceWorkerWebpackPlugin = require('serviceworker-webpack-plugin'); const Shared = require('./shared'); const DAPPS = require('../src/dapps'); @@ -50,7 +51,7 @@ module.exports = { rules: [ { test: /\.js$/, - exclude: /node_modules/, + exclude: /(node_modules)/, // use: [ 'happypack/loader?id=js' ] use: isProd ? ['babel-loader'] : [ 'babel-loader?cacheDirectory=true' @@ -136,7 +137,18 @@ module.exports = { }, plugins: (function () { - const plugins = Shared.getPlugins().concat([ + const DappsHTMLInjection = DAPPS.map((dapp) => { + return new HtmlWebpackPlugin({ + title: dapp.title, + filename: dapp.name + '.html', + template: './dapps/index.ejs', + favicon: FAVICON, + secure: dapp.secure, + chunks: [ isProd ? null : 'commons', dapp.name ] + }); + }); + + const plugins = Shared.getPlugins().concat( new CopyWebpackPlugin([{ from: './error_pages.css', to: 'styles.css' }], {}), new WebpackErrorNotificationPlugin(), @@ -151,17 +163,14 @@ module.exports = { template: './index.ejs', favicon: FAVICON, chunks: [ isProd ? null : 'commons', 'index' ] - }) - ], DAPPS.map((dapp) => { - return new HtmlWebpackPlugin({ - title: dapp.title, - filename: dapp.name + '.html', - template: './dapps/index.ejs', - favicon: FAVICON, - secure: dapp.secure, - chunks: [ isProd ? null : 'commons', dapp.name ] - }); - })); + }), + + new ServiceWorkerWebpackPlugin({ + entry: path.join(__dirname, '../src/serviceWorker.js'), + }), + + DappsHTMLInjection + ); if (!isProd) { const DEST_I18N = path.join(__dirname, '..', DEST, 'i18n'); From 5983a0c1a5b10e5fd48e2304a623cf408da89254 Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Thu, 15 Dec 2016 19:07:13 +0100 Subject: [PATCH 02/16] Linting fixes --- js/src/serviceWorker.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/serviceWorker.js b/js/src/serviceWorker.js index 0488c2b61..c90583613 100644 --- a/js/src/serviceWorker.js +++ b/js/src/serviceWorker.js @@ -15,7 +15,7 @@ // along with Parity. If not, see . 
import solc from 'solc/browser-wrapper'; -import { isWebUri } from 'valid-url'; +// import { isWebUri } from 'valid-url'; import registerPromiseWorker from 'promise-worker/register'; const CACHE_NAME = 'parity-cache-v1'; From 1d04f25a0a3ae92d3ff6ddfedba863a68c58b044 Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Thu, 15 Dec 2016 19:35:16 +0100 Subject: [PATCH 03/16] Add error toString --- js/src/views/WriteContract/writeContract.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/views/WriteContract/writeContract.js b/js/src/views/WriteContract/writeContract.js index f6a03df9e..2293eff73 100644 --- a/js/src/views/WriteContract/writeContract.js +++ b/js/src/views/WriteContract/writeContract.js @@ -234,7 +234,7 @@ class WriteContract extends Component {
           <div className={ styles.centeredMessage }>
             <p>Unfortunately, an error occurred...</p>
             <div className={ styles.error }>
-              { workerError }
+              { workerError.toString() }
); From e377ec3194eb97b6cc2c5c15b14d4b2ca05246ee Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Mon, 19 Dec 2016 14:39:10 +0100 Subject: [PATCH 04/16] Working ServiceWorker --- js/src/redux/providers/compilerActions.js | 26 ++++----------- js/src/serviceWorker.js | 13 +++++--- .../views/WriteContract/writeContractStore.js | 32 ++++++++++++------- js/webpack/app.js | 2 +- 4 files changed, 36 insertions(+), 37 deletions(-) diff --git a/js/src/redux/providers/compilerActions.js b/js/src/redux/providers/compilerActions.js index b679830d2..9c4eb9535 100644 --- a/js/src/redux/providers/compilerActions.js +++ b/js/src/redux/providers/compilerActions.js @@ -25,29 +25,15 @@ if ('serviceWorker' in navigator) { .register() .then(() => { console.log('registering service worker'); - - if (navigator.serviceWorker.controller) { - // already active and controlling this page - return navigator.serviceWorker; - } - // wait for a new service worker to control this page - return new Promise((resolve, reject) => { - try { - const onControllerChange = () => { - navigator.serviceWorker.removeEventListener('controllerchange', onControllerChange); - resolve(navigator.serviceWorker); - }; - - navigator.serviceWorker.addEventListener('controllerchange', onControllerChange); - } catch (error) { - reject(error); - } - }); + return navigator.serviceWorker.ready; }) - .then((_worker) => { + .then((registration) => { + console.log('registered service worker'); + + const _worker = registration.active; + _worker.controller = registration.active; const worker = new PromiseWorker(_worker); - console.log('registered service worker'); return worker; }); } else { diff --git a/js/src/serviceWorker.js b/js/src/serviceWorker.js index c90583613..a7052ad84 100644 --- a/js/src/serviceWorker.js +++ b/js/src/serviceWorker.js @@ -25,10 +25,12 @@ registerPromiseWorker((msg) => { }); self.addEventListener('install', (event) => { + console.warn('installing sw'); event.waitUntil(self.skipWaiting()); }); self.addEventListener('activate', (event) => { + console.warn('activating sw'); event.waitUntil(self.clients.claim()); }); @@ -141,18 +143,19 @@ function fetchSolc (build) { return fetch(URL) .then((response) => { - if (!response || response.status !== 200 || response.type !== 'basic') { + if (!response || response.status !== 200) { return response; } const responseToCache = response.clone(); - caches.open(CACHE_NAME) + return caches.open(CACHE_NAME) .then((cache) => { - cache.put(URL, responseToCache); + return cache.put(URL, responseToCache); + }) + .then(() => { + return response; }); - - return response; }); }); } diff --git a/js/src/views/WriteContract/writeContractStore.js b/js/src/views/WriteContract/writeContractStore.js index 9f81a63e5..96d9c4d07 100644 --- a/js/src/views/WriteContract/writeContractStore.js +++ b/js/src/views/WriteContract/writeContractStore.js @@ -74,6 +74,7 @@ export default class WriteContractStore { @observable workerError = null; + loadingSolidity = false; lastCompilation = {}; snippets = SNIPPETS; worker = null; @@ -147,7 +148,11 @@ export default class WriteContractStore { return; } - return this.worker + if (this.loadingSolidity) { + return this.loadingSolidity; + } + + this.loadingSolidity = this.worker .postMessage({ action: 'load', data: build @@ -161,8 +166,11 @@ export default class WriteContractStore { this.setWorkerError(error); }) .then(() => { + this.loadingSolidity = false; this.loading = false; }); + + return this.loadingSolidity; } @action handleOpenDeployModal = () => { @@ -243,18 +251,20 
@@ export default class WriteContractStore { }); } - return promise.then((data = {}) => { - const { - contract, contractIndex, - annotations, contracts, errors - } = data.result; + return promise.then((data = null) => { + if (data) { + const { + contract, contractIndex, + annotations, contracts, errors + } = data.result; - this.contract = contract; - this.contractIndex = contractIndex; + this.contract = contract; + this.contractIndex = contractIndex; - this.annotations = annotations; - this.contracts = contracts; - this.errors = errors; + this.annotations = annotations; + this.contracts = contracts; + this.errors = errors; + } this.compiled = true; this.compiling = false; diff --git a/js/webpack/app.js b/js/webpack/app.js index a2ff20ced..df801533c 100644 --- a/js/webpack/app.js +++ b/js/webpack/app.js @@ -166,7 +166,7 @@ module.exports = { }), new ServiceWorkerWebpackPlugin({ - entry: path.join(__dirname, '../src/serviceWorker.js'), + entry: path.join(__dirname, '../src/serviceWorker.js') }), DappsHTMLInjection From 4c7dc9f2d877f3a44e5879cc3841d24390574a4d Mon Sep 17 00:00:00 2001 From: Robert Habermeier Date: Mon, 19 Dec 2016 14:54:10 +0100 Subject: [PATCH 05/16] require only simpler methods on Provider --- ethcore/light/src/client.rs | 34 ++--- ethcore/light/src/net/mod.rs | 60 ++++----- ethcore/light/src/net/tests/mod.rs | 98 ++++---------- ethcore/light/src/provider.rs | 207 ++++++++++++++++++----------- 4 files changed, 206 insertions(+), 193 deletions(-) diff --git a/ethcore/light/src/client.rs b/ethcore/light/src/client.rs index edadc440c..9a594f3dd 100644 --- a/ethcore/light/src/client.rs +++ b/ethcore/light/src/client.rs @@ -58,7 +58,7 @@ impl Client { /// Import a local transaction. pub fn import_own_transaction(&self, tx: SignedTransaction) { self.tx_pool.lock().insert(tx.hash(), tx); - } + } /// Fetch a vector of all pending transactions. pub fn pending_transactions(&self) -> Vec { @@ -90,28 +90,28 @@ impl Provider for Client { None } - fn block_headers(&self, _req: request::Headers) -> Vec { + fn block_header(&self, _id: BlockId) -> Option { + None + } + + fn block_body(&self, _id: BlockId) -> Option { + None + } + + fn block_receipts(&self, _hash: &H256) -> Option { + None + } + + fn state_proof(&self, _req: request::StateProof) -> Vec { Vec::new() } - fn block_bodies(&self, _req: request::Bodies) -> Vec { + fn contract_code(&self, _req: request::ContractCode) -> Bytes { Vec::new() } - fn receipts(&self, _req: request::Receipts) -> Vec { - Vec::new() - } - - fn proofs(&self, _req: request::StateProofs) -> Vec { - Vec::new() - } - - fn contract_code(&self, _req: request::ContractCodes) -> Vec { - Vec::new() - } - - fn header_proofs(&self, _req: request::HeaderProofs) -> Vec { - Vec::new() + fn header_proof(&self, _req: request::HeaderProof) -> Option<(Bytes, Vec)> { + None } fn ready_transactions(&self) -> Vec { diff --git a/ethcore/light/src/net/mod.rs b/ethcore/light/src/net/mod.rs index 491e1d0ac..17898c24c 100644 --- a/ethcore/light/src/net/mod.rs +++ b/ethcore/light/src/net/mod.rs @@ -157,7 +157,7 @@ impl Peer { /// An LES event handler. /// -/// Each handler function takes a context which describes the relevant peer +/// Each handler function takes a context which describes the relevant peer /// and gives references to the IO layer and protocol structure so new messages /// can be dispatched immediately. 
/// @@ -185,7 +185,7 @@ pub trait Handler: Send + Sync { fn on_state_proofs(&self, _ctx: &EventContext, _req_id: ReqId, _proofs: &[Vec]) { } /// Called when a peer responds with contract code. fn on_code(&self, _ctx: &EventContext, _req_id: ReqId, _codes: &[Bytes]) { } - /// Called when a peer responds with header proofs. Each proof is a block header coupled + /// Called when a peer responds with header proofs. Each proof is a block header coupled /// with a series of trie nodes is ascending order by distance from the root. fn on_header_proofs(&self, _ctx: &EventContext, _req_id: ReqId, _proofs: &[(Bytes, Vec)]) { } /// Called on abort. @@ -215,9 +215,9 @@ pub struct Params { /// This is simply designed for request-response purposes. Higher level uses /// of the protocol, such as synchronization, will function as wrappers around /// this system. -// +// // LOCK ORDER: -// Locks must be acquired in the order declared, and when holding a read lock +// Locks must be acquired in the order declared, and when holding a read lock // on the peers, only one peer may be held at a time. pub struct LightProtocol { provider: Arc, @@ -252,7 +252,7 @@ impl LightProtocol { } } - /// Check the maximum amount of requests of a specific type + /// Check the maximum amount of requests of a specific type /// which a peer would be able to serve. pub fn max_requests(&self, peer: PeerId, kind: request::Kind) -> Option { self.peers.read().get(&peer).and_then(|peer| { @@ -267,11 +267,11 @@ impl LightProtocol { }) } - /// Make a request to a peer. + /// Make a request to a peer. /// /// Fails on: nonexistent peer, network error, peer not server, /// insufficient buffer. Does not check capabilities before sending. - /// On success, returns a request id which can later be coordinated + /// On success, returns a request id which can later be coordinated /// with an event. pub fn request_from(&self, io: &IoContext, peer_id: &PeerId, request: Request) -> Result { let peers = self.peers.read(); @@ -325,10 +325,10 @@ impl LightProtocol { // TODO: "urgent" announcements like new blocks? // the timer approach will skip 1 (possibly 2) in rare occasions. - if peer_info.sent_head == announcement.head_hash || + if peer_info.sent_head == announcement.head_hash || peer_info.status.head_num >= announcement.head_num || now - peer_info.last_update < Duration::milliseconds(UPDATE_INTERVAL_MS) { - continue + continue } peer_info.last_update = now; @@ -357,7 +357,7 @@ impl LightProtocol { /// Add an event handler. /// Ownership will be transferred to the protocol structure, /// and the handler will be kept alive as long as it is. - /// These are intended to be added when the protocol structure + /// These are intended to be added when the protocol structure /// is initialized as a means of customizing its behavior. 
pub fn add_handler(&mut self, handler: Box) { self.handlers.push(handler); @@ -380,7 +380,7 @@ impl LightProtocol { pending_requests.clear(); } - // Does the common pre-verification of responses before the response itself + // Does the common pre-verification of responses before the response itself // is actually decoded: // - check whether peer exists // - check whether request was made @@ -406,7 +406,7 @@ impl LightProtocol { let mut peer_info = peer_info.lock(); match peer_info.remote_flow.as_mut() { Some(&mut (ref mut buf, ref mut flow)) => { - let actual_buffer = ::std::cmp::min(cur_buffer, *flow.limit()); + let actual_buffer = ::std::cmp::min(cur_buffer, *flow.limit()); buf.update_to(actual_buffer) } None => return Err(Error::NotServer), // this really should be impossible. @@ -488,17 +488,17 @@ impl LightProtocol { request::Kind::Receipts => timeout::RECEIPTS, request::Kind::StateProofs => timeout::PROOFS, request::Kind::Codes => timeout::CONTRACT_CODES, - request::Kind::HeaderProofs => timeout::HEADER_PROOFS, + request::Kind::HeaderProofs => timeout::HEADER_PROOFS, }; if r.timestamp + Duration::milliseconds(kind_timeout) <= now { - debug!(target: "les", "Request for {:?} from peer {} timed out", + debug!(target: "les", "Request for {:?} from peer {} timed out", r.request.kind(), r.peer_id); - + // keep the request in the `pending` set for now so // on_disconnect will pass unfulfilled ReqIds to handlers. // in the case that a response is received after this, the - // disconnect won't be cancelled but the ReqId won't be + // disconnect won't be cancelled but the ReqId won't be // marked as abandoned. io.disconnect_peer(r.peer_id); } @@ -519,7 +519,7 @@ impl LightProtocol { punish(*peer, io, Error::UnsupportedProtocolVersion(proto_version)); return; } - + let chain_info = self.provider.chain_info(); let status = Status { @@ -540,7 +540,7 @@ impl LightProtocol { last_update: SteadyTime::now(), }); - io.send(*peer, packet::STATUS, status_packet); + io.send(*peer, packet::STATUS, status_packet); } // called when a peer disconnects. @@ -569,7 +569,7 @@ impl LightProtocol { io: io, proto: self, }, &unfulfilled) - } + } } } @@ -608,7 +608,7 @@ impl LightProtocol { for handler in &self.handlers { handler.on_connect(&Ctx { peer: *peer, - io: io, + io: io, proto: self, }, &status, &capabilities) } @@ -662,7 +662,7 @@ impl LightProtocol { } // Handle a request for block headers. 
- fn get_block_headers(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> { + fn get_block_headers(&self, peer: &PeerId, io: &IoContext, data: UntrustedRlp) -> Result<(), Error> { const MAX_HEADERS: usize = 512; let peers = self.peers.read(); @@ -914,7 +914,7 @@ impl LightProtocol { .map(|x| x.iter().map(|node| node.as_raw().to_owned()).collect()) .collect(); - for handler in &self.handlers { + for handler in &self.handlers { handler.on_state_proofs(&Ctx { peer: *peer, io: io, @@ -956,7 +956,7 @@ impl LightProtocol { let max_cost = try!(peer.deduct_max(&self.flow_params, request::Kind::Codes, req.code_requests.len())); - let response = self.provider.contract_code(req); + let response = self.provider.contract_codes(req); let response_len = response.iter().filter(|x| !x.is_empty()).count(); let actual_cost = self.flow_params.compute_cost(request::Kind::Codes, response_len); assert!(max_cost >= actual_cost, "Actual cost exceeded maximum computed cost."); @@ -983,7 +983,7 @@ impl LightProtocol { let raw_code: Vec = try!(try!(raw.at(2)).iter().map(|x| x.as_val()).collect()); - for handler in &self.handlers { + for handler in &self.handlers { handler.on_code(&Ctx { peer: *peer, io: io, @@ -1055,11 +1055,11 @@ impl LightProtocol { try!(raw.at(1)).iter().map(|x| x.as_raw().to_owned()).collect(), )) } - + let req_id = try!(self.pre_verify_response(peer, request::Kind::HeaderProofs, &raw)); let raw_proofs: Vec<_> = try!(try!(raw.at(2)).iter().map(decode_res).collect()); - for handler in &self.handlers { + for handler in &self.handlers { handler.on_header_proofs(&Ctx { peer: *peer, io: io, @@ -1082,7 +1082,7 @@ impl LightProtocol { handler.on_transactions(&Ctx { peer: *peer, io: io, - proto: self, + proto: self, }, &txs); } @@ -1136,12 +1136,12 @@ fn encode_request(req: &Request, req_id: usize) -> Vec { Request::Headers(ref headers) => { let mut stream = RlpStream::new_list(2); stream.append(&req_id).begin_list(4); - + match headers.start { HashOrNumber::Hash(ref hash) => stream.append(hash), HashOrNumber::Number(ref num) => stream.append(num), }; - + stream .append(&headers.max) .append(&headers.skip) @@ -1214,4 +1214,4 @@ fn encode_request(req: &Request, req_id: usize) -> Vec { stream.out() } } -} \ No newline at end of file +} diff --git a/ethcore/light/src/net/tests/mod.rs b/ethcore/light/src/net/tests/mod.rs index 64d53d9c8..0cfc8cac7 100644 --- a/ethcore/light/src/net/tests/mod.rs +++ b/ethcore/light/src/net/tests/mod.rs @@ -94,79 +94,36 @@ impl Provider for TestProvider { None } - fn block_headers(&self, req: request::Headers) -> Vec { - use request::HashOrNumber; - use ethcore::views::HeaderView; + fn block_header(&self, id: BlockId) -> Option { + self.0.client.block_header(id) + } - let best_num = self.chain_info().best_block_number; - let start_num = match req.start { - HashOrNumber::Number(start_num) => start_num, - HashOrNumber::Hash(hash) => match self.0.client.block_header(BlockId::Hash(hash)) { - None => { - return Vec::new(); - } - Some(header) => { - let num = HeaderView::new(&header).number(); - if req.max == 1 || self.0.client.block_hash(BlockId::Number(num)) != Some(hash) { - // Non-canonical header or single header requested. 
- return vec![header]; - } + fn block_body(&self, id: BlockId) -> Option { + self.0.client.block_body(id) + } - num - } + fn block_receipts(&self, hash: &H256) -> Option { + self.0.client.block_receipts(&hash) + } + + fn state_proof(&self, req: request::StateProof) -> Vec { + match req.key2 { + Some(_) => vec![::util::sha3::SHA3_NULL_RLP.to_vec()], + None => { + // sort of a leaf node + let mut stream = RlpStream::new_list(2); + stream.append(&req.key1).append_empty_data(); + vec![stream.out()] } - }; - - (0u64..req.max as u64) - .map(|x: u64| x.saturating_mul(req.skip + 1)) - .take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num >= *x }) - .map(|x| if req.reverse { start_num - x } else { start_num + x }) - .map(|x| self.0.client.block_header(BlockId::Number(x))) - .take_while(|x| x.is_some()) - .flat_map(|x| x) - .collect() + } } - fn block_bodies(&self, req: request::Bodies) -> Vec { - req.block_hashes.into_iter() - .map(|hash| self.0.client.block_body(BlockId::Hash(hash))) - .map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) - .collect() + fn contract_code(&self, req: request::ContractCode) -> Bytes { + req.account_key.iter().chain(req.account_key.iter()).cloned().collect() } - fn receipts(&self, req: request::Receipts) -> Vec { - req.block_hashes.into_iter() - .map(|hash| self.0.client.block_receipts(&hash)) - .map(|receipts| receipts.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) - .collect() - } - - fn proofs(&self, req: request::StateProofs) -> Vec { - req.requests.into_iter() - .map(|req| { - match req.key2 { - Some(_) => ::util::sha3::SHA3_NULL_RLP.to_vec(), - None => { - // sort of a leaf node - let mut stream = RlpStream::new_list(2); - stream.append(&req.key1).append_empty_data(); - stream.out() - } - } - }) - .collect() - } - - fn contract_code(&self, req: request::ContractCodes) -> Vec { - req.code_requests.into_iter() - .map(|req| { - req.account_key.iter().chain(req.account_key.iter()).cloned().collect() - }) - .collect() - } - - fn header_proofs(&self, req: request::HeaderProofs) -> Vec { - req.requests.into_iter().map(|_| ::rlp::EMPTY_LIST_RLP.to_vec()).collect() + fn header_proof(&self, _req: request::HeaderProof) -> Option<(Bytes, Vec)> { + None } fn ready_transactions(&self) -> Vec { @@ -455,8 +412,8 @@ fn get_state_proofs() { let request_body = encode_request(&request, req_id); let response = { let proofs = vec![ - { let mut stream = RlpStream::new_list(2); stream.append(&key1).append_empty_data(); stream.out() }, - ::util::sha3::SHA3_NULL_RLP.to_vec(), + { let mut stream = RlpStream::new_list(2); stream.append(&key1).append_empty_data(); vec![stream.out()] }, + vec![::util::sha3::SHA3_NULL_RLP.to_vec()], ]; let new_buf = *flow_params.limit() - flow_params.compute_cost(request::Kind::StateProofs, 2); @@ -465,7 +422,10 @@ fn get_state_proofs() { response_stream.append(&req_id).append(&new_buf).begin_list(2); for proof in proofs { - response_stream.append_raw(&proof, 1); + response_stream.begin_list(proof.len()); + for node in proof { + response_stream.append_raw(&node, 1); + } } response_stream.out() diff --git a/ethcore/light/src/provider.rs b/ethcore/light/src/provider.rs index 1f9bbf8aa..afc5294fa 100644 --- a/ethcore/light/src/provider.rs +++ b/ethcore/light/src/provider.rs @@ -52,31 +52,139 @@ pub trait Provider: Send + Sync { /// /// The returned vector may have any length in the range [0, `max`], but the /// results within must adhere to the `skip` and `reverse` parameters. 
- fn block_headers(&self, req: request::Headers) -> Vec; + fn block_headers(&self, req: request::Headers) -> Vec { + use request::HashOrNumber; + use ethcore::views::HeaderView; + + if req.max == 0 { return Vec::new() } + + let best_num = self.chain_info().best_block_number; + let start_num = match req.start { + HashOrNumber::Number(start_num) => start_num, + HashOrNumber::Hash(hash) => match self.block_header(BlockId::Hash(hash)) { + None => { + trace!(target: "les_provider", "Unknown block hash {} requested", hash); + return Vec::new(); + } + Some(header) => { + let num = HeaderView::new(&header).number(); + let canon_hash = self.block_header(BlockId::Number(num)) + .map(|h| HeaderView::new(&h).hash()); + + if req.max == 1 || canon_hash != Some(hash) { + // Non-canonical header or single header requested. + return vec![header]; + } + + num + } + } + }; + + (0u64..req.max as u64) + .map(|x: u64| x.saturating_mul(req.skip + 1)) + .take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num >= *x }) + .map(|x| if req.reverse { start_num - x } else { start_num + x }) + .map(|x| self.block_header(BlockId::Number(x))) + .take_while(|x| x.is_some()) + .flat_map(|x| x) + .collect() + } + + /// Get a block header by id. + fn block_header(&self, id: BlockId) -> Option; /// Provide as many as possible of the requested blocks (minus the headers) encoded /// in RLP format. - fn block_bodies(&self, req: request::Bodies) -> Vec; + fn block_bodies(&self, req: request::Bodies) -> Vec { + req.block_hashes.into_iter() + .map(|hash| self.block_body(BlockId::Hash(hash))) + .map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) + .collect() + } + + /// Get a block body by id. + fn block_body(&self, id: BlockId) -> Option; /// Provide the receipts as many as possible of the requested blocks. /// Returns a vector of RLP-encoded lists of receipts. - fn receipts(&self, req: request::Receipts) -> Vec; + fn receipts(&self, req: request::Receipts) -> Vec { + req.block_hashes.into_iter() + .map(|hash| self.block_receipts(&hash)) + .map(|receipts| receipts.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) + .collect() + } + + /// Get a block's receipts as an RLP-encoded list by block hash. + fn block_receipts(&self, hash: &H256) -> Option; /// Provide a set of merkle proofs, as requested. Each request is a /// block hash and request parameters. /// /// Returns a vector of RLP-encoded lists satisfying the requests. - fn proofs(&self, req: request::StateProofs) -> Vec; + fn proofs(&self, req: request::StateProofs) -> Vec { + use rlp::{RlpStream, Stream}; + + let mut results = Vec::with_capacity(req.requests.len()); + + for request in req.requests { + let proof = self.state_proof(request); + + let mut stream = RlpStream::new_list(proof.len()); + for node in proof { + stream.append_raw(&node, 1); + } + + results.push(stream.out()); + } + + results + } + + /// Get a state proof from a request. Each proof should be a vector + /// of rlp-encoded trie nodes, in ascending order by distance from the root. + fn state_proof(&self, req: request::StateProof) -> Vec; /// Provide contract code for the specified (block_hash, account_hash) pairs. /// Each item in the resulting vector is either the raw bytecode or empty. 
- fn contract_code(&self, req: request::ContractCodes) -> Vec; + fn contract_codes(&self, req: request::ContractCodes) -> Vec { + req.code_requests.into_iter() + .map(|req| self.contract_code(req)) + .collect() + } - /// Provide header proofs from the Canonical Hash Tries as well as the headers + /// Get contract code by request. Either the raw bytecode or empty. + fn contract_code(&self, req: request::ContractCode) -> Bytes; + + /// Provide header proofs from the Canonical Hash Tries as well as the headers /// they correspond to -- each element in the returned vector is a 2-tuple. - /// The first element is a block header and the second a merkle proof of + /// The first element is a block header and the second a merkle proof of /// the header in a requested CHT. - fn header_proofs(&self, req: request::HeaderProofs) -> Vec; + fn header_proofs(&self, req: request::HeaderProofs) -> Vec { + use rlp::{self, RlpStream, Stream}; + + req.requests.into_iter() + .map(|req| self.header_proof(req)) + .map(|maybe_proof| match maybe_proof { + None => rlp::EMPTY_LIST_RLP.to_vec(), + Some((header, proof)) => { + let mut stream = RlpStream::new_list(2); + stream.append_raw(&header, 1).begin_list(proof.len()); + + for node in proof { + stream.append_raw(&node, 1); + } + + stream.out() + } + }) + .collect() + } + + /// Provide a header proof from a given Canonical Hash Trie as well as the + /// corresponding header. The first element is the block header and the + /// second is a merkle proof of the CHT. + fn header_proof(&self, req: request::HeaderProof) -> Option<(Bytes, Vec)>; /// Provide pending transactions. fn ready_transactions(&self) -> Vec; @@ -96,86 +204,31 @@ impl Provider for T { Some(self.pruning_info().earliest_state) } - fn block_headers(&self, req: request::Headers) -> Vec { - use request::HashOrNumber; - use ethcore::views::HeaderView; - - let best_num = self.chain_info().best_block_number; - let start_num = match req.start { - HashOrNumber::Number(start_num) => start_num, - HashOrNumber::Hash(hash) => match self.block_header(BlockId::Hash(hash)) { - None => { - trace!(target: "les_provider", "Unknown block hash {} requested", hash); - return Vec::new(); - } - Some(header) => { - let num = HeaderView::new(&header).number(); - if req.max == 1 || self.block_hash(BlockId::Number(num)) != Some(hash) { - // Non-canonical header or single header requested. 
- return vec![header]; - } - - num - } - } - }; - - (0u64..req.max as u64) - .map(|x: u64| x.saturating_mul(req.skip + 1)) - .take_while(|x| if req.reverse { x < &start_num } else { best_num - start_num >= *x }) - .map(|x| if req.reverse { start_num - x } else { start_num + x }) - .map(|x| self.block_header(BlockId::Number(x))) - .take_while(|x| x.is_some()) - .flat_map(|x| x) - .collect() + fn block_header(&self, id: BlockId) -> Option { + BlockChainClient::block_header(self, id) } - fn block_bodies(&self, req: request::Bodies) -> Vec { - req.block_hashes.into_iter() - .map(|hash| self.block_body(BlockId::Hash(hash))) - .map(|body| body.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) - .collect() + fn block_body(&self, id: BlockId) -> Option { + BlockChainClient::block_body(self, id) } - fn receipts(&self, req: request::Receipts) -> Vec { - req.block_hashes.into_iter() - .map(|hash| self.block_receipts(&hash)) - .map(|receipts| receipts.unwrap_or_else(|| ::rlp::EMPTY_LIST_RLP.to_vec())) - .collect() + fn block_receipts(&self, hash: &H256) -> Option { + BlockChainClient::block_receipts(self, hash) } - fn proofs(&self, req: request::StateProofs) -> Vec { - use rlp::{RlpStream, Stream}; - - let mut results = Vec::with_capacity(req.requests.len()); - - for request in req.requests { - let proof = match request.key2 { - Some(key2) => self.prove_storage(request.key1, key2, request.from_level, BlockId::Hash(request.block)), - None => self.prove_account(request.key1, request.from_level, BlockId::Hash(request.block)), - }; - - let mut stream = RlpStream::new_list(proof.len()); - for node in proof { - stream.append_raw(&node, 1); - } - - results.push(stream.out()); + fn state_proof(&self, req: request::StateProof) -> Vec { + match req.key2 { + Some(key2) => self.prove_storage(req.key1, key2, req.from_level, BlockId::Hash(req.block)), + None => self.prove_account(req.key1, req.from_level, BlockId::Hash(req.block)), } - - results } - fn contract_code(&self, req: request::ContractCodes) -> Vec { - req.code_requests.into_iter() - .map(|req| { - self.code_by_hash(req.account_key, BlockId::Hash(req.block_hash)) - }) - .collect() + fn contract_code(&self, req: request::ContractCode) -> Bytes { + self.code_by_hash(req.account_key, BlockId::Hash(req.block_hash)) } - fn header_proofs(&self, req: request::HeaderProofs) -> Vec { - req.requests.into_iter().map(|_| ::rlp::EMPTY_LIST_RLP.to_vec()).collect() + fn header_proof(&self, _req: request::HeaderProof) -> Option<(Bytes, Vec)> { + None } fn ready_transactions(&self) -> Vec { From ddb242c9695a2d1e891b0f3d48cec8b4ef4f6c76 Mon Sep 17 00:00:00 2001 From: arkpar Date: Mon, 19 Dec 2016 17:41:55 +0100 Subject: [PATCH 06/16] Fixed upgrading keys on the first run --- ethcore/res/ethereum/tests | 2 +- parity/upgrade.rs | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/ethcore/res/ethereum/tests b/ethcore/res/ethereum/tests index e8f4624b7..9028c4801 160000 --- a/ethcore/res/ethereum/tests +++ b/ethcore/res/ethereum/tests @@ -1 +1 @@ -Subproject commit e8f4624b7f1a15c63674eecf577c7ab76c3b16be +Subproject commit 9028c4801fd39fbb71a9796979182549a24e81c8 diff --git a/parity/upgrade.rs b/parity/upgrade.rs index 94d0adfe4..2963c3d05 100644 --- a/parity/upgrade.rs +++ b/parity/upgrade.rs @@ -139,9 +139,7 @@ fn file_exists(path: &Path) -> bool { } pub fn upgrade_key_location(from: &PathBuf, to: &PathBuf) { - let mut parent = to.clone(); - parent.pop(); - match fs::create_dir_all(&parent).and_then(|()| fs::read_dir(from)) { + match 
fs::create_dir_all(&to).and_then(|()| fs::read_dir(from)) { Ok(entries) => { let files: Vec<_> = entries.filter_map(|f| f.ok().and_then(|f| if f.file_type().ok().map_or(false, |f| f.is_file()) { f.file_name().to_str().map(|s| s.to_owned()) } else { None })).collect(); let mut num: usize = 0; @@ -165,7 +163,7 @@ pub fn upgrade_key_location(from: &PathBuf, to: &PathBuf) { } }, Err(e) => { - warn!("Error moving keys from {:?} to {:?}: {:?}", from, to, e); + debug!("Error moving keys from {:?} to {:?}: {:?}", from, to, e); } } } From ae8f77bc7c012f75a1971aee8d3c2c7690a2389c Mon Sep 17 00:00:00 2001 From: Robert Habermeier Date: Mon, 19 Dec 2016 17:15:54 +0100 Subject: [PATCH 07/16] fix deadlock in queue drop --- ethcore/src/verification/queue/mod.rs | 34 +++++++++++++++------------ 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/ethcore/src/verification/queue/mod.rs b/ethcore/src/verification/queue/mod.rs index d268b1cff..64e7e59e7 100644 --- a/ethcore/src/verification/queue/mod.rs +++ b/ethcore/src/verification/queue/mod.rs @@ -137,7 +137,7 @@ pub struct VerificationQueue { max_queue_size: usize, max_mem_use: usize, scale_verifiers: bool, - verifier_handles: Vec>, + verifier_handles: Vec>, state: Arc<(Mutex, Condvar)>, } @@ -225,8 +225,8 @@ impl VerificationQueue { let num_cpus = ::num_cpus::get(); let max_verifiers = min(num_cpus, MAX_VERIFIERS); - let default_amount = max(1, min(max_verifiers, config.verifier_settings.num_verifiers)); - let state = Arc::new((Mutex::new(State::Work(default_amount)), Condvar::new())); + let default_amount = max(1, min(max_verifiers, config.verifier_settings.num_verifiers)); + let state = Arc::new((Mutex::new(State::Work(default_amount)), Condvar::new())); let mut verifier_handles = Vec::with_capacity(max_verifiers); debug!(target: "verification", "Allocating {} verifiers, {} initially active", max_verifiers, default_amount); @@ -248,11 +248,11 @@ impl VerificationQueue { .spawn(move || { panic_handler.catch_panic(move || { VerificationQueue::verify( - verification, - engine, - wait, - ready, - empty, + verification, + engine, + wait, + ready, + empty, state, i, ) @@ -299,11 +299,11 @@ impl VerificationQueue { debug!(target: "verification", "verifier {} sleeping", id); state.1.wait(&mut cur_state); - debug!(target: "verification", "verifier {} waking up", id); + debug!(target: "verification", "verifier {} waking up", id); } - if let State::Exit = *cur_state { - debug!(target: "verification", "verifier {} exiting", id); + if let State::Exit = *cur_state { + debug!(target: "verification", "verifier {} exiting", id); break; } } @@ -326,7 +326,7 @@ impl VerificationQueue { } if let State::Exit = *state.0.lock() { - debug!(target: "verification", "verifier {} exiting", id); + debug!(target: "verification", "verifier {} exiting", id); return; } } @@ -681,8 +681,12 @@ impl Drop for VerificationQueue { *self.state.0.lock() = State::Exit; self.state.1.notify_all(); - // wake up all threads waiting for more work. - self.more_to_verify.notify_all(); + // acquire this lock to force threads to reach the waiting point + // if they're in-between the exit check and the more_to_verify wait. + { + let _more = self.verification.more_to_verify.lock().unwrap(); + self.more_to_verify.notify_all(); + } // wait for all verifier threads to join. for thread in self.verifier_handles.drain(..) 
{ @@ -811,7 +815,7 @@ mod tests { fn readjust_verifiers() { let queue = get_test_queue(true); - // put all the verifiers to sleep to ensure + // put all the verifiers to sleep to ensure // the test isn't timing sensitive. *queue.state.0.lock() = State::Work(0); From 11f382ca1b3a066ec1afffe302554ed773ac44f1 Mon Sep 17 00:00:00 2001 From: Robert Habermeier Date: Mon, 19 Dec 2016 19:15:18 +0100 Subject: [PATCH 08/16] remove -Zorbit=off from rustflags on windows --- appveyor.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index ca477b997..e76e37d60 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -6,7 +6,7 @@ environment: certpass: secure: 0BgXJqxq9Ei34/hZ7121FQ== keyfile: C:\users\appveyor\Certificates.p12 - RUSTFLAGS: -Zorbit=off -D warnings + RUSTFLAGS: -D warnings branches: only: From 6659a4ec2130ea7f782f79600566c8ab42228b1c Mon Sep 17 00:00:00 2001 From: Gav Wood Date: Tue, 20 Dec 2016 00:21:19 +0100 Subject: [PATCH 09/16] Use rhash for non-native CI platforms and submit build. --- .gitlab-ci.yml | 84 ++++++++++++++++++++++++++++---------------------- 1 file changed, 48 insertions(+), 36 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 233aa0be9..0c9a256e3 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -166,27 +166,30 @@ linux-armv7: - export CXX=arm-linux-gnueabihf-g++ - export HOST_CC=gcc - export HOST_CXX=g++ + - export PLATFORM=armv7-unknown-linux-gnueabihf - rm -rf .cargo - mkdir -p .cargo - - echo "[target.armv7-unknown-linux-gnueabihf]" >> .cargo/config + - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build -j $(nproc) --target armv7-unknown-linux-gnueabihf --features final --release $CARGOFLAGS - - arm-linux-gnueabihf-strip target/armv7-unknown-linux-gnueabihf/release/parity - - md5sum target/armv7-unknown-linux-gnueabihf/release/parity > parity.md5 + - cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS + - arm-linux-gnueabihf-strip target/$PLATFORM/release/parity + - export SHA3=$(rhash --sha3-256 ~/Core/parity/target/release/parity -p %h) + - md5sum target/$PLATFORM/release/parity > parity.md5 - sh scripts/deb-build.sh armhf - - cp target/armv7-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity + - cp target/$PLATFORM/release/parity deb/usr/bin/parity - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - dpkg-deb -b deb "parity_"$VER"_armhf.deb" - md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5" - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi - - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity --body target/armv7-unknown-linux-gnueabihf/release/parity - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/parity.md5 --body parity.md5 - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/armv7-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" + 
- aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -208,27 +211,30 @@ linux-arm: - export CXX=arm-linux-gnueabihf-g++ - export HOST_CC=gcc - export HOST_CXX=g++ + - export PLATFORM=arm-unknown-linux-gnueabihf - rm -rf .cargo - mkdir -p .cargo - - echo "[target.arm-unknown-linux-gnueabihf]" >> .cargo/config + - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"arm-linux-gnueabihf-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build -j $(nproc) --target arm-unknown-linux-gnueabihf --features final --release $CARGOFLAGS - - arm-linux-gnueabihf-strip target/arm-unknown-linux-gnueabihf/release/parity - - md5sum target/arm-unknown-linux-gnueabihf/release/parity > parity.md5 + - cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS + - arm-linux-gnueabihf-strip target/$PLATFORM/release/parity + - export SHA3=$(rhash --sha3-256 ~/Core/parity/target/release/parity -p %h) + - md5sum target/$PLATFORM/release/parity > parity.md5 - sh scripts/deb-build.sh armhf - - cp target/arm-unknown-linux-gnueabihf/release/parity deb/usr/bin/parity + - cp target/$PLATFORM/release/parity deb/usr/bin/parity - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - dpkg-deb -b deb "parity_"$VER"_armhf.deb" - md5sum "parity_"$VER"_armhf.deb" > "parity_"$VER"_armhf.deb.md5" - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi - - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity --body target/arm-unknown-linux-gnueabihf/release/parity - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/parity.md5 --body parity.md5 - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabihf/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" + - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb" --body "parity_"$VER"_armhf.deb" + - aws s3api put-object --bucket $S3_BUCKET --key 
$CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_armhf.deb.md5" --body "parity_"$VER"_armhf.deb.md5" + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -250,20 +256,23 @@ linux-armv6: - export CXX=arm-linux-gnueabi-g++ - export HOST_CC=gcc - export HOST_CXX=g++ + - export PLATFORM=arm-unknown-linux-gnueabi - rm -rf .cargo - mkdir -p .cargo - - echo "[target.arm-unknown-linux-gnueabi]" >> .cargo/config + - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"arm-linux-gnueabi-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build -j $(nproc) --target arm-unknown-linux-gnueabi --features final --release $CARGOFLAGS - - arm-linux-gnueabi-strip target/arm-unknown-linux-gnueabi/release/parity - - md5sum target/arm-unknown-linux-gnueabi/release/parity > parity.md5 + - cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS + - arm-linux-gnueabi-strip target/$PLATFORM/release/parity + - export SHA3=$(rhash --sha3-256 ~/Core/parity/target/release/parity -p %h) + - md5sum target/$PLATFORM/release/parity > parity.md5 - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi - - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity --body target/arm-unknown-linux-gnueabi/release/parity - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/arm-unknown-linux-gnueabi/parity.md5 --body parity.md5 + - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity --body target/$PLATFORM/release/parity + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm @@ -285,26 +294,29 @@ linux-aarch64: - export CXX=aarch64-linux-gnu-g++ - export HOST_CC=gcc - export HOST_CXX=g++ + - export PLATFORM=aarch64-unknown-linux-gnu - rm -rf .cargo - mkdir -p .cargo - - echo "[target.aarch64-unknown-linux-gnu]" >> .cargo/config + - echo "[target.$PLATFORM]" >> .cargo/config - echo "linker= \"aarch64-linux-gnu-gcc\"" >> .cargo/config - cat .cargo/config - - cargo build -j $(nproc) --target aarch64-unknown-linux-gnu --features final --release $CARGOFLAGS - - aarch64-linux-gnu-strip target/aarch64-unknown-linux-gnu/release/parity - - md5sum target/aarch64-unknown-linux-gnu/release/parity > parity.md5 + - cargo build -j $(nproc) --target $PLATFORM --features final --release $CARGOFLAGS + - aarch64-linux-gnu-strip target/$PLATFORM/release/parity + - export SHA3=$(rhash --sha3-256 ~/Core/parity/target/release/parity -p %h) + - md5sum target/$PLATFORM/release/parity > parity.md5 - sh scripts/deb-build.sh arm64 - - cp target/aarch64-unknown-linux-gnu/release/parity deb/usr/bin/parity + - cp target/$PLATFORM/release/parity deb/usr/bin/parity - export VER=$(grep -m 1 version Cargo.toml | awk '{print $3}' | tr -d '"' | tr -d "\n") - dpkg-deb -b deb "parity_"$VER"_arm64.deb" - md5sum "parity_"$VER"_arm64.deb" > 
"parity_"$VER"_arm64.deb.md5" - aws configure set aws_access_key_id $s3_key - aws configure set aws_secret_access_key $s3_secret - if [[ $CI_BUILD_REF_NAME =~ ^(master|beta|stable)$ ]]; then export S3_BUCKET=builds-parity-published; else export S3_BUCKET=builds-parity; fi - - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/parity.md5 --body parity.md5 - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb" - - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/aarch64-unknown-linux-gnu/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5" + - aws s3 rm --recursive s3://$S3_BUCKET/$CI_BUILD_REF_NAME/$PLATFORM + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/parity.md5 --body parity.md5 + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb" --body "parity_"$VER"_arm64.deb" + - aws s3api put-object --bucket $S3_BUCKET --key $CI_BUILD_REF_NAME/$PLATFORM/"parity_"$VER"_arm64.deb.md5" --body "parity_"$VER"_arm64.deb.md5" + - curl --data "commit=$CI_BUILD_REF&sha3=$SHA3&filename=parity&secret=$RELEASES_SECRET" http://icarus.parity.io:1337/push-build/$CI_BUILD_REF_NAME/$PLATFORM tags: - rust - rust-arm From f70e80805687f70180bdf42be2157c89e82e4237 Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Tue, 20 Dec 2016 01:55:57 +0100 Subject: [PATCH 10/16] Fallback in Contract Dev if no worker --- js/src/redux/providers/compilerActions.js | 9 +- js/src/redux/providers/compilerReducer.js | 2 +- js/src/serviceWorker.js | 192 +++++++----------- js/src/util/solidity.js | 79 +++++++ js/src/views/WriteContract/writeContract.js | 4 +- .../views/WriteContract/writeContractStore.js | 128 +++++++++--- 6 files changed, 257 insertions(+), 157 deletions(-) create mode 100644 js/src/util/solidity.js diff --git a/js/src/redux/providers/compilerActions.js b/js/src/redux/providers/compilerActions.js index 9c4eb9535..d638c03a2 100644 --- a/js/src/redux/providers/compilerActions.js +++ b/js/src/redux/providers/compilerActions.js @@ -23,13 +23,8 @@ let workerRegistration; if ('serviceWorker' in navigator) { workerRegistration = runtime .register() - .then(() => { - console.log('registering service worker'); - return navigator.serviceWorker.ready; - }) + .then(() => navigator.serviceWorker.ready) .then((registration) => { - console.log('registered service worker'); - const _worker = registration.active; _worker.controller = registration.active; const worker = new PromiseWorker(_worker); @@ -68,7 +63,7 @@ export function setupWorker () { }) .catch((error) => { console.error('sw', error); - dispatch(setError(error)); + dispatch(setWorker(null)); }); }; } diff --git a/js/src/redux/providers/compilerReducer.js b/js/src/redux/providers/compilerReducer.js index 7470f0751..e23bf3b16 100644 --- a/js/src/redux/providers/compilerReducer.js +++ b/js/src/redux/providers/compilerReducer.js @@ -17,7 +17,7 @@ import { handleActions } from 'redux-actions'; const initialState = { - worker: null, + worker: undefined, error: null }; diff --git a/js/src/serviceWorker.js b/js/src/serviceWorker.js index a7052ad84..c558a57cf 100644 --- a/js/src/serviceWorker.js +++ b/js/src/serviceWorker.js @@ -14,9 +14,8 @@ // You should have received a copy of the GNU General Public License // along with Parity. 
If not, see . -import solc from 'solc/browser-wrapper'; -// import { isWebUri } from 'valid-url'; import registerPromiseWorker from 'promise-worker/register'; +import SolidityUtils from '~/util/solidity'; const CACHE_NAME = 'parity-cache-v1'; @@ -25,25 +24,71 @@ registerPromiseWorker((msg) => { }); self.addEventListener('install', (event) => { - console.warn('installing sw'); event.waitUntil(self.skipWaiting()); }); self.addEventListener('activate', (event) => { - console.warn('activating sw'); event.waitUntil(self.clients.claim()); }); -self.solcVersions = {}; +self.addEventListener('fetch', (event) => { + const { url } = event.request; + + if (/raw.githubusercontent.com\/ethereum\/solc-bin(.+)list\.json$/.test(url)) { + // Return the cached version, but still update it in background + return event.respondWith(cachedFetcher(event.request, true)); + } + + if (/raw.githubusercontent.com\/ethereum\/solc-bin(.+)soljson(.+)\.js$/.test(url)) { + return event.respondWith(cachedFetcher(event.request)); + } +}); + +self.solc = {}; self.files = {}; +function cachedFetcher (request, update = false) { + return caches + .match(request) + .then((response) => { + // Return cached response if exists and no + // updates needed + if (response && !update) { + return response; + } + + const fetcher = fetch(request.clone()) + .then((response) => { + // Check if we received a valid response + if (!response || response.status !== 200) { + return response; + } + + return caches + .open(CACHE_NAME) + .then((cache) => { + cache.put(request, response.clone()); + return response; + }); + }); + + // Cache hit - return response + // Still want to perform the fetch (update) + if (response) { + return response; + } + + return fetcher; + }); +} + function handleMessage (message) { switch (message.action) { case 'compile': return compile(message.data); case 'load': - return load(message.data); + return getCompiler(message.data).then(() => 'ok'); case 'setFiles': return setFiles(message.data); @@ -54,6 +99,15 @@ function handleMessage (message) { } } +function compile (data) { + const { build } = data; + + return getCompiler(build) + .then((compiler) => { + return SolidityUtils.compile(data, compiler); + }); +} + function setFiles (files) { const prevFiles = self.files; const nextFiles = files.reduce((obj, file) => { @@ -69,120 +123,22 @@ function setFiles (files) { return 'ok'; } -// @todo re-implement find imports (with ASYNC fetch) -// function findImports (path) { -// if (self.files[path]) { -// if (self.files[path].error) { -// return Promise.reject(self.files[path].error); -// } +function getCompiler (build) { + const { longVersion } = build; -// return Promise.resolve(self.files[path]); -// } + const fetcher = (url) => { + const request = new Request(url); + return cachedFetcher(request); + }; -// if (isWebUri(path)) { -// console.log('[sw] fetching', path); - -// return fetch(path) -// .then((r) => r.text()) -// .then((c) => { -// console.log('[sw]', 'got content at ' + path); -// self.files[path] = c; -// return c; -// }) -// .catch((e) => { -// console.error('[sw]', 'fetching', path, e); -// self.files[path] = { error: e }; -// throw e; -// }); -// } - -// console.log(`[sw] path ${path} not found...`); -// return Promise.reject('File not found'); -// } - -function compile (data, optimized = 1) { - const { sourcecode, build } = data; - - return fetchSolidity(build) - .then((compiler) => { - const start = Date.now(); - console.log('[sw] compiling...'); - - const input = { - '': sourcecode - }; - - const compiled = 
compiler.compile({ sources: input }, optimized); - - const time = Math.round((Date.now() - start) / 100) / 10; - console.log(`[sw] done compiling in ${time}s`); - - compiled.version = build.longVersion; - - return compiled; - }); -} - -function load (build) { - return fetchSolidity(build) - .then(() => 'ok'); -} - -function fetchSolc (build) { - const { path, longVersion } = build; - const URL = `https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/${path}`; - - return caches - .match(URL) - .then((response) => { - if (response) { - return response; - } - - console.log(`[sw] fetching solc-bin ${longVersion} at ${URL}`); - - return fetch(URL) - .then((response) => { - if (!response || response.status !== 200) { - return response; - } - - const responseToCache = response.clone(); - - return caches.open(CACHE_NAME) - .then((cache) => { - return cache.put(URL, responseToCache); - }) - .then(() => { - return response; - }); - }); - }); -} - -function fetchSolidity (build) { - const { path, longVersion } = build; - - if (self.solcVersions[path]) { - return Promise.resolve(self.solcVersions[path]); + if (!self.solc[longVersion]) { + self.solc[longVersion] = SolidityUtils + .getCompiler(build, fetcher) + .then((compiler) => { + self.solc[longVersion] = compiler; + return compiler; + }); } - return fetchSolc(build) - .then((r) => r.text()) - .then((code) => { - const solcCode = code.replace(/^var Module;/, 'var Module=self.__solcModule;'); - self.__solcModule = {}; - - console.log(`[sw] evaluating ${longVersion}`); - - // eslint-disable-next-line no-eval - eval(solcCode); - - console.log(`[sw] done evaluating ${longVersion}`); - - const compiler = solc(self.__solcModule); - self.solcVersions[path] = compiler; - - return compiler; - }); + return self.solc[longVersion]; } diff --git a/js/src/util/solidity.js b/js/src/util/solidity.js new file mode 100644 index 000000000..f09213ac5 --- /dev/null +++ b/js/src/util/solidity.js @@ -0,0 +1,79 @@ +// Copyright 2015, 2016 Parity Technologies (UK) Ltd. +// This file is part of Parity. + +// Parity is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. + +// Parity is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. + +// You should have received a copy of the GNU General Public License +// along with Parity. If not, see . + +import solc from 'solc/browser-wrapper'; + +export default class SolidityUtils { + + static compile (data, compiler) { + const { sourcecode, build, optimized = 1 } = data; + + const start = Date.now(); + console.log('[solidity] compiling...'); + + const input = { + '': sourcecode + }; + + const compiled = compiler.compile({ sources: input }, optimized); + + const time = Math.round((Date.now() - start) / 100) / 10; + console.log(`[solidity] done compiling in ${time}s`); + + compiled.version = build.longVersion; + compiled.sourcecode = sourcecode; + + return compiled; + } + + static getCompiler (build, _fetcher) { + const { longVersion, path } = build; + + const URL = `https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/${path}`; + + const fetcher = typeof _fetcher === 'function' + ? 
_fetcher + : (url) => fetch(url); + + const isWorker = typeof window !== 'object'; + + return fetcher(URL) + .then((r) => r.text()) + .then((code) => { + // `window` for main thread, `self` for workers + const _self = isWorker ? self : window; + _self.Module = {}; + + const solcCode = code.replace('var Module;', `var Module=${isWorker ? 'self' : 'window'}.Module;`); + + console.log(`[solidity] evaluating ${longVersion}`); + + try { + // eslint-disable-next-line no-eval + eval(solcCode); + } catch (e) { + return Promise.reject(e); + } + + console.log(`[solidity] done evaluating ${longVersion}`); + + const compiler = solc(_self.Module); + delete _self.Module; + + return compiler; + }); + } +} diff --git a/js/src/views/WriteContract/writeContract.js b/js/src/views/WriteContract/writeContract.js index 2293eff73..5266fb625 100644 --- a/js/src/views/WriteContract/writeContract.js +++ b/js/src/views/WriteContract/writeContract.js @@ -57,7 +57,7 @@ class WriteContract extends Component { const { setupWorker, worker } = this.props; setupWorker(); - if (worker) { + if (worker !== undefined) { this.store.setWorker(worker); } } @@ -77,7 +77,7 @@ class WriteContract extends Component { // Set the worker if not set before (eg. first page loading) componentWillReceiveProps (nextProps) { - if (!this.props.worker && nextProps.worker) { + if (this.props.worker === undefined && nextProps.worker !== undefined) { this.store.setWorker(nextProps.worker); } diff --git a/js/src/views/WriteContract/writeContractStore.js b/js/src/views/WriteContract/writeContractStore.js index 96d9c4d07..3a9a40aab 100644 --- a/js/src/views/WriteContract/writeContractStore.js +++ b/js/src/views/WriteContract/writeContractStore.js @@ -14,11 +14,12 @@ // You should have received a copy of the GNU General Public License // along with Parity. If not, see . 
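
Note on the worker state threaded through the diffs above: rather than a boolean, `worker` is three-valued. The reducer now initializes it to `undefined` (registration still pending), the actions dispatch a `null` worker when no Service Worker can be obtained, and a `PromiseWorker` instance means the worker is ready. A minimal sketch of that convention, assuming only those three states; the `compileStrategy` helper is illustrative and does not appear in the patches themselves:

    // undefined -> registration pending: keep waiting
    // null      -> no Service Worker: fall back to in-page compilation
    // object    -> PromiseWorker wrapper: compile via postMessage
    function compileStrategy (worker) {
      if (worker === undefined) {
        return 'wait';
      }

      if (worker === null) {
        return 'fallback'; // SolidityUtils.compile in the main thread
      }

      return 'worker'; // worker.postMessage({ action: 'compile', ... })
    }

    // Mirrors the componentWillReceiveProps guard in writeContract.js:
    // only hand the worker to the store once it settles to null or an object.
    function shouldSetWorker (prevWorker, nextWorker) {
      return prevWorker === undefined && nextWorker !== undefined;
    }
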
-import { action, observable } from 'mobx'; +import { action, observable, transaction } from 'mobx'; import store from 'store'; import { debounce } from 'lodash'; import { sha3 } from '~/api/util/sha3'; +import SolidityUtils from '~/util/solidity'; const WRITE_CONTRACT_STORE_KEY = '_parity::writeContractStore'; @@ -79,6 +80,9 @@ export default class WriteContractStore { snippets = SNIPPETS; worker = null; + useWorker = true; + solc = {}; + constructor () { this.debouncedCompile = debounce(this.handleCompile, 1000); } @@ -131,6 +135,9 @@ export default class WriteContractStore { this.selectedBuild = latestIndex; return promise; + }) + .catch((error) => { + this.setWorkerError(error); }); } @@ -143,32 +150,71 @@ export default class WriteContractStore { return this.loadSolidityVersion(this.builds[value]); } + getCompiler (build) { + const { longVersion } = build; + + if (!this.solc[longVersion]) { + this.solc[longVersion] = SolidityUtils + .getCompiler(build) + .then((compiler) => { + this.solc[longVersion] = compiler; + return compiler; + }) + .catch((error) => { + this.setWorkerError(error); + }); + } + + return Promise.resolve(this.solc[longVersion]); + } + @action loadSolidityVersion = (build) => { - if (!this.worker) { + if (this.worker === undefined) { return; + } else if (this.worker === null) { + this.useWorker = false; } if (this.loadingSolidity) { return this.loadingSolidity; } - this.loadingSolidity = this.worker - .postMessage({ - action: 'load', - data: build - }) - .then((result) => { - if (result !== 'ok') { - this.setWorkerError(result); - } - }) - .catch((error) => { - this.setWorkerError(error); - }) - .then(() => { - this.loadingSolidity = false; - this.loading = false; - }); + if (this.useWorker) { + this.loadingSolidity = this.worker + .postMessage({ + action: 'load', + data: build + }) + .then((result) => { + if (result !== 'ok') { + throw new Error('error while loading solidity: ' + result); + } + + this.loadingSolidity = false; + this.loading = false; + }) + .catch((error) => { + console.warn('error while loading solidity', error); + this.useWorker = false; + this.loadingSolidity = null; + + return this.loadSolidityVersion(build); + }); + } else { + this.loadingSolidity = this + .getCompiler(build) + .then(() => { + this.loadingSolidity = false; + this.loading = false; + + return 'ok'; + }) + .catch((error) => { + this.setWorkerError(error); + this.loadingSolidity = false; + this.loading = false; + }); + } return this.loadingSolidity; } @@ -202,9 +248,32 @@ export default class WriteContractStore { this.contract = this.contracts[Object.keys(this.contracts)[value]]; } + compile = (data) => { + if (this.useWorker) { + return this.worker.postMessage({ + action: 'compile', + data + }); + } + + return new Promise((resolve, reject) => { + window.setTimeout(() => { + this + .getCompiler(data.build) + .then((compiler) => { + return SolidityUtils.compile(data, compiler); + }) + .then(resolve) + .catch(reject); + }, 0); + }); + } + @action handleCompile = (loadFiles = false) => { - this.compiled = false; - this.compiling = true; + transaction(() => { + this.compiled = false; + this.compiling = true; + }); const build = this.builds[this.selectedBuild]; const version = build.longVersion; @@ -219,19 +288,16 @@ export default class WriteContractStore { resolve(this.lastCompilation); }, 500); }); - } else if (this.worker) { - promise = loadFiles + } else { + promise = loadFiles && this.useWorker ? 
this.sendFilesToWorker() : Promise.resolve(); promise = promise .then(() => { - return this.worker.postMessage({ - action: 'compile', - data: { - sourcecode: sourcecode, - build: build - } + return this.compile({ + sourcecode: sourcecode, + build: build }); }) .then((data) => { @@ -427,6 +493,10 @@ export default class WriteContractStore { } sendFilesToWorker = () => { + if (!this.useWorker) { + return Promise.resolve(); + } + const files = [].concat( Object.values(this.snippets), Object.values(this.savedContracts) From 5886034265405f8deaac9ad1add0a80a3bb6a391 Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Tue, 20 Dec 2016 02:11:04 +0100 Subject: [PATCH 11/16] Add optimize and autocompile toggles --- js/src/util/solidity.js | 4 ++-- js/src/views/WriteContract/writeContract.css | 10 ++++++++++ js/src/views/WriteContract/writeContract.js | 20 ++++++++++++++++++- .../views/WriteContract/writeContractStore.js | 18 ++++++++++++++--- 4 files changed, 46 insertions(+), 6 deletions(-) diff --git a/js/src/util/solidity.js b/js/src/util/solidity.js index f09213ac5..72e8e6d81 100644 --- a/js/src/util/solidity.js +++ b/js/src/util/solidity.js @@ -19,7 +19,7 @@ import solc from 'solc/browser-wrapper'; export default class SolidityUtils { static compile (data, compiler) { - const { sourcecode, build, optimized = 1 } = data; + const { sourcecode, build, optimize } = data; const start = Date.now(); console.log('[solidity] compiling...'); @@ -28,7 +28,7 @@ export default class SolidityUtils { '': sourcecode }; - const compiled = compiler.compile({ sources: input }, optimized); + const compiled = compiler.compile({ sources: input }, optimize ? 1 : 0); const time = Math.round((Date.now() - start) / 100) / 10; console.log(`[solidity] done compiling in ${time}s`); diff --git a/js/src/views/WriteContract/writeContract.css b/js/src/views/WriteContract/writeContract.css index feed8616e..c5cefcf7a 100644 --- a/js/src/views/WriteContract/writeContract.css +++ b/js/src/views/WriteContract/writeContract.css @@ -26,6 +26,16 @@ color: #ccc; } +.toggles { + display: flex; + flex-direction: row; + margin: 1em 0 0; + + > * { + flex: 1; + } +} + .container { padding: 1em 0; display: flex; diff --git a/js/src/views/WriteContract/writeContract.js b/js/src/views/WriteContract/writeContract.js index 5266fb625..c95c09c04 100644 --- a/js/src/views/WriteContract/writeContract.js +++ b/js/src/views/WriteContract/writeContract.js @@ -16,7 +16,7 @@ import React, { PropTypes, Component } from 'react'; import { observer } from 'mobx-react'; -import { MenuItem } from 'material-ui'; +import { MenuItem, Toggle } from 'material-ui'; import { connect } from 'react-redux'; import { bindActionCreators } from 'redux'; import CircularProgress from 'material-ui/CircularProgress'; @@ -283,6 +283,24 @@ class WriteContract extends Component { : null } +
+        <div className={ styles.toggles }>
+          <div>
+            <Toggle
+              label='Optimize'
+              labelPosition='right'
+              onToggle={ this.store.handleOptimizeToggle }
+              toggled={ this.store.optimize }
+            />
+          </div>
+          <div>
+            <Toggle
+              label='Auto-Compile'
+              labelPosition='right'
+              onToggle={ this.store.handleAutocompileToggle }
+              toggled={ this.store.autocompile }
+            />
+          </div>
+        </div>
{ this.renderSolidityVersions() } { this.renderCompilation() } diff --git a/js/src/views/WriteContract/writeContractStore.js b/js/src/views/WriteContract/writeContractStore.js index 3a9a40aab..c16fb5e23 100644 --- a/js/src/views/WriteContract/writeContractStore.js +++ b/js/src/views/WriteContract/writeContractStore.js @@ -66,6 +66,9 @@ export default class WriteContractStore { @observable builds = []; @observable selectedBuild = -1; + @observable autocompile = false; + @observable optimize = false; + @observable showDeployModal = false; @observable showSaveModal = false; @observable showLoadModal = false; @@ -278,7 +281,7 @@ export default class WriteContractStore { const build = this.builds[this.selectedBuild]; const version = build.longVersion; const sourcecode = this.sourcecode.replace(/\n+/g, '\n').replace(/\s(\s+)/g, ' '); - const hash = sha3(JSON.stringify({ version, sourcecode })); + const hash = sha3(JSON.stringify({ version, sourcecode, optimize: this.optimize })); let promise = Promise.resolve(null); @@ -297,7 +300,8 @@ export default class WriteContractStore { .then(() => { return this.compile({ sourcecode: sourcecode, - build: build + build: build, + optimize: this.optimize }); }) .then((data) => { @@ -337,6 +341,14 @@ export default class WriteContractStore { }); } + @action handleAutocompileToggle = () => { + this.autocompile = !this.autocompile; + } + + @action handleOptimizeToggle = () => { + this.optimize = !this.optimize; + } + @action parseCompiled = (data) => { const { contracts } = data; @@ -395,7 +407,7 @@ export default class WriteContractStore { if (compile) { this.handleCompile(); - } else { + } else if (this.autocompile) { this.debouncedCompile(); } } From 1627c3fa71357f56aaf2da92ff88383bccd18303 Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Tue, 20 Dec 2016 02:37:16 +0100 Subject: [PATCH 12/16] Add import files in Contract Dev --- js/src/util/solidity.js | 14 +++++- .../views/WriteContract/writeContractStore.js | 45 +++++++++---------- 2 files changed, 32 insertions(+), 27 deletions(-) diff --git a/js/src/util/solidity.js b/js/src/util/solidity.js index 72e8e6d81..d4c9686d5 100644 --- a/js/src/util/solidity.js +++ b/js/src/util/solidity.js @@ -19,7 +19,7 @@ import solc from 'solc/browser-wrapper'; export default class SolidityUtils { static compile (data, compiler) { - const { sourcecode, build, optimize } = data; + const { sourcecode, build, optimize, files } = data; const start = Date.now(); console.log('[solidity] compiling...'); @@ -28,7 +28,17 @@ export default class SolidityUtils { '': sourcecode }; - const compiled = compiler.compile({ sources: input }, optimize ? 1 : 0); + const findFiles = (path) => { + const file = files.find((f) => f.name === path); + + if (file) { + return { contents: file.sourcecode }; + } else { + return { error: 'File not found' }; + } + }; + + const compiled = compiler.compile({ sources: input }, optimize ? 
1 : 0, findFiles); const time = Math.round((Date.now() - start) / 100) / 10; console.log(`[solidity] done compiling in ${time}s`); diff --git a/js/src/views/WriteContract/writeContractStore.js b/js/src/views/WriteContract/writeContractStore.js index c16fb5e23..e8f18dc0e 100644 --- a/js/src/views/WriteContract/writeContractStore.js +++ b/js/src/views/WriteContract/writeContractStore.js @@ -81,7 +81,7 @@ export default class WriteContractStore { loadingSolidity = false; lastCompilation = {}; snippets = SNIPPETS; - worker = null; + worker = undefined; useWorker = true; solc = {}; @@ -107,6 +107,10 @@ export default class WriteContractStore { } @action setWorker (worker) { + if (this.worker !== undefined) { + return; + } + this.worker = worker; this @@ -150,7 +154,9 @@ export default class WriteContractStore { @action handleSelectBuild = (_, index, value) => { this.selectedBuild = value; - return this.loadSolidityVersion(this.builds[value]); + return this + .loadSolidityVersion(this.builds[value]) + .then(() => this.handleCompile()); } getCompiler (build) { @@ -182,6 +188,8 @@ export default class WriteContractStore { return this.loadingSolidity; } + this.loading = true; + if (this.useWorker) { this.loadingSolidity = this.worker .postMessage({ @@ -272,7 +280,7 @@ export default class WriteContractStore { }); } - @action handleCompile = (loadFiles = false) => { + @action handleCompile = () => { transaction(() => { this.compiled = false; this.compiling = true; @@ -292,17 +300,12 @@ export default class WriteContractStore { }, 500); }); } else { - promise = loadFiles && this.useWorker - ? this.sendFilesToWorker() - : Promise.resolve(); - - promise = promise - .then(() => { - return this.compile({ - sourcecode: sourcecode, - build: build, - optimize: this.optimize - }); + promise = this + .compile({ + sourcecode: sourcecode, + build: build, + optimize: this.optimize, + files: this.files }) .then((data) => { const result = this.parseCompiled(data); @@ -468,8 +471,7 @@ export default class WriteContractStore { this.resizeEditor(); - // Send the new files to the Worker and compile - return this.handleCompile(true); + return this.handleCompile(); } @action handleLoadContract = (contract) => { @@ -504,20 +506,13 @@ export default class WriteContractStore { } catch (e) {} } - sendFilesToWorker = () => { - if (!this.useWorker) { - return Promise.resolve(); - } - + get files() { const files = [].concat( Object.values(this.snippets), Object.values(this.savedContracts) ); - return this.worker.postMessage({ - action: 'setFiles', - data: files - }); + return files; } } From 244d1aefc67d54c581f3feba5ea006077baeb79e Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Tue, 20 Dec 2016 02:38:40 +0100 Subject: [PATCH 13/16] linting --- js/src/views/WriteContract/writeContractStore.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/js/src/views/WriteContract/writeContractStore.js b/js/src/views/WriteContract/writeContractStore.js index e8f18dc0e..141569af2 100644 --- a/js/src/views/WriteContract/writeContractStore.js +++ b/js/src/views/WriteContract/writeContractStore.js @@ -506,7 +506,7 @@ export default class WriteContractStore { } catch (e) {} } - get files() { + get files () { const files = [].concat( Object.values(this.snippets), Object.values(this.savedContracts) From efb928b8f16f95d3ffa738b92909c4e1ad9df6ac Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Tue, 20 Dec 2016 03:51:23 +0100 Subject: [PATCH 14/16] Removed Worker --- js/src/redux/providers/compilerWorker.js | 177 
----------------------- 1 file changed, 177 deletions(-) delete mode 100644 js/src/redux/providers/compilerWorker.js diff --git a/js/src/redux/providers/compilerWorker.js b/js/src/redux/providers/compilerWorker.js deleted file mode 100644 index 60a07355f..000000000 --- a/js/src/redux/providers/compilerWorker.js +++ /dev/null @@ -1,177 +0,0 @@ -// Copyright 2015, 2016 Parity Technologies (UK) Ltd. -// This file is part of Parity. - -// Parity is free software: you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. - -// Parity is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. - -// You should have received a copy of the GNU General Public License -// along with Parity. If not, see . - -import solc from 'solc/browser-wrapper'; -import { isWebUri } from 'valid-url'; - -self.solcVersions = {}; -self.files = {}; -self.lastCompile = { - sourcecode: '', - result: '', - version: '' -}; - -// eslint-disable-next-line no-undef -onmessage = (event) => { - const message = JSON.parse(event.data); - - switch (message.action) { - case 'compile': - compile(message.data); - break; - case 'load': - load(message.data); - break; - case 'setFiles': - setFiles(message.data); - break; - case 'close': - close(); - break; - } -}; - -function setFiles (files) { - const prevFiles = self.files; - const nextFiles = files.reduce((obj, file) => { - obj[file.name] = file.sourcecode; - return obj; - }, {}); - - self.files = { - ...prevFiles, - ...nextFiles - }; -} - -function findImports (path) { - if (self.files[path]) { - if (self.files[path].error) { - return { error: self.files[path].error }; - } - - return { contents: self.files[path] }; - } - - if (isWebUri(path)) { - console.log('[worker] fetching', path); - - fetch(path) - .then((r) => r.text()) - .then((c) => { - console.log('[worker]', 'got content at ' + path); - self.files[path] = c; - - postMessage(JSON.stringify({ - event: 'try-again' - })); - }) - .catch((e) => { - console.error('[worker]', 'fetching', path, e); - self.files[path] = { error: e }; - }); - - return { error: '__parity_tryAgain' }; - } - - console.log(`[worker] path ${path} not found...`); - return { error: 'File not found' }; -} - -function compile (data, optimized = 1) { - const { sourcecode, build } = data; - const { longVersion } = build; - - if (self.lastCompile.sourcecode === sourcecode && self.lastCompile.longVersion === longVersion) { - return postMessage(JSON.stringify({ - event: 'compiled', - data: self.lastCompile.result - })); - } - - fetchSolc(build) - .then((compiler) => { - const input = { - '': sourcecode - }; - - const compiled = compiler.compile({ sources: input }, optimized, findImports); - - self.lastCompile = { - version: longVersion, result: compiled, - sourcecode - }; - - postMessage(JSON.stringify({ - event: 'compiled', - data: compiled - })); - }); -} - -function load (build) { - postMessage(JSON.stringify({ - event: 'loading', - data: true - })); - - fetchSolc(build) - .then(() => { - postMessage(JSON.stringify({ - event: 'loading', - data: false - })); - }) - .catch(() => { - postMessage(JSON.stringify({ - event: 'loading', - data: false - })); - }); -} - -function fetchSolc (build) { - const { path, longVersion } = build; - - 
if (self.solcVersions[path]) { - return Promise.resolve(self.solcVersions[path]); - } - - const URL = `https://raw.githubusercontent.com/ethereum/solc-bin/gh-pages/bin/${path}`; - console.log(`[worker] fetching solc-bin ${longVersion} at ${URL}`); - - return fetch(URL) - .then((r) => r.text()) - .then((code) => { - const solcCode = code.replace(/^var Module;/, 'var Module=self.__solcModule;'); - self.__solcModule = {}; - - console.log(`[worker] evaluating ${longVersion}`); - - // eslint-disable-next-line no-eval - eval(solcCode); - - console.log(`[worker] done evaluating ${longVersion}`); - - const compiler = solc(self.__solcModule); - self.solcVersions[path] = compiler; - return compiler; - }) - .catch((e) => { - console.error('fetching solc', e); - }); -} From 7b3b82b750cf68a3b72d2fffec3a6c16e025ff56 Mon Sep 17 00:00:00 2001 From: Nicolas Gotchac Date: Tue, 20 Dec 2016 11:43:13 +0100 Subject: [PATCH 15/16] Updating dependencies --- js/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/js/package.json b/js/package.json index f5db16908..8c741fa56 100644 --- a/js/package.json +++ b/js/package.json @@ -157,6 +157,7 @@ "mobx-react-devtools": "4.2.10", "moment": "2.17.0", "phoneformat.js": "1.0.3", + "promise-worker": "1.1.1", "push.js": "0.0.11", "qs": "6.3.0", "react": "15.4.1", @@ -183,7 +184,6 @@ "valid-url": "1.0.9", "validator": "6.2.0", "web3": "0.17.0-beta", - "whatwg-fetch": "2.0.1", - "worker-loader": "0.7.1" + "whatwg-fetch": "2.0.1" } } From ee050d4fd87784340b4fd87bee0cc162f6807fc0 Mon Sep 17 00:00:00 2001 From: GitLab Build Bot Date: Tue, 20 Dec 2016 11:20:41 +0000 Subject: [PATCH 16/16] [ci skip] js-precompiled 20161220-111754 --- Cargo.lock | 2 +- js/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3ceedcc9e..a4083fd6d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1367,7 +1367,7 @@ dependencies = [ [[package]] name = "parity-ui-precompiled" version = "1.4.0" -source = "git+https://github.com/ethcore/js-precompiled.git#e3e33f97c0f3b3d788a859b5bd10f5ca1ee45871" +source = "git+https://github.com/ethcore/js-precompiled.git#c8eb24c13e6fa57bf3b85b16209d281d89b31cbf" dependencies = [ "parity-dapps-glue 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/js/package.json b/js/package.json index 8c741fa56..c7ab53a2e 100644 --- a/js/package.json +++ b/js/package.json @@ -1,6 +1,6 @@ { "name": "parity.js", - "version": "0.2.130", + "version": "0.2.131", "main": "release/index.js", "jsnext:main": "src/index.js", "author": "Parity Team ",
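
Taken together, the last patches replace the hand-rolled `worker-loader` Web Worker, with its `onmessage` handler and `JSON.stringify`d message envelopes, by `promise-worker`, which turns each `postMessage` into a request/response promise. A minimal sketch of both halves of that protocol, assuming the `action`/`data` message shape used throughout these diffs; the handler bodies are stubs, not the real compiler:

    // Worker side, as in serviceWorker.js: the registered callback's return
    // value (or a promise of it) is automatically posted back to the caller.
    import registerPromiseWorker from 'promise-worker/register';

    registerPromiseWorker((message) => {
      switch (message.action) {
        case 'load':
          return 'ok'; // stand-in for getCompiler(message.data)
        case 'compile':
          return { version: 'stub' }; // stand-in for SolidityUtils.compile
        default:
          throw new Error(`unknown action: ${message.action}`);
      }
    });

    // Caller side, as in compilerActions.js and writeContractStore.js:
    //
    //   const worker = new PromiseWorker(registration.active);
    //   worker
    //     .postMessage({ action: 'compile', data: { sourcecode, build, optimize } })
    //     .then((compiled) => { /* parseCompiled(compiled) */ })
    //     .catch((error) => { /* setWorkerError(error) */ });
    //
    // A rejected promise here is what flips `useWorker` to false in the store
    // and routes subsequent compiles through the in-page fallback.
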