From 3833f872568ff50c4123ac969e0e512cc7ab9790 Mon Sep 17 00:00:00 2001
From: Jaco Greeff
Date: Mon, 13 Nov 2017 13:44:51 +0100
Subject: [PATCH] Complete token merge, remove unused files (#7037)

* Either js or js-old to have changes to trigger push-cargo

* Complete merge of token changes (code move to js-shared)

* Update setting variable
---
 .gitlab-ci.yml                          |   2 +-
 js/scripts/push-cargo.sh                |   2 +-
 js/src/redux/providers/tokensActions.js | 250 --------------------
 js/src/util/tokens/index.js             | 299 ------------------------
 4 files changed, 2 insertions(+), 551 deletions(-)
 delete mode 100644 js/src/redux/providers/tokensActions.js
 delete mode 100644 js/src/util/tokens/index.js

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 65161317e..402f83d94 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -693,7 +693,7 @@ js-release:
     - if [ $JS_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS rebuild since no JS files modified."; else ./js/scripts/build.sh && ./js/scripts/push-precompiled.sh; fi
     - echo $JS_OLD_FILES_MODIFIED
     - if [ $JS_OLD_FILES_MODIFIED -eq 0 ]; then echo "Skipping JS (old) rebuild since no JS files modified."; else ./js-old/scripts/build.sh && ./js-old/scripts/push-precompiled.sh; fi
-    - if [ $JS_FILES_MODIFIED -eq 0 ] || [ $JS_OLD_FILES_MODIFIED -eq 0 ]; then echo "Skipping Cargo update since no JS files modified."; else ./js/scripts/push-cargo.sh; fi
+    - if [ $JS_FILES_MODIFIED -eq 0 ] && [ $JS_OLD_FILES_MODIFIED -eq 0 ]; then echo "Skipping Cargo update since no JS files modified."; else ./js/scripts/push-cargo.sh; fi
   tags:
     - javascript
diff --git a/js/scripts/push-cargo.sh b/js/scripts/push-cargo.sh
index 4fb65abe0..22ff9f364 100755
--- a/js/scripts/push-cargo.sh
+++ b/js/scripts/push-cargo.sh
@@ -16,7 +16,7 @@ function setup_git_user {
 # change into the build directory
 BASEDIR=`dirname $0`
-GITLOG=./.git/gitcommand.log
+GITLOG="./.git/gitcommand.log"
 pushd $BASEDIR
 echo "*** [cargo] Setting up GitHub config for parity"
diff --git a/js/src/redux/providers/tokensActions.js b/js/src/redux/providers/tokensActions.js
deleted file mode 100644
index 2e1e8c052..000000000
--- a/js/src/redux/providers/tokensActions.js
+++ /dev/null
@@ -1,250 +0,0 @@
-// Copyright 2015-2017 Parity Technologies (UK) Ltd.
-// This file is part of Parity.
-
-// Parity is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// Parity is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-
-// You should have received a copy of the GNU General Public License
-// along with Parity. If not, see <http://www.gnu.org/licenses/>.
-
-import { chunk, uniq } from 'lodash';
-import store from 'store';
-
-import Contracts from '~/contracts';
-import { LOG_KEYS, getLogger } from '~/config';
-import { fetchTokenIds, fetchTokensBasics, fetchTokensInfo, fetchTokensImages } from '~/util/tokens';
-
-import { setAddressImage } from './imagesActions';
-
-const TOKENS_CACHE_LS_KEY_PREFIX = '_parity::tokens::';
-const log = getLogger(LOG_KEYS.Balances);
-
-function _setTokens (tokens) {
-  return {
-    type: 'setTokens',
-    tokens
-  };
-}
-
-export function setTokens (nextTokens) {
-  return (dispatch, getState) => {
-    const { nodeStatus, tokens: prevTokens } = getState();
-    const { tokenReg } = Contracts.get();
-    const tokens = {
-      ...prevTokens,
-      ...nextTokens
-    };
-
-    return tokenReg.getContract()
-      .then((tokenRegContract) => {
-        const lsKey = TOKENS_CACHE_LS_KEY_PREFIX + nodeStatus.netChain;
-
-        store.set(lsKey, {
-          tokenreg: tokenRegContract.address,
-          tokens
-        });
-      })
-      .catch((error) => {
-        console.error(error);
-      })
-      .then(() => {
-        dispatch(_setTokens(nextTokens));
-      });
-  };
-}
-
-function loadCachedTokens (tokenRegContract) {
-  return (dispatch, getState) => {
-    const { nodeStatus } = getState();
-
-    const lsKey = TOKENS_CACHE_LS_KEY_PREFIX + nodeStatus.netChain;
-    const cached = store.get(lsKey);
-
-    if (cached) {
-      // Check if we have data from the right contract
-      if (cached.tokenreg === tokenRegContract.address && cached.tokens) {
-        log.debug('found cached tokens', cached.tokens);
-
-        // Fetch all the tokens images on load
-        // (it's the only thing that might have changed)
-        const tokenIndexes = Object.values(cached.tokens)
-          .filter((t) => t && t.fetched)
-          .map((t) => t.index);
-
-        fetchTokensData(tokenRegContract, tokenIndexes)(dispatch, getState);
-      } else {
-        store.remove(lsKey);
-      }
-    }
-  };
-}
-
-export function loadTokens (options = {}) {
-  log.debug('loading tokens', Object.keys(options).length ? options : '');
-
-  return (dispatch, getState) => {
-    const { tokenReg } = Contracts.get();
-
-    return tokenReg.getContract()
-      .then((tokenRegContract) => {
-        loadCachedTokens(tokenRegContract)(dispatch, getState);
-        return fetchTokenIds(tokenRegContract.instance);
-      })
-      .then((tokenIndexes) => loadTokensBasics(tokenIndexes, options)(dispatch, getState))
-      .catch((error) => {
-        console.warn('tokens::loadTokens', error);
-      });
-  };
-}
-
-export function loadTokensBasics (tokenIndexes, options) {
-  const limit = 64;
-
-  return (dispatch, getState) => {
-    const { api } = getState();
-    const { tokenReg } = Contracts.get();
-    const nextTokens = {};
-    const count = tokenIndexes.length;
-
-    log.debug('loading basic tokens', tokenIndexes);
-
-    if (count === 0) {
-      return Promise.resolve();
-    }
-
-    return tokenReg.getContract()
-      .then((tokenRegContract) => {
-        let promise = Promise.resolve();
-        const first = tokenIndexes[0];
-        const last = tokenIndexes[tokenIndexes.length - 1];
-
-        for (let from = first; from <= last; from += limit) {
-          // No need to fetch `limit` elements
-          const lowerLimit = Math.min(limit, last - from + 1);
-
-          promise = promise
-            .then(() => fetchTokensBasics(api, tokenRegContract, from, lowerLimit))
-            .then((results) => {
-              results
-                .forEach((token) => {
-                  nextTokens[token.id] = token;
-                });
-            });
-        }
-
-        return promise;
-      })
-      .then(() => {
-        log.debug('fetched tokens basic info', nextTokens);
-
-        dispatch(setTokens(nextTokens));
-      })
-      .catch((error) => {
-        console.warn('tokens::fetchTokens', error);
-      });
-  };
-}
-
-export function fetchTokens (_tokenIndexes) {
-  const tokenIndexes = uniq(_tokenIndexes || []);
-  const tokenChunks = chunk(tokenIndexes, 64);
-
-  return (dispatch, getState) => {
-    const { tokenReg } = Contracts.get();
-
-    return tokenReg.getContract()
-      .then((tokenRegContract) => {
-        let promise = Promise.resolve();
-
-        tokenChunks.forEach((tokenChunk) => {
-          promise = promise
-            .then(() => fetchTokensData(tokenRegContract, tokenChunk)(dispatch, getState));
-        });
-
-        return promise;
-      })
-      .then(() => {
-        log.debug('fetched token', getState().tokens);
-      })
-      .catch((error) => {
-        console.warn('tokens::fetchTokens', error);
-      });
-  };
-}
-
-/**
- * Split the given token indexes between those for whom
- * we already have some info, and thus just need to fetch
- * the image, and those for whom we don't have anything and
- * need to fetch all the info.
- */
-function fetchTokensData (tokenRegContract, tokenIndexes) {
-  return (dispatch, getState) => {
-    const { api, tokens, images } = getState();
-    const allTokens = Object.values(tokens);
-
-    const tokensIndexesMap = allTokens
-      .reduce((map, token) => {
-        map[token.index] = token;
-        return map;
-      }, {});
-
-    const fetchedTokenIndexes = allTokens
-      .filter((token) => token.fetched)
-      .map((token) => token.index);
-
-    const fullIndexes = [];
-    const partialIndexes = [];
-
-    tokenIndexes.forEach((tokenIndex) => {
-      if (fetchedTokenIndexes.includes(tokenIndex)) {
-        partialIndexes.push(tokenIndex);
-      } else {
-        fullIndexes.push(tokenIndex);
-      }
-    });
-
-    log.debug('need to fully fetch', fullIndexes);
-    log.debug('need to partially fetch', partialIndexes);
-
-    const fullPromise = fetchTokensInfo(api, tokenRegContract, fullIndexes);
-    const partialPromise = fetchTokensImages(api, tokenRegContract, partialIndexes)
-      .then((imagesResult) => {
-        return imagesResult.map((image, index) => {
-          const tokenIndex = partialIndexes[index];
-          const token = tokensIndexesMap[tokenIndex];
-
-          return { ...token, image };
-        });
-      });
-
-    return Promise.all([ fullPromise, partialPromise ])
-      .then(([ fullResults, partialResults ]) => {
-        log.debug('fetched', { fullResults, partialResults });
-
-        return [].concat(fullResults, partialResults)
-          .filter(({ address }) => !/0x0*$/.test(address))
-          .reduce((tokens, token) => {
-            const { id, image, address } = token;
-
-            // dispatch only the changed images
-            if (images[address] !== image) {
-              dispatch(setAddressImage(address, image, true));
-            }
-
-            tokens[id] = token;
-            return tokens;
-          }, {});
-      })
-      .then((tokens) => {
-        dispatch(setTokens(tokens));
-      });
-  };
-}
diff --git a/js/src/util/tokens/index.js b/js/src/util/tokens/index.js
deleted file mode 100644
index 11ad0f903..000000000
--- a/js/src/util/tokens/index.js
+++ /dev/null
@@ -1,299 +0,0 @@
-// Copyright 2015-2017 Parity Technologies (UK) Ltd.
-// This file is part of Parity.
-
-// Parity is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// Parity is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-
-// You should have received a copy of the GNU General Public License
-// along with Parity. If not, see <http://www.gnu.org/licenses/>.
-
-import { range } from 'lodash';
-import BigNumber from 'bignumber.js';
-
-import { hashToImageUrl } from '~/redux/util';
-import { sha3 } from '~/api/util/sha3';
-import imagesEthereum from '~/../assets/images/contracts/ethereum-black-64x64.png';
-import {
-  tokenAddresses as tokenAddressesBytcode,
-  tokensBalances as tokensBalancesBytecode
-} from './bytecodes';
-
-export const ETH_TOKEN = {
-  address: '',
-  format: new BigNumber(10).pow(18),
-  id: getTokenId('eth_native_token'),
-  image: imagesEthereum,
-  name: 'Ethereum',
-  native: true,
-  tag: 'ETH'
-};
-
-export function fetchTokenIds (tokenregInstance) {
-  return tokenregInstance.tokenCount
-    .call()
-    .then((numTokens) => {
-      const tokenIndexes = range(numTokens.toNumber());
-
-      return tokenIndexes;
-    });
-}
-
-export function fetchTokensBasics (api, tokenReg, start = 0, limit = 100) {
-  const tokenAddressesCallData = encode(
-    api,
-    [ 'address', 'uint', 'uint' ],
-    [ tokenReg.address, start, limit ]
-  );
-
-  return api.eth
-    .call({ data: tokenAddressesBytcode + tokenAddressesCallData })
-    .then((result) => {
-      return decodeArray(api, 'address[]', result);
-    })
-    .then((tokenAddresses) => {
-      return tokenAddresses.map((tokenAddress, index) => {
-        if (/^0x0*$/.test(tokenAddress)) {
-          return null;
-        }
-
-        const tokenIndex = start + index;
-
-        return {
-          address: tokenAddress,
-          id: getTokenId(tokenAddress, tokenIndex),
-          index: tokenIndex,
-
-          fetched: false
-        };
-      });
-    })
-    .then((tokens) => tokens.filter((token) => token))
-    .then((tokens) => {
-      const randomAddress = sha3(`${Date.now()}`).substr(0, 42);
-
-      return fetchTokensBalances(api, tokens, [randomAddress])
-        .then((_balances) => {
-          const balances = _balances[randomAddress];
-
-          return tokens.filter(({ id }) => balances[id].eq(0));
-        });
-    });
-}
-
-export function fetchTokensInfo (api, tokenReg, tokenIndexes) {
-  const requests = tokenIndexes.map((tokenIndex) => {
-    const tokenCalldata = tokenReg.getCallData(tokenReg.instance.token, {}, [tokenIndex]);
-
-    return { to: tokenReg.address, data: tokenCalldata };
-  });
-
-  const calls = requests.map((req) => api.eth.call(req));
-  const imagesPromise = fetchTokensImages(api, tokenReg, tokenIndexes);
-
-  return Promise.all(calls)
-    .then((results) => {
-      return imagesPromise.then((images) => [ results, images ]);
-    })
-    .then(([ results, images ]) => {
-      return results.map((rawTokenData, index) => {
-        const tokenIndex = tokenIndexes[index];
-        const tokenData = tokenReg.instance.token
-          .decodeOutput(rawTokenData)
-          .map((t) => t.value);
-
-        const [ address, tag, format, name ] = tokenData;
-        const image = images[index];
-
-        const token = {
-          address,
-          id: getTokenId(address, tokenIndex),
-          index: tokenIndex,
-
-          format: format.toString(),
-          image,
-          name,
-          tag,
-
-          fetched: true
-        };
-
-        return token;
-      });
-    });
-}
-
-export function fetchTokensImages (api, tokenReg, tokenIndexes) {
-  const requests = tokenIndexes.map((tokenIndex) => {
-    const metaCalldata = tokenReg.getCallData(tokenReg.instance.meta, {}, [tokenIndex, 'IMG']);
-
-    return { to: tokenReg.address, data: metaCalldata };
-  });
-
-  const calls = requests.map((req) => api.eth.call(req));
-
-  return Promise.all(calls)
-    .then((results) => {
-      return results.map((rawImage) => {
-        const image = tokenReg.instance.meta.decodeOutput(rawImage)[0].value;
-
-        return hashToImageUrl(image);
-      });
-    });
-}
-
-/**
- * `updates` should be in the shape:
- *   {
- *     [ who ]: [ tokenId ]  // Array of tokens to updates
- *   }
- *
- * Returns a Promise resolved with the balances in the shape:
- *   {
- *     [ who ]: { [ tokenId ]: BigNumber }  // The balances of `who`
- *   }
- */
-export function fetchAccountsBalances (api, tokens, updates) {
-  const accountAddresses = Object.keys(updates);
-
-  // Updates for the ETH balances
-  const ethUpdates = accountAddresses
-    .filter((accountAddress) => {
-      return updates[accountAddress].find((tokenId) => tokenId === ETH_TOKEN.id);
-    })
-    .reduce((nextUpdates, accountAddress) => {
-      nextUpdates[accountAddress] = [ETH_TOKEN.id];
-      return nextUpdates;
-    }, {});
-
-  // Updates for Tokens balances
-  const tokenUpdates = Object.keys(updates)
-    .reduce((nextUpdates, accountAddress) => {
-      const tokenIds = updates[accountAddress].filter((tokenId) => tokenId !== ETH_TOKEN.id);
-
-      if (tokenIds.length > 0) {
-        nextUpdates[accountAddress] = tokenIds;
-      }
-
-      return nextUpdates;
-    }, {});
-
-  let ethBalances = {};
-  let tokensBalances = {};
-
-  const ethPromise = fetchEthBalances(api, Object.keys(ethUpdates))
-    .then((_ethBalances) => {
-      ethBalances = _ethBalances;
-    });
-
-  const tokenPromise = Object.keys(tokenUpdates)
-    .reduce((tokenPromise, accountAddress) => {
-      const tokenIds = tokenUpdates[accountAddress];
-      const updateTokens = tokens
-        .filter((t) => tokenIds.includes(t.id));
-
-      return tokenPromise
-        .then(() => fetchTokensBalances(api, updateTokens, [ accountAddress ]))
-        .then((balances) => {
-          tokensBalances[accountAddress] = balances[accountAddress];
-        });
-    }, Promise.resolve());
-
-  return Promise.all([ ethPromise, tokenPromise ])
-    .then(() => {
-      const balances = Object.assign({}, tokensBalances);
-
-      Object.keys(ethBalances).forEach((accountAddress) => {
-        if (!balances[accountAddress]) {
-          balances[accountAddress] = {};
-        }
-
-        balances[accountAddress] = Object.assign(
-          {},
-          balances[accountAddress],
-          ethBalances[accountAddress]
-        );
-      });
-
-      return balances;
-    });
-}
-
-function fetchEthBalances (api, accountAddresses) {
-  const promises = accountAddresses
-    .map((accountAddress) => api.eth.getBalance(accountAddress));
-
-  return Promise.all(promises)
-    .then((balancesArray) => {
-      return balancesArray.reduce((balances, balance, index) => {
-        balances[accountAddresses[index]] = {
-          [ETH_TOKEN.id]: balance
-        };
-
-        return balances;
-      }, {});
-    });
-}
-
-function fetchTokensBalances (api, tokens, accountAddresses) {
-  const tokenAddresses = tokens.map((t) => t.address);
-  const tokensBalancesCallData = encode(
-    api,
-    [ 'address[]', 'address[]' ],
-    [ accountAddresses, tokenAddresses ]
-  );
-
-  return api.eth
-    .call({ data: tokensBalancesBytecode + tokensBalancesCallData })
-    .then((result) => {
-      const rawBalances = decodeArray(api, 'uint[]', result);
-      const balances = {};
-
-      accountAddresses.forEach((accountAddress, accountIndex) => {
-        const balance = {};
-        const preIndex = accountIndex * tokenAddresses.length;
-
-        tokenAddresses.forEach((tokenAddress, tokenIndex) => {
-          const index = preIndex + tokenIndex;
-          const token = tokens[tokenIndex];
-
-          balance[token.id] = rawBalances[index];
-        });
-
-        balances[accountAddress] = balance;
-      });
-
-      return balances;
-    });
-}
-
-function getTokenId (...args) {
-  return sha3(args.join('')).slice(0, 10);
-}
-
-function encode (api, types, values) {
-  return api.util.abiEncode(
-    null,
-    types,
-    values
-  ).replace('0x', '');
-}
-
-function decodeArray (api, type, data) {
-  return api.util
-    .abiDecode(
-      [type],
-      [
-        '0x',
-        (32).toString(16).padStart(64, 0),
-        data.replace('0x', '')
-      ].join('')
-    )[0]
-    .map((t) => t.value);
-}
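
Note on the .gitlab-ci.yml hunk above: with `||`, push-cargo.sh was skipped whenever either $JS_FILES_MODIFIED or $JS_OLD_FILES_MODIFIED was zero; with `&&` it is skipped only when both are zero, so a change in either the js or the js-old tree now triggers the Cargo update, matching the first line of the commit message. A minimal shell sketch of the corrected gating follows; it is illustrative only, not part of the patch, and assumes the two counters are exported by earlier CI steps:

#!/bin/bash
# Illustrative check mirroring the js-release job: push to Cargo only
# when at least one of the two JS trees has modified files.
JS_FILES_MODIFIED=${JS_FILES_MODIFIED:-0}
JS_OLD_FILES_MODIFIED=${JS_OLD_FILES_MODIFIED:-0}

if [ "$JS_FILES_MODIFIED" -eq 0 ] && [ "$JS_OLD_FILES_MODIFIED" -eq 0 ]; then
  echo "Skipping Cargo update since no JS files modified."
else
  ./js/scripts/push-cargo.sh
fi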