diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/ec40ac0974c1_add_chain_syncer.py b/apps/cic-cache/cic_cache/db/migrations/default/versions/6604de4203e2_add_chain_syncer.py similarity index 72% rename from apps/cic-eth/cic_eth/db/migrations/default/versions/ec40ac0974c1_add_chain_syncer.py rename to apps/cic-cache/cic_cache/db/migrations/default/versions/6604de4203e2_add_chain_syncer.py index a278997f..55a5ecca 100644 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/ec40ac0974c1_add_chain_syncer.py +++ b/apps/cic-cache/cic_cache/db/migrations/default/versions/6604de4203e2_add_chain_syncer.py @@ -1,8 +1,8 @@ """Add chain syncer -Revision ID: ec40ac0974c1 -Revises: 6ac7a1dadc46 -Create Date: 2021-02-23 06:10:19.246304 +Revision ID: 6604de4203e2 +Revises: 63b629f14a85 +Create Date: 2021-04-01 08:10:29.156243 """ from alembic import op @@ -14,15 +14,15 @@ from chainsyncer.db.migrations.sqlalchemy import ( # revision identifiers, used by Alembic. -revision = 'ec40ac0974c1' -down_revision = '6ac7a1dadc46' +revision = '6604de4203e2' +down_revision = '63b629f14a85' branch_labels = None depends_on = None - def upgrade(): chainsyncer_upgrade(0, 0, 1) def downgrade(): chainsyncer_downgrade(0, 0, 1) + diff --git a/apps/contract-migration/scripts/cmd/__init__.py b/apps/cic-cache/cic_cache/runnable/__init__.py similarity index 100% rename from apps/contract-migration/scripts/cmd/__init__.py rename to apps/cic-cache/cic_cache/runnable/__init__.py diff --git a/apps/cic-cache/cic_cache/runnable/daemons/__init__.py b/apps/cic-cache/cic_cache/runnable/daemons/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/cic-cache/cic_cache/runnable/daemons/filters/__init__.py b/apps/cic-cache/cic_cache/runnable/daemons/filters/__init__.py new file mode 100644 index 00000000..e8c9c530 --- /dev/null +++ b/apps/cic-cache/cic_cache/runnable/daemons/filters/__init__.py @@ -0,0 +1 @@ +from .erc20 import * diff --git a/apps/cic-cache/cic_cache/runnable/daemons/filters/base.py b/apps/cic-cache/cic_cache/runnable/daemons/filters/base.py new file mode 100644 index 00000000..5a57a482 --- /dev/null +++ b/apps/cic-cache/cic_cache/runnable/daemons/filters/base.py @@ -0,0 +1,2 @@ +class SyncFilter: + pass diff --git a/apps/cic-cache/cic_cache/runnable/daemons/filters/erc20.py b/apps/cic-cache/cic_cache/runnable/daemons/filters/erc20.py new file mode 100644 index 00000000..e2312a71 --- /dev/null +++ b/apps/cic-cache/cic_cache/runnable/daemons/filters/erc20.py @@ -0,0 +1,72 @@ +# standard imports +import logging + +# external imports +from chainlib.eth.erc20 import ERC20 +from chainlib.eth.address import ( + to_checksum_address, + ) +from chainlib.eth.error import RequestMismatchException +from chainlib.status import Status +from cic_eth_registry.erc20 import ERC20Token +from cic_eth_registry.error import ( + NotAContractError, + ContractMismatchError, + ) + +# local imports +from .base import SyncFilter +from cic_cache import db as cic_cache_db + +logg = logging.getLogger().getChild(__name__) + + +class ERC20TransferFilter(SyncFilter): + + def __init__(self, chain_spec): + self.chain_spec = chain_spec + + + # TODO: Verify token in declarator / token index + def filter(self, conn, block, tx, db_session=None): + logg.debug('filter {} {}'.format(block, tx)) + token = None + try: + token = ERC20Token(self.chain_spec, conn, tx.inputs[0]) + except NotAContractError: + logg.debug('not a contract {}'.format(tx.inputs[0])) + return False + except ContractMismatchError: + logg.debug('not an 
erc20 token {}'.format(tx.inputs[0])) + return False + + transfer_data = None + try: + transfer_data = ERC20.parse_transfer_request(tx.payload) + except RequestMismatchException: + logg.debug('erc20 match but not a transfer, skipping') + return False + + token_sender = tx.outputs[0] + token_recipient = transfer_data[0] + token_value = transfer_data[1] + + logg.debug('matched erc20 token transfer {} ({}) to {} value {}'.format(token.name, token.address, transfer_data[0], transfer_data[1])) + + cic_cache_db.add_transaction( + db_session, + tx.hash, + block.number, + tx.index, + to_checksum_address(token_sender), + to_checksum_address(token_recipient), + token.address, + token.address, + token_value, + token_value, + tx.status == Status.SUCCESS, + block.timestamp, + ) + db_session.flush() + + return True diff --git a/apps/cic-cache/cic_cache/runnable/server.py b/apps/cic-cache/cic_cache/runnable/daemons/server.py similarity index 100% rename from apps/cic-cache/cic_cache/runnable/server.py rename to apps/cic-cache/cic_cache/runnable/daemons/server.py diff --git a/apps/cic-cache/cic_cache/runnable/tasker.py b/apps/cic-cache/cic_cache/runnable/daemons/tasker.py similarity index 100% rename from apps/cic-cache/cic_cache/runnable/tasker.py rename to apps/cic-cache/cic_cache/runnable/daemons/tasker.py diff --git a/apps/cic-cache/cic_cache/runnable/daemons/tracker.py b/apps/cic-cache/cic_cache/runnable/daemons/tracker.py new file mode 100644 index 00000000..cee54144 --- /dev/null +++ b/apps/cic-cache/cic_cache/runnable/daemons/tracker.py @@ -0,0 +1,112 @@ +# standard imports +import os +import sys +import logging +import time +import argparse +import sys +import re + +# third-party imports +import confini +import celery +import rlp +import cic_base.config +import cic_base.log +import cic_base.argparse +import cic_base.rpc +from cic_eth_registry import CICRegistry +from cic_eth_registry.error import UnknownContractError +from chainlib.chain import ChainSpec +from chainlib.eth.constant import ZERO_ADDRESS +from chainlib.connection import RPCConnection +from chainlib.eth.block import ( + block_latest, + ) +from hexathon import ( + strip_0x, + ) +from chainsyncer.backend import SyncerBackend +from chainsyncer.driver import ( + HeadSyncer, + HistorySyncer, + ) +from chainsyncer.db.models.base import SessionBase + +# local imports +from cic_cache.db import dsn_from_config +from cic_cache.runnable.daemons.filters import ( + ERC20TransferFilter, + ) + +script_dir = os.path.realpath(os.path.dirname(__file__)) + +logg = cic_base.log.create() +argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template) +#argparser = cic_base.argparse.add(argparser, add_traffic_args, 'traffic') +args = cic_base.argparse.parse(argparser, logg) +config = cic_base.config.create(args.c, args, args.env_prefix) + +cic_base.config.log(config) + +dsn = dsn_from_config(config) + +SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG')) + +chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) + +#RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default') +cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER')) + + +def main(): + # Connect to blockchain with chainlib + rpc = RPCConnection.connect(chain_spec, 'default') + + o = block_latest() + r = rpc.do(o) + block_offset = int(strip_0x(r), 16) + 1 + + logg.debug('starting at block {}'.format(block_offset)) + + syncers = [] + + #if SyncerBackend.first(chain_spec): + # backend = SyncerBackend.initial(chain_spec, 
block_offset) + syncer_backends = SyncerBackend.resume(chain_spec, block_offset) + + if len(syncer_backends) == 0: + logg.info('found no backends to resume') + syncers.append(SyncerBackend.initial(chain_spec, block_offset)) + else: + for syncer_backend in syncer_backends: + logg.info('resuming sync session {}'.format(syncer_backend)) + + for syncer_backend in syncer_backends: + syncers.append(HistorySyncer(syncer_backend)) + + syncer_backend = SyncerBackend.live(chain_spec, block_offset+1) + syncers.append(HeadSyncer(syncer_backend)) + + trusted_addresses_src = config.get('CIC_TRUST_ADDRESS') + if trusted_addresses_src == None: + logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS') + sys.exit(1) + trusted_addresses = trusted_addresses_src.split(',') + for address in trusted_addresses: + logg.info('using trusted address {}'.format(address)) + + erc20_transfer_filter = ERC20TransferFilter(chain_spec) + + i = 0 + for syncer in syncers: + logg.debug('running syncer index {}'.format(i)) + syncer.add_filter(erc20_transfer_filter) + r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc) + sys.stderr.write("sync {} done at block {}\n".format(syncer, r)) + + i += 1 + + +if __name__ == '__main__': + main() diff --git a/apps/cic-cache/cic_cache/runnable/tracker.py b/apps/cic-cache/cic_cache/runnable/tracker.py deleted file mode 100644 index 3531e02f..00000000 --- a/apps/cic-cache/cic_cache/runnable/tracker.py +++ /dev/null @@ -1,339 +0,0 @@ -# standard imports -import sys -import os -import argparse -import logging -import time -import enum -import re - -# third-party imports -import confini -from cic_registry import CICRegistry -from cic_registry.chain import ( - ChainRegistry, - ChainSpec, - ) -#from cic_registry.bancor import BancorRegistryClient -from cic_registry.token import Token -from cic_registry.error import ( - UnknownContractError, - UnknownDeclarationError, - ) -from cic_registry.declaration import to_token_declaration -from web3.exceptions import BlockNotFound, TransactionNotFound -from websockets.exceptions import ConnectionClosedError -from requests.exceptions import ConnectionError -import web3 -from web3 import HTTPProvider, WebsocketProvider - -# local imports -from cic_cache import db -from cic_cache.db.models.base import SessionBase - -logging.basicConfig(level=logging.WARNING) -logg = logging.getLogger() -logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL) -logging.getLogger('urllib3').setLevel(logging.CRITICAL) -logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL) -logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL) -logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL) - -log_topics = { - 'transfer': '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', - 'convert': '0x7154b38b5dd31bb3122436a96d4e09aba5b323ae1fd580025fab55074334c095', - 'accountregistry_add': '0a3b0a4f4c6e53dce3dbcad5614cb2ba3a0fa7326d03c5d64b4fa2d565492737', - } - -config_dir = os.path.join('/usr/local/etc/cic-cache') - -argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') -argparser.add_argument('-c', type=str, default=config_dir, help='config root to use') -argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec') -argparser.add_argument('--trust-address', default=[], type=str, dest='trust_address', action='append', help='Set address as trust') -argparser.add_argument('--env-prefix', 
default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') -argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi') -argparser.add_argument('-v', help='be verbose', action='store_true') -argparser.add_argument('-vv', help='be more verbose', action='store_true') -args = argparser.parse_args(sys.argv[1:]) - -config_dir = os.path.join(args.c) -os.makedirs(config_dir, 0o777, True) - - -if args.v == True: - logging.getLogger().setLevel(logging.INFO) -elif args.vv == True: - logging.getLogger().setLevel(logging.DEBUG) - -config = confini.Config(config_dir, args.env_prefix) -config.process() -args_override = { - 'ETH_ABI_DIR': getattr(args, 'abi_dir'), - 'CIC_TRUST_ADDRESS': ",".join(getattr(args, 'trust_address', [])), - } -config.dict_override(args_override, 'cli flag') -config.censor('PASSWORD', 'DATABASE') -config.censor('PASSWORD', 'SSL') -logg.debug('config loaded from {}:\n{}'.format(config_dir, config)) - -# connect to database -dsn = db.dsn_from_config(config) -SessionBase.connect(dsn) - - -re_websocket = re.compile('^wss?://') -re_http = re.compile('^https?://') -blockchain_provider = config.get('ETH_PROVIDER') -if re.match(re_websocket, blockchain_provider) != None: - blockchain_provider = WebsocketProvider(blockchain_provider) -elif re.match(re_http, blockchain_provider) != None: - blockchain_provider = HTTPProvider(blockchain_provider) -else: - raise ValueError('unknown provider url {}'.format(blockchain_provider)) - -def web3_constructor(): - w3 = web3.Web3(blockchain_provider) - return (blockchain_provider, w3) - - -class RunStateEnum(enum.IntEnum): - INIT = 0 - RUN = 1 - TERMINATE = 9 - - -def rubberstamp(src): - return True - - -class Tracker: - - def __init__(self, chain_spec, trusts=[]): - self.block_height = 0 - self.tx_height = 0 - self.state = RunStateEnum.INIT - self.declarator_cache = {} - self.convert_enabled = False - self.trusts = trusts - self.chain_spec = chain_spec - self.declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', 'Declarator') - - - def __process_tx(self, w3, session, t, r, l, b): - token_value = int(l.data, 16) - token_sender = l.topics[1][-20:].hex() - token_recipient = l.topics[2][-20:].hex() - - #ts = ContractRegistry.get_address(t.address) - ts = CICRegistry.get_address(self.chain_spec, t.address()) - logg.info('add token transfer {} value {} from {} to {}'.format( - ts.symbol(), - token_value, - token_sender, - token_recipient, - ) - ) - - db.add_transaction( - session, - r.transactionHash.hex(), - r.blockNumber, - r.transactionIndex, - w3.toChecksumAddress(token_sender), - w3.toChecksumAddress(token_recipient), - t.address(), - t.address(), - token_value, - token_value, - r.status == 1, - b.timestamp, - ) - session.flush() - - - # TODO: simplify/ split up and/or comment, function is too long - def __process_convert(self, w3, session, t, r, l, b): - logg.warning('conversions are deactivated') - return -# token_source = l.topics[2][-20:].hex() -# token_source = w3.toChecksumAddress(token_source) -# token_destination = l.topics[3][-20:].hex() -# token_destination = w3.toChecksumAddress(token_destination) -# data_noox = l.data[2:] -# d = data_noox[:64] -# token_from_value = int(d, 16) -# d = data_noox[64:128] -# token_to_value = int(d, 16) -# token_trader = '0x' + data_noox[192-40:] -# -# #ts = ContractRegistry.get_address(token_source) -# ts = CICRegistry.get_address(CICRegistry.bancor_chain_spec, 
t.address()) -# #if ts == None: -# # ts = ContractRegistry.reserves[token_source] -# td = ContractRegistry.get_address(token_destination) -# #if td == None: -# # td = ContractRegistry.reserves[token_source] -# logg.info('add token convert {} -> {} value {} -> {} trader {}'.format( -# ts.symbol(), -# td.symbol(), -# token_from_value, -# token_to_value, -# token_trader, -# ) -# ) -# -# db.add_transaction( -# session, -# r.transactionHash.hex(), -# r.blockNumber, -# r.transactionIndex, -# w3.toChecksumAddress(token_trader), -# w3.toChecksumAddress(token_trader), -# token_source, -# token_destination, -# r.status == 1, -# b.timestamp, -# ) -# session.flush() - - - def check_token(self, address): - t = None - try: - t = CICRegistry.get_address(CICRegistry.default_chain_spec, address) - return t - except UnknownContractError: - logg.debug('contract {} not in registry'.format(address)) - - # If nothing was returned, we look up the token in the declarator - for trust in self.trusts: - logg.debug('look up declaration for contract {} with trust {}'.format(address, trust)) - fn = self.declarator.function('declaration') - # TODO: cache trust in LRUcache - declaration_array = fn(trust, address).call() - try: - declaration = to_token_declaration(trust, address, declaration_array, [rubberstamp]) - logg.debug('found declaration for token {} from trust address {}'.format(address, trust)) - except UnknownDeclarationError: - continue - - try: - c = w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=address) - t = CICRegistry.add_token(self.chain_spec, c) - break - except ValueError: - logg.error('declaration for {} validates as token, but location is not ERC20 compatible'.format(address)) - - return t - - - # TODO use input data instead of logs - def process(self, w3, session, block): - #self.refresh_registry(w3) - tx_count = w3.eth.getBlockTransactionCount(block.hash) - b = w3.eth.getBlock(block.hash) - for i in range(self.tx_height, tx_count): - tx = w3.eth.getTransactionByBlock(block.hash, i) - if tx.to == None: - logg.debug('block {} tx {} is contract creation tx, skipping'.format(block.number, i)) - continue - if len(w3.eth.getCode(tx.to)) == 0: - logg.debug('block {} tx {} not a contract tx, skipping'.format(block.number, i)) - continue - - t = self.check_token(tx.to) - if t != None and isinstance(t, Token): - r = w3.eth.getTransactionReceipt(tx.hash) - for l in r.logs: - logg.debug('block {} tx {} {} token log {} {}'.format(block.number, i, tx.hash.hex(), l.logIndex, l.topics[0].hex())) - if l.topics[0].hex() == log_topics['transfer']: - self.__process_tx(w3, session, t, r, l, b) - - # TODO: cache contracts in LRUcache - elif self.convert_enabled and tx.to == CICRegistry.get_contract(CICRegistry.default_chain_spec, 'Converter').address: - r = w3.eth.getTransactionReceipt(tx.hash) - for l in r.logs: - logg.info('block {} tx {} {} bancornetwork log {} {}'.format(block.number, i, tx.hash.hex(), l.logIndex, l.topics[0].hex())) - if l.topics[0].hex() == log_topics['convert']: - self.__process_convert(w3, session, t, r, l, b) - - session.execute("UPDATE tx_sync SET tx = '{}'".format(tx.hash.hex())) - session.commit() - self.tx_height += 1 - - - def __get_next_retry(self, backoff=False): - return 1 - - - def loop(self): - logg.info('starting at block {} tx index {}'.format(self.block_height, self.tx_height)) - self.state = RunStateEnum.RUN - while self.state == RunStateEnum.RUN: - (provider, w3) = web3_constructor() - session = SessionBase.create_session() - try: - block = 
w3.eth.getBlock(self.block_height) - self.process(w3, session, block) - self.block_height += 1 - self.tx_height = 0 - except BlockNotFound as e: - logg.debug('no block {} yet, zZzZ...'.format(self.block_height)) - time.sleep(self.__get_next_retry()) - except ConnectionClosedError as e: - logg.info('connection gone, retrying') - time.sleep(self.__get_next_retry(True)) - except OSError as e: - logg.error('cannot connect {}'.format(e)) - time.sleep(self.__get_next_retry(True)) - except Exception as e: - session.close() - raise(e) - session.close() - - - def load(self, w3): - session = SessionBase.create_session() - r = session.execute('SELECT tx FROM tx_sync').first() - if r != None: - if r[0] == '0x{0:0{1}X}'.format(0, 64): - logg.debug('last tx was zero-address, starting from scratch') - return - t = w3.eth.getTransaction(r[0]) - - self.block_height = t.blockNumber - self.tx_height = t.transactionIndex+1 - c = w3.eth.getBlockTransactionCount(t.blockHash.hex()) - logg.debug('last tx processed {} index {} (max index {})'.format(t.blockNumber, t.transactionIndex, c-1)) - if c == self.tx_height: - self.block_height += 1 - self.tx_height = 0 - session.close() - -(provider, w3) = web3_constructor() -trust = config.get('CIC_TRUST_ADDRESS', "").split(",") -chain_spec = args.i - -try: - w3.eth.chainId -except Exception as e: - logg.exception(e) - sys.stderr.write('cannot connect to evm node\n') - sys.exit(1) - -def main(): - chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) - - CICRegistry.init(w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec) - CICRegistry.add_path(config.get('ETH_ABI_DIR')) - chain_registry = ChainRegistry(chain_spec) - CICRegistry.add_chain_registry(chain_registry) - - t = Tracker(chain_spec, trust) - t.load(w3) - t.loop() - - -if __name__ == '__main__': - main() diff --git a/apps/cic-cache/cic_cache/version.py b/apps/cic-cache/cic_cache/version.py index 0bba8abf..338bb831 100644 --- a/apps/cic-cache/cic_cache/version.py +++ b/apps/cic-cache/cic_cache/version.py @@ -5,7 +5,7 @@ version = ( 0, 2, 0, - 'alpha.1', + 'alpha.2', ) version_object = semver.VersionInfo( diff --git a/apps/cic-cache/config/docker/cic.ini b/apps/cic-cache/config/docker/cic.ini new file mode 100644 index 00000000..1fd98612 --- /dev/null +++ b/apps/cic-cache/config/docker/cic.ini @@ -0,0 +1,4 @@ +[cic] +chain_spec = +registry_address = +trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C diff --git a/apps/cic-cache/config/docker/database.ini b/apps/cic-cache/config/docker/database.ini index 1b94bb57..db1f5086 100644 --- a/apps/cic-cache/config/docker/database.ini +++ b/apps/cic-cache/config/docker/database.ini @@ -6,4 +6,4 @@ HOST=localhost PORT=63432 ENGINE=postgresql DRIVER=psycopg2 -DEBUG=1 +DEBUG=0 diff --git a/apps/cic-cache/config/docker/eth.ini b/apps/cic-cache/config/docker/eth.ini index 879415c7..d61e9647 100644 --- a/apps/cic-cache/config/docker/eth.ini +++ b/apps/cic-cache/config/docker/eth.ini @@ -1,3 +1,2 @@ [eth] provider = ws://localhost:63546 -chain_id = 8996 diff --git a/apps/cic-cache/config/docker/syncer.ini b/apps/cic-cache/config/docker/syncer.ini new file mode 100644 index 00000000..452fb81c --- /dev/null +++ b/apps/cic-cache/config/docker/syncer.ini @@ -0,0 +1,2 @@ +[syncer] +loop_interval = 1 diff --git a/apps/cic-cache/config/eth.ini b/apps/cic-cache/config/eth.ini index 57dab806..98b831e5 100644 --- a/apps/cic-cache/config/eth.ini +++ b/apps/cic-cache/config/eth.ini @@ -1,7 +1,2 @@ [eth] provider = ws://localhost:8545 -#ttp_provider = http://localhost:8545 
-#provider = http://localhost:8545 -gas_provider_address = -#chain_id = -abi_dir = /usr/local/share/cic/solidity/abi diff --git a/apps/cic-cache/config/syncer.ini b/apps/cic-cache/config/syncer.ini new file mode 100644 index 00000000..42e7cd88 --- /dev/null +++ b/apps/cic-cache/config/syncer.ini @@ -0,0 +1,2 @@ +[syncer] +loop_interval = 5 diff --git a/apps/cic-cache/docker/Dockerfile b/apps/cic-cache/docker/Dockerfile index e759dc49..b4e4aebc 100644 --- a/apps/cic-cache/docker/Dockerfile +++ b/apps/cic-cache/docker/Dockerfile @@ -17,7 +17,7 @@ RUN apt-get update && \ # Copy shared requirements from top of mono-repo RUN echo "copying root req file ${root_requirement_file}" -RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a44 +RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a58 COPY cic-cache/requirements.txt ./ COPY cic-cache/setup.cfg \ diff --git a/apps/cic-cache/requirements.txt b/apps/cic-cache/requirements.txt index 26b4548a..5669eb42 100644 --- a/apps/cic-cache/requirements.txt +++ b/apps/cic-cache/requirements.txt @@ -1,10 +1,13 @@ +cic-base~=0.1.2a66 alembic==1.4.2 -confini~=0.3.6b2 +confini~=0.3.6rc3 uwsgi==2.0.19.1 moolb~=0.1.0 -cic-registry~=0.5.3a4 +cic-eth-registry~=0.5.4a13 SQLAlchemy==1.3.20 semver==2.13.0 psycopg2==2.8.6 celery==4.4.7 redis==3.5.3 +chainlib~=0.0.2a10 +chainsyncer[sql]~=0.0.2a1 diff --git a/apps/cic-cache/setup.cfg b/apps/cic-cache/setup.cfg index 07ceec1e..4616a475 100644 --- a/apps/cic-cache/setup.cfg +++ b/apps/cic-cache/setup.cfg @@ -29,11 +29,13 @@ packages = cic_cache.db cic_cache.db.models cic_cache.runnable + cic_cache.runnable.daemons + cic_cache.runnable.daemons.filters scripts = ./scripts/migrate.py [options.entry_points] console_scripts = - cic-cache-trackerd = cic_cache.runnable.tracker:main - cic-cache-serverd = cic_cache.runnable.server:main - cic-cache-taskerd = cic_cache.runnable.tasker:main + cic-cache-trackerd = cic_cache.runnable.daemons.tracker:main + cic-cache-serverd = cic_cache.runnable.daemons.server:main + cic-cache-taskerd = cic_cache.runnable.daemons.tasker:main diff --git a/apps/cic-eth/cic_eth/admin/nonce.py b/apps/cic-eth/cic_eth/admin/nonce.py index 5da7e877..b150cbed 100644 --- a/apps/cic-eth/cic_eth/admin/nonce.py +++ b/apps/cic-eth/cic_eth/admin/nonce.py @@ -5,11 +5,13 @@ import logging import celery from chainlib.chain import ChainSpec from chainlib.eth.tx import unpack +from chainqueue.query import get_tx +from chainqueue.state import set_cancel +from chainqueue.db.models.otx import Otx +from chainqueue.db.models.tx import TxCache # local imports from cic_eth.db.models.base import SessionBase -from cic_eth.db.models.otx import Otx -from cic_eth.db.models.tx import TxCache from cic_eth.db.models.nonce import Nonce from cic_eth.admin.ctrl import ( lock_send, @@ -17,14 +19,8 @@ from cic_eth.admin.ctrl import ( lock_queue, unlock_queue, ) -from cic_eth.queue.tx import ( - get_tx, - set_cancel, - ) -from cic_eth.queue.tx import create as queue_create -from cic_eth.eth.gas import ( - create_check_gas_task, - ) +from cic_eth.queue.tx import queue_create +from cic_eth.eth.gas import create_check_gas_task celery_app = celery.current_app logg = logging.getLogger() @@ -50,8 +46,8 @@ def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1): chain_spec = ChainSpec.from_chain_str(chain_str) tx_brief = get_tx(tx_hash_orig_hex) - tx_raw = bytes.fromhex(tx_brief['signed_tx'][2:]) - tx = unpack(tx_raw, chain_spec.chain_id()) + tx_raw = bytes.fromhex(strip_0x(tx_brief['signed_tx'][2:])) + tx = 
unpack(tx_raw, chain_spec) nonce = tx_brief['nonce'] address = tx['from'] @@ -71,8 +67,8 @@ def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1): tx_hashes = [] txs = [] for otx in otxs: - tx_raw = bytes.fromhex(otx.signed_tx[2:]) - tx_new = unpack(tx_raw, chain_spec.chain_id()) + tx_raw = bytes.fromhex(strip_0x(otx.signed_tx)) + tx_new = unpack(tx_raw, chain_spec) tx_previous_hash_hex = tx_new['hash'] tx_previous_nonce = tx_new['nonce'] diff --git a/apps/cic-eth/cic_eth/api/api_admin.py b/apps/cic-eth/cic_eth/api/api_admin.py index bcc6174e..a0c804f2 100644 --- a/apps/cic-eth/cic_eth/api/api_admin.py +++ b/apps/cic-eth/cic_eth/api/api_admin.py @@ -22,23 +22,21 @@ from hexathon import ( add_0x, ) from chainlib.eth.gas import balance - -# local imports -from cic_eth.db.models.base import SessionBase -from cic_eth.db.models.role import AccountRole -from cic_eth.db.models.otx import Otx -from cic_eth.db.models.tx import TxCache -from cic_eth.db.models.nonce import Nonce -from cic_eth.db.enum import ( +from chainqueue.db.enum import ( StatusEnum, StatusBits, is_alive, is_error_status, status_str, ) +from chainqueue.error import TxStateChangeError + +# local imports +from cic_eth.db.models.base import SessionBase +from cic_eth.db.models.role import AccountRole +from cic_eth.db.models.nonce import Nonce from cic_eth.error import InitializationError -from cic_eth.db.error import TxStateChangeError -from cic_eth.queue.tx import get_tx +from cic_eth.queue.query import get_tx app = celery.current_app @@ -92,7 +90,7 @@ class AdminApi: def get_lock(self): s_lock = celery.signature( - 'cic_eth.queue.tx.get_lock', + 'cic_eth.queue.lock.get_lock', [], queue=self.queue, ) @@ -134,11 +132,13 @@ class AdminApi: return s_have.apply_async() - def resend(self, tx_hash_hex, chain_str, in_place=True, unlock=False): + def resend(self, tx_hash_hex, chain_spec, in_place=True, unlock=False): + logg.debug('resend {}'.format(tx_hash_hex)) s_get_tx_cache = celery.signature( - 'cic_eth.queue.tx.get_tx_cache', + 'cic_eth.queue.query.get_tx_cache', [ + chain_spec.asdict(), tx_hash_hex, ], queue=self.queue, @@ -154,9 +154,9 @@ class AdminApi: raise NotImplementedError('resend as new not yet implemented') s = celery.signature( - 'cic_eth.eth.tx.resend_with_higher_gas', + 'cic_eth.eth.gas.resend_with_higher_gas', [ - chain_str, + chain_spec.asdict(), None, 1.01, ], @@ -164,7 +164,7 @@ class AdminApi: ) s_manual = celery.signature( - 'cic_eth.queue.tx.set_manual', + 'cic_eth.queue.state.set_manual', [ tx_hash_hex, ], @@ -176,7 +176,7 @@ class AdminApi: s_gas = celery.signature( 'cic_eth.admin.ctrl.unlock_send', [ - chain_str, + chain_spec.asdict(), tx_dict['sender'], ], queue=self.queue, @@ -187,8 +187,9 @@ class AdminApi: def check_nonce(self, address): s = celery.signature( - 'cic_eth.queue.tx.get_account_tx', + 'cic_eth.queue.query.get_account_tx', [ + chain_spec.asdict(), address, True, False, @@ -203,8 +204,9 @@ class AdminApi: last_nonce = -1 for k in txs.keys(): s_get_tx = celery.signature( - 'cic_eth.queue.tx.get_tx', + 'cic_eth.queue.query.get_tx', [ + chain_spec.asdict(), k, ], queue=self.queue, @@ -218,7 +220,7 @@ class AdminApi: blocking_tx = k blocking_nonce = nonce_otx elif nonce_otx - last_nonce > 1: - logg.error('nonce gap; {} followed {}'.format(nonce_otx, last_nonce)) + logg.error('nonce gap; {} followed {} for account {}'.format(nonce_otx, last_nonce, tx['from'])) blocking_tx = k blocking_nonce = nonce_otx break @@ -242,8 +244,9 @@ class AdminApi: def fix_nonce(self, address, nonce, chain_spec): s = 
celery.signature( - 'cic_eth.queue.tx.get_account_tx', + 'cic_eth.queue.query.get_account_tx', [ + chain_spec.asdict(), address, True, False, @@ -293,8 +296,9 @@ class AdminApi: """ last_nonce = -1 s = celery.signature( - 'cic_eth.queue.tx.get_account_tx', + 'cic_eth.queue.query.get_account_tx', [ + chain_spec.asdict(), address, ], queue=self.queue, @@ -305,17 +309,20 @@ class AdminApi: for tx_hash in txs.keys(): errors = [] s = celery.signature( - 'cic_eth.queue.tx.get_tx_cache', - [tx_hash], + 'cic_eth.queue.query.get_tx_cache', + [ + chain_spec.asdict(), + tx_hash, + ], queue=self.queue, ) tx_dict = s.apply_async().get() if tx_dict['sender'] == address: if tx_dict['nonce'] - last_nonce > 1: - logg.error('nonce gap; {} followed {} for tx {}'.format(tx_dict['nonce'], last_nonce, tx_dict['hash'])) + logg.error('nonce gap; {} followed {} for address {} tx {}'.format(tx_dict['nonce'], last_nonce, tx_dict['sender'], tx_hash)) errors.append('nonce') elif tx_dict['nonce'] == last_nonce: - logg.warning('nonce {} duplicate in tx {}'.format(tx_dict['nonce'], tx_dict['hash'])) + logg.info('nonce {} duplicate for address {} in tx {}'.format(tx_dict['nonce'], tx_dict['sender'], tx_hash)) last_nonce = tx_dict['nonce'] if not include_sender: logg.debug('skipping sender tx {}'.format(tx_dict['tx_hash'])) @@ -366,12 +373,16 @@ class AdminApi: #tx_hash = self.w3.keccak(hexstr=tx_raw).hex() s = celery.signature( - 'cic_eth.queue.tx.get_tx_cache', - [tx_hash], + 'cic_eth.queue.query.get_tx_cache', + [ + chain_spec.asdict(), + tx_hash, + ], queue=self.queue, ) - tx = s.apply_async().get() + t = s.apply_async() + tx = t.get() source_token = None if tx['source_token'] != ZERO_ADDRESS: @@ -480,15 +491,17 @@ class AdminApi: tx['destination_token_symbol'] = destination_token.symbol() tx['recipient_token_balance'] = source_token.function('balanceOf')(tx['recipient']).call() - tx['network_status'] = 'Not submitted' + # TODO: this can mean either not submitted or culled, need to check other txs with same nonce to determine which + tx['network_status'] = 'Not in node' r = None try: o = transaction(tx_hash) r = self.rpc.do(o) + if r != None: + tx['network_status'] = 'Mempool' except Exception as e: logg.warning('(too permissive exception handler, please fix!) 
{}'.format(e)) - tx['network_status'] = 'Mempool' if r != None: try: @@ -515,14 +528,15 @@ class AdminApi: r = self.rpc.do(o) tx['recipient_gas_balance'] = r - tx_unpacked = unpack(bytes.fromhex(tx['signed_tx'][2:]), chain_spec.chain_id()) + tx_unpacked = unpack(bytes.fromhex(strip_0x(tx['signed_tx'])), chain_spec) tx['gas_price'] = tx_unpacked['gasPrice'] tx['gas_limit'] = tx_unpacked['gas'] tx['data'] = tx_unpacked['data'] s = celery.signature( - 'cic_eth.queue.tx.get_state_log', + 'cic_eth.queue.state.get_state_log', [ + chain_spec.asdict(), tx_hash, ], queue=self.queue, diff --git a/apps/cic-eth/cic_eth/api/api_task.py b/apps/cic-eth/cic_eth/api/api_task.py index cbe5c391..52aedee5 100644 --- a/apps/cic-eth/cic_eth/api/api_task.py +++ b/apps/cic-eth/cic_eth/api/api_task.py @@ -37,7 +37,7 @@ class Api: self.callback_param = callback_param self.callback_task = callback_task self.queue = queue - logg.info('api using queue {}'.format(self.queue)) + logg.debug('api using queue {}'.format(self.queue)) self.callback_success = None self.callback_error = None if callback_queue == None: @@ -92,8 +92,10 @@ class Api: queue=self.queue, ) s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', - [], + 'cic_eth.eth.nonce.reserve_nonce', + [ + self.chain_spec.asdict(), + ], queue=self.queue, ) s_tokens = celery.signature( @@ -154,8 +156,10 @@ class Api: queue=self.queue, ) s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', - [], + 'cic_eth.eth.nonce.reserve_nonce', + [ + self.chain_spec.asdict(), + ], queue=self.queue, ) s_tokens = celery.signature( @@ -213,8 +217,9 @@ class Api: queue=self.queue, ) s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ + self.chain_spec.asdict(), from_address, ], queue=self.queue, @@ -359,8 +364,9 @@ class Api: if register: s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ + self.chain_spec.asdict(), 'ACCOUNT_REGISTRY_WRITER', ], queue=self.queue, @@ -397,14 +403,15 @@ class Api: queue=self.queue, ) s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ + self.chain_spec.asdict(), 'GAS_GIFTER', ], queue=self.queue, ) s_refill = celery.signature( - 'cic_eth.eth.tx.refill_gas', + 'cic_eth.eth.gas.refill_gas', [ self.chain_spec.asdict(), ], @@ -439,8 +446,9 @@ class Api: """ offset = 0 s_local = celery.signature( - 'cic_eth.queue.tx.get_account_tx', + 'cic_eth.queue.query.get_account_tx', [ + self.chain_spec.asdict(), address, ], queue=self.queue, diff --git a/apps/cic-eth/cic_eth/db/__init__.py b/apps/cic-eth/cic_eth/db/__init__.py index 1a549508..62731220 100644 --- a/apps/cic-eth/cic_eth/db/__init__.py +++ b/apps/cic-eth/cic_eth/db/__init__.py @@ -11,10 +11,6 @@ logg = logging.getLogger() # an Engine, which the Session will use for connection # resources -# TODO: Remove the package exports, all models should be imported using full path -from .models.otx import Otx -from .models.convert import TxConvertTransfer - def dsn_from_config(config): """Generate a dsn string from the provided config dict. 
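For readers tracing the database wiring in this changeset: the hunk above keeps dsn_from_config() as the single place a DSN is assembled from the confini [database] section (the daemons call SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))). A minimal sketch of such a helper follows; it is illustrative only, not the implementation shipped in cic-eth or cic-cache, and it assumes DATABASE_NAME, DATABASE_USER and DATABASE_PASSWORD keys in addition to the HOST/PORT/ENGINE/DRIVER/DEBUG keys visible in the database.ini diffs.

def dsn_from_config(config):
    # build an SQLAlchemy-style DSN, e.g. postgresql+psycopg2://user:pass@host:port/name
    scheme = config.get('DATABASE_ENGINE')
    if config.get('DATABASE_DRIVER') != None:
        scheme = '{}+{}'.format(scheme, config.get('DATABASE_DRIVER'))
    return '{}://{}:{}@{}:{}/{}'.format(
        scheme,
        config.get('DATABASE_USER'),      # assumed key, not shown in this diff
        config.get('DATABASE_PASSWORD'),  # assumed key, not shown in this diff
        config.get('DATABASE_HOST'),
        config.get('DATABASE_PORT'),
        config.get('DATABASE_NAME'),      # assumed key, not shown in this diff
        )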
diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/0ec0d6d1e785_add_chainqueue.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/0ec0d6d1e785_add_chainqueue.py new file mode 100644 index 00000000..09217067 --- /dev/null +++ b/apps/cic-eth/cic_eth/db/migrations/default/versions/0ec0d6d1e785_add_chainqueue.py @@ -0,0 +1,29 @@ +"""Add chainqueue + +Revision ID: 0ec0d6d1e785 +Revises: +Create Date: 2021-04-02 18:30:55.398388 + +""" +from alembic import op +import sqlalchemy as sa + +from chainqueue.db.migrations.sqlalchemy import ( + chainqueue_upgrade, + chainqueue_downgrade, + ) + +# revision identifiers, used by Alembic. +revision = '0ec0d6d1e785' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + chainqueue_upgrade(0, 0, 1) + + +def downgrade(): + chainqueue_downgrade(0, 0, 1) + diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/1f1b3b641d08_roles.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/1f1b3b641d08_roles.py new file mode 100644 index 00000000..35fed2a2 --- /dev/null +++ b/apps/cic-eth/cic_eth/db/migrations/default/versions/1f1b3b641d08_roles.py @@ -0,0 +1,29 @@ +"""Roles + +Revision ID: 1f1b3b641d08 +Revises: 9c420530eeb2 +Create Date: 2021-04-02 18:40:27.787631 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '1f1b3b641d08' +down_revision = '9c420530eeb2' +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + 'account_role', + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('tag', sa.Text, nullable=False, unique=True), + sa.Column('address_hex', sa.String(42), nullable=False), + ) + + +def downgrade(): + op.drop_table('account_role') diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/2a07b543335e_add_new_syncer_table.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/2a07b543335e_add_new_syncer_table.py deleted file mode 100644 index 30ddb96f..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/2a07b543335e_add_new_syncer_table.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Add new syncer table - -Revision ID: 2a07b543335e -Revises: a2e2aab8f331 -Create Date: 2020-12-27 09:35:44.017981 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '2a07b543335e' -down_revision = 'a2e2aab8f331' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'blockchain_sync', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('blockchain', sa.String, nullable=False), - sa.Column('block_start', sa.Integer, nullable=False, default=0), - sa.Column('tx_start', sa.Integer, nullable=False, default=0), - sa.Column('block_cursor', sa.Integer, nullable=False, default=0), - sa.Column('tx_cursor', sa.Integer, nullable=False, default=0), - sa.Column('block_target', sa.Integer, nullable=True), - sa.Column('date_created', sa.DateTime, nullable=False), - sa.Column('date_updated', sa.DateTime), - ) - - -def downgrade(): - op.drop_table('blockchain_sync') diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/3b693afd526a_nonce_reservation.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/3b693afd526a_nonce_reservation.py deleted file mode 100644 index 580d0345..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/3b693afd526a_nonce_reservation.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Nonce reservation - -Revision ID: 3b693afd526a -Revises: f738d9962fdf -Create Date: 2021-03-05 07:09:50.898728 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '3b693afd526a' -down_revision = 'f738d9962fdf' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'nonce_task_reservation', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('address_hex', sa.String(42), nullable=False), - sa.Column('nonce', sa.Integer, nullable=False), - sa.Column('key', sa.String, nullable=False), - sa.Column('date_created', sa.DateTime, nullable=False), - ) - - -def downgrade(): - op.drop_table('nonce_task_reservation') diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/49b348246d70_add_nonce_index.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/49b348246d70_add_nonce_index.py deleted file mode 100644 index 3abee1b6..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/49b348246d70_add_nonce_index.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Add nonce index - -Revision ID: 49b348246d70 -Revises: 52c7c59cd0b1 -Create Date: 2020-12-19 09:45:36.186446 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '49b348246d70' -down_revision = '52c7c59cd0b1' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'nonce', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('address_hex', sa.String(42), nullable=False, unique=True), - sa.Column('nonce', sa.Integer, nullable=False), - ) - - -def downgrade(): - op.drop_table('nonce') diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/52c7c59cd0b1_add_account_roles.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/52c7c59cd0b1_add_account_roles.py deleted file mode 100644 index 7aa74059..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/52c7c59cd0b1_add_account_roles.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Add account roles - -Revision ID: 52c7c59cd0b1 -Revises: 9c4bd7491015 -Create Date: 2020-12-19 07:21:38.249237 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '52c7c59cd0b1' -down_revision = '9c4bd7491015' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'account_role', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('tag', sa.Text, nullable=False, unique=True), - sa.Column('address_hex', sa.String(42), nullable=False), - ) - pass - - -def downgrade(): - op.drop_table('account_role') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/f738d9962fdf_debug_output.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/5ca4b77ce205_debug.py similarity index 73% rename from apps/cic-eth/cic_eth/db/migrations/default/versions/f738d9962fdf_debug_output.py rename to apps/cic-eth/cic_eth/db/migrations/default/versions/5ca4b77ce205_debug.py index 378f2965..5014fb2d 100644 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/f738d9962fdf_debug_output.py +++ b/apps/cic-eth/cic_eth/db/migrations/default/versions/5ca4b77ce205_debug.py @@ -1,8 +1,8 @@ -"""debug output +"""Debug -Revision ID: f738d9962fdf -Revises: ec40ac0974c1 -Create Date: 2021-03-04 08:32:43.281214 +Revision ID: 5ca4b77ce205 +Revises: 75d4767b3031 +Create Date: 2021-04-02 18:42:12.257244 """ from alembic import op @@ -10,8 +10,8 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = 'f738d9962fdf' -down_revision = 'ec40ac0974c1' +revision = '5ca4b77ce205' +down_revision = '75d4767b3031' branch_labels = None depends_on = None @@ -24,9 +24,7 @@ def upgrade(): sa.Column('description', sa.String, nullable=False), sa.Column('date_created', sa.DateTime, nullable=False), ) - pass def downgrade(): op.drop_table('debug') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/6ac7a1dadc46_add_otx_state_log.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/6ac7a1dadc46_add_otx_state_log.py deleted file mode 100644 index f15834d9..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/6ac7a1dadc46_add_otx_state_log.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Add otx state log - -Revision ID: 6ac7a1dadc46 -Revises: 89e1e9baa53c -Create Date: 2021-01-30 13:59:49.022373 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '6ac7a1dadc46' -down_revision = '89e1e9baa53c' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'otx_state_log', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False), - sa.Column('date', sa.DateTime, nullable=False), - sa.Column('status', sa.Integer, nullable=False), - ) - - -def downgrade(): - op.drop_table('otx_state_log') diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/71708e943dbd_add_attempts_and_version_log_for_otx.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/71708e943dbd_add_attempts_and_version_log_for_otx.py deleted file mode 100644 index 3be687eb..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/71708e943dbd_add_attempts_and_version_log_for_otx.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Add attempts and version log for otx - -Revision ID: 71708e943dbd -Revises: 7e8d7626e38f -Create Date: 2020-09-26 14:41:19.298651 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '71708e943dbd' -down_revision = '7e8d7626e38f' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'otx_attempts', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False), - sa.Column('date', sa.DateTime, nullable=False), - ) - pass - - -def downgrade(): - op.drop_table('otx_attempts') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/89e1e9baa53c_add_account_lock.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/75d4767b3031_lock.py similarity index 74% rename from apps/cic-eth/cic_eth/db/migrations/postgresql/versions/89e1e9baa53c_add_account_lock.py rename to apps/cic-eth/cic_eth/db/migrations/default/versions/75d4767b3031_lock.py index 4b1c4401..656fdd06 100644 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/89e1e9baa53c_add_account_lock.py +++ b/apps/cic-eth/cic_eth/db/migrations/default/versions/75d4767b3031_lock.py @@ -1,8 +1,8 @@ -"""Add account lock +"""Lock -Revision ID: 89e1e9baa53c -Revises: 2a07b543335e -Create Date: 2021-01-27 19:57:36.793882 +Revision ID: 75d4767b3031 +Revises: 1f1b3b641d08 +Create Date: 2021-04-02 18:41:20.864265 """ from alembic import op @@ -10,8 +10,8 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = '89e1e9baa53c' -down_revision = '2a07b543335e' +revision = '75d4767b3031' +down_revision = '1f1b3b641d08' branch_labels = None depends_on = None @@ -24,10 +24,11 @@ def upgrade(): sa.Column('blockchain', sa.String), sa.Column("flags", sa.BIGINT(), nullable=False, default=0), sa.Column("date_created", sa.DateTime, nullable=False), - sa.Column("otx_id", sa.Integer, nullable=True), + sa.Column("otx_id", sa.Integer, sa.ForeignKey('otx.id'), nullable=True), ) op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True) + def downgrade(): op.drop_index('idx_chain_address') op.drop_table('lock') diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/7cb65b893934_add_blocknumber_pointer.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/7cb65b893934_add_blocknumber_pointer.py deleted file mode 100644 index 74a97f83..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/7cb65b893934_add_blocknumber_pointer.py +++ /dev/null @@ -1,31 +0,0 @@ -"""add blocknumber pointer - -Revision ID: 7cb65b893934 -Revises: 8593fa1ca0f4 -Create Date: 2020-09-24 19:29:13.543648 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '7cb65b893934' -down_revision = '8593fa1ca0f4' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'watcher_state', - sa.Column('block_number', sa.Integer) - ) - conn = op.get_bind() - conn.execute('INSERT INTO watcher_state (block_number) VALUES (0);') - pass - - -def downgrade(): - op.drop_table('watcher_state') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/7e8d7626e38f_add_block_sync.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/7e8d7626e38f_add_block_sync.py deleted file mode 100644 index 52f29a7b..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/7e8d7626e38f_add_block_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -"""Add block sync - -Revision ID: 7e8d7626e38f -Revises: cd2052be6db2 -Create Date: 2020-09-26 11:12:27.818524 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '7e8d7626e38f' -down_revision = 'cd2052be6db2' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'block_sync', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('blockchain', sa.String, nullable=False, unique=True), - sa.Column('block_height_backlog', sa.Integer, nullable=False, default=0), - sa.Column('tx_height_backlog', sa.Integer, nullable=False, default=0), - sa.Column('block_height_session', sa.Integer, nullable=False, default=0), - sa.Column('tx_height_session', sa.Integer, nullable=False, default=0), - sa.Column('block_height_head', sa.Integer, nullable=False, default=0), - sa.Column('tx_height_head', sa.Integer, nullable=False, default=0), - sa.Column('date_created', sa.DateTime, nullable=False), - sa.Column('date_updated', sa.DateTime), - ) - op.drop_table('watcher_state') - pass - - -def downgrade(): - op.drop_table('block_sync') - op.create_table( - 'watcher_state', - sa.Column('block_number', sa.Integer) - ) - conn = op.get_bind() - conn.execute('INSERT INTO watcher_state (block_number) VALUES (0);') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/8593fa1ca0f4_add_transaction_queue.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/8593fa1ca0f4_add_transaction_queue.py deleted file mode 100644 index e9f9cf6a..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/8593fa1ca0f4_add_transaction_queue.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Add transaction queue - -Revision ID: 8593fa1ca0f4 -Revises: -Create Date: 2020-09-22 21:56:42.117047 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '8593fa1ca0f4' -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'otx', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('date_created', sa.DateTime, nullable=False), - sa.Column('nonce', sa.Integer, nullable=False), - sa.Column('tx_hash', sa.String(66), nullable=False), - sa.Column('signed_tx', sa.Text, nullable=False), - sa.Column('status', sa.Integer, nullable=False, default=-9), - sa.Column('block', sa.Integer), - ) - op.create_index('idx_otx_tx', 'otx', ['tx_hash'], unique=True) - - -def downgrade(): - op.drop_index('idx_otx_tx') - op.drop_table('otx') diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/89e1e9baa53c_add_account_lock.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/89e1e9baa53c_add_account_lock.py deleted file mode 100644 index 4b1c4401..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/89e1e9baa53c_add_account_lock.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Add account lock - -Revision ID: 89e1e9baa53c -Revises: 2a07b543335e -Create Date: 2021-01-27 19:57:36.793882 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '89e1e9baa53c' -down_revision = '2a07b543335e' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'lock', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column("address", sa.String(42), nullable=True), - sa.Column('blockchain', sa.String), - sa.Column("flags", sa.BIGINT(), nullable=False, default=0), - sa.Column("date_created", sa.DateTime, nullable=False), - sa.Column("otx_id", sa.Integer, nullable=True), - ) - op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True) - -def downgrade(): - op.drop_index('idx_chain_address') - op.drop_table('lock') diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/3b693afd526a_nonce_reservation.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/9c420530eeb2_nonce.py similarity index 54% rename from apps/cic-eth/cic_eth/db/migrations/postgresql/versions/3b693afd526a_nonce_reservation.py rename to apps/cic-eth/cic_eth/db/migrations/default/versions/9c420530eeb2_nonce.py index 580d0345..3dffac98 100644 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/3b693afd526a_nonce_reservation.py +++ b/apps/cic-eth/cic_eth/db/migrations/default/versions/9c420530eeb2_nonce.py @@ -1,8 +1,8 @@ -"""Nonce reservation +"""Nonce -Revision ID: 3b693afd526a -Revises: f738d9962fdf -Create Date: 2021-03-05 07:09:50.898728 +Revision ID: 9c420530eeb2 +Revises: b125cbf81e32 +Create Date: 2021-04-02 18:38:56.459334 """ from alembic import op @@ -10,15 +10,22 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. -revision = '3b693afd526a' -down_revision = 'f738d9962fdf' +revision = '9c420530eeb2' +down_revision = 'b125cbf81e32' branch_labels = None depends_on = None def upgrade(): op.create_table( - 'nonce_task_reservation', + 'nonce', + sa.Column('id', sa.Integer, primary_key=True), + sa.Column('address_hex', sa.String(42), nullable=False, unique=True), + sa.Column('nonce', sa.Integer, nullable=False), + ) + + op.create_table( + 'nonce_task_reservation', sa.Column('id', sa.Integer, primary_key=True), sa.Column('address_hex', sa.String(42), nullable=False), sa.Column('nonce', sa.Integer, nullable=False), @@ -29,3 +36,4 @@ def upgrade(): def downgrade(): op.drop_table('nonce_task_reservation') + op.drop_table('nonce') diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/9c4bd7491015_rename_block_sync_table.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/9c4bd7491015_rename_block_sync_table.py deleted file mode 100644 index f58721b2..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/9c4bd7491015_rename_block_sync_table.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Rename block sync table - -Revision ID: 9c4bd7491015 -Revises: 9daa16518a91 -Create Date: 2020-10-15 23:45:56.306898 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '9c4bd7491015' -down_revision = '9daa16518a91' -branch_labels = None -depends_on = None - - -def upgrade(): - op.rename_table('block_sync', 'otx_sync') - pass - - -def downgrade(): - op.rename_table('otx_sync', 'block_sync') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/9daa16518a91_add_tx_sync_state.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/9daa16518a91_add_tx_sync_state.py deleted file mode 100644 index 2c6cc0fe..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/9daa16518a91_add_tx_sync_state.py +++ /dev/null @@ -1,30 +0,0 @@ -"""add tx sync state - -Revision ID: 9daa16518a91 -Revises: e3b5330ee71c -Create Date: 2020-10-10 14:43:18.699276 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '9daa16518a91' -down_revision = 'e3b5330ee71c' -branch_labels = None -depends_on = None - - -def upgrade(): -# op.create_table( -# 'tx_sync', -# sa.Column('tx', sa.String(66), nullable=False), -# ) -# op.execute("INSERT INTO tx_sync VALUES('0x0000000000000000000000000000000000000000000000000000000000000000')") - pass - - -def downgrade(): -# op.drop_table('tx_sync') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/a2e2aab8f331_add_date_accessed_to_txcache.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/a2e2aab8f331_add_date_accessed_to_txcache.py deleted file mode 100644 index f82f2615..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/a2e2aab8f331_add_date_accessed_to_txcache.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Add date accessed to txcache - -Revision ID: a2e2aab8f331 -Revises: 49b348246d70 -Create Date: 2020-12-24 18:58:06.137812 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'a2e2aab8f331' -down_revision = '49b348246d70' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column( - 'tx_cache', - sa.Column( - 'date_checked', - sa.DateTime, - nullable=False - ) - ) - pass - - -def downgrade(): - # drop does not work withs qlite - #op.drop_column('tx_cache', 'date_checked') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/cd2052be6db2_convert_tx_index.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/aee12aeb47ec_convert.py similarity index 68% rename from apps/cic-eth/cic_eth/db/migrations/default/versions/cd2052be6db2_convert_tx_index.py rename to apps/cic-eth/cic_eth/db/migrations/default/versions/aee12aeb47ec_convert.py index 5b59d734..21aced4e 100644 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/cd2052be6db2_convert_tx_index.py +++ b/apps/cic-eth/cic_eth/db/migrations/default/versions/aee12aeb47ec_convert.py @@ -1,8 +1,8 @@ -"""convert tx index +"""Convert -Revision ID: cd2052be6db2 -Revises: 7cb65b893934 -Create Date: 2020-09-24 21:20:51.580500 +Revision ID: aee12aeb47ec +Revises: 5ca4b77ce205 +Create Date: 2021-04-02 18:42:45.233356 """ from alembic import op @@ -10,8 +10,8 @@ import sqlalchemy as sa # revision identifiers, used by Alembic. 
-revision = 'cd2052be6db2' -down_revision = '7cb65b893934' +revision = 'aee12aeb47ec' +down_revision = '5ca4b77ce205' branch_labels = None depends_on = None @@ -20,10 +20,8 @@ def upgrade(): op.create_table( 'tx_convert_transfer', sa.Column('id', sa.Integer, primary_key=True), - #sa.Column('approve_tx_hash', sa.String(66), nullable=False, unique=True), sa.Column('convert_tx_hash', sa.String(66), nullable=False, unique=True), sa.Column('transfer_tx_hash', sa.String(66), unique=True), -# sa.Column('holder_address', sa.String(42), nullable=False), sa.Column('recipient_address', sa.String(42), nullable=False), ) op.create_index('idx_tx_convert_address', 'tx_convert_transfer', ['recipient_address']) diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/ec40ac0974c1_add_chain_syncer.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/b125cbf81e32_add_chain_syncer.py similarity index 72% rename from apps/cic-eth/cic_eth/db/migrations/postgresql/versions/ec40ac0974c1_add_chain_syncer.py rename to apps/cic-eth/cic_eth/db/migrations/default/versions/b125cbf81e32_add_chain_syncer.py index a278997f..8f3be98f 100644 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/ec40ac0974c1_add_chain_syncer.py +++ b/apps/cic-eth/cic_eth/db/migrations/default/versions/b125cbf81e32_add_chain_syncer.py @@ -1,12 +1,13 @@ """Add chain syncer -Revision ID: ec40ac0974c1 -Revises: 6ac7a1dadc46 -Create Date: 2021-02-23 06:10:19.246304 +Revision ID: b125cbf81e32 +Revises: 0ec0d6d1e785 +Create Date: 2021-04-02 18:36:44.459603 """ from alembic import op import sqlalchemy as sa + from chainsyncer.db.migrations.sqlalchemy import ( chainsyncer_upgrade, chainsyncer_downgrade, @@ -14,15 +15,15 @@ from chainsyncer.db.migrations.sqlalchemy import ( # revision identifiers, used by Alembic. -revision = 'ec40ac0974c1' -down_revision = '6ac7a1dadc46' +revision = 'b125cbf81e32' +down_revision = '0ec0d6d1e785' branch_labels = None depends_on = None - def upgrade(): chainsyncer_upgrade(0, 0, 1) def downgrade(): chainsyncer_downgrade(0, 0, 1) + diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/df19f4e69676_add_tx_track.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/df19f4e69676_add_tx_track.py deleted file mode 100644 index 2c1ea138..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/df19f4e69676_add_tx_track.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Add tx tracker record - -Revision ID: df19f4e69676 -Revises: 71708e943dbd -Create Date: 2020-10-09 23:31:44.563498 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. 
-revision = 'df19f4e69676' -down_revision = '71708e943dbd' -branch_labels = None -depends_on = None - - -def upgrade(): -# op.create_table( -# 'tx', -# sa.Column('id', sa.Integer, primary_key=True), -# sa.Column('date_added', sa.DateTime, nullable=False), -# sa.Column('tx_hash', sa.String(66), nullable=False, unique=True), -# sa.Column('success', sa.Boolean(), nullable=False), -# ) - pass - - -def downgrade(): -# op.drop_table('tx') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/default/versions/e3b5330ee71c_.py b/apps/cic-eth/cic_eth/db/migrations/default/versions/e3b5330ee71c_.py deleted file mode 100644 index 6c9115b7..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/default/versions/e3b5330ee71c_.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Add cached values for tx - -Revision ID: e3b5330ee71c -Revises: df19f4e69676 -Create Date: 2020-10-10 00:17:07.094893 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'e3b5330ee71c' -down_revision = 'df19f4e69676' -branch_labels = None -depends_on = None - -def upgrade(): - op.create_table( - 'tx_cache', - sa.Column('id', sa.Integer, primary_key=True), -# sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=True), - sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=True), - sa.Column('date_created', sa.DateTime, nullable=False), - sa.Column('date_updated', sa.DateTime, nullable=False), - sa.Column('source_token_address', sa.String(42), nullable=False), - sa.Column('destination_token_address', sa.String(42), nullable=False), - sa.Column('sender', sa.String(42), nullable=False), - sa.Column('recipient', sa.String(42), nullable=False), - sa.Column('from_value', sa.NUMERIC(), nullable=False), - sa.Column('to_value', sa.NUMERIC(), nullable=True), - sa.Column('block_number', sa.BIGINT(), nullable=True), - sa.Column('tx_index', sa.Integer, nullable=True), - ) - -def downgrade(): - op.drop_table('tx_cache') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/alembic.ini b/apps/cic-eth/cic_eth/db/migrations/postgresql/alembic.ini deleted file mode 100644 index 1555f68c..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/alembic.ini +++ /dev/null @@ -1,85 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = . - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# timezone to use when rendering the date -# within the migration file as well as the filename. -# string value is passed to dateutil.tz.gettz() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the -# "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; this defaults -# to migrations/versions. 
When using multiple version -# directories, initial revisions must be specified with --version-path -# version_locations = %(here)s/bar %(here)s/bat migrations/versions - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -#sqlalchemy.url = driver://user:pass@localhost/dbname -sqlalchemy.url = postgresql+psycopg2://postgres@localhost:5432/cic-eth - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks=black -# black.type=console_scripts -# black.entrypoint=black -# black.options=-l 79 - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/env.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/env.py deleted file mode 100644 index 70518a2e..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/env.py +++ /dev/null @@ -1,77 +0,0 @@ -from logging.config import fileConfig - -from sqlalchemy import engine_from_config -from sqlalchemy import pool - -from alembic import context - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = None - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, - target_metadata=target_metadata, - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - connectable = engine_from_config( - config.get_section(config.config_ini_section), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/script.py.mako b/apps/cic-eth/cic_eth/db/migrations/postgresql/script.py.mako deleted file mode 100644 index 2c015630..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/2a07b543335e_add_new_syncer_table.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/2a07b543335e_add_new_syncer_table.py deleted file mode 100644 index 30ddb96f..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/2a07b543335e_add_new_syncer_table.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Add new syncer table - -Revision ID: 2a07b543335e -Revises: a2e2aab8f331 -Create Date: 2020-12-27 09:35:44.017981 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '2a07b543335e' -down_revision = 'a2e2aab8f331' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'blockchain_sync', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('blockchain', sa.String, nullable=False), - sa.Column('block_start', sa.Integer, nullable=False, default=0), - sa.Column('tx_start', sa.Integer, nullable=False, default=0), - sa.Column('block_cursor', sa.Integer, nullable=False, default=0), - sa.Column('tx_cursor', sa.Integer, nullable=False, default=0), - sa.Column('block_target', sa.Integer, nullable=True), - sa.Column('date_created', sa.DateTime, nullable=False), - sa.Column('date_updated', sa.DateTime), - ) - - -def downgrade(): - op.drop_table('blockchain_sync') diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/49b348246d70_add_nonce_index.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/49b348246d70_add_nonce_index.py deleted file mode 100644 index 3abee1b6..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/49b348246d70_add_nonce_index.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Add nonce index - -Revision ID: 49b348246d70 -Revises: 52c7c59cd0b1 -Create Date: 2020-12-19 09:45:36.186446 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '49b348246d70' -down_revision = '52c7c59cd0b1' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'nonce', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('address_hex', sa.String(42), nullable=False, unique=True), - sa.Column('nonce', sa.Integer, nullable=False), - ) - - -def downgrade(): - op.drop_table('nonce') diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/52c7c59cd0b1_add_account_roles.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/52c7c59cd0b1_add_account_roles.py deleted file mode 100644 index 7aa74059..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/52c7c59cd0b1_add_account_roles.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Add account roles - -Revision ID: 52c7c59cd0b1 -Revises: 9c4bd7491015 -Create Date: 2020-12-19 07:21:38.249237 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '52c7c59cd0b1' -down_revision = '9c4bd7491015' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'account_role', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('tag', sa.Text, nullable=False, unique=True), - sa.Column('address_hex', sa.String(42), nullable=False), - ) - pass - - -def downgrade(): - op.drop_table('account_role') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/6ac7a1dadc46_add_otx_state_log.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/6ac7a1dadc46_add_otx_state_log.py deleted file mode 100644 index f15834d9..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/6ac7a1dadc46_add_otx_state_log.py +++ /dev/null @@ -1,30 +0,0 @@ -"""Add otx state log - -Revision ID: 6ac7a1dadc46 -Revises: 89e1e9baa53c -Create Date: 2021-01-30 13:59:49.022373 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '6ac7a1dadc46' -down_revision = '89e1e9baa53c' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'otx_state_log', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False), - sa.Column('date', sa.DateTime, nullable=False), - sa.Column('status', sa.Integer, nullable=False), - ) - - -def downgrade(): - op.drop_table('otx_state_log') diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/71708e943dbd_add_attempts_and_version_log_for_otx.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/71708e943dbd_add_attempts_and_version_log_for_otx.py deleted file mode 100644 index 3be687eb..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/71708e943dbd_add_attempts_and_version_log_for_otx.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Add attempts and version log for otx - -Revision ID: 71708e943dbd -Revises: 7e8d7626e38f -Create Date: 2020-09-26 14:41:19.298651 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '71708e943dbd' -down_revision = '7e8d7626e38f' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'otx_attempts', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=False), - sa.Column('date', sa.DateTime, nullable=False), - ) - pass - - -def downgrade(): - op.drop_table('otx_attempts') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/7cb65b893934_add_blocknumber_pointer.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/7cb65b893934_add_blocknumber_pointer.py deleted file mode 100644 index 74a97f83..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/7cb65b893934_add_blocknumber_pointer.py +++ /dev/null @@ -1,31 +0,0 @@ -"""add blocknumber pointer - -Revision ID: 7cb65b893934 -Revises: 8593fa1ca0f4 -Create Date: 2020-09-24 19:29:13.543648 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '7cb65b893934' -down_revision = '8593fa1ca0f4' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'watcher_state', - sa.Column('block_number', sa.Integer) - ) - conn = op.get_bind() - conn.execute('INSERT INTO watcher_state (block_number) VALUES (0);') - pass - - -def downgrade(): - op.drop_table('watcher_state') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/7e8d7626e38f_add_block_sync.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/7e8d7626e38f_add_block_sync.py deleted file mode 100644 index 6924fa26..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/7e8d7626e38f_add_block_sync.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Add block sync - -Revision ID: 7e8d7626e38f -Revises: cd2052be6db2 -Create Date: 2020-09-26 11:12:27.818524 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '7e8d7626e38f' -down_revision = 'cd2052be6db2' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'block_sync', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('blockchain', sa.String, nullable=False, unique=True), - sa.Column('height_backlog', sa.Integer, nullable=False, default=0), - sa.Column('height_session', sa.Integer, nullable=False, default=0), - sa.Column('height_head', sa.Integer, nullable=False, default=0), - sa.Column('date_created', sa.DateTime, nullable=False), - sa.Column('date_updated', sa.DateTime), - ) - op.drop_table('watcher_state') - pass - - -def downgrade(): - op.drop_table('block_sync') - op.create_table( - 'watcher_state', - sa.Column('block_number', sa.Integer) - ) - conn = op.get_bind() - conn.execute('INSERT INTO watcher_state (block_number) VALUES (0);') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/8593fa1ca0f4_add_transaction_queue.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/8593fa1ca0f4_add_transaction_queue.py deleted file mode 100644 index e9f9cf6a..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/8593fa1ca0f4_add_transaction_queue.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Add transaction queue - -Revision ID: 8593fa1ca0f4 -Revises: -Create Date: 2020-09-22 21:56:42.117047 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = '8593fa1ca0f4' -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'otx', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('date_created', sa.DateTime, nullable=False), - sa.Column('nonce', sa.Integer, nullable=False), - sa.Column('tx_hash', sa.String(66), nullable=False), - sa.Column('signed_tx', sa.Text, nullable=False), - sa.Column('status', sa.Integer, nullable=False, default=-9), - sa.Column('block', sa.Integer), - ) - op.create_index('idx_otx_tx', 'otx', ['tx_hash'], unique=True) - - -def downgrade(): - op.drop_index('idx_otx_tx') - op.drop_table('otx') diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/9c4bd7491015_rename_block_sync_table.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/9c4bd7491015_rename_block_sync_table.py deleted file mode 100644 index f58721b2..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/9c4bd7491015_rename_block_sync_table.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Rename block sync table - -Revision ID: 9c4bd7491015 -Revises: 9daa16518a91 -Create Date: 2020-10-15 23:45:56.306898 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '9c4bd7491015' -down_revision = '9daa16518a91' -branch_labels = None -depends_on = None - - -def upgrade(): - op.rename_table('block_sync', 'otx_sync') - pass - - -def downgrade(): - op.rename_table('otx_sync', 'block_sync') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/9daa16518a91_add_tx_sync_state.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/9daa16518a91_add_tx_sync_state.py deleted file mode 100644 index 2c6cc0fe..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/9daa16518a91_add_tx_sync_state.py +++ /dev/null @@ -1,30 +0,0 @@ -"""add tx sync state - -Revision ID: 9daa16518a91 -Revises: e3b5330ee71c -Create Date: 2020-10-10 14:43:18.699276 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = '9daa16518a91' -down_revision = 'e3b5330ee71c' -branch_labels = None -depends_on = None - - -def upgrade(): -# op.create_table( -# 'tx_sync', -# sa.Column('tx', sa.String(66), nullable=False), -# ) -# op.execute("INSERT INTO tx_sync VALUES('0x0000000000000000000000000000000000000000000000000000000000000000')") - pass - - -def downgrade(): -# op.drop_table('tx_sync') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/a2e2aab8f331_add_date_accessed_to_txcache.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/a2e2aab8f331_add_date_accessed_to_txcache.py deleted file mode 100644 index 808a503c..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/a2e2aab8f331_add_date_accessed_to_txcache.py +++ /dev/null @@ -1,33 +0,0 @@ -"""Add date accessed to txcache - -Revision ID: a2e2aab8f331 -Revises: 49b348246d70 -Create Date: 2020-12-24 18:58:06.137812 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = 'a2e2aab8f331' -down_revision = '49b348246d70' -branch_labels = None -depends_on = None - - -def upgrade(): - op.add_column( - 'tx_cache', - sa.Column( - 'date_checked', - sa.DateTime, - nullable=False - ) - ) - pass - - -def downgrade(): - op.drop_column('tx_cache', 'date_checked') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/cd2052be6db2_convert_tx_index.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/cd2052be6db2_convert_tx_index.py deleted file mode 100644 index 5b59d734..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/cd2052be6db2_convert_tx_index.py +++ /dev/null @@ -1,34 +0,0 @@ -"""convert tx index - -Revision ID: cd2052be6db2 -Revises: 7cb65b893934 -Create Date: 2020-09-24 21:20:51.580500 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'cd2052be6db2' -down_revision = '7cb65b893934' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'tx_convert_transfer', - sa.Column('id', sa.Integer, primary_key=True), - #sa.Column('approve_tx_hash', sa.String(66), nullable=False, unique=True), - sa.Column('convert_tx_hash', sa.String(66), nullable=False, unique=True), - sa.Column('transfer_tx_hash', sa.String(66), unique=True), -# sa.Column('holder_address', sa.String(42), nullable=False), - sa.Column('recipient_address', sa.String(42), nullable=False), - ) - op.create_index('idx_tx_convert_address', 'tx_convert_transfer', ['recipient_address']) - - -def downgrade(): - op.drop_index('idx_tx_convert_address') - op.drop_table('tx_convert_transfer') diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/df19f4e69676_add_tx_track.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/df19f4e69676_add_tx_track.py deleted file mode 100644 index 2c1ea138..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/df19f4e69676_add_tx_track.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Add tx tracker record - -Revision ID: df19f4e69676 -Revises: 71708e943dbd -Create Date: 2020-10-09 23:31:44.563498 - -""" -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. -revision = 'df19f4e69676' -down_revision = '71708e943dbd' -branch_labels = None -depends_on = None - - -def upgrade(): -# op.create_table( -# 'tx', -# sa.Column('id', sa.Integer, primary_key=True), -# sa.Column('date_added', sa.DateTime, nullable=False), -# sa.Column('tx_hash', sa.String(66), nullable=False, unique=True), -# sa.Column('success', sa.Boolean(), nullable=False), -# ) - pass - - -def downgrade(): -# op.drop_table('tx') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/e3b5330ee71c_.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/e3b5330ee71c_.py deleted file mode 100644 index 3abafb73..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/e3b5330ee71c_.py +++ /dev/null @@ -1,37 +0,0 @@ -"""Add cached values for tx - -Revision ID: e3b5330ee71c -Revises: df19f4e69676 -Create Date: 2020-10-10 00:17:07.094893 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision = 'e3b5330ee71c' -down_revision = 'df19f4e69676' -branch_labels = None -depends_on = None - -def upgrade(): - op.create_table( - 'tx_cache', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=True), - sa.Column('date_created', sa.DateTime, nullable=False), - sa.Column('date_updated', sa.DateTime, nullable=False), - sa.Column('source_token_address', sa.String(42), nullable=False), - sa.Column('destination_token_address', sa.String(42), nullable=False), - sa.Column('sender', sa.String(42), nullable=False), - sa.Column('recipient', sa.String(42), nullable=False), - sa.Column('from_value', sa.NUMERIC(), nullable=False), - sa.Column('to_value', sa.NUMERIC(), nullable=True), - sa.Column('block_number', sa.BIGINT(), nullable=True), - sa.Column('tx_index', sa.Integer, nullable=True), - ) - -def downgrade(): - op.drop_table('tx_cache') - pass diff --git a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/f738d9962fdf_debug_output.py b/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/f738d9962fdf_debug_output.py deleted file mode 100644 index 378f2965..00000000 --- a/apps/cic-eth/cic_eth/db/migrations/postgresql/versions/f738d9962fdf_debug_output.py +++ /dev/null @@ -1,32 +0,0 @@ -"""debug output - -Revision ID: f738d9962fdf -Revises: ec40ac0974c1 -Create Date: 2021-03-04 08:32:43.281214 - -""" -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. -revision = 'f738d9962fdf' -down_revision = 'ec40ac0974c1' -branch_labels = None -depends_on = None - - -def upgrade(): - op.create_table( - 'debug', - sa.Column('id', sa.Integer, primary_key=True), - sa.Column('tag', sa.String, nullable=False), - sa.Column('description', sa.String, nullable=False), - sa.Column('date_created', sa.DateTime, nullable=False), - ) - pass - - -def downgrade(): - op.drop_table('debug') - pass diff --git a/apps/cic-eth/cic_eth/db/models/base.py b/apps/cic-eth/cic_eth/db/models/base.py index fc3541c5..9cdcabf7 100644 --- a/apps/cic-eth/cic_eth/db/models/base.py +++ b/apps/cic-eth/cic_eth/db/models/base.py @@ -10,6 +10,7 @@ from sqlalchemy.pool import ( StaticPool, QueuePool, AssertionPool, + NullPool, ) logg = logging.getLogger() @@ -64,6 +65,7 @@ class SessionBase(Model): if SessionBase.poolable: poolclass = QueuePool if pool_size > 1: + logg.info('db using queue pool') e = create_engine( dsn, max_overflow=pool_size*3, @@ -74,17 +76,22 @@ class SessionBase(Model): echo=debug, ) else: - if debug: + if pool_size == 0: + logg.info('db using nullpool') + poolclass = NullPool + elif debug: + logg.info('db using assertion pool') poolclass = AssertionPool else: + logg.info('db using static pool') poolclass = StaticPool - e = create_engine( dsn, poolclass=poolclass, echo=debug, ) else: + logg.info('db not poolable') e = create_engine( dsn, echo=debug, diff --git a/apps/cic-eth/cic_eth/db/models/lock.py b/apps/cic-eth/cic_eth/db/models/lock.py index 3737b893..d8dcb426 100644 --- a/apps/cic-eth/cic_eth/db/models/lock.py +++ b/apps/cic-eth/cic_eth/db/models/lock.py @@ -5,11 +5,11 @@ import logging # third-party imports from sqlalchemy import Column, String, Integer, DateTime, ForeignKey from chainlib.eth.constant import ZERO_ADDRESS +from chainqueue.db.models.tx import TxCache +from chainqueue.db.models.otx import Otx # local imports from cic_eth.db.models.base import SessionBase -from cic_eth.db.models.tx import TxCache -from cic_eth.db.models.otx import Otx logg = logging.getLogger() @@ -22,10 +22,12 @@ class 
Lock(SessionBase): __tablename__ = "lock" blockchain = Column(String) - address = Column(String, ForeignKey('tx_cache.sender')) + #address = Column(String, ForeignKey('tx_cache.sender')) + address = Column(String, ForeignKey(TxCache.sender)) flags = Column(Integer) date_created = Column(DateTime, default=datetime.datetime.utcnow) - otx_id = Column(Integer, ForeignKey('otx.id')) + otx_id = Column(Integer, ForeignKey(Otx.id)) + #otx_id = Column(Integer) def chain(self): diff --git a/apps/cic-eth/cic_eth/db/models/otx.py b/apps/cic-eth/cic_eth/db/models/otx.py deleted file mode 100644 index 6be5f53d..00000000 --- a/apps/cic-eth/cic_eth/db/models/otx.py +++ /dev/null @@ -1,680 +0,0 @@ -# standard imports -import datetime -import logging - -# external imports -from sqlalchemy import Column, Enum, String, Integer, DateTime, Text, or_, ForeignKey -from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method - -# local imports -from .base import SessionBase -from cic_eth.db.enum import ( - StatusEnum, - StatusBits, - status_str, - is_error_status, - ) -from cic_eth.db.error import TxStateChangeError - -logg = logging.getLogger() - - -class OtxStateLog(SessionBase): - - __tablename__ = 'otx_state_log' - - date = Column(DateTime, default=datetime.datetime.utcnow) - status = Column(Integer) - otx_id = Column(Integer, ForeignKey('otx.id')) - - - def __init__(self, otx): - self.otx_id = otx.id - self.status = otx.status - - -class Otx(SessionBase): - """Outgoing transactions with local origin. - - :param nonce: Transaction nonce - :type nonce: number - :param address: Ethereum address of recipient - NOT IN USE, REMOVE - :type address: str - :param tx_hash: Tranasction hash - :type tx_hash: str, 0x-hex - :param signed_tx: Signed raw transaction data - :type signed_tx: str, 0x-hex - """ - __tablename__ = 'otx' - - tracing = False - """Whether to enable queue state tracing""" - - nonce = Column(Integer) - date_created = Column(DateTime, default=datetime.datetime.utcnow) - tx_hash = Column(String(66)) - signed_tx = Column(Text) - status = Column(Integer) - block = Column(Integer) - - - def __set_status(self, status, session): - self.status |= status - session.add(self) - session.flush() - - - def __reset_status(self, status, session): - status_edit = ~status & self.status - self.status &= status_edit - session.add(self) - session.flush() - - - def __status_already_set(self, status): - r = bool(self.status & status) - if r: - logg.warning('status bit {} already set on {}'.format(status.name, self.tx_hash)) - return r - - - def __status_not_set(self, status): - r = not(self.status & status) - if r: - logg.warning('status bit {} not set on {}'.format(status.name, self.tx_hash)) - return r - - - def set_block(self, block, session=None): - """Set block number transaction was mined in. - - Only manipulates object, does not transaction or commit to backend. - - :param block: Block number - :type block: number - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. - """ - session = SessionBase.bind_session(session) - - if self.block != None: - SessionBase.release_session(session) - raise TxStateChangeError('Attempted set block {} when block was already {}'.format(block, self.block)) - self.block = block - session.add(self) - session.flush() - - SessionBase.release_session(session) - - - def waitforgas(self, session=None): - """Marks transaction as suspended pending gas funding. - - Only manipulates object, does not transaction or commit to backend. 
- - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. - """ - if self.__status_already_set(StatusBits.GAS_ISSUES): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('GAS_ISSUES cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if self.status & StatusBits.IN_NETWORK: - SessionBase.release_session(session) - raise TxStateChangeError('GAS_ISSUES cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.GAS_ISSUES, session) - self.__reset_status(StatusBits.QUEUED | StatusBits.DEFERRED, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def fubar(self, session=None): - """Marks transaction as "fubar." Any transaction marked this way is an anomaly and may be a symptom of a serious problem. - - Only manipulates object, does not transaction or commit to backend. - """ - if self.__status_already_set(StatusBits.UNKNOWN_ERROR): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('FUBAR cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if is_error_status(self.status): - SessionBase.release_session(session) - raise TxStateChangeError('FUBAR cannot be set on an entry with an error state already set ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.UNKNOWN_ERROR | StatusBits.FINAL, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def reject(self, session=None): - """Marks transaction as "rejected," which means the node rejected sending the transaction to the network. The nonce has not been spent, and the transaction should be replaced. - - Only manipulates object, does not transaction or commit to backend. - """ - if self.__status_already_set(StatusBits.NODE_ERROR): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('REJECTED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if self.status & StatusBits.IN_NETWORK: - SessionBase.release_session(session) - raise TxStateChangeError('REJECTED cannot be set on an entry already IN_NETWORK ({})'.format(status_str(self.status))) - if is_error_status(self.status): - SessionBase.release_session(session) - raise TxStateChangeError('REJECTED cannot be set on an entry with an error state already set ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.NODE_ERROR | StatusBits.FINAL, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def override(self, manual=False, session=None): - """Marks transaction as manually overridden. - - Only manipulates object, does not transaction or commit to backend. 
- """ - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if self.status & StatusBits.IN_NETWORK: - SessionBase.release_session(session) - raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry already IN_NETWORK ({})'.format(status_str(self.status))) - if self.status & StatusBits.OBSOLETE: - SessionBase.release_session(session) - raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry already OBSOLETE ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.OBSOLETE, session) - #if manual: - # self.__set_status(StatusBits.MANUAL, session) - self.__reset_status(StatusBits.QUEUED | StatusBits.IN_NETWORK, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def manual(self, session=None): - - session = SessionBase.bind_session(session) - - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.MANUAL, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - def retry(self, session=None): - """Marks transaction as ready to retry after a timeout following a sendfail or a completed gas funding. - - Only manipulates object, does not transaction or commit to backend. - - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. - """ - if self.__status_already_set(StatusBits.QUEUED): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('RETRY cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if not is_error_status(self.status) and not StatusBits.IN_NETWORK & self.status > 0: - SessionBase.release_session(session) - raise TxStateChangeError('RETRY cannot be set on an entry that has no error ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.QUEUED, session) - self.__reset_status(StatusBits.GAS_ISSUES, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def readysend(self, session=None): - """Marks transaction as ready for initial send attempt. - - Only manipulates object, does not transaction or commit to backend. - - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. 
- """ - if self.__status_already_set(StatusBits.QUEUED): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('READYSEND cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if is_error_status(self.status): - SessionBase.release_session(session) - raise TxStateChangeError('READYSEND cannot be set on an errored state ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.QUEUED, session) - self.__reset_status(StatusBits.GAS_ISSUES, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def sent(self, session=None): - """Marks transaction as having been sent to network. - - Only manipulates object, does not transaction or commit to backend. - - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. - """ - if self.__status_already_set(StatusBits.IN_NETWORK): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('SENT cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.IN_NETWORK, session) - self.__reset_status(StatusBits.DEFERRED | StatusBits.QUEUED | StatusBits.LOCAL_ERROR | StatusBits.NODE_ERROR, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def sendfail(self, session=None): - """Marks that an attempt to send the transaction to the network has failed. - - Only manipulates object, does not transaction or commit to backend. - - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. - """ - if self.__status_already_set(StatusBits.NODE_ERROR): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('SENDFAIL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if self.status & StatusBits.IN_NETWORK: - SessionBase.release_session(session) - raise TxStateChangeError('SENDFAIL cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status))) - - self.__set_status(StatusBits.LOCAL_ERROR | StatusBits.DEFERRED, session) - self.__reset_status(StatusBits.QUEUED | StatusBits.GAS_ISSUES, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def dequeue(self, session=None): - """Marks that a process to execute send attempt is underway - - Only manipulates object, does not transaction or commit to backend. - - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. 
- """ - if self.__status_not_set(StatusBits.QUEUED): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('QUEUED cannot be unset on an entry with FINAL state set ({})'.format(status_str(self.status))) - if self.status & StatusBits.IN_NETWORK: - SessionBase.release_session(session) - raise TxStateChangeError('QUEUED cannot be unset on an entry with IN_NETWORK state set ({})'.format(status_str(self.status))) - - self.__reset_status(StatusBits.QUEUED, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - - def minefail(self, block, session=None): - """Marks that transaction was mined but code execution did not succeed. - - Only manipulates object, does not transaction or commit to backend. - - :param block: Block number transaction was mined in. - :type block: number - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. - """ - if self.__status_already_set(StatusBits.NETWORK_ERROR): - return - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('REVERTED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if not self.status & StatusBits.IN_NETWORK: - SessionBase.release_session(session) - raise TxStateChangeError('REVERTED cannot be set on an entry without IN_NETWORK state set ({})'.format(status_str(self.status))) - - if block != None: - self.block = block - - self.__set_status(StatusBits.NETWORK_ERROR | StatusBits.FINAL, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def cancel(self, confirmed=False, session=None): - """Marks that the transaction has been succeeded by a new transaction with same nonce. - - If set to confirmed, the previous state must be OBSOLETED, and will transition to CANCELLED - a finalized state. Otherwise, the state must follow a non-finalized state, and will be set to OBSOLETED. - - Only manipulates object, does not transaction or commit to backend. - - :param confirmed: Whether transition is to a final state. - :type confirmed: bool - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. - """ - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('CANCEL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - - if confirmed: - if self.status > 0 and not self.status & StatusBits.OBSOLETE: - SessionBase.release_session(session) - raise TxStateChangeError('CANCEL can only be set on an entry marked OBSOLETE ({})'.format(status_str(self.status))) - self.__set_status(StatusEnum.CANCELLED, session) - else: - self.__set_status(StatusEnum.OBSOLETED, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - def success(self, block, session=None): - """Marks that transaction was successfully mined. - - Only manipulates object, does not transaction or commit to backend. - - :param block: Block number transaction was mined in. - :type block: number - :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist. 
- """ - - session = SessionBase.bind_session(session) - - if self.status & StatusBits.FINAL: - SessionBase.release_session(session) - raise TxStateChangeError('SUCCESS cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status))) - if not self.status & StatusBits.IN_NETWORK: - SessionBase.release_session(session) - raise TxStateChangeError('SUCCESS cannot be set on an entry without IN_NETWORK state set ({})'.format(status_str(self.status))) - if is_error_status(self.status): - SessionBase.release_session(session) - raise TxStateChangeError('SUCCESS cannot be set on an entry with error state set ({})'.format(status_str(self.status))) - - if block != None: - self.block = block - self.__set_status(StatusEnum.SUCCESS, session) - - if self.tracing: - self.__state_log(session=session) - - SessionBase.release_session(session) - - - @staticmethod - def get(status=0, limit=4096, status_exact=True, session=None): - """Returns outgoing transaction lists by status. - - Status may either be matched exactly, or be an upper bound of the integer value of the status enum. - - :param status: Status value to use in query - :type status: cic_eth.db.enum.StatusEnum - :param limit: Max results to return - :type limit: number - :param status_exact: Whether or not to perform exact status match - :type bool: - :returns: List of transaction hashes - :rtype: tuple, where first element is transaction hash - """ - e = None - - session = SessionBase.bind_session(session) - - if status_exact: - e = session.query(Otx.tx_hash).filter(Otx.status==status).order_by(Otx.date_created.asc()).limit(limit).all() - else: - e = session.query(Otx.tx_hash).filter(Otx.status<=status).order_by(Otx.date_created.asc()).limit(limit).all() - - SessionBase.release_session(session) - return e - - - @staticmethod - def load(tx_hash, session=None): - """Retrieves the outgoing transaction record by transaction hash. - - :param tx_hash: Transaction hash - :type tx_hash: str, 0x-hex - """ - session = SessionBase.bind_session(session) - - q = session.query(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - - SessionBase.release_session(session) - - return q.first() - - - @staticmethod - def account(account_address): - """Retrieves all transaction hashes for which the given Ethereum address is sender or recipient. - - :param account_address: Ethereum address to use in query. 
- :type account_address: str, 0x-hex - :returns: Outgoing transactions - :rtype: tuple, where first element is transaction hash - """ - session = Otx.create_session() - q = session.query(Otx.tx_hash) - q = q.join(TxCache) - q = q.filter(or_(TxCache.sender==account_address, TxCache.recipient==account_address)) - txs = q.all() - session.close() - return list(txs) - - - def __state_log(self, session): - l = OtxStateLog(self) - session.add(l) - - - # TODO: it is not safe to return otx here unless session has been passed in - @staticmethod - def add(nonce, address, tx_hash, signed_tx, session=None): - external_session = session != None - - session = SessionBase.bind_session(session) - - otx = Otx(nonce, address, tx_hash, signed_tx) - session.add(otx) - session.flush() - if otx.tracing: - otx.__state_log(session=session) - session.flush() - - SessionBase.release_session(session) - - if not external_session: - return None - - return otx - - - def __init__(self, nonce, address, tx_hash, signed_tx): - self.nonce = nonce - self.tx_hash = tx_hash - self.signed_tx = signed_tx - self.status = StatusEnum.PENDING - signed_tx_bytes = bytes.fromhex(signed_tx[2:]) - - # sender_address = address_hex_from_signed_tx(signed_tx_bytes) - # logg.debug('decoded tx {}'.format(sender_address)) - - - -# TODO: Most of the methods on this object are obsolete, but it contains a static function for retrieving "expired" outgoing transactions that should be moved to Otx instead. -class OtxSync(SessionBase): - """Obsolete - """ - __tablename__ = 'otx_sync' - - blockchain = Column(String) - block_height_backlog = Column(Integer) - tx_height_backlog = Column(Integer) - block_height_session = Column(Integer) - tx_height_session = Column(Integer) - block_height_head = Column(Integer) - tx_height_head = Column(Integer) - date_created = Column(DateTime, default=datetime.datetime.utcnow) - date_updated = Column(DateTime) - - - def backlog(self, block_height=None, tx_height=None): - #session = OtxSync.create_session() - if block_height != None: - if tx_height == None: - raise ValueError('tx height missing') - self.block_height_backlog = block_height - self.tx_height_backlog = tx_height - #session.add(self) - self.date_updated = datetime.datetime.utcnow() - #session.commit() - block_height = self.block_height_backlog - tx_height = self.tx_height_backlog - #session.close() - return (block_height, tx_height) - - - def session(self, block_height=None, tx_height=None): - #session = OtxSync.create_session() - if block_height != None: - if tx_height == None: - raise ValueError('tx height missing') - self.block_height_session = block_height - self.tx_height_session = tx_height - #session.add(self) - self.date_updated = datetime.datetime.utcnow() - #session.commit() - block_height = self.block_height_session - tx_height = self.tx_height_session - #session.close() - return (block_height, tx_height) - - - def head(self, block_height=None, tx_height=None): - #session = OtxSync.create_session() - if block_height != None: - if tx_height == None: - raise ValueError('tx height missing') - self.block_height_head = block_height - self.tx_height_head = tx_height - #session.add(self) - self.date_updated = datetime.datetime.utcnow() - #session.commit() - block_height = self.block_height_head - tx_height = self.tx_height_head - #session.close() - return (block_height, tx_height) - - - @hybrid_property - def synced(self): - #return self.block_height_session == self.block_height_backlog and self.tx_height_session == self.block_height_backlog - return 
self.block_height_session == self.block_height_backlog and self.tx_height_session == self.tx_height_backlog - - @staticmethod - def load(blockchain_string, session): - q = session.query(OtxSync) - q = q.filter(OtxSync.blockchain==blockchain_string) - return q.first() - - - @staticmethod - def latest(nonce): - session = SessionBase.create_session() - otx = session.query(Otx).filter(Otx.nonce==nonce).order_by(Otx.created.desc()).first() - session.close() - return otx - - - @staticmethod - def get_expired(datetime_threshold): - session = SessionBase.create_session() - q = session.query(Otx) - q = q.filter(Otx.date_created gas_balance: + s_nonce = celery.signature( + 'cic_eth.eth.nonce.reserve_nonce', + [ + address, + chain_spec_dict, + gas_provider, + ], + queue=queue, + ) + s_refill_gas = celery.signature( + 'cic_eth.eth.gas.refill_gas', + [ + chain_spec_dict, + ], + queue=queue, + ) + s_nonce.link(s_refill_gas) + s_nonce.apply_async() + wait_tasks = [] + for tx_hash in tx_hashes: + s = celery.signature( + 'cic_eth.queue.state.set_waitforgas', + [ + chain_spec_dict, + tx_hash, + ], + queue=queue, + ) + wait_tasks.append(s) + celery.group(wait_tasks)() + raise OutOfGasError('need to fill gas, required {}, had {}'.format(gas_required, gas_balance)) + + safe_gas = self.safe_gas_threshold_amount + if gas_balance < safe_gas: + s_nonce = celery.signature( + 'cic_eth.eth.nonce.reserve_nonce', + [ + address, + chain_spec_dict, + gas_provider, + ], + queue=queue, + ) + s_refill_gas = celery.signature( + 'cic_eth.eth.gas.refill_gas', + [ + chain_spec_dict, + ], + queue=queue, + ) + s_nonce.link(s_refill_gas) + s_nonce.apply_async() + logg.debug('requested refill from {} to {}'.format(gas_provider, address)) + ready_tasks = [] + for tx_hash in tx_hashes: + s = celery.signature( + 'cic_eth.queue.state.set_ready', + [ + chain_spec_dict, + tx_hash, + ], + queue=queue, + ) + ready_tasks.append(s) + celery.group(ready_tasks)() + + return txs + + +# TODO: if this method fails the nonce will be out of sequence. session needs to be extended to include the queue create, so that nonce is rolled back if the second sql query fails. Better yet, split each state change into separate tasks. +# TODO: method is too long, factor out code for clarity +@celery_app.task(bind=True, throws=(NotFoundEthException,), base=CriticalWeb3AndSignerTask) +def refill_gas(self, recipient_address, chain_spec_dict): + """Executes a native token transaction to fund the recipient's gas expenditures. + + :param recipient_address: Recipient in need of gas + :type recipient_address: str, 0x-hex + :param chain_spec_dict: Chain spec, dict representation + :type chain_spec_dict: dict + :raises AlreadyFillingGasError: A gas refill transaction for this address is already executing + :returns: Transaction hash.
+ :rtype: str, 0x-hex + """ + # essentials + chain_spec = ChainSpec.from_dict(chain_spec_dict) + queue = self.request.delivery_info.get('routing_key') + + # Determine value of gas tokens to send + # if an uncompleted gas refill for the same recipient already exists, we still need to spend the nonce + # however, we will perform a 0-value transaction instead + zero_amount = False + session = SessionBase.create_session() + status_filter = StatusBits.FINAL | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR + q = session.query(Otx.tx_hash) + q = q.join(TxCache) + q = q.filter(Otx.status.op('&')(StatusBits.FINAL.value)==0) + q = q.filter(TxCache.from_value!=0) + q = q.filter(TxCache.recipient==recipient_address) + c = q.count() + if c > 0: + logg.warning('already filling gas {}'.format(str(AlreadyFillingGasError(recipient_address)))) + zero_amount = True + session.flush() + + # finally determine the value to send + refill_amount = 0 + if not zero_amount: + refill_amount = self.safe_gas_refill_amount + + # determine sender + gas_provider = AccountRole.get_address('GAS_GIFTER', session=session) + session.flush() + + # set up evm RPC connection + rpc = RPCConnection.connect(chain_spec, 'default') + + # set up transaction builder + nonce_oracle = CustodialTaskNonceOracle(gas_provider, self.request.root_id, session=session) + gas_oracle = self.create_gas_oracle(rpc) + rpc_signer = RPCConnection.connect(chain_spec, 'signer') + c = Gas(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) + + # build and add transaction + logg.debug('tx send gas amount {} from provider {} to {}'.format(refill_amount, gas_provider, recipient_address)) + (tx_hash_hex, tx_signed_raw_hex) = c.create(gas_provider, recipient_address, refill_amount, tx_format=TxFormat.RLP_SIGNED) + logg.debug('adding queue refill gas tx {}'.format(tx_hash_hex)) + cache_task = 'cic_eth.eth.gas.cache_gas_data' + register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session) + + # add transaction to send queue + s_status = celery.signature( + 'cic_eth.queue.state.set_ready', + [ + chain_spec.asdict(), + tx_hash_hex, + ], + queue=queue, + ) + t = s_status.apply_async() + + return tx_signed_raw_hex + + +@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask) +def resend_with_higher_gas(self, txold_hash_hex, chain_spec_dict, gas=None, default_factor=1.1): + """Create a new transaction from an existing one with same nonce and higher gas price. 
+ + :param txold_hash_hex: Transaction to re-create + :type txold_hash_hex: str, 0x-hex + :param chain_spec_dict: Chain spec, dict representation + :type chain_spec_dict: dict + :param gas: Explicitly use the specified gas amount + :type gas: number + :param default_factor: Default factor by which to increment the gas price + :type default_factor: float + :raises NotLocalTxError: Transaction does not exist in the local queue + :returns: Transaction hash + :rtype: str, 0x-hex + """ + session = SessionBase.create_session() + + otx = Otx.load(txold_hash_hex, session) + if otx == None: + session.close() + raise NotLocalTxError(txold_hash_hex) + + chain_spec = ChainSpec.from_dict(chain_spec_dict) + + tx_signed_raw_bytes = bytes.fromhex(otx.signed_tx) + tx = unpack(tx_signed_raw_bytes, chain_spec) + logg.debug('resend otx {} {}'.format(tx, otx.signed_tx)) + + queue = self.request.delivery_info.get('routing_key') + + logg.debug('before {}'.format(tx)) + + rpc = RPCConnection.connect(chain_spec, 'default') + new_gas_price = gas + if new_gas_price == None: + o = price() + r = rpc.do(o) + current_gas_price = int(r, 16) + if tx['gasPrice'] > current_gas_price: + logg.info('Network gas price {} is lower than overdue tx gas price {}'.format(current_gas_price, tx['gasPrice'])) + #tx['gasPrice'] = int(tx['gasPrice'] * default_factor) + new_gas_price = tx['gasPrice'] + 1 + else: + new_gas_price = int(tx['gasPrice'] * default_factor) + #if gas_price > new_gas_price: + # tx['gasPrice'] = gas_price + #else: + # tx['gasPrice'] = new_gas_price + + + rpc_signer = RPCConnection.connect(chain_spec, 'signer') + gas_oracle = OverrideGasOracle(price=new_gas_price, conn=rpc) + + c = TxFactory(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle) + logg.debug('change gas price from old {} to new {} for tx {}'.format(tx['gasPrice'], new_gas_price, tx)) + tx['gasPrice'] = new_gas_price + (tx_hash_hex, tx_signed_raw_hex) = c.build_raw(tx) + queue_create( + chain_spec, + tx['nonce'], + tx['from'], + tx_hash_hex, + tx_signed_raw_hex, + session=session, + ) + TxCache.clone(txold_hash_hex, tx_hash_hex, session=session) + session.close() + + s = create_check_gas_task( + [tx_signed_raw_hex], + chain_spec, + tx['from'], + tx['gasPrice'] * tx['gas'], + [tx_hash_hex], + queue=queue, + ) + s.apply_async() + + return tx_hash_hex + + diff --git a/apps/cic-eth/cic_eth/eth/nonce.py b/apps/cic-eth/cic_eth/eth/nonce.py index fa15ad79..412e18ee 100644 --- a/apps/cic-eth/cic_eth/eth/nonce.py +++ b/apps/cic-eth/cic_eth/eth/nonce.py @@ -1,9 +1,23 @@ +# standard imports +import logging + +# external imports +import celery +from chainlib.eth.address import is_checksum_address + # local imports +from cic_eth.db.models.role import AccountRole +from cic_eth.db.models.base import SessionBase +from cic_eth.task import CriticalSQLAlchemyTask from cic_eth.db.models.nonce import ( Nonce, NonceReservation, ) +celery_app = celery.current_app +logg = logging.getLogger() + + class CustodialTaskNonceOracle(): """Ensures atomic nonce increments for all transactions across all tasks and threads.
@@ -30,3 +44,36 @@ class CustodialTaskNonceOracle(): """ r = NonceReservation.release(self.address, self.uuid, session=self.session) return r[1] + + +@celery_app.task(bind=True, base=CriticalSQLAlchemyTask) +def reserve_nonce(self, chained_input, chain_spec_dict, signer_address=None): + + self.log_banner() + + session = SessionBase.create_session() + + address = None + if signer_address == None: + address = chained_input + logg.debug('non-explicit address for reserve nonce, using arg head {}'.format(chained_input)) + else: + if is_checksum_address(signer_address): + address = signer_address + logg.debug('explicit address for reserve nonce {}'.format(signer_address)) + else: + address = AccountRole.get_address(signer_address, session=session) + logg.debug('role for reserve nonce {} -> {}'.format(signer_address, address)) + + if not is_checksum_address(address): + raise ValueError('invalid result when resolving address for nonce {}'.format(address)) + + root_id = self.request.root_id + r = NonceReservation.next(address, root_id, session=session) + logg.debug('nonce {} reserved for address {} task {}'.format(r[1], address, r[0])) + + session.commit() + + session.close() + + return chained_input diff --git a/apps/cic-eth/cic_eth/eth/tx.py b/apps/cic-eth/cic_eth/eth/tx.py index aef7d537..4883e872 100644 --- a/apps/cic-eth/cic_eth/eth/tx.py +++ b/apps/cic-eth/cic_eth/eth/tx.py @@ -1,68 +1,34 @@ # standard imports import logging -# third-party imports +# external imports import celery -import requests -from chainlib.eth.constant import ZERO_ADDRESS from chainlib.chain import ChainSpec from chainlib.eth.address import is_checksum_address -from chainlib.eth.gas import balance -from chainlib.eth.error import ( - EthException, - NotFoundEthException, - ) +from chainlib.eth.error import NotFoundEthException from chainlib.eth.tx import ( transaction, receipt, raw, - TxFormat, - unpack, ) from chainlib.connection import RPCConnection from chainlib.hash import keccak256_hex_to_hex -from chainlib.eth.gas import Gas -from chainlib.eth.contract import ( - abi_decode_single, - ABIContractType, - ) from hexathon import ( add_0x, strip_0x, ) +from chainqueue.db.models.tx import Otx +from chainqueue.db.models.tx import TxCache +from chainqueue.db.enum import StatusBits +from chainqueue.error import NotLocalTxError # local imports -from cic_eth.db import ( - Otx, - SessionBase, - ) -from cic_eth.db.models.tx import TxCache -from cic_eth.db.models.nonce import NonceReservation -from cic_eth.db.models.lock import Lock -from cic_eth.db.models.role import AccountRole -from cic_eth.db.enum import ( - LockEnum, - StatusBits, - ) -from cic_eth.error import PermanentTxError -from cic_eth.error import TemporaryTxError -from cic_eth.error import NotLocalTxError -#from cic_eth.queue.tx import create as queue_create -from cic_eth.queue.tx import ( - get_tx, - register_tx, - get_nonce_tx, - ) -from cic_eth.error import OutOfGasError -from cic_eth.error import LockedError -from cic_eth.eth.gas import ( - create_check_gas_task, - ) -from cic_eth.eth.nonce import CustodialTaskNonceOracle +from cic_eth.db import SessionBase from cic_eth.error import ( - AlreadyFillingGasError, - EthError, + PermanentTxError, + TemporaryTxError, ) +from cic_eth.eth.gas import create_check_gas_task from cic_eth.admin.ctrl import lock_send from cic_eth.task import ( CriticalSQLAlchemyTask, @@ -78,127 +44,6 @@ logg = logging.getLogger() MAX_NONCE_ATTEMPTS = 3 -# TODO this function is too long -@celery_app.task(bind=True, throws=(OutOfGasError), 
base=CriticalSQLAlchemyAndWeb3Task) -def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=None): - """Check the gas level of the sender address of a transaction. - - If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raiser. - - If account balance is sufficient, but level of gas before spend is below "safe" threshold, gas refill is requested, and execution continues normally. - - :param tx_hashes: Transaction hashes due to be submitted - :type tx_hashes: list of str, 0x-hex - :param chain_spec_dict: Chain spec dict representation - :type chain_spec_dict: dict - :param txs: Signed raw transaction data, corresponding to tx_hashes - :type txs: list of str, 0x-hex - :param address: Sender address - :type address: str, 0x-hex - :param gas_required: Gas limit * gas price for transaction, (optional, if not set will be retrived from transaction data) - :type gas_required: int - :return: Signed raw transaction data list - :rtype: param txs, unchanged - """ - if len(txs) == 0: - for i in range(len(tx_hashes)): - o = get_tx(tx_hashes[i]) - txs.append(o['signed_tx']) - if address == None: - address = o['address'] - - #if not web3.Web3.isChecksumAddress(address): - if not is_checksum_address(address): - raise ValueError('invalid address {}'.format(address)) - - chain_spec = ChainSpec.from_dict(chain_spec_dict) - - queue = self.request.delivery_info.get('routing_key') - - conn = RPCConnection.connect(chain_spec) - - # TODO: it should not be necessary to pass address explicitly, if not passed should be derived from the tx - gas_balance = 0 - try: - o = balance(address) - r = conn.do(o) - conn.disconnect() - gas_balance = abi_decode_single(ABIContractType.UINT256, r) - except EthException as e: - conn.disconnect() - raise EthError('gas_balance call for {}: {}'.format(address, e)) - - logg.debug('address {} has gas {} needs {}'.format(address, gas_balance, gas_required)) - session = SessionBase.create_session() - gas_provider = AccountRole.get_address('GAS_GIFTER', session=session) - session.close() - - if gas_required > gas_balance: - s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', - [ - address, - gas_provider, - ], - queue=queue, - ) - s_refill_gas = celery.signature( - 'cic_eth.eth.tx.refill_gas', - [ - chain_spec_dict, - ], - queue=queue, - ) - s_nonce.link(s_refill_gas) - s_nonce.apply_async() - wait_tasks = [] - for tx_hash in tx_hashes: - s = celery.signature( - 'cic_eth.queue.tx.set_waitforgas', - [ - tx_hash, - ], - queue=queue, - ) - wait_tasks.append(s) - celery.group(wait_tasks)() - raise OutOfGasError('need to fill gas, required {}, had {}'.format(gas_required, gas_balance)) - - safe_gas = self.safe_gas_threshold_amount - if gas_balance < safe_gas: - s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', - [ - address, - gas_provider, - ], - queue=queue, - ) - s_refill_gas = celery.signature( - 'cic_eth.eth.tx.refill_gas', - [ - chain_spec_dict, - ], - queue=queue, - ) - s_nonce.link(s_refill_gas) - s_nonce.apply_async() - logg.debug('requested refill from {} to {}'.format(gas_provider, address)) - ready_tasks = [] - for tx_hash in tx_hashes: - s = celery.signature( - 'cic_eth.queue.tx.set_ready', - [ - tx_hash, - ], - queue=queue, - ) - ready_tasks.append(s) - celery.group(ready_tasks)() - - return txs - - # TODO: chain chainable transactions that use hashes as inputs may be chained to this function to output signed txs instead. 
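# --- Illustrative sketch, not part of the patch ---
# The removed check_gas task above relies on a chaining idiom that recurs
# throughout this patch: build celery signatures, link the follow-up onto the
# head, then fire the head so both run in order on the same queue. A minimal
# sketch of that idiom, using the task names as they appear in the removed code
# (the patch appears to relocate reserve_nonce to cic_eth.eth.nonce and the gas
# tasks towards cic_eth.eth.gas); request_gas_refill is a hypothetical helper:
import celery

def request_gas_refill(address, gas_provider, chain_spec_dict, queue='cic-eth'):
    s_nonce = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            address,
            gas_provider,
        ],
        queue=queue,
    )
    s_refill_gas = celery.signature(
        'cic_eth.eth.tx.refill_gas',
        [
            chain_spec_dict,
        ],
        queue=queue,
    )
    # the linked task receives reserve_nonce's return value prepended to its args
    s_nonce.link(s_refill_gas)
    return s_nonce.apply_async()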
@celery_app.task(bind=True, base=CriticalSQLAlchemyTask) def hashes_to_txs(self, tx_hashes): @@ -260,7 +105,7 @@ def send(self, txs, chain_spec_dict): chain_spec = ChainSpec.from_dict(chain_spec_dict) - tx_hex = txs[0] + tx_hex = add_0x(txs[0]) tx_hash_hex = add_0x(keccak256_hex_to_hex(tx_hex)) @@ -270,8 +115,9 @@ def send(self, txs, chain_spec_dict): r = None s_set_sent = celery.signature( - 'cic_eth.queue.tx.set_sent_status', + 'cic_eth.queue.state.set_sent', [ + chain_spec_dict, tx_hash_hex, False ], @@ -288,7 +134,10 @@ def send(self, txs, chain_spec_dict): if len(tx_tail) > 0: s = celery.signature( 'cic_eth.eth.tx.send', - [tx_tail], + [ + tx_tail, + chain_spec_dict, + ], queue=queue, ) s.apply_async() @@ -296,187 +145,6 @@ def send(self, txs, chain_spec_dict): return tx_hash_hex -# TODO: if this method fails the nonce will be out of sequence. session needs to be extended to include the queue create, so that nonce is rolled back if the second sql query fails. Better yet, split each state change into separate tasks. -# TODO: method is too long, factor out code for clarity -@celery_app.task(bind=True, throws=(NotFoundEthException,), base=CriticalWeb3AndSignerTask) -def refill_gas(self, recipient_address, chain_spec_dict): - """Executes a native token transaction to fund the recipient's gas expenditures. - - :param recipient_address: Recipient in need of gas - :type recipient_address: str, 0x-hex - :param chain_str: Chain spec, string representation - :type chain_str: str - :raises AlreadyFillingGasError: A gas refill transaction for this address is already executing - :returns: Transaction hash. - :rtype: str, 0x-hex - """ - # essentials - chain_spec = ChainSpec.from_dict(chain_spec_dict) - queue = self.request.delivery_info.get('routing_key') - - # Determine value of gas tokens to send - # if an uncompleted gas refill for the same recipient already exists, we still need to spend the nonce - # however, we will perform a 0-value transaction instead - zero_amount = False - session = SessionBase.create_session() - status_filter = StatusBits.FINAL | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR - q = session.query(Otx.tx_hash) - q = q.join(TxCache) - q = q.filter(Otx.status.op('&')(StatusBits.FINAL.value)==0) - q = q.filter(TxCache.from_value!=0) - q = q.filter(TxCache.recipient==recipient_address) - c = q.count() - if c > 0: - logg.warning('already filling gas {}'.format(str(AlreadyFillingGasError(recipient_address)))) - zero_amount = True - session.flush() - - # finally determine the value to send - refill_amount = 0 - if not zero_amount: - refill_amount = self.safe_gas_refill_amount - - # determine sender - gas_provider = AccountRole.get_address('GAS_GIFTER', session=session) - session.flush() - - # set up evm RPC connection - rpc = RPCConnection.connect(chain_spec, 'default') - - # set up transaction builder - nonce_oracle = CustodialTaskNonceOracle(gas_provider, self.request.root_id, session=session) - gas_oracle = self.create_gas_oracle(rpc) - rpc_signer = RPCConnection.connect(chain_spec, 'signer') - c = Gas(signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=chain_spec.chain_id()) - - # build and add transaction - logg.debug('tx send gas amount {} from provider {} to {}'.format(refill_amount, gas_provider, recipient_address)) - (tx_hash_hex, tx_signed_raw_hex) = c.create(gas_provider, recipient_address, refill_amount, tx_format=TxFormat.RLP_SIGNED) - logg.debug('adding queue refill gas tx {}'.format(tx_hash_hex)) - cache_task = 
'cic_eth.eth.tx.cache_gas_data' - register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session) - - # add transaction to send queue - s_status = celery.signature( - 'cic_eth.queue.tx.set_ready', - [ - tx_hash_hex, - ], - queue=queue, - ) - t = s_status.apply_async() - - return tx_signed_raw_hex - - -@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask) -def resend_with_higher_gas(self, txold_hash_hex, chain_str, gas=None, default_factor=1.1): - """Create a new transaction from an existing one with same nonce and higher gas price. - - :param txold_hash_hex: Transaction to re-create - :type txold_hash_hex: str, 0x-hex - :param chain_str: Chain spec, string representation - :type chain_str: str - :param gas: Explicitly use the specified gas amount - :type gas: number - :param default_factor: Default factor by which to increment the gas price by - :type default_factor: float - :raises NotLocalTxError: Transaction does not exist in the local queue - :returns: Transaction hash - :rtype: str, 0x-hex - """ - session = SessionBase.create_session() - - q = session.query(Otx) - q = q.filter(Otx.tx_hash==txold_hash_hex) - otx = q.first() - if otx == None: - session.close() - raise NotLocalTxError(txold_hash_hex) - - chain_spec = ChainSpec.from_chain_str(chain_str) - c = RpcClient(chain_spec) - - tx_signed_raw_bytes = bytes.fromhex(otx.signed_tx[2:]) - tx = unpack(tx_signed_raw_bytes, chain_spec.chain_id()) - logg.debug('resend otx {} {}'.format(tx, otx.signed_tx)) - - queue = self.request.delivery_info['routing_key'] - - logg.debug('before {}'.format(tx)) - if gas != None: - tx['gasPrice'] = gas - else: - gas_price = c.gas_price() - if tx['gasPrice'] > gas_price: - logg.info('Network gas price {} is lower than overdue tx gas price {}'.format(gas_price, tx['gasPrice'])) - #tx['gasPrice'] = int(tx['gasPrice'] * default_factor) - tx['gasPrice'] += 1 - else: - new_gas_price = int(tx['gasPrice'] * default_factor) - if gas_price > new_gas_price: - tx['gasPrice'] = gas_price - else: - tx['gasPrice'] = new_gas_price - - (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, chain_str) - queue_create( - tx['nonce'], - tx['from'], - tx_hash_hex, - tx_signed_raw_hex, - chain_str, - session=session, - ) - TxCache.clone(txold_hash_hex, tx_hash_hex, session=session) - session.close() - - s = create_check_gas_and_send_task( - [tx_signed_raw_hex], - chain_str, - tx['from'], - tx['gasPrice'] * tx['gas'], - [tx_hash_hex], - queue=queue, - ) - s.apply_async() - - return tx_hash_hex - - -@celery_app.task(bind=True, base=CriticalSQLAlchemyTask) -def reserve_nonce(self, chained_input, signer_address=None): - - self.log_banner() - - session = SessionBase.create_session() - - address = None - if signer_address == None: - address = chained_input - logg.debug('non-explicit address for reserve nonce, using arg head {}'.format(chained_input)) - else: - #if web3.Web3.isChecksumAddress(signer_address): - if is_checksum_address(signer_address): - address = signer_address - logg.debug('explicit address for reserve nonce {}'.format(signer_address)) - else: - address = AccountRole.get_address(signer_address, session=session) - logg.debug('role for reserve nonce {} -> {}'.format(signer_address, address)) - - if not is_checksum_address(address): - raise ValueError('invalid result when resolving address for nonce {}'.format(address)) - - root_id = self.request.root_id - r = NonceReservation.next(address, root_id) - logg.debug('nonce {} reserved for address {} task {}'.format(r[1], address, 
r[0])) - - session.commit() - - session.close() - - return chained_input - @celery_app.task(bind=True, throws=(NotFoundEthException,), base=CriticalWeb3Task) def sync_tx(self, tx_hash_hex, chain_spec_dict): @@ -503,15 +171,17 @@ def sync_tx(self, tx_hash_hex, chain_spec_dict): except NotFoundEthException as e: pass + # TODO: apply receipt in tx object to validate and normalize input if rcpt != None: success = rcpt['status'] == 1 logg.debug('sync tx {} mined block {} success {}'.format(tx_hash_hex, rcpt['blockNumber'], success)) s = celery.signature( - 'cic_eth.queue.tx.set_final_status', + 'cic_eth.queue.state.set_final', [ tx_hash_hex, rcpt['blockNumber'], + rcpt['transactionIndex'], not success, ], queue=queue, @@ -520,7 +190,7 @@ def sync_tx(self, tx_hash_hex, chain_spec_dict): logg.debug('sync tx {} mempool'.format(tx_hash_hex)) s = celery.signature( - 'cic_eth.queue.tx.set_sent_status', + 'cic_eth.queue.state.set_sent', [ tx_hash_hex, ], @@ -556,7 +226,7 @@ def sync_tx(self, tx_hash_hex, chain_spec_dict): # # tx_signed_raw_hex = r[0] # tx_signed_bytes = bytes.fromhex(tx_signed_raw_hex[2:]) -# tx = unpack_signed_raw_tx(tx_signed_bytes, chain_spec.chain_id()) +# tx = unpack(tx_signed_bytes, chain_spec) # # queue = self.request.delivery_info['routing_key'] # @@ -572,39 +242,3 @@ def sync_tx(self, tx_hash_hex, chain_spec_dict): # return txpending_hash_hex -# TODO: Move to cic_eth.eth.gas -@celery_app.task(base=CriticalSQLAlchemyTask) -def cache_gas_data( - tx_hash_hex, - tx_signed_raw_hex, - chain_spec_dict, - ): - """Helper function for otx_cache_parse_tx - - :param tx_hash_hex: Transaction hash - :type tx_hash_hex: str, 0x-hex - :param tx: Signed raw transaction - :type tx: str, 0x-hex - :returns: Transaction hash and id of cache element in storage backend, respectively - :rtype: tuple - """ - chain_spec = ChainSpec.from_dict(chain_spec_dict) - tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex)) - tx = unpack(tx_signed_raw_bytes, chain_spec.chain_id()) - - tx_cache = TxCache( - tx_hash_hex, - tx['from'], - tx['to'], - ZERO_ADDRESS, - ZERO_ADDRESS, - tx['value'], - tx['value'], - ) - - session = SessionBase.create_session() - session.add(tx_cache) - session.commit() - cache_id = tx_cache.id - session.close() - return (tx_hash_hex, cache_id) diff --git a/apps/cic-eth/cic_eth/ext/address.py b/apps/cic-eth/cic_eth/ext/address.py index 449a5f3c..3db76925 100644 --- a/apps/cic-eth/cic_eth/ext/address.py +++ b/apps/cic-eth/cic_eth/ext/address.py @@ -23,7 +23,7 @@ def translate_address(address, trusted_addresses, chain_spec, sender_address=ZER registry = CICRegistry(chain_spec, rpc) declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address) - c = AddressDeclarator() + c = AddressDeclarator(chain_spec) for trusted_address in trusted_addresses: o = c.declaration(declarator_address, trusted_address, address, sender_address=sender_address) diff --git a/apps/cic-eth/cic_eth/ext/tx.py b/apps/cic-eth/cic_eth/ext/tx.py index 1dba80eb..d635fd54 100644 --- a/apps/cic-eth/cic_eth/ext/tx.py +++ b/apps/cic-eth/cic_eth/ext/tx.py @@ -18,13 +18,14 @@ from chainlib.eth.erc20 import ERC20 from hexathon import strip_0x from cic_eth_registry import CICRegistry from cic_eth_registry.erc20 import ERC20Token +from chainqueue.db.models.otx import Otx +from chainqueue.db.enum import StatusEnum +from chainqueue.query import get_tx_cache # local imports -from cic_eth.db.models.otx import Otx -from cic_eth.db.enum import StatusEnum -from cic_eth.queue.tx import get_tx_cache from 
cic_eth.queue.time import tx_times from cic_eth.task import BaseTask +from cic_eth.db.models.base import SessionBase celery_app = celery.current_app logg = logging.getLogger() @@ -168,14 +169,16 @@ def tx_collate(tx_batches, chain_spec_dict, offset, limit, newest_first=True): if isinstance(tx_batches, dict): tx_batches = [tx_batches] + session = SessionBase.create_session() + for b in tx_batches: for v in b.values(): tx = None k = None try: hx = strip_0x(v) - tx = unpack(bytes.fromhex(hx), chain_spec.chain_id()) - txc = get_tx_cache(tx['hash']) + tx = unpack(bytes.fromhex(hx), chain_spec) + txc = get_tx_cache(chain_spec, tx['hash'], session) txc['timestamp'] = int(txc['date_created'].timestamp()) txc['hash'] = txc['tx_hash'] tx = txc @@ -185,6 +188,8 @@ def tx_collate(tx_batches, chain_spec_dict, offset, limit, newest_first=True): k = '{}.{}.{}'.format(tx['timestamp'], tx['sender'], tx['nonce']) txs_by_block[k] = tx + session.close() + txs = [] ks = list(txs_by_block.keys()) ks.sort() @@ -192,4 +197,5 @@ def tx_collate(tx_batches, chain_spec_dict, offset, limit, newest_first=True): ks.reverse() for k in ks: txs.append(txs_by_block[k]) + return txs diff --git a/apps/cic-eth/cic_eth/queue/__init__.py b/apps/cic-eth/cic_eth/queue/__init__.py index 8acee1b9..d53e2500 100644 --- a/apps/cic-eth/cic_eth/queue/__init__.py +++ b/apps/cic-eth/cic_eth/queue/__init__.py @@ -1,4 +1,2 @@ # third-party imports import celery - -from .tx import get_tx diff --git a/apps/cic-eth/cic_eth/queue/balance.py b/apps/cic-eth/cic_eth/queue/balance.py index 1a5cf2db..a927a17a 100644 --- a/apps/cic-eth/cic_eth/queue/balance.py +++ b/apps/cic-eth/cic_eth/queue/balance.py @@ -5,15 +5,15 @@ import logging import celery from chainlib.chain import ChainSpec from hexathon import strip_0x - -# local imports -from cic_eth.db import SessionBase -from cic_eth.db.models.otx import Otx -from cic_eth.db.models.tx import TxCache -from cic_eth.db.enum import ( +from chainqueue.db.models.otx import Otx +from chainqueue.db.models.tx import TxCache +from chainqueue.db.enum import ( StatusBits, dead, ) + +# local imports +from cic_eth.db import SessionBase from cic_eth.task import CriticalSQLAlchemyTask celery_app = celery.current_app diff --git a/apps/cic-eth/cic_eth/queue/lock.py b/apps/cic-eth/cic_eth/queue/lock.py new file mode 100644 index 00000000..baa5accb --- /dev/null +++ b/apps/cic-eth/cic_eth/queue/lock.py @@ -0,0 +1,48 @@ +# external imports +from chainqueue.db.models.otx import Otx +import celery + +# local imports +from cic_eth.task import CriticalSQLAlchemyTask +from cic_eth.db import SessionBase +from cic_eth.db.models.lock import Lock + +celery_app = celery.current_app + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def get_lock(address=None): + """Retrieve all active locks + + If address is set, the query will look up the lock for the specified address only. A list of zero or one elements is returned, depending on whether a lock is set or not. 
+ + :param address: Get lock for only the specified address + :type address: str, 0x-hex + :returns: List of locks + :rtype: list of dicts + """ + session = SessionBase.create_session() + q = session.query( + Lock.date_created, + Lock.address, + Lock.flags, + Otx.tx_hash, + ) + q = q.join(Otx, isouter=True) + if address != None: + q = q.filter(Lock.address==address) + else: + q = q.order_by(Lock.date_created.asc()) + + locks = [] + for lock in q.all(): + o = { + 'date': lock[0], + 'address': lock[1], + 'tx_hash': lock[3], + 'flags': lock[2], + } + locks.append(o) + session.close() + + return locks diff --git a/apps/cic-eth/cic_eth/queue/query.py b/apps/cic-eth/cic_eth/queue/query.py new file mode 100644 index 00000000..1572289b --- /dev/null +++ b/apps/cic-eth/cic_eth/queue/query.py @@ -0,0 +1,157 @@ +# standard imports +import datetime + +# external imports +import celery +from chainlib.chain import ChainSpec +from chainlib.eth.tx import unpack +import chainqueue.query +from chainqueue.db.enum import ( + StatusEnum, + is_alive, + ) +from sqlalchemy import func +from sqlalchemy import or_ +from chainqueue.db.models.tx import TxCache +from chainqueue.db.models.otx import Otx + +# local imports +from cic_eth.db.enum import LockEnum +from cic_eth.task import CriticalSQLAlchemyTask +from cic_eth.db.models.lock import Lock +from cic_eth.db.models.base import SessionBase + +celery_app = celery.current_app + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def get_tx_cache(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.query.get_tx_cache(chain_spec, tx_hash, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def get_tx(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.query.get_tx(chain_spec, tx_hash) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True, counterpart=None): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack) + session.close() + return r + + +def get_status_tx(chain_spec, status, not_status=None, before=None, exact=False, limit=0, session=None): + return chainqueue.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack) + + +def get_paused_tx(chain_spec, status=None, sender=None, session=None, decoder=None): + return chainqueue.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack) + + +def get_nonce_tx(chain_spec, nonce, sender): + return get_nonce_tx_cache(chain_spec, nonce, sender, decoder=unpack) + + +def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, 
recipient=None, before=None, limit=0, session=None): + """Returns the next pending transaction, specifically the transaction with the lowest nonce, for every recipient that has pending transactions. + + Will omit addresses that have the LockEnum.SEND bit in Lock set. + + (TODO) Will not return any rows if LockEnum.SEND bit in Lock is set for zero address. + + :param status: Defines the status used to filter as upcoming. + :type status: cic_eth.db.enum.StatusEnum + :param recipient: Ethereum address of recipient to return transaction for + :type recipient: str, 0x-hex + :param before: Only return transactions if their modification date is older than the given timestamp + :type before: datetime.datetime + :param chain_id: Chain id to use to parse signed transaction data + :type chain_id: number + :raises ValueError: Status is finalized, sent or never attempted sent + :returns: Transactions + :rtype: dict, with transaction hash as key, signed raw transaction as value + """ + session = SessionBase.bind_session(session) + q_outer = session.query( + TxCache.sender, + func.min(Otx.nonce).label('nonce'), + ) + q_outer = q_outer.join(TxCache) + q_outer = q_outer.join(Lock, isouter=True) + q_outer = q_outer.filter(or_(Lock.flags==None, Lock.flags.op('&')(LockEnum.SEND.value)==0)) + + if not is_alive(status): + SessionBase.release_session(session) + raise ValueError('not a valid non-final tx value: {}'.format(status)) + if status == StatusEnum.PENDING: + q_outer = q_outer.filter(Otx.status==status.value) + else: + q_outer = q_outer.filter(Otx.status.op('&')(status)==status) + + if not_status != None: + q_outer = q_outer.filter(Otx.status.op('&')(not_status)==0) + + if recipient != None: + q_outer = q_outer.filter(TxCache.recipient==recipient) + + q_outer = q_outer.group_by(TxCache.sender) + + txs = {} + + i = 0 + for r in q_outer.all(): + q = session.query(Otx) + q = q.join(TxCache) + q = q.filter(TxCache.sender==r.sender) + q = q.filter(Otx.nonce==r.nonce) + + if before != None: + q = q.filter(TxCache.date_checked 0 and limit == i: + break + + SessionBase.release_session(session) + + return txs + diff --git a/apps/cic-eth/cic_eth/queue/state.py b/apps/cic-eth/cic_eth/queue/state.py new file mode 100644 index 00000000..89d5ce40 --- /dev/null +++ b/apps/cic-eth/cic_eth/queue/state.py @@ -0,0 +1,109 @@ +# external imports +from chainlib.chain import ChainSpec +import chainqueue.state + +# local imports +import celery +from cic_eth.task import CriticalSQLAlchemyTask +from cic_eth.db.models.base import SessionBase + +celery_app = celery.current_app + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_sent(chain_spec_dict, tx_hash, fail=False): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_sent(chain_spec, tx_hash, fail, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_cancel(chain_spec_dict, tx_hash, manual=False): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_cancel(chain_spec, tx_hash, manual, session=session) + session.close() + 
return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_rejected(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_rejected(chain_spec, tx_hash, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_fubar(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_fubar(chain_spec, tx_hash, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_manual(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_manual(chain_spec, tx_hash, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_ready(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_ready(chain_spec, tx_hash, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_reserved(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_reserved(chain_spec, tx_hash, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def set_waitforgas(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.set_waitforgas(chain_spec, tx_hash, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def get_state_log(chain_spec_dict, tx_hash): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.get_state_log(chain_spec, tx_hash, session=session) + session.close() + return r + + +@celery_app.task(base=CriticalSQLAlchemyTask) +def obsolete(chain_spec_dict, tx_hash, final): + chain_spec = ChainSpec.from_dict(chain_spec_dict) + session = SessionBase.create_session() + r = chainqueue.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session) + session.close() + return r diff --git a/apps/cic-eth/cic_eth/queue/time.py b/apps/cic-eth/cic_eth/queue/time.py index f85e55c0..b3c74ece 100644 --- a/apps/cic-eth/cic_eth/queue/time.py +++ b/apps/cic-eth/cic_eth/queue/time.py @@ -7,10 +7,10 @@ from chainlib.chain import ChainSpec from chainlib.connection import RPCConnection from chainlib.eth.block import block_by_hash from chainlib.eth.tx import receipt +from chainqueue.db.models.otx import Otx +from chainqueue.error import NotLocalTxError # local imports -from cic_eth.db.models.otx import Otx -from cic_eth.error import NotLocalTxError from cic_eth.task import CriticalSQLAlchemyAndWeb3Task celery_app = celery.current_app @@ -18,7 +18,10 @@ celery_app = celery.current_app logg = logging.getLogger() -def tx_times(tx_hash, chain_spec): +def tx_times(tx_hash, chain_spec, session=None): + + session = SessionBase.bind_session(session) + rpc = RPCConnection.connect(chain_spec, 'default') time_pair = { 'network': None, @@ -35,8 +38,10 @@ def tx_times(tx_hash, chain_spec): logg.debug('error with getting timestamp details for {}: {}'.format(tx_hash, e)) pass - otx = Otx.load(tx_hash) + otx = Otx.load(tx_hash, session=session) if otx != None: time_pair['queue'] = 
int(otx['date_created'].timestamp()) + SessionBase.release_session(session) + return time_pair diff --git a/apps/cic-eth/cic_eth/queue/tx.py b/apps/cic-eth/cic_eth/queue/tx.py index 6b228cc0..841f45b5 100644 --- a/apps/cic-eth/cic_eth/queue/tx.py +++ b/apps/cic-eth/cic_eth/queue/tx.py @@ -5,92 +5,50 @@ import datetime # external imports import celery +from chainqueue.db.models.otx import Otx +from chainqueue.db.models.otx import OtxStateLog +from chainqueue.db.models.tx import TxCache from hexathon import strip_0x from sqlalchemy import or_ from sqlalchemy import not_ from sqlalchemy import tuple_ from sqlalchemy import func +from chainlib.chain import ChainSpec from chainlib.eth.tx import unpack - -# local imports -from cic_eth.db.models.otx import Otx -from cic_eth.db.models.otx import OtxStateLog -from cic_eth.db.models.tx import TxCache -from cic_eth.db.models.lock import Lock -from cic_eth.db import SessionBase -from cic_eth.db.enum import ( +import chainqueue.state +from chainqueue.db.enum import ( StatusEnum, - LockEnum, StatusBits, is_alive, dead, ) +from chainqueue.tx import create +from chainqueue.error import NotLocalTxError +from chainqueue.db.enum import status_str + +# local imports +from cic_eth.db.models.lock import Lock +from cic_eth.db import SessionBase +from cic_eth.db.enum import LockEnum from cic_eth.task import CriticalSQLAlchemyTask -from cic_eth.error import NotLocalTxError from cic_eth.error import LockedError -from cic_eth.db.enum import status_str celery_app = celery.current_app -#logg = celery_app.log.get_default_logger() logg = logging.getLogger() -def create(nonce, holder_address, tx_hash, signed_tx, chain_spec, obsolete_predecessors=True, session=None): - """Create a new transaction queue record. - - :param nonce: Transaction nonce - :type nonce: int - :param holder_address: Sender address - :type holder_address: str, 0x-hex - :param tx_hash: Transaction hash - :type tx_hash: str, 0x-hex - :param signed_tx: Signed raw transaction - :type signed_tx: str, 0x-hex - :param chain_spec: Chain spec to create transaction for - :type chain_spec: ChainSpec - :returns: transaction hash - :rtype: str, 0x-hash - """ +def queue_create(chain_spec, nonce, holder_address, tx_hash, signed_tx, session=None): session = SessionBase.bind_session(session) + lock = Lock.check_aggregate(str(chain_spec), LockEnum.QUEUE, holder_address, session=session) if lock > 0: SessionBase.release_session(session) raise LockedError(lock) - o = Otx.add( - nonce=nonce, - address=holder_address, - tx_hash=tx_hash, - signed_tx=signed_tx, - session=session, - ) - session.flush() - - if obsolete_predecessors: - q = session.query(Otx) - q = q.join(TxCache) - q = q.filter(Otx.nonce==nonce) - q = q.filter(TxCache.sender==holder_address) - q = q.filter(Otx.tx_hash!=tx_hash) - q = q.filter(Otx.status.op('&')(StatusBits.FINAL)==0) - - for otx in q.all(): - logg.info('otx {} obsoleted by {}'.format(otx.tx_hash, tx_hash)) - try: - otx.cancel(confirmed=False, session=session) - except TxStateChangeError as e: - logg.exception('obsolete fail: {}'.format(e)) - session.close() - raise(e) - except Exception as e: - logg.exception('obsolete UNEXPECTED fail: {}'.format(e)) - session.close() - raise(e) - - - session.commit() + tx_hash = create(chain_spec, nonce, holder_address, tx_hash, signed_tx, chain_spec, session=session) + SessionBase.release_session(session) - logg.debug('queue created nonce {} from {} hash {}'.format(nonce, holder_address, tx_hash)) + return tx_hash @@ -111,14 +69,14 @@ def 
register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=No """ logg.debug('adding queue tx {}:{} -> {}'.format(chain_spec, tx_hash_hex, tx_signed_raw_hex)) tx_signed_raw = bytes.fromhex(strip_0x(tx_signed_raw_hex)) - tx = unpack(tx_signed_raw, chain_id=chain_spec.chain_id()) + tx = unpack(tx_signed_raw, chain_spec) - create( + tx_hash = queue_create( + chain_spec, tx['nonce'], tx['from'], tx_hash_hex, tx_signed_raw_hex, - chain_spec, session=session, ) @@ -134,674 +92,3 @@ def register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=No queue=queue, ) s_cache.apply_async() - - return (tx_hash_hex, tx_signed_raw_hex,) - - -# TODO: Replace set_* with single task for set status -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_sent_status(tx_hash, fail=False): - """Used to set the status after a send attempt - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :param fail: if True, will set a SENDFAIL status, otherwise a SENT status. (Default: False) - :type fail: boolean - :raises NotLocalTxError: If transaction not found in queue. - :returns: True if tx is known, False otherwise - :rtype: boolean - """ - session = SessionBase.create_session() - q = session.query(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - o = q.first() - if o == None: - logg.warning('not local tx, skipping {}'.format(tx_hash)) - session.close() - return False - - try: - if fail: - o.sendfail(session=session) - else: - o.sent(session=session) - except TxStateChangeError as e: - logg.exception('set sent fail: {}'.format(e)) - session.close() - raise(e) - except Exception as e: - logg.exception('set sent UNEXPECED fail: {}'.format(e)) - session.close() - raise(e) - - - session.commit() - session.close() - - return tx_hash - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_final_status(tx_hash, block=None, fail=False): - """Used to set the status of an incoming transaction result. - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :param block: Block number if final status represents a confirmation on the network - :type block: number - :param fail: if True, will set a SUCCESS status, otherwise a REVERTED status. (Default: False) - :type fail: boolean - :raises NotLocalTxError: If transaction not found in queue. 
- """ - session = SessionBase.create_session() - q = session.query( - Otx.nonce.label('nonce'), - TxCache.sender.label('sender'), - Otx.id.label('otxid'), - ) - q = q.join(TxCache) - q = q.filter(Otx.tx_hash==tx_hash) - o = q.first() - - if o == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - - session.flush() - - nonce = o.nonce - sender = o.sender - otxid = o.otxid - - q = session.query(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - o = q.first() - - try: - if fail: - o.minefail(block, session=session) - else: - o.success(block, session=session) - session.commit() - except TxStateChangeError as e: - logg.exception('set final fail: {}'.format(e)) - session.close() - raise(e) - except Exception as e: - logg.exception('set final UNEXPECED fail: {}'.format(e)) - session.close() - raise(e) - - q = session.query(Otx) - q = q.join(TxCache) - q = q.filter(Otx.nonce==nonce) - q = q.filter(TxCache.sender==sender) - q = q.filter(Otx.tx_hash!=tx_hash) - - for otwo in q.all(): - try: - otwo.cancel(True, session=session) - except TxStateChangeError as e: - logg.exception('cancel non-final fail: {}'.format(e)) - session.close() - raise(e) - except Exception as e: - logg.exception('cancel non-final UNEXPECTED fail: {}'.format(e)) - session.close() - raise(e) - session.commit() - session.close() - - return tx_hash - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_cancel(tx_hash, manual=False): - """Used to set the status when a transaction is cancelled. - - Will set the state to CANCELLED or OVERRIDDEN - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :param manual: If set, status will be OVERRIDDEN. Otherwise CANCELLED. - :type manual: boolean - :raises NotLocalTxError: If transaction not found in queue. - """ - - session = SessionBase.create_session() - o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first() - if o == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - - session.flush() - - try: - if manual: - o.override(session=session) - else: - o.cancel(session=session) - session.commit() - except TxStateChangeError as e: - logg.exception('set cancel fail: {}'.format(e)) - except Exception as e: - logg.exception('set cancel UNEXPECTED fail: {}'.format(e)) - session.close() - - return tx_hash - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_rejected(tx_hash): - """Used to set the status when the node rejects sending a transaction to network - - Will set the state to REJECTED - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :raises NotLocalTxError: If transaction not found in queue. - """ - - session = SessionBase.create_session() - o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first() - if o == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - - session.flush() - - o.reject(session=session) - session.commit() - session.close() - - return tx_hash - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_fubar(tx_hash): - """Used to set the status when an unexpected error occurs. - - Will set the state to FUBAR - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :raises NotLocalTxError: If transaction not found in queue. 
- """ - - session = SessionBase.create_session() - o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first() - if o == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - - session.flush() - - o.fubar(session=session) - session.commit() - session.close() - - return tx_hash - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_manual(tx_hash): - """Used to set the status when queue is manually changed - - Will set the state to MANUAL - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :raises NotLocalTxError: If transaction not found in queue. - """ - - session = SessionBase.create_session() - o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first() - if o == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - - session.flush() - - o.manual(session=session) - session.commit() - session.close() - - return tx_hash - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_ready(tx_hash): - """Used to mark a transaction as ready to be sent to network - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :raises NotLocalTxError: If transaction not found in queue. - """ - session = SessionBase.create_session() - o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first() - if o == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - session.flush() - - if o.status & StatusBits.GAS_ISSUES or o.status == StatusEnum.PENDING: - o.readysend(session=session) - else: - o.retry(session=session) - - session.commit() - session.close() - - return tx_hash - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_dequeue(tx_hash): - session = SessionBase.create_session() - q = session.query(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - o = q.first() - if o == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - - session.flush() - - o.dequeue(session=session) - session.commit() - session.close() - - return tx_hash - - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def set_waitforgas(tx_hash): - """Used to set the status when a transaction must be deferred due to gas refill - - Will set the state to WAITFORGAS - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :raises NotLocalTxError: If transaction not found in queue. - """ - - session = SessionBase.create_session() - o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first() - if o == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - - session.flush() - - o.waitforgas(session=session) - session.commit() - session.close() - - return tx_hash - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def get_state_log(tx_hash): - - logs = [] - - session = SessionBase.create_session() - - q = session.query(OtxStateLog) - q = q.join(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - q = q.order_by(OtxStateLog.date.asc()) - for l in q.all(): - logs.append((l.date, l.status,)) - - session.close() - - return logs - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def get_tx_cache(tx_hash): - """Returns an aggregate dictionary of outgoing transaction data and metadata - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :raises NotLocalTxError: If transaction not found in queue. 
- :returns: Transaction data - :rtype: dict - """ - session = SessionBase.create_session() - q = session.query(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - otx = q.first() - - if otx == None: - session.close() - raise NotLocalTxError(tx_hash) - - session.flush() - - q = session.query(TxCache) - q = q.filter(TxCache.otx_id==otx.id) - txc = q.first() - - session.close() - - tx = { - 'tx_hash': otx.tx_hash, - 'signed_tx': otx.signed_tx, - 'nonce': otx.nonce, - 'status': status_str(otx.status), - 'status_code': otx.status, - 'source_token': txc.source_token_address, - 'destination_token': txc.destination_token_address, - 'block_number': txc.block_number, - 'tx_index': txc.tx_index, - 'sender': txc.sender, - 'recipient': txc.recipient, - 'from_value': int(txc.from_value), - 'to_value': int(txc.to_value), - 'date_created': txc.date_created, - 'date_updated': txc.date_updated, - 'date_checked': txc.date_checked, - } - - return tx - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def get_lock(address=None): - """Retrieve all active locks - - If address is set, the query will look up the lock for the specified address only. A list of zero or one elements is returned, depending on whether a lock is set or not. - - :param address: Get lock for only the specified address - :type address: str, 0x-hex - :returns: List of locks - :rtype: list of dicts - """ - session = SessionBase.create_session() - q = session.query( - Lock.date_created, - Lock.address, - Lock.flags, - Otx.tx_hash, - ) - q = q.join(Otx, isouter=True) - if address != None: - q = q.filter(Lock.address==address) - else: - q = q.order_by(Lock.date_created.asc()) - - locks = [] - for lock in q.all(): - o = { - 'date': lock[0], - 'address': lock[1], - 'tx_hash': lock[3], - 'flags': lock[2], - } - locks.append(o) - session.close() - - return locks - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def get_tx(tx_hash): - """Retrieve a transaction queue record by transaction hash - - :param tx_hash: Transaction hash of record to modify - :type tx_hash: str, 0x-hex - :raises NotLocalTxError: If transaction not found in queue. 
- :returns: nonce, address and signed_tx (raw signed transaction) - :rtype: dict - """ - session = SessionBase.create_session() - q = session.query(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - tx = q.first() - if tx == None: - session.close() - raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash)) - - o = { - 'otx_id': tx.id, - 'nonce': tx.nonce, - 'signed_tx': tx.signed_tx, - 'status': tx.status, - } - logg.debug('get tx {}'.format(o)) - session.close() - return o - - -@celery_app.task(base=CriticalSQLAlchemyTask) -def get_nonce_tx(nonce, sender, chain_id): - """Retrieve all transactions for address with specified nonce - - :param nonce: Nonce - :type nonce: number - :param address: Ethereum address - :type address: str, 0x-hex - :returns: Transactions - :rtype: dict, with transaction hash as key, signed raw transaction as value - """ - session = SessionBase.create_session() - q = session.query(Otx) - q = q.join(TxCache) - q = q.filter(TxCache.sender==sender) - q = q.filter(Otx.nonce==nonce) - - txs = {} - for r in q.all(): - tx_signed_bytes = bytes.fromhex(r.signed_tx[2:]) - tx = unpack(tx_signed_bytes, chain_id) - if sender == None or tx['from'] == sender: - txs[r.tx_hash] = r.signed_tx - - session.close() - - return txs - - - -# TODO: pass chain spec instead of chain id -def get_paused_txs(status=None, sender=None, chain_id=0, session=None): - """Returns not finalized transactions that have been attempted sent without success. - - :param status: If set, will return transactions with this local queue status only - :type status: cic_eth.db.enum.StatusEnum - :param recipient: Recipient address to return transactions for - :type recipient: str, 0x-hex - :param chain_id: Numeric chain id to use to parse signed transaction data - :type chain_id: number - :raises ValueError: Status is finalized, sent or never attempted sent - :returns: Transactions - :rtype: dict, with transaction hash as key, signed raw transaction as value - """ - session = SessionBase.bind_session(session) - q = session.query(Otx) - - if status != None: - #if status == StatusEnum.PENDING or status >= StatusEnum.SENT: - if status == StatusEnum.PENDING or status & StatusBits.IN_NETWORK or not is_alive(status): - SessionBase.release_session(session) - raise ValueError('not a valid paused tx value: {}'.format(status)) - q = q.filter(Otx.status.op('&')(status.value)==status.value) - q = q.join(TxCache) - else: - q = q.filter(Otx.status>StatusEnum.PENDING.value) - q = q.filter(not_(Otx.status.op('&')(StatusBits.IN_NETWORK.value)>0)) - - if sender != None: - q = q.filter(TxCache.sender==sender) - - txs = {} - - for r in q.all(): - tx_signed_bytes = bytes.fromhex(r.signed_tx[2:]) - tx = unpack(tx_signed_bytes, chain_id) - if sender == None or tx['from'] == sender: - #gas += tx['gas'] * tx['gasPrice'] - txs[r.tx_hash] = r.signed_tx - - SessionBase.release_session(session) - - return txs - - -def get_status_tx(status, not_status=None, before=None, exact=False, limit=0, session=None): - """Retrieve transaction with a specific queue status. 
- - :param status: Status to match transactions with - :type status: str - :param before: If set, return only transactions older than the timestamp - :type status: datetime.dateTime - :param limit: Limit amount of returned transactions - :type limit: number - :returns: Transactions - :rtype: list of cic_eth.db.models.otx.Otx - """ - txs = {} - session = SessionBase.bind_session(session) - q = session.query(Otx) - q = q.join(TxCache) - # before = datetime.datetime.utcnow() - if before != None: - q = q.filter(TxCache.date_updated0) - if not_status != None: - q = q.filter(Otx.status.op('&')(not_status)==0) - i = 0 - for o in q.all(): - if limit > 0 and i == limit: - break - txs[o.tx_hash] = o.signed_tx - i += 1 - SessionBase.release_session(session) - return txs - - -# TODO: move query to model -def get_upcoming_tx(status=StatusEnum.READYSEND, recipient=None, before=None, chain_id=0, session=None): - """Returns the next pending transaction, specifically the transaction with the lowest nonce, for every recipient that has pending transactions. - - Will omit addresses that have the LockEnum.SEND bit in Lock set. - - (TODO) Will not return any rows if LockEnum.SEND bit in Lock is set for zero address. - - :param status: Defines the status used to filter as upcoming. - :type status: cic_eth.db.enum.StatusEnum - :param recipient: Ethereum address of recipient to return transaction for - :type recipient: str, 0x-hex - :param before: Only return transactions if their modification date is older than the given timestamp - :type before: datetime.datetime - :param chain_id: Chain id to use to parse signed transaction data - :type chain_id: number - :raises ValueError: Status is finalized, sent or never attempted sent - :returns: Transactions - :rtype: dict, with transaction hash as key, signed raw transaction as value - """ - session = SessionBase.bind_session(session) - q_outer = session.query( - TxCache.sender, - func.min(Otx.nonce).label('nonce'), - ) - q_outer = q_outer.join(TxCache) - q_outer = q_outer.join(Lock, isouter=True) - q_outer = q_outer.filter(or_(Lock.flags==None, Lock.flags.op('&')(LockEnum.SEND.value)==0)) - - if not is_alive(status): - SessionBase.release_session(session) - raise ValueError('not a valid non-final tx value: {}'.format(status)) - if status == StatusEnum.PENDING: - q_outer = q_outer.filter(Otx.status==status.value) - else: - q_outer = q_outer.filter(Otx.status.op('&')(status.value)==status.value) - - if recipient != None: - q_outer = q_outer.filter(TxCache.recipient==recipient) - - q_outer = q_outer.group_by(TxCache.sender) - - txs = {} - - for r in q_outer.all(): - q = session.query(Otx) - q = q.join(TxCache) - q = q.filter(TxCache.sender==r.sender) - q = q.filter(Otx.nonce==r.nonce) - - if before != None: - q = q.filter(TxCache.date_checked 0: + tx_hash_hex = add_0x(tx.hash) logg.debug('gas refill tx {}'.format(tx_hash_hex)) session = SessionBase.bind_session(session) q = session.query(TxCache.recipient) q = q.join(Otx) - q = q.filter(Otx.tx_hash==tx_hash_hex) + q = q.filter(Otx.tx_hash==strip_0x(tx_hash_hex)) r = q.first() if r == None: @@ -38,7 +42,7 @@ class GasFilter(SyncFilter): SessionBase.release_session(session) return - txs = get_paused_txs(StatusBits.GAS_ISSUES, r[0], self.chain_spec.chain_id(), session=session) + txs = get_paused_tx(self.chain_spec, status=StatusBits.GAS_ISSUES, sender=r[0], session=session, decoder=unpack) SessionBase.release_session(session) diff --git a/apps/cic-eth/cic_eth/runnable/daemons/filters/register.py 
b/apps/cic-eth/cic_eth/runnable/daemons/filters/register.py index 40eef8ad..841e7d75 100644 --- a/apps/cic-eth/cic_eth/runnable/daemons/filters/register.py +++ b/apps/cic-eth/cic_eth/runnable/daemons/filters/register.py @@ -35,9 +35,10 @@ class RegistrationFilter(SyncFilter): address = to_checksum_address(add_0x(address_hex)) logg.info('request token gift to {}'.format(address)) s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ address, + self.chain_spec.asdict(), ], queue=self.queue, ) diff --git a/apps/cic-eth/cic_eth/runnable/daemons/filters/straggler.py b/apps/cic-eth/cic_eth/runnable/daemons/filters/straggler.py new file mode 100644 index 00000000..79d50cde --- /dev/null +++ b/apps/cic-eth/cic_eth/runnable/daemons/filters/straggler.py @@ -0,0 +1,34 @@ +# standard imports +import logging + +# external imports +import celery +from chainqueue.state import obsolete_by_cache + +logg = logging.getLogger() + + + +class StragglerFilter: + + def __init__(self, chain_spec, queue='cic-eth'): + self.chain_spec = chain_spec + self.queue = queue + + + def filter(self, conn, block, tx, db_session=None): + logg.debug('tx {}'.format(tx)) + obsolete_by_cache(self.chain_spec, tx.hash, False, session=db_session) + s_send = celery.signature( + 'cic_eth.eth.gas.resend_with_higher_gas', + [ + tx.hash, + self.chain_spec.asdict(), + ], + queue=self.queue, + ) + return s_send.apply_async() + + + def __str__(self): + return 'stragglerfilter' diff --git a/apps/cic-eth/cic_eth/runnable/daemons/filters/transferauth.py b/apps/cic-eth/cic_eth/runnable/daemons/filters/transferauth.py index 9c09bafe..d6646b6c 100644 --- a/apps/cic-eth/cic_eth/runnable/daemons/filters/transferauth.py +++ b/apps/cic-eth/cic_eth/runnable/daemons/filters/transferauth.py @@ -61,7 +61,7 @@ class TransferAuthFilter(SyncFilter): } s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ [token_data], sender, @@ -69,7 +69,7 @@ class TransferAuthFilter(SyncFilter): queue=self.queue, ) s_approve = celery.signature( - 'cic_eth.eth.token.approve', + 'cic_eth.eth.erc20.approve', [ sender, recipient, diff --git a/apps/cic-eth/cic_eth/runnable/daemons/filters/tx.py b/apps/cic-eth/cic_eth/runnable/daemons/filters/tx.py index 52ec2532..4e06e4d8 100644 --- a/apps/cic-eth/cic_eth/runnable/daemons/filters/tx.py +++ b/apps/cic-eth/cic_eth/runnable/daemons/filters/tx.py @@ -1,16 +1,16 @@ # standard imports import logging -# third-party imports +# external imports import celery from hexathon import ( add_0x, ) +from chainsyncer.db.models.base import SessionBase +from chainqueue.db.models.otx import Otx +from chainlib.status import Status # local imports -from cic_eth.db.models.otx import Otx -from chainsyncer.db.models.base import SessionBase -from chainlib.status import Status from .base import SyncFilter logg = logging.getLogger().getChild(__name__) @@ -33,16 +33,28 @@ class TxFilter(SyncFilter): logg.info('tx filter match on {}'.format(otx.tx_hash)) db_session.flush() SessionBase.release_session(db_session) - s = celery.signature( - 'cic_eth.queue.tx.set_final_status', + s_final_state = celery.signature( + 'cic_eth.queue.state.set_final', [ + self.chain_spec.asdict(), add_0x(tx_hash_hex), tx.block.number, + tx.index, tx.status == Status.ERROR, ], queue=self.queue, ) - t = s.apply_async() + s_obsolete_state = celery.signature( + 'cic_eth.queue.state.obsolete', + [ + self.chain_spec.asdict(), + add_0x(tx_hash_hex), + True, + ], + queue=self.queue, + ) + t = 
celery.group(s_obsolete_state, s_final_state)() + return t diff --git a/apps/cic-eth/cic_eth/runnable/daemons/retry.py b/apps/cic-eth/cic_eth/runnable/daemons/retry.py index 6fb94784..8974c33d 100644 --- a/apps/cic-eth/cic_eth/runnable/daemons/retry.py +++ b/apps/cic-eth/cic_eth/runnable/daemons/retry.py @@ -1,26 +1,26 @@ +# standard imports import os import sys import logging import argparse import re -import datetime -import web3 +# external imports import confini import celery from cic_eth_registry import CICRegistry from chainlib.chain import ChainSpec +from chainlib.connection import RPCConnection +from chainsyncer.filter import SyncFilter +# local imports from cic_eth.db import dsn_from_config from cic_eth.db import SessionBase -from cic_eth.eth import RpcClient -from cic_eth.sync.retry import RetrySyncer -from cic_eth.queue.tx import get_status_tx -from cic_eth.queue.tx import get_tx from cic_eth.admin.ctrl import lock_send -from cic_eth.db.enum import StatusEnum from cic_eth.db.enum import LockEnum -from cic_eth.eth.util import unpack_signed_raw_tx_hex +from cic_eth.runnable.daemons.filters.straggler import StragglerFilter +from cic_eth.sync.retry import RetrySyncer +from cic_eth.stat import init_chain_stat logging.basicConfig(level=logging.WARNING) logg = logging.getLogger() @@ -31,7 +31,8 @@ argparser = argparse.ArgumentParser(description='daemon that monitors transactio argparser.add_argument('-p', '--provider', dest='p', type=str, help='rpc provider') argparser.add_argument('-c', type=str, default=config_dir, help='config root to use') argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec') -argparser.add_argument('--retry-delay', dest='retry_delay', type=str, help='seconds to wait for retrying a transaction that is marked as sent') +argparser.add_argument('--batch-size', dest='batch_size', type=int, default=50, help='max amount of txs to resend per iteration') +argparser.add_argument('--retry-delay', dest='retry_delay', type=int, help='seconds to wait for retrying a transaction that is marked as sent') argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to') argparser.add_argument('-v', help='be verbose', action='store_true') @@ -51,7 +52,6 @@ config.process() # override args args_override = { 'ETH_PROVIDER': getattr(args, 'p'), - 'ETH_ABI_DIR': getattr(args, 'abi_dir'), 'CIC_CHAIN_SPEC': getattr(args, 'i'), 'CIC_TX_RETRY_DELAY': getattr(args, 'retry_delay'), } @@ -59,6 +59,7 @@ config.dict_override(args_override, 'cli flag') config.censor('PASSWORD', 'DATABASE') config.censor('PASSWORD', 'SSL') logg.debug('config loaded from {}:\n{}'.format(config_dir, config)) +config.add(args.batch_size, '_BATCH_SIZE', True) app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL')) @@ -66,165 +67,26 @@ queue = args.q chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) -RPCConnection.registry_location(args.p, chain_spec, tag='default') +RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, tag='default') dsn = dsn_from_config(config) -SessionBase.connect(dsn) +SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG')) -straggler_delay = int(config.get('CIC_TX_RETRY_DELAY')) - -# TODO: we already have the signed raw tx in get, so its a waste of cycles to 
get_tx here -def sendfail_filter(w3, tx_hash, rcpt, chain_spec): - tx_dict = get_tx(tx_hash) - tx = unpack_signed_raw_tx_hex(tx_dict['signed_tx'], chain_spec.chain_id()) - logg.debug('submitting tx {} for retry'.format(tx_hash)) - s_check = celery.signature( - 'cic_eth.admin.ctrl.check_lock', - [ - tx_hash, - chain_str, - LockEnum.QUEUE, - tx['from'], - ], - queue=queue, - ) -# s_resume = celery.signature( -# 'cic_eth.eth.tx.resume_tx', -# [ -# chain_str, -# ], -# queue=queue, -# ) - -# s_retry_status = celery.signature( -# 'cic_eth.queue.tx.set_ready', -# [], -# queue=queue, -# ) - s_resend = celery.signature( - 'cic_eth.eth.tx.resend_with_higher_gas', - [ - chain_str, - ], - queue=queue, - ) - - #s_resume.link(s_retry_status) - #s_check.link(s_resume) - s_check.link(s_resend) - s_check.apply_async() - - -# TODO: can we merely use the dispatcher instead? -def dispatch(conn, chain_spec): - txs = get_status_tx(StatusEnum.RETRY, before=datetime.datetime.utcnow()) - if len(txs) == 0: - logg.debug('no retry state txs found') - return - #signed_txs = list(txs.values()) - #logg.debug('signed txs {} chain {}'.format(signed_txs, chain_str)) - #for tx in signed_txs: - for k in txs.keys(): - #tx_cache = get_tx_cache(k) - tx_raw = txs[k] - tx = unpack_signed_raw_tx_hex(tx_raw, chain_spec.chain_id()) - - s_check = celery.signature( - 'cic_eth.admin.ctrl.check_lock', - [ - [tx_raw], - chain_str, - LockEnum.QUEUE, - tx['from'], - ], - queue=queue, - ) - s_send = celery.signature( - 'cic_eth.eth.tx.send', - [ - chain_str, - ], - queue=queue, - ) - s_check.link(s_send) - t = s_check.apply_async() - -# try: -# r = t.get() -# logg.debug('submitted as {} result {} with queue task {}'.format(t, r, t.children[0].get())) -# except PermanentTxError as e: -# logg.error('tx {} permanently failed: {}'.format(tx, e)) -# except TemporaryTxError as e: -# logg.error('tx {} temporarily failed: {}'.format(tx, e)) - -# -# -#def straggler_filter(w3, tx, rcpt, chain_str): -# before = datetime.datetime.utcnow() - datetime.timedelta(seconds=straggler_delay) -# txs = get_status_tx(StatusEnum.SENT, before) -# if len(txs) == 0: -# logg.debug('no straggler txs found') -# return -# txs = list(txs.keys()) -# logg.debug('straggler txs {} chain {}'.format(signed_txs, chain_str)) -# s_send = celery.signature( -# 'cic_eth.eth.resend_with_higher_gas', -# [ -# txs, -# chain_str, -# ], -# queue=queue, -# ) -# s_send.apply_async() - - -class RetrySyncer(Syncer): - - def __init__(self, chain_spec, stalled_grace_seconds, failed_grace_seconds=None, final_func=None): - self.chain_spec = chain_spec - if failed_grace_seconds == None: - failed_grace_seconds = stalled_grace_seconds - self.stalled_grace_seconds = stalled_grace_seconds - self.failed_grace_seconds = failed_grace_seconds - self.final_func = final_func - - - def get(self): -# before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.failed_grace_seconds) -# failed_txs = get_status_tx( -# StatusEnum.SENDFAIL.value, -# before=before, -# ) - before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.stalled_grace_seconds) - stalled_txs = get_status_tx( - StatusBits.IN_NETWORK.value, - not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE, - before=before, - ) - # return list(failed_txs.keys()) + list(stalled_txs.keys()) - return stalled_txs - - def process(self, conn, ref): - logg.debug('tx {}'.format(ref)) - for f in self.filter: - f(conn, ref, None, str(self.chain_spec)) - - - - def loop(self, interval): - while self.running and 
Syncer.running_global: - rpc = RPCConnection.connect(self.chain_spec, 'default') - for tx in self.get(): - self.process(rpc, tx) - if self.final_func != None: - self.final_func(rpc, self.chain_spec) - time.sleep(interval) def main(): + conn = RPCConnection.connect(chain_spec, 'default') - syncer = RetrySyncer(chain_spec, straggler_delay, final_func=dispatch) - syncer.filter.append(sendfail_filter) - syncer.loop(float(straggler_delay)) + straggler_delay = int(config.get('CIC_TX_RETRY_DELAY')) + loop_interval = config.get('SYNCER_LOOP_INTERVAL') + if loop_interval == None: + stat = init_chain_stat(conn) + loop_interval = stat.block_average() + + syncer = RetrySyncer(conn, chain_spec, straggler_delay, batch_size=config.get('_BATCH_SIZE')) + syncer.backend.set(0, 0) + fltr = StragglerFilter(chain_spec, queue=queue) + syncer.add_filter(fltr) + syncer.loop(int(loop_interval), conn) if __name__ == '__main__': diff --git a/apps/cic-eth/cic_eth/runnable/daemons/tasker.py b/apps/cic-eth/cic_eth/runnable/daemons/tasker.py index 2c679524..8a4a47f5 100644 --- a/apps/cic-eth/cic_eth/runnable/daemons/tasker.py +++ b/apps/cic-eth/cic_eth/runnable/daemons/tasker.py @@ -14,25 +14,42 @@ import confini from chainlib.connection import RPCConnection from chainlib.eth.connection import EthUnixSignerConnection from chainlib.chain import ChainSpec +from chainqueue.db.models.otx import Otx # local imports -from cic_eth_registry import CICRegistry - -from cic_eth.eth import erc20 -from cic_eth.eth import tx -from cic_eth.eth import account -from cic_eth.admin import debug -from cic_eth.admin import ctrl -from cic_eth.queue import tx -from cic_eth.queue import balance -from cic_eth.callbacks import Callback -from cic_eth.callbacks import http -from cic_eth.callbacks import tcp -from cic_eth.callbacks import redis +from cic_eth.eth import ( + erc20, + tx, + account, + nonce, + gas, + ) +from cic_eth.admin import ( + debug, + ctrl, + ) +from cic_eth.queue import ( + query, + balance, + state, + tx, + lock, + time, + ) +from cic_eth.callbacks import ( + Callback, + http, + #tcp, + redis, + ) from cic_eth.db.models.base import SessionBase -from cic_eth.db.models.otx import Otx from cic_eth.db import dsn_from_config from cic_eth.ext import tx +from cic_eth.registry import ( + connect as connect_registry, + connect_declarator, + connect_token_registry, + ) logging.basicConfig(level=logging.WARNING) logg = logging.getLogger() @@ -74,7 +91,7 @@ logg.debug('config loaded from {}:\n{}'.format(args.c, config)) # connect to database dsn = dsn_from_config(config) -SessionBase.connect(dsn, pool_size=50, debug=config.true('DATABASE_DEBUG')) +SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG')) # verify database connection with minimal sanity query session = SessionBase.create_session() @@ -121,8 +138,6 @@ RPCConnection.register_location(config.get('SIGNER_SOCKET_PATH'), chain_spec, 's Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS') -CICRegistry.address = config.get('CIC_REGISTRY_ADDRESS') - def main(): argv = ['worker'] @@ -145,8 +160,8 @@ def main(): # Callback.ssl_ca_file = config.get('SSL_CA_FILE') rpc = RPCConnection.connect(chain_spec, 'default') - registry = CICRegistry(chain_spec, rpc) - registry_address = registry.by_name('ContractRegistry') + + connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS')) trusted_addresses_src = config.get('CIC_TRUST_ADDRESS') if trusted_addresses_src == None: @@ -155,9 +170,16 @@ def main(): trusted_addresses = 
trusted_addresses_src.split(',') for address in trusted_addresses: logg.info('using trusted address {}'.format(address)) + connect_declarator(rpc, chain_spec, trusted_addresses) + connect_token_registry(rpc, chain_spec) current_app.worker_main(argv) +@celery.signals.eventlet_pool_postshutdown.connect +def shutdown(sender=None, headers=None, body=None, **kwargs): + logg.warning('in shudown event hook') + + if __name__ == '__main__': main() diff --git a/apps/cic-eth/cic_eth/runnable/daemons/tracker.py b/apps/cic-eth/cic_eth/runnable/daemons/tracker.py index f9f91629..653dd8ae 100644 --- a/apps/cic-eth/cic_eth/runnable/daemons/tracker.py +++ b/apps/cic-eth/cic_eth/runnable/daemons/tracker.py @@ -7,7 +7,7 @@ import argparse import sys import re -# third-party imports +# external imports import confini import celery import rlp @@ -42,6 +42,7 @@ from cic_eth.runnable.daemons.filters import ( RegistrationFilter, TransferAuthFilter, ) +from cic_eth.stat import init_chain_stat script_dir = os.path.realpath(os.path.dirname(__file__)) @@ -66,7 +67,6 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) #RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default') cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER')) - def main(): # connect to celery celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) @@ -76,7 +76,13 @@ def main(): o = block_latest() r = rpc.do(o) - block_offset = int(strip_0x(r), 16) + 1 + block_current = int(r, 16) + block_offset = block_current + 1 + + loop_interval = config.get('SYNCER_LOOP_INTERVAL') + if loop_interval == None: + stat = init_chain_stat(rpc, block_start=block_current) + loop_interval = stat.block_average() logg.debug('starting at block {}'.format(block_offset)) @@ -140,7 +146,8 @@ def main(): for cf in callback_filters: syncer.add_filter(cf) - r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc) + #r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc) + r = syncer.loop(int(loop_interval), rpc) sys.stderr.write("sync {} done at block {}\n".format(syncer, r)) i += 1 diff --git a/apps/cic-eth/cic_eth/runnable/resend.py b/apps/cic-eth/cic_eth/runnable/resend.py index 77faf95a..7198b8ac 100644 --- a/apps/cic-eth/cic_eth/runnable/resend.py +++ b/apps/cic-eth/cic_eth/runnable/resend.py @@ -7,13 +7,10 @@ import os # third-party imports import celery import confini -import web3 -from cic_registry import CICRegistry -from cic_registry.chain import ChainSpec -from cic_registry.chain import ChainRegistry +from chainlib.chain import ChainSpec +from chainlib.eth.connection import EthHTTPConnection # local imports -from cic_eth.eth.rpc import RpcClient from cic_eth.api.api_admin import AdminApi logging.basicConfig(level=logging.WARNING) @@ -55,41 +52,20 @@ args_override = { config.censor('PASSWORD', 'DATABASE') config.censor('PASSWORD', 'SSL') logg.debug('config loaded from {}:\n{}'.format(config_dir, config)) +config.add(args.tx_hash, '_TX_HASH', True) +config.add(args.unlock, '_UNLOCK', True) chain_spec = ChainSpec.from_chain_str(args.i) -chain_str = str(chain_spec) -re_websocket = re.compile('^wss?://') -re_http = re.compile('^https?://') -blockchain_provider = config.get('ETH_PROVIDER') -if re.match(re_websocket, blockchain_provider) != None: - blockchain_provider = web3.Web3.WebsocketProvider(blockchain_provider) -elif re.match(re_http, blockchain_provider) != None: - blockchain_provider = web3.Web3.HTTPProvider(blockchain_provider) -else: - raise ValueError('unknown 
provider url {}'.format(blockchain_provider)) - -def web3_constructor(): - w3 = web3.Web3(blockchain_provider) - return (blockchain_provider, w3) -RpcClient.set_constructor(web3_constructor) +rpc = EthHTTPConnection(config.get('ETH_PROVIDER')) celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) -c = RpcClient(chain_spec) - -CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec) -chain_registry = ChainRegistry(chain_spec) -CICRegistry.add_chain_registry(chain_registry) -CICRegistry.add_path(config.get('ETH_ABI_DIR')) -CICRegistry.load_for(chain_spec) - - def main(): - api = AdminApi(c) + api = AdminApi(rpc) tx_details = api.tx(chain_spec, args.tx_hash) - t = api.resend(args.tx_hash, chain_str, unlock=True) - + t = api.resend(args.tx_hash, chain_spec, unlock=config.get('_UNLOCK')) + print(t.get_leaf()) if __name__ == '__main__': main() diff --git a/apps/cic-eth/cic_eth/runnable/transfer.py b/apps/cic-eth/cic_eth/runnable/transfer.py new file mode 100644 index 00000000..6f408785 --- /dev/null +++ b/apps/cic-eth/cic_eth/runnable/transfer.py @@ -0,0 +1,100 @@ +#!/usr/bin/python +import sys +import os +import logging +import uuid +import json +import argparse + +# external imports +import celery +import confini +import redis +from xdg.BaseDirectory import xdg_config_home +from chainlib.eth.address import to_checksum_address + +# local imports +from cic_eth.api import Api + +logging.basicConfig(level=logging.WARNING) +logg = logging.getLogger('create_account_script') +logging.getLogger('confini').setLevel(logging.WARNING) +logging.getLogger('gnupg').setLevel(logging.WARNING) + +default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic') + +argparser = argparse.ArgumentParser() +argparser.add_argument('--no-register', dest='no_register', action='store_true', help='Do not register new account in on-chain accounts index') +argparser.add_argument('-c', type=str, default=default_config_dir, help='config file') +argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec') +argparser.add_argument('--token-symbol', dest='token_symbol', type=str, help='Symbol of token to transfer') +argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission') +argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission') +argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback') +argparser.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback') +argparser.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback') +argparser.add_argument('--timeout', default=20.0, type=float, help='Callback timeout') +argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue') +argparser.add_argument('-v', action='store_true', help='Be verbose') +argparser.add_argument('-vv', action='store_true', help='Be more verbose') +argparser.add_argument('sender', type=str, help='Transaction sender') +argparser.add_argument('recipient', type=str, help='Transaction recipient') +argparser.add_argument('value', type=int, help='Transaction value with decimals') +args = argparser.parse_args() + +if args.vv: + logg.setLevel(logging.DEBUG) +if args.v: + logg.setLevel(logging.INFO) + +config_dir = args.c +config = 
confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX')) +config.process() +args_override = { + 'CIC_CHAIN_SPEC': getattr(args, 'i'), + 'REDIS_HOST': getattr(args, 'redis_host'), + 'REDIS_PORT': getattr(args, 'redis_port'), + 'REDIS_DB': getattr(args, 'redis_db'), + } +config.dict_override(args_override, 'cli') +config.add(to_checksum_address(args.sender), '_SENDER', True) +config.add(to_checksum_address(args.recipient), '_RECIPIENT', True) +config.add(args.value, '_VALUE', True) +config.add(args.token_symbol, '_SYMBOL', True) +if config.get('_SYMBOL') == None: + raise ValueError('gas transfers not yet supported; token symbol required') +celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) + + +def main(): + redis_host = config.get('REDIS_HOST') + redis_port = config.get('REDIS_PORT') + redis_db = config.get('REDIS_DB') + redis_channel = str(uuid.uuid4()) + r = redis.Redis(redis_host, redis_port, redis_db) + + ps = r.pubsub() + ps.subscribe(redis_channel) + ps.get_message() + + api = Api( + config.get('CIC_CHAIN_SPEC'), + queue=args.q, + callback_param='{}:{}:{}:{}'.format(args.redis_host_callback, args.redis_port_callback, redis_db, redis_channel), + callback_task='cic_eth.callbacks.redis.redis', + callback_queue=args.q, + ) + + #register = not args.no_register + #logg.debug('register {}'.format(register)) + #t = api.create_account(register=register) + t = api.transfer(config.get('_SENDER'), config.get('_RECIPIENT'), config.get('_VALUE'), config.get('_SYMBOL')) + + ps.get_message() + o = ps.get_message(timeout=args.timeout) + m = json.loads(o['data']) + print(m['result']) + + +if __name__ == '__main__': + main() diff --git a/apps/cic-eth/cic_eth/runnable/view.py b/apps/cic-eth/cic_eth/runnable/view.py index 66e08a01..50adce37 100644 --- a/apps/cic-eth/cic_eth/runnable/view.py +++ b/apps/cic-eth/cic_eth/runnable/view.py @@ -14,8 +14,6 @@ import datetime # external imports import confini import celery -from cic_eth_registry import CICRegistry -from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup from chainlib.chain import ChainSpec from chainlib.eth.connection import EthHTTPConnection from hexathon import add_0x @@ -27,6 +25,7 @@ from cic_eth.db.enum import ( status_str, LockEnum, ) +from cic_eth.registry import connect as connect_registry logging.basicConfig(level=logging.WARNING) logg = logging.getLogger() @@ -147,28 +146,19 @@ def render_lock(o, **kwargs): return s -def connect_registry(registry_address, chain_spec, rpc): - CICRegistry.address = registry_address - registry = CICRegistry(chain_spec, rpc) - declarator_address = registry.by_name('AddressDeclarator') - lookup = AddressDeclaratorLookup(declarator_address, trusted_addresses) - registry.add_lookup(lookup) - return registry - - # TODO: move each command to submodule def main(): txs = [] renderer = render_tx if len(config.get('_QUERY')) > 66: - registry = connect_registry(registry_address, chain_spec, rpc) + registry = connect_registry(rpc, chain_spec, registry_address) admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'), registry=registry, renderer=renderer) elif len(config.get('_QUERY')) > 42: - registry = connect_registry(registry_address, chain_spec, rpc) + registry = connect_registry(rpc, chain_spec, registry_address) admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'), registry=registry, renderer=renderer) elif len(config.get('_QUERY')) == 42: - registry = connect_registry(registry_address, chain_spec, rpc) + registry = connect_registry(rpc, 
chain_spec, registry_address) txs = admin_api.account(chain_spec, config.get('_QUERY'), include_recipient=False, renderer=render_account) renderer = render_account elif len(config.get('_QUERY')) >= 4 and config.get('_QUERY')[:4] == 'lock': diff --git a/apps/cic-eth/cic_eth/stat.py b/apps/cic-eth/cic_eth/stat.py new file mode 100644 index 00000000..d3a07fdb --- /dev/null +++ b/apps/cic-eth/cic_eth/stat.py @@ -0,0 +1,33 @@ +# standard imports +import logging + +# external imports +from chainlib.stat import ChainStat +from chainlib.eth.block import ( + block_latest, + block_by_number, + Block, + ) + +logg = logging.getLogger().getChild(__name__) + +BLOCK_SAMPLES = 10 + + +def init_chain_stat(rpc, block_start=0): + stat = ChainStat() + + if block_start == 0: + o = block_latest() + r = rpc.do(o) + block_start = int(r, 16) + + for i in range(BLOCK_SAMPLES): + o = block_by_number(block_start-10+i) + block_src = rpc.do(o) + logg.debug('block {}'.format(block_src)) + block = Block(block_src) + stat.block_apply(block) + + logg.debug('calculated block time {} from {} block samples'.format(stat.block_average(), BLOCK_SAMPLES)) + return stat diff --git a/apps/cic-eth/cic_eth/sync/retry.py b/apps/cic-eth/cic_eth/sync/retry.py new file mode 100644 index 00000000..b4cefca6 --- /dev/null +++ b/apps/cic-eth/cic_eth/sync/retry.py @@ -0,0 +1,92 @@ +# standard imports +import logging +import datetime + +# external imports +from chainsyncer.driver import HeadSyncer +from chainsyncer.backend import MemBackend +from chainsyncer.error import NoBlockForYou +from chainlib.eth.block import ( + block_by_number, + block_latest, + Block, + ) +from chainlib.eth.tx import ( + unpack, + Tx, + ) +from cic_eth.queue.query import get_status_tx +from chainqueue.db.enum import StatusBits +from hexathon import strip_0x + +# local imports +from cic_eth.db import SessionBase + +logg = logging.getLogger() + + +class DbSessionMemBackend(MemBackend): + + def connect(self): + self.db_session = SessionBase.create_session() + return self.db_session + + + def disconnect(self): + self.db_session.close() + self.db_session = None + + +class RetrySyncer(HeadSyncer): + + def __init__(self, conn, chain_spec, stalled_grace_seconds, batch_size=50, failed_grace_seconds=None): + backend = DbSessionMemBackend(chain_spec, None) + super(RetrySyncer, self).__init__(backend) + self.chain_spec = chain_spec + if failed_grace_seconds == None: + failed_grace_seconds = stalled_grace_seconds + self.stalled_grace_seconds = stalled_grace_seconds + self.failed_grace_seconds = failed_grace_seconds + self.batch_size = batch_size + self.conn = conn + + + def get(self, conn): + o = block_latest() + r = conn.do(o) + (pair, flags) = self.backend.get() + n = int(r, 16) + if n == pair[0]: + raise NoBlockForYou('block {} already checked'.format(n)) + o = block_by_number(n) + r = conn.do(o) + b = Block(r) + return b + + + def process(self, conn, block): + before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.stalled_grace_seconds) + session = SessionBase.create_session() + stalled_txs = get_status_tx( + self.chain_spec, + StatusBits.IN_NETWORK.value, + not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE, + before=before, + limit=self.batch_size, + session=session, + ) + session.close() +# stalled_txs = get_upcoming_tx( +# status=StatusBits.IN_NETWORK.value, +# not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE, +# before=before, +# limit=self.batch_size, +# ) + for tx_signed_raw_hex in stalled_txs.values(): + 
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex)) + tx_src = unpack(tx_signed_raw_bytes, self.chain_spec) + tx = Tx(tx_src) + self.filter.apply(self.conn, block, tx) + self.backend.set(block.number, 0) + + diff --git a/apps/cic-eth/cic_eth/version.py b/apps/cic-eth/cic_eth/version.py index 0d7e8016..3fa1d970 100644 --- a/apps/cic-eth/cic_eth/version.py +++ b/apps/cic-eth/cic_eth/version.py @@ -8,9 +8,9 @@ import semver version = ( 0, - 10, - 1, - 'beta.1', + 11, + 0, + 'beta.3', ) version_object = semver.VersionInfo( diff --git a/apps/cic-eth/config/database.ini b/apps/cic-eth/config/database.ini index 50a2dd0d..4517445c 100644 --- a/apps/cic-eth/config/database.ini +++ b/apps/cic-eth/config/database.ini @@ -6,4 +6,5 @@ HOST=localhost PORT=5432 ENGINE=postgresql DRIVER=psycopg2 +POOL_SIZE=50 DEBUG=0 diff --git a/apps/cic-eth/config/docker/cic.ini b/apps/cic-eth/config/docker/cic.ini index 251a6be3..50032aa9 100644 --- a/apps/cic-eth/config/docker/cic.ini +++ b/apps/cic-eth/config/docker/cic.ini @@ -2,3 +2,4 @@ registry_address = chain_spec = evm:bloxberg:8996 trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C +tx_retry_delay = 20 diff --git a/apps/cic-eth/config/docker/database.ini b/apps/cic-eth/config/docker/database.ini index a5f2be48..6448be0e 100644 --- a/apps/cic-eth/config/docker/database.ini +++ b/apps/cic-eth/config/docker/database.ini @@ -6,4 +6,5 @@ HOST=localhost PORT=63432 ENGINE=postgresql DRIVER=psycopg2 +POOL_SIZE=50 DEBUG=0 diff --git a/apps/cic-eth/config/docker/syncer.ini b/apps/cic-eth/config/docker/syncer.ini index 97236743..9c452999 100644 --- a/apps/cic-eth/config/docker/syncer.ini +++ b/apps/cic-eth/config/docker/syncer.ini @@ -1,2 +1,2 @@ [SYNCER] -loop_interval = 1 +loop_interval = diff --git a/apps/cic-eth/config/syncer.ini b/apps/cic-eth/config/syncer.ini index 97236743..9c452999 100644 --- a/apps/cic-eth/config/syncer.ini +++ b/apps/cic-eth/config/syncer.ini @@ -1,2 +1,2 @@ [SYNCER] -loop_interval = 1 +loop_interval = diff --git a/apps/cic-eth/docker/Dockerfile b/apps/cic-eth/docker/Dockerfile index 86bde4c5..e5bdd2f9 100644 --- a/apps/cic-eth/docker/Dockerfile +++ b/apps/cic-eth/docker/Dockerfile @@ -29,7 +29,7 @@ RUN /usr/local/bin/python -m pip install --upgrade pip # python merge_requirements.py | tee merged_requirements.txt #RUN cd cic-base && \ # pip install $pip_extra_index_url_flag -r ./merged_requirements.txt -RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a44 +RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62 COPY cic-eth/scripts/ scripts/ COPY cic-eth/setup.cfg cic-eth/setup.py ./ diff --git a/apps/cic-eth/requirements.txt b/apps/cic-eth/requirements.txt index a7a04da7..92ce8c4f 100644 --- a/apps/cic-eth/requirements.txt +++ b/apps/cic-eth/requirements.txt @@ -1,24 +1,25 @@ -cic-base~=0.1.2a46 +cic-base~=0.1.2a67 celery==4.4.7 -crypto-dev-signer~=0.4.14a16 +crypto-dev-signer~=0.4.14a17 confini~=0.3.6rc3 -cic-eth-registry~=0.5.4a7 +cic-eth-registry~=0.5.4a13 #cic-bancor~=0.0.6 redis==3.5.3 alembic==1.4.2 websockets==8.1 requests~=2.24.0 -eth_accounts_index~=0.0.11a3 -erc20-transfer-authorization~=0.3.1a2 +eth_accounts_index~=0.0.11a7 +erc20-transfer-authorization~=0.3.1a3 #simple-rlp==0.1.2 uWSGI==2.0.19.1 semver==2.13.0 websocket-client==0.57.0 moolb~=0.1.1b2 -eth-address-index~=0.1.1a5 -chainlib~=0.0.1a42 +eth-address-index~=0.1.1a7 +chainlib~=0.0.2a5 hexathon~=0.0.1a7 -chainsyncer~=0.0.1a20 +chainsyncer[sql]~=0.0.2a1 +chainqueue~=0.0.1a7 pysha3==1.0.2 coincurve==15.0.0 
-sarafu-faucet==0.0.2a13 +sarafu-faucet~=0.0.2a19 diff --git a/apps/cic-eth/setup.cfg b/apps/cic-eth/setup.cfg index 8d7b8b94..d2895943 100644 --- a/apps/cic-eth/setup.cfg +++ b/apps/cic-eth/setup.cfg @@ -37,6 +37,7 @@ packages = cic_eth.runnable.daemons cic_eth.runnable.daemons.filters cic_eth.callbacks + cic_eth.sync scripts = ./scripts/migrate.py @@ -54,3 +55,4 @@ console_scripts = # TODO: Merge this with ctl when subcmds sorted to submodules cic-eth-tag = cic_eth.runnable.tag:main cic-eth-resend = cic_eth.runnable.resend:main + cic-eth-transfer = cic_eth.runnable.transfer:main diff --git a/apps/cic-eth/tests/filters/test_straggler_filter.py b/apps/cic-eth/tests/filters/test_straggler_filter.py new file mode 100644 index 00000000..27dc0b75 --- /dev/null +++ b/apps/cic-eth/tests/filters/test_straggler_filter.py @@ -0,0 +1,89 @@ +# external imports +from chainlib.connection import RPCConnection +from chainlib.eth.nonce import OverrideNonceOracle +from chainlib.eth.tx import ( + TxFormat, + unpack, + Tx, + ) +from chainlib.eth.gas import ( + Gas, + OverrideGasOracle, + ) +from chainlib.eth.block import ( + block_latest, + block_by_number, + Block, + ) +from chainqueue.db.models.otx import Otx +from chainqueue.db.enum import StatusBits +from chainqueue.tx import create as queue_create +from chainqueue.state import ( + set_reserved, + set_ready, + set_sent, + ) + +from hexathon import strip_0x + +# local imports +from cic_eth.runnable.daemons.filters.straggler import StragglerFilter +from cic_eth.eth.gas import cache_gas_data + + +def test_tx( + default_chain_spec, + init_database, + eth_rpc, + eth_signer, + agent_roles, + celery_session_worker, + ): + + rpc = RPCConnection.connect(default_chain_spec, 'default') + nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42) + gas_oracle = OverrideGasOracle(price=1000000000, limit=21000) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) + (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED) + queue_create( + default_chain_spec, + 42, + agent_roles['ALICE'], + tx_hash_hex, + tx_signed_raw_hex, + session=init_database, + ) + cache_gas_data( + tx_hash_hex, + tx_signed_raw_hex, + default_chain_spec.asdict(), + ) + + set_ready(default_chain_spec, tx_hash_hex, session=init_database) + set_reserved(default_chain_spec, tx_hash_hex, session=init_database) + set_sent(default_chain_spec, tx_hash_hex, session=init_database) + + fltr = StragglerFilter(default_chain_spec, None) + + o = block_latest() + r = eth_rpc.do(o) + o = block_by_number(r, include_tx=False) + r = eth_rpc.do(o) + block = Block(r) + block.txs = [tx_hash_hex] + + tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex)) + tx_src = unpack(tx_signed_raw_bytes, default_chain_spec) + tx = Tx(tx_src, block=block) + t = fltr.filter(None, block, tx, db_session=init_database) + tx_hash_hex_successor = t.get_leaf() + + assert t.successful() + assert tx_hash_hex_successor != tx_hash_hex + + otx = Otx.load(tx_hash_hex, session=init_database) + assert otx.status & StatusBits.OBSOLETE > 0 + assert otx.status & (StatusBits.FINAL | StatusBits.QUEUED | StatusBits.RESERVED) == 0 + + otx = Otx.load(tx_hash_hex_successor, session=init_database) + assert otx.status == StatusBits.QUEUED diff --git a/apps/cic-eth/tests/filters/test_tx_filter.py b/apps/cic-eth/tests/filters/test_tx_filter.py new file mode 100644 index 00000000..7515401e --- /dev/null +++ 
b/apps/cic-eth/tests/filters/test_tx_filter.py @@ -0,0 +1,110 @@ +# external imports +from chainlib.connection import RPCConnection +from chainlib.eth.nonce import OverrideNonceOracle +from chainlib.eth.tx import ( + TxFormat, + unpack, + Tx, + ) +from chainlib.eth.gas import ( + Gas, + OverrideGasOracle, + ) +from chainlib.eth.block import ( + block_latest, + block_by_number, + Block, + ) +from chainqueue.db.models.otx import Otx +from chainqueue.db.enum import StatusBits +from chainqueue.tx import create as queue_create +from chainqueue.state import ( + set_reserved, + set_ready, + set_sent, + ) + +from hexathon import strip_0x + +# local imports +from cic_eth.runnable.daemons.filters.tx import TxFilter +from cic_eth.eth.gas import cache_gas_data + + +def test_tx( + default_chain_spec, + init_database, + eth_rpc, + eth_signer, + agent_roles, + celery_session_worker, + ): + + rpc = RPCConnection.connect(default_chain_spec, 'default') + nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42) + gas_oracle = OverrideGasOracle(price=1000000000, limit=21000) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) + (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED) + queue_create( + default_chain_spec, + 42, + agent_roles['ALICE'], + tx_hash_hex, + tx_signed_raw_hex, + session=init_database, + ) + cache_gas_data( + tx_hash_hex, + tx_signed_raw_hex, + default_chain_spec.asdict(), + ) + + set_ready(default_chain_spec, tx_hash_hex, session=init_database) + set_reserved(default_chain_spec, tx_hash_hex, session=init_database) + set_sent(default_chain_spec, tx_hash_hex, session=init_database) + tx_hash_hex_orig = tx_hash_hex + + gas_oracle = OverrideGasOracle(price=1100000000, limit=21000) + (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED) + queue_create( + default_chain_spec, + 42, + agent_roles['ALICE'], + tx_hash_hex, + tx_signed_raw_hex, + session=init_database, + ) + cache_gas_data( + tx_hash_hex, + tx_signed_raw_hex, + default_chain_spec.asdict(), + ) + + set_ready(default_chain_spec, tx_hash_hex, session=init_database) + set_reserved(default_chain_spec, tx_hash_hex, session=init_database) + set_sent(default_chain_spec, tx_hash_hex, session=init_database) + + fltr = TxFilter(default_chain_spec, None) + + o = block_latest() + r = eth_rpc.do(o) + o = block_by_number(r, include_tx=False) + r = eth_rpc.do(o) + block = Block(r) + block.txs = [tx_hash_hex] + + tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex)) + tx_src = unpack(tx_signed_raw_bytes, default_chain_spec) + tx = Tx(tx_src, block=block) + t = fltr.filter(eth_rpc, block, tx, db_session=init_database) + + t.get() + assert t.successful() + + otx = Otx.load(tx_hash_hex_orig, session=init_database) + assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE + assert otx.status & StatusBits.FINAL == StatusBits.FINAL + + otx = Otx.load(tx_hash_hex, session=init_database) + assert otx.status & StatusBits.OBSOLETE == 0 + assert otx.status & StatusBits.FINAL == StatusBits.FINAL diff --git a/apps/cic-eth/tests/fixtures_celery.py b/apps/cic-eth/tests/fixtures_celery.py index 47de8801..33cadc31 100644 --- a/apps/cic-eth/tests/fixtures_celery.py +++ b/apps/cic-eth/tests/fixtures_celery.py @@ -27,6 +27,9 @@ def celery_includes(): 'cic_eth.eth.tx', 'cic_eth.ext.tx', 'cic_eth.queue.tx', + 'cic_eth.queue.lock', + 
'cic_eth.queue.query', + 'cic_eth.queue.state', 'cic_eth.queue.balance', 'cic_eth.admin.ctrl', 'cic_eth.admin.nonce', diff --git a/apps/cic-eth/tests/task/api/test_admin.py b/apps/cic-eth/tests/task/api/test_admin.py index 411eabcb..ca50f107 100644 --- a/apps/cic-eth/tests/task/api/test_admin.py +++ b/apps/cic-eth/tests/task/api/test_admin.py @@ -16,27 +16,22 @@ from hexathon import ( strip_0x, add_0x, ) +from chainqueue.db.models.otx import Otx +from chainqueue.db.models.tx import TxCache +from chainqueue.db.enum import ( + StatusEnum, + StatusBits, + status_str, + ) +from chainqueue.query import get_tx # local imports from cic_eth.api import AdminApi from cic_eth.db.models.role import AccountRole -from cic_eth.db.models.otx import Otx -from cic_eth.db.models.tx import TxCache -from cic_eth.db.enum import ( - StatusEnum, - StatusBits, - status_str, - LockEnum, - ) +from cic_eth.db.enum import LockEnum from cic_eth.error import InitializationError -from cic_eth.eth.tx import ( - cache_gas_data, - ) -#from cic_eth.eth.gas import cache_gas_tx -from cic_eth.queue.tx import ( - create as queue_create, - get_tx, - ) +from cic_eth.eth.gas import cache_gas_data +from cic_eth.queue.tx import queue_create logg = logging.getLogger() @@ -56,7 +51,7 @@ logg = logging.getLogger() # gas_provider = c.gas_provider() # # s_nonce = celery.signature( -# 'cic_eth.eth.tx.reserve_nonce', +# 'cic_eth.eth.nonce.reserve_nonce', # [ # init_w3.eth.accounts[0], # gas_provider, @@ -64,7 +59,7 @@ logg = logging.getLogger() # queue=None, # ) # s_refill = celery.signature( -# 'cic_eth.eth.tx.refill_gas', +# 'cic_eth.eth.gas.refill_gas', # [ # chain_str, # ], @@ -83,7 +78,7 @@ logg = logging.getLogger() # o = q.first() # tx_raw = o.signed_tx # -# tx_dict = unpack_signed_raw_tx(bytes.fromhex(tx_raw[2:]), default_chain_spec.chain_id()) +# tx_dict = unpack(bytes.fromhex(tx_raw), default_chain_spec) # gas_price_before = tx_dict['gasPrice'] # # s = celery.signature( @@ -109,7 +104,7 @@ logg = logging.getLogger() # # tx_raw_new = get_tx(tx_hash_new_hex) # logg.debug('get {}'.format(tx_raw_new)) -# tx_dict_new = unpack_signed_raw_tx(bytes.fromhex(tx_raw_new['signed_tx'][2:]), default_chain_spec.chain_id()) +# tx_dict_new = unpack(bytes.fromhex(tx_raw_new['signed_tx']), default_chain_spec) # assert tx_hash_new_hex != tx_dict['hash'] # assert tx_dict_new['gasPrice'] > gas_price_before # @@ -133,7 +128,7 @@ logg = logging.getLogger() # sigs = [] # for i in range(5): # s = celery.signature( -# 'cic_eth.eth.tx.refill_gas', +# 'cic_eth.eth.gas.refill_gas', # [ # eth_empty_accounts[i], # chain_str, @@ -278,15 +273,14 @@ def test_tx( eth_signer, agent_roles, contract_roles, - celery_session_worker, + celery_worker, ): - chain_id = default_chain_spec.chain_id() nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=chain_id) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 1024, tx_format=TxFormat.RLP_SIGNED) - tx = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), chain_id) - queue_create(tx['nonce'], agent_roles['ALICE'], tx_hash_hex, tx_signed_raw_hex, default_chain_spec, session=init_database) + tx = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), default_chain_spec) + queue_create(default_chain_spec, tx['nonce'], agent_roles['ALICE'], tx_hash_hex, tx_signed_raw_hex) cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) api = 
AdminApi(eth_rpc, queue=None, call_address=contract_roles['DEFAULT']) diff --git a/apps/cic-eth/tests/task/api/test_app.py b/apps/cic-eth/tests/task/api/test_app.py index 5e1733a2..1d3e64aa 100644 --- a/apps/cic-eth/tests/task/api/test_app.py +++ b/apps/cic-eth/tests/task/api/test_app.py @@ -38,11 +38,12 @@ def test_transfer_api( agent_roles, cic_registry, register_tokens, + register_lookups, celery_session_worker, ): #token = CICRegistry.get_address(default_chain_spec, bancor_tokens[0]) - foo_token_cache = ERC20Token(eth_rpc, foo_token) + foo_token_cache = ERC20Token(default_chain_spec, eth_rpc, foo_token) api = Api(str(default_chain_spec), callback_param='transfer', callback_task='cic_eth.callbacks.noop.noop', queue=None) t = api.transfer(custodial_roles['FOO_TOKEN_GIFTER'], agent_roles['ALICE'], 1024, foo_token_cache.symbol) diff --git a/apps/cic-eth/tests/task/api/test_list.py b/apps/cic-eth/tests/task/api/test_list.py index 27f0bdf2..8f513ff4 100644 --- a/apps/cic-eth/tests/task/api/test_list.py +++ b/apps/cic-eth/tests/task/api/test_list.py @@ -1,23 +1,29 @@ # standard imports import logging -# local imports +# external imports +import pytest from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.erc20 import ERC20 from chainlib.eth.tx import receipt + +# local imports from cic_eth.api.api_task import Api -from tests.mock.filter import ( - block_filter, - tx_filter, - ) from cic_eth.db.models.nonce import ( Nonce, NonceReservation, ) +# test imports +from tests.mock.filter import ( + block_filter, + tx_filter, + ) + logg = logging.getLogger() +@pytest.mark.xfail() def test_list_tx( default_chain_spec, init_database, @@ -29,11 +35,9 @@ def test_list_tx( foo_token, register_tokens, init_eth_tester, - celery_session_worker, + celery_worker, ): - chain_id = default_chain_spec.chain_id() - tx_hashes = [] # external tx @@ -53,7 +57,7 @@ def test_list_tx( init_database.commit() init_eth_tester.mine_blocks(13) - c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=chain_id) + c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) (tx_hash_hex, o) = c.transfer(foo_token, custodial_roles['FOO_TOKEN_GIFTER'], agent_roles['ALICE'], 1024) eth_rpc.do(o) o = receipt(tx_hash_hex) @@ -73,7 +77,7 @@ def test_list_tx( init_eth_tester.mine_blocks(13) nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) - c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=chain_id) + c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) (tx_hash_hex, o) = c.transfer(foo_token, agent_roles['ALICE'], agent_roles['BOB'], 256) eth_rpc.do(o) o = receipt(tx_hash_hex) @@ -110,7 +114,6 @@ def test_list_tx( r = t.get_leaf() assert t.successful() - assert len(r) == 3 logg.debug('rrrr {}'.format(r)) diff --git a/apps/cic-eth/tests/task/test_task_account.py b/apps/cic-eth/tests/task/test_task_account.py index d4e0ee6f..010f0e51 100644 --- a/apps/cic-eth/tests/task/test_task_account.py +++ b/apps/cic-eth/tests/task/test_task_account.py @@ -11,13 +11,12 @@ from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.tx import receipt from eth_accounts_index import AccountRegistry from hexathon import strip_0x +from chainqueue.db.enum import StatusEnum +from chainqueue.db.models.otx import Otx # local imports from cic_eth.error import OutOfGasError -from cic_eth.db.models.otx import Otx from cic_eth.db.models.base import SessionBase -from cic_eth.db.enum import StatusEnum -from cic_eth.db.enum import StatusEnum from cic_eth.db.models.nonce import 
Nonce from cic_eth.db.models.role import AccountRole @@ -74,9 +73,10 @@ def test_register_account( ): s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ eth_empty_accounts[0], + default_chain_spec.asdict(), custodial_roles['ACCOUNT_REGISTRY_WRITER'], ], queue=None, @@ -116,7 +116,7 @@ def test_register_account( init_eth_tester.mine_block() - c = AccountRegistry() + c = AccountRegistry(default_chain_spec) o = c.have(account_registry, eth_empty_accounts[0], sender_address=call_sender) r = eth_rpc.do(o) assert int(strip_0x(r), 16) == 1 @@ -160,7 +160,7 @@ def test_gift( ): nonce_oracle = RPCNonceOracle(contract_roles['ACCOUNT_REGISTRY_WRITER'], eth_rpc) - c = AccountRegistry(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=default_chain_spec.chain_id()) + c = AccountRegistry(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) (tx_hash_hex, o) = c.add(account_registry, contract_roles['ACCOUNT_REGISTRY_WRITER'], agent_roles['ALICE']) eth_rpc.do(o) o = receipt(tx_hash_hex) @@ -168,9 +168,10 @@ def test_gift( assert r['status'] == 1 s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ agent_roles['ALICE'], + default_chain_spec.asdict(), ], queue=None, ) diff --git a/apps/cic-eth/tests/task/test_task_erc20.py b/apps/cic-eth/tests/task/test_task_erc20.py index 048ed71b..c202bded 100644 --- a/apps/cic-eth/tests/task/test_task_erc20.py +++ b/apps/cic-eth/tests/task/test_task_erc20.py @@ -28,7 +28,7 @@ def test_otx_cache_transfer( celery_session_worker, ): nonce_oracle = RPCNonceOracle(token_roles['FOO_TOKEN_OWNER'], eth_rpc) - c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=default_chain_spec.chain_id()) + c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) transfer_value = 100 * (10**6) (tx_hash_hex, tx_signed_raw_hex) = c.transfer(foo_token, token_roles['FOO_TOKEN_OWNER'], agent_roles['ALICE'], transfer_value, tx_format=TxFormat.RLP_SIGNED) register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) @@ -59,7 +59,7 @@ def test_erc20_balance_task( ): nonce_oracle = RPCNonceOracle(token_roles['FOO_TOKEN_OWNER'], eth_rpc) - c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=default_chain_spec.chain_id()) + c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) transfer_value = 100 * (10**6) (tx_hash_hex, o) = c.transfer(foo_token, token_roles['FOO_TOKEN_OWNER'], agent_roles['ALICE'], transfer_value) eth_rpc.do(o) @@ -102,9 +102,10 @@ def test_erc20_transfer_task( transfer_value = 100 * (10 ** 6) s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ [token_object], + default_chain_spec.asdict(), custodial_roles['FOO_TOKEN_GIFTER'], ], queue=None, @@ -143,9 +144,10 @@ def test_erc20_approve_task( transfer_value = 100 * (10 ** 6) s_nonce = celery.signature( - 'cic_eth.eth.tx.reserve_nonce', + 'cic_eth.eth.nonce.reserve_nonce', [ [token_object], + default_chain_spec.asdict(), custodial_roles['FOO_TOKEN_GIFTER'], ], queue=None, diff --git a/apps/cic-eth/tests/task/test_task_tx.py b/apps/cic-eth/tests/task/test_task_tx.py index 2f223c54..939b800b 100644 --- a/apps/cic-eth/tests/task/test_task_tx.py +++ b/apps/cic-eth/tests/task/test_task_tx.py @@ -12,15 +12,16 @@ from chainlib.eth.tx import ( transaction, receipt, ) +from hexathon import strip_0x +from chainqueue.db.models.otx import Otx # local imports from cic_eth.queue.tx import 
register_tx -from cic_eth.eth.tx import cache_gas_data +from cic_eth.eth.gas import cache_gas_data logg = logging.getLogger() -@pytest.mark.skip() def test_tx_send( init_database, default_chain_spec, @@ -31,11 +32,9 @@ def test_tx_send( celery_session_worker, ): - chain_id = default_chain_spec.chain_id() nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=chain_id) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 1024, tx_format=TxFormat.RLP_SIGNED) - #unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), chain_id) register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) @@ -64,6 +63,45 @@ def test_sync_tx( default_chain_spec, eth_rpc, eth_signer, - celery_worker, + celery_session_worker, ): pass + + +def test_resend_with_higher_gas( + init_database, + default_chain_spec, + eth_rpc, + eth_signer, + agent_roles, + celery_session_worker, + ): + + nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) + (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 1024, tx_format=TxFormat.RLP_SIGNED) + register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) + cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) + tx_before = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), default_chain_spec) + + s = celery.signature( + 'cic_eth.eth.gas.resend_with_higher_gas', + [ + tx_hash_hex, + default_chain_spec.asdict(), + ], + queue=None, + ) + t = s.apply_async() + r = t.get_leaf() + + q = init_database.query(Otx) + q = q.filter(Otx.tx_hash==strip_0x(r)) + otx = q.first() + if otx == None: + raise NotLocalTxError(r) + + tx_after = unpack(bytes.fromhex(strip_0x(otx.signed_tx)), default_chain_spec) + logg.debug('gasprices before {} after {}'.format(tx_before['gasPrice'], tx_after['gasPrice'])) + assert tx_after['gasPrice'] > tx_before['gasPrice'] + diff --git a/apps/cic-eth/tests/unit/db/test_block_sync.py b/apps/cic-eth/tests/unit/db/test_block_sync.py deleted file mode 100644 index 87fdee4f..00000000 --- a/apps/cic-eth/tests/unit/db/test_block_sync.py +++ /dev/null @@ -1,29 +0,0 @@ -# standard imports -import logging - -# local imports -from cic_eth.db.models.otx import OtxSync - -logg = logging.getLogger() - - -def test_db_block_sync( - init_database, - ): - - s = OtxSync('eip155:8995:bloxberg') - - s.head(666, 12) - assert s.head() == (666, 12) - - s.session(42, 13) - assert s.session() == (42, 13) - - s.backlog(13, 2) - assert s.backlog() == (13, 2) - - assert not s.synced - - s.backlog(42, 13) - assert s.backlog() == (42, 13) - assert s.synced diff --git a/apps/cic-eth/tests/unit/db/test_db_convert_transfer.py b/apps/cic-eth/tests/unit/db/test_db_convert_transfer.py index 15e766db..bc3b6e5a 100644 --- a/apps/cic-eth/tests/unit/db/test_db_convert_transfer.py +++ b/apps/cic-eth/tests/unit/db/test_db_convert_transfer.py @@ -3,7 +3,7 @@ import logging import pytest -from cic_eth.db import TxConvertTransfer +from cic_eth.db.models.convert import TxConvertTransfer from cic_eth.db.error import UnknownConvertError logg = logging.getLogger() diff --git a/apps/cic-eth/tests/unit/db/test_otx.py b/apps/cic-eth/tests/unit/db/test_otx.py deleted file mode 
100644 index 00057a5e..00000000 --- a/apps/cic-eth/tests/unit/db/test_otx.py +++ /dev/null @@ -1,107 +0,0 @@ -# standard imports -import os -import logging - -# third-party imports -import pytest - -# local imports -from cic_eth.db.models.base import SessionBase -from cic_eth.db.models.otx import OtxStateLog -from cic_eth.db.models.otx import Otx -from cic_eth.db.enum import ( - StatusEnum, - StatusBits, - is_alive, - ) - -logg = logging.getLogger() - - -#def test_get( -# rpc_eth, -# rpc_signer, -# agent_roles, -# init_database, -# ): -# -# rpc = RPCConnection.connect(default_chain_spec, 'default') -# nonce_oracle = RPCNonceOracle(agent_roles['ALICE']) -# gas_oracle = RPCGasOracle(eth_rpc) -# c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) -# -# for i in range(10): -# -# (tx_hash_hex, tx_rpc) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6)), -# -# tx_def = { -# 'from': init_w3.eth.accounts[0], -# 'to': init_w3.eth.accounts[1], -# 'nonce': 0, -# 'value': 101, -# 'gasPrice': 2000000000, -# 'gas': 21000, -# 'data': '', -# 'chainId': 1, -# } -# -# session = init_database -# txs = [] -# for i in range(10): -# nonce = init_w3.eth.getTransactionCount(init_w3.eth.accounts[0], 'pending') -# tx_def['nonce'] = nonce -# tx = init_w3.eth.sign_transaction(tx_def) -# tx_hash = init_w3.eth.send_raw_transaction(tx['raw']) -# logg.debug('tx {}'.format(tx)) -# -# address = init_w3.eth.accounts[i%3] -# otx = Otx(int((i/3)+1), address, '0x'+tx_hash.hex(), tx['raw']) -# txs.append(otx) -# session.add(otx) -# session.flush() -# -# logg.debug(txs) -# session.commit() -# -# txs[0].status = 0 -# session.add(txs[0]) -# session.commit() -# session.close() -# -# get_txs = Otx.get() -# logg.debug(get_txs) - - -def test_state_log( - init_database, - ): - - Otx.tracing = True - - address = '0x' + os.urandom(20).hex() - tx_hash = '0x' + os.urandom(32).hex() - signed_tx = '0x' + os.urandom(128).hex() - otx = Otx.add(0, address, tx_hash, signed_tx, session=init_database) - - otx.waitforgas(session=init_database) - init_database.commit() - - otx.readysend(session=init_database) - init_database.commit() - - otx.sent(session=init_database) - init_database.commit() - - otx.success(1024, session=init_database) - init_database.commit() - - q = init_database.query(OtxStateLog) - q = q.filter(OtxStateLog.otx_id==otx.id) - q = q.order_by(OtxStateLog.date.asc()) - logs = q.all() - - assert logs[0].status == StatusEnum.PENDING - assert logs[1].status == StatusEnum.WAITFORGAS - assert logs[2].status & StatusBits.QUEUED - assert logs[3].status & StatusBits.IN_NETWORK - assert not is_alive(logs[4].status) diff --git a/apps/cic-eth/tests/unit/db/test_status.py b/apps/cic-eth/tests/unit/db/test_status.py deleted file mode 100644 index f40cd6d8..00000000 --- a/apps/cic-eth/tests/unit/db/test_status.py +++ /dev/null @@ -1,97 +0,0 @@ -# standard imports -import os - -# third-party imports -import pytest - -# local imports -from cic_eth.db.models.otx import Otx -from cic_eth.db.enum import ( - StatusEnum, - StatusBits, - is_alive, - ) - - -@pytest.fixture(scope='function') -def otx( - init_database, - ): - - bogus_hash = '0x' + os.urandom(32).hex() - bogus_address = '0x' + os.urandom(20).hex() - bogus_tx_raw = '0x' + os.urandom(128).hex() - return Otx(0, bogus_address, bogus_hash, bogus_tx_raw) - - -def test_status_chain_gas( - init_database, - otx, - ): - - otx.waitforgas(init_database) - otx.readysend(init_database) - otx.sent(init_database) - 
otx.success(1024, init_database) - assert not is_alive(otx.status) - - -def test_status_chain_straight_success( - init_database, - otx, - ): - - otx.readysend(init_database) - otx.sent(init_database) - otx.success(1024, init_database) - assert not is_alive(otx.status) - - -def test_status_chain_straight_revert( - init_database, - otx, - ): - - otx.readysend(init_database) - otx.sent(init_database) - otx.minefail(1024, init_database) - assert not is_alive(otx.status) - - -def test_status_chain_nodeerror( - init_database, - otx, - ): - - otx.readysend(init_database) - otx.sendfail(init_database) - otx.retry(init_database) - otx.sent(init_database) - otx.success(1024, init_database) - assert not is_alive(otx.status) - - - -def test_status_chain_nodeerror_multiple( - init_database, - otx, - ): - - otx.readysend(init_database) - otx.sendfail(init_database) - otx.retry(init_database) - otx.sendfail(init_database) - otx.retry(init_database) - otx.sent(init_database) - otx.success(1024, init_database) - assert not is_alive(otx.status) - - -def test_status_chain_nodeerror( - init_database, - otx, - ): - - otx.readysend(init_database) - otx.reject(init_database) - assert not is_alive(otx.status) diff --git a/apps/cic-eth/tests/unit/db/test_tx.py b/apps/cic-eth/tests/unit/db/test_tx.py index c616bf77..2cc54e73 100644 --- a/apps/cic-eth/tests/unit/db/test_tx.py +++ b/apps/cic-eth/tests/unit/db/test_tx.py @@ -18,10 +18,8 @@ from hexathon import ( add_0x, strip_0x, ) - -# local imports -from cic_eth.db.models.tx import TxCache -from cic_eth.db.models.otx import Otx +from chainqueue.db.models.tx import TxCache +from chainqueue.db.models.otx import Otx # test imports from tests.util.gas import StaticGasOracle @@ -35,18 +33,16 @@ def test_set( agent_roles, ): - chain_id = default_chain_spec.chain_id() rpc = RPCConnection.connect(default_chain_spec, 'default') nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=chain_id) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED) - tx = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), chain_id) + tx = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), default_chain_spec) otx = Otx( tx['nonce'], - tx['from'], tx_hash_hex, tx_signed_raw_hex, ) @@ -66,6 +62,7 @@ def test_set( to_value, 666, 13, + session=init_database, ) init_database.add(txc) init_database.commit() @@ -89,18 +86,17 @@ def test_clone( agent_roles, ): - chain_id = default_chain_spec.chain_id() rpc = RPCConnection.connect(default_chain_spec, 'default') nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) gas_oracle = StaticGasOracle(2 * (10 ** 9), 21000) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=chain_id) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) txs_rpc = [ c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED), ] gas_oracle = StaticGasOracle(4 * (10 ** 9), 21000) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=chain_id) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) txs_rpc += [ c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), 
tx_format=TxFormat.RLP_SIGNED), ] @@ -109,10 +105,9 @@ def test_clone( for tx_rpc in txs_rpc: tx_hash_hex = tx_rpc[0] tx_signed_raw_hex = tx_rpc[1] - tx_dict = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), chain_id) + tx_dict = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), default_chain_spec) otx = Otx( tx_dict['nonce'], - tx_dict['from'], tx_hash_hex, tx_signed_raw_hex, ) @@ -130,15 +125,16 @@ def test_clone( ZERO_ADDRESS, txs[0]['value'], txs[0]['value'], + session=init_database, ) init_database.add(txc) init_database.commit() - TxCache.clone(txs[0]['hash'], txs[1]['hash']) + TxCache.clone(txs[0]['hash'], txs[1]['hash'], session=init_database) q = init_database.query(TxCache) q = q.join(Otx) - q = q.filter(Otx.tx_hash==txs[1]['hash']) + q = q.filter(Otx.tx_hash==strip_0x(txs[1]['hash'])) txc_clone = q.first() assert txc_clone != None diff --git a/apps/cic-eth/tests/unit/eth/test_raw.py b/apps/cic-eth/tests/unit/eth/test_raw.py index ab7f65d2..4230b7f3 100644 --- a/apps/cic-eth/tests/unit/eth/test_raw.py +++ b/apps/cic-eth/tests/unit/eth/test_raw.py @@ -18,13 +18,12 @@ def test_unpack( agent_roles, ): - chain_id = default_chain_spec.chain_id() rpc = RPCConnection.connect(default_chain_spec, 'default') nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED) - tx = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), chain_id=chain_id) + tx = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), default_chain_spec) assert tx_hash_hex == tx['hash'] diff --git a/apps/cic-eth/tests/unit/ext/test_address.py b/apps/cic-eth/tests/unit/ext/test_address.py index ecfde959..332af060 100644 --- a/apps/cic-eth/tests/unit/ext/test_address.py +++ b/apps/cic-eth/tests/unit/ext/test_address.py @@ -23,7 +23,7 @@ def test_translate( nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc) - c = AddressDeclarator(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=default_chain_spec.chain_id()) + c = AddressDeclarator(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) description = 'alice'.encode('utf-8').ljust(32, b'\x00').hex() (tx_hash_hex, o) = c.add_declaration(address_declarator, contract_roles['CONTRACT_DEPLOYER'], agent_roles['ALICE'], add_0x(description)) diff --git a/apps/cic-eth/tests/unit/ext/test_ext_tx.py b/apps/cic-eth/tests/unit/ext/test_ext_tx.py index d942c4ac..2fadca4b 100644 --- a/apps/cic-eth/tests/unit/ext/test_ext_tx.py +++ b/apps/cic-eth/tests/unit/ext/test_ext_tx.py @@ -43,7 +43,7 @@ def test_filter_process( nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) init_eth_tester.mine_blocks(13) - c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle) + c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) (tx_hash_hex, o) = c.transfer(foo_token, agent_roles['ALICE'], agent_roles['BOB'], 1024) eth_rpc.do(o) o = receipt(tx_hash_hex) @@ -56,7 +56,7 @@ def test_filter_process( # external tx init_eth_tester.mine_blocks(28) - c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle) + c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle) (tx_hash_hex, o) = c.transfer(foo_token, agent_roles['ALICE'], 
agent_roles['BOB'], 512) eth_rpc.do(o) o = receipt(tx_hash_hex) diff --git a/apps/cic-eth/tests/unit/queue/test_balances.py b/apps/cic-eth/tests/unit/queue/test_balances.py index 168401c9..2a372314 100644 --- a/apps/cic-eth/tests/unit/queue/test_balances.py +++ b/apps/cic-eth/tests/unit/queue/test_balances.py @@ -2,12 +2,12 @@ import os import logging -# third-party imports +# external imports import pytest +from chainqueue.db.models.otx import Otx +from chainqueue.db.models.tx import TxCache # local imports -from cic_eth.db.models.otx import Otx -from cic_eth.db.models.tx import TxCache from cic_eth.queue.balance import ( balance_outgoing, balance_incoming, @@ -59,17 +59,15 @@ def test_assemble(): assert r[1].get('balance_xyzzy') != None -@pytest.mark.skip() def test_outgoing_balance( default_chain_spec, init_database, ): - chain_str = str(default_chain_spec) recipient = '0x' + os.urandom(20).hex() tx_hash = '0x' + os.urandom(32).hex() signed_tx = '0x' + os.urandom(128).hex() - otx = Otx.add(0, recipient, tx_hash, signed_tx, session=init_database) + otx = Otx.add(0, tx_hash, signed_tx, session=init_database) init_database.add(otx) init_database.commit() @@ -83,6 +81,7 @@ def test_outgoing_balance( token_address, 1000, 1000, + session=init_database, ) init_database.add(txc) init_database.commit() @@ -91,33 +90,35 @@ def test_outgoing_balance( 'address': token_address, 'converters': [], } - b = balance_outgoing([token_data], sender, chain_str) + b = balance_outgoing([token_data], sender, default_chain_spec.asdict()) assert b[0]['balance_outgoing'] == 1000 + otx.readysend(session=init_database) + init_database.flush() + otx.reserve(session=init_database) + init_database.flush() otx.sent(session=init_database) init_database.commit() - b = balance_outgoing([token_data], sender, chain_str) + b = balance_outgoing([token_data], sender, default_chain_spec.asdict()) assert b[0]['balance_outgoing'] == 1000 otx.success(block=1024, session=init_database) init_database.commit() - b = balance_outgoing([token_data], sender, chain_str) + b = balance_outgoing([token_data], sender, default_chain_spec.asdict()) assert b[0]['balance_outgoing'] == 0 -@pytest.mark.skip() def test_incoming_balance( default_chain_spec, init_database, ): - chain_str = str(default_chain_spec) recipient = '0x' + os.urandom(20).hex() tx_hash = '0x' + os.urandom(32).hex() signed_tx = '0x' + os.urandom(128).hex() - otx = Otx.add(0, recipient, tx_hash, signed_tx, session=init_database) + otx = Otx.add(0, tx_hash, signed_tx, session=init_database) init_database.add(otx) init_database.commit() @@ -131,6 +132,7 @@ def test_incoming_balance( token_address, 1000, 1000, + session=init_database, ) init_database.add(txc) init_database.commit() @@ -139,19 +141,23 @@ def test_incoming_balance( 'address': token_address, 'converters': [], } - b = balance_incoming([token_data], recipient, chain_str) + b = balance_incoming([token_data], recipient, default_chain_spec.asdict()) assert b[0]['balance_incoming'] == 0 + otx.readysend(session=init_database) + init_database.flush() + otx.reserve(session=init_database) + init_database.flush() otx.sent(session=init_database) init_database.commit() - b = balance_incoming([token_data], recipient, chain_str) + b = balance_incoming([token_data], recipient, default_chain_spec.asdict()) assert b[0]['balance_incoming'] == 1000 otx.success(block=1024, session=init_database) init_database.commit() - b = balance_incoming([token_data], recipient, chain_str) + b = balance_incoming([token_data], recipient, 
default_chain_spec.asdict()) assert b[0]['balance_incoming'] == 0 diff --git a/apps/cic-eth/tests/unit/queue/test_list_tx.py b/apps/cic-eth/tests/unit/queue/test_list_tx.py deleted file mode 100644 index 397ed07e..00000000 --- a/apps/cic-eth/tests/unit/queue/test_list_tx.py +++ /dev/null @@ -1,76 +0,0 @@ -# standard imports -import logging - -# external imports -from chainlib.connection import RPCConnection -from chainlib.eth.gas import RPCGasOracle -from chainlib.eth.nonce import RPCNonceOracle -from chainlib.eth.gas import Gas - -# local imports -from cic_eth.queue.tx import get_status_tx -from cic_eth.db.enum import ( - StatusEnum, - StatusBits, - ) -from cic_eth.queue.tx import create as queue_create -from cic_eth.eth.tx import cache_gas_data -from cic_eth.queue.tx import register_tx -from cic_eth.db.models.otx import Otx - -logg = logging.getLogger() - - -def test_status_tx_list( - default_chain_spec, - init_database, - eth_rpc, - eth_signer, - agent_roles, - ): - - rpc = RPCConnection.connect(default_chain_spec, 'default') - - nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc) - gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - - (tx_hash_hex, o) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 1024) - r = rpc.do(o) - - tx_signed_raw_hex = o['params'][0] - #queue_create(tx['nonce'], tx['from'], tx_hash.hex(), tx_signed['raw'], str(default_chain_spec)) - register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) - cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - q = init_database.query(Otx) - otx = q.get(1) - otx.sendfail(session=init_database) - init_database.add(otx) - init_database.commit() - init_database.refresh(otx) - - txs = get_status_tx(StatusBits.LOCAL_ERROR, session=init_database) - assert len(txs) == 1 - - otx.sendfail(session=init_database) - otx.retry(session=init_database) - init_database.add(otx) - init_database.commit() - init_database.refresh(otx) - - txs = get_status_tx(StatusBits.LOCAL_ERROR, session=init_database) - assert len(txs) == 1 - - txs = get_status_tx(StatusBits.QUEUED, session=init_database) - assert len(txs) == 1 - - txs = get_status_tx(StatusBits.QUEUED, not_status=StatusBits.LOCAL_ERROR, session=init_database) - assert len(txs) == 0 - - txs = get_status_tx(StatusBits.QUEUED, not_status=StatusBits.IN_NETWORK, session=init_database) - assert len(txs) == 1 - - txs = get_status_tx(StatusBits.IN_NETWORK, session=init_database) - assert len(txs) == 0 - diff --git a/apps/cic-eth/tests/unit/queue/test_otx_state_log.py b/apps/cic-eth/tests/unit/queue/test_otx_state_log.py deleted file mode 100644 index 9c5194da..00000000 --- a/apps/cic-eth/tests/unit/queue/test_otx_state_log.py +++ /dev/null @@ -1,22 +0,0 @@ -# standard imports -import os - -# local imports -from cic_eth.db.models.otx import Otx -from cic_eth.queue.tx import get_state_log - - -def test_otx_state_log( - init_database, - ): - - Otx.tracing = True - - address = '0x' + os.urandom(20).hex() - tx_hash = '0x' + os.urandom(32).hex() - signed_tx = '0x' + os.urandom(128).hex() - otx = Otx.add(0, address, tx_hash, signed_tx, session=init_database) - init_database.commit() - - log = get_state_log(tx_hash) - assert len(log) == 1 diff --git a/apps/cic-eth/tests/unit/queue/test_query.py b/apps/cic-eth/tests/unit/queue/test_query.py new file mode 100644 index 00000000..b82fbd4e --- /dev/null +++ 
b/apps/cic-eth/tests/unit/queue/test_query.py @@ -0,0 +1,58 @@ +# external imports +from chainqueue.db.enum import ( + StatusEnum, + StatusBits, + ) +from chainlib.connection import RPCConnection +from chainlib.eth.gas import ( + RPCGasOracle, + Gas, + ) +from chainlib.chain import ChainSpec + +# local imports +from cic_eth.db.enum import LockEnum +from cic_eth.db.models.lock import Lock +from cic_eth.queue.query import get_upcoming_tx +from cic_eth.queue.tx import register_tx +from cic_eth.eth.gas import cache_gas_data + +# test imports +from tests.util.nonce import StaticNonceOracle + + +def test_upcoming_with_lock( + default_chain_spec, + init_database, + eth_rpc, + eth_signer, + agent_roles, + ): + + rpc = RPCConnection.connect(default_chain_spec, 'default') + nonce_oracle = StaticNonceOracle(42) + gas_oracle = RPCGasOracle(eth_rpc) + c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) + + (tx_hash_hex, tx_rpc) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6)) + tx_signed_raw_hex = tx_rpc['params'][0] + + register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) + cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) + + txs = get_upcoming_tx(default_chain_spec, StatusEnum.PENDING) + assert len(txs.keys()) == 1 + + Lock.set(str(default_chain_spec), LockEnum.SEND, address=agent_roles['ALICE']) + + txs = get_upcoming_tx(default_chain_spec, StatusEnum.PENDING) + assert len(txs.keys()) == 0 + + (tx_hash_hex, tx_rpc) = c.create(agent_roles['BOB'], agent_roles['ALICE'], 100 * (10 ** 6)) + tx_signed_raw_hex = tx_rpc['params'][0] + + register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) + cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) + + txs = get_upcoming_tx(default_chain_spec, StatusEnum.PENDING) + assert len(txs.keys()) == 1 diff --git a/apps/cic-eth/tests/unit/queue/test_queue_lock.py b/apps/cic-eth/tests/unit/queue/test_queue_lock.py index fe18fa16..7b0a6e5b 100644 --- a/apps/cic-eth/tests/unit/queue/test_queue_lock.py +++ b/apps/cic-eth/tests/unit/queue/test_queue_lock.py @@ -5,10 +5,10 @@ import os import pytest # local imports -from cic_eth.queue.tx import create as queue_create from cic_eth.db.models.lock import Lock from cic_eth.db.enum import LockEnum from cic_eth.error import LockedError +from cic_eth.queue.tx import queue_create def test_queue_lock( @@ -25,41 +25,41 @@ def test_queue_lock( Lock.set(chain_str, LockEnum.QUEUE) with pytest.raises(LockedError): queue_create( + default_chain_spec, 0, address, tx_hash, tx_raw, - chain_str ) Lock.set(chain_str, LockEnum.QUEUE, address=address) with pytest.raises(LockedError): queue_create( + default_chain_spec, 0, address, tx_hash, tx_raw, - chain_str ) Lock.reset(chain_str, LockEnum.QUEUE) with pytest.raises(LockedError): queue_create( + default_chain_spec, 0, address, tx_hash, tx_raw, - chain_str ) Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash) with pytest.raises(LockedError): queue_create( + default_chain_spec, 0, address, tx_hash, tx_raw, - chain_str ) diff --git a/apps/cic-eth/tests/unit/queue/test_tx_queue.py b/apps/cic-eth/tests/unit/queue/test_tx_queue.py deleted file mode 100644 index 385b3fb5..00000000 --- a/apps/cic-eth/tests/unit/queue/test_tx_queue.py +++ /dev/null @@ -1,577 +0,0 @@ -# standard imports import logging -import datetime -import os -import logging - -# external imports -import pytest -from sqlalchemy 
import DateTime -from chainlib.connection import RPCConnection -from chainlib.eth.nonce import OverrideNonceOracle -from chainlib.eth.tx import unpack -from chainlib.eth.gas import ( - RPCGasOracle, - Gas, - ) -from chainlib.eth.constant import ZERO_ADDRESS -from hexathon import strip_0x - -# local imports -from cic_eth.eth.tx import cache_gas_data -from cic_eth.db.models.otx import Otx -from cic_eth.db.models.otx import OtxSync -from cic_eth.db.models.tx import TxCache -from cic_eth.db.models.lock import Lock -from cic_eth.db.models.base import SessionBase -from cic_eth.db.enum import ( - StatusEnum, - LockEnum, - StatusBits, - is_alive, - is_error_status, - status_str, - ) -from cic_eth.queue.tx import create as queue_create -from cic_eth.queue.tx import set_final_status -from cic_eth.queue.tx import set_sent_status -from cic_eth.queue.tx import set_waitforgas -from cic_eth.queue.tx import set_ready -from cic_eth.queue.tx import get_paused_txs -from cic_eth.queue.tx import get_upcoming_tx -from cic_eth.queue.tx import get_account_tx -from cic_eth.queue.tx import get_tx -from cic_eth.db.error import TxStateChangeError -from cic_eth.queue.tx import register_tx - -# test imports -from tests.util.nonce import StaticNonceOracle - -logg = logging.getLogger() - - -def test_finalize( - default_chain_spec, - eth_rpc, - eth_signer, - init_database, - agent_roles, - ): - - rpc = RPCConnection.connect(default_chain_spec, 'default') - nonce_oracle = StaticNonceOracle(0) - gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - - txs_rpc = [ - c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['BOB'], 200 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['BOB'], 300 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['BOB'], 400 * (10 ** 6)), - ] - - nonce_oracle = StaticNonceOracle(1) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - txs_rpc.append(c.create(agent_roles['ALICE'], agent_roles['BOB'], 500 * (10 ** 6))) - - tx_hashes = [] - i = 0 - for entry in txs_rpc: - tx_hash_hex = entry[0] - tx_rpc = entry[1] - tx_signed_raw_hex = tx_rpc['params'][0] - - register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) - cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - tx_hashes.append(tx_hash_hex) - - if i < 3: - set_sent_status(tx_hash_hex) - - i += 1 - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[0]).first() - assert otx.status & StatusBits.OBSOLETE - assert not is_alive(otx.status) - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[1]).first() - assert otx.status & StatusBits.OBSOLETE - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[2]).first() - assert otx.status & StatusBits.OBSOLETE - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[3]).first() - assert otx.status == StatusEnum.PENDING - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[4]).first() - assert otx.status == StatusEnum.PENDING - - set_sent_status(tx_hashes[3], False) - set_sent_status(tx_hashes[4], False) - set_final_status(tx_hashes[3], 1024) - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[0]).first() - assert otx.status & (StatusBits.OBSOLETE | StatusBits.FINAL) - assert not is_alive(otx.status) - - otx = 
init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[1]).first() - assert otx.status & (StatusBits.OBSOLETE | StatusBits.FINAL) - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[2]).first() - assert otx.status & (StatusBits.OBSOLETE | StatusBits.FINAL) - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[3]).first() - assert otx.status & (StatusBits.IN_NETWORK | StatusBits.FINAL) - assert not is_error_status(otx.status) - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[4]).first() - assert otx.status & (StatusBits.IN_NETWORK | StatusBits.FINAL) - assert not is_error_status(otx.status) - - -def test_expired( - default_chain_spec, - init_database, - eth_rpc, - eth_signer, - agent_roles, - ): - - rpc = RPCConnection.connect(default_chain_spec, 'default') - nonce_oracle = StaticNonceOracle(42) - gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - - txs_rpc = [ - c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['BOB'], 200 * (10 ** 6)), - ] - - nonce_oracle = StaticNonceOracle(43) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - txs_rpc += [ - c.create(agent_roles['ALICE'], agent_roles['BOB'], 300 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['BOB'], 400 * (10 ** 6)), - ] - - nonce_oracle = StaticNonceOracle(44) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - txs_rpc.append(c.create(agent_roles['ALICE'], agent_roles['BOB'], 500 * (10 ** 6))) - - tx_hashes = [] - - i = 0 - for entry in txs_rpc: - tx_hash_hex = entry[0] - tx_rpc = entry[1] - tx_signed_raw_hex = tx_rpc['params'][0] - - register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) - cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - tx_hashes.append(tx_hash_hex) - - set_sent_status(tx_hash_hex, False) - - otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first() - fake_created = datetime.datetime.utcnow() - datetime.timedelta(seconds=40*i) - otx.date_created = fake_created - init_database.add(otx) - init_database.commit() - init_database.refresh(otx) - - i += 1 - - now = datetime.datetime.utcnow() - delta = datetime.timedelta(seconds=61) - then = now - delta - - otxs = OtxSync.get_expired(then) - nonce_acc = 0 - for otx in otxs: - nonce_acc += otx.nonce - - assert nonce_acc == (43 + 44) - - -def test_get_paused( - init_database, - default_chain_spec, - eth_rpc, - eth_signer, - agent_roles, - ): - - chain_id = default_chain_spec.chain_id() - rpc = RPCConnection.connect(default_chain_spec, 'default') - nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42) - gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - - txs_rpc = [ - c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['BOB'], 200 * (10 ** 6)), - ] - - tx_hashes = [] - for entry in txs_rpc: - tx_hash_hex = entry[0] - tx_rpc = entry[1] - tx_signed_raw_hex = tx_rpc['params'][0] - - register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) - cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - 
tx_hashes.append(tx_hash_hex) - - txs = get_paused_txs(sender=agent_roles['ALICE'], chain_id=chain_id) - assert len(txs.keys()) == 0 - - q = init_database.query(Otx) - q = q.filter(Otx.tx_hash==tx_hashes[0]) - r = q.first() - r.waitforgas(session=init_database) - init_database.add(r) - init_database.commit() - - chain_id = default_chain_spec.chain_id() - txs = get_paused_txs(chain_id=chain_id) - assert len(txs.keys()) == 1 - - txs = get_paused_txs(sender=agent_roles['ALICE'], chain_id=chain_id) # init_w3.eth.accounts[0]) - assert len(txs.keys()) == 1 - - txs = get_paused_txs(status=StatusBits.GAS_ISSUES) - assert len(txs.keys()) == 1 - - txs = get_paused_txs(sender=agent_roles['ALICE'], status=StatusBits.GAS_ISSUES, chain_id=chain_id) - assert len(txs.keys()) == 1 - - - q = init_database.query(Otx) - q = q.filter(Otx.tx_hash==tx_hashes[1]) - o = q.first() - o.waitforgas(session=init_database) - init_database.add(o) - init_database.commit() - - txs = get_paused_txs() - assert len(txs.keys()) == 2 - - txs = get_paused_txs(sender=agent_roles['ALICE'], chain_id=chain_id) # init_w3.eth.accounts[0]) - assert len(txs.keys()) == 2 - - txs = get_paused_txs(status=StatusBits.GAS_ISSUES, chain_id=chain_id) - assert len(txs.keys()) == 2 - - txs = get_paused_txs(sender=agent_roles['ALICE'], status=StatusBits.GAS_ISSUES, chain_id=chain_id) # init_w3.eth.accounts[0]) - assert len(txs.keys()) == 2 - - q = init_database.query(Otx) - q = q.filter(Otx.tx_hash==tx_hashes[1]) - o = q.first() - o.sendfail(session=init_database) - init_database.add(o) - init_database.commit() - - txs = get_paused_txs() - assert len(txs.keys()) == 2 - - txs = get_paused_txs(sender=agent_roles['ALICE'], chain_id=chain_id) # init_w3.eth.accounts[0]) - assert len(txs.keys()) == 2 - - txs = get_paused_txs(status=StatusBits.GAS_ISSUES, chain_id=chain_id) - txs = get_paused_txs(status=StatusEnum.WAITFORGAS, chain_id=chain_id) - assert len(txs.keys()) == 1 - - txs = get_paused_txs(sender=agent_roles['ALICE'], status=StatusBits.GAS_ISSUES, chain_id=chain_id) # init_w3.eth.accounts[0]) - assert len(txs.keys()) == 1 - - -def test_get_upcoming( - default_chain_spec, - eth_rpc, - eth_signer, - init_database, - agent_roles, - ): - - chain_id = default_chain_spec.chain_id() - rpc = RPCConnection.connect(default_chain_spec, 'default') - nonce_oracle = StaticNonceOracle(42) - gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - - txs_rpc = [ - c.create(agent_roles['ALICE'], agent_roles['DAVE'], 100 * (10 ** 6)), - c.create(agent_roles['BOB'], agent_roles['DAVE'], 200 * (10 ** 6)), - c.create(agent_roles['CAROL'], agent_roles['DAVE'], 300 * (10 ** 6)), - ] - - nonce_oracle = StaticNonceOracle(43) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - txs_rpc += [ - c.create(agent_roles['ALICE'], agent_roles['DAVE'], 400 * (10 ** 6)), - c.create(agent_roles['BOB'], agent_roles['DAVE'], 500 * (10 ** 6)), - c.create(agent_roles['CAROL'], agent_roles['DAVE'], 600 * (10 ** 6)), - ] - - nonce_oracle = StaticNonceOracle(44) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - txs_rpc += [ - c.create(agent_roles['ALICE'], agent_roles['DAVE'], 700 * (10 ** 6)), - ] - - tx_hashes = [] - for entry in txs_rpc: - tx_hash_hex = entry[0] - tx_rpc = entry[1] - tx_signed_raw_hex = tx_rpc['params'][0] - - register_tx(tx_hash_hex, 
tx_signed_raw_hex, default_chain_spec, None, session=init_database) - cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - tx_hashes.append(tx_hash_hex) - - set_ready(tx_hash_hex) - - txs = get_upcoming_tx(StatusBits.QUEUED, chain_id=chain_id) - assert len(txs.keys()) == 3 - - tx = unpack(bytes.fromhex(strip_0x(txs[tx_hashes[0]])), chain_id) - assert tx['nonce'] == 42 - - tx = unpack(bytes.fromhex(strip_0x(txs[tx_hashes[1]])), chain_id) - assert tx['nonce'] == 42 - - tx = unpack(bytes.fromhex(strip_0x(txs[tx_hashes[2]])), chain_id) - assert tx['nonce'] == 42 - - q = init_database.query(TxCache) - q = q.filter(TxCache.sender==agent_roles['ALICE']) - for o in q.all(): - o.date_checked -= datetime.timedelta(seconds=30) - init_database.add(o) - init_database.commit() - - before = datetime.datetime.now() - datetime.timedelta(seconds=20) - logg.debug('before {}'.format(before)) - txs = get_upcoming_tx(StatusBits.QUEUED, before=before) - logg.debug('txs {} {}'.format(txs.keys(), txs.values())) - assert len(txs.keys()) == 1 - - # Now date checked has been set to current time, and the check returns no results - txs = get_upcoming_tx(StatusBits.QUEUED, before=before) - logg.debug('txs {} {}'.format(txs.keys(), txs.values())) - assert len(txs.keys()) == 0 - - set_sent_status(tx_hashes[0]) - - txs = get_upcoming_tx(StatusBits.QUEUED) - assert len(txs.keys()) == 3 - with pytest.raises(KeyError): - tx = txs[tx_hashes[0]] - - tx = unpack(bytes.fromhex(strip_0x(txs[tx_hashes[3]])), chain_id) - assert tx['nonce'] == 43 - - set_waitforgas(tx_hashes[1]) - txs = get_upcoming_tx(StatusBits.QUEUED) - assert len(txs.keys()) == 3 - with pytest.raises(KeyError): - tx = txs[tx_hashes[1]] - - tx = unpack(bytes.fromhex(strip_0x(txs[tx_hashes[3]])), chain_id) - assert tx['nonce'] == 43 - - txs = get_upcoming_tx(StatusBits.GAS_ISSUES) - assert len(txs.keys()) == 1 - - -def test_upcoming_with_lock( - default_chain_spec, - init_database, - eth_rpc, - eth_signer, - agent_roles, - ): - - chain_id = int(default_chain_spec.chain_id()) - - rpc = RPCConnection.connect(default_chain_spec, 'default') - nonce_oracle = StaticNonceOracle(42) - gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - - (tx_hash_hex, tx_rpc) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6)) - tx_signed_raw_hex = tx_rpc['params'][0] - - register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) - cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - txs = get_upcoming_tx(StatusEnum.PENDING, chain_id=chain_id) - assert len(txs.keys()) == 1 - - Lock.set(str(default_chain_spec), LockEnum.SEND, address=agent_roles['ALICE']) - - txs = get_upcoming_tx(StatusEnum.PENDING, chain_id=chain_id) - assert len(txs.keys()) == 0 - - (tx_hash_hex, tx_rpc) = c.create(agent_roles['BOB'], agent_roles['ALICE'], 100 * (10 ** 6)) - tx_signed_raw_hex = tx_rpc['params'][0] - - register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) - cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - txs = get_upcoming_tx(StatusEnum.PENDING, chain_id=chain_id) - assert len(txs.keys()) == 1 - - -def test_obsoletion( - default_chain_spec, - init_database, - eth_rpc, - eth_signer, - agent_roles, - ): - - chain_id = default_chain_spec.chain_id() - rpc = RPCConnection.connect(default_chain_spec, 'default') - nonce_oracle = 
StaticNonceOracle(42) - gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - - txs_rpc = [ - c.create(agent_roles['ALICE'], agent_roles['DAVE'], 100 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['DAVE'], 200 * (10 ** 6)), - c.create(agent_roles['BOB'], agent_roles['DAVE'], 300 * (10 ** 6)), - ] - - nonce_oracle = StaticNonceOracle(43) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - txs_rpc += [ - c.create(agent_roles['BOB'], agent_roles['DAVE'], 400 * (10 ** 6)), - ] - - tx_hashes = [] - i = 0 - for entry in txs_rpc: - tx_hash_hex = entry[0] - tx_rpc = entry[1] - tx_signed_raw_hex = tx_rpc['params'][0] - - register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) - cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - tx_hashes.append(tx_hash_hex) - - if i < 2: - set_sent_status(tx_hash_hex) - - i += 1 - - session = SessionBase.create_session() - q = session.query(Otx) - q = q.filter(Otx.status.op('&')(StatusEnum.OBSOLETED.value)==StatusEnum.OBSOLETED.value) - z = 0 - for o in q.all(): - z += o.nonce - - session.close() - assert z == 42 - - set_final_status(tx_hashes[1], 1023, True) - - session = SessionBase.create_session() - q = session.query(Otx) - q = q.filter(Otx.status.op('&')(StatusEnum.CANCELLED.value)==StatusEnum.OBSOLETED.value) - zo = 0 - for o in q.all(): - zo += o.nonce - - q = session.query(Otx) - q = q.filter(Otx.status.op('&')(StatusEnum.CANCELLED.value)==StatusEnum.CANCELLED.value) - zc = 0 - for o in q.all(): - zc += o.nonce - - session.close() - assert zo == 0 - assert zc == 42 - - -def test_retry( - init_database, - ): - - address = '0x' + os.urandom(20).hex() - tx_hash = '0x' + os.urandom(32).hex() - signed_tx = '0x' + os.urandom(128).hex() - otx = Otx(0, address, tx_hash, signed_tx) - init_database.add(otx) - init_database.commit() - - set_sent_status(tx_hash, True) - set_ready(tx_hash) - - q = init_database.query(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - otx = q.first() - - assert (otx.status & StatusEnum.RETRY.value) == StatusEnum.RETRY.value - assert is_error_status(otx.status) - - set_sent_status(tx_hash, False) - set_ready(tx_hash) - - init_database.commit() - - q = init_database.query(Otx) - q = q.filter(Otx.tx_hash==tx_hash) - otx = q.first() - - assert (otx.status & StatusEnum.RETRY.value) == StatusBits.QUEUED.value - assert not is_error_status(otx.status) - - -def test_get_account_tx( - default_chain_spec, - init_database, - eth_rpc, - eth_signer, - agent_roles, - ): - - chain_id = default_chain_spec.chain_id() - rpc = RPCConnection.connect(default_chain_spec, 'default') - nonce_oracle = OverrideNonceOracle(ZERO_ADDRESS, 42) - gas_oracle = RPCGasOracle(eth_rpc) - c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=default_chain_spec.chain_id()) - - txs_rpc = [ - c.create(agent_roles['ALICE'], agent_roles['DAVE'], 100 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['CAROL'], 200 * (10 ** 6)), - c.create(agent_roles['ALICE'], agent_roles['BOB'], 300 * (10 ** 6)), - c.create(agent_roles['BOB'], agent_roles['ALICE'], 300 * (10 ** 6)), - ] - - tx_hashes = [] - for entry in txs_rpc: - tx_hash_hex = entry[0] - tx_rpc = entry[1] - tx_signed_raw_hex = tx_rpc['params'][0] - - register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database) - 
cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict()) - - tx_hashes.append(tx_hash_hex) - - txs = get_account_tx(agent_roles['ALICE']) - logg.debug('tx {} tx {}'.format(list(txs.keys()), tx_hashes)) - assert list(txs.keys()) == tx_hashes - - txs = get_account_tx(agent_roles['ALICE'], as_recipient=False) - assert list(txs.keys()) == tx_hashes[:3] - - txs = get_account_tx(agent_roles['ALICE'], as_sender=False) - assert list(txs.keys()) == tx_hashes[3:] diff --git a/apps/cic-meta/package.json b/apps/cic-meta/package.json index 39144727..ef706733 100644 --- a/apps/cic-meta/package.json +++ b/apps/cic-meta/package.json @@ -1,6 +1,6 @@ { "name": "cic-client-meta", - "version": "0.0.7-alpha.3", + "version": "0.0.7-alpha.6", "description": "Signed CRDT metadata graphs for the CIC network", "main": "dist/index.js", "types": "dist/index.d.ts", @@ -40,6 +40,6 @@ ], "license": "GPL-3.0-or-later", "engines": { - "node": "~15.3.0" + "node": "~14.16.1" } } diff --git a/apps/cic-meta/scripts/server/server.ts b/apps/cic-meta/scripts/server/server.ts index ca9c2dfb..9bef8bda 100755 --- a/apps/cic-meta/scripts/server/server.ts +++ b/apps/cic-meta/scripts/server/server.ts @@ -114,6 +114,7 @@ async function processRequest(req, res) { return; } + if (!['PUT', 'GET', 'POST'].includes(req.method)) { res.writeHead(405, {"Content-Type": "text/plain"}); res.end(); @@ -123,6 +124,7 @@ async function processRequest(req, res) { try { digest = parseDigest(req.url); } catch(e) { + console.error('digest error: ' + e) res.writeHead(400, {"Content-Type": "text/plain"}); res.end(); return; diff --git a/apps/cic-meta/src/assets/phone.ts b/apps/cic-meta/src/assets/phone.ts index e786265e..7ace058f 100644 --- a/apps/cic-meta/src/assets/phone.ts +++ b/apps/cic-meta/src/assets/phone.ts @@ -1,12 +1,12 @@ import { ArgPair, Syncable } from '../sync'; -import { Addressable, addressToBytes, bytesToHex, toKey } from '../digest'; +import { Addressable, mergeKey } from '../digest'; class Phone extends Syncable implements Addressable { address: string value: number - constructor(address:string, v:number) { + constructor(address:string, v:string) { const o = { msisdn: v, } @@ -17,8 +17,8 @@ class Phone extends Syncable implements Addressable { }); } - public static async toKey(msisdn:number) { - return await toKey(msisdn.toString(), ':cic.msisdn'); + public static async toKey(msisdn:string) { + return await mergeKey(Buffer.from(msisdn), Buffer.from(':cic.phone')); } public key(): string { diff --git a/apps/cic-meta/src/digest.ts b/apps/cic-meta/src/digest.ts index 8681578d..d66f7463 100644 --- a/apps/cic-meta/src/digest.ts +++ b/apps/cic-meta/src/digest.ts @@ -61,6 +61,7 @@ function addressToBytes(s:string) { export { toKey, toAddressKey, + mergeKey, bytesToHex, addressToBytes, Addressable, diff --git a/apps/cic-notify/cic_notify/api.py b/apps/cic-notify/cic_notify/api.py index 789eb835..f879a6ee 100644 --- a/apps/cic-notify/cic_notify/api.py +++ b/apps/cic-notify/cic_notify/api.py @@ -3,6 +3,7 @@ import logging import re # third-party imports +from celery.app.control import Inspect import celery # local imports @@ -15,6 +16,29 @@ logg = logging.getLogger() sms_tasks_matcher = r"^(cic_notify.tasks.sms)(\.\w+)?" 
+re_q = r'^cic-notify' +def get_sms_queue_tasks(app, task_prefix='cic_notify.tasks.sms.'): + host_queues = [] + + i = Inspect(app=app) + qs = i.active_queues() + for host in qs.keys(): + for q in qs[host]: + if re.match(re_q, q['name']): + host_queues.append((host, q['name'],)) + + task_prefix_len = len(task_prefix) + queue_tasks = [] + for (host, queue) in host_queues: + i = Inspect(app=app, destination=[host]) + for tasks in i.registered_tasks().values(): + for task in tasks: + if len(task) >= task_prefix_len and task[:task_prefix_len] == task_prefix: + queue_tasks.append((queue, task,)) + + return queue_tasks + + class Api: # TODO: Implement callback strategy def __init__(self, queue='cic-notify'): @@ -22,17 +46,9 @@ class Api: :param queue: The queue on which to execute notification tasks :type queue: str """ - registered_tasks = app.tasks - self.sms_tasks = [] + self.sms_tasks = get_sms_queue_tasks(app) + logg.debug('sms tasks {}'.format(self.sms_tasks)) - for task in registered_tasks.keys(): - logg.debug(f'Found: {task} {registered_tasks[task]}') - match = re.match(sms_tasks_matcher, task) - if match: - self.sms_tasks.append(task) - - self.queue = queue - logg.info(f'api using queue: {self.queue}') def sms(self, message, recipient): """This function chains all sms tasks in order to send a message, log and persist said data to disk @@ -44,12 +60,17 @@ class Api: :rtype: Celery.Task """ signatures = [] - for task in self.sms_tasks: - signature = celery.signature(task) + for q in self.sms_tasks: + signature = celery.signature( + q[1], + [ + message, + recipient, + ], + queue=q[0], + ) signatures.append(signature) - signature_group = celery.group(signatures) - result = signature_group.apply_async( - args=[message, recipient], - queue=self.queue - ) - return result + + t = celery.group(signatures)() + + return t diff --git a/apps/cic-notify/cic_notify/runnable/send.py b/apps/cic-notify/cic_notify/runnable/send.py new file mode 100644 index 00000000..5126736e --- /dev/null +++ b/apps/cic-notify/cic_notify/runnable/send.py @@ -0,0 +1,76 @@ +# standard imports +import sys +import os +import logging +import argparse +import tempfile + +# external imports +import celery +import confini + +# local imports +from cic_notify.api import Api + +logging.basicConfig(level=logging.WARNING) +logg = logging.getLogger() + +config_dir = os.path.join('/usr/local/etc/cic-notify') + +argparser = argparse.ArgumentParser() +argparser.add_argument('-c', type=str, default=config_dir, help='config file') +argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') +argparser.add_argument('-v', action='store_true', help='be verbose') +argparser.add_argument('-vv', action='store_true', help='be more verbose') +argparser.add_argument('recipient', type=str, help='notification recipient') +argparser.add_argument('message', type=str, help='message text') +args = argparser.parse_args() + +if args.vv: + logging.getLogger().setLevel(logging.DEBUG) +elif args.v: + logging.getLogger().setLevel(logging.INFO) + +config = confini.Config(args.c, args.env_prefix) +config.process() +config.censor('PASSWORD', 'DATABASE') +config.add(args.recipient, '_RECIPIENT', True) +config.add(args.message, '_MESSAGE', True) + +# set up celery +app = celery.Celery(__name__) + +broker = config.get('CELERY_BROKER_URL') +if broker[:4] == 'file': + bq = tempfile.mkdtemp() + bp = tempfile.mkdtemp() + app.conf.update({ + 
'broker_url': broker, + 'broker_transport_options': { + 'data_folder_in': bq, + 'data_folder_out': bq, + 'data_folder_processed': bp, + }, + }, + ) + logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp)) +else: + app.conf.update({ + 'broker_url': broker, + }) + +result = config.get('CELERY_RESULT_URL') +if result[:4] == 'file': + rq = tempfile.mkdtemp() + app.conf.update({ + 'result_backend': 'file://{}'.format(rq), + }) + logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(rq)) +else: + app.conf.update({ + 'result_backend': result, + }) + +if __name__ == '__main__': + a = Api() + t = a.sms(config.get('_MESSAGE'), config.get('_RECIPIENT')) diff --git a/apps/cic-notify/docker/Dockerfile b/apps/cic-notify/docker/Dockerfile index dfce433a..ea00a238 100644 --- a/apps/cic-notify/docker/Dockerfile +++ b/apps/cic-notify/docker/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.8.6 +FROM python:3.8.6-slim-buster RUN apt-get update && \ apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps @@ -6,7 +6,7 @@ RUN apt-get update && \ WORKDIR /usr/src/cic-notify ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433' -RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a44 +RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62 COPY cic-notify/setup.cfg \ cic-notify/setup.py \ diff --git a/apps/cic-notify/requirements.txt b/apps/cic-notify/requirements.txt index 7a952e58..0b7de273 100644 --- a/apps/cic-notify/requirements.txt +++ b/apps/cic-notify/requirements.txt @@ -1 +1 @@ -cic_base[full_graph]~=0.1.2a46 \ No newline at end of file +cic_base[full_graph]~=0.1.2a61 diff --git a/apps/cic-notify/setup.cfg b/apps/cic-notify/setup.cfg index 4ef87f2c..9daf04a9 100644 --- a/apps/cic-notify/setup.cfg +++ b/apps/cic-notify/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = cic-notify -version= 0.4.0a2 +version= 0.4.0a3 description = CIC notifications service author = Louis Holbrook author_email = dev@holbrook.no @@ -45,3 +45,4 @@ testing = [options.entry_points] console_scripts = cic-notify-tasker = cic_notify.runnable.tasker:main + cic-notify-send = cic_notify.runnable.send:main diff --git a/apps/cic-ussd/.config/app.ini b/apps/cic-ussd/.config/app.ini index 3efaaabc..6d37c421 100644 --- a/apps/cic-ussd/.config/app.ini +++ b/apps/cic-ussd/.config/app.ini @@ -1,14 +1,24 @@ [app] -ALLOWED_IP=127.0.0.1 +ALLOWED_IP=0.0.0.0/0 LOCALE_FALLBACK=en -LOCALE_PATH=var/lib/locale/ +LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/ MAX_BODY_LENGTH=1024 PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I= SERVICE_CODE=*483*46# +[phone_number] +REGION=KE + [ussd] MENU_FILE=/usr/src/data/ussd_menu.json +user = +pass = [statemachine] STATES=/usr/src/cic-ussd/states/ TRANSITIONS=/usr/src/cic-ussd/transitions/ + +[client] +host = +port = +ssl = diff --git a/apps/cic-ussd/.config/database.ini b/apps/cic-ussd/.config/database.ini index 7e2c3e44..5bce6bcf 100644 --- a/apps/cic-ussd/.config/database.ini +++ b/apps/cic-ussd/.config/database.ini @@ -6,3 +6,5 @@ HOST=localhost PORT=5432 ENGINE=postgresql DRIVER=psycopg2 +DEBUG=0 +POOL_SIZE=1 diff --git a/apps/cic-ussd/.config/redis.ini b/apps/cic-ussd/.config/redis.ini index 44b3a0bf..417481db 100644 --- a/apps/cic-ussd/.config/redis.ini +++ b/apps/cic-ussd/.config/redis.ini @@ -1,9 +1,9 @@ [celery] -BROKER_URL=redis:// -RESULT_URL=redis:// +BROKER_URL=redis://redis:6379
+RESULT_URL=redis://redis:6379 [redis] -HOSTNAME=localhost +HOSTNAME=redis PASSWORD= PORT=6379 DATABASE=0 diff --git a/apps/cic-ussd/cic_ussd/account.py b/apps/cic-ussd/cic_ussd/account.py index ac34df8a..42e058b6 100644 --- a/apps/cic-ussd/cic_ussd/account.py +++ b/apps/cic-ussd/cic_ussd/account.py @@ -27,7 +27,7 @@ def define_account_tx_metadata(user: User): if account_metadata: account_metadata = json.loads(account_metadata) person = Person() - deserialized_person = person.deserialize(metadata=account_metadata) + deserialized_person = person.deserialize(person_data=account_metadata) given_name = deserialized_person.given_name family_name = deserialized_person.family_name phone_number = deserialized_person.tel @@ -46,4 +46,4 @@ def retrieve_account_statement(blockchain_address: str): callback_task='cic_ussd.tasks.callback_handler.process_statement_callback', callback_param=blockchain_address ) - result = cic_eth_api.list(address=blockchain_address, limit=9) + cic_eth_api.list(address=blockchain_address, limit=9) diff --git a/apps/cic-ussd/cic_ussd/db/models/base.py b/apps/cic-ussd/cic_ussd/db/models/base.py index 9ff5227e..d76ee6c4 100644 --- a/apps/cic-ussd/cic_ussd/db/models/base.py +++ b/apps/cic-ussd/cic_ussd/db/models/base.py @@ -1,47 +1,129 @@ -# standard imports +# standard imports +import logging import datetime -# third-party imports +# external imports from sqlalchemy import Column, Integer, DateTime from sqlalchemy.ext.declarative import declarative_base from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker +from sqlalchemy.pool import ( + StaticPool, + QueuePool, + AssertionPool, + NullPool, + ) + +logg = logging.getLogger().getChild(__name__) Model = declarative_base(name='Model') class SessionBase(Model): + """The base object for all SQLAlchemy enabled models. All other models must extend this. + """ __abstract__ = True - id = Column(Integer, primary_key=True) created = Column(DateTime, default=datetime.datetime.utcnow) updated = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow) + id = Column(Integer, primary_key=True) + engine = None - session = None - query = None + """Database connection engine of the running application""" + sessionmaker = None + """Factory object responsible for creating sessions from the connection pool""" + transactional = True + """Whether the database backend supports query transactions. Should be explicitly set by initialization code""" + poolable = True + """Whether the database backend supports connection pools. Should be explicitly set by initialization code""" + procedural = True + """Whether the database backend supports stored procedures""" + localsessions = {} + """Contains dictionary of sessions initiated by db model components""" + @staticmethod def create_session(): - session = sessionmaker(bind=SessionBase.engine) - return session() + """Creates a new database session. + """ + return SessionBase.sessionmaker() + @staticmethod def _set_engine(engine): + """Sets the database engine static property + """ SessionBase.engine = engine + SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine) + @staticmethod - def build(): - Model.metadata.create_all(bind=SessionBase.engine) + def connect(dsn, pool_size=16, debug=False): + """Create new database connection engine and connect to database backend. + + :param dsn: DSN string defining connection.
+ :type dsn: str + """ + e = None + if SessionBase.poolable: + poolclass = QueuePool + if pool_size > 1: + logg.info('db using queue pool') + e = create_engine( + dsn, + max_overflow=pool_size*3, + pool_pre_ping=True, + pool_size=pool_size, + pool_recycle=60, + poolclass=poolclass, + echo=debug, + ) + else: + if pool_size == 0: + poolclass = NullPool + elif debug: + poolclass = AssertionPool + else: + poolclass = StaticPool + e = create_engine( + dsn, + poolclass=poolclass, + echo=debug, + ) + else: + logg.info('db connection not poolable') + e = create_engine( + dsn, + echo=debug, + ) + + SessionBase._set_engine(e) - @staticmethod - # https://docs.sqlalchemy.org/en/13/core/pooling.html#pool-disconnects - def connect(data_source_name): - engine = create_engine(data_source_name, pool_pre_ping=True) - SessionBase._set_engine(engine) @staticmethod def disconnect(): + """Disconnect from database and free resources. + """ SessionBase.engine.dispose() SessionBase.engine = None + + @staticmethod + def bind_session(session=None): + localsession = session + if localsession == None: + localsession = SessionBase.create_session() + localsession_key = str(id(localsession)) + logg.debug('creating new session {}'.format(localsession_key)) + SessionBase.localsessions[localsession_key] = localsession + return localsession + + + @staticmethod + def release_session(session=None): + session_key = str(id(session)) + if SessionBase.localsessions.get(session_key) != None: + logg.debug('commit and destroy session {}'.format(session_key)) + session.commit() + session.close() diff --git a/apps/cic-ussd/cic_ussd/db/ussd_menu.json b/apps/cic-ussd/cic_ussd/db/ussd_menu.json index 12455622..0f0d24eb 100644 --- a/apps/cic-ussd/cic_ussd/db/ussd_menu.json +++ b/apps/cic-ussd/cic_ussd/db/ussd_menu.json @@ -128,8 +128,8 @@ }, "22": { "description": "Pin entry menu.", - "display_key": "ussd.kenya.standard_pin_authorization", - "name": "standard_pin_authorization", + "display_key": "ussd.kenya.display_metadata_pin_authorization", + "name": "display_metadata_pin_authorization", "parent": "start" }, "23": { @@ -230,9 +230,22 @@ }, "39": { "description": "Menu to instruct users to call the office.", - "display_key": "ussd.key.help", + "display_key": "ussd.kenya.help", "name": "help", "parent": null + }, + "40": { + "description": "Menu to display a user's entire profile", + "display_key": "ussd.kenya.display_user_metadata", + "name": "display_user_metadata", + "parent": "account_management" + }, + "41": { + "description": "The recipient is not in the system", + "display_key": "ussd.kenya.exit_invalid_recipient", + "name": "exit_invalid_recipient", + "parent": null } + } } \ No newline at end of file diff --git a/apps/cic-ussd/cic_ussd/error.py b/apps/cic-ussd/cic_ussd/error.py index 006a7a49..256e77c6 100644 --- a/apps/cic-ussd/cic_ussd/error.py +++ b/apps/cic-ussd/cic_ussd/error.py @@ -18,7 +18,7 @@ class ActionDataNotFoundError(OSError): pass -class UserMetadataNotFoundError(OSError): +class MetadataNotFoundError(OSError): """Raised when metadata is expected but not available in cache.""" pass @@ -31,3 +31,10 @@ class UnsupportedMethodError(OSError): class CachedDataNotFoundError(OSError): """Raised when the method passed to the make request function is unsupported.""" pass + + +class MetadataStoreError(Exception): + """Raised when metadata storage fails""" + pass + + diff --git a/apps/cic-ussd/cic_ussd/metadata/__init__.py b/apps/cic-ussd/cic_ussd/metadata/__init__.py index 1c90189e..44c4a217 100644 --- 
a/apps/cic-ussd/cic_ussd/metadata/__init__.py +++ b/apps/cic-ussd/cic_ussd/metadata/__init__.py @@ -3,7 +3,10 @@ # third-party imports import requests from chainlib.eth.address import to_checksum -from hexathon import add_0x +from hexathon import ( + add_0x, + strip_0x, + ) # local imports from cic_ussd.error import UnsupportedMethodError @@ -40,4 +43,4 @@ def blockchain_address_to_metadata_pointer(blockchain_address: str): :return: :rtype: """ - return bytes.fromhex(blockchain_address[2:]) + return bytes.fromhex(strip_0x(blockchain_address)) diff --git a/apps/cic-ussd/cic_ussd/metadata/base.py b/apps/cic-ussd/cic_ussd/metadata/base.py new file mode 100644 index 00000000..6f2d1ffc --- /dev/null +++ b/apps/cic-ussd/cic_ussd/metadata/base.py @@ -0,0 +1,126 @@ +# standard imports +import json +import logging +import os +from typing import Dict, Union + +# third-party imports +import requests +from cic_types.models.person import generate_metadata_pointer, Person + +# local imports +from cic_ussd.metadata import make_request +from cic_ussd.metadata.signer import Signer +from cic_ussd.redis import cache_data +from cic_ussd.error import MetadataStoreError + + +logg = logging.getLogger().getChild(__name__) + + +class Metadata: + """ + :cvar base_url: The base url of the metadata server. + :type base_url: str + """ + + base_url = None + + +def metadata_http_error_handler(result: requests.Response): + """ This function handles and appropriately raises errors from http requests interacting with the metadata server. + :param result: The response object from a http request. + :type result: requests.Response + """ + status_code = result.status_code + + if 100 <= status_code < 200: + raise MetadataStoreError(f'Informational errors: {status_code}, reason: {result.reason}') + + elif 300 <= status_code < 400: + raise MetadataStoreError(f'Redirect Issues: {status_code}, reason: {result.reason}') + + elif 400 <= status_code < 500: + raise MetadataStoreError(f'Client Error: {status_code}, reason: {result.reason}') + + elif 500 <= status_code < 600: + raise MetadataStoreError(f'Server Error: {status_code}, reason: {result.reason}') + + +class MetadataRequestsHandler(Metadata): + + def __init__(self, cic_type: str, identifier: bytes, engine: str = 'pgp'): + """ + :param cic_type: The salt value with which to hash a specific metadata identifier. + :type cic_type: str + :param engine: Encryption used for sending data to the metadata server. + :type engine: str + :param identifier: A unique element of data in bytes necessary for creating a metadata pointer. + :type identifier: bytes + """ + self.cic_type = cic_type + self.engine = engine + self.headers = { + 'X-CIC-AUTOMERGE': 'server', + 'Content-Type': 'application/json' + } + self.identifier = identifier + self.metadata_pointer = generate_metadata_pointer( + identifier=self.identifier, + cic_type=self.cic_type + ) + if self.base_url: + self.url = os.path.join(self.base_url, self.metadata_pointer) + + def create(self, data: Union[Dict, str]): + """ This function is responsible for posting data to the metadata server with a corresponding metadata pointer + for storage. + :param data: The data to be stored in the metadata server.
+ :type data: dict|str + """ + data = json.dumps(data).encode('utf-8') + result = make_request(method='POST', url=self.url, data=data, headers=self.headers) + metadata_http_error_handler(result=result) + metadata = result.content + self.edit(data=metadata) + + def edit(self, data: bytes): + """ This function is responsible for editing data in the metadata server corresponding to a unique pointer. + :param data: The data to be edited in the metadata server. + :type data: bytes + """ + cic_meta_signer = Signer() + signature = cic_meta_signer.sign_digest(data=data) + algorithm = cic_meta_signer.get_operational_key().get('algo') + decoded_data = data.decode('utf-8') + formatted_data = { + 'm': data.decode('utf-8'), + 's': { + 'engine': self.engine, + 'algo': algorithm, + 'data': signature, + 'digest': json.loads(data).get('digest'), + } + } + formatted_data = json.dumps(formatted_data).encode('utf-8') + result = make_request(method='PUT', url=self.url, data=formatted_data, headers=self.headers) + logg.info(f'signed metadata submission status: {result.status_code}.') + metadata_http_error_handler(result=result) + try: + decoded_identifier = self.identifier.decode("utf-8") + except UnicodeDecodeError: + decoded_identifier = self.identifier.hex() + logg.info(f'identifier: {decoded_identifier}. metadata pointer: {self.metadata_pointer} set to: {decoded_data}.') + + def query(self): + """This function is responsible for querying the metadata server for data corresponding to a unique pointer.""" + result = make_request(method='GET', url=self.url) + metadata_http_error_handler(result=result) + response_data = result.content + data = json.loads(response_data.decode('utf-8')) + if result.status_code == 200 and self.cic_type == 'cic.person': + person = Person() + deserialized_person = person.deserialize(person_data=json.loads(data)) + data = json.dumps(deserialized_person.serialize()) + cache_data(self.metadata_pointer, data=data) + logg.debug(f'caching: {data} with key: {self.metadata_pointer}') diff --git a/apps/cic-ussd/cic_ussd/metadata/person.py b/apps/cic-ussd/cic_ussd/metadata/person.py new file mode 100644 index 00000000..57dec98c --- /dev/null +++ b/apps/cic-ussd/cic_ussd/metadata/person.py @@ -0,0 +1,12 @@ +# standard imports + +# third-party imports + +# local imports +from .base import MetadataRequestsHandler + + +class PersonMetadata(MetadataRequestsHandler): + + def __init__(self, identifier: bytes): + super().__init__(cic_type='cic.person', identifier=identifier) diff --git a/apps/cic-ussd/cic_ussd/metadata/phone.py b/apps/cic-ussd/cic_ussd/metadata/phone.py new file mode 100644 index 00000000..46d508cb --- /dev/null +++ b/apps/cic-ussd/cic_ussd/metadata/phone.py @@ -0,0 +1,13 @@ +# standard imports +import logging + +# external imports + +# local imports +from .base import MetadataRequestsHandler + + +class PhonePointerMetadata(MetadataRequestsHandler): + + def __init__(self, identifier: bytes): + super().__init__(cic_type='cic.msisdn', identifier=identifier) diff --git a/apps/cic-ussd/cic_ussd/metadata/signer.py b/apps/cic-ussd/cic_ussd/metadata/signer.py index 5639fcd5..dc187d29 100644 --- a/apps/cic-ussd/cic_ussd/metadata/signer.py +++ b/apps/cic-ussd/cic_ussd/metadata/signer.py @@ -44,7 +44,7 @@ class Signer: gpg_keys = self.gpg.list_keys() key_algorithm = gpg_keys[0].get('algo') key_id = gpg_keys[0].get("keyid") - logg.info(f'using signing key: {key_id}, algorithm: {key_algorithm}') + logg.debug(f'using signing key: {key_id}, algorithm: {key_algorithm}') return gpg_keys[0] def 
sign_digest(self, data: bytes): diff --git a/apps/cic-ussd/cic_ussd/metadata/user.py b/apps/cic-ussd/cic_ussd/metadata/user.py deleted file mode 100644 index b72bebaf..00000000 --- a/apps/cic-ussd/cic_ussd/metadata/user.py +++ /dev/null @@ -1,102 +0,0 @@ -# standard imports -import json -import logging -import os - -# third-party imports -import requests -from cic_types.models.person import generate_metadata_pointer, Person - -# local imports -from cic_ussd.chain import Chain -from cic_ussd.metadata import make_request -from cic_ussd.metadata.signer import Signer -from cic_ussd.redis import cache_data - -logg = logging.getLogger() - - -class UserMetadata: - """ - :cvar base_url: - :type base_url: - """ - base_url = None - - def __init__(self, identifier: bytes): - """ - :param identifier: - :type identifier: - """ - self. headers = { - 'X-CIC-AUTOMERGE': 'server', - 'Content-Type': 'application/json' - } - self.identifier = identifier - self.metadata_pointer = generate_metadata_pointer( - identifier=self.identifier, - cic_type='cic.person' - ) - if self.base_url: - self.url = os.path.join(self.base_url, self.metadata_pointer) - - def create(self, data: dict): - try: - data = json.dumps(data).encode('utf-8') - result = make_request(method='POST', url=self.url, data=data, headers=self.headers) - metadata = result.content - self.edit(data=metadata, engine='pgp') - logg.info(f'Get sign material response status: {result.status_code}') - result.raise_for_status() - except requests.exceptions.HTTPError as error: - raise RuntimeError(error) - - def edit(self, data: bytes, engine: str): - """ - :param data: - :type data: - :param engine: - :type engine: - :return: - :rtype: - """ - cic_meta_signer = Signer() - signature = cic_meta_signer.sign_digest(data=data) - algorithm = cic_meta_signer.get_operational_key().get('algo') - formatted_data = { - 'm': data.decode('utf-8'), - 's': { - 'engine': engine, - 'algo': algorithm, - 'data': signature, - 'digest': json.loads(data).get('digest'), - } - } - formatted_data = json.dumps(formatted_data).encode('utf-8') - - try: - result = make_request(method='PUT', url=self.url, data=formatted_data, headers=self.headers) - logg.info(f'Signed content submission status: {result.status_code}.') - result.raise_for_status() - except requests.exceptions.HTTPError as error: - raise RuntimeError(error) - - def query(self): - result = make_request(method='GET', url=self.url) - status = result.status_code - logg.info(f'Get latest data status: {status}') - try: - if status == 200: - response_data = result.content - data = json.loads(response_data.decode()) - - # validate data - person = Person() - deserialized_person = person.deserialize(metadata=json.loads(data)) - - cache_data(key=self.metadata_pointer, data=json.dumps(deserialized_person.serialize())) - elif status == 404: - logg.info('The data is not available and might need to be added.') - result.raise_for_status() - except requests.exceptions.HTTPError as error: - raise RuntimeError(error) diff --git a/apps/cic-ussd/cic_ussd/processor.py b/apps/cic-ussd/cic_ussd/processor.py index 32e69a3e..061e9fbb 100644 --- a/apps/cic-ussd/cic_ussd/processor.py +++ b/apps/cic-ussd/cic_ussd/processor.py @@ -15,6 +15,7 @@ from cic_ussd.balance import BalanceManager, compute_operational_balance, get_ca from cic_ussd.chain import Chain from cic_ussd.db.models.user import AccountStatus, User from cic_ussd.db.models.ussd_session import UssdSession +from cic_ussd.error import MetadataNotFoundError from cic_ussd.menu.ussd_menu import 
UssdMenu from cic_ussd.metadata import blockchain_address_to_metadata_pointer from cic_ussd.phone_number import get_user_by_phone_number @@ -22,6 +23,7 @@ from cic_ussd.redis import cache_data, create_cached_data_key, get_cached_data from cic_ussd.state_machine import UssdStateMachine from cic_ussd.conversions import to_wei, from_wei from cic_ussd.translation import translation_for +from cic_types.models.person import generate_metadata_pointer, get_contact_data_from_vcard logg = logging.getLogger(__name__) @@ -136,7 +138,7 @@ def process_transaction_pin_authorization(user: User, display_key: str, ussd_ses tx_sender_information = define_account_tx_metadata(user=user) token_symbol = 'SRF' - user_input = ussd_session.get('user_input').split('*')[-1] + user_input = ussd_session.get('session_data').get('transaction_amount') transaction_amount = to_wei(value=int(user_input)) logg.debug('Requires integration to determine user tokens.') return process_pin_authorization( @@ -187,21 +189,55 @@ def format_transactions(transactions: list, preferred_language: str): value = transaction.get('to_value') timestamp = transaction.get('timestamp') action_tag = transaction.get('action_tag') + direction = transaction.get('direction') token_symbol = 'SRF' if action_tag == 'SENT' or action_tag == 'ULITUMA': - formatted_transactions += f'{action_tag} {value} {token_symbol} {recipient_phone_number} {timestamp}.\n' + formatted_transactions += f'{action_tag} {value} {token_symbol} {direction} {recipient_phone_number} {timestamp}.\n' else: - formatted_transactions += f'{action_tag} {value} {token_symbol} {sender_phone_number} {timestamp}. \n' + formatted_transactions += f'{action_tag} {value} {token_symbol} {direction} {sender_phone_number} {timestamp}. \n' return formatted_transactions else: if preferred_language == 'en': - formatted_transactions = 'Empty' + formatted_transactions = 'NO TRANSACTION HISTORY' else: - formatted_transactions = 'Hamna historia' + formatted_transactions = 'HAMNA RIPOTI YA MATUMIZI' return formatted_transactions +def process_display_user_metadata(user: User, display_key: str): + """ + :param user: + :type user: + :param display_key: + :type display_key: + """ + key = generate_metadata_pointer( + identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address), + cic_type='cic.person' + ) + user_metadata = get_cached_data(key) + if user_metadata: + user_metadata = json.loads(user_metadata) + contact_data = get_contact_data_from_vcard(vcard=user_metadata.get('vcard')) + logg.debug(f'{contact_data}') + full_name = f'{contact_data.get("given")} {contact_data.get("family")}' + gender = user_metadata.get('gender') + products = ', '.join(user_metadata.get('products')) + location = user_metadata.get('location').get('area_name') + + return translation_for( + key=display_key, + preferred_language=user.preferred_language, + full_name=full_name, + gender=gender, + location=location, + products=products + ) + else: + raise MetadataNotFoundError(f'Expected person metadata but found none in cache for key: {key}') + + def process_account_statement(user: User, display_key: str, ussd_session: dict): """ :param user: @@ -229,7 +265,7 @@ def process_account_statement(user: User, display_key: str, ussd_session: dict): middle_transaction_set += transactions[3:][:3] first_transaction_set += transactions[:3] # there are probably much cleaner and operational inexpensive ways to do this so find them - elif 4 < len(transactions) < 7: + elif 3 < len(transactions) < 7: 
middle_transaction_set += transactions[3:] first_transaction_set += transactions[:3] else: @@ -295,11 +331,11 @@ def process_start_menu(display_key: str, user: User): operational_balance = compute_operational_balance(balances=balances_data) # retrieve and cache account's metadata - s_query_user_metadata = celery.signature( - 'cic_ussd.tasks.metadata.query_user_metadata', + s_query_person_metadata = celery.signature( + 'cic_ussd.tasks.metadata.query_person_metadata', [blockchain_address] ) - s_query_user_metadata.apply_async(queue='cic-ussd') + s_query_person_metadata.apply_async(queue='cic-ussd') # retrieve and cache account's statement retrieve_account_statement(blockchain_address=blockchain_address) @@ -349,18 +385,24 @@ def process_request(user_input: str, user: User, ussd_session: Optional[dict] = if user.has_valid_pin(): last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number) - key = create_cached_data_key( + key = generate_metadata_pointer( identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address), - salt='cic.person' + cic_type='cic.person' ) - user_metadata = get_cached_data(key=key) + person_metadata = get_cached_data(key=key) if last_ussd_session: # get last state last_state = last_ussd_session.state - logg.debug(f'LAST USSD SESSION STATE: {last_state}') # if last state is account_creation_prompt and metadata exists, show start menu - if last_state == 'account_creation_prompt' and user_metadata is not None: + if last_state in [ + 'account_creation_prompt', + 'exit', + 'exit_invalid_pin', + 'exit_invalid_new_pin', + 'exit_pin_mismatch', + 'exit_invalid_request' + ] and person_metadata is not None: return UssdMenu.find_by_name(name='start') else: return UssdMenu.find_by_name(name=last_state) @@ -420,9 +462,13 @@ def custom_display_text( return process_start_menu(display_key=display_key, user=user) elif 'pin_authorization' in menu_name: return process_pin_authorization(display_key=display_key, user=user) + elif 'enter_current_pin' in menu_name: + return process_pin_authorization(display_key=display_key, user=user) elif menu_name == 'account_balances': return process_account_balances(display_key=display_key, user=user, ussd_session=ussd_session) elif 'transaction_set' in menu_name: return process_account_statement(display_key=display_key, user=user, ussd_session=ussd_session) + elif menu_name == 'display_user_metadata': + return process_display_user_metadata(display_key=display_key, user=user) else: return translation_for(key=display_key, preferred_language=user.preferred_language) diff --git a/apps/cic-ussd/cic_ussd/runnable/server.py b/apps/cic-ussd/cic_ussd/runnable/server.py index a7bdf936..e969d619 100644 --- a/apps/cic-ussd/cic_ussd/runnable/server.py +++ b/apps/cic-ussd/cic_ussd/runnable/server.py @@ -23,10 +23,11 @@ from cic_ussd.encoder import PasswordEncoder from cic_ussd.files.local_files import create_local_file_data_stores, json_file_parser from cic_ussd.menu.ussd_menu import UssdMenu from cic_ussd.metadata.signer import Signer -from cic_ussd.metadata.user import UserMetadata +from cic_ussd.metadata.base import Metadata from cic_ussd.operations import (define_response_with_content, process_menu_interaction_requests, define_multilingual_responses) +from cic_ussd.phone_number import process_phone_number from cic_ussd.redis import InMemoryStore from cic_ussd.requests import (get_request_endpoint, get_request_method, @@ -35,7 +36,8 @@ from cic_ussd.requests import (get_request_endpoint, 
process_pin_reset_requests) from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession from cic_ussd.state_machine import UssdStateMachine -from cic_ussd.validator import check_ip, check_request_content_length, check_service_code, validate_phone_number +from cic_ussd.validator import check_ip, check_request_content_length, check_service_code, validate_phone_number, \ + validate_presence logging.basicConfig(level=logging.WARNING) logg = logging.getLogger() @@ -63,7 +65,6 @@ config.censor('PASSWORD', 'DATABASE') # define log levels if args.vv: logging.getLogger().setLevel(logging.DEBUG) - logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG) elif args.v: logging.getLogger().setLevel(logging.INFO) @@ -85,7 +86,7 @@ UssdMenu.ussd_menu_db = ussd_menu_db # set up db data_source_name = dsn_from_config(config) -SessionBase.connect(data_source_name=data_source_name) +SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG')) # create session for the life time of http request SessionBase.session = SessionBase.create_session() @@ -98,12 +99,18 @@ InMemoryStore.cache = redis.StrictRedis(host=config.get('REDIS_HOSTNAME'), InMemoryUssdSession.redis_cache = InMemoryStore.cache # define metadata URL -UserMetadata.base_url = config.get('CIC_META_URL') +Metadata.base_url = config.get('CIC_META_URL') # define signer values -Signer.gpg_path = config.get('PGP_EXPORT_DIR') +export_dir = config.get('PGP_EXPORT_DIR') +if export_dir: + validate_presence(path=export_dir) +Signer.gpg_path = export_dir Signer.gpg_passphrase = config.get('PGP_PASSPHRASE') -Signer.key_file_path = f"{config.get('PGP_KEYS_PATH')}{config.get('PGP_PRIVATE_KEYS')}" +key_file_path = f"{config.get('PGP_KEYS_PATH')}{config.get('PGP_PRIVATE_KEYS')}" +if key_file_path: + validate_presence(path=key_file_path) +Signer.key_file_path = key_file_path # initialize celery app celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL')) @@ -144,6 +151,10 @@ def application(env, start_response): external_session_id = post_data.get('sessionId') user_input = post_data.get('text') + # add validation for phone number + if phone_number: + phone_number = process_phone_number(phone_number=phone_number, region=config.get('PHONE_NUMBER_REGION')) + # validate ip address if not check_ip(config=config, env=env): start_response('403 Sneaky, sneaky', errors_headers) @@ -167,8 +178,10 @@ def application(env, start_response): # validate phone number if not validate_phone_number(phone_number): + logg.error('invalid phone number {}'.format(phone_number)) start_response('400 Invalid phone number format', errors_headers) return [] + logg.debug('session {} started for {}'.format(external_session_id, phone_number)) # handle menu interaction requests chain_str = chain_spec.__str__() diff --git a/apps/cic-ussd/cic_ussd/runnable/tasker.py b/apps/cic-ussd/cic_ussd/runnable/tasker.py index 1cdbe47a..ac39e06f 100644 --- a/apps/cic-ussd/cic_ussd/runnable/tasker.py +++ b/apps/cic-ussd/cic_ussd/runnable/tasker.py @@ -13,12 +13,14 @@ from confini import Config from cic_ussd.db import dsn_from_config from cic_ussd.db.models.base import SessionBase from cic_ussd.metadata.signer import Signer -from cic_ussd.metadata.user import UserMetadata +from cic_ussd.metadata.base import Metadata from cic_ussd.redis import InMemoryStore from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession +from cic_ussd.validator import validate_presence 
logging.basicConfig(level=logging.WARNING) logg = logging.getLogger() +logging.getLogger('gnupg').setLevel(logging.WARNING) config_directory = '/usr/local/etc/cic-ussd/' @@ -46,7 +48,7 @@ logg.debug(config) # connect to database data_source_name = dsn_from_config(config) -SessionBase.connect(data_source_name=data_source_name) +SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG')) # verify database connection with minimal sanity query session = SessionBase.create_session() @@ -62,12 +64,18 @@ InMemoryStore.cache = redis.StrictRedis(host=config.get('REDIS_HOSTNAME'), InMemoryUssdSession.redis_cache = InMemoryStore.cache # define metadata URL -UserMetadata.base_url = config.get('CIC_META_URL') +Metadata.base_url = config.get('CIC_META_URL') # define signer values -Signer.gpg_path = config.get('PGP_EXPORT_DIR') +export_dir = config.get('PGP_EXPORT_DIR') +if export_dir: + validate_presence(path=export_dir) +Signer.gpg_path = export_dir Signer.gpg_passphrase = config.get('PGP_PASSPHRASE') -Signer.key_file_path = f"{config.get('PGP_KEYS_PATH')}{config.get('PGP_PRIVATE_KEYS')}" +key_file_path = f"{config.get('PGP_KEYS_PATH')}{config.get('PGP_PRIVATE_KEYS')}" +if key_file_path: + validate_presence(path=key_file_path) +Signer.key_file_path = key_file_path # set up celery current_app = celery.Celery(__name__) diff --git a/apps/cic-ussd/cic_ussd/state_machine/logic/transaction.py b/apps/cic-ussd/cic_ussd/state_machine/logic/transaction.py index 9023960a..7a006a78 100644 --- a/apps/cic-ussd/cic_ussd/state_machine/logic/transaction.py +++ b/apps/cic-ussd/cic_ussd/state_machine/logic/transaction.py @@ -78,9 +78,10 @@ def save_recipient_phone_to_session_data(state_machine_data: Tuple[str, dict, Us :type state_machine_data: str """ user_input, ussd_session, user = state_machine_data - session_data = { - 'recipient_phone_number': user_input - } + + session_data = ussd_session.get('session_data') or {} + session_data['recipient_phone_number'] = user_input + save_to_in_memory_ussd_session_data(queue='cic-ussd', session_data=session_data, ussd_session=ussd_session) @@ -96,11 +97,11 @@ def retrieve_recipient_metadata(state_machine_data: Tuple[str, dict, User]): recipient = get_user_by_phone_number(phone_number=user_input) blockchain_address = recipient.blockchain_address # retrieve and cache account's metadata - s_query_user_metadata = celery.signature( - 'cic_ussd.tasks.metadata.query_user_metadata', + s_query_person_metadata = celery.signature( + 'cic_ussd.tasks.metadata.query_person_metadata', [blockchain_address] ) - s_query_user_metadata.apply_async(queue='cic-ussd') + s_query_person_metadata.apply_async(queue='cic-ussd') def save_transaction_amount_to_session_data(state_machine_data: Tuple[str, dict, User]): @@ -109,9 +110,10 @@ def save_transaction_amount_to_session_data(state_machine_data: Tuple[str, dict, :type state_machine_data: str """ user_input, ussd_session, user = state_machine_data - session_data = { - 'transaction_amount': user_input - } + + session_data = ussd_session.get('session_data') or {} + session_data['transaction_amount'] = user_input + save_to_in_memory_ussd_session_data(queue='cic-ussd', session_data=session_data, ussd_session=ussd_session) diff --git a/apps/cic-ussd/cic_ussd/state_machine/logic/user.py b/apps/cic-ussd/cic_ussd/state_machine/logic/user.py index 7e53ea82..fd53904d 100644 --- a/apps/cic-ussd/cic_ussd/state_machine/logic/user.py +++ b/apps/cic-ussd/cic_ussd/state_machine/logic/user.py @@ -11,7 +11,7 
@@ from cic_types.models.person import generate_vcard_from_contact_data, manage_ide # local imports from cic_ussd.chain import Chain from cic_ussd.db.models.user import User -from cic_ussd.error import UserMetadataNotFoundError +from cic_ussd.error import MetadataNotFoundError from cic_ussd.metadata import blockchain_address_to_metadata_pointer from cic_ussd.operations import save_to_in_memory_ussd_session_data from cic_ussd.redis import get_cached_data @@ -164,11 +164,11 @@ def save_complete_user_metadata(state_machine_data: Tuple[str, dict, User]): user_metadata = format_user_metadata(metadata=metadata, user=user) blockchain_address = user.blockchain_address - s_create_user_metadata = celery.signature( - 'cic_ussd.tasks.metadata.create_user_metadata', + s_create_person_metadata = celery.signature( + 'cic_ussd.tasks.metadata.create_person_metadata', [blockchain_address, user_metadata] ) - s_create_user_metadata.apply_async(queue='cic-ussd') + s_create_person_metadata.apply_async(queue='cic-ussd') def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, User]): @@ -181,7 +181,7 @@ def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, User]): user_metadata = get_cached_data(key=key) if not user_metadata: - raise UserMetadataNotFoundError(f'Expected user metadata but found none in cache for key: {blockchain_address}') + raise MetadataNotFoundError(f'Expected user metadata but found none in cache for key: {blockchain_address}') given_name = ussd_session.get('session_data').get('given_name') family_name = ussd_session.get('session_data').get('family_name') @@ -192,7 +192,7 @@ def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, User]): # validate user metadata person = Person() user_metadata = json.loads(user_metadata) - deserialized_person = person.deserialize(metadata=user_metadata) + deserialized_person = person.deserialize(person_data=user_metadata) # edit specific metadata attribute if given_name: @@ -211,18 +211,18 @@ def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, User]): edited_metadata = deserialized_person.serialize() - s_edit_user_metadata = celery.signature( - 'cic_ussd.tasks.metadata.edit_user_metadata', - [blockchain_address, edited_metadata, 'pgp'] + s_edit_person_metadata = celery.signature( + 'cic_ussd.tasks.metadata.edit_person_metadata', + [blockchain_address, edited_metadata] ) - s_edit_user_metadata.apply_async(queue='cic-ussd') + s_edit_person_metadata.apply_async(queue='cic-ussd') def get_user_metadata(state_machine_data: Tuple[str, dict, User]): user_input, ussd_session, user = state_machine_data blockchain_address = user.blockchain_address s_get_user_metadata = celery.signature( - 'cic_ussd.tasks.metadata.query_user_metadata', + 'cic_ussd.tasks.metadata.query_person_metadata', [blockchain_address] ) s_get_user_metadata.apply_async(queue='cic-ussd') diff --git a/apps/cic-ussd/cic_ussd/tasks/base.py b/apps/cic-ussd/cic_ussd/tasks/base.py index 4b5e535b..0f9a7438 100644 --- a/apps/cic-ussd/cic_ussd/tasks/base.py +++ b/apps/cic-ussd/cic_ussd/tasks/base.py @@ -5,9 +5,24 @@ import celery import sqlalchemy # local imports +from cic_ussd.error import MetadataStoreError +from cic_ussd.db.models.base import SessionBase -class CriticalTask(celery.Task): +class BaseTask(celery.Task): + + session_func = SessionBase.create_session + + def create_session(self): + return BaseTask.session_func() + + + def log_banner(self): + logg.debug('task {} root uuid {}'.format(self.__class__.__name__, self.request.root_id)) + 
return + + +class CriticalTask(BaseTask): retry_jitter = True retry_backoff = True retry_backoff_max = 8 @@ -17,4 +32,11 @@ class CriticalSQLAlchemyTask(CriticalTask): autoretry_for = ( sqlalchemy.exc.DatabaseError, sqlalchemy.exc.TimeoutError, + sqlalchemy.exc.ResourceClosedError, + ) + + +class CriticalMetadataTask(CriticalTask): + autoretry_for = ( + MetadataStoreError, ) diff --git a/apps/cic-ussd/cic_ussd/tasks/callback_handler.py b/apps/cic-ussd/cic_ussd/tasks/callback_handler.py index 2d4fca7e..286840b8 100644 --- a/apps/cic-ussd/cic_ussd/tasks/callback_handler.py +++ b/apps/cic-ussd/cic_ussd/tasks/callback_handler.py @@ -53,6 +53,13 @@ def process_account_creation_callback(self, result: str, url: str, status_code: session.add(user) session.commit() session.close() + + queue = self.request.delivery_info.get('routing_key') + s = celery.signature( + 'cic_ussd.tasks.metadata.add_phone_pointer', + [result, phone_number] + ) + s.apply_async(queue=queue) # expire cache cache.expire(task_id, timedelta(seconds=180)) @@ -65,6 +72,8 @@ def process_account_creation_callback(self, result: str, url: str, status_code: session.close() raise ActionDataNotFoundError(f'Account creation task: {task_id}, returned unexpected response: {status_code}') + session.close() + @celery_app.task def process_incoming_transfer_callback(result: dict, param: str, status_code: int): @@ -118,6 +127,7 @@ def process_incoming_transfer_callback(result: dict, param: str, status_code: in session.close() raise ValueError(f'Unexpected status code: {status_code}.') + session.close() @celery_app.task def process_balances_callback(result: list, param: str, status_code: int): @@ -143,21 +153,24 @@ def define_transaction_action_tag( # check preferred language if preferred_language == 'en': action_tag = 'SENT' + direction = 'TO' else: action_tag = 'ULITUMA' + direction = 'KWA' else: if preferred_language == 'en': action_tag = 'RECEIVED' + direction = 'FROM' else: action_tag = 'ULIPOKEA' - return action_tag + direction = 'KUTOKA' + return action_tag, direction @celery_app.task def process_statement_callback(result, param: str, status_code: int): if status_code == 0: # create session - session = SessionBase.create_session() processed_transactions = [] # process transaction data to cache @@ -170,20 +183,23 @@ def process_statement_callback(result, param: str, status_code: int): if '0x0000000000000000000000000000000000000000' in source_token: pass else: + session = SessionBase.create_session() # describe a processed transaction processed_transaction = {} # check if sender is in the system sender: User = session.query(User).filter_by(blockchain_address=sender_blockchain_address).first() + owner: User = session.query(User).filter_by(blockchain_address=param).first() if sender: processed_transaction['sender_phone_number'] = sender.phone_number - action_tag = define_transaction_action_tag( - preferred_language=sender.preferred_language, + action_tag, direction = define_transaction_action_tag( + preferred_language=owner.preferred_language, sender_blockchain_address=sender_blockchain_address, param=param ) processed_transaction['action_tag'] = action_tag + processed_transaction['direction'] = direction else: processed_transaction['sender_phone_number'] = 'GRASSROOTS ECONOMICS' @@ -196,9 +212,11 @@ def process_statement_callback(result, param: str, status_code: int): else: logg.warning(f'Tx with recipient not found in cic-ussd') + session.close() + # add transaction values - processed_transaction['to_value'] = 
from_wei(value=transaction.get('to_value')) - processed_transaction['from_value'] = from_wei(value=transaction.get('from_value')) + processed_transaction['to_value'] = from_wei(value=transaction.get('to_value')).__str__() + processed_transaction['from_value'] = from_wei(value=transaction.get('from_value')).__str__() raw_timestamp = transaction.get('timestamp') timestamp = datetime.utcfromtimestamp(raw_timestamp).strftime('%d/%m/%y, %H:%M') diff --git a/apps/cic-ussd/cic_ussd/tasks/metadata.py b/apps/cic-ussd/cic_ussd/tasks/metadata.py index 4410148a..fa6caf86 100644 --- a/apps/cic-ussd/cic_ussd/tasks/metadata.py +++ b/apps/cic-ussd/cic_ussd/tasks/metadata.py @@ -1,20 +1,22 @@ # standard imports -import json import logging # third-party imports import celery +from hexathon import strip_0x # local imports from cic_ussd.metadata import blockchain_address_to_metadata_pointer -from cic_ussd.metadata.user import UserMetadata +from cic_ussd.metadata.person import PersonMetadata +from cic_ussd.metadata.phone import PhonePointerMetadata +from cic_ussd.tasks.base import CriticalMetadataTask celery_app = celery.current_app -logg = logging.getLogger() +logg = logging.getLogger().getChild(__name__) @celery_app.task -def query_user_metadata(blockchain_address: str): +def query_person_metadata(blockchain_address: str): """ :param blockchain_address: :type blockchain_address: @@ -22,12 +24,12 @@ def query_user_metadata(blockchain_address: str): :rtype: """ identifier = blockchain_address_to_metadata_pointer(blockchain_address=blockchain_address) - user_metadata_client = UserMetadata(identifier=identifier) - user_metadata_client.query() + person_metadata_client = PersonMetadata(identifier=identifier) + person_metadata_client.query() @celery_app.task -def create_user_metadata(blockchain_address: str, data: dict): +def create_person_metadata(blockchain_address: str, data: dict): """ :param blockchain_address: :type blockchain_address: @@ -37,12 +39,20 @@ def create_user_metadata(blockchain_address: str, data: dict): :rtype: """ identifier = blockchain_address_to_metadata_pointer(blockchain_address=blockchain_address) - user_metadata_client = UserMetadata(identifier=identifier) - user_metadata_client.create(data=data) + person_metadata_client = PersonMetadata(identifier=identifier) + person_metadata_client.create(data=data) @celery_app.task -def edit_user_metadata(blockchain_address: str, data: bytes, engine: str): +def edit_person_metadata(blockchain_address: str, data: bytes): identifier = blockchain_address_to_metadata_pointer(blockchain_address=blockchain_address) - user_metadata_client = UserMetadata(identifier=identifier) - user_metadata_client.edit(data=data, engine=engine) + person_metadata_client = PersonMetadata(identifier=identifier) + person_metadata_client.edit(data=data) + + +@celery_app.task(bind=True, base=CriticalMetadataTask) +def add_phone_pointer(self, blockchain_address: str, phone_number: str): + identifier = phone_number.encode('utf-8') + stripped_address = strip_0x(blockchain_address) + phone_metadata_client = PhonePointerMetadata(identifier=identifier) + phone_metadata_client.create(data=stripped_address) diff --git a/apps/cic-ussd/cic_ussd/tasks/ussd_session.py b/apps/cic-ussd/cic_ussd/tasks/ussd_session.py index 85f388e2..dac199f2 100644 --- a/apps/cic-ussd/cic_ussd/tasks/ussd_session.py +++ b/apps/cic-ussd/cic_ussd/tasks/ussd_session.py @@ -70,3 +70,4 @@ def persist_session_to_db(external_session_id: str): session.close() raise SessionNotFoundError('Session does not exist!') + 
session.close() diff --git a/apps/cic-ussd/cic_ussd/transactions.py b/apps/cic-ussd/cic_ussd/transactions.py index 08205de0..be08d745 100644 --- a/apps/cic-ussd/cic_ussd/transactions.py +++ b/apps/cic-ussd/cic_ussd/transactions.py @@ -47,7 +47,7 @@ def to_wei(value: int) -> int: :return: Wei equivalent of value in SRF :rtype: int """ - return int(value * 1e+18) + return int(value * 1e+6) class IncomingTransactionProcessor: diff --git a/apps/cic-ussd/cic_ussd/validator.py b/apps/cic-ussd/cic_ussd/validator.py index a3647375..1fc10065 100644 --- a/apps/cic-ussd/cic_ussd/validator.py +++ b/apps/cic-ussd/cic_ussd/validator.py @@ -1,6 +1,8 @@ # standard imports import logging +import os import re +import ipaddress # third-party imports from confini import Config @@ -20,7 +22,14 @@ def check_ip(config: Config, env: dict): :return: Request IP validity :rtype: boolean """ - return env.get('REMOTE_ADDR') == config.get('APP_ALLOWED_IP') + # TODO: do once at boot time + actual_ip = ipaddress.ip_network(env.get('REMOTE_ADDR') + '/32') + for allowed_net_src in config.get('APP_ALLOWED_IP').split(','): + allowed_net = ipaddress.ip_network(allowed_net_src) + if actual_ip.subnet_of(allowed_net): + return True + + return False def check_request_content_length(config: Config, env: dict): @@ -110,7 +119,7 @@ def validate_phone_number(phone: str): def validate_response_type(processor_response: str) -> bool: - """1*3443*3443*Philip*Wanga*1*Juja*Software Developer*2*3 + """ This function checks the prefix for a corresponding menu's text from the response offered by the Ussd Processor and determines whether the response should prompt the end of a ussd session or the :param processor_response: A ussd menu's text value. @@ -126,3 +135,14 @@ def validate_response_type(processor_response: str) -> bool: return True return False + +def validate_presence(path: str): + """ + + """ + is_present = os.path.exists(path=path) + + if not is_present: + raise ValueError(f'Directory/File in path: {path} not found.') + else: + logg.debug(f'Loading data from: {path}') diff --git a/apps/cic-ussd/cic_ussd/version.py b/apps/cic-ussd/cic_ussd/version.py index 62e14ec7..5033279d 100644 --- a/apps/cic-ussd/cic_ussd/version.py +++ b/apps/cic-ussd/cic_ussd/version.py @@ -1,7 +1,7 @@ # standard imports import semver -version = (0, 3, 0, 'alpha.7') +version = (0, 3, 0, 'alpha.8') version_object = semver.VersionInfo( major=version[0], diff --git a/apps/cic-ussd/docker/Dockerfile b/apps/cic-ussd/docker/Dockerfile index 933ffc41..32a094e8 100644 --- a/apps/cic-ussd/docker/Dockerfile +++ b/apps/cic-ussd/docker/Dockerfile @@ -51,4 +51,4 @@ RUN cd cic-ussd && \ COPY cic-ussd/.config/ /usr/local/etc/cic-ussd/ COPY cic-ussd/cic_ussd/db/migrations/ /usr/local/share/cic-ussd/alembic -WORKDIR /root \ No newline at end of file +WORKDIR /root diff --git a/apps/cic-ussd/docker/start_tasker.sh b/apps/cic-ussd/docker/start_tasker.sh index bd588f28..37f32597 100644 --- a/apps/cic-ussd/docker/start_tasker.sh +++ b/apps/cic-ussd/docker/start_tasker.sh @@ -2,4 +2,4 @@ . /root/db.sh -/usr/local/bin/cic-ussd-tasker -vv "$@" \ No newline at end of file +/usr/local/bin/cic-ussd-tasker $@ diff --git a/apps/cic-ussd/docker/start_uwsgi.sh b/apps/cic-ussd/docker/start_uwsgi.sh index ac5261cb..ff3271ec 100644 --- a/apps/cic-ussd/docker/start_uwsgi.sh +++ b/apps/cic-ussd/docker/start_uwsgi.sh @@ -2,4 +2,6 @@ . 
/root/db.sh -/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/server.py --http :9000 --pyargv "-vv" +server_port=${SERVER_PORT:-9000} + +/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/server.py --http :$server_port --pyargv "$@" diff --git a/apps/cic-ussd/requirements.txt b/apps/cic-ussd/requirements.txt index ed8b1b7b..fe3e79e2 100644 --- a/apps/cic-ussd/requirements.txt +++ b/apps/cic-ussd/requirements.txt @@ -1,5 +1,4 @@ -cic_base[full_graph]~=0.1.2a46 -cic-eth~=0.10.1b1 +cic_base[full_graph]~=0.1.2a68 +cic-eth~=0.11.0b3 cic-notify~=0.4.0a3 -cic-types~=0.1.0a8 -pyresttest==1.7.1 \ No newline at end of file +cic-types~=0.1.0a10 diff --git a/apps/cic-ussd/states/user_metadata_states.json b/apps/cic-ussd/states/user_metadata_states.json index 285dafb0..59bc1fd7 100644 --- a/apps/cic-ussd/states/user_metadata_states.json +++ b/apps/cic-ussd/states/user_metadata_states.json @@ -4,5 +4,6 @@ "enter_gender", "enter_age", "enter_location", - "enter_products" + "enter_products", + "display_metadata_pin_authorization" ] \ No newline at end of file diff --git a/apps/cic-ussd/tests/cic_ussd/metadata/test_user_metadata.py b/apps/cic-ussd/tests/cic_ussd/metadata/test_user_metadata.py index dc04951e..6c932ce0 100644 --- a/apps/cic-ussd/tests/cic_ussd/metadata/test_user_metadata.py +++ b/apps/cic-ussd/tests/cic_ussd/metadata/test_user_metadata.py @@ -9,26 +9,26 @@ from cic_types.models.person import generate_metadata_pointer # local imports from cic_ussd.metadata import blockchain_address_to_metadata_pointer from cic_ussd.metadata.signer import Signer -from cic_ussd.metadata.user import UserMetadata +from cic_ussd.metadata.person import PersonMetadata from cic_ussd.redis import get_cached_data def test_user_metadata(create_activated_user, define_metadata_pointer_url, load_config): - UserMetadata.base_url = load_config.get('CIC_META_URL') + PersonMetadata.base_url = load_config.get('CIC_META_URL') identifier = blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address) - user_metadata_client = UserMetadata(identifier=identifier) + person_metadata_client = PersonMetadata(identifier=identifier) - assert user_metadata_client.url == define_metadata_pointer_url + assert person_metadata_client.url == define_metadata_pointer_url -def test_create_user_metadata(caplog, - create_activated_user, - define_metadata_pointer_url, - load_config, - mock_meta_post_response, - person_metadata): +def test_create_person_metadata(caplog, + create_activated_user, + define_metadata_pointer_url, + load_config, + mock_meta_post_response, + person_metadata): identifier = blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address) - user_metadata_client = UserMetadata(identifier=identifier) + person_metadata_client = PersonMetadata(identifier=identifier) with requests_mock.Mocker(real_http=False) as request_mocker: request_mocker.register_uri( @@ -38,7 +38,7 @@ def test_create_user_metadata(caplog, reason='CREATED', content=json.dumps(mock_meta_post_response).encode('utf-8') ) - user_metadata_client.create(data=person_metadata) + person_metadata_client.create(data=person_metadata) assert 'Get signed material response status: 201' in caplog.text with pytest.raises(RuntimeError) as error: @@ -49,19 +49,19 @@ def test_create_user_metadata(caplog, status_code=400, reason='BAD REQUEST' ) - user_metadata_client.create(data=person_metadata) + 
person_metadata_client.create(data=person_metadata) assert str(error.value) == f'400 Client Error: BAD REQUEST for url: {define_metadata_pointer_url}' -def test_edit_user_metadata(caplog, - create_activated_user, - define_metadata_pointer_url, - load_config, - person_metadata, - setup_metadata_signer): +def test_edit_person_metadata(caplog, + create_activated_user, + define_metadata_pointer_url, + load_config, + person_metadata, + setup_metadata_signer): Signer.gpg_passphrase = load_config.get('KEYS_PASSPHRASE') identifier = blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address) - user_metadata_client = UserMetadata(identifier=identifier) + person_metadata_client = PersonMetadata(identifier=identifier) with requests_mock.Mocker(real_http=False) as request_mocker: request_mocker.register_uri( 'PUT', @@ -69,7 +69,7 @@ def test_edit_user_metadata(caplog, status_code=200, reason='OK' ) - user_metadata_client.edit(data=person_metadata, engine='pgp') + person_metadata_client.edit(data=person_metadata) assert 'Signed content submission status: 200' in caplog.text with pytest.raises(RuntimeError) as error: @@ -80,7 +80,7 @@ def test_edit_user_metadata(caplog, status_code=400, reason='BAD REQUEST' ) - user_metadata_client.edit(data=person_metadata, engine='pgp') + person_metadata_client.edit(data=person_metadata) assert str(error.value) == f'400 Client Error: BAD REQUEST for url: {define_metadata_pointer_url}' @@ -92,7 +92,7 @@ def test_get_user_metadata(caplog, person_metadata, setup_metadata_signer): identifier = blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address) - user_metadata_client = UserMetadata(identifier=identifier) + person_metadata_client = PersonMetadata(identifier=identifier) with requests_mock.Mocker(real_http=False) as request_mocker: request_mocker.register_uri( 'GET', @@ -101,7 +101,7 @@ def test_get_user_metadata(caplog, content=json.dumps(person_metadata).encode('utf-8'), reason='OK' ) - user_metadata_client.query() + person_metadata_client.query() assert 'Get latest data status: 200' in caplog.text key = generate_metadata_pointer( identifier=identifier, @@ -118,6 +118,6 @@ def test_get_user_metadata(caplog, status_code=404, reason='NOT FOUND' ) - user_metadata_client.query() + person_metadata_client.query() assert 'The data is not available and might need to be added.' 
in caplog.text assert str(error.value) == f'400 Client Error: NOT FOUND for url: {define_metadata_pointer_url}' diff --git a/apps/cic-ussd/tests/cic_ussd/state_machine/logic/test_user_logic.py b/apps/cic-ussd/tests/cic_ussd/state_machine/logic/test_user_logic.py index d55679a4..97f1327f 100644 --- a/apps/cic-ussd/tests/cic_ussd/state_machine/logic/test_user_logic.py +++ b/apps/cic-ussd/tests/cic_ussd/state_machine/logic/test_user_logic.py @@ -15,7 +15,7 @@ from cic_ussd.state_machine.logic.user import ( get_user_metadata, save_complete_user_metadata, process_gender_user_input, - save_profile_attribute_to_session_data, + save_metadata_attribute_to_session_data, update_account_status_to_active) @@ -41,14 +41,14 @@ def test_update_account_status_to_active(create_pending_user, create_in_db_ussd_ ("enter_location", "location", "Kangemi", "Kangemi"), ("enter_products", "products", "Mandazi", "Mandazi"), ]) -def test_save_save_profile_attribute_to_session_data(current_state, - expected_key, - expected_result, - user_input, - celery_session_worker, - create_activated_user, - create_in_db_ussd_session, - create_in_redis_ussd_session): +def test_save_metadata_attribute_to_session_data(current_state, + expected_key, + expected_result, + user_input, + celery_session_worker, + create_activated_user, + create_in_db_ussd_session, + create_in_redis_ussd_session): create_in_db_ussd_session.state = current_state serialized_in_db_ussd_session = create_in_db_ussd_session.to_json() state_machine_data = (user_input, serialized_in_db_ussd_session, create_activated_user) @@ -56,7 +56,7 @@ def test_save_save_profile_attribute_to_session_data(current_state, in_memory_ussd_session = json.loads(in_memory_ussd_session) assert in_memory_ussd_session.get('session_data') == {} serialized_in_db_ussd_session['state'] = current_state - save_profile_attribute_to_session_data(state_machine_data=state_machine_data) + save_metadata_attribute_to_session_data(state_machine_data=state_machine_data) in_memory_ussd_session = InMemoryStore.cache.get('AT974186') in_memory_ussd_session = json.loads(in_memory_ussd_session) @@ -82,23 +82,23 @@ def test_format_user_metadata(create_activated_user, from cic_types.models.person import Person formatted_user_metadata = format_user_metadata(metadata=complete_user_metadata, user=create_activated_user) person = Person() - user_metadata = person.deserialize(metadata=formatted_user_metadata) + user_metadata = person.deserialize(person_data=formatted_user_metadata) assert formatted_user_metadata == user_metadata.serialize() def test_save_complete_user_metadata(celery_session_worker, - complete_user_metadata, - create_activated_user, - create_in_redis_ussd_session, - mocker, - setup_chain_spec, - ussd_session_data): + complete_user_metadata, + create_activated_user, + create_in_redis_ussd_session, + mocker, + setup_chain_spec, + ussd_session_data): ussd_session = create_in_redis_ussd_session.get(ussd_session_data.get('external_session_id')) ussd_session = json.loads(ussd_session) ussd_session['session_data'] = complete_user_metadata user_metadata = format_user_metadata(metadata=ussd_session.get('session_data'), user=create_activated_user) state_machine_data = ('', ussd_session, create_activated_user) - mocked_create_metadata_task = mocker.patch('cic_ussd.tasks.metadata.create_user_metadata.apply_async') + mocked_create_metadata_task = mocker.patch('cic_ussd.tasks.metadata.create_person_metadata.apply_async') save_complete_user_metadata(state_machine_data=state_machine_data) 
mocked_create_metadata_task.assert_called_with( (user_metadata, create_activated_user.blockchain_address), @@ -127,7 +127,7 @@ def test_edit_user_metadata_attribute(celery_session_worker, } state_machine_data = ('', ussd_session, create_activated_user) - mocked_edit_metadata = mocker.patch('cic_ussd.tasks.metadata.edit_user_metadata.apply_async') + mocked_edit_metadata = mocker.patch('cic_ussd.tasks.metadata.edit_person_metadata.apply_async') edit_user_metadata_attribute(state_machine_data=state_machine_data) person_metadata['location']['area_name'] = 'nairobi' mocked_edit_metadata.assert_called_with( @@ -146,7 +146,7 @@ def test_get_user_metadata_attribute(celery_session_worker, ussd_session = json.loads(ussd_session) state_machine_data = ('', ussd_session, create_activated_user) - mocked_get_metadata = mocker.patch('cic_ussd.tasks.metadata.query_user_metadata.apply_async') + mocked_get_metadata = mocker.patch('cic_ussd.tasks.metadata.query_person_metadata.apply_async') get_user_metadata(state_machine_data=state_machine_data) mocked_get_metadata.assert_called_with( (create_activated_user.blockchain_address,), diff --git a/apps/cic-ussd/tests/fixtures/config.py b/apps/cic-ussd/tests/fixtures/config.py index ba03f4d5..9dd18c37 100644 --- a/apps/cic-ussd/tests/fixtures/config.py +++ b/apps/cic-ussd/tests/fixtures/config.py @@ -18,7 +18,7 @@ from cic_ussd.files.local_files import create_local_file_data_stores, json_file_ from cic_ussd.menu.ussd_menu import UssdMenu from cic_ussd.metadata import blockchain_address_to_metadata_pointer from cic_ussd.metadata.signer import Signer -from cic_ussd.metadata.user import UserMetadata +from cic_ussd.metadata.person import PersonMetadata from cic_ussd.state_machine import UssdStateMachine @@ -121,9 +121,9 @@ def setup_metadata_signer(load_config): @pytest.fixture(scope='function') def define_metadata_pointer_url(load_config, create_activated_user): identifier = blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address) - UserMetadata.base_url = load_config.get('CIC_META_URL') - user_metadata_client = UserMetadata(identifier=identifier) - return user_metadata_client.url + PersonMetadata.base_url = load_config.get('CIC_META_URL') + person_metadata_client = PersonMetadata(identifier=identifier) + return person_metadata_client.url @pytest.fixture(scope='function') diff --git a/apps/cic-ussd/transitions/exit_transitions.json b/apps/cic-ussd/transitions/exit_transitions.json index 0fc6c7da..d814040a 100644 --- a/apps/cic-ussd/transitions/exit_transitions.json +++ b/apps/cic-ussd/transitions/exit_transitions.json @@ -38,7 +38,7 @@ { "trigger": "scan_data", "source": "exit_invalid_recipient", - "dest": "send_enter_recipient", + "dest": "enter_transaction_recipient", "conditions": "cic_ussd.state_machine.logic.menu.menu_zero_zero_selected" }, { @@ -49,13 +49,13 @@ "after": "cic_ussd.state_machine.logic.sms.upsell_unregistered_recipient" }, { - "trigger": "feed_char", + "trigger": "scan_data", "source": "exit_successful_transaction", "dest": "start", "conditions": "cic_ussd.state_machine.logic.menu.menu_zero_zero_selected" }, { - "trigger": "feed_char", + "trigger": "scan_data", "source": "exit_successful_transaction", "dest": "exit", "conditions": "cic_ussd.state_machine.logic.menu.menu_ninety_nine_selected" diff --git a/apps/cic-ussd/transitions/user_metadata_transitions.json b/apps/cic-ussd/transitions/user_metadata_transitions.json index 603217ab..7ee8cab0 100644 --- 
a/apps/cic-ussd/transitions/user_metadata_transitions.json +++ b/apps/cic-ussd/transitions/user_metadata_transitions.json @@ -26,18 +26,18 @@ { "trigger": "scan_data", "source": "metadata_management", - "dest": "standard_pin_authorization", + "dest": "display_metadata_pin_authorization", "conditions": "cic_ussd.state_machine.logic.menu.menu_five_selected" }, { "trigger": "scan_data", - "source": "standard_pin_authorization", + "source": "display_metadata_pin_authorization", "dest": "display_user_metadata", "conditions": "cic_ussd.state_machine.logic.pin.is_authorized_pin" }, { "trigger": "scan_data", - "source": "standard_pin_authorization", + "source": "display_metadata_pin_authorization", "dest": "exit_pin_blocked", "conditions": "cic_ussd.state_machine.logic.pin.is_locked_account" }, diff --git a/apps/cic-ussd/var/lib/locale/ussd.en.yml b/apps/cic-ussd/var/lib/locale/ussd.en.yml index ea1f2214..e0751543 100644 --- a/apps/cic-ussd/var/lib/locale/ussd.en.yml +++ b/apps/cic-ussd/var/lib/locale/ussd.en.yml @@ -55,8 +55,8 @@ en: 4. Edit products 5. View my profile 0. Back - display_user_profile_data: |- - END Your details are: + display_user_metadata: |- + CON Your details are: Name: %{full_name} Gender: %{gender} Location: %{location} @@ -85,7 +85,7 @@ en: retry: |- CON Please enter your PIN. You have %{remaining_attempts} attempts remaining. 0. Back - standard_pin_authorization: + display_metadata_pin_authorization: first: |- CON Please enter your PIN. 0. Back diff --git a/apps/cic-ussd/var/lib/locale/ussd.sw.yml b/apps/cic-ussd/var/lib/locale/ussd.sw.yml index 888585bb..123363be 100644 --- a/apps/cic-ussd/var/lib/locale/ussd.sw.yml +++ b/apps/cic-ussd/var/lib/locale/ussd.sw.yml @@ -56,7 +56,7 @@ sw: 5. Angalia wasifu wako 0. Nyuma display_user_metadata: |- - END Wasifu wako una maelezo yafuatayo: + CON Wasifu wako una maelezo yafuatayo: Jina: %{full_name} Jinsia: %{gender} Eneo: %{location} diff --git a/apps/contract-migration/docker/Dockerfile b/apps/contract-migration/docker/Dockerfile index 940457cb..dd2cf26c 100644 --- a/apps/contract-migration/docker/Dockerfile +++ b/apps/contract-migration/docker/Dockerfile @@ -57,9 +57,9 @@ WORKDIR /home/grassroots USER grassroots ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433 -ARG cic_base_version=0.1.2a49 -ARG cic_eth_version=0.10.1b1 -ARG sarafu_faucet_version=0.0.2a13 +ARG cic_base_version=0.1.2a67 +ARG cic_eth_version=0.11.0b1 +ARG sarafu_faucet_version=0.0.2a19 ARG cic_contracts_version=0.0.2a2 RUN pip install --user --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version \ cic-eth==$cic_eth_version \ diff --git a/apps/contract-migration/reset.sh b/apps/contract-migration/reset.sh index 0822994c..2d81ae64 100755 --- a/apps/contract-migration/reset.sh +++ b/apps/contract-migration/reset.sh @@ -6,6 +6,7 @@ DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER} DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER} DEV_RESERVE_AMOUNT=${DEV_ETH_RESERVE_AMOUNT:-""10000000000000000000000000000000000} +faucet_amount=${DEV_FAUCET_AMOUNT:-0} keystore_file=$(realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c) echo "environment:" @@ -40,7 +41,10 @@ if [[ -n "${ETH_PROVIDER}" ]]; then #BANCOR_REGISTRY_ADDRESS=`cic-bancor-deploy --bancor-dir /usr/local/share/cic/bancor -z $DEV_ETH_RESERVE_ADDRESS 
-p $ETH_PROVIDER -o $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER` + >&2 echo "deploy account index contract" DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -y $keystore_file -vv -w` + >&2 echo "add deployer address as account index writer" + eth-accounts-index-writer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_ACCOUNT_INDEX_ADDRESS -ww $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy -i $CIC_CHAIN_SPEC -y $keystore_file --identifier BancorRegistry --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization -p $ETH_PROVIDER -vv -w` eth-contract-registry-set -w -y $keystore_file -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv ContractRegistry $CIC_REGISTRY_ADDRESS @@ -72,6 +76,9 @@ if [[ -n "${ETH_PROVIDER}" ]]; then >&2 echo "set faucet as token minter" giftable-token-minter -w -y $keystore_file -a $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv $DEV_FAUCET_ADDRESS + >&2 echo "set token faucet amount" + sarafu-faucet-set -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_FAUCET_ADDRESS $faucet_amount + else echo "\$ETH_PROVIDER not set!" diff --git a/apps/contract-migration/scripts/README.md b/apps/contract-migration/scripts/README.md index 048a1f6f..01861ac5 100644 --- a/apps/contract-migration/scripts/README.md +++ b/apps/contract-migration/scripts/README.md @@ -2,71 +2,234 @@ This folder contains tools to generate and import test data. -## DATA CREATION +## OVERVIEW -Does not need the cluster to run. +Three sets of tools are available, sorted by respective subdirectories. -Vanilla: +* **eth**: Import using sovereign wallets. +* **cic_eth**: Import using the `cic_eth` custodial engine. +* **cic_ussd**: Import using the `cic_ussd` interface (backed by `cic_eth`) + +Each of the modules include two main scripts: + +* **import_users.py**: Registers all created accounts in the network +* **import_balance.py**: Transfer an opening balance using an external keystore wallet + +The balance script will sync with the blockchain, processing transactions and triggering actions when it finds. In its current version it does not keep track of any other state, so it will run indefinitly and needs You the Human to decide when it has done what it needs to do. + + +In addition the following common tools are available: + +* **create_import_users.py**: User creation script +* **verify.py**: Import verification script +* **cic_meta**: Metadata imports + + +## REQUIREMENTS + +A virtual environment for the python scripts is recommended. We know it works with `python 3.8.x`. Let us know if you run it successfully with other minor versions. + +``` +python3 -m venv .venv +source .venv/bin/activate +``` + +Install all requirements from the `requirements.txt` file: + +`pip install --extra-index-url https://pip.grassrootseconomics.net:8433 -r requirements.txt` + + +If you are importing metadata, also do ye olde: + +`npm install` + + +## HOW TO USE + +### Step 1 - Data creation + +Before running any of the imports, the user data to import has to be generated and saved to disk. + +The script does not need any services to run. 
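+
+For scripted or repeated test runs this step can also be driven from Python rather than typed by hand. A minimal sketch wrapping the vanilla invocation shown just below (the `out` directory and the count of 100 users are arbitrary examples):
+
+```
+# Hypothetical wrapper around create_import_users.py; adjust directory and count to taste.
+import subprocess
+from pathlib import Path
+
+out_dir = Path('out')
+out_dir.mkdir(exist_ok=True)
+subprocess.run(
+    ['python', 'create_import_users.py', '--dir', str(out_dir), '100'],
+    check=True,
+)
+print('files written:', sum(1 for p in out_dir.rglob('*') if p.is_file()))
+```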
+ +Vanilla version: `python create_import_users.py [--dir ] ` -If you want to use the `import_balance.py` script to add to the user's balance from an external address, add: +If you want to use a `import_balance.py` script to add to the user's balance from an external address, use: `python create_import_users.py --gift-threshold [--dir ] ` -## IMPORT +### Step 2 - Services -Make sure the following is running in the cluster: - * eth - * postgres - * redis - * cic-eth-tasker - * cic-eth-dispatcher - * cic-eth-manager-head +Unless you know what you are doing, start with a clean slate, and execute (in the repository root): + +`docker-compose down -v` + +Then go through, in sequence: + +#### Base requirements + +If you are importing using `eth` and _not_ importing metadata, then the only service you need running in the cluster is: +* eth + +In all other cases you will _also_ need: +* postgres +* redis -You will want to run these in sequence: +#### EVM provisions + +This step is needed in *all* cases. + +`RUN_MASK=1 docker-compose up contract-migration` + +After this step is run, you can find top-level ethereum addresses (like the cic registry address, which you will need below) in `/service-configs/.env` -## 1. Metadata +#### Custodial provisions -`node import_meta.js ` +This step is _only_ needed if you are importing using `cic_eth` or `cic_ussd` + +`RUN_MASK=2 docker-compose up contract-migration` + + +#### Custodial services + +If importing using `cic_eth` or `cic_ussd` also run: +* cic-eth-tasker +* cic-eth-dispatcher +* cic-eth-tracker +* cic-eth-retrier + +If importing using `cic_ussd` also run: +* cic-ussd-tasker +* cic-ussd-server +* cic-notify-tasker + +If metadata is to be imported, also run: +* cic-meta-server + + + +### Step 3 - User imports + +If you did not change the docker-compose setup, your `eth_provider` the you need for the commands below will be `http://localhost:63545`. + +Only run _one_ of the alternatives. + +The keystore file used for transferring external opening balances tracker is relative to the directory you found this README in. Of course you can use a different wallet, but then you will have to provide it with tokens yourself (hint: `../reset.sh`) + +All external balance transactions are saved in raw wire format in `/txs`, with transaction hash as file name. + + + +#### Alternative 1 - Sovereign wallet import - `eth` + + +First, make a note of the **block height** before running anything. + +To import, run to _completion_: + +`python eth/import_users.py -v -c config -p -r -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c ` + +After the script completes, keystore files for all generated accouts will be found in `/keystore`, all with `foo` as password (would set it empty, but believe it or not some interfaces out there won't work unless you have one). 
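+
+A quick sanity check of the generated keyfiles can be done from Python (a sketch, assuming `out` was used as the user export directory and that the `eth-account` package is available in the virtualenv):
+
+```
+# Hypothetical check: each generated keyfile should decrypt with the password 'foo'.
+import json
+from pathlib import Path
+from eth_account import Account
+
+for keyfile in sorted(Path('out/keystore').iterdir()):
+    keyfile_json = json.loads(keyfile.read_text())
+    private_key = Account.decrypt(keyfile_json, 'foo')
+    print(keyfile.name, '->', Account.from_key(private_key).address)
+```
+
+Keyfile decryption uses scrypt, so expect this to take a while for large imports.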
+ +Then run: + +`python eth/import_balance.py -v -c config -r -p --offset -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c ` + + + +#### Alternative 2 - Custodial engine import - `cic_eth` + +Run in sequence, in first terminal: + +`python cic_eth/import_balance.py -v -c config -p -r -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c --head out` + +In another terminal: + +`python cic_eth/import_users.py -v -c config --redis-host-callback out` + +The `redis_hostname_in_docker` value is the hostname required to reach the redis server from within the docker cluster, and should be `redis` if you left the docker-compose unchanged. The `import_users` script will receive the address of each newly created custodial account on a redis subscription fed by a callback task in the `cic_eth` account creation task chain. + + +#### Alternative 3 - USSD import - `cic_ussd` + +If you have previously run the `cic_ussd` import incompletely, it could be a good idea to purge the queue. If you have left docker-compose unchanged, `redis_url` should be `redis://localhost:63379`. + +`celery -A cic_ussd.import_task purge -Q cic-import-ussd --broker ` + +Then, in sequence, run in first terminal: + +`python cic_eth/import_balance.py -v -c config -p -r -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c out` + +In second terminal: + +`python cic_ussd/import_users.py -v -c config out` + +The balance script is a celery task worker, and will not exit by itself in its current version. However, after it's done doing its job, you will find "reached nonce ... exiting" among the last lines of the log. + +The connection parameters for the `cic-ussd-server` is currently _hardcoded_ in the `import_users.py` script file. + + +### Step 4 - Metadata import (optional) + +The metadata import scripts can be run at any time after step 1 has been completed. + + +#### Importing user metadata + +To import the main user metadata structs, run: + +`node cic_meta/import_meta.js ` Monitors a folder for output from the `import_users.py` script, adding the metadata found to the `cic-meta` service. - -## 2. Balances - -(Only if you used the `--gift-threshold` option above) - -`python -c config -i -r -p --head -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c ` - -This will monitor new mined blocks and send balances to the newly created accounts. +If _number of users_ is omitted the script will run until manually interrupted. -### 3. Users +#### Importing phone pointer -Without any modifications to the cluster and config files: +`node cic_meta/import_meta_phone.js ` -`python import_users.py -c config --redis-host-callback redis ` - -** A note on the The callback**: The script uses a redis callback to retrieve the newly generated custodial address. This is the redis server _from the perspective of the cic-eth component_. +If you imported using `cic_ussd`, the phone pointer is _already added_ and this script will do nothing. 
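+
+For reference, the phone pointer is just a content address derived from the E164 phone number, in the same way `import_users.py` derives it. Resolving one by hand looks roughly like this (a sketch; `META_URL` is a placeholder for wherever your `cic-meta-server` is reachable, and the response format may differ from what is printed here):
+
+```
+# Hypothetical manual lookup of a phone pointer in cic-meta.
+import requests
+from cic_types.models.person import generate_metadata_pointer
+
+META_URL = 'http://localhost:63380'  # placeholder; use your cic-meta-server address
+phone = '+254700000000'              # example E164 number
+
+pointer = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
+r = requests.get('{}/{}'.format(META_URL.rstrip('/'), pointer))
+r.raise_for_status()
+print('raw response for {}:'.format(phone), r.text)
+```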
-## VERIFY +### Step 5 - Verify -`python verify.py -c config -i -r -p ` +`python verify.py -v -c config -r -p ` -Checks - * Private key is in cic-eth keystore - * Address is in accounts index - * Address has balance matching the gift threshold - * Metadata can be retrieved and has exact match +Included checks: +* Private key is in cic-eth keystore +* Address is in accounts index +* Address has gas balance +* Address has triggered the token faucet +* Address has token balance matching the gift threshold +* Personal metadata can be retrieved and has exact match +* Phone pointer metadata can be retrieved and matches address +* USSD menu response is initial state after registration + +Checks can be selectively included and excluded. See `--help` for details. + +Will output one line for each check, with name of check and number of errors found per check. Should exit with code 0 if all input data is found in the respective services. ## KNOWN ISSUES -If the faucet disbursement is set to a non-zero amount, the balances will be off. The verify script needs to be improved to check the faucet amount. +- If the faucet disbursement is set to a non-zero amount, the balances will be off. The verify script needs to be improved to check the faucet amount. + +- When the account callback in `cic_eth` fails, the `cic_eth/import_users.py` script will exit with a cryptic complaint concerning a `None` value. + +- Sovereign import scripts use the same keystore, and running them simultaneously will mess up the transaction nonce sequence. Better would be to use two different keystore wallets so balance and users scripts can be run simultaneously. + +- `pycrypto` and `pycryptodome` _have to be installed in that order_. If you get errors concerning `Crypto.KDF` then uninstall both and re-install in that order. Make sure you use the versions listed in `requirements.txt`. `pycryptodome` is a legacy dependency and will be removed as soon as possible. + +- Sovereign import script is very slow because it's scrypt'ing keystore files for the accounts that it creates. An improvement would be optional and/or asynchronous keyfile generation. + +- Running the balance script should be _optional_ in all cases, but is currently required in the case of `cic_ussd` because it is needed to generate the metadata. An improvement would be moving the task to `import_users.py`, for a different queue than the balance tx handler. + +- `cic_ussd` imports is poorly implemented, and consumes a lot of resources. Therefore it takes a long time to complete. Reducing the amount of polls for the phone pointer would go a long way to improve it. 
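+
+Each of the Step 5 checks ultimately reduces to a simple query against one of the services. As an illustration, the gas balance check is essentially a single `eth_getBalance` call (a sketch, using the default provider URL from Step 3 and the deployer address from `reset.sh` purely as an example):
+
+```
+# Hypothetical spot-check of the "address has gas balance" item via plain JSON-RPC.
+import requests
+
+ETH_PROVIDER = 'http://localhost:63545'  # default when docker-compose is unchanged
+address = '0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C'  # example: the contract deployer account
+
+payload = {
+    'jsonrpc': '2.0',
+    'id': 1,
+    'method': 'eth_getBalance',
+    'params': [address, 'latest'],
+}
+r = requests.post(ETH_PROVIDER, json=payload)
+r.raise_for_status()
+print('gas balance (wei):', int(r.json()['result'], 16))
+```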
diff --git a/apps/contract-migration/scripts/import_balance.py b/apps/contract-migration/scripts/cic_eth/import_balance.py similarity index 88% rename from apps/contract-migration/scripts/import_balance.py rename to apps/contract-migration/scripts/cic_eth/import_balance.py index 244b8202..2067d197 100644 --- a/apps/contract-migration/scripts/import_balance.py +++ b/apps/contract-migration/scripts/cic_eth/import_balance.py @@ -10,7 +10,7 @@ import hashlib import csv import json -# third-party impotts +# external imports import eth_abi import confini from hexathon import ( @@ -42,7 +42,7 @@ from cic_types.models.person import Person logging.basicConfig(level=logging.WARNING) logg = logging.getLogger() -config_dir = '/usr/local/etc/cic-syncer' +config_dir = './config' argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address') @@ -117,7 +117,7 @@ class Handler: self.user_dir = user_dir self.balances = balances self.chain_spec = chain_spec - self.tx_factory = ERC20(signer, gas_oracle, nonce_oracle, chain_spec.network_id()) + self.tx_factory = ERC20(chain_spec, signer, gas_oracle, nonce_oracle) def name(self): @@ -162,6 +162,15 @@ class Handler: (tx_hash_hex, o) = self.tx_factory.transfer(self.token_address, signer_address, recipient, balance_full) logg.info('submitting erc20 transfer tx {} for recipient {}'.format(tx_hash_hex, recipient)) r = conn.do(o) + + tx_path = os.path.join( + user_dir, + 'txs', + strip_0x(tx_hash_hex), + ) + f = open(tx_path, 'w') + f.write(strip_0x(o['params'][0])) + f.close() # except TypeError as e: # logg.warning('typerror {}'.format(e)) # pass @@ -174,29 +183,29 @@ class Handler: # logg.error('key record not found in imports: {}'.format(e).ljust(200)) -class BlockGetter: - - def __init__(self, conn, gas_oracle, nonce_oracle, chain_id): - self.conn = conn - self.tx_factory = ERC20(signer=signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle, chain_id=chain_id) +#class BlockGetter: +# +# def __init__(self, conn, gas_oracle, nonce_oracle, chain_spec): +# self.conn = conn +# self.tx_factory = ERC20(signer=signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle, chain_id=chain_id) +# +# +# def get(self, n): +# o = block_by_number(n) +# r = self.conn.do(o) +# b = None +# try: +# b = Block(r) +# except TypeError as e: +# if r == None: +# logg.debug('block not found {}'.format(n)) +# else: +# logg.error('block retrieve error {}'.format(e)) +# return b - def get(self, n): - o = block_by_number(n) - r = self.conn.do(o) - b = None - try: - b = Block(r) - except TypeError as e: - if r == None: - logg.debug('block not found {}'.format(n)) - else: - logg.error('block retrieve error {}'.format(e)) - return b - - -def progress_callback(block_number, tx_index, s): - sys.stdout.write(str(s).ljust(200) + "\n") +def progress_callback(block_number, tx_index): + sys.stdout.write(str(block_number).ljust(200) + "\n") @@ -208,7 +217,7 @@ def main(): nonce_oracle = RPCNonceOracle(signer_address, conn) # Get Token registry address - txf = TxFactory(signer=signer, gas_oracle=gas_oracle, nonce_oracle=None, chain_id=chain_spec.network_id()) + txf = TxFactory(chain_spec, signer=signer, gas_oracle=gas_oracle, nonce_oracle=None) tx = txf.template(signer_address, config.get('CIC_REGISTRY_ADDRESS')) registry_addressof_method = keccak256_string_to_hex('addressOf(bytes32)')[:8] @@ -290,7 +299,7 @@ def main(): f.close() syncer_backend.set(block_offset, 0) - syncer 
= HeadSyncer(syncer_backend, progress_callback=progress_callback) + syncer = HeadSyncer(syncer_backend, block_callback=progress_callback) handler = Handler(conn, chain_spec, user_dir, balances, sarafu_token_address, signer, gas_oracle, nonce_oracle) syncer.add_filter(handler) syncer.loop(1, conn) diff --git a/apps/contract-migration/scripts/import_users.py b/apps/contract-migration/scripts/cic_eth/import_users.py similarity index 83% rename from apps/contract-migration/scripts/import_users.py rename to apps/contract-migration/scripts/cic_eth/import_users.py index 8264a319..f5312187 100644 --- a/apps/contract-migration/scripts/import_users.py +++ b/apps/contract-migration/scripts/cic_eth/import_users.py @@ -7,6 +7,7 @@ import argparse import uuid import datetime import time +import phonenumbers from glob import glob # third-party imports @@ -17,7 +18,7 @@ from hexathon import ( add_0x, strip_0x, ) -from chainlib.eth.address import to_checksum +from chainlib.eth.address import to_checksum_address from cic_types.models.person import Person from cic_eth.api.api_task import Api from chainlib.chain import ChainSpec @@ -36,7 +37,7 @@ argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback') argparser.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback') argparser.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback') -argparser.add_argument('--batch-size', dest='batch_size', default=50, type=int, help='burst size of sending transactions to node') +argparser.add_argument('--batch-size', dest='batch_size', default=100, type=int, help='burst size of sending transactions to node') # batch size should be slightly below cumulative gas limit worth, eg 80000 gas txs with 8000000 limit is a bit less than 100 batch size argparser.add_argument('--batch-delay', dest='batch_delay', default=2, type=int, help='seconds delay between batches') argparser.add_argument('--timeout', default=60.0, type=float, help='Callback timeout') argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue') @@ -75,9 +76,15 @@ os.makedirs(user_new_dir) meta_dir = os.path.join(args.user_dir, 'meta') os.makedirs(meta_dir) +phone_dir = os.path.join(args.user_dir, 'phone') +os.makedirs(os.path.join(phone_dir, 'meta')) + user_old_dir = os.path.join(args.user_dir, 'old') os.stat(user_old_dir) +txs_dir = os.path.join(args.user_dir, 'txs') +os.makedirs(txs_dir) + chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) chain_str = str(chain_spec) @@ -124,10 +131,6 @@ def register_eth(i, u): return address -def register_ussd(u): - pass - - if __name__ == '__main__': #fi = open(os.path.join(user_out_dir, 'addresses.csv'), 'a') @@ -155,8 +158,6 @@ if __name__ == '__main__': sub_chain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id()) u.identities['evm'][sub_chain_str] = [new_address] - register_ussd(u) - new_address_clean = strip_0x(new_address) filepath = os.path.join( user_new_dir, @@ -171,12 +172,32 @@ if __name__ == '__main__': f.write(json.dumps(o)) f.close() - #old_address = to_checksum(add_0x(y[:len(y)-5])) #fi.write('{},{}\n'.format(new_address, old_address)) meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), 'cic.person') meta_filepath = os.path.join(meta_dir, 
'{}.json'.format(new_address_clean.upper())) os.symlink(os.path.realpath(filepath), meta_filepath) + phone_object = phonenumbers.parse(u.tel) + phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164) + meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone') + meta_phone_filepath = os.path.join(phone_dir, 'meta', meta_phone_key) + + filepath = os.path.join( + phone_dir, + 'new', + meta_phone_key[:2].upper(), + meta_phone_key[2:4].upper(), + meta_phone_key.upper(), + ) + os.makedirs(os.path.dirname(filepath), exist_ok=True) + + f = open(filepath, 'w') + f.write(to_checksum_address(new_address_clean)) + f.close() + + os.symlink(os.path.realpath(filepath), meta_phone_filepath) + + i += 1 sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r") diff --git a/apps/contract-migration/scripts/cic_eth/traffic/cmd/__init__.py b/apps/contract-migration/scripts/cic_eth/traffic/cmd/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/contract-migration/scripts/cmd/traffic.py b/apps/contract-migration/scripts/cic_eth/traffic/cmd/traffic.py similarity index 100% rename from apps/contract-migration/scripts/cmd/traffic.py rename to apps/contract-migration/scripts/cic_eth/traffic/cmd/traffic.py diff --git a/apps/contract-migration/scripts/common/__init__.py b/apps/contract-migration/scripts/cic_eth/traffic/common/__init__.py similarity index 100% rename from apps/contract-migration/scripts/common/__init__.py rename to apps/contract-migration/scripts/cic_eth/traffic/common/__init__.py diff --git a/apps/contract-migration/scripts/common/argparse.py b/apps/contract-migration/scripts/cic_eth/traffic/common/argparse.py similarity index 100% rename from apps/contract-migration/scripts/common/argparse.py rename to apps/contract-migration/scripts/cic_eth/traffic/common/argparse.py diff --git a/apps/contract-migration/scripts/common/config.py b/apps/contract-migration/scripts/cic_eth/traffic/common/config.py similarity index 100% rename from apps/contract-migration/scripts/common/config.py rename to apps/contract-migration/scripts/cic_eth/traffic/common/config.py diff --git a/apps/contract-migration/scripts/common/log.py b/apps/contract-migration/scripts/cic_eth/traffic/common/log.py similarity index 100% rename from apps/contract-migration/scripts/common/log.py rename to apps/contract-migration/scripts/cic_eth/traffic/common/log.py diff --git a/apps/contract-migration/scripts/common/registry.py b/apps/contract-migration/scripts/cic_eth/traffic/common/registry.py similarity index 100% rename from apps/contract-migration/scripts/common/registry.py rename to apps/contract-migration/scripts/cic_eth/traffic/common/registry.py diff --git a/apps/contract-migration/scripts/common/rpc.py b/apps/contract-migration/scripts/cic_eth/traffic/common/rpc.py similarity index 100% rename from apps/contract-migration/scripts/common/rpc.py rename to apps/contract-migration/scripts/cic_eth/traffic/common/rpc.py diff --git a/apps/contract-migration/scripts/common/signer.py b/apps/contract-migration/scripts/cic_eth/traffic/common/signer.py similarity index 100% rename from apps/contract-migration/scripts/common/signer.py rename to apps/contract-migration/scripts/cic_eth/traffic/common/signer.py diff --git a/apps/contract-migration/scripts/local/account.py b/apps/contract-migration/scripts/cic_eth/traffic/local/account.py similarity index 100% rename from apps/contract-migration/scripts/local/account.py rename to 
apps/contract-migration/scripts/cic_eth/traffic/local/account.py diff --git a/apps/contract-migration/scripts/local/noop.py b/apps/contract-migration/scripts/cic_eth/traffic/local/noop.py similarity index 100% rename from apps/contract-migration/scripts/local/noop.py rename to apps/contract-migration/scripts/cic_eth/traffic/local/noop.py diff --git a/apps/contract-migration/scripts/local/transfer.py b/apps/contract-migration/scripts/cic_eth/traffic/local/transfer.py similarity index 100% rename from apps/contract-migration/scripts/local/transfer.py rename to apps/contract-migration/scripts/cic_eth/traffic/local/transfer.py diff --git a/apps/contract-migration/scripts/traffic.py b/apps/contract-migration/scripts/cic_eth/traffic/traffic.py similarity index 100% rename from apps/contract-migration/scripts/traffic.py rename to apps/contract-migration/scripts/cic_eth/traffic/traffic.py diff --git a/apps/contract-migration/scripts/import_meta.js b/apps/contract-migration/scripts/cic_meta/import_meta.js similarity index 99% rename from apps/contract-migration/scripts/import_meta.js rename to apps/contract-migration/scripts/cic_meta/import_meta.js index 56c754f3..28cf5f7d 100644 --- a/apps/contract-migration/scripts/import_meta.js +++ b/apps/contract-migration/scripts/cic_meta/import_meta.js @@ -101,6 +101,7 @@ function importMeta(keystore) { const file = files[i]; if (file.substr(-5) != '.json') { console.debug('skipping file', file); + continue; } const filePath = path.join(workDir, file); doOne(keystore, filePath); diff --git a/apps/contract-migration/scripts/cic_meta/import_meta_phone.js b/apps/contract-migration/scripts/cic_meta/import_meta_phone.js new file mode 100644 index 00000000..9c82afe5 --- /dev/null +++ b/apps/contract-migration/scripts/cic_meta/import_meta_phone.js @@ -0,0 +1,134 @@ +const fs = require('fs'); +const path = require('path'); +const http = require('http'); + +const cic = require('cic-client-meta'); +const vcfp = require('vcard-parser'); + +//const conf = JSON.parse(fs.readFileSync('./cic.conf')); + +const config = new cic.Config('./config'); +config.process(); +console.log(config); + + +function sendit(uid, envelope) { + const d = envelope.toJSON(); + + const contentLength = (new TextEncoder().encode(d)).length; + const opts = { + method: 'PUT', + headers: { + 'Content-Type': 'application/json', + 'Content-Length': contentLength, + 'X-CIC-AUTOMERGE': 'client', + + }, + }; + let url = config.get('META_URL'); + url = url.replace(new RegExp('^(.+://[^/]+)/*$'), '$1/'); + console.log('posting to url: ' + url + uid); + const req = http.request(url + uid, opts, (res) => { + res.on('data', process.stdout.write); + res.on('end', () => { + console.log('result', res.statusCode, res.headers); + }); + }); + if (!req.write(d)) { + console.error('foo', d); + process.exit(1); + } + req.end(); +} + +function doOne(keystore, filePath, address) { + const signer = new cic.PGPSigner(keystore); + + const j = JSON.parse(fs.readFileSync(filePath).toString()); + const b = Buffer.from(j['vcard'], 'base64'); + const s = b.toString(); + const o = vcfp.parse(s); + const phone = o.tel[0].value; + + cic.Phone.toKey(phone).then((uid) => { + const o = fs.readFileSync(filePath, 'utf-8'); + + const s = new cic.Syncable(uid, o); + s.setSigner(signer); + s.onwrap = (env) => { + sendit(uid, env); + }; + s.sign(); + }); +} + +const privateKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_PRIVATE_KEY_FILE')); +const publicKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), 
config.get('PGP_PRIVATE_KEY_FILE')); +pk = fs.readFileSync(privateKeyPath); +pubk = fs.readFileSync(publicKeyPath); + +new cic.PGPKeyStore( + config.get('PGP_PASSPHRASE'), + pk, + pubk, + undefined, + undefined, + importMetaPhone, +); + +const batchSize = 16; +const batchDelay = 1000; +const total = parseInt(process.argv[3]); +const dataDir = process.argv[2]; +const workDir = path.join(dataDir, 'phone/meta'); +const userDir = path.join(dataDir, 'new'); +let count = 0; +let batchCount = 0; + + +function importMetaPhone(keystore) { + let files; + + try { + files = fs.readdirSync(workDir); + } catch { + console.error('source directory not yet ready', workDir); + setTimeout(importMetaPhone, batchDelay, keystore); + return; + } + let limit = batchSize; + if (files.length < limit) { + limit = files.length; + } + for (let i = 0; i < limit; i++) { + const file = files[i]; + if (file.substr(0, 1) == '.') { + console.debug('skipping file', file); + continue; + } + const filePath = path.join(workDir, file); + + const address = fs.readFileSync(filePath).toString().substring(2).toUpperCase(); + const metaFilePath = path.join( + userDir, + address.substring(0, 2), + address.substring(2, 4), + address + '.json', + ); + + doOne(keystore, metaFilePath, address); + fs.unlinkSync(filePath); + count++; + batchCount++; + if (batchCount == batchSize) { + console.debug('reached batch size, breathing'); + batchCount=0; + setTimeout(importMetaPhone, batchDelay, keystore); + return; + } + } + if (count == total) { + return; + } + setTimeout(importMetaPhone, 100, keystore); +}
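For reference, a minimal sketch of how the entries that importMetaPhone() consumes are keyed and laid out on disk, assuming the generate_metadata_pointer() and phonenumbers APIs already used by the import scripts further down in this changeset; the directory and phone number below are hypothetical.

# Sketch only, not part of the changeset: derive the phone pointer and the
# paths the user importers write for it; data_dir and tel are hypothetical.
import os

import phonenumbers
from cic_types.processor import generate_metadata_pointer

data_dir = 'out'       # hypothetical export directory (user_dir in the importers)
tel = '+254700000001'  # hypothetical subscriber number

# normalize to E164 before indexing, as the import scripts do
phone = phonenumbers.format_number(phonenumbers.parse(tel), phonenumbers.PhoneNumberFormat.E164)

# pointer key for the phone, salted with ':cic.phone'; genPhoneIndex() in
# create_import_users.py derives the same key by hand
meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')

# the user importers write the checksummed account address here ...
new_entry = os.path.join(data_dir, 'phone', 'new',
        meta_phone_key[:2].upper(), meta_phone_key[2:4].upper(), meta_phone_key.upper())

# ... and symlink it into the flat directory scanned above
meta_entry = os.path.join(data_dir, 'phone', 'meta', meta_phone_key)

print(meta_phone_key, new_entry, meta_entry)

The phone/meta symlinks are consumed destructively: importMetaPhone() unlinks each entry after posting it, so only unprocessed phone pointers remain in that directory.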
diff --git a/apps/contract-migration/scripts/cic_ussd/import_balance.py b/apps/contract-migration/scripts/cic_ussd/import_balance.py new file mode 100644 index 00000000..35430e95 --- /dev/null +++ b/apps/contract-migration/scripts/cic_ussd/import_balance.py @@ -0,0 +1,157 @@ +# standard imports +import os +import sys +import logging +import argparse +import hashlib +import redis +import celery + +# external imports +import confini +from chainlib.eth.connection import EthHTTPConnection +from chainlib.chain import ChainSpec +from hexathon import ( + strip_0x, + add_0x, + ) +from chainlib.eth.address import to_checksum_address +from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer +from crypto_dev_signer.keystore.dict import DictKeystore +from cic_types.models.person import Person + +# local imports +from import_util import BalanceProcessor +from import_task import * + +logging.basicConfig(level=logging.WARNING) +logg = logging.getLogger() + +config_dir = './config' + +argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') +argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address') +argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing') +argparser.add_argument('-c', type=str, default=config_dir, help='config root to use') +argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec') +argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec') +argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address') +argparser.add_argument('--meta-host', dest='meta_host', type=str, help='metadata server host') +argparser.add_argument('--meta-port', dest='meta_port', type=int, help='metadata server port') +argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission') +argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis port to use for task submission') +argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback') +argparser.add_argument('--token-symbol', default='SRF', type=str, dest='token_symbol', help='Token symbol to use for transactions') +argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)') +argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') +argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to') +argparser.add_argument('--offset', type=int, default=0, help='block offset to start syncer from') +argparser.add_argument('-v', help='be verbose', action='store_true') +argparser.add_argument('-vv', help='be more verbose', action='store_true') +argparser.add_argument('user_dir', default='out', type=str, help='user export directory') +args = argparser.parse_args(sys.argv[1:]) + +if args.v == True: + logging.getLogger().setLevel(logging.INFO) +elif args.vv == True: + logging.getLogger().setLevel(logging.DEBUG) + +config_dir = os.path.join(args.c) +os.makedirs(config_dir, 0o777, True) +config = confini.Config(config_dir, args.env_prefix) +config.process() +# override args +args_override = { + 'CIC_CHAIN_SPEC': getattr(args, 'i'), + 'ETH_PROVIDER': getattr(args, 'p'), + 'CIC_REGISTRY_ADDRESS': getattr(args, 'r'), + 'REDIS_HOST': getattr(args, 'redis_host'), + 'REDIS_PORT': getattr(args, 'redis_port'), + 'REDIS_DB': getattr(args, 'redis_db'), + 'META_HOST': getattr(args, 'meta_host'), + 'META_PORT': getattr(args, 'meta_port'), + } +config.dict_override(args_override, 'cli flag') +config.censor('PASSWORD', 'DATABASE') +config.censor('PASSWORD', 'SSL') +logg.debug('config loaded from {}:\n{}'.format(config_dir, config)) + +redis_host = config.get('REDIS_HOST') +redis_port = config.get('REDIS_PORT') +redis_db = config.get('REDIS_DB') +r = redis.Redis(redis_host, redis_port, redis_db) +celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL')) + +signer_address = None +keystore = DictKeystore() +if args.y != None: + logg.debug('loading keystore file {}'.format(args.y)) + signer_address = keystore.import_keystore_file(args.y) + logg.debug('now have key for signer address {}'.format(signer_address)) +signer = EIP155Signer(keystore) + +queue = args.q +chain_str = config.get('CIC_CHAIN_SPEC') +block_offset = 0 +if args.head: + block_offset = -1 +else: + block_offset = args.offset + +chain_spec = ChainSpec.from_chain_str(chain_str) +old_chain_spec_str = args.old_chain_spec +old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str) + +user_dir = args.user_dir # user_out_dir from import_users.py + +token_symbol = args.token_symbol + +MetadataTask.meta_host = config.get('META_HOST') +MetadataTask.meta_port = config.get('META_PORT') +ImportTask.chain_spec = chain_spec + +def main(): + conn = EthHTTPConnection(config.get('ETH_PROVIDER')) + + ImportTask.balance_processor = BalanceProcessor(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'), signer_address, signer) + ImportTask.balance_processor.init() + + # TODO get decimals from token + balances = {} + f = open('{}/balances.csv'.format(user_dir), 'r') + remove_zeros = 10**6 + i = 0 + while True: + l = 
f.readline() + if not l: + break + r = l.split(',') + try: + address = to_checksum_address(r[0]) + sys.stdout.write('loading balance {} {} {}'.format(i, address, r[1]).ljust(200) + "\r") + except ValueError: + break + balance = int(int(r[1].rstrip()) / remove_zeros) + balances[address] = balance + i += 1 + + f.close() + + ImportTask.balances = balances + ImportTask.count = i + + s = celery.signature( + 'import_task.send_txs', + [ + MetadataTask.balance_processor.nonce_offset, + ], + queue='cic-import-ussd', + ) + s.apply_async() + + argv = ['worker', '-Q', 'cic-import-ussd', '--loglevel=DEBUG'] + celery_app.worker_main(argv) + + +if __name__ == '__main__': + main() diff --git a/apps/contract-migration/scripts/cic_ussd/import_task.py b/apps/contract-migration/scripts/cic_ussd/import_task.py new file mode 100644 index 00000000..a3f2abf0 --- /dev/null +++ b/apps/contract-migration/scripts/cic_ussd/import_task.py @@ -0,0 +1,218 @@ +# standard imports +import os +import logging +import urllib.parse +import urllib.error +import urllib.request +import json + +# external imports +import celery +from hexathon import ( + strip_0x, + add_0x, + ) +from chainlib.eth.address import to_checksum_address +from chainlib.eth.tx import ( + unpack, + raw, + ) +from cic_types.processor import generate_metadata_pointer +from cic_types.models.person import Person + +#logg = logging.getLogger().getChild(__name__) +logg = logging.getLogger() + +celery_app = celery.current_app + + +class ImportTask(celery.Task): + + balances = None + import_dir = 'out' + count = 0 + chain_spec = None + balance_processor = None + max_retries = None + +class MetadataTask(ImportTask): + + meta_host = None + meta_port = None + meta_path = '' + meta_ssl = False + autoretry_for = ( + urllib.error.HTTPError, + OSError, + ) + retry_jitter = True + retry_backoff = True + retry_backoff_max = 60 + + @classmethod + def meta_url(self): + scheme = 'http' + if self.meta_ssl: + scheme += 's' + url = urllib.parse.urlparse('{}://{}:{}/{}'.format(scheme, self.meta_host, self.meta_port, self.meta_path)) + return urllib.parse.urlunparse(url) + + +def old_address_from_phone(base_path, phone): + pidx = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone') + phone_idx_path = os.path.join('{}/phone/{}/{}/{}'.format( + base_path, + pidx[:2], + pidx[2:4], + pidx, + ) + ) + f = open(phone_idx_path, 'r') + old_address = f.read() + f.close() + + return old_address + + +@celery_app.task(bind=True, base=MetadataTask) +def resolve_phone(self, phone): + identifier = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone') + url = urllib.parse.urljoin(self.meta_url(), identifier) + logg.debug('attempt getting phone pointer at {} for phone {}'.format(url, phone)) + r = urllib.request.urlopen(url) + address = json.load(r) + address = address.replace('"', '') + logg.debug('address {} for phone {}'.format(address, phone)) + + return address + + +@celery_app.task(bind=True, base=MetadataTask) +def generate_metadata(self, address, phone): + old_address = old_address_from_phone(self.import_dir, phone) + + logg.debug('address {}'.format(address)) + old_address_upper = strip_0x(old_address).upper() + metadata_path = '{}/old/{}/{}/{}.json'.format( + self.import_dir, + old_address_upper[:2], + old_address_upper[2:4], + old_address_upper, + ) + + f = open(metadata_path, 'r') + o = json.load(f) + f.close() + + u = Person.deserialize(o) + + if 
u.identities.get('evm') == None: + u.identities['evm'] = {} + sub_chain_str = '{}:{}'.format(self.chain_spec.common_name(), self.chain_spec.network_id()) + u.identities['evm'][sub_chain_str] = [add_0x(address)] + + new_address_clean = strip_0x(address) + filepath = os.path.join( + self.import_dir, + 'new', + new_address_clean[:2].upper(), + new_address_clean[2:4].upper(), + new_address_clean.upper() + '.json', + ) + os.makedirs(os.path.dirname(filepath), exist_ok=True) + + o = u.serialize() + f = open(filepath, 'w') + f.write(json.dumps(o)) + f.close() + + meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.person') + meta_filepath = os.path.join( + self.import_dir, + 'meta', + '{}.json'.format(new_address_clean.upper()), + ) + os.symlink(os.path.realpath(filepath), meta_filepath) + + logg.debug('found metadata {} for phone {}'.format(o, phone)) + + return address + + +@celery_app.task(bind=True, base=MetadataTask) +def opening_balance_tx(self, address, phone, serial): + + + old_address = old_address_from_phone(self.import_dir, phone) + + k = to_checksum_address(strip_0x(old_address)) + balance = self.balances[k] + logg.debug('found balance {} for address {} phone {}'.format(balance, old_address, phone)) + + decimal_balance = self.balance_processor.get_decimal_amount(int(balance)) + + (tx_hash_hex, o) = self.balance_processor.get_rpc_tx(address, decimal_balance, serial) + + tx = unpack(bytes.fromhex(strip_0x(o)), self.chain_spec) + logg.debug('generated tx token value {} to {} tx hash {}'.format(decimal_balance, address, tx_hash_hex)) + + tx_path = os.path.join( + self.import_dir, + 'txs', + strip_0x(tx_hash_hex), + ) + + f = open(tx_path, 'w') + f.write(strip_0x(o)) + f.close() + + tx_nonce_path = os.path.join( + self.import_dir, + 'txs', + '.' + str(tx['nonce']), + ) + os.symlink(os.path.realpath(tx_path), tx_nonce_path) + + return tx['hash'] + + +@celery_app.task(bind=True, base=ImportTask, autoretry_for=(FileNotFoundError,), max_retries=None, default_retry_delay=0.1) +def send_txs(self, nonce): + + if nonce == self.count + self.balance_processor.nonce_offset: + logg.info('reached nonce {} (offset {} + count {}) exiting'.format(nonce, self.balance_processor.nonce_offset, self.count)) + return + + + logg.debug('attempt to open symlink for nonce {}'.format(nonce)) + tx_nonce_path = os.path.join( + self.import_dir, + 'txs', + '.' 
+ str(nonce), + ) + f = open(tx_nonce_path, 'r') + tx_signed_raw_hex = f.read() + f.close() + + os.unlink(tx_nonce_path) + + o = raw(add_0x(tx_signed_raw_hex)) + tx_hash_hex = self.balance_processor.conn.do(o) + + logg.info('sent nonce {} tx hash {}'.format(nonce, tx_hash_hex)) #tx_signed_raw_hex)) + + nonce += 1 + + queue = self.request.delivery_info.get('routing_key') + s = celery.signature( + 'import_task.send_txs', + [ + nonce, + ], + queue=queue, + ) + s.apply_async() + + + return nonce diff --git a/apps/contract-migration/scripts/cic_ussd/import_users.py b/apps/contract-migration/scripts/cic_ussd/import_users.py new file mode 100644 index 00000000..ca9bf3f1 --- /dev/null +++ b/apps/contract-migration/scripts/cic_ussd/import_users.py @@ -0,0 +1,191 @@ +# standard imports +import os +import sys +import json +import logging +import argparse +import uuid +import datetime +import time +import urllib.request +from glob import glob + +# third-party imports +import redis +import confini +import celery +from hexathon import ( + add_0x, + strip_0x, + ) +from chainlib.eth.address import to_checksum +from cic_types.models.person import Person +from cic_eth.api.api_task import Api +from chainlib.chain import ChainSpec +from cic_types.processor import generate_metadata_pointer +import phonenumbers + +logging.basicConfig(level=logging.WARNING) +logg = logging.getLogger() + +default_config_dir = '/usr/local/etc/cic' + +argparser = argparse.ArgumentParser() +argparser.add_argument('-c', type=str, default=default_config_dir, help='config file') +argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string') +argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission') +argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission') +argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback') +argparser.add_argument('--batch-size', dest='batch_size', default=100, type=int, help='burst size of sending transactions to node') # batch size should be slightly below cumulative gas limit worth, eg 80000 gas txs with 8000000 limit is a bit less than 100 batch size +argparser.add_argument('--batch-delay', dest='batch_delay', default=3, type=int, help='seconds delay between batches') +argparser.add_argument('--timeout', default=60.0, type=float, help='Callback timeout') +argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue') +argparser.add_argument('-v', action='store_true', help='Be verbose') +argparser.add_argument('-vv', action='store_true', help='Be more verbose') +argparser.add_argument('user_dir', type=str, help='path to users export dir tree') +args = argparser.parse_args() + +if args.v: + logg.setLevel(logging.INFO) +elif args.vv: + logg.setLevel(logging.DEBUG) + +config_dir = args.c +config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX')) +config.process() +args_override = { + 'CIC_CHAIN_SPEC': getattr(args, 'i'), + 'REDIS_HOST': getattr(args, 'redis_host'), + 'REDIS_PORT': getattr(args, 'redis_port'), + 'REDIS_DB': getattr(args, 'redis_db'), + } +config.dict_override(args_override, 'cli') +celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) + +redis_host = config.get('REDIS_HOST') +redis_port = config.get('REDIS_PORT') +redis_db = config.get('REDIS_DB') +r = redis.Redis(redis_host, redis_port, redis_db) + +ps = 
r.pubsub() + +user_new_dir = os.path.join(args.user_dir, 'new') +os.makedirs(user_new_dir) + +meta_dir = os.path.join(args.user_dir, 'meta') +os.makedirs(meta_dir) + +user_old_dir = os.path.join(args.user_dir, 'old') +os.stat(user_old_dir) + +txs_dir = os.path.join(args.user_dir, 'txs') +os.makedirs(txs_dir) + +chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) +chain_str = str(chain_spec) + +batch_size = args.batch_size +batch_delay = args.batch_delay + + + +def build_ussd_request(phone, host, port, service_code, username, password, ssl=False): + url = 'http' + if ssl: + url += 's' + url += '://{}:{}'.format(host, port) + url += '/?username={}&password={}'.format(username, password) #config.get('USSD_USER'), config.get('USSD_PASS')) + + logg.info('ussd service url {}'.format(url)) + logg.info('ussd phone {}'.format(phone)) + + session = uuid.uuid4().hex + data = { + 'sessionId': session, + 'serviceCode': service_code, + 'phoneNumber': phone, + 'text': service_code, + } + req = urllib.request.Request(url) + data_str = json.dumps(data) + data_bytes = data_str.encode('utf-8') + req.add_header('Content-Type', 'application/json') + req.data = data_bytes + + return req + + +def register_ussd(i, u): + phone_object = phonenumbers.parse(u.tel) + phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164) + logg.debug('tel {} {}'.format(u.tel, phone)) + req = build_ussd_request(phone, 'localhost', 63315, '*483*46#', '', '') + response = urllib.request.urlopen(req) + response_data = response.read().decode('utf-8') + state = response_data[:3] + out = response_data[4:] + logg.debug('ussd reponse: {}'.format(out)) + + +if __name__ == '__main__': + + i = 0 + j = 0 + for x in os.walk(user_old_dir): + for y in x[2]: + if y[len(y)-5:] != '.json': + continue + filepath = os.path.join(x[0], y) + f = open(filepath, 'r') + try: + o = json.load(f) + except json.decoder.JSONDecodeError as e: + f.close() + logg.error('load error for {}: {}'.format(y, e)) + continue + f.close() + u = Person.deserialize(o) + + new_address = register_ussd(i, u) + + phone_object = phonenumbers.parse(u.tel) + phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164) + + s_phone = celery.signature( + 'import_task.resolve_phone', + [ + phone, + ], + queue='cic-import-ussd', + ) + + s_meta = celery.signature( + 'import_task.generate_metadata', + [ + phone, + ], + queue='cic-import-ussd', + ) + + s_balance = celery.signature( + 'import_task.opening_balance_tx', + [ + phone, + i, + ], + queue='cic-import-ussd', + ) + + s_meta.link(s_balance) + s_phone.link(s_meta) + s_phone.apply_async(countdown=7) # block time plus a bit of time for ussd processing + + i += 1 + sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r") + + j += 1 + if j == batch_size: + time.sleep(batch_delay) + j = 0 + + #fi.close() diff --git a/apps/contract-migration/scripts/cic_ussd/import_util.py b/apps/contract-migration/scripts/cic_ussd/import_util.py new file mode 100644 index 00000000..ebe706bb --- /dev/null +++ b/apps/contract-migration/scripts/cic_ussd/import_util.py @@ -0,0 +1,72 @@ +# standard imports +import logging + +# external imports +from eth_contract_registry import Registry +from eth_token_index import TokenUniqueSymbolIndex +from chainlib.eth.gas import OverrideGasOracle +from chainlib.eth.nonce import OverrideNonceOracle +from chainlib.eth.erc20 import ERC20 +from chainlib.eth.tx import ( + count, + TxFormat, + ) + +logg = logging.getLogger().getChild(__name__) + + +class 
BalanceProcessor: + + def __init__(self, conn, chain_spec, registry_address, signer_address, signer): + + self.chain_spec = chain_spec + self.conn = conn + #self.signer_address = signer_address + self.registry_address = registry_address + + self.token_index_address = None + self.token_address = None + self.signer_address = signer_address + self.signer = signer + + o = count(signer_address) + c = self.conn.do(o) + self.nonce_offset = int(c, 16) + self.gas_oracle = OverrideGasOracle(conn=conn, limit=8000000) + + self.value_multiplier = 1 + + + def init(self): + # Get Token registry address + registry = Registry(self.chain_spec) + o = registry.address_of(self.registry_address, 'TokenRegistry') + r = self.conn.do(o) + self.token_index_address = registry.parse_address_of(r) + logg.info('found token index address {}'.format(self.token_index_address)) + + token_registry = TokenUniqueSymbolIndex(self.chain_spec) + o = token_registry.address_of(self.token_index_address, 'SRF') + r = self.conn.do(o) + self.token_address = token_registry.parse_address_of(r) + logg.info('found SRF token address {}'.format(self.token_address)) + + tx_factory = ERC20(self.chain_spec) + o = tx_factory.decimals(self.token_address) + r = self.conn.do(o) + n = tx_factory.parse_decimals(r) + self.value_multiplier = 10 ** n + + + def get_rpc_tx(self, recipient, value, i): + logg.debug('initiating nonce offset {} for recipient {}'.format(self.nonce_offset + i, recipient)) + nonce_oracle = OverrideNonceOracle(self.signer_address, self.nonce_offset + i) + tx_factory = ERC20(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=self.gas_oracle) + return tx_factory.transfer(self.token_address, self.signer_address, recipient, value, tx_format=TxFormat.RLP_SIGNED) + #(tx_hash_hex, o) = tx_factory.transfer(self.token_address, self.signer_address, recipient, value) + #self.conn.do(o) + #return tx_hash_hex + + + def get_decimal_amount(self, value): + return value * self.value_multiplier diff --git a/apps/contract-migration/scripts/config/app.ini b/apps/contract-migration/scripts/config/app.ini new file mode 100644 index 00000000..6d37c421 --- /dev/null +++ b/apps/contract-migration/scripts/config/app.ini @@ -0,0 +1,24 @@ +[app] +ALLOWED_IP=0.0.0.0/0 +LOCALE_FALLBACK=en +LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/ +MAX_BODY_LENGTH=1024 +PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I= +SERVICE_CODE=*483*46# + +[phone_number] +REGION=KE + +[ussd] +MENU_FILE=/usr/src/data/ussd_menu.json +user = +pass = + +[statemachine] +STATES=/usr/src/cic-ussd/states/ +TRANSITIONS=/usr/src/cic-ussd/transitions/ + +[client] +host = +port = +ssl = diff --git a/apps/contract-migration/scripts/config/meta.ini b/apps/contract-migration/scripts/config/meta.ini index 0d16ee28..a5363cc7 100644 --- a/apps/contract-migration/scripts/config/meta.ini +++ b/apps/contract-migration/scripts/config/meta.ini @@ -1,2 +1,5 @@ [meta] url = http://localhost:63380 +host = localhost +port = 63380 +ssl = 0 diff --git a/apps/contract-migration/scripts/create_import_users.py b/apps/contract-migration/scripts/create_import_users.py index d8587ee0..9f7c3d9c 100644 --- a/apps/contract-migration/scripts/create_import_users.py +++ b/apps/contract-migration/scripts/create_import_users.py @@ -17,15 +17,14 @@ import random import vobject import celery from faker import Faker -import cic_eth_registry import confini -from cic_eth.api import Api from cic_types.models.person import ( Person, generate_vcard_from_contact_data, get_contact_data_from_vcard, ) 
from chainlib.eth.address import to_checksum_address +import phonenumbers logging.basicConfig(level=logging.WARNING) logg = logging.getLogger() @@ -62,8 +61,6 @@ ts_then = int(dt_then.timestamp()) celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) -api = Api(config.get('CIC_CHAIN_SPEC')) - gift_max = args.gift_threshold or 0 gift_factor = (10**6) @@ -84,10 +81,12 @@ phone_idx = [] user_dir = args.dir user_count = args.user_count +random.seed() + def genPhoneIndex(phone): h = hashlib.new('sha256') h.update(phone.encode('utf-8')) - h.update(b'cic.msisdn') + h.update(b':cic.phone') return h.digest().hex() @@ -100,13 +99,14 @@ def genId(addr, typ): def genDate(): - logg.info(ts_then) ts = random.randint(ts_then, ts_now) return datetime.datetime.fromtimestamp(ts).timestamp() def genPhone(): - return fake.msisdn() + phone_str = '+254' + str(random.randint(100000000, 999999999)) + phone_object = phonenumbers.parse(phone_str) + return phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164) def genPersonal(phone): @@ -210,14 +210,14 @@ if __name__ == '__main__': f.close() pidx = genPhoneIndex(phone) - d = prepareLocalFilePath(os.path.join(user_dir, 'phone'), uid) + d = prepareLocalFilePath(os.path.join(user_dir, 'phone'), pidx) f = open('{}/{}'.format(d, pidx), 'w') f.write(eth) f.close() amount = genAmount() fa.write('{},{}\n'.format(eth,amount)) - logg.debug('pidx {}, uid {}, eth {}, amount {}'.format(pidx, uid, eth, amount)) + logg.debug('pidx {}, uid {}, eth {}, amount {}, phone {}'.format(pidx, uid, eth, amount, phone)) i += 1 diff --git a/apps/contract-migration/scripts/eth/import_balance.py b/apps/contract-migration/scripts/eth/import_balance.py new file mode 100644 index 00000000..2067d197 --- /dev/null +++ b/apps/contract-migration/scripts/eth/import_balance.py @@ -0,0 +1,309 @@ +# standard imports +import os +import sys +import logging +import time +import argparse +import sys +import re +import hashlib +import csv +import json + +# external imports +import eth_abi +import confini +from hexathon import ( + strip_0x, + add_0x, + ) +from chainsyncer.backend import MemBackend +from chainsyncer.driver import HeadSyncer +from chainlib.eth.connection import EthHTTPConnection +from chainlib.eth.block import ( + block_latest, + block_by_number, + Block, + ) +from chainlib.eth.hash import keccak256_string_to_hex +from chainlib.eth.address import to_checksum_address +from chainlib.eth.erc20 import ERC20 +from chainlib.eth.gas import OverrideGasOracle +from chainlib.eth.nonce import RPCNonceOracle +from chainlib.eth.tx import TxFactory +from chainlib.eth.rpc import jsonrpc_template +from chainlib.eth.error import EthException +from chainlib.chain import ChainSpec +from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer +from crypto_dev_signer.keystore.dict import DictKeystore +from cic_types.models.person import Person + + +logging.basicConfig(level=logging.WARNING) +logg = logging.getLogger() + +config_dir = './config' + +argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') +argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address') +argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing') +argparser.add_argument('-c', type=str, default=config_dir, help='config root to use') +argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', 
default='evm:oldchain:1', help='chain spec') +argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec') +argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address') +argparser.add_argument('--token-symbol', default='SRF', type=str, dest='token_symbol', help='Token symbol to use for trnsactions') +argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)') +argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') +argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to') +argparser.add_argument('--offset', type=int, default=0, help='block offset to start syncer from') +argparser.add_argument('-v', help='be verbose', action='store_true') +argparser.add_argument('-vv', help='be more verbose', action='store_true') +argparser.add_argument('user_dir', type=str, help='user export directory') +args = argparser.parse_args(sys.argv[1:]) + +if args.v == True: + logging.getLogger().setLevel(logging.INFO) +elif args.vv == True: + logging.getLogger().setLevel(logging.DEBUG) + +config_dir = os.path.join(args.c) +os.makedirs(config_dir, 0o777, True) +config = confini.Config(config_dir, args.env_prefix) +config.process() +# override args +args_override = { + 'CIC_CHAIN_SPEC': getattr(args, 'i'), + 'ETH_PROVIDER': getattr(args, 'p'), + 'CIC_REGISTRY_ADDRESS': getattr(args, 'r'), + } +config.dict_override(args_override, 'cli flag') +config.censor('PASSWORD', 'DATABASE') +config.censor('PASSWORD', 'SSL') +logg.debug('config loaded from {}:\n{}'.format(config_dir, config)) + +#app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL')) + +signer_address = None +keystore = DictKeystore() +if args.y != None: + logg.debug('loading keystore file {}'.format(args.y)) + signer_address = keystore.import_keystore_file(args.y) + logg.debug('now have key for signer address {}'.format(signer_address)) +signer = EIP155Signer(keystore) + +queue = args.q +chain_str = config.get('CIC_CHAIN_SPEC') +block_offset = 0 +if args.head: + block_offset = -1 +else: + block_offset = args.offset + +chain_spec = ChainSpec.from_chain_str(chain_str) +old_chain_spec_str = args.old_chain_spec +old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str) + +user_dir = args.user_dir # user_out_dir from import_users.py + +token_symbol = args.token_symbol + + +class Handler: + + account_index_add_signature = keccak256_string_to_hex('add(address)')[:8] + + def __init__(self, conn, chain_spec, user_dir, balances, token_address, signer, gas_oracle, nonce_oracle): + self.token_address = token_address + self.user_dir = user_dir + self.balances = balances + self.chain_spec = chain_spec + self.tx_factory = ERC20(chain_spec, signer, gas_oracle, nonce_oracle) + + + def name(self): + return 'balance_handler' + + + def filter(self, conn, block, tx, db_session): + if tx.payload == None or len(tx.payload) == 0: + logg.debug('no payload, skipping {}'.format(tx)) + return + + if tx.payload[:8] == self.account_index_add_signature: + recipient = eth_abi.decode_single('address', bytes.fromhex(tx.payload[-64:])) + #original_address = to_checksum_address(self.addresses[to_checksum_address(recipient)]) + user_file = 'new/{}/{}/{}.json'.format( + recipient[2:4].upper(), + recipient[4:6].upper(), + recipient[2:].upper(), + ) + filepath = 
os.path.join(self.user_dir, user_file) + o = None + try: + f = open(filepath, 'r') + o = json.load(f) + f.close() + except FileNotFoundError: + logg.error('no import record of address {}'.format(recipient)) + return + u = Person.deserialize(o) + original_address = u.identities[old_chain_spec.engine()]['{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())][0] + try: + balance = self.balances[original_address] + except KeyError as e: + logg.error('balance get fail orig {} new {}'.format(original_address, recipient)) + return + + # TODO: store token object in handler ,get decimals from there + multiplier = 10**6 + balance_full = balance * multiplier + logg.info('registered {} originally {} ({}) tx hash {} balance {}'.format(recipient, original_address, u, tx.hash, balance_full)) + + (tx_hash_hex, o) = self.tx_factory.transfer(self.token_address, signer_address, recipient, balance_full) + logg.info('submitting erc20 transfer tx {} for recipient {}'.format(tx_hash_hex, recipient)) + r = conn.do(o) + + tx_path = os.path.join( + user_dir, + 'txs', + strip_0x(tx_hash_hex), + ) + f = open(tx_path, 'w') + f.write(strip_0x(o['params'][0])) + f.close() +# except TypeError as e: +# logg.warning('typerror {}'.format(e)) +# pass +# except IndexError as e: +# logg.warning('indexerror {}'.format(e)) +# pass +# except EthException as e: +# logg.error('send error {}'.format(e).ljust(200)) + #except KeyError as e: + # logg.error('key record not found in imports: {}'.format(e).ljust(200)) + + +#class BlockGetter: +# +# def __init__(self, conn, gas_oracle, nonce_oracle, chain_spec): +# self.conn = conn +# self.tx_factory = ERC20(signer=signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle, chain_id=chain_id) +# +# +# def get(self, n): +# o = block_by_number(n) +# r = self.conn.do(o) +# b = None +# try: +# b = Block(r) +# except TypeError as e: +# if r == None: +# logg.debug('block not found {}'.format(n)) +# else: +# logg.error('block retrieve error {}'.format(e)) +# return b + + +def progress_callback(block_number, tx_index): + sys.stdout.write(str(block_number).ljust(200) + "\n") + + + +def main(): + global chain_str, block_offset, user_dir + + conn = EthHTTPConnection(config.get('ETH_PROVIDER')) + gas_oracle = OverrideGasOracle(conn=conn, limit=8000000) + nonce_oracle = RPCNonceOracle(signer_address, conn) + + # Get Token registry address + txf = TxFactory(chain_spec, signer=signer, gas_oracle=gas_oracle, nonce_oracle=None) + tx = txf.template(signer_address, config.get('CIC_REGISTRY_ADDRESS')) + + registry_addressof_method = keccak256_string_to_hex('addressOf(bytes32)')[:8] + data = add_0x(registry_addressof_method) + data += eth_abi.encode_single('bytes32', b'TokenRegistry').hex() + txf.set_code(tx, data) + + o = jsonrpc_template() + o['method'] = 'eth_call' + o['params'].append(txf.normalize(tx)) + o['params'].append('latest') + r = conn.do(o) + token_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r)))) + logg.info('found token index address {}'.format(token_index_address)) + + + # Get Sarafu token address + tx = txf.template(signer_address, token_index_address) + data = add_0x(registry_addressof_method) + h = hashlib.new('sha256') + h.update(token_symbol.encode('utf-8')) + z = h.digest() + data += eth_abi.encode_single('bytes32', z).hex() + txf.set_code(tx, data) + o = jsonrpc_template() + o['method'] = 'eth_call' + o['params'].append(txf.normalize(tx)) + o['params'].append('latest') + r = conn.do(o) + try: + sarafu_token_address = 
to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r)))) + except ValueError as e: + logg.critical('lookup failed for token {}: {}'.format(token_symbol, e)) + sys.exit(1) + logg.info('found token address {}'.format(sarafu_token_address)) + + syncer_backend = MemBackend(chain_str, 0) + + if block_offset == -1: + o = block_latest() + r = conn.do(o) + block_offset = int(strip_0x(r), 16) + 1 +# +# addresses = {} +# f = open('{}/addresses.csv'.format(user_dir, 'r')) +# while True: +# l = f.readline() +# if l == None: +# break +# r = l.split(',') +# try: +# k = r[0] +# v = r[1].rstrip() +# addresses[k] = v +# sys.stdout.write('loading address mapping {} -> {}'.format(k, v).ljust(200) + "\r") +# except IndexError as e: +# break +# f.close() + + # TODO get decimals from token + balances = {} + f = open('{}/balances.csv'.format(user_dir, 'r')) + remove_zeros = 10**6 + i = 0 + while True: + l = f.readline() + if l == None: + break + r = l.split(',') + try: + address = to_checksum_address(r[0]) + sys.stdout.write('loading balance {} {} {}'.format(i, address, r[1]).ljust(200) + "\r") + except ValueError: + break + balance = int(int(r[1].rstrip()) / remove_zeros) + balances[address] = balance + i += 1 + + f.close() + + syncer_backend.set(block_offset, 0) + syncer = HeadSyncer(syncer_backend, block_callback=progress_callback) + handler = Handler(conn, chain_spec, user_dir, balances, sarafu_token_address, signer, gas_oracle, nonce_oracle) + syncer.add_filter(handler) + syncer.loop(1, conn) + + +if __name__ == '__main__': + main() diff --git a/apps/contract-migration/scripts/eth/import_users.py b/apps/contract-migration/scripts/eth/import_users.py new file mode 100644 index 00000000..c5a5fb69 --- /dev/null +++ b/apps/contract-migration/scripts/eth/import_users.py @@ -0,0 +1,209 @@ +# standard imports +import os +import sys +import json +import logging +import argparse +import uuid +import datetime +import time +import phonenumbers +from glob import glob + +# external imports +import confini +from hexathon import ( + add_0x, + strip_0x, + ) +from cic_types.models.person import Person +from chainlib.eth.address import to_checksum_address +from chainlib.chain import ChainSpec +from chainlib.eth.connection import EthHTTPConnection +from chainlib.eth.gas import RPCGasOracle +from chainlib.eth.nonce import RPCNonceOracle +from cic_types.processor import generate_metadata_pointer +from eth_accounts_index import AccountRegistry +from contract_registry import Registry +from crypto_dev_signer.keystore.dict import DictKeystore +from crypto_dev_signer.eth.signer.defaultsigner import ReferenceSigner as EIP155Signer +from crypto_dev_signer.keystore.keyfile import to_dict as to_keyfile_dict + +logging.basicConfig(level=logging.WARNING) +logg = logging.getLogger() + +default_config_dir = '/usr/local/etc/cic' + +argparser = argparse.ArgumentParser() +argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)') +argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing') +argparser.add_argument('-c', type=str, default=default_config_dir, help='config file') +argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string') +argparser.add_argument('-r', '--registry', dest='r', type=str, help='Contract registry address') +argparser.add_argument('--batch-size', dest='batch_size', default=50, type=int, help='burst size of sending transactions to 
node') +argparser.add_argument('--batch-delay', dest='batch_delay', default=2, type=int, help='seconds delay between batches') +argparser.add_argument('-v', action='store_true', help='Be verbose') +argparser.add_argument('-vv', action='store_true', help='Be more verbose') +argparser.add_argument('user_dir', type=str, help='path to users export dir tree') +args = argparser.parse_args() + +if args.v: + logg.setLevel(logging.INFO) +elif args.vv: + logg.setLevel(logging.DEBUG) + +config_dir = args.c +config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX')) +config.process() +args_override = { + 'CIC_REGISTRY_ADDRESS': getattr(args, 'r'), + 'CIC_CHAIN_SPEC': getattr(args, 'i'), + } +config.dict_override(args_override, 'cli') +config.add(args.user_dir, '_USERDIR', True) + +user_new_dir = os.path.join(args.user_dir, 'new') +os.makedirs(user_new_dir) + +meta_dir = os.path.join(args.user_dir, 'meta') +os.makedirs(meta_dir) + +phone_dir = os.path.join(args.user_dir, 'phone') +os.makedirs(os.path.join(phone_dir, 'meta')) + +user_old_dir = os.path.join(args.user_dir, 'old') +os.stat(user_old_dir) + +txs_dir = os.path.join(args.user_dir, 'txs') +os.makedirs(txs_dir) + +chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) +chain_str = str(chain_spec) + +batch_size = args.batch_size +batch_delay = args.batch_delay + +rpc = EthHTTPConnection(args.p) + +signer_address = None +keystore = DictKeystore() +if args.y != None: + logg.debug('loading keystore file {}'.format(args.y)) + signer_address = keystore.import_keystore_file(args.y) + logg.debug('now have key for signer address {}'.format(signer_address)) +signer = EIP155Signer(keystore) + +nonce_oracle = RPCNonceOracle(signer_address, rpc) + +registry = Registry(chain_spec) +o = registry.address_of(config.get('CIC_REGISTRY_ADDRESS'), 'AccountRegistry') +r = rpc.do(o) +account_registry_address = registry.parse_address_of(r) +logg.info('using account registry {}'.format(account_registry_address)) + +keyfile_dir = os.path.join(config.get('_USERDIR'), 'keystore') +os.makedirs(keyfile_dir) + +def register_eth(i, u): + + address_hex = keystore.new() + address = add_0x(to_checksum_address(address_hex)) + + gas_oracle = RPCGasOracle(rpc, code_callback=AccountRegistry.gas) + c = AccountRegistry(chain_spec, signer=signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) + (tx_hash_hex, o) = c.add(account_registry_address, signer_address, address) + logg.debug('o {}'.format(o)) + rpc.do(o) + + pk = keystore.get(address) + keyfile_content = to_keyfile_dict(pk, 'foo') + keyfile_path = os.path.join(keyfile_dir, '{}.json'.format(address)) + f = open(keyfile_path, 'w') + json.dump(keyfile_content, f) + f.close() + + logg.debug('[{}] register eth {} {} tx {} keyfile {}'.format(i, u, address, tx_hash_hex, keyfile_path)) + + return address + + +def register_ussd(u): + pass + + +if __name__ == '__main__': + + + i = 0 + j = 0 + for x in os.walk(user_old_dir): + for y in x[2]: + if y[len(y)-5:] != '.json': + continue + filepath = os.path.join(x[0], y) + f = open(filepath, 'r') + try: + o = json.load(f) + except json.decoder.JSONDecodeError as e: + f.close() + logg.error('load error for {}: {}'.format(y, e)) + continue + f.close() + u = Person.deserialize(o) + + new_address = register_eth(i, u) + if u.identities.get('evm') == None: + u.identities['evm'] = {} + sub_chain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id()) + u.identities['evm'][sub_chain_str] = [new_address] + + register_ussd(u) + + new_address_clean = 
strip_0x(new_address) + filepath = os.path.join( + user_new_dir, + new_address_clean[:2].upper(), + new_address_clean[2:4].upper(), + new_address_clean.upper() + '.json', + ) + os.makedirs(os.path.dirname(filepath), exist_ok=True) + + o = u.serialize() + f = open(filepath, 'w') + f.write(json.dumps(o)) + f.close() + + meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.person') + meta_filepath = os.path.join(meta_dir, '{}.json'.format(new_address_clean.upper())) + os.symlink(os.path.realpath(filepath), meta_filepath) + + phone_object = phonenumbers.parse(u.tel) + phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164) + logg.debug('>>>>> Using phone {}'.format(phone)) + meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone') + meta_phone_filepath = os.path.join(phone_dir, 'meta', meta_phone_key) + + filepath = os.path.join( + phone_dir, + 'new', + meta_phone_key[:2].upper(), + meta_phone_key[2:4].upper(), + meta_phone_key.upper(), + ) + os.makedirs(os.path.dirname(filepath), exist_ok=True) + + f = open(filepath, 'w') + f.write(to_checksum_address(new_address_clean)) + f.close() + + os.symlink(os.path.realpath(filepath), meta_phone_filepath) + + i += 1 + sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r") + + j += 1 + if j == batch_size: + time.sleep(batch_delay) + j = 0 + + #fi.close() diff --git a/apps/contract-migration/scripts/false/_logs/2021-02-12T18_59_21_680Z-debug.log b/apps/contract-migration/scripts/false/_logs/2021-02-12T18_59_21_680Z-debug.log deleted file mode 100644 index 011bfd8b..00000000 --- a/apps/contract-migration/scripts/false/_logs/2021-02-12T18_59_21_680Z-debug.log +++ /dev/null @@ -1,1383 +0,0 @@ -0 info it worked if it ends with ok -1 verbose cli [ -1 verbose cli '/usr/bin/node', -1 verbose cli '/usr/bin/npm', -1 verbose cli 'install', -1 verbose cli '--no-cache', -1 verbose cli '../../../apps/cic-meta' -1 verbose cli ] -2 info using npm@6.14.11 -3 info using node@v15.6.0 -4 verbose npm-session 64ba63e9b9f4a63f -5 silly install loadCurrentTree -6 silly install readLocalPackageData -7 silly pacote directory manifest for undefined@file:../../cic-meta fetched in 4ms -8 timing stage:loadCurrentTree Completed in 473ms -9 silly install loadIdealTree -10 silly install cloneCurrentTreeToIdealTree -11 timing stage:loadIdealTree:cloneCurrentTree Completed in 3ms -12 silly install loadShrinkwrap -13 timing stage:loadIdealTree:loadShrinkwrap Completed in 57ms -14 silly install loadAllDepsIntoIdealTree -15 silly resolveWithNewModule cic-client-meta@0.0.7-alpha.1 checking installable status -16 silly removeObsoleteDep removing cic-client-meta@0.0.7-alpha.1 from the tree as its been replaced by a newer version or is no longer required -17 silly removeObsoleteDep removing @ethereumjs/tx@3.0.0-beta.1 from the tree as its been replaced by a newer version or is no longer required -18 silly removeObsoleteDep removing @ethereumjs/common@2.0.0-beta.1 from the tree as its been replaced by a newer version or is no longer required -19 silly removeObsoleteDep removing crc-32@1.2.0 from the tree as its been replaced by a newer version or is no longer required -20 silly removeObsoleteDep removing exit-on-epipe@1.0.1 from the tree as its been replaced by a newer version or is no longer required -21 silly removeObsoleteDep removing printj@1.1.2 from the tree as its been replaced by a newer version or is no longer required -22 silly removeObsoleteDep removing automerge@0.14.1 from the tree as its been 
replaced by a newer version or is no longer required -23 silly removeObsoleteDep removing immutable@3.8.2 from the tree as its been replaced by a newer version or is no longer required -24 silly removeObsoleteDep removing transit-immutable-js@0.7.0 from the tree as its been replaced by a newer version or is no longer required -25 silly removeObsoleteDep removing transit-js@0.8.867 from the tree as its been replaced by a newer version or is no longer required -26 silly removeObsoleteDep removing ethereumjs-wallet@1.0.1 from the tree as its been replaced by a newer version or is no longer required -27 silly removeObsoleteDep removing aes-js@3.1.2 from the tree as its been replaced by a newer version or is no longer required -28 silly removeObsoleteDep removing bs58check@2.1.2 from the tree as its been replaced by a newer version or is no longer required -29 silly removeObsoleteDep removing bs58@4.0.1 from the tree as its been replaced by a newer version or is no longer required -30 silly removeObsoleteDep removing base-x@3.0.8 from the tree as its been replaced by a newer version or is no longer required -31 silly removeObsoleteDep removing create-hash@1.2.0 from the tree as its been replaced by a newer version or is no longer required -32 silly removeObsoleteDep removing cipher-base@1.0.4 from the tree as its been replaced by a newer version or is no longer required -33 silly removeObsoleteDep removing md5.js@1.3.5 from the tree as its been replaced by a newer version or is no longer required -34 silly removeObsoleteDep removing hash-base@3.1.0 from the tree as its been replaced by a newer version or is no longer required -35 silly removeObsoleteDep removing ripemd160@2.0.2 from the tree as its been replaced by a newer version or is no longer required -36 silly removeObsoleteDep removing sha.js@2.4.11 from the tree as its been replaced by a newer version or is no longer required -37 silly removeObsoleteDep removing ethereum-cryptography@0.1.3 from the tree as its been replaced by a newer version or is no longer required -38 silly removeObsoleteDep removing @types/pbkdf2@3.1.0 from the tree as its been replaced by a newer version or is no longer required -39 silly removeObsoleteDep removing @types/node@14.14.6 from the tree as its been replaced by a newer version or is no longer required -40 silly removeObsoleteDep removing @types/secp256k1@4.0.1 from the tree as its been replaced by a newer version or is no longer required -41 silly removeObsoleteDep removing blakejs@1.1.0 from the tree as its been replaced by a newer version or is no longer required -42 silly removeObsoleteDep removing browserify-aes@1.2.0 from the tree as its been replaced by a newer version or is no longer required -43 silly removeObsoleteDep removing buffer-xor@1.0.3 from the tree as its been replaced by a newer version or is no longer required -44 silly removeObsoleteDep removing evp_bytestokey@1.0.3 from the tree as its been replaced by a newer version or is no longer required -45 silly removeObsoleteDep removing create-hmac@1.1.7 from the tree as its been replaced by a newer version or is no longer required -46 silly removeObsoleteDep removing hash.js@1.1.7 from the tree as its been replaced by a newer version or is no longer required -47 silly removeObsoleteDep removing keccak@3.0.1 from the tree as its been replaced by a newer version or is no longer required -48 silly removeObsoleteDep removing node-gyp-build@4.2.3 from the tree as its been replaced by a newer version or is no longer required -49 silly 
removeObsoleteDep removing pbkdf2@3.1.1 from the tree as its been replaced by a newer version or is no longer required -50 silly removeObsoleteDep removing randombytes@2.1.0 from the tree as its been replaced by a newer version or is no longer required -51 silly removeObsoleteDep removing scrypt-js@3.0.1 from the tree as its been replaced by a newer version or is no longer required -52 silly removeObsoleteDep removing secp256k1@4.0.2 from the tree as its been replaced by a newer version or is no longer required -53 silly removeObsoleteDep removing elliptic@6.5.3 from the tree as its been replaced by a newer version or is no longer required -54 silly removeObsoleteDep removing brorand@1.1.0 from the tree as its been replaced by a newer version or is no longer required -55 silly removeObsoleteDep removing hmac-drbg@1.0.1 from the tree as its been replaced by a newer version or is no longer required -56 silly removeObsoleteDep removing minimalistic-crypto-utils@1.0.1 from the tree as its been replaced by a newer version or is no longer required -57 silly removeObsoleteDep removing setimmediate@1.0.5 from the tree as its been replaced by a newer version or is no longer required -58 silly removeObsoleteDep removing ethereumjs-util@7.0.7 from the tree as its been replaced by a newer version or is no longer required -59 silly removeObsoleteDep removing @types/bn.js@4.11.6 from the tree as its been replaced by a newer version or is no longer required -60 silly removeObsoleteDep removing bn.js@5.1.3 from the tree as its been replaced by a newer version or is no longer required -61 silly removeObsoleteDep removing ethjs-util@0.1.6 from the tree as its been replaced by a newer version or is no longer required -62 silly removeObsoleteDep removing is-hex-prefixed@1.0.0 from the tree as its been replaced by a newer version or is no longer required -63 silly removeObsoleteDep removing strip-hex-prefix@1.0.0 from the tree as its been replaced by a newer version or is no longer required -64 silly removeObsoleteDep removing rlp@2.2.6 from the tree as its been replaced by a newer version or is no longer required -65 silly removeObsoleteDep removing utf8@3.0.0 from the tree as its been replaced by a newer version or is no longer required -66 silly removeObsoleteDep removing openpgp@4.10.8 from the tree as its been replaced by a newer version or is no longer required -67 silly removeObsoleteDep removing asn1.js@5.4.1 from the tree as its been replaced by a newer version or is no longer required -68 silly removeObsoleteDep removing bn.js@4.11.9 from the tree as its been replaced by a newer version or is no longer required -69 silly removeObsoleteDep removing minimalistic-assert@1.0.1 from the tree as its been replaced by a newer version or is no longer required -70 silly removeObsoleteDep removing node-fetch@2.6.1 from the tree as its been replaced by a newer version or is no longer required -71 silly removeObsoleteDep removing node-localstorage@1.3.1 from the tree as its been replaced by a newer version or is no longer required -72 silly removeObsoleteDep removing write-file-atomic@1.3.4 from the tree as its been replaced by a newer version or is no longer required -73 silly removeObsoleteDep removing imurmurhash@0.1.4 from the tree as its been replaced by a newer version or is no longer required -74 silly removeObsoleteDep removing slide@1.1.6 from the tree as its been replaced by a newer version or is no longer required -75 silly removeObsoleteDep removing pg@8.4.2 from the tree as its been replaced by a 
newer version or is no longer required -76 silly removeObsoleteDep removing buffer-writer@2.0.0 from the tree as its been replaced by a newer version or is no longer required -77 silly removeObsoleteDep removing packet-reader@1.0.0 from the tree as its been replaced by a newer version or is no longer required -78 silly removeObsoleteDep removing pg-connection-string@2.4.0 from the tree as its been replaced by a newer version or is no longer required -79 silly removeObsoleteDep removing pg-pool@3.2.2 from the tree as its been replaced by a newer version or is no longer required -80 silly removeObsoleteDep removing pg-protocol@1.3.0 from the tree as its been replaced by a newer version or is no longer required -81 silly removeObsoleteDep removing pg-types@2.2.0 from the tree as its been replaced by a newer version or is no longer required -82 silly removeObsoleteDep removing pg-int8@1.0.1 from the tree as its been replaced by a newer version or is no longer required -83 silly removeObsoleteDep removing postgres-array@2.0.0 from the tree as its been replaced by a newer version or is no longer required -84 silly removeObsoleteDep removing postgres-bytea@1.0.0 from the tree as its been replaced by a newer version or is no longer required -85 silly removeObsoleteDep removing postgres-date@1.0.7 from the tree as its been replaced by a newer version or is no longer required -86 silly removeObsoleteDep removing postgres-interval@1.2.0 from the tree as its been replaced by a newer version or is no longer required -87 silly removeObsoleteDep removing xtend@4.0.2 from the tree as its been replaced by a newer version or is no longer required -88 silly removeObsoleteDep removing pgpass@1.0.4 from the tree as its been replaced by a newer version or is no longer required -89 silly removeObsoleteDep removing split2@3.2.2 from the tree as its been replaced by a newer version or is no longer required -90 silly removeObsoleteDep removing readable-stream@3.6.0 from the tree as its been replaced by a newer version or is no longer required -91 silly removeObsoleteDep removing string_decoder@1.3.0 from the tree as its been replaced by a newer version or is no longer required -92 silly removeObsoleteDep removing sqlite3@5.0.0 from the tree as its been replaced by a newer version or is no longer required -93 silly removeObsoleteDep removing node-addon-api@2.0.0 from the tree as its been replaced by a newer version or is no longer required -94 silly removeObsoleteDep removing node-gyp@3.8.0 from the tree as its been replaced by a newer version or is no longer required -95 silly removeObsoleteDep removing fstream@1.0.12 from the tree as its been replaced by a newer version or is no longer required -96 silly removeObsoleteDep removing graceful-fs@4.2.4 from the tree as its been replaced by a newer version or is no longer required -97 silly removeObsoleteDep removing inherits@2.0.4 from the tree as its been replaced by a newer version or is no longer required -98 silly removeObsoleteDep removing mkdirp@0.5.5 from the tree as its been replaced by a newer version or is no longer required -99 silly removeObsoleteDep removing minimist@1.2.5 from the tree as its been replaced by a newer version or is no longer required -100 silly removeObsoleteDep removing rimraf@2.7.1 from the tree as its been replaced by a newer version or is no longer required -101 silly removeObsoleteDep removing glob@7.1.6 from the tree as its been replaced by a newer version or is no longer required -102 silly removeObsoleteDep removing 
fs.realpath@1.0.0 from the tree as its been replaced by a newer version or is no longer required
[... remaining lines of the removed npm debug log elided: removeObsoleteDep entries for the rest of the obsolete dependencies, the loadIdealTree timing entries, the duplicated currentTree/idealTree/saveTree dependency dumps rooted at cic-client-meta@0.0.7-alpha.1, and the unbuild/remove/finalize/preinstall/build install actions under apps/contract-migration/scripts/node_modules ...]
-296 warn notsup Unsupported engine for cic-client-meta@0.0.7-alpha.1: wanted: {"node":"~15.3.0"} (current: {"node":"15.6.0","npm":"6.14.11"})
-297 warn notsup Not compatible with your version of node/npm: cic-client-meta@0.0.7-alpha.1
-298 verbose notsup Not compatible with your version of node/npm: cic-client-meta@0.0.7-alpha.1
-298 verbose notsup Required: {"node":"~15.3.0"}
-298 verbose notsup Actual: {"npm":"6.14.11","node":"15.6.0"}
-299 warn enoent ENOENT: no such file or
directory, open '/home/lash/src/ext/cic/grassrootseconomics/cic-internal-integration/apps/contract-migration/scripts/package.json' -300 verbose enoent This is related to npm not being able to find a file. -301 warn scripts No description -302 warn scripts No repository field. -303 warn scripts No README data -304 warn scripts No license field. -305 verbose stack Error: ENOENT: no such file or directory, chmod '/home/lash/src/ext/cic/grassrootseconomics/cic-internal-integration/apps/contract-migration/scripts/node_modules/cic-client-meta/dist-server/scripts/server/server.js' -306 verbose cwd /home/lash/src/ext/cic/grassrootseconomics/cic-internal-integration/apps/contract-migration/scripts -307 verbose Linux 5.10.8-arch1-1 -308 verbose argv "/usr/bin/node" "/usr/bin/npm" "install" "--no-cache" "../../../apps/cic-meta" -309 verbose node v15.6.0 -310 verbose npm v6.14.11 -311 error code ENOENT -312 error syscall chmod -313 error path /home/lash/src/ext/cic/grassrootseconomics/cic-internal-integration/apps/contract-migration/scripts/node_modules/cic-client-meta/dist-server/scripts/server/server.js -314 error errno -2 -315 error enoent ENOENT: no such file or directory, chmod '/home/lash/src/ext/cic/grassrootseconomics/cic-internal-integration/apps/contract-migration/scripts/node_modules/cic-client-meta/dist-server/scripts/server/server.js' -316 error enoent This is related to npm not being able to find a file. -317 verbose exit [ -2, true ] diff --git a/apps/contract-migration/scripts/false/anonymous-cli-metrics.json b/apps/contract-migration/scripts/false/anonymous-cli-metrics.json deleted file mode 100644 index bd2344ca..00000000 --- a/apps/contract-migration/scripts/false/anonymous-cli-metrics.json +++ /dev/null @@ -1 +0,0 @@ -{"metricId":"ea11447f-da1c-49e6-b0a2-8a988a99e3ce","metrics":{"from":"2021-02-12T18:59:21.666Z","to":"2021-02-12T18:59:21.666Z","successfulInstalls":0,"failedInstalls":1}} \ No newline at end of file diff --git a/apps/contract-migration/scripts/import_balance.sh b/apps/contract-migration/scripts/import_balance.sh deleted file mode 100644 index 8db2632d..00000000 --- a/apps/contract-migration/scripts/import_balance.sh +++ /dev/null @@ -1 +0,0 @@ -python import_balance.py -c config -i evm:bloxberg:8996 -y /home/lash/tmp/d/keystore/UTC--2021-02-07T09-58-35.341813355Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c -v $@ diff --git a/apps/contract-migration/scripts/import_users.sh b/apps/contract-migration/scripts/import_users.sh deleted file mode 100644 index 38507b70..00000000 --- a/apps/contract-migration/scripts/import_users.sh +++ /dev/null @@ -1 +0,0 @@ -python import_users.py -c config --redis-host-callback redis -vv $@ diff --git a/apps/contract-migration/scripts/package-lock.json b/apps/contract-migration/scripts/package-lock.json index d2d7ed32..9737db2f 100644 --- a/apps/contract-migration/scripts/package-lock.json +++ b/apps/contract-migration/scripts/package-lock.json @@ -1,6 +1,2088 @@ { + "name": "scripts", + "lockfileVersion": 2, "requires": true, - "lockfileVersion": 1, + "packages": { + "": { + "dependencies": { + "cic-client-meta": "^0.0.7-alpha.5", + "vcard-parser": "^1.0.0" + } + }, + "node_modules/@ethereumjs/common": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@ethereumjs/common/-/common-2.0.0.tgz", + "integrity": "sha512-yL0zA7Xwgz8IFHKW0VoXGjdZDVxUJg8BQ/muMHvYPW7zHJNNC80gQmvLH+MpvIg1TCXZkFXxrpYRAyCElSm+aw==", + "dependencies": { + "crc-32": "^1.2.0" + } + }, + "node_modules/@ethereumjs/tx": { + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/@ethereumjs/tx/-/tx-3.0.2.tgz", + "integrity": "sha512-zmFCosjOdj1WoYEiQBdC4sCOAllBEwxdKuY85L9FgZ4zVDfZUVsQ4S9paczt4hVt65A7N8sJwgVEzDaQmrRaqw==", + "dependencies": { + "@ethereumjs/common": "^2.0.0", + "ethereumjs-util": "^7.0.8" + } + }, + "node_modules/@types/bn.js": { + "version": "4.11.6", + "resolved": "https://registry.npmjs.org/@types/bn.js/-/bn.js-4.11.6.tgz", + "integrity": "sha512-pqr857jrp2kPuO9uRjZ3PwnJTjoQy+fcdxvBTvHm6dkmEL9q+hDD/2j/0ELOBPtPnS8LjCX0gI9nbl8lVkadpg==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "14.14.30", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.30.tgz", + "integrity": "sha512-gUWhy8s45fQp4PqqKecsnOkdW0kt1IaKjgOIR3HPokkzTmQj9ji2wWFID5THu1MKrtO+d4s2lVrlEhXUsPXSvg==" + }, + "node_modules/@types/pbkdf2": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@types/pbkdf2/-/pbkdf2-3.1.0.tgz", + "integrity": "sha512-Cf63Rv7jCQ0LaL8tNXmEyqTHuIJxRdlS5vMh1mj5voN4+QFhVZnlZruezqpWYDiJ8UTzhP0VmeLXCmBk66YrMQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/secp256k1": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@types/secp256k1/-/secp256k1-4.0.1.tgz", + "integrity": "sha512-+ZjSA8ELlOp8SlKi0YLB2tz9d5iPNEmOBd+8Rz21wTMdaXQIa9b6TEnD6l5qKOCypE7FSyPyck12qZJxSDNoog==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" + }, + "node_modules/aes-js": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/aes-js/-/aes-js-3.1.2.tgz", + "integrity": "sha512-e5pEa2kBnBOgR4Y/p20pskXI74UEz7de8ZGVo58asOtvSVG5YAbJeELPZxOmt+Bnz3rX753YKhfIn4X4l1PPRQ==" + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "optional": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "node_modules/ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==" + }, + "node_modules/are-we-there-yet": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz", + "integrity": "sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==", + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" + } + }, + "node_modules/are-we-there-yet/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": 
"https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/are-we-there-yet/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/are-we-there-yet/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/asn1": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "optional": true, + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/asn1.js": { + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", + "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", + "dependencies": { + "bn.js": "^4.0.0", + "inherits": "^2.0.1", + "minimalistic-assert": "^1.0.0", + "safer-buffer": "^2.1.0" + } + }, + "node_modules/asn1.js/node_modules/bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" + }, + "node_modules/assert-plus": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", + "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", + "optional": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=", + "optional": true + }, + "node_modules/automerge": { + "version": "0.14.2", + "resolved": "https://registry.npmjs.org/automerge/-/automerge-0.14.2.tgz", + "integrity": "sha512-shiwuJHCbNRI23WZyIECLV4Ovf3WiAFJ7P9BH4l5gON1In/UUbjcSJKRygtIirObw2UQumeYxp3F2XBdSvQHnA==", + "dependencies": { + "immutable": "^3.8.2", + "transit-immutable-js": "^0.7.0", + "transit-js": "^0.8.861", + "uuid": "^3.4.0" + } + }, + "node_modules/aws-sign2": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", + "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=", + "optional": true, + "engines": { + "node": "*" + } + }, + "node_modules/aws4": { + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", + "optional": true + }, + "node_modules/balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + }, + "node_modules/base-x": { + "version": "3.0.8", + "resolved": 
"https://registry.npmjs.org/base-x/-/base-x-3.0.8.tgz", + "integrity": "sha512-Rl/1AWP4J/zRrk54hhlxH4drNxPJXYUaKffODVI53/dAsV4t9fBxyxYKAVPU1XBHxYwOWP9h9H0hM2MVw4YfJA==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/bcrypt-pbkdf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", + "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", + "optional": true, + "dependencies": { + "tweetnacl": "^0.14.3" + } + }, + "node_modules/blakejs": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/blakejs/-/blakejs-1.1.0.tgz", + "integrity": "sha1-ad+S75U6qIylGjLfarHFShVfx6U=" + }, + "node_modules/block-stream": { + "version": "0.0.9", + "resolved": "https://registry.npmjs.org/block-stream/-/block-stream-0.0.9.tgz", + "integrity": "sha1-E+v+d4oDIFz+A3UUgeu0szAMEmo=", + "optional": true, + "dependencies": { + "inherits": "~2.0.0" + }, + "engines": { + "node": "0.4 || >=0.5.8" + } + }, + "node_modules/bn.js": { + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.1.3.tgz", + "integrity": "sha512-GkTiFpjFtUzU9CbMeJ5iazkCzGL3jrhzerzZIuqLABjbwRaFt33I9tUdSNryIptM+RxDet6OKm2WnLXzW51KsQ==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/brorand": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/brorand/-/brorand-1.1.0.tgz", + "integrity": "sha1-EsJe/kCkXjwyPrhnWgoM5XsiNx8=" + }, + "node_modules/browserify-aes": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/browserify-aes/-/browserify-aes-1.2.0.tgz", + "integrity": "sha512-+7CHXqGuspUn/Sl5aO7Ea0xWGAtETPXNSAjHo48JfLdPWcMng33Xe4znFvQweqc/uzk5zSOI3H52CYnjCfb5hA==", + "dependencies": { + "buffer-xor": "^1.0.3", + "cipher-base": "^1.0.0", + "create-hash": "^1.1.0", + "evp_bytestokey": "^1.0.3", + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/bs58": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/bs58/-/bs58-4.0.1.tgz", + "integrity": "sha1-vhYedsNU9veIrkBx9j806MTwpCo=", + "dependencies": { + "base-x": "^3.0.2" + } + }, + "node_modules/bs58check": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/bs58check/-/bs58check-2.1.2.tgz", + "integrity": "sha512-0TS1jicxdU09dwJMNZtVAfzPi6Q6QeN0pM1Fkzrjn+XYHvzMKPU3pHVpva+769iNVSfIYWf7LJ6WR+BuuMf8cA==", + "dependencies": { + "bs58": "^4.0.0", + "create-hash": "^1.1.0", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/buffer-xor": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", + "integrity": "sha1-JuYe0UIvtw3ULm42cp7VHYVf6Nk=" + }, + "node_modules/caseless": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", + "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", + "optional": true + }, + "node_modules/chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": 
"sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" + }, + "node_modules/cic-client-meta": { + "version": "0.0.7-alpha.5", + "resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.5.tgz", + "integrity": "sha512-h+0wmAKZIgezppBNYDmG387w6tI91FSWqONMTZbMuaO1Ej76Gg0Mk2UcDyAF/dmY6doXz3kHAbWkWat7mTzXAQ==", + "dependencies": { + "@ethereumjs/tx": "^3.0.0-beta.1", + "automerge": "^0.14.1", + "ethereumjs-wallet": "^1.0.1", + "ini": "^1.3.5", + "openpgp": "^4.10.8", + "pg": "^8.4.2", + "sqlite3": "^5.0.0", + "yargs": "^16.1.0" + }, + "engines": { + "node": "~15.3.0" + } + }, + "node_modules/cipher-base": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/cipher-base/-/cipher-base-1.0.4.tgz", + "integrity": "sha512-Kkht5ye6ZGmwv40uUDZztayT2ThLQGfnj/T71N/XzeZeo3nf8foyW7zGTsPYkEya3m5f3cAypH+qe7YOrM1U2Q==", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/cliui/node_modules/ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cliui/node_modules/strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dependencies": { + "ansi-regex": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "optional": true, + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" + }, + "node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "node_modules/crc-32": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.0.tgz", + "integrity": "sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==", + "dependencies": { + "exit-on-epipe": "~1.0.1", + "printj": "~1.1.0" + }, + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/create-hash": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/create-hash/-/create-hash-1.2.0.tgz", + "integrity": "sha512-z00bCGNHDG8mHAkP7CtT1qVu+bFQUPjYq/4Iv3C3kWjTFV10zIjfSoeqXo9Asws8gwSHDGj/hl2u4OGIjapeCg==", + "dependencies": { + "cipher-base": "^1.0.1", + "inherits": "^2.0.1", + "md5.js": "^1.3.4", + "ripemd160": "^2.0.1", + "sha.js": "^2.4.0" + } + }, + "node_modules/create-hmac": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/create-hmac/-/create-hmac-1.1.7.tgz", + "integrity": "sha512-MJG9liiZ+ogc4TzUwuvbER1JRdgvUFSB5+VR/g5h82fGaIRWMWddtKBHi7/sVhfjQZ6SehlyhvQYrcYkaUIpLg==", + "dependencies": { + "cipher-base": "^1.0.3", + "create-hash": "^1.1.0", + "inherits": "^2.0.1", + "ripemd160": "^2.0.0", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + } + }, + "node_modules/dashdash": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", + "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", + "optional": true, + "dependencies": { + "assert-plus": "^1.0.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "optional": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + 
"integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" + }, + "node_modules/detect-libc": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", + "integrity": "sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=", + "bin": { + "detect-libc": "bin/detect-libc.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/ecc-jsbn": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", + "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", + "optional": true, + "dependencies": { + "jsbn": "~0.1.0", + "safer-buffer": "^2.1.0" + } + }, + "node_modules/elliptic": { + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", + "dependencies": { + "bn.js": "^4.11.9", + "brorand": "^1.1.0", + "hash.js": "^1.0.0", + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/elliptic/node_modules/bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/ethereum-cryptography": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/ethereum-cryptography/-/ethereum-cryptography-0.1.3.tgz", + "integrity": "sha512-w8/4x1SGGzc+tO97TASLja6SLd3fRIK2tLVcV2Gx4IB21hE19atll5Cq9o3d0ZmAYC/8aw0ipieTSiekAea4SQ==", + "dependencies": { + "@types/pbkdf2": "^3.0.0", + "@types/secp256k1": "^4.0.1", + "blakejs": "^1.1.0", + "browserify-aes": "^1.2.0", + "bs58check": "^2.1.2", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "hash.js": "^1.1.7", + "keccak": "^3.0.0", + "pbkdf2": "^3.0.17", + "randombytes": "^2.1.0", + "safe-buffer": "^5.1.2", + "scrypt-js": "^3.0.0", + "secp256k1": "^4.0.1", + "setimmediate": "^1.0.5" + } + }, + "node_modules/ethereumjs-util": { + "version": "7.0.8", + "resolved": "https://registry.npmjs.org/ethereumjs-util/-/ethereumjs-util-7.0.8.tgz", + "integrity": "sha512-JJt7tDpCAmDPw/sGoFYeq0guOVqT3pTE9xlEbBmc/nlCij3JRCoS2c96SQ6kXVHOT3xWUNLDm5QCJLQaUnVAtQ==", + "dependencies": { + "@types/bn.js": "^4.11.3", + "bn.js": "^5.1.2", + "create-hash": "^1.1.2", + "ethereum-cryptography": "^0.1.3", + "ethjs-util": "0.1.6", + "rlp": "^2.2.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/ethereumjs-wallet": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ethereumjs-wallet/-/ethereumjs-wallet-1.0.1.tgz", + "integrity": "sha512-3Z5g1hG1das0JWU6cQ9HWWTY2nt9nXCcwj7eXVNAHKbo00XAZO8+NHlwdgXDWrL0SXVQMvTWN8Q/82DRH/JhPw==", + "dependencies": { + "aes-js": "^3.1.1", + "bs58check": "^2.1.2", + "ethereum-cryptography": "^0.1.3", + "ethereumjs-util": "^7.0.2", + "randombytes": "^2.0.6", + "scrypt-js": "^3.0.1", + "utf8": "^3.0.0", + "uuid": "^3.3.2" + } + }, + 
"node_modules/ethjs-util": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/ethjs-util/-/ethjs-util-0.1.6.tgz", + "integrity": "sha512-CUnVOQq7gSpDHZVVrQW8ExxUETWrnrvXYvYz55wOU8Uj4VCgw56XC2B/fVqQN+f7gmrnRHSLVnFAwsCuNwji8w==", + "dependencies": { + "is-hex-prefixed": "1.0.0", + "strip-hex-prefix": "1.0.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/evp_bytestokey": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/evp_bytestokey/-/evp_bytestokey-1.0.3.tgz", + "integrity": "sha512-/f2Go4TognH/KvCISP7OUsHn85hT9nUkxxA9BEWxFn+Oj9o8ZNLm/40hdlgSLyuOimsrTKLUMEorQexp/aPQeA==", + "dependencies": { + "md5.js": "^1.3.4", + "safe-buffer": "^5.1.1" + } + }, + "node_modules/exit-on-epipe": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz", + "integrity": "sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "optional": true + }, + "node_modules/extsprintf": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", + "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=", + "engines": [ + "node >=0.6.0" + ], + "optional": true + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "optional": true + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "optional": true + }, + "node_modules/forever-agent": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", + "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", + "optional": true, + "engines": { + "node": "*" + } + }, + "node_modules/form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "optional": true, + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 0.12" + } + }, + "node_modules/fs-minipass": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz", + "integrity": "sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==", + "dependencies": { + "minipass": "^2.6.0" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "node_modules/fstream": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fstream/-/fstream-1.0.12.tgz", + "integrity": "sha512-WvJ193OHa0GHPEL+AycEJgxvBEwyfRkN1vhjca23OaPVMCaLCXTd5qAu82AjTcgP1UJmytkOKb63Ypde7raDIg==", + "optional": true, + "dependencies": { + 
"graceful-fs": "^4.1.2", + "inherits": "~2.0.0", + "mkdirp": ">=0.5 0", + "rimraf": "2" + }, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "dependencies": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/getpass": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", + "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", + "optional": true, + "dependencies": { + "assert-plus": "^1.0.0" + } + }, + "node_modules/glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.6.tgz", + "integrity": "sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ==" + }, + "node_modules/har-schema": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", + "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=", + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/har-validator": { + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", + "optional": true, + "dependencies": { + "ajv": "^6.12.3", + "har-schema": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" + }, + "node_modules/hash-base": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", + "dependencies": { + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/hash.js": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/hash.js/-/hash.js-1.1.7.tgz", + "integrity": "sha512-taOaskGt4z4SOANNseOviYDvjEJinIkRgmp7LbKP2YTTmVxWBl87s/uzK9r+44BclBSp2X7K1hqeNfz9JbBeXA==", + "dependencies": { + "inherits": "^2.0.3", + "minimalistic-assert": "^1.0.1" + } + }, + "node_modules/hmac-drbg": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/hmac-drbg/-/hmac-drbg-1.0.1.tgz", + "integrity": "sha1-0nRXAQJabHdabFRXk+1QL8DGSaE=", + "dependencies": { + "hash.js": 
"^1.0.3", + "minimalistic-assert": "^1.0.0", + "minimalistic-crypto-utils": "^1.0.1" + } + }, + "node_modules/http-signature": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", + "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", + "optional": true, + "dependencies": { + "assert-plus": "^1.0.0", + "jsprim": "^1.2.2", + "sshpk": "^1.7.0" + }, + "engines": { + "node": ">=0.8", + "npm": ">=1.3.7" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore-walk": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-3.0.3.tgz", + "integrity": "sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==", + "dependencies": { + "minimatch": "^3.0.4" + } + }, + "node_modules/immutable": { + "version": "3.8.2", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-3.8.2.tgz", + "integrity": "sha1-wkOZUUVbs5kT2vKBN28VMOEErfM=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, + "node_modules/is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-hex-prefixed": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-hex-prefixed/-/is-hex-prefixed-1.0.0.tgz", + "integrity": "sha1-fY035q135dEnFIkTxXPggtd39VQ=", + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "optional": true + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "optional": true + }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": 
"https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=", + "optional": true + }, + "node_modules/jsbn": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", + "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", + "optional": true + }, + "node_modules/json-schema": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", + "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "optional": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "optional": true + }, + "node_modules/json-stringify-safe": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", + "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=", + "optional": true + }, + "node_modules/jsprim": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", + "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "engines": [ + "node >=0.6.0" + ], + "optional": true, + "dependencies": { + "assert-plus": "1.0.0", + "extsprintf": "1.3.0", + "json-schema": "0.2.3", + "verror": "1.10.0" + } + }, + "node_modules/keccak": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/keccak/-/keccak-3.0.1.tgz", + "integrity": "sha512-epq90L9jlFWCW7+pQa6JOnKn2Xgl2mtI664seYR6MHskvI9agt7AnDqmAlp9TqU4/caMYbA08Hi5DMZAl5zdkA==", + "dependencies": { + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/md5.js": { + "version": "1.3.5", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz", + "integrity": "sha512-xitP+WxNPcTTOgnTJcrhM0xvdPepipPSf3I8EIpGKeFLjt3PlJLIDG3u8EX53ZIubkb+5U2+3rELYpEhHhzdkg==", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1", + "safe-buffer": "^5.1.2" + } + }, + "node_modules/mime-db": { + "version": "1.46.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.46.0.tgz", + "integrity": "sha512-svXaP8UQRZ5K7or+ZmfNhg2xX3yKDMUzqadsSqi4NCH/KomcH75MAMYAGVlvXn4+b/xOPhS3I2uHKRUzvjY7BQ==", + "optional": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.29", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.29.tgz", + "integrity": "sha512-Y/jMt/S5sR9OaqteJtslsFZKWOIIqMACsJSiHghlCAyhf7jfVYjKBmLiX8OgpWeW+fjJ2b+Az69aPFPkUOY6xQ==", + "optional": true, + "dependencies": { + "mime-db": "1.46.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/minimalistic-assert": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-assert/-/minimalistic-assert-1.0.1.tgz", + "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==" + }, + "node_modules/minimalistic-crypto-utils": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz", + "integrity": "sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo=" + }, + "node_modules/minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dependencies": { + 
"brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==" + }, + "node_modules/minipass": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", + "integrity": "sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==", + "dependencies": { + "safe-buffer": "^5.1.2", + "yallist": "^3.0.0" + } + }, + "node_modules/minizlib": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.3.3.tgz", + "integrity": "sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==", + "dependencies": { + "minipass": "^2.9.0" + } + }, + "node_modules/mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dependencies": { + "minimist": "^1.2.5" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/needle": { + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/needle/-/needle-2.6.0.tgz", + "integrity": "sha512-KKYdza4heMsEfSWD7VPUIz3zX2XDwOyX2d+geb4vrERZMT5RMU6ujjaD+I5Yr54uZxQ2w6XRTAhHBbSCyovZBg==", + "dependencies": { + "debug": "^3.2.6", + "iconv-lite": "^0.4.4", + "sax": "^1.2.4" + }, + "bin": { + "needle": "bin/needle" + }, + "engines": { + "node": ">= 4.4.x" + } + }, + "node_modules/node-addon-api": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-2.0.2.tgz", + "integrity": "sha512-Ntyt4AIXyaLIuMHF6IOoTakB3K+RWxwtsHNRxllEoA6vPwP9o4866g6YWDLUdnucilZhmkxiHwHr11gAENw+QA==" + }, + "node_modules/node-fetch": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", + "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==", + "engines": { + "node": "4.x || >=6.0.0" + } + }, + "node_modules/node-gyp": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-3.8.0.tgz", + "integrity": "sha512-3g8lYefrRRzvGeSowdJKAKyks8oUpLEd/DyPV4eMhVlhJ0aNaZqIrNUIPuEWWTAoPqyFkfGrM67MC69baqn6vA==", + "optional": true, + "dependencies": { + "fstream": "^1.0.0", + "glob": "^7.0.3", + "graceful-fs": "^4.1.2", + "mkdirp": "^0.5.0", + "nopt": "2 || 3", + "npmlog": "0 || 1 || 2 || 3 || 4", + "osenv": "0", + "request": "^2.87.0", + "rimraf": "2", + "semver": "~5.3.0", + "tar": "^2.0.0", + "which": "1" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/node-gyp-build": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz", + "integrity": "sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg==", + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" + } + }, + "node_modules/node-localstorage": { + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/node-localstorage/-/node-localstorage-1.3.1.tgz", + "integrity": "sha512-NMWCSWWc6JbHT5PyWlNT2i8r7PgGYXVntmKawY83k/M0UJScZ5jirb61TLnqKwd815DfBQu+lR3sRw08SPzIaQ==", + "dependencies": { + "write-file-atomic": "^1.1.4" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/node-pre-gyp": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/node-pre-gyp/-/node-pre-gyp-0.11.0.tgz", + "integrity": "sha512-TwWAOZb0j7e9eGaf9esRx3ZcLaE5tQ2lvYy1pb5IAaG1a2e2Kv5Lms1Y4hpj+ciXJRofIxxlt5haeQ/2ANeE0Q==", + "dependencies": { + "detect-libc": "^1.0.2", + "mkdirp": "^0.5.1", + "needle": "^2.2.1", + "nopt": "^4.0.1", + "npm-packlist": "^1.1.6", + "npmlog": "^4.0.2", + "rc": "^1.2.7", + "rimraf": "^2.6.1", + "semver": "^5.3.0", + "tar": "^4" + }, + "bin": { + "node-pre-gyp": "bin/node-pre-gyp" + } + }, + "node_modules/node-pre-gyp/node_modules/nopt": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz", + "integrity": "sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==", + "dependencies": { + "abbrev": "1", + "osenv": "^0.1.4" + }, + "bin": { + "nopt": "bin/nopt.js" + } + }, + "node_modules/node-pre-gyp/node_modules/tar": { + "version": "4.4.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.13.tgz", + "integrity": "sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==", + "dependencies": { + "chownr": "^1.1.1", + "fs-minipass": "^1.2.5", + "minipass": "^2.8.6", + "minizlib": "^1.2.1", + "mkdirp": "^0.5.0", + "safe-buffer": "^5.1.2", + "yallist": "^3.0.3" + }, + "engines": { + "node": ">=4.5" + } + }, + "node_modules/nopt": { + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-3.0.6.tgz", + "integrity": "sha1-xkZdvwirzU2zWTF/eaxopkayj/k=", + "optional": true, + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + } + }, + "node_modules/npm-bundled": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-1.1.1.tgz", + "integrity": "sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==", + "dependencies": { + "npm-normalize-package-bin": "^1.0.1" + } + }, + "node_modules/npm-normalize-package-bin": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz", + "integrity": "sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==" + }, + "node_modules/npm-packlist": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-1.4.8.tgz", + "integrity": "sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==", + "dependencies": { + "ignore-walk": "^3.0.1", + "npm-bundled": "^1.0.1", + "npm-normalize-package-bin": "^1.0.1" + } + }, + "node_modules/npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "dependencies": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, + "node_modules/number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "engines": { + "node": ">=0.10.0" 
+ } + }, + "node_modules/oauth-sign": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", + "integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==", + "optional": true, + "engines": { + "node": "*" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/openpgp": { + "version": "4.10.10", + "resolved": "https://registry.npmjs.org/openpgp/-/openpgp-4.10.10.tgz", + "integrity": "sha512-Ub48OogGPjNsr0G/wnJ/SyAQzt/tfcXZTWVZdjKFpXCQV1Ca+upFdSPPkBlGG3lb9EQGOKZJ2tzYNH6ZyKMkDQ==", + "dependencies": { + "asn1.js": "^5.0.0", + "node-fetch": "^2.1.2", + "node-localstorage": "~1.3.0" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + "node_modules/os-homedir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", + "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/osenv": { + "version": "0.1.5", + "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", + "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", + "dependencies": { + "os-homedir": "^1.0.0", + "os-tmpdir": "^1.0.0" + } + }, + "node_modules/packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pbkdf2": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.1.tgz", + "integrity": "sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg==", + "dependencies": { + "create-hash": "^1.1.2", + "create-hmac": "^1.1.4", + "ripemd160": "^2.0.1", + "safe-buffer": "^5.0.1", + "sha.js": "^2.4.8" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/performance-now": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", + "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", + "optional": true + }, + "node_modules/pg": { + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.5.1.tgz", + "integrity": "sha512-9wm3yX9lCfjvA98ybCyw2pADUivyNWT/yIP4ZcDVpMN0og70BUWYEGXPCTAQdGTAqnytfRADb7NERrY1qxhIqw==", + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.4.0", + "pg-pool": "^3.2.2", + "pg-protocol": "^1.4.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + } + }, + 
"node_modules/pg-connection-string": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.4.0.tgz", + "integrity": "sha512-3iBXuv7XKvxeMrIgym7njT+HlZkwZqqGX4Bu9cci8xHZNT+Um1gWKqCsAzcC0d95rcKMU5WBg6YRUcHyV0HZKQ==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.2.2.tgz", + "integrity": "sha512-ORJoFxAlmmros8igi608iVEbQNNZlp89diFVx6yV5v+ehmpMY9sK6QgpmgoXbmkNaBAx8cOOZh9g80kJv1ooyA==" + }, + "node_modules/pg-protocol": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.4.0.tgz", + "integrity": "sha512-El+aXWcwG/8wuFICMQjM5ZSAm6OWiJicFdNYo+VY3QP+8vI4SvLIWVe51PppTzMhikUJR+PsyIFKqfdXPz/yxA==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.4.tgz", + "integrity": "sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w==", + "dependencies": { + "split2": "^3.1.1" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/printj": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/printj/-/printj-1.1.2.tgz", + "integrity": "sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==", + "bin": { + "printj": "bin/printj.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": 
"sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/psl": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", + "optional": true + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/qs": { + "version": "6.5.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", + "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", + "optional": true, + "engines": { + "node": ">=0.6" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/request": { + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", + "optional": true, + "dependencies": { + "aws-sign2": "~0.7.0", + "aws4": "^1.8.0", + "caseless": "~0.12.0", + "combined-stream": "~1.0.6", + "extend": "~3.0.2", + "forever-agent": "~0.6.1", + "form-data": "~2.3.2", + "har-validator": "~5.1.3", + "http-signature": "~1.2.0", + "is-typedarray": "~1.0.0", + "isstream": "~0.1.2", + "json-stringify-safe": "~5.0.1", + "mime-types": "~2.1.19", + "oauth-sign": "~0.9.0", + "performance-now": "^2.1.0", + "qs": "~6.5.2", + "safe-buffer": "^5.1.2", + "tough-cookie": "~2.5.0", + "tunnel-agent": "^0.6.0", + "uuid": "^3.3.2" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/ripemd160": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.2.tgz", + "integrity": "sha512-ii4iagi25WusVoiC4B4lq7pbXfAp3D9v5CwfkY33vffw2+pkDjY1D8GaN7spsxvCSx8dkPqOZCEZyfxcmJG2IA==", + "dependencies": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, + "node_modules/rlp": { + "version": "2.2.6", + "resolved": "https://registry.npmjs.org/rlp/-/rlp-2.2.6.tgz", + "integrity": "sha512-HAfAmL6SDYNWPUOJNrM500x4Thn4PZsEy5pijPh40U9WfNk0z15hUYzO9xVIMAdIHdFtD8CBDHd75Td1g36Mjg==", + "dependencies": { + "bn.js": "^4.11.1" + }, + "bin": { + "rlp": "bin/rlp" + } + }, + "node_modules/rlp/node_modules/bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==" + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" + }, + "node_modules/scrypt-js": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/scrypt-js/-/scrypt-js-3.0.1.tgz", + "integrity": "sha512-cdwTTnqPu0Hyvf5in5asVdZocVDTNRmR7XEcJuIzMjJeSHybHl7vpB66AzwTaIg6CLSbtjcxc8fqcySfnTkccA==" + }, + "node_modules/secp256k1": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/secp256k1/-/secp256k1-4.0.2.tgz", + "integrity": "sha512-UDar4sKvWAksIlfX3xIaQReADn+WFnHvbVujpcbr+9Sf/69odMwy2MUsz5CKLQgX9nsIyrjuxL2imVyoNHa3fg==", + "dependencies": { + "elliptic": "^6.5.2", + "node-addon-api": "^2.0.0", + "node-gyp-build": "^4.2.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/semver": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz", + "integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, + "node_modules/setimmediate": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", + "integrity": "sha1-KQy7Iy4waULX1+qbg3Mqt4VvgoU=" + }, + "node_modules/sha.js": { + "version": "2.4.11", + "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz", + "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==", + "dependencies": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + }, + "bin": { + "sha.js": "bin.js" + } + }, + "node_modules/signal-exit": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", + "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==" + }, + "node_modules/slide": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz", + "integrity": 
"sha1-VusCfWW00tzmyy4tMsTUr8nh1wc=", + "engines": { + "node": "*" + } + }, + "node_modules/split2": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", + "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", + "dependencies": { + "readable-stream": "^3.0.0" + } + }, + "node_modules/sqlite3": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/sqlite3/-/sqlite3-5.0.2.tgz", + "integrity": "sha512-1SdTNo+BVU211Xj1csWa8lV6KM0CtucDwRyA0VHl91wEH1Mgh7RxUpI4rVvG7OhHrzCSGaVyW5g8vKvlrk9DJA==", + "dependencies": { + "node-addon-api": "^3.0.0", + "node-pre-gyp": "^0.11.0" + }, + "optionalDependencies": { + "node-gyp": "3.x" + } + }, + "node_modules/sqlite3/node_modules/node-addon-api": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.1.0.tgz", + "integrity": "sha512-flmrDNB06LIl5lywUz7YlNGZH/5p0M7W28k8hzd9Lshtdh1wshD2Y+U4h9LD6KObOy1f+fEVdgprPrEymjM5uw==" + }, + "node_modules/sshpk": { + "version": "1.16.1", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", + "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "optional": true, + "dependencies": { + "asn1": "~0.2.3", + "assert-plus": "^1.0.0", + "bcrypt-pbkdf": "^1.0.0", + "dashdash": "^1.12.0", + "ecc-jsbn": "~0.1.1", + "getpass": "^0.1.1", + "jsbn": "~0.1.0", + "safer-buffer": "^2.0.2", + "tweetnacl": "~0.14.0" + }, + "bin": { + "sshpk-conv": "bin/sshpk-conv", + "sshpk-sign": "bin/sshpk-sign", + "sshpk-verify": "bin/sshpk-verify" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-hex-prefix": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/strip-hex-prefix/-/strip-hex-prefix-1.0.0.tgz", + "integrity": "sha1-DF8VX+8RUTczd96du1iNoFUA428=", + "dependencies": { + "is-hex-prefixed": "1.0.0" + }, + "engines": { + "node": ">=6.5.0", + "npm": ">=3" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/tar": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/tar/-/tar-2.2.2.tgz", + "integrity": "sha512-FCEhQ/4rE1zYv9rYXJw/msRqsnmlje5jHP6huWeBZ704jUTy02c5AZyWujpMR1ax6mVw9NyJMfuK2CMDWVIfgA==", + "optional": true, + "dependencies": { + "block-stream": "*", + "fstream": "^1.0.12", + "inherits": "2" + } + }, + 
"node_modules/tough-cookie": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", + "optional": true, + "dependencies": { + "psl": "^1.1.28", + "punycode": "^2.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/transit-immutable-js": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/transit-immutable-js/-/transit-immutable-js-0.7.0.tgz", + "integrity": "sha1-mT4lCJtjEf9AIUD1VidtbSUwBdk=" + }, + "node_modules/transit-js": { + "version": "0.8.867", + "resolved": "https://registry.npmjs.org/transit-js/-/transit-js-0.8.867.tgz", + "integrity": "sha512-rOwB4K0z/WZ+E2bV42iN9UV3mvGzmwSv/IpMOKdnFpawPAZT0d1L7f91Y+tZQF7lXSDGk+oln4XyIQXo+pyTGA==", + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/tunnel-agent": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "optional": true, + "dependencies": { + "safe-buffer": "^5.0.1" + }, + "engines": { + "node": "*" + } + }, + "node_modules/tweetnacl": { + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", + "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", + "optional": true + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "optional": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/utf8": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/utf8/-/utf8-3.0.0.tgz", + "integrity": "sha512-E8VjFIQ/TyQgp+TZfS6l8yp/xWppSAHzidGiRrqe4bK4XP9pTRyKFgGJpO3SN7zdX4DeomTrwaseCHovfpFcqQ==" + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/vcard-parser": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/vcard-parser/-/vcard-parser-1.0.0.tgz", + "integrity": "sha512-rSEjrjBK3of4VimMR5vBjLLcN5ZCSp9yuVzyx5i4Fwx74Yd0s+DnHtSit/wAAtj1a7/T/qQc0ykwXADoD0+fTQ==" + }, + "node_modules/verror": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", + "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", + "engines": [ + "node >=0.6.0" + ], + "optional": true, + "dependencies": { + "assert-plus": "^1.0.0", + "core-util-is": "1.0.2", + "extsprintf": "^1.2.0" + } + }, + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "optional": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": 
"sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dependencies": { + "string-width": "^1.0.2 || 2" + } + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/wrap-ansi/node_modules/ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi/node_modules/strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dependencies": { + "ansi-regex": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "node_modules/write-file-atomic": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-1.3.4.tgz", + "integrity": "sha1-+Aek8LHZ6ROuekgRLmzDrxmRtF8=", + "dependencies": { + "graceful-fs": "^4.1.11", + "imurmurhash": "^0.1.4", + "slide": "^1.1.5" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", + "integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + 
"get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.5", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.5.tgz", + "integrity": "sha512-jYRGS3zWy20NtDtK2kBgo/TlAoy5YUuhD9/LZ7z7W4j1Fdw2cqD0xEEclf8fxc8xjD6X5Qr+qQQwCEsP8iRiYg==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs/node_modules/ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs/node_modules/strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dependencies": { + "ansi-regex": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + } + }, "dependencies": { "@ethereumjs/common": { "version": "2.0.0", @@ -296,9 +2378,9 @@ "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==" }, "cic-client-meta": { - "version": "0.0.7-alpha.2", - "resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.2.tgz", - "integrity": "sha512-q7Ifz/THPIp9XvjUEK+WFM/UFk1R0VbGJXopQd/3gW3zRpxEPj5V1JfcTts7EkyS5eYaLA0/+E4u6j4ddgmsfg==", + "version": "0.0.7-alpha.5", + "resolved": "https://registry.npmjs.org/cic-client-meta/-/cic-client-meta-0.0.7-alpha.5.tgz", + "integrity": "sha512-h+0wmAKZIgezppBNYDmG387w6tI91FSWqONMTZbMuaO1Ej76Gg0Mk2UcDyAF/dmY6doXz3kHAbWkWat7mTzXAQ==", "requires": { "@ethereumjs/tx": "^3.0.0-beta.1", "automerge": "^0.14.1", @@ -1489,6 +3571,14 @@ "tweetnacl": "~0.14.0" } }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + }, "string-width": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", @@ -1499,14 +3589,6 @@ "strip-ansi": "^3.0.0" } }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - } - }, "strip-ansi": { "version": "3.0.1", "resolved": 
"https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -1598,6 +3680,11 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" }, + "vcard-parser": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/vcard-parser/-/vcard-parser-1.0.0.tgz", + "integrity": "sha512-rSEjrjBK3of4VimMR5vBjLLcN5ZCSp9yuVzyx5i4Fwx74Yd0s+DnHtSit/wAAtj1a7/T/qQc0ykwXADoD0+fTQ==" + }, "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", diff --git a/apps/contract-migration/scripts/package.json b/apps/contract-migration/scripts/package.json new file mode 100644 index 00000000..b5f273d2 --- /dev/null +++ b/apps/contract-migration/scripts/package.json @@ -0,0 +1,6 @@ +{ + "dependencies": { + "cic-client-meta": "^0.0.7-alpha.6", + "vcard-parser": "^1.0.0" + } +} diff --git a/apps/contract-migration/scripts/requirements.txt b/apps/contract-migration/scripts/requirements.txt index c195635f..d0ddd1e6 100644 --- a/apps/contract-migration/scripts/requirements.txt +++ b/apps/contract-migration/scripts/requirements.txt @@ -1,3 +1,5 @@ -cic-base[full_graph]==0.1.2a40 -cic-eth==0.10.1b1 -cic-types==0.1.0a8 +cic-base[full_graph]==0.1.2a67 +sarafu-faucet==0.0.2a20 +cic-eth==0.11.0b3 +cic-types==0.1.0a10 +crypto-dev-signer==0.4.14b1 diff --git a/apps/contract-migration/scripts/verify.py b/apps/contract-migration/scripts/verify.py index 0c7ddefa..14272ddd 100644 --- a/apps/contract-migration/scripts/verify.py +++ b/apps/contract-migration/scripts/verify.py @@ -10,6 +10,9 @@ import hashlib import csv import json import urllib +import copy +import uuid +import urllib.request # external imports import celery @@ -39,7 +42,6 @@ from chainlib.eth.gas import ( from chainlib.eth.tx import TxFactory from chainlib.eth.rpc import jsonrpc_template from chainlib.eth.error import EthException -from cic_eth.api.api_admin import AdminApi from cic_types.models.person import ( Person, generate_metadata_pointer, @@ -51,12 +53,38 @@ logg = logging.getLogger() config_dir = '/usr/local/etc/cic-syncer' +custodial_tests = [ + 'local_key', + 'gas', + 'faucet', + ] + +metadata_tests = [ + 'metadata', + 'metadata_phone', + ] + +eth_tests = [ + 'accounts_index', + 'balance', + ] + +phone_tests = [ + 'ussd', + ] + +all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests + argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address') argparser.add_argument('-c', type=str, default=config_dir, help='config root to use') argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec') argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec') argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url') +argparser.add_argument('--ussd-provider', type=str, dest='ussd_provider', default='http://localhost:63315', help='cic-ussd url') +argparser.add_argument('--skip-custodial', dest='skip_custodial', action='store_true', help='skip all custodial verifications') +argparser.add_argument('--exclude', action='append', type=str, default=[], help='skip specified verification') +argparser.add_argument('--include', action='append', type=str, help='include specified verification') 
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address') argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') argparser.add_argument('-x', '--exit-on-error', dest='x', action='store_true', help='Halt exection on error') @@ -83,6 +111,9 @@ args_override = { config.dict_override(args_override, 'cli flag') config.censor('PASSWORD', 'DATABASE') config.censor('PASSWORD', 'SSL') +config.add(args.meta_provider, '_META_PROVIDER', True) +config.add(args.ussd_provider, '_USSD_PROVIDER', True) + logg.debug('config loaded from {}:\n{}'.format(config_dir, config)) celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL')) @@ -92,17 +123,63 @@ chain_str = str(chain_spec) old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec) old_chain_str = str(old_chain_spec) user_dir = args.user_dir # user_out_dir from import_users.py -meta_url = args.meta_provider exit_on_error = args.x +active_tests = [] +exclude = [] +include = args.include +if args.include == None: + include = all_tests +for t in args.exclude: + if t not in all_tests: + raise ValueError('Cannot exclude unknown verification "{}"'.format(t)) + exclude.append(t) +if args.skip_custodial: + logg.info('will skip all custodial verifications ({})'.format(','.join(custodial_tests))) + for t in custodial_tests: + if t not in exclude: + exclude.append(t) +for t in include: + if t not in all_tests: + raise ValueError('Cannot include unknown verification "{}"'.format(t)) + if t not in exclude: + active_tests.append(t) + logg.info('will perform verification "{}"'.format(t)) + +api = None +for t in custodial_tests: + if t in active_tests: + from cic_eth.api.api_admin import AdminApi + api = AdminApi(None) + logg.info('activating custodial module'.format(t)) + break + +cols = os.get_terminal_size().columns + + +def to_terminalwidth(s): + ss = s.ljust(int(cols)-1) + ss += "\r" + return ss + +def default_outfunc(s): + ss = to_terminalwidth(s) + sys.stdout.write(ss) +outfunc = default_outfunc +if logg.isEnabledFor(logging.DEBUG): + outfunc = logg.debug + class VerifierState: - def __init__(self, item_keys): + def __init__(self, item_keys, active_tests=None): self.items = {} for k in item_keys: - logg.info('k {}'.format(k)) self.items[k] = 0 + if active_tests == None: + self.active_tests = copy.copy(item_keys) + else: + self.active_tests = copy.copy(active_tests) def poke(self, item_key): @@ -112,7 +189,10 @@ class VerifierState: def __str__(self): r = '' for k in self.items.keys(): - r += '{}: {}\n'.format(k, self.items[k]) + if k in self.active_tests: + r += '{}: {}\n'.format(k, self.items[k]) + else: + r += '{}: skipped\n'.format(k) return r @@ -138,20 +218,20 @@ class Verifier: self.index_address = index_address self.token_address = token_address self.faucet_address = faucet_address - self.erc20_tx_factory = ERC20(chain_id=chain_spec.chain_id(), gas_oracle=gas_oracle) - self.tx_factory = TxFactory(chain_id=chain_spec.chain_id(), gas_oracle=gas_oracle) + self.erc20_tx_factory = ERC20(chain_spec, gas_oracle=gas_oracle) + self.tx_factory = TxFactory(chain_spec, gas_oracle=gas_oracle) self.api = cic_eth_api self.data_dir = data_dir self.exit_on_error = exit_on_error - self.faucet_tx_factory = SingleShotFaucet(chain_id=chain_spec.chain_id(), gas_oracle=gas_oracle) + self.faucet_tx_factory = SingleShotFaucet(chain_spec, gas_oracle=gas_oracle) 
verifymethods = [] for k in dir(self): if len(k) > 7 and k[:7] == 'verify_': - logg.info('adding verify method {}'.format(k)) + logg.debug('verifier has verify method {}'.format(k)) verifymethods.append(k[7:]) - self.state = VerifierState(verifymethods) + self.state = VerifierState(verifymethods, active_tests=active_tests) def verify_accounts_index(self, address, balance=None): @@ -191,6 +271,7 @@ class Verifier: def verify_gas(self, address, balance_token=None): o = balance(address) r = self.conn.do(o) + logg.debug('wtf {}'.format(r)) actual_balance = int(strip_0x(r), 16) if actual_balance == 0: raise VerifierError((address, actual_balance), 'gas') @@ -205,7 +286,7 @@ class Verifier: def verify_metadata(self, address, balance=None): k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), ':cic.person') - url = os.path.join(meta_url, k) + url = os.path.join(config.get('_META_PROVIDER'), k) logg.debug('verify metadata url {}'.format(url)) try: res = urllib.request.urlopen(url) @@ -233,26 +314,91 @@ class Verifier: raise VerifierError(o_retrieved, 'metadata (person)') - def verify(self, address, balance): - logg.debug('verify {} {}'.format(address, balance)) - - methods = [ - 'local_key', - 'accounts_index', - 'balance', - 'metadata', - 'gas', - 'faucet', - ] + def verify_metadata_phone(self, address, balance=None): + upper_address = strip_0x(address).upper() + f = open(os.path.join( + self.data_dir, + 'new', + upper_address[:2], + upper_address[2:4], + upper_address + '.json', + ), 'r' + ) + o = json.load(f) + f.close() - for k in methods: + p = Person.deserialize(o) + + k = generate_metadata_pointer(p.tel.encode('utf-8'), ':cic.phone') + url = os.path.join(config.get('_META_PROVIDER'), k) + logg.debug('verify metadata phone url {}'.format(url)) + try: + res = urllib.request.urlopen(url) + except urllib.error.HTTPError as e: + raise VerifierError( + '({}) {}'.format(url, e), + 'metadata (phone)', + ) + b = res.read() + address_recovered = json.loads(b.decode('utf-8')) + address_recovered = address_recovered.replace('"', '') + + try: + address = strip_0x(address) + address_recovered = strip_0x(address_recovered) + except ValueError: + raise VerifierError(address_recovered, 'metadata (phone) address {} address recovered {}'.format(address, address_recovered)) + + if address != address_recovered: + raise VerifierError(address_recovered, 'metadata (phone)') + + + def verify_ussd(self, address, balance=None): + upper_address = strip_0x(address).upper() + f = open(os.path.join( + self.data_dir, + 'new', + upper_address[:2], + upper_address[2:4], + upper_address + '.json', + ), 'r' + ) + o = json.load(f) + f.close() + + p = Person.deserialize(o) + phone = p.tel + + session = uuid.uuid4().hex + data = { + 'sessionId': session, + 'serviceCode': config.get('APP_SERVICE_CODE'), + 'phoneNumber': phone, + 'text': config.get('APP_SERVICE_CODE'), + } + + req = urllib.request.Request(config.get('_USSD_PROVIDER')) + data_str = json.dumps(data) + data_bytes = data_str.encode('utf-8') + req.add_header('Content-Type', 'application/json') + req.data = data_bytes + response = urllib.request.urlopen(req) + response_data = response.read().decode('utf-8') + state = response_data[:3] + out = response_data[4:] + m = '{} {}'.format(state, out[:7]) + if m != 'CON Welcome': + raise VerifierError(response_data, 'ussd') + + + def verify(self, address, balance, debug_stem=None): + + for k in active_tests: + s = '{} {}'.format(debug_stem, k) + outfunc(s) try: m = getattr(self, 'verify_{}'.format(k)) m(address, 
balance) -# self.verify_local_key(address) -# self.verify_accounts_index(address) -# self.verify_balance(address, balance) -# self.verify_metadata(address) except VerifierError as e: logline = 'verification {} failed for {}: {}'.format(k, address, str(e)) if self.exit_on_error: @@ -266,10 +412,6 @@ class Verifier: return str(self.state) -class MockClient: - - w3 = None - def main(): global chain_str, block_offset, user_dir @@ -277,7 +419,7 @@ def main(): gas_oracle = OverrideGasOracle(conn=conn, limit=8000000) # Get Token registry address - txf = TxFactory(signer=None, gas_oracle=gas_oracle, nonce_oracle=None, chain_id=chain_spec.chain_id()) + txf = TxFactory(chain_spec, signer=None, gas_oracle=gas_oracle, nonce_oracle=None) tx = txf.template(ZERO_ADDRESS, config.get('CIC_REGISTRY_ADDRESS')) # TODO: replace with cic-eth-registry @@ -291,7 +433,6 @@ def main(): o['params'].append(txf.normalize(tx)) o['params'].append('latest') r = conn.do(o) - print('r {}'.format(r)) token_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r)))) logg.info('found token index address {}'.format(token_index_address)) @@ -320,6 +461,7 @@ def main(): logg.info('found faucet {}'.format(faucet_address)) + # Get Sarafu token address tx = txf.template(ZERO_ADDRESS, token_index_address) data = add_0x(registry_addressof_method) @@ -333,7 +475,6 @@ def main(): o['params'].append(txf.normalize(tx)) o['params'].append('latest') r = conn.do(o) - print('r {}'.format(r)) sarafu_token_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r)))) logg.info('found token address {}'.format(sarafu_token_address)) @@ -348,7 +489,7 @@ def main(): try: address = to_checksum_address(r[0]) #sys.stdout.write('loading balance {} {}'.format(i, address).ljust(200) + "\r") - logg.debug('loading balance {} {}'.format(i, address).ljust(200)) + outfunc('loading balance {} {}'.format(i, address)) #.ljust(200)) except ValueError: break balance = int(r[1].rstrip()) @@ -357,11 +498,10 @@ def main(): f.close() - api = AdminApi(MockClient()) - verifier = Verifier(conn, api, gas_oracle, chain_spec, account_index_address, sarafu_token_address, faucet_address, user_dir, exit_on_error) user_new_dir = os.path.join(user_dir, 'new') + i = 0 for x in os.walk(user_new_dir): for y in x[2]: if y[len(y)-5:] != '.json': @@ -377,7 +517,7 @@ def main(): f.close() u = Person.deserialize(o) - logg.debug('data {}'.format(u.identities['evm'])) + #logg.debug('data {}'.format(u.identities['evm'])) subchain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id()) new_address = u.identities['evm'][subchain_str][0] @@ -388,9 +528,11 @@ def main(): balance = balances[old_address] except KeyError: logg.info('no old balance found for {}, assuming 0'.format(old_address)) - logg.debug('checking {} -> {} = {}'.format(old_address, new_address, balance)) - verifier.verify(new_address, balance) + s = 'checking {}: {} -> {} = {}'.format(i, old_address, new_address, balance) + + verifier.verify(new_address, balance, debug_stem=s) + i += 1 print(verifier) diff --git a/apps/contract-migration/seed_cic_eth.sh b/apps/contract-migration/seed_cic_eth.sh index 0ba44c5b..84d1258c 100755 --- a/apps/contract-migration/seed_cic_eth.sh +++ b/apps/contract-migration/seed_cic_eth.sh @@ -21,8 +21,6 @@ debug='-vv' abi_dir=${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi} gas_amount=100000000000000000000000 token_amount=${gas_amount} -#faucet_amount=1000000000 -faucet_amount=${DEV_FAUCET_AMOUNT:-0} 
env_out_file=${CIC_DATA_DIR}/.env_seed init_level_file=${CIC_DATA_DIR}/.init truncate $env_out_file -s 0 @@ -30,13 +28,15 @@ truncate $env_out_file -s 0 set -e set -a +#pip install --extra-index-url $DEV_PIP_EXTRA_INDEX_URL eth-address-index==0.1.1a7 + # get required addresses from registries DEV_TOKEN_INDEX_ADDRESS=`eth-contract-registry-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_REGISTRY_ADDRESS -f brief TokenRegistry` -DEV_ACCOUNTS_INDEX_ADDRESS=`eth-contract-registry-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_REGISTRY_ADDRESS -f brief AccountRegistry` +DEV_ACCOUNT_INDEX_ADDRESS=`eth-contract-registry-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_REGISTRY_ADDRESS -f brief AccountRegistry` DEV_RESERVE_ADDRESS=`eth-token-index-list -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_TOKEN_INDEX_ADDRESS -f brief SRF` cat <> $env_out_file cic-eth-tag -i $CIC_CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER >&2 echo "add acccounts index writer account as writer on contract" -eth-accounts-index-writer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_ACCOUNTS_INDEX_ADDRESS -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER +eth-accounts-index-writer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_ACCOUNT_INDEX_ADDRESS -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER # Transfer gas to custodial gas provider adddress >&2 echo gift gas to gas gifter diff --git a/docker-compose.yml b/docker-compose.yml index 6fa6e8a1..85dbfbce 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -145,7 +145,7 @@ services: - -c - | if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi - /usr/local/bin/cic-cache-trackerd -vv + /usr/local/bin/cic-cache-trackerd -vv -c /usr/local/etc/cic-cache volumes: - contract-config:/tmp/cic/config/:ro @@ -164,6 +164,7 @@ services: DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres} DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2} DATABASE_DEBUG: 1 + DATABASE_POOL_SIZE: 0 ETH_ABI_DIR: ${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi} CIC_TRUST_ADDRESS: ${DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER:-0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C} CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996} @@ -233,6 +234,7 @@ services: DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres} DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2} DATABASE_DEBUG: ${DATABASE_DEBUG:-0} + DATABASE_POOL_SIZE: 0 PGPASSWORD: ${DATABASE_PASSWORD:-tralala} CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996} BANCOR_DIR: ${BANCOR_DIR:-/usr/local/share/cic/bancor} @@ -317,8 +319,8 @@ services: CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis} CELERY_RESULT_URL: ${CELERY_RESULT_URL:-redis://redis} TASKS_TRANSFER_CALLBACKS: $TASKS_TRANSFER_CALLBACKS - #DATABASE_DEBUG: ${DATABASE_DEBUG:-false} - DATABASE_DEBUG: 1 + DATABASE_DEBUG: ${DATABASE_DEBUG:-false} + #DATABASE_DEBUG: 1 depends_on: - eth @@ -358,7 +360,9 @@ services: CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis} CELERY_RESULT_URL: ${CELERY_RESULT_URL:-redis://redis} TASKS_TRANSFER_CALLBACKS: $TASKS_TRANSFER_CALLBACKS - CIC_TX_RETRY_DELAY: 15 + CIC_TX_RETRY_DELAY: 60 + BATCH_SIZE: ${RETRIER_BATCH_SIZE:-50} + #DATABASE_DEBUG: 1 depends_on: - eth - postgres @@ -373,7 +377,7 @@ services: - -c - | if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi - ./start_retry.sh -v + ./start_retry.sh -vv # command: "/root/start_retry.sh -q cic-eth -vv" @@ -428,6 +432,7 @@ services: DATABASE_NAME: ${DATABASE_NAME_CIC_NOTIFY:-cic_notify} DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres} 
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2} + DATABASE_POOL_SIZE: 0 PGPASSWORD: ${DATABASE_PASSWORD:-tralala} CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis} CELERY_RESULT_URL: ${CELERY_BROKER_URL:-redis://redis} @@ -444,6 +449,7 @@ services: cic-meta-server: + hostname: meta build: context: apps/ dockerfile: cic-meta/docker/Dockerfile @@ -488,9 +494,10 @@ services: DATABASE_ENGINE: postgresql DATABASE_DRIVER: psycopg2 PGP_PASSPHRASE: merman - SERVER_PORT: 8000 + SERVER_PORT: 9000 + CIC_META_URL: ${CIC_META_URL:-http://meta:8000} ports: - - ${HTTP_PORT_CIC_USSD:-63315}:8000 + - ${HTTP_PORT_CIC_USSD:-63315}:9000 depends_on: - postgres - redis @@ -499,7 +506,7 @@ services: deploy: restart_policy: condition: on-failure - command: "/root/start_uwsgi.sh" + command: "/root/start_uwsgi.sh -vv" cic-ussd-tasker: # image: grassrootseconomics:cic-ussd @@ -514,9 +521,11 @@ services: DATABASE_NAME: cic_ussd DATABASE_ENGINE: postgresql DATABASE_DRIVER: psycopg2 + DATABASE_POOL_SIZE: 0 CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis} CELERY_RESULT_URL: ${CELERY_BROKER_URL:-redis://redis} PGP_PASSPHRASE: merman + CIC_META_URL: ${CIC_META_URL:-http://meta:8000} depends_on: - postgres - redis @@ -525,4 +534,4 @@ services: deploy: restart_policy: condition: on-failure - command: "/root/start_tasker.sh -q cic-ussd" + command: "/root/start_tasker.sh -q cic-ussd -vv" diff --git a/service-configs/.gitkeep b/service-configs/.gitkeep new file mode 100644 index 00000000..e69de29b
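
Note on the verify.py changes above: the new --include/--exclude/--skip-custodial flags reduce to a small set of selection rules. The sketch below restates those rules as a standalone Python function for reference; the function name select_tests and its keyword arguments are illustrative and do not appear in the patch, which applies the same logic inline during argument handling.

# Minimal standalone sketch (not part of the patch) of the verification
# selection rules introduced in apps/contract-migration/scripts/verify.py.
# The function name select_tests and its keyword arguments are illustrative.

custodial_tests = ['local_key', 'gas', 'faucet']
metadata_tests = ['metadata', 'metadata_phone']
eth_tests = ['accounts_index', 'balance']
phone_tests = ['ussd']
all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests


def select_tests(include=None, exclude=None, skip_custodial=False):
    exclude = list(exclude or [])
    include = all_tests if include is None else list(include)

    # unknown names are rejected, mirroring the ValueError raised in verify.py
    for t in exclude + include:
        if t not in all_tests:
            raise ValueError('unknown verification "{}"'.format(t))

    # --skip-custodial folds every custodial check into the exclusions
    if skip_custodial:
        exclude += [t for t in custodial_tests if t not in exclude]

    # whatever remains in the include list and is not excluded gets run
    return [t for t in include if t not in exclude]


# Roughly equivalent to running: verify.py --skip-custodial --exclude ussd
# select_tests(skip_custodial=True, exclude=['ussd'])
# -> ['accounts_index', 'balance', 'metadata', 'metadata_phone']

With no flags given, every check in all_tests runs; the patch additionally only imports cic_eth.api.api_admin when at least one custodial check remains active, so fully non-custodial runs avoid loading the custodial module at all.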