Compare commits

...

3 Commits

6 changed files with 100 additions and 33 deletions

View File

@@ -31,7 +31,10 @@ from cic_eth.db.models.tx import TxCache
 from cic_eth.db.models.nonce import Nonce
 from cic_eth.db.enum import (
     StatusEnum,
+    StatusBits,
     is_alive,
+    is_error_status,
+    status_str,
     )
 from cic_eth.error import InitializationError
 from cic_eth.db.error import TxStateChangeError
@@ -42,6 +45,8 @@ app = celery.current_app
 #logg = logging.getLogger(__file__)
 logg = logging.getLogger()
 
+local_fail = StatusBits.LOCAL_ERROR | StatusBits.NODE_ERROR | StatusBits.UNKNOWN_ERROR
+
 
 class AdminApi:
     """Provides an interface to view and manipulate existing transaction tasks and system runtime settings.
@@ -195,6 +200,7 @@ class AdminApi:
         blocking_tx = None
         blocking_nonce = None
         nonce_otx = 0
+        last_nonce = -1
         for k in txs.keys():
             s_get_tx = celery.signature(
                 'cic_eth.queue.tx.get_tx',
@@ -205,18 +211,25 @@ class AdminApi:
                 )
             tx = s_get_tx.apply_async().get()
             #tx = get_tx(k)
-            logg.debug('checking nonce {}'.format(tx['nonce']))
-            if tx['status'] in [StatusEnum.REJECTED, StatusEnum.FUBAR]:
-                blocking_tx = k
-                blocking_nonce = tx['nonce']
+            logg.debug('checking nonce {} (previous {})'.format(tx['nonce'], last_nonce))
             nonce_otx = tx['nonce']
+            if not is_alive(tx['status']) and tx['status'] & local_fail > 0:
+                logg.info('permanently errored {} nonce {} status {}'.format(k, nonce_otx, status_str(tx['status'])))
+                blocking_tx = k
+                blocking_nonce = nonce_otx
+            elif nonce_otx - last_nonce > 1:
+                logg.error('nonce gap; {} followed {}'.format(nonce_otx, last_nonce))
+                blocking_tx = k
+                blocking_nonce = nonce_otx
+                break
+            last_nonce = nonce_otx
 
-        nonce_cache = Nonce.get(address)
+        #nonce_cache = Nonce.get(address)
         #nonce_w3 = self.w3.eth.getTransactionCount(address, 'pending')
 
         return {
             'nonce': {
-                'network': nonce_cache,
+                #'network': nonce_cache,
                 'queue': nonce_otx,
                 #'cache': nonce_cache,
                 'blocking': blocking_nonce,
@@ -270,16 +283,15 @@ class AdminApi:
     # self.w3.eth.sign(addr, text='666f6f')
 
 
-    def account(self, chain_spec, address, cols=['tx_hash', 'sender', 'recipient', 'nonce', 'block', 'tx_index', 'status', 'network_status', 'date_created'], include_sender=True, include_recipient=True):
+    def account(self, chain_spec, address, include_sender=True, include_recipient=True, renderer=None, w=sys.stdout):
         """Lists locally originated transactions for the given Ethereum address.
 
         Performs a synchronous call to the Celery task responsible for performing the query.
 
         :param address: Ethereum address to return transactions for
         :type address: str, 0x-hex
-        :param cols: Data columns to include
-        :type cols: list of str
         """
+        last_nonce = -1
         s = celery.signature(
             'cic_eth.queue.tx.get_account_tx',
             [
@@ -291,33 +303,45 @@ class AdminApi:
 
         tx_dict_list = []
         for tx_hash in txs.keys():
+            errors = []
             s = celery.signature(
                 'cic_eth.queue.tx.get_tx_cache',
                 [tx_hash],
                 queue=self.queue,
                 )
             tx_dict = s.apply_async().get()
-            if tx_dict['sender'] == address and not include_sender:
-                logg.debug('skipping sender tx {}'.format(tx_dict['tx_hash']))
-                continue
+            if tx_dict['sender'] == address:
+                if tx_dict['nonce'] - last_nonce > 1:
+                    logg.error('nonce gap; {} followed {} for tx {}'.format(tx_dict['nonce'], last_nonce, tx_dict['hash']))
+                    errors.append('nonce')
+                elif tx_dict['nonce'] == last_nonce:
+                    logg.warning('nonce {} duplicate in tx {}'.format(tx_dict['nonce'], tx_dict['hash']))
+                last_nonce = tx_dict['nonce']
+                if not include_sender:
+                    logg.debug('skipping sender tx {}'.format(tx_dict['tx_hash']))
+                    continue
             elif tx_dict['recipient'] == address and not include_recipient:
                 logg.debug('skipping recipient tx {}'.format(tx_dict['tx_hash']))
                 continue
 
-            logg.debug(tx_dict)
             o = {
                 'nonce': tx_dict['nonce'],
                 'tx_hash': tx_dict['tx_hash'],
                 'status': tx_dict['status'],
                 'date_updated': tx_dict['date_updated'],
+                'errors': errors,
                 }
-            tx_dict_list.append(o)
+            if renderer != None:
+                r = renderer(o)
+                w.write(r + '\n')
+            else:
+                tx_dict_list.append(o)
 
         return tx_dict_list
 
 
     # TODO: Add exception upon non-existent tx aswell as invalid tx data to docstring
-    def tx(self, chain_spec, tx_hash=None, tx_raw=None, registry=None):
+    def tx(self, chain_spec, tx_hash=None, tx_raw=None, registry=None, renderer=None, w=sys.stdout):
         """Output local and network details about a given transaction with local origin.
 
         If the transaction hash is given, the raw trasnaction data will be retrieved from the local transaction queue backend. Otherwise the raw transaction data must be provided directly. Only one of transaction hash and transaction data can be passed.
@@ -511,4 +535,9 @@ class AdminApi:
         for p in problems:
             sys.stderr.write('!!!{}\n'.format(p))
 
-        return tx
+        if renderer == None:
+            return tx
+
+        r = renderer(tx)
+        w.write(r + '\n')
+        return None
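
The check added above treats a queued transaction as permanently blocked when it is no longer alive and any of the local failure bits are set on its status. Below is a minimal standalone sketch of how that bitwise test composes; the flag values and the is_alive() stand-in are placeholders chosen for illustration only, not the actual cic_eth.db.enum definitions.

# Standalone illustration of the local_fail bitmask check (placeholder flag values).
import enum

class StatusBits(enum.IntEnum):
    LOCAL_ERROR = 0x100
    NODE_ERROR = 0x200
    UNKNOWN_ERROR = 0x400
    FINAL = 0x1000

local_fail = StatusBits.LOCAL_ERROR | StatusBits.NODE_ERROR | StatusBits.UNKNOWN_ERROR

def is_alive(status):
    # placeholder: a tx counts as dead once a final bit is set
    return status & StatusBits.FINAL == 0

status = StatusBits.FINAL | StatusBits.NODE_ERROR
if not is_alive(status) and status & local_fail > 0:
    print('permanently errored; would be reported as the blocking tx')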

View File

@@ -281,6 +281,7 @@ def send(self, txs, chain_spec_dict):
 
     o = raw(tx_hex)
     conn = RPCConnection.connect(chain_spec, 'default')
    conn.do(o)
+    s_set_sent.apply_async()
 
     tx_tail = txs[1:]

View File

@@ -116,12 +116,16 @@ def render_tx(o, **kwargs):
     return content
 
 
 def render_account(o, **kwargs):
-    return '{} {} {} {}'.format(
+    s = '{} {} {} {}'.format(
         o['date_updated'],
         o['nonce'],
         o['tx_hash'],
         o['status'],
         )
+    if len(o['errors']) > 0:
+        s += ' !{}'.format(','.join(o['errors']))
+
+    return s
 
 
 def render_lock(o, **kwargs):
@@ -158,29 +162,25 @@ def main():
     renderer = render_tx
     if len(config.get('_QUERY')) > 66:
         registry = connect_registry(registry_address, chain_spec, rpc)
-        txs = [admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'), registry=registry)]
+        admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'), registry=registry, renderer=renderer)
     elif len(config.get('_QUERY')) > 42:
         registry = connect_registry(registry_address, chain_spec, rpc)
-        txs = [admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'), registry=registry)]
+        admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'), registry=registry, renderer=renderer)
     elif len(config.get('_QUERY')) == 42:
         registry = connect_registry(registry_address, chain_spec, rpc)
-        txs = admin_api.account(chain_spec, config.get('_QUERY'), include_recipient=False)
+        txs = admin_api.account(chain_spec, config.get('_QUERY'), include_recipient=False, renderer=render_account)
         renderer = render_account
     elif len(config.get('_QUERY')) >= 4 and config.get('_QUERY')[:4] == 'lock':
         t = admin_api.get_lock()
         txs = t.get()
         renderer = render_lock
+        for tx in txs:
+            r = renderer(txs)
+            sys.stdout.write(r + '\n')
     else:
         raise ValueError('cannot parse argument {}'.format(config.get('_QUERY')))
 
-    if len(txs) == 0:
-        logg.info('no matches found')
-    else:
-        if fmt == 'json':
-            sys.stdout.write(json.dumps(txs))
-        else:
-            m = map(renderer, txs)
-            print(*m, sep="\n")
-
 
 if __name__ == '__main__':
     main()
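
The renderer hook wired up here expects a callable that takes the per-transaction dict built in AdminApi.account() and returns a string, which account() then writes to the stream passed as w. A hypothetical alternative renderer following that same contract is sketched below; the field values in the demo are made up.

# Hypothetical JSON renderer for AdminApi.account(); not part of the changeset.
import json
import sys


def render_account_json(o, **kwargs):
    # o is the per-transaction dict assembled in AdminApi.account():
    # nonce, tx_hash, status, date_updated, errors
    return json.dumps(o)


if __name__ == '__main__':
    # stand-in for what account() does with the callback: render, then write to w
    o = {
        'nonce': 42,
        'tx_hash': '0x' + '00' * 32,
        'status': 'PENDING',
        'date_updated': None,
        'errors': ['nonce'],
        }
    sys.stdout.write(render_account_json(o) + '\n')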

View File

@@ -32,7 +32,10 @@ from chainlib.eth.block import (
 from chainlib.eth.hash import keccak256_string_to_hex
 from chainlib.eth.address import to_checksum_address
 from chainlib.eth.erc20 import ERC20
-from chainlib.eth.gas import OverrideGasOracle
+from chainlib.eth.gas import (
+    OverrideGasOracle,
+    balance,
+    )
 from chainlib.eth.tx import TxFactory
 from chainlib.eth.rpc import jsonrpc_template
 from chainlib.eth.error import EthException
@@ -41,6 +44,7 @@ from cic_types.models.person import (
     Person,
     generate_metadata_pointer,
     )
+from erc20_single_shot_faucet import SingleShotFaucet
 
 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()
@@ -127,17 +131,19 @@ class VerifierError(Exception):
 
 class Verifier:
 
     # TODO: what an awful function signature
-    def __init__(self, conn, cic_eth_api, gas_oracle, chain_spec, index_address, token_address, data_dir, exit_on_error=False):
+    def __init__(self, conn, cic_eth_api, gas_oracle, chain_spec, index_address, token_address, faucet_address, data_dir, exit_on_error=False):
         self.conn = conn
         self.gas_oracle = gas_oracle
         self.chain_spec = chain_spec
         self.index_address = index_address
         self.token_address = token_address
+        self.faucet_address = faucet_address
         self.erc20_tx_factory = ERC20(chain_id=chain_spec.chain_id(), gas_oracle=gas_oracle)
         self.tx_factory = TxFactory(chain_id=chain_spec.chain_id(), gas_oracle=gas_oracle)
         self.api = cic_eth_api
         self.data_dir = data_dir
         self.exit_on_error = exit_on_error
+        self.faucet_tx_factory = SingleShotFaucet(chain_id=chain_spec.chain_id(), gas_oracle=gas_oracle)
 
         verifymethods = []
         for k in dir(self):
@@ -182,6 +188,21 @@ class Verifier:
             raise VerifierError((address, r), 'local key')
 
 
+    def verify_gas(self, address, balance_token=None):
+        o = balance(address)
+        r = self.conn.do(o)
+        actual_balance = int(strip_0x(r), 16)
+        if actual_balance == 0:
+            raise VerifierError((address, actual_balance), 'gas')
+
+
+    def verify_faucet(self, address, balance_token=None):
+        o = self.faucet_tx_factory.usable_for(self.faucet_address, address)
+        r = self.conn.do(o)
+        if self.faucet_tx_factory.parse_usable_for(r):
+            raise VerifierError((address, r), 'faucet')
+
+
     def verify_metadata(self, address, balance=None):
         k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), ':cic.person')
         url = os.path.join(meta_url, k)
@@ -220,6 +241,8 @@ class Verifier:
             'accounts_index',
             'balance',
             'metadata',
+            'gas',
+            'faucet',
             ]
 
         for k in methods:
@@ -257,6 +280,7 @@ def main():
     txf = TxFactory(signer=None, gas_oracle=gas_oracle, nonce_oracle=None, chain_id=chain_spec.chain_id())
     tx = txf.template(ZERO_ADDRESS, config.get('CIC_REGISTRY_ADDRESS'))
 
+    # TODO: replace with cic-eth-registry
     registry_addressof_method = keccak256_string_to_hex('addressOf(bytes32)')[:8]
     data = add_0x(registry_addressof_method)
     data += eth_abi.encode_single('bytes32', b'TokenRegistry').hex()
@@ -283,6 +307,18 @@ def main():
     account_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
     logg.info('found account index address {}'.format(account_index_address))
 
+    data = add_0x(registry_addressof_method)
+    data += eth_abi.encode_single('bytes32', b'Faucet').hex()
+    txf.set_code(tx, data)
+
+    o = jsonrpc_template()
+    o['method'] = 'eth_call'
+    o['params'].append(txf.normalize(tx))
+    o['params'].append('latest')
+    r = conn.do(o)
+    faucet_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
+    logg.info('found faucet {}'.format(faucet_address))
+
     # Get Sarafu token address
     tx = txf.template(ZERO_ADDRESS, token_index_address)
@@ -323,7 +359,7 @@ def main():
 
     api = AdminApi(MockClient())
 
-    verifier = Verifier(conn, api, gas_oracle, chain_spec, account_index_address, sarafu_token_address, user_dir, exit_on_error)
+    verifier = Verifier(conn, api, gas_oracle, chain_spec, account_index_address, sarafu_token_address, faucet_address, user_dir, exit_on_error)
 
     user_new_dir = os.path.join(user_dir, 'new')
     for x in os.walk(user_new_dir):
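
The new verify_gas() check goes through chainlib's balance() helper and the verifier's existing RPC connection. The sketch below shows an equivalent raw eth_getBalance call, assuming only a reachable JSON-RPC endpoint; requests and rpc_url are assumptions for the sketch, not part of the script above.

# Standalone sketch of the gas check via a raw JSON-RPC eth_getBalance call.
import requests

def verify_gas(rpc_url, address):
    o = {
        'jsonrpc': '2.0',
        'id': 1,
        'method': 'eth_getBalance',
        'params': [address, 'latest'],
        }
    r = requests.post(rpc_url, json=o).json()
    actual_balance = int(r['result'], 16)
    if actual_balance == 0:
        raise RuntimeError('{} has no gas'.format(address))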

View File

@@ -22,7 +22,7 @@ abi_dir=${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
 gas_amount=100000000000000000000000
 token_amount=${gas_amount}
 #faucet_amount=1000000000
-faucet_amount=0
+faucet_amount=${DEV_FAUCET_AMOUNT:-0}
 env_out_file=${CIC_DATA_DIR}/.env_seed
 init_level_file=${CIC_DATA_DIR}/.init
 truncate $env_out_file -s 0

View File

@@ -102,6 +102,7 @@ services:
       CELERY_RESULT_URL: ${CELERY_RESULT_URL:-redis://redis:6379}
       DEV_PIP_EXTRA_INDEX_URL: ${DEV_PIP_EXTRA_INDEX_URL:-https://pip.grassrootseconomics.net:8433}
       RUN_MASK: ${RUN_MASK:-0} # bit flags; 1: contract migrations 2: seed data
+      DEV_FAUCET_AMOUNT: ${DEV_FAUCET_AMOUNT:-0}
     command: ["./run_job.sh"]
     #command: ["./reset.sh"]
     depends_on: