commit b65ab8a0ca
parent 299385f320

    Rehabilitate transfer, approve

    Signed-off-by: nolash <dev@holbrook.no>
@@ -17,8 +17,7 @@ RUN apt-get update && \
 
 # Copy shared requirements from top of mono-repo
 RUN echo "copying root req file ${root_requirement_file}"
-COPY $root_requirement_file .
-RUN pip install -r $root_requirement_file $pip_extra_index_url_flag
+RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a44
 
 COPY cic-cache/requirements.txt ./
 COPY cic-cache/setup.cfg \
@@ -4,7 +4,8 @@ import logging
 
 # third-party imports
 import celery
-from cic_registry import zero_address
+from chainlib.eth.constant import ZERO_ADDRESS
+from chainlib.chain import ChainSpec
 
 # local imports
 from cic_eth.db.enum import LockEnum
@@ -19,7 +20,7 @@ celery_app = celery.current_app
 logg = logging.getLogger()
 
 @celery_app.task(base=CriticalSQLAlchemyTask)
-def lock(chained_input, chain_str, address=zero_address, flags=LockEnum.ALL, tx_hash=None):
+def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL, tx_hash=None):
     """Task wrapper to set arbitrary locks
 
     :param chain_str: Chain spec string representation
@@ -31,13 +32,14 @@ def lock(chained_input, chain_str, address=zero_address, flags=LockEnum.ALL, tx_
     :returns: New lock state for address
     :rtype: number
     """
+    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     r = Lock.set(chain_str, flags, address=address, tx_hash=tx_hash)
     logg.debug('Locked {} for {}, flag now {}'.format(flags, address, r))
     return chained_input
 
 
 @celery_app.task(base=CriticalSQLAlchemyTask)
-def unlock(chained_input, chain_str, address=zero_address, flags=LockEnum.ALL):
+def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL):
     """Task wrapper to reset arbitrary locks
 
     :param chain_str: Chain spec string representation
@@ -49,13 +51,14 @@ def unlock(chained_input, chain_str, address=zero_address, flags=LockEnum.ALL):
     :returns: New lock state for address
     :rtype: number
     """
+    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     r = Lock.reset(chain_str, flags, address=address)
     logg.debug('Unlocked {} for {}, flag now {}'.format(flags, address, r))
     return chained_input
 
 
 @celery_app.task(base=CriticalSQLAlchemyTask)
-def lock_send(chained_input, chain_str, address=zero_address, tx_hash=None):
+def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None):
     """Task wrapper to set send lock
 
     :param chain_str: Chain spec string representation
@@ -65,13 +68,14 @@ def lock_send(chained_input, chain_str, address=zero_address, tx_hash=None):
     :returns: New lock state for address
     :rtype: number
     """
+    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
     logg.debug('Send locked for {}, flag now {}'.format(address, r))
     return chained_input
 
 
 @celery_app.task(base=CriticalSQLAlchemyTask)
-def unlock_send(chained_input, chain_str, address=zero_address):
+def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
     """Task wrapper to reset send lock
 
     :param chain_str: Chain spec string representation
@@ -81,13 +85,14 @@ def unlock_send(chained_input, chain_str, address=zero_address):
     :returns: New lock state for address
     :rtype: number
     """
+    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     r = Lock.reset(chain_str, LockEnum.SEND, address=address)
     logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
     return chained_input
 
 
 @celery_app.task(base=CriticalSQLAlchemyTask)
-def lock_queue(chained_input, chain_str, address=zero_address, tx_hash=None):
+def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None):
     """Task wrapper to set queue direct lock
 
     :param chain_str: Chain spec string representation
@@ -97,13 +102,14 @@ def lock_queue(chained_input, chain_str, address=zero_address, tx_hash=None):
     :returns: New lock state for address
     :rtype: number
    """
+    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
     logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
     return chained_input
 
 
 @celery_app.task(base=CriticalSQLAlchemyTask)
-def unlock_queue(chained_input, chain_str, address=zero_address):
+def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
     """Task wrapper to reset queue direct lock
 
     :param chain_str: Chain spec string representation
@@ -113,18 +119,23 @@ def unlock_queue(chained_input, chain_str, address=zero_address):
     :returns: New lock state for address
     :rtype: number
     """
+    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
     logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
     return chained_input
 
 
 @celery_app.task(base=CriticalSQLAlchemyTask)
-def check_lock(chained_input, chain_str, lock_flags, address=None):
+def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
+    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
     session = SessionBase.create_session()
-    r = Lock.check(chain_str, lock_flags, address=zero_address, session=session)
+    r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS, session=session)
     if address != None:
         r |= Lock.check(chain_str, lock_flags, address=address, session=session)
    if r > 0:
         logg.debug('lock check {} has match {} for {}'.format(lock_flags, r, address))
+        session.close()
         raise LockedError(r)
+    session.flush()
+    session.close()
     return chained_input
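Note on the hunks above: every admin/ctrl task now receives the chain spec as a plain dict (chain_spec_dict) and rebuilds the string form internally with ChainSpec.from_dict(). A minimal caller sketch, with an illustrative chain string and broker setup (both are assumptions, not part of this commit):

    import celery
    from chainlib.chain import ChainSpec

    celery_app = celery.Celery(broker='redis://', backend='redis://')  # assumed broker/backend
    chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')  # placeholder chain string

    s_unlock = celery.signature(
        'cic_eth.admin.ctrl.unlock',
        [
            None,                 # chained_input passthrough
            chain_spec.asdict(),  # serializable chain spec, rebuilt in the task with ChainSpec.from_dict()
            '0x' + '00' * 20,     # address; the ZERO_ADDRESS default means the lock applies to all addresses
            ],
        queue='cic-eth',
        )
    t = s_unlock.apply_async()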
@@ -1,25 +1,30 @@
 # standard imports
 import logging
 
-# third-party imports
+# external imports
 import celery
-from cic_registry.chain import ChainSpec
+from chainlib.chain import ChainSpec
+from chainlib.eth.tx import unpack
 
 # local imports
 from cic_eth.db.models.base import SessionBase
 from cic_eth.db.models.otx import Otx
 from cic_eth.db.models.tx import TxCache
 from cic_eth.db.models.nonce import Nonce
-from cic_eth.admin.ctrl import lock_send
-from cic_eth.admin.ctrl import unlock_send
-from cic_eth.admin.ctrl import lock_queue
-from cic_eth.admin.ctrl import unlock_queue
-from cic_eth.queue.tx import get_tx
-from cic_eth.queue.tx import set_cancel
+from cic_eth.admin.ctrl import (
+    lock_send,
+    unlock_send,
+    lock_queue,
+    unlock_queue,
+    )
+from cic_eth.queue.tx import (
+    get_tx,
+    set_cancel,
+    )
 from cic_eth.queue.tx import create as queue_create
-from cic_eth.eth.util import unpack_signed_raw_tx
-from cic_eth.eth.task import sign_tx
-from cic_eth.eth.task import create_check_gas_and_send_task
+from cic_eth.eth.gas import (
+    create_check_gas_task,
+    )
 
 celery_app = celery.current_app
 logg = logging.getLogger()
@@ -46,7 +51,7 @@ def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
     chain_spec = ChainSpec.from_chain_str(chain_str)
     tx_brief = get_tx(tx_hash_orig_hex)
     tx_raw = bytes.fromhex(tx_brief['signed_tx'][2:])
-    tx = unpack_signed_raw_tx(tx_raw, chain_spec.chain_id())
+    tx = unpack(tx_raw, chain_spec.chain_id())
     nonce = tx_brief['nonce']
     address = tx['from']
 
@@ -67,7 +72,7 @@ def shift_nonce(self, chain_str, tx_hash_orig_hex, delta=1):
     txs = []
     for otx in otxs:
         tx_raw = bytes.fromhex(otx.signed_tx[2:])
-        tx_new = unpack_signed_raw_tx(tx_raw, chain_spec.chain_id())
+        tx_new = unpack(tx_raw, chain_spec.chain_id())
 
         tx_previous_hash_hex = tx_new['hash']
         tx_previous_nonce = tx_new['nonce']
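For reference, the replacement helper used here is chainlib's unpack(), which takes the raw signed transaction bytes plus a chain id and returns a dict with fields such as 'from', 'nonce' and 'hash'. A small illustrative wrapper (the default chain string is a placeholder):

    from chainlib.chain import ChainSpec
    from chainlib.eth.tx import unpack

    def describe_signed_tx(signed_tx_hex, chain_str='evm:bloxberg:8996'):
        chain_spec = ChainSpec.from_chain_str(chain_str)
        tx_raw = bytes.fromhex(signed_tx_hex[2:])       # strip the leading 0x
        tx = unpack(tx_raw, chain_spec.chain_id())
        return (tx['from'], tx['nonce'], tx['hash'])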
@@ -2,14 +2,26 @@
 import logging
 import sys
 
-# third-party imports
+# external imports
 import celery
-import web3
-from cic_registry import zero_address
-from cic_registry import zero_content
-from cic_registry import CICRegistry
-from crypto_dev_signer.eth.web3ext import Web3 as Web3Ext
-from cic_registry.error import UnknownContractError
+from chainlib.eth.constant import (
+    ZERO_ADDRESS,
+    )
+from cic_eth_registry import CICRegistry
+from cic_eth_registry.error import UnknownContractError
+from chainlib.eth.address import to_checksum_address
+from chainlib.eth.contract import code
+from chainlib.eth.tx import (
+    transaction,
+    receipt,
+    unpack,
+    )
+from chainlib.hash import keccak256_hex_to_hex
+from hexathon import (
+    strip_0x,
+    add_0x,
+    )
+from chainlib.eth.gas import balance
 
 # local imports
 from cic_eth.db.models.base import SessionBase
@@ -23,9 +35,7 @@ from cic_eth.db.enum import (
     )
 from cic_eth.error import InitializationError
 from cic_eth.db.error import TxStateChangeError
-from cic_eth.eth.rpc import RpcClient
 from cic_eth.queue.tx import get_tx
-from cic_eth.eth.util import unpack_signed_raw_tx
 
 app = celery.current_app
 
@@ -41,19 +51,20 @@ class AdminApi:
     :param queue: Name of worker queue to submit tasks to
     :type queue: str
     """
-    def __init__(self, rpc_client, queue='cic-eth'):
-        self.rpc_client = rpc_client
-        self.w3 = rpc_client.w3
+    def __init__(self, rpc, queue='cic-eth', call_address=ZERO_ADDRESS):
+        self.rpc = rpc
         self.queue = queue
+        self.call_address = call_address
 
 
     def unlock(self, chain_spec, address, flags=None):
         s_unlock = celery.signature(
             'cic_eth.admin.ctrl.unlock',
             [
-                str(chain_spec),
-                flags,
+                None,
+                chain_spec.asdict(),
                 address,
+                flags,
                 ],
             queue=self.queue,
             )
@@ -64,9 +75,10 @@ class AdminApi:
         s_lock = celery.signature(
             'cic_eth.admin.ctrl.lock',
             [
-                str(chain_spec),
-                flags,
+                None,
+                chain_spec.asdict(),
                 address,
+                flags,
                 ],
             queue=self.queue,
             )
@@ -79,10 +91,10 @@ class AdminApi:
             [],
             queue=self.queue,
             )
-        return s_lock.apply_async().get()
+        return s_lock.apply_async()
 
 
-    def tag_account(self, tag, address_hex):
+    def tag_account(self, tag, address_hex, chain_spec):
         """Persistently associate an address with a plaintext tag.
 
         Some tags are known by the system and is used to resolve addresses to use for certain transactions.
@@ -93,26 +105,28 @@ class AdminApi:
         :type address_hex: str, 0x-hex
         :raises ValueError: Invalid checksum address
         """
-        if not web3.Web3.isChecksumAddress(address_hex):
-            raise ValueError('invalid address')
-        session = SessionBase.create_session()
-        role = AccountRole.set(tag, address_hex)
-        session.add(role)
-        session.commit()
-        session.close()
+        s_tag = celery.signature(
+            'cic_eth.eth.account.set_role',
+            [
+                tag,
+                address_hex,
+                chain_spec.asdict(),
+                ],
+            queue=self.queue,
+            )
+        return s_tag.apply_async()
 
 
-    def have_account(self, address_hex, chain_str):
+    def have_account(self, address_hex, chain_spec):
         s_have = celery.signature(
             'cic_eth.eth.account.have',
             [
                 address_hex,
-                chain_str,
+                chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
-        t = s_have.apply_async()
-        return t.get()
+        return s_have.apply_async()
 
 
     def resend(self, tx_hash_hex, chain_str, in_place=True, unlock=False):
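Behavioural note: lock(), tag_account() and have_account() no longer block on .get(); they return the celery AsyncResult, so a caller that needs the value must collect it itself. Roughly (the AdminApi instance, chain_spec and address are placeholders):

    t = admin_api.have_account('0x' + 'ee' * 20, chain_spec)
    result = t.get()   # blocking collection now happens at the call site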
@@ -197,12 +211,12 @@ class AdminApi:
         blocking_nonce = tx['nonce']
         nonce_otx = tx['nonce']
 
-        #nonce_cache = Nonce.get(address)
-        nonce_w3 = self.w3.eth.getTransactionCount(address, 'pending')
+        nonce_cache = Nonce.get(address)
+        #nonce_w3 = self.w3.eth.getTransactionCount(address, 'pending')
 
         return {
             'nonce': {
-                'network': nonce_w3,
+                'network': nonce_cache,
                 'queue': nonce_otx,
                 #'cache': nonce_cache,
                 'blocking': blocking_nonce,
@@ -213,7 +227,7 @@ class AdminApi:
             }
 
 
-    def fix_nonce(self, address, nonce):
+    def fix_nonce(self, address, nonce, chain_spec):
         s = celery.signature(
             'cic_eth.queue.tx.get_account_tx',
             [
@@ -234,7 +248,7 @@ class AdminApi:
         s_nonce = celery.signature(
             'cic_eth.admin.nonce.shift_nonce',
             [
-                str(self.rpc_client.chain_spec),
+                self.rpc.chain_spec.asdict(),
                 tx_hash_hex,
                 ],
             queue=self.queue
@@ -242,18 +256,18 @@ class AdminApi:
         return s_nonce.apply_async()
 
 
-    # TODO: this is a stub, complete all checks
-    def ready(self):
-        """Checks whether all required initializations have been performed.
-
-        :raises cic_eth.error.InitializationError: At least one setting pre-requisite has not been met.
-        :raises KeyError: An address provided for initialization is not known by the keystore.
-        """
-        addr = AccountRole.get_address('ETH_GAS_PROVIDER_ADDRESS')
-        if addr == zero_address:
-            raise InitializationError('missing account ETH_GAS_PROVIDER_ADDRESS')
-
-        self.w3.eth.sign(addr, text='666f6f')
+    # # TODO: this is a stub, complete all checks
+    # def ready(self):
+    #     """Checks whether all required initializations have been performed.
+    #
+    #     :raises cic_eth.error.InitializationError: At least one setting pre-requisite has not been met.
+    #     :raises KeyError: An address provided for initialization is not known by the keystore.
+    #     """
+    #     addr = AccountRole.get_address('ETH_GAS_PROVIDER_ADDRESS')
+    #     if addr == ZERO_ADDRESS:
+    #         raise InitializationError('missing account ETH_GAS_PROVIDER_ADDRESS')
+    #
+    #     self.w3.eth.sign(addr, text='666f6f')
 
 
     def account(self, chain_spec, address, cols=['tx_hash', 'sender', 'recipient', 'nonce', 'block', 'tx_index', 'status', 'network_status', 'date_created'], include_sender=True, include_recipient=True):
@@ -303,7 +317,7 @@ class AdminApi:
 
 
     # TODO: Add exception upon non-existent tx aswell as invalid tx data to docstring
-    def tx(self, chain_spec, tx_hash=None, tx_raw=None):
+    def tx(self, chain_spec, tx_hash=None, tx_raw=None, registry=None):
         """Output local and network details about a given transaction with local origin.
 
         If the transaction hash is given, the raw trasnaction data will be retrieved from the local transaction queue backend. Otherwise the raw transaction data must be provided directly. Only one of transaction hash and transaction data can be passed.
@@ -324,7 +338,8 @@ class AdminApi:
             ValueError('Specify only one of hash or raw tx')
 
         if tx_raw != None:
-            tx_hash = self.w3.keccak(hexstr=tx_raw).hex()
+            tx_hash = add_0x(keccak256_hex_to_hex(tx_raw))
+            #tx_hash = self.w3.keccak(hexstr=tx_raw).hex()
 
         s = celery.signature(
             'cic_eth.queue.tx.get_tx_cache',
@@ -335,31 +350,35 @@ class AdminApi:
         tx = s.apply_async().get()
 
         source_token = None
-        if tx['source_token'] != zero_address:
+        if tx['source_token'] != ZERO_ADDRESS:
             try:
-                source_token = CICRegistry.get_address(chain_spec, tx['source_token']).contract
+                source_token = registry.by_address(tx['source_token'])
+                #source_token = CICRegistry.get_address(chain_spec, tx['source_token']).contract
             except UnknownContractError:
-                source_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
-                source_token = CICRegistry.add_token(chain_spec, source_token_contract)
+                #source_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
+                #source_token = CICRegistry.add_token(chain_spec, source_token_contract)
                 logg.warning('unknown source token contract {}'.format(tx['source_token']))
 
         destination_token = None
-        if tx['source_token'] != zero_address:
+        if tx['source_token'] != ZERO_ADDRESS:
             try:
-                destination_token = CICRegistry.get_address(chain_spec, tx['destination_token'])
+                #destination_token = CICRegistry.get_address(chain_spec, tx['destination_token'])
+                destination_token = registry.by_address(tx['destination_token'])
             except UnknownContractError:
-                destination_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
-                destination_token = CICRegistry.add_token(chain_spec, destination_token_contract)
+                #destination_token_contract = self.w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=tx['source_token'])
+                #destination_token = CICRegistry.add_token(chain_spec, destination_token_contract)
                 logg.warning('unknown destination token contract {}'.format(tx['destination_token']))
 
         tx['sender_description'] = 'Custodial account'
         tx['recipient_description'] = 'Custodial account'
 
-        c = RpcClient(chain_spec)
-        if len(c.w3.eth.getCode(tx['sender'])) > 0:
+        o = code(tx['sender'])
+        r = self.rpc.do(o)
+        if len(strip_0x(r, allow_empty=True)) > 0:
             try:
-                sender_contract = CICRegistry.get_address(chain_spec, tx['sender'])
-                tx['sender_description'] = 'Contract {}'.format(sender_contract.identifier())
+                sender_contract = registry.by_address(tx['sender'], sender_address=self.call_address)
+                tx['sender_description'] = 'Contract at {}'.format(tx['sender']) #sender_contract)
             except UnknownContractError:
                 tx['sender_description'] = 'Unknown contract'
             except KeyError as e:
@@ -369,7 +388,7 @@ class AdminApi:
             'cic_eth.eth.account.have',
             [
                 tx['sender'],
-                str(chain_spec),
+                chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -382,7 +401,7 @@ class AdminApi:
             'cic_eth.eth.account.role',
             [
                 tx['sender'],
-                str(chain_spec),
+                chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -391,11 +410,13 @@ class AdminApi:
         if role != None:
             tx['sender_description'] = role
 
-
-        if len(c.w3.eth.getCode(tx['recipient'])) > 0:
+        o = code(tx['recipient'])
+        r = self.rpc.do(o)
+        if len(strip_0x(r, allow_empty=True)) > 0:
             try:
-                recipient_contract = CICRegistry.get_address(chain_spec, tx['recipient'])
-                tx['recipient_description'] = 'Contract {}'.format(recipient_contract.identifier())
+                #recipient_contract = CICRegistry.by_address(tx['recipient'])
+                recipient_contract = registry.by_address(tx['recipient'])
+                tx['recipient_description'] = 'Contract at {}'.format(tx['recipient']) #recipient_contract)
             except UnknownContractError as e:
                 tx['recipient_description'] = 'Unknown contract'
             except KeyError as e:
@@ -405,7 +426,7 @@ class AdminApi:
             'cic_eth.eth.account.have',
             [
                 tx['recipient'],
-                str(chain_spec),
+                chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -418,7 +439,7 @@ class AdminApi:
             'cic_eth.eth.account.role',
             [
                 tx['recipient'],
-                str(chain_spec),
+                chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -437,29 +458,40 @@ class AdminApi:
 
         tx['network_status'] = 'Not submitted'
 
+        r = None
         try:
-            c.w3.eth.getTransaction(tx_hash)
+            o = transaction(tx_hash)
+            r = self.rpc.do(o)
+        except Exception as e:
+            logg.warning('(too permissive exception handler, please fix!) {}'.format(e))
             tx['network_status'] = 'Mempool'
-        except web3.exceptions.TransactionNotFound:
-            pass
 
-        try:
-            r = c.w3.eth.getTransactionReceipt(tx_hash)
-            if r.status == 1:
-                tx['network_status'] = 'Confirmed'
-            else:
-                tx['network_status'] = 'Reverted'
-            tx['network_block_number'] = r.blockNumber
-            tx['network_tx_index'] = r.transactionIndex
-            if tx['block_number'] == None:
-                problems.append('Queue is missing block number {} for mined tx'.format(r.blockNumber))
-        except web3.exceptions.TransactionNotFound:
-            pass
+        if r != None:
+            try:
+                o = receipt(tx_hash)
+                r = self.rpc.do(o)
+                logg.debug('h {} o {}'.format(tx_hash, o))
+                if int(strip_0x(r['status'])) == 1:
+                    tx['network_status'] = 'Confirmed'
+                else:
+                    tx['network_status'] = 'Reverted'
+                tx['network_block_number'] = r.blockNumber
+                tx['network_tx_index'] = r.transactionIndex
+                if tx['block_number'] == None:
+                    problems.append('Queue is missing block number {} for mined tx'.format(r.blockNumber))
+            except Exception as e:
+                logg.warning('too permissive exception handler, please fix!')
+                pass
 
-        tx['sender_gas_balance'] = c.w3.eth.getBalance(tx['sender'])
-        tx['recipient_gas_balance'] = c.w3.eth.getBalance(tx['recipient'])
+        o = balance(tx['sender'])
+        r = self.rpc.do(o)
+        tx['sender_gas_balance'] = r
 
-        tx_unpacked = unpack_signed_raw_tx(bytes.fromhex(tx['signed_tx'][2:]), chain_spec.chain_id())
+        o = balance(tx['recipient'])
+        r = self.rpc.do(o)
+        tx['recipient_gas_balance'] = r
 
+        tx_unpacked = unpack(bytes.fromhex(tx['signed_tx'][2:]), chain_spec.chain_id())
         tx['gas_price'] = tx_unpacked['gasPrice']
         tx['gas_limit'] = tx_unpacked['gas']
         tx['data'] = tx_unpacked['data']
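The web3 calls in tx() are replaced throughout with chainlib's two-step pattern: build a JSON-RPC query object with helpers such as code(), balance(), transaction() and receipt(), then execute it with rpc.do(). A hedged, self-contained sketch; the connection class and endpoint are assumptions, only the helper calls mirror this diff:

    from chainlib.eth.connection import EthHTTPConnection  # assumed connection class
    from chainlib.eth.contract import code
    from chainlib.eth.gas import balance
    from hexathon import strip_0x

    rpc = EthHTTPConnection('http://localhost:8545')  # illustrative endpoint

    o = balance('0x' + '00' * 20)   # build the eth_getBalance query for an example address
    r = rpc.do(o)                   # execute it against the node
    print(r)

    o = code('0x' + '00' * 20)      # eth_getCode query; an empty result means no contract code at the address
    r = rpc.do(o)
    print(len(strip_0x(r, allow_empty=True)) > 0)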
@@ -8,12 +8,10 @@ import logging
 
 # external imports
 import celery
-#from cic_registry.chain import ChainSpec
-from cic_registry import CICRegistry
+from cic_eth_registry import CICRegistry
 from chainlib.chain import ChainSpec
 
 # local imports
-from cic_eth.eth.factory import TxFactory
 from cic_eth.db.enum import LockEnum
 
 app = celery.current_app
@@ -87,7 +85,7 @@ class Api:
             'cic_eth.admin.ctrl.check_lock',
             [
                 [from_token_symbol, to_token_symbol],
-                self.chain_str,
+                self.chain_spec.asdict(),
                 LockEnum.QUEUE,
                 from_address,
                 ],
@@ -99,7 +97,7 @@ class Api:
             queue=self.queue,
             )
         s_tokens = celery.signature(
-            'cic_eth.eth.token.resolve_tokens_by_symbol',
+            'cic_eth.eth.erc20.resolve_tokens_by_symbol',
             [
                 self.chain_str,
                 ],
@@ -112,7 +110,7 @@ class Api:
                 target_return,
                 minimum_return,
                 to_address,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -149,7 +147,7 @@ class Api:
             'cic_eth.admin.ctrl.check_lock',
             [
                 [from_token_symbol, to_token_symbol],
-                self.chain_str,
+                self.chain_spec.asdict(),
                 LockEnum.QUEUE,
                 from_address,
                 ],
@@ -161,9 +159,9 @@ class Api:
             queue=self.queue,
             )
         s_tokens = celery.signature(
-            'cic_eth.eth.token.resolve_tokens_by_symbol',
+            'cic_eth.eth.erc20.resolve_tokens_by_symbol',
             [
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -174,7 +172,7 @@ class Api:
                 target_return,
                 minimum_return,
                 from_address,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -208,7 +206,7 @@ class Api:
             'cic_eth.admin.ctrl.check_lock',
             [
                 [token_symbol],
-                self.chain_str,
+                self.chain_spec.asdict(),
                 LockEnum.QUEUE,
                 from_address,
                 ],
@@ -222,19 +220,19 @@ class Api:
             queue=self.queue,
             )
         s_tokens = celery.signature(
-            'cic_eth.eth.token.resolve_tokens_by_symbol',
+            'cic_eth.eth.erc20.resolve_tokens_by_symbol',
             [
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
         s_transfer = celery.signature(
-            'cic_eth.eth.token.transfer',
+            'cic_eth.eth.erc20.transfer',
             [
                 from_address,
                 to_address,
                 value,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
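Putting the renamed tasks together, a transfer through the front-end Api would look roughly like this; the import path, constructor arguments, addresses and token symbol are all illustrative, since the Api constructor itself is not shown in this diff:

    from cic_eth.api import Api  # assumed import path

    api = Api('evm:bloxberg:8996', queue='cic-eth')  # placeholder chain string and queue
    t = api.transfer(
        '0x' + 'ee' * 20,  # from_address (custodial account), placeholder
        '0x' + 'ff' * 20,  # to_address, placeholder
        1024,              # token value in the smallest unit
        'GFT',             # token symbol, resolved by cic_eth.eth.erc20.resolve_tokens_by_symbol
        )
    print(t.get())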
@@ -266,18 +264,18 @@ class Api:
             logg.warning('balance pointlessly called with no callback url')
 
         s_tokens = celery.signature(
-            'cic_eth.eth.token.resolve_tokens_by_symbol',
+            'cic_eth.eth.erc20.resolve_tokens_by_symbol',
             [
                 [token_symbol],
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
         s_balance = celery.signature(
-            'cic_eth.eth.token.balance',
+            'cic_eth.eth.erc20.balance',
             [
                 address,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -293,7 +291,7 @@ class Api:
             'cic_eth.queue.balance.balance_incoming',
             [
                 address,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -301,7 +299,7 @@ class Api:
             'cic_eth.queue.balance.balance_outgoing',
             [
                 address,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -309,16 +307,22 @@ class Api:
             s_balance_incoming.link(s_balance_outgoing)
             last_in_chain = s_balance_outgoing
 
             one = celery.chain(s_tokens, s_balance)
             two = celery.chain(s_tokens, s_balance_incoming)
             three = celery.chain(s_tokens, s_balance_outgoing)
 
             t = None
             if self.callback_param != None:
                 s_result.link(self.callback_success).on_error(self.callback_error)
                 t = celery.chord([one, two, three])(s_result)
+            else:
+                t = celery.chord([one, two, three])(s_result)
         else:
-            t = celery.chord([one, two, three])(s_result)
+            # TODO: Chord is inefficient with only one chain, but assemble_balances must be able to handle different structures in order to avoid chord
+            one = celery.chain(s_tokens, s_balance)
+            if self.callback_param != None:
+                s_result.link(self.callback_success).on_error(self.callback_error)
+            t = celery.chord([one])(s_result)
 
         return t
 
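For readers unfamiliar with the primitives used above: celery.chain runs signatures sequentially, feeding each result forward, while celery.chord runs a group in parallel and hands the collected results to a callback. A toy sketch of that shape with hypothetical task names:

    import celery

    app = celery.Celery(broker='redis://', backend='redis://')  # assumed broker/backend

    @app.task
    def fetch(kind, address):
        # stand-in for the balance / balance_incoming / balance_outgoing tasks
        return (kind, address, 0)

    @app.task
    def assemble(results):
        # receives the list of results from every chain in the chord header
        return dict((r[0], r[2]) for r in results)

    def balances(address):
        one = celery.chain(fetch.s('confirmed', address))
        two = celery.chain(fetch.s('incoming', address))
        three = celery.chain(fetch.s('outgoing', address))
        return celery.chord([one, two, three])(assemble.s())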
@@ -337,7 +341,7 @@ class Api:
             'cic_eth.admin.ctrl.check_lock',
             [
                 password,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 LockEnum.CREATE,
                 ],
             queue=self.queue,
@@ -345,7 +349,7 @@ class Api:
         s_account = celery.signature(
             'cic_eth.eth.account.create',
             [
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -357,14 +361,14 @@ class Api:
         s_nonce = celery.signature(
             'cic_eth.eth.tx.reserve_nonce',
             [
-                'ACCOUNTS_INDEX_WRITER',
+                'ACCOUNT_REGISTRY_WRITER',
                 ],
             queue=self.queue,
             )
         s_register = celery.signature(
             'cic_eth.eth.account.register',
             [
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -387,7 +391,7 @@ class Api:
             'cic_eth.admin.ctrl.check_lock',
             [
                 address,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 LockEnum.QUEUE,
                 ],
             queue=self.queue,
@@ -402,7 +406,7 @@ class Api:
         s_refill = celery.signature(
             'cic_eth.eth.tx.refill_gas',
             [
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -445,7 +449,7 @@ class Api:
         s_brief = celery.signature(
             'cic_eth.ext.tx.tx_collate',
             [
-                self.chain_str,
+                self.chain_spec.asdict(),
                 offset,
                 limit
                 ],
@@ -471,7 +475,7 @@ class Api:
             'cic_eth.ext.tx.list_tx_by_bloom',
             [
                 address,
-                self.chain_str,
+                self.chain_spec.asdict(),
                 ],
             queue=self.queue,
             )
@@ -20,6 +20,7 @@ def upgrade():
     op.create_table(
         'nonce_task_reservation',
         sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('address_hex', sa.String(42), nullable=False),
         sa.Column('nonce', sa.Integer, nullable=False),
         sa.Column('key', sa.String, nullable=False),
         sa.Column('date_created', sa.DateTime, nullable=False),
@@ -20,6 +20,7 @@ def upgrade():
     op.create_table(
         'nonce_task_reservation',
         sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('address_hex', sa.String(42), nullable=False),
        sa.Column('nonce', sa.Integer, nullable=False),
         sa.Column('key', sa.String, nullable=False),
         sa.Column('date_created', sa.DateTime, nullable=False),
@@ -24,6 +24,7 @@ def upgrade():
         sa.Column('blockchain', sa.String),
         sa.Column("flags", sa.BIGINT(), nullable=False, default=0),
         sa.Column("date_created", sa.DateTime, nullable=False),
+        sa.Column("otx_id", sa.Integer, nullable=True),
         )
     op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True)
 
@@ -116,6 +116,6 @@ class SessionBase(Model):
     def release_session(session=None):
         session_key = str(id(session))
         if SessionBase.localsessions.get(session_key) != None:
-            logg.debug('destroying session {}'.format(session_key))
+            logg.debug('commit and destroy session {}'.format(session_key))
             session.commit()
             session.close()
@@ -4,7 +4,7 @@ import logging
 
 # third-party imports
 from sqlalchemy import Column, String, Integer, DateTime, ForeignKey
-from cic_registry import zero_address
+from chainlib.eth.constant import ZERO_ADDRESS
 
 # local imports
 from cic_eth.db.models.base import SessionBase
@@ -35,7 +35,7 @@ class Lock(SessionBase):
 
 
     @staticmethod
-    def set(chain_str, flags, address=zero_address, session=None, tx_hash=None):
+    def set(chain_str, flags, address=ZERO_ADDRESS, session=None, tx_hash=None):
         """Sets flags associated with the given address and chain.
 
         If a flags entry does not exist it is created.
@@ -88,7 +88,7 @@ class Lock(SessionBase):
 
 
     @staticmethod
-    def reset(chain_str, flags, address=zero_address, session=None):
+    def reset(chain_str, flags, address=ZERO_ADDRESS, session=None):
         """Resets flags associated with the given address and chain.
 
         If the resulting flags entry value is 0, the entry will be deleted.
@@ -132,7 +132,7 @@ class Lock(SessionBase):
 
 
     @staticmethod
-    def check(chain_str, flags, address=zero_address, session=None):
+    def check(chain_str, flags, address=ZERO_ADDRESS, session=None):
         """Checks whether all given flags are set for given address and chain.
 
         Does not validate the address against any other tables or components.
@@ -55,6 +55,20 @@ class Nonce(SessionBase):
         conn.execute("UPDATE nonce set nonce = {} WHERE address_hex = '{}'".format(nonce, address))
 
 
+    @staticmethod
+    def __inc(conn, address):
+        #conn.execute("UPDATE nonce set nonce = nonce + 1 WHERE address_hex = '{}'".format(address))
+        q = conn.query(Nonce)
+        q = q.filter(Nonce.address_hex==address)
+        q = q.with_for_update()
+        o = q.first()
+        nonce = o.nonce
+        o.nonce += 1
+        conn.add(o)
+        conn.flush()
+        return nonce
+
+
     @staticmethod
     def __init(conn, address, nonce):
         conn.execute("INSERT INTO nonce (nonce, address_hex) VALUES ({}, '{}')".format(nonce, address))
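The new Nonce.__inc above swaps the explicit LOCK TABLE dance for row-level locking: with_for_update() issues SELECT ... FOR UPDATE, so concurrent workers serialize on the single nonce row instead of the whole table. A standalone sketch of the same idea against an assumed, simplified model (not the cic_eth one):

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.orm import declarative_base

    Base = declarative_base()

    class NonceRow(Base):  # hypothetical stand-in for the cic_eth Nonce model
        __tablename__ = 'nonce'
        id = Column(Integer, primary_key=True)
        address_hex = Column(String(42))
        nonce = Column(Integer)

    def next_nonce(session, address):
        q = session.query(NonceRow)
        q = q.filter(NonceRow.address_hex == address)
        q = q.with_for_update()   # row lock held until the surrounding transaction ends
        o = q.first()
        nonce = o.nonce
        o.nonce += 1
        session.add(o)
        session.flush()
        return nonce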
@@ -78,7 +92,7 @@ class Nonce(SessionBase):
 
     # TODO: Incrementing nonce MUST be done by separate tasks.
     @staticmethod
-    def next(address, initial_if_not_exists=0):
+    def next(address, initial_if_not_exists=0, session=None):
         """Generate next nonce for the given address.
 
         If there is no previous nonce record for the address, the nonce may be initialized to a specified value, or 0 if no value has been given.
@@ -90,28 +104,31 @@ class Nonce(SessionBase):
         :returns: Nonce
         :rtype: number
         """
-        #session = SessionBase.bind_session(session)
+        session = SessionBase.bind_session(session)
 
         #session.begin_nested()
-        conn = Nonce.engine.connect()
-        if Nonce.transactional:
-            conn.execute('BEGIN')
-            conn.execute('LOCK TABLE nonce IN SHARE ROW EXCLUSIVE MODE')
-            logg.debug('locking nonce table for address {}'.format(address))
-        nonce = Nonce.__get(conn, address)
+        #conn = Nonce.engine.connect()
+        #if Nonce.transactional:
+        #    conn.execute('BEGIN')
+        #    conn.execute('LOCK TABLE nonce IN SHARE ROW EXCLUSIVE MODE')
+        #    logg.debug('locking nonce table for address {}'.format(address))
+        #nonce = Nonce.__get(conn, address)
+        nonce = Nonce.__get(session, address)
         logg.debug('get nonce {} for address {}'.format(nonce, address))
         if nonce == None:
             nonce = initial_if_not_exists
             logg.debug('setting default nonce to {} for address {}'.format(nonce, address))
-            Nonce.__init(conn, address, nonce)
-        Nonce.__set(conn, address, nonce+1)
-        if Nonce.transactional:
-            conn.execute('COMMIT')
-            logg.debug('unlocking nonce table for address {}'.format(address))
-        conn.close()
+            #Nonce.__init(conn, address, nonce)
+            Nonce.__init(session, address, nonce)
+        #Nonce.__set(conn, address, nonce+1)
+        nonce = Nonce.__inc(session, address)
+        #if Nonce.transactional:
+        #    conn.execute('COMMIT')
+        #    logg.debug('unlocking nonce table for address {}'.format(address))
+        #conn.close()
         #session.commit()
 
-        #SessionBase.release_session(session)
+        SessionBase.release_session(session)
         return nonce
 
 
@@ -119,67 +136,74 @@ class NonceReservation(SessionBase):
 
     __tablename__ = 'nonce_task_reservation'
 
+    address_hex = Column(String(42))
     nonce = Column(Integer)
     key = Column(String)
     date_created = Column(DateTime, default=datetime.datetime.utcnow)
 
 
     @staticmethod
-    def peek(key, session=None):
+    def peek(address, key, session=None):
         session = SessionBase.bind_session(session)
 
         q = session.query(NonceReservation)
         q = q.filter(NonceReservation.key==key)
+        q = q.filter(NonceReservation.address_hex==address)
         o = q.first()
 
-        nonce = None
+        r = None
         if o != None:
-            nonce = o.nonce
+            r = (o.key, o.nonce)
 
         session.flush()
 
         SessionBase.release_session(session)
 
-        return nonce
+        return r
 
 
     @staticmethod
-    def release(key, session=None):
+    def release(address, key, session=None):
 
         session = SessionBase.bind_session(session)
 
-        nonce = NonceReservation.peek(key, session=session)
+        o = NonceReservation.peek(address, key, session=session)
 
+        if o == None:
+            SessionBase.release_session(session)
+            raise IntegrityError('"release" called on key {} address {} which does not exists'.format(key, address))
 
         q = session.query(NonceReservation)
         q = q.filter(NonceReservation.key==key)
+        q = q.filter(NonceReservation.address_hex==address)
         o = q.first()
-        if o == None:
-            raise IntegrityError('nonce for key {}'.format(nonce))
-        SessionBase.release_session(session)
+        r = (o.key, o.nonce)
 
         session.delete(o)
         session.flush()
 
         SessionBase.release_session(session)
 
-        return nonce
+        return r
 
 
     @staticmethod
     def next(address, key, session=None):
         session = SessionBase.bind_session(session)
 
-        if NonceReservation.peek(key, session) != None:
-            raise IntegrityError('nonce for key {}'.format(key))
+        o = NonceReservation.peek(address, key, session)
+        if o != None:
+            raise IntegrityError('"next" called on nonce for key {} address {} during active key {}'.format(key, address, o[0]))
 
-        nonce = Nonce.next(address)
+        nonce = Nonce.next(address, session=session)
 
         o = NonceReservation()
         o.nonce = nonce
         o.key = key
+        o.address_hex = address
         session.add(o)
+        r = (key, nonce)
 
         SessionBase.release_session(session)
 
-        return nonce
+        return r
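With the address column added, a reservation is now keyed by (address, key), and peek/release/next all return a (key, nonce) tuple instead of a bare nonce. A hedged usage sketch inside one session; the address and task key values are placeholders:

    from cic_eth.db.models.base import SessionBase
    from cic_eth.db.models.nonce import NonceReservation

    session = SessionBase.create_session()
    address = '0x' + 'ee' * 20          # placeholder signer address
    key = 'task-uuid-1234'              # placeholder reservation key

    (key, nonce) = NonceReservation.next(address, key, session=session)
    assert NonceReservation.peek(address, key, session=session) == (key, nonce)
    (key, nonce) = NonceReservation.release(address, key, session=session)
    session.commit()
    session.close()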
@@ -15,7 +15,6 @@ from cic_eth.db.enum import (
     is_error_status,
     )
 from cic_eth.db.error import TxStateChangeError
-#from cic_eth.eth.util import address_hex_from_signed_tx
 
 logg = logging.getLogger()
 
@@ -95,19 +94,16 @@ class Otx(SessionBase):
         :type block: number
         :raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
         """
-        localsession = session
-        if localsession == None:
-            localsession = SessionBase.create_session()
+        session = SessionBase.bind_session(session)
 
         if self.block != None:
+            SessionBase.release_session(session)
             raise TxStateChangeError('Attempted set block {} when block was already {}'.format(block, self.block))
         self.block = block
-        localsession.add(self)
-        localsession.flush()
+        session.add(self)
+        session.flush()
 
-        if session==None:
-            localsession.commit()
-            localsession.close()
+        SessionBase.release_session(session)
 
 
     def waitforgas(self, session=None):
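The set_block() rewrite above standardizes on the bind/release session convention used elsewhere in these models: bind_session() adopts the caller's session or opens a local one, and release_session() only commits and closes sessions it opened itself, so every raise or early return has to release before leaving. An illustrative method in that style (not part of this commit):

    def set_final_example(self, session=None):
        session = SessionBase.bind_session(session)    # adopt the caller's session or open a local one
        if self.status & StatusBits.FINAL:
            SessionBase.release_session(session)       # every exit path must release what was bound
            raise TxStateChangeError('already final ({})'.format(status_str(self.status)))
        self.status |= StatusBits.FINAL
        session.add(self)
        session.flush()
        SessionBase.release_session(session)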
@@ -123,8 +119,10 @@ class Otx(SessionBase):
         session = SessionBase.bind_session(session)
 
         if self.status & StatusBits.FINAL:
+            SessionBase.release_session(session)
             raise TxStateChangeError('GAS_ISSUES cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
         if self.status & StatusBits.IN_NETWORK:
+            SessionBase.release_session(session)
             raise TxStateChangeError('GAS_ISSUES cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status)))
 
         self.__set_status(StatusBits.GAS_ISSUES, session)
@@ -147,8 +145,10 @@ class Otx(SessionBase):
         session = SessionBase.bind_session(session)
 
         if self.status & StatusBits.FINAL:
+            SessionBase.release_session(session)
             raise TxStateChangeError('FUBAR cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
         if is_error_status(self.status):
+            SessionBase.release_session(session)
             raise TxStateChangeError('FUBAR cannot be set on an entry with an error state already set ({})'.format(status_str(self.status)))
 
         self.__set_status(StatusBits.UNKNOWN_ERROR | StatusBits.FINAL, session)
@@ -170,10 +170,13 @@ class Otx(SessionBase):
         session = SessionBase.bind_session(session)
 
         if self.status & StatusBits.FINAL:
+            SessionBase.release_session(session)
             raise TxStateChangeError('REJECTED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
         if self.status & StatusBits.IN_NETWORK:
+            SessionBase.release_session(session)
             raise TxStateChangeError('REJECTED cannot be set on an entry already IN_NETWORK ({})'.format(status_str(self.status)))
         if is_error_status(self.status):
+            SessionBase.release_session(session)
             raise TxStateChangeError('REJECTED cannot be set on an entry with an error state already set ({})'.format(status_str(self.status)))
 
         self.__set_status(StatusBits.NODE_ERROR | StatusBits.FINAL, session)
@ -193,10 +196,13 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
if self.status & StatusBits.IN_NETWORK:
|
if self.status & StatusBits.IN_NETWORK:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry already IN_NETWORK ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry already IN_NETWORK ({})'.format(status_str(self.status)))
|
||||||
if self.status & StatusBits.OBSOLETE:
|
if self.status & StatusBits.OBSOLETE:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry already OBSOLETE ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry already OBSOLETE ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
self.__set_status(StatusBits.OBSOLETE, session)
|
self.__set_status(StatusBits.OBSOLETE, session)
|
||||||
@ -216,6 +222,7 @@ class Otx(SessionBase):
|
|||||||
|
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
self.__set_status(StatusBits.MANUAL, session)
|
self.__set_status(StatusBits.MANUAL, session)
|
||||||
@ -238,8 +245,10 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('RETRY cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('RETRY cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
if not is_error_status(self.status) and not StatusBits.IN_NETWORK & self.status > 0:
|
if not is_error_status(self.status) and not StatusBits.IN_NETWORK & self.status > 0:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('RETRY cannot be set on an entry that has no error ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('RETRY cannot be set on an entry that has no error ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
self.__set_status(StatusBits.QUEUED, session)
|
self.__set_status(StatusBits.QUEUED, session)
|
||||||
@ -264,8 +273,10 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('READYSEND cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('READYSEND cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
if is_error_status(self.status):
|
if is_error_status(self.status):
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('READYSEND cannot be set on an errored state ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('READYSEND cannot be set on an errored state ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
self.__set_status(StatusBits.QUEUED, session)
|
self.__set_status(StatusBits.QUEUED, session)
|
||||||
@ -290,6 +301,7 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('SENT cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('SENT cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
self.__set_status(StatusBits.IN_NETWORK, session)
|
self.__set_status(StatusBits.IN_NETWORK, session)
|
||||||
@ -314,8 +326,10 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('SENDFAIL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('SENDFAIL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
if self.status & StatusBits.IN_NETWORK:
|
if self.status & StatusBits.IN_NETWORK:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('SENDFAIL cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('SENDFAIL cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
self.__set_status(StatusBits.LOCAL_ERROR | StatusBits.DEFERRED, session)
|
self.__set_status(StatusBits.LOCAL_ERROR | StatusBits.DEFERRED, session)
|
||||||
@ -340,9 +354,11 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
raise TxStateChangeError('SENDFAIL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
SessionBase.release_session(session)
|
||||||
|
raise TxStateChangeError('QUEUED cannot be unset on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
if self.status & StatusBits.IN_NETWORK:
|
if self.status & StatusBits.IN_NETWORK:
|
||||||
raise TxStateChangeError('SENDFAIL cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status)))
|
SessionBase.release_session(session)
|
||||||
|
raise TxStateChangeError('QUEUED cannot be unset on an entry with IN_NETWORK state set ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
self.__reset_status(StatusBits.QUEUED, session)
|
self.__reset_status(StatusBits.QUEUED, session)
|
||||||
|
|
||||||
@ -368,8 +384,10 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('REVERTED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('REVERTED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
if not self.status & StatusBits.IN_NETWORK:
|
if not self.status & StatusBits.IN_NETWORK:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('REVERTED cannot be set on an entry without IN_NETWORK state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('REVERTED cannot be set on an entry without IN_NETWORK state set ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
if block != None:
|
if block != None:
|
||||||
@ -397,10 +415,12 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('CANCEL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('CANCEL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
if confirmed:
|
if confirmed:
|
||||||
if self.status > 0 and not self.status & StatusBits.OBSOLETE:
|
if self.status > 0 and not self.status & StatusBits.OBSOLETE:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('CANCEL can only be set on an entry marked OBSOLETE ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('CANCEL can only be set on an entry marked OBSOLETE ({})'.format(status_str(self.status)))
|
||||||
self.__set_status(StatusEnum.CANCELLED, session)
|
self.__set_status(StatusEnum.CANCELLED, session)
|
||||||
else:
|
else:
|
||||||
@ -425,10 +445,13 @@ class Otx(SessionBase):
|
|||||||
session = SessionBase.bind_session(session)
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
if self.status & StatusBits.FINAL:
|
if self.status & StatusBits.FINAL:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('SUCCESS cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('SUCCESS cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||||
if not self.status & StatusBits.IN_NETWORK:
|
if not self.status & StatusBits.IN_NETWORK:
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('SUCCESS cannot be set on an entry without IN_NETWORK state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('SUCCESS cannot be set on an entry without IN_NETWORK state set ({})'.format(status_str(self.status)))
|
||||||
if is_error_status(self.status):
|
if is_error_status(self.status):
|
||||||
|
SessionBase.release_session(session)
|
||||||
raise TxStateChangeError('SUCCESS cannot be set on an entry with error state set ({})'.format(status_str(self.status)))
|
raise TxStateChangeError('SUCCESS cannot be set on an entry with error state set ({})'.format(status_str(self.status)))
|
||||||
|
|
||||||
if block != None:
|
if block != None:
|
||||||
@ -509,22 +532,23 @@ class Otx(SessionBase):
|
|||||||
session.add(l)
|
session.add(l)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: it is not safe to return otx here unless session has been passed in
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def add(nonce, address, tx_hash, signed_tx, session=None):
|
def add(nonce, address, tx_hash, signed_tx, session=None):
|
||||||
localsession = session
|
external_session = session != None
|
||||||
if localsession == None:
|
|
||||||
localsession = SessionBase.create_session()
|
session = SessionBase.bind_session(session)
|
||||||
|
|
||||||
otx = Otx(nonce, address, tx_hash, signed_tx)
|
otx = Otx(nonce, address, tx_hash, signed_tx)
|
||||||
localsession.add(otx)
|
session.add(otx)
|
||||||
localsession.flush()
|
session.flush()
|
||||||
if otx.tracing:
|
if otx.tracing:
|
||||||
otx.__state_log(session=localsession)
|
otx.__state_log(session=session)
|
||||||
localsession.flush()
|
session.flush()
|
||||||
|
|
||||||
if session==None:
|
SessionBase.release_session(session)
|
||||||
localsession.commit()
|
|
||||||
localsession.close()
|
if not external_session:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return otx
|
return otx
|
||||||
|
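Every setter above now calls SessionBase.release_session(session) before raising TxStateChangeError, and Otx.add binds and releases the session instead of juggling a localsession. A minimal caller-side sketch of that pattern, assuming the bind_session/release_session semantics shown in this hunk (create a session only when none is passed in, release only what was created here) and the usual cic_eth.db.models module layout; the nonce, address and transaction values are placeholders.

    from cic_eth.db.models.base import SessionBase
    from cic_eth.db.models.otx import Otx

    def queue_and_flag_gas(nonce, holder_address, tx_hash_hex, tx_signed_raw_hex, session=None):
        # bind_session returns the session passed in, or creates a fresh one
        session = SessionBase.bind_session(session)
        try:
            # with a session argument, Otx.add returns the new Otx record
            otx = Otx.add(nonce, holder_address, tx_hash_hex, tx_signed_raw_hex, session=session)
            # raises TxStateChangeError if FINAL or IN_NETWORK is already set
            otx.waitforgas(session=session)
        finally:
            # assumed to be a no-op when the session was supplied by the caller
            SessionBase.release_session(session)
        return otx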
@ -1,9 +1,9 @@
# standard imports
import logging

-# third-party imports
+# external imports
from sqlalchemy import Column, String, Text
-from cic_registry import zero_address
+from chainlib.eth.constant import ZERO_ADDRESS

# local imports
from .base import SessionBase

@ -42,7 +42,7 @@ class AccountRole(SessionBase):

        role = AccountRole.__get_role(tag, session)

-        r = zero_address
+        r = ZERO_ADDRESS
        if role != None:
            r = role.address_hex

@ -133,4 +133,4 @@ class AccountRole(SessionBase):

    def __init__(self, tag):
        self.tag = tag
-        self.address_hex = zero_address
+        self.address_hex = ZERO_ADDRESS
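AccountRole.get_address still signals an unassigned role by returning the zero address, now taken from chainlib as ZERO_ADDRESS. A short sketch of the check callers are expected to make; the role tag mirrors the one used by the register task later in this commit.

    from chainlib.eth.constant import ZERO_ADDRESS
    from cic_eth.db.models.base import SessionBase
    from cic_eth.db.models.role import AccountRole

    session = SessionBase.create_session()
    writer_address = AccountRole.get_address('ACCOUNT_REGISTRY_WRITER', session=session)
    session.close()
    if writer_address == ZERO_ADDRESS:
        # role not assigned yet; see the set_role task added further down in this commit
        pass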
@ -1,16 +0,0 @@
-"""Ethereum batch functions and utilities
-
-.. moduleauthor:: Louis Holbrook <dev@holbrook.no>
-
-"""
-# standard imports
-import os
-
-# local imports
-from .rpc import RpcClient
-
-registry_extra_identifiers = {
-    'Faucet': '0x{:0<64s}'.format(b'Faucet'.hex()),
-    'TransferApproval': '0x{:0<64s}'.format(b'TransferApproval'.hex()),
-    }
-
@ -1,26 +1,37 @@
# standard imports
import logging

-# third-party imports
-import web3
+# external imports
import celery
-from cic_registry import CICRegistry
-from cic_registry.chain import ChainSpec
-from erc20_single_shot_faucet import Faucet
-from cic_registry import zero_address
-from hexathon import strip_0x
+from erc20_single_shot_faucet import SingleShotFaucet as Faucet
+from chainlib.eth.constant import ZERO_ADDRESS
+from hexathon import (
+        strip_0x,
+        )
+from chainlib.connection import RPCConnection
+from chainlib.eth.sign import (
+        new_account,
+        sign_message,
+        )
+from chainlib.eth.address import to_checksum_address
+from chainlib.eth.tx import (
+        TxFormat,
+        unpack,
+        )
+from chainlib.chain import ChainSpec
+from eth_accounts_index import AccountRegistry
+from sarafu_faucet import MinterFaucet as Faucet

# local import
-from cic_eth.eth import RpcClient
-from cic_eth.eth import registry_extra_identifiers
-from cic_eth.eth.task import sign_and_register_tx
-from cic_eth.eth.task import create_check_gas_and_send_task
-from cic_eth.eth.factory import TxFactory
+from cic_eth_registry import CICRegistry
+from cic_eth.eth.gas import (
+        create_check_gas_task,
+        )
+#from cic_eth.eth.factory import TxFactory
from cic_eth.db.models.nonce import Nonce
from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.tx import TxCache
-from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.error import (
    RoleMissingError,
    SignerError,

@ -28,125 +39,22 @@ from cic_eth.error import (
from cic_eth.task import (
    CriticalSQLAlchemyTask,
    CriticalSQLAlchemyAndSignerTask,
+    BaseTask,
+    )
+from cic_eth.eth.nonce import (
+        CustodialTaskNonceOracle,
+        )
+from cic_eth.queue.tx import (
+        register_tx,
    )

-#logg = logging.getLogger(__name__)
logg = logging.getLogger()
celery_app = celery.current_app


-class AccountTxFactory(TxFactory):
-    """Factory for creating account index contract transactions
-    """
-    def add(
-            self,
-            address,
-            chain_spec,
-            uuid,
-            session=None,
-        ):
-        """Register an Ethereum account address with the on-chain account registry
-
-        :param address: Ethereum account address to add
-        :type address: str, 0x-hex
-        :param chain_spec: Chain to build transaction for
-        :type chain_spec: cic_registry.chain.ChainSpec
-        :returns: Unsigned "AccountRegistry.add" transaction in standard Ethereum format
-        :rtype: dict
-        """
-
-        c = CICRegistry.get_contract(chain_spec, 'AccountRegistry')
-        f = c.function('add')
-        tx_add_buildable = f(
-                address,
-                )
-        gas = c.gas('add')
-        tx_add = tx_add_buildable.buildTransaction({
-            'from': self.address,
-            'gas': gas,
-            'gasPrice': self.gas_price,
-            'chainId': chain_spec.chain_id(),
-            'nonce': self.next_nonce(uuid, session=session),
-            'value': 0,
-            })
-        return tx_add
-
-
-    def gift(
-            self,
-            address,
-            chain_spec,
-            uuid,
-            session=None,
-        ):
-        """Trigger the on-chain faucet to disburse tokens to the provided Ethereum account
-
-        :param address: Ethereum account address to gift to
-        :type address: str, 0x-hex
-        :param chain_spec: Chain to build transaction for
-        :type chain_spec: cic_registry.chain.ChainSpec
-        :returns: Unsigned "Faucet.giveTo" transaction in standard Ethereum format
-        :rtype: dict
-        """
-
-        c = CICRegistry.get_contract(chain_spec, 'Faucet')
-        f = c.function('giveTo')
-        tx_add_buildable = f(address)
-        gas = c.gas('add')
-        tx_add = tx_add_buildable.buildTransaction({
-            'from': self.address,
-            'gas': gas,
-            'gasPrice': self.gas_price,
-            'chainId': chain_spec.chain_id(),
-            'nonce': self.next_nonce(uuid, session=session),
-            'value': 0,
-            })
-        return tx_add
-
-
-def unpack_register(data):
-    """Verifies that a transaction is an "AccountRegister.add" transaction, and extracts call parameters from it.
-
-    :param data: Raw input data from Ethereum transaction.
-    :type data: str, 0x-hex
-    :raises ValueError: Function signature does not match AccountRegister.add
-    :returns: Parsed parameters
-    :rtype: dict
-    """
-    data = strip_0x(data)
-    f = data[:8]
-    if f != '0a3b0a4f':
-        raise ValueError('Invalid account index register data ({})'.format(f))
-
-    d = data[8:]
-    return {
-        'to': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
-        }
-
-
-def unpack_gift(data):
-    """Verifies that a transaction is a "Faucet.giveTo" transaction, and extracts call parameters from it.
-
-    :param data: Raw input data from Ethereum transaction.
-    :type data: str, 0x-hex
-    :raises ValueError: Function signature does not match AccountRegister.add
-    :returns: Parsed parameters
-    :rtype: dict
-    """
-    data = strip_0x(data)
-    f = data[:8]
-    if f != '63e4bff4':
-        raise ValueError('Invalid gift data ({})'.format(f))
-
-    d = data[8:]
-    return {
-        'to': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
-        }
-
-
# TODO: Separate out nonce initialization task
-@celery_app.task(base=CriticalSQLAlchemyAndSignerTask)
-def create(password, chain_str):
+@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
+def create(self, password, chain_spec_dict):
    """Creates and stores a new ethereum account in the keystore.

    The password is passed on to the wallet backend, no encryption is performed in the task worker.

@ -158,19 +66,19 @@ def create(password, chain_str):
    :returns: Ethereum address of newly created account
    :rtype: str, 0x-hex
    """
-    chain_spec = ChainSpec.from_chain_str(chain_str)
-    c = RpcClient(chain_spec)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    a = None
-    try:
-        a = c.w3.eth.personal.new_account(password)
-    except FileNotFoundError:
-        pass
+    conn = RPCConnection.connect(chain_spec, 'signer')
+    o = new_account()
+    a = conn.do(o)
+    conn.disconnect()

    if a == None:
        raise SignerError('create account')
    logg.debug('created account {}'.format(a))

    # Initialize nonce provider record for account
-    session = SessionBase.create_session()
+    session = self.create_session()
    Nonce.init(a, session=session)
    session.commit()
    session.close()
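The rewritten tasks all take the chain spec as a plain dict instead of a string, so it serializes cleanly as a celery argument. A sketch of the round trip, assuming chainlib's ChainSpec.from_chain_str parser; the chain string itself is only an example value.

    from chainlib.chain import ChainSpec

    chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')   # example chain string
    chain_spec_dict = chain_spec.asdict()                        # what callers now pass to create/register/gift
    chain_spec = ChainSpec.from_dict(chain_spec_dict)            # first thing each rewritten task does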
@ -178,7 +86,7 @@ def create(password, chain_str):


@celery_app.task(bind=True, throws=(RoleMissingError,), base=CriticalSQLAlchemyAndSignerTask)
-def register(self, account_address, chain_str, writer_address=None):
+def register(self, account_address, chain_spec_dict, writer_address=None):
    """Creates a transaction to add the given address to the accounts index.

    :param account_address: Ethereum address to add

@ -191,35 +99,52 @@ def register(self, account_address, chain_str, writer_address=None):
    :returns: The account_address input param
    :rtype: str, 0x-hex
    """
-    chain_spec = ChainSpec.from_chain_str(chain_str)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)

-    session = SessionBase.create_session()
+    session = self.create_session()
    if writer_address == None:
-        writer_address = AccountRole.get_address('ACCOUNTS_INDEX_WRITER', session=session)
+        writer_address = AccountRole.get_address('ACCOUNT_REGISTRY_WRITER', session=session)

-    if writer_address == zero_address:
+    if writer_address == ZERO_ADDRESS:
        session.close()
-        raise RoleMissingError(account_address)
+        raise RoleMissingError('writer address for regsistering {}'.format(account_address))

    logg.debug('adding account address {} to index; writer {}'.format(account_address, writer_address))
-    queue = self.request.delivery_info['routing_key']
+    queue = self.request.delivery_info.get('routing_key')

-    c = RpcClient(chain_spec, holder_address=writer_address)
-    txf = AccountTxFactory(writer_address, c)
+    # Retrieve account index address
+    rpc = RPCConnection.connect(chain_spec, 'default')
+    registry = CICRegistry(chain_spec, rpc)
+    call_address = AccountRole.get_address('DEFAULT', session=session)
+    if writer_address == ZERO_ADDRESS:
+        session.close()
+        raise RoleMissingError('call address for resgistering {}'.format(account_address))
+    account_registry_address = registry.by_name('AccountRegistry', sender_address=call_address)

-    tx_add = txf.add(account_address, chain_spec, self.request.root_id, session=session)
+    # Generate and sign transaction
+    rpc_signer = RPCConnection.connect(chain_spec, 'signer')
+    nonce_oracle = CustodialTaskNonceOracle(writer_address, self.request.root_id, session=session) #, default_nonce)
+    gas_oracle = self.create_gas_oracle(rpc, AccountRegistry.gas)
+    account_registry = AccountRegistry(signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=chain_spec.chain_id())
+    (tx_hash_hex, tx_signed_raw_hex) = account_registry.add(account_registry_address, writer_address, account_address, tx_format=TxFormat.RLP_SIGNED)
+    rpc_signer.disconnect()

-    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_add, chain_str, queue, 'cic_eth.eth.account.cache_account_data', session=session)
+    # add transaction to queue
+    cache_task = 'cic_eth.eth.account.cache_account_data'
+    register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session)
+    session.commit()
    session.close()

-    gas_budget = tx_add['gas'] * tx_add['gasPrice']
+    gas_pair = gas_oracle.get_gas(tx_signed_raw_hex)
+    gas_budget = gas_pair[0] * gas_pair[1]
+    logg.debug('register user tx {} {} {}'.format(tx_hash_hex, queue, gas_budget))
+    rpc.disconnect()

-    logg.debug('register user tx {}'.format(tx_hash_hex))
-    s = create_check_gas_and_send_task(
+    s = create_check_gas_task(
            [tx_signed_raw_hex],
-            chain_str,
+            chain_spec,
            writer_address,
-            gas_budget,
+            gas=gas_budget,
            tx_hashes_hex=[tx_hash_hex],
            queue=queue,
            )
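Because register takes the account address as its first argument and returns it again, it chains naturally after create. A hypothetical invocation sketch; the task names follow the module path, while the queue name and example chain string depend on deployment configuration.

    import celery
    from chainlib.chain import ChainSpec

    chain_spec_dict = ChainSpec.from_chain_str('evm:bloxberg:8996').asdict()  # example chain
    s_create = celery.signature(
            'cic_eth.eth.account.create',
            ['example password', chain_spec_dict],
            queue='cic-eth',
            )
    s_register = celery.signature(
            'cic_eth.eth.account.register',
            [chain_spec_dict],   # account_address is filled in from the result of create
            queue='cic-eth',
            )
    celery.chain(s_create, s_register).apply_async()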
@ -228,7 +153,7 @@ def register(self, account_address, chain_str, writer_address=None):


@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
-def gift(self, account_address, chain_str):
+def gift(self, account_address, chain_spec_dict):
    """Creates a transaction to invoke the faucet contract for the given address.

    :param account_address: Ethereum address to give to

@ -238,36 +163,51 @@ def gift(self, account_address, chain_str):
    :returns: Raw signed transaction
    :rtype: list with transaction as only element
    """
-    chain_spec = ChainSpec.from_chain_str(chain_str)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)

    logg.debug('gift account address {} to index'.format(account_address))
-    queue = self.request.delivery_info['routing_key']
+    queue = self.request.delivery_info.get('routing_key')

-    c = RpcClient(chain_spec, holder_address=account_address)
-    txf = AccountTxFactory(account_address, c)
-
-    session = SessionBase.create_session()
-    tx_add = txf.gift(account_address, chain_spec, self.request.root_id, session=session)
-    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_add, chain_str, queue, 'cic_eth.eth.account.cache_gift_data', session=session)
+    # Retrieve account index address
+    session = self.create_session()
+    rpc = RPCConnection.connect(chain_spec, 'default')
+    registry = CICRegistry(chain_spec, rpc)
+    faucet_address = registry.by_name('Faucet', sender_address=self.call_address)
+
+    # Generate and sign transaction
+    rpc_signer = RPCConnection.connect(chain_spec, 'signer')
+    nonce_oracle = CustodialTaskNonceOracle(account_address, self.request.root_id, session=session) #, default_nonce)
+    gas_oracle = self.create_gas_oracle(rpc, Faucet.gas)
+    faucet = Faucet(signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=chain_spec.chain_id())
+    (tx_hash_hex, tx_signed_raw_hex) = faucet.give_to(faucet_address, account_address, account_address, tx_format=TxFormat.RLP_SIGNED)
+    rpc_signer.disconnect()
+
+    # add transaction to queue
+    cache_task = 'cic_eth.eth.account.cache_gift_data'
+    register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task, session=session)
+    session.commit()
    session.close()

-    gas_budget = tx_add['gas'] * tx_add['gasPrice']
+    gas_pair = gas_oracle.get_gas(tx_signed_raw_hex)
+    gas_budget = gas_pair[0] * gas_pair[1]
+    logg.debug('register user tx {} {} {}'.format(tx_hash_hex, queue, gas_budget))
+    rpc.disconnect()

-    logg.debug('gift user tx {}'.format(tx_hash_hex))
-    s = create_check_gas_and_send_task(
+    s = create_check_gas_task(
            [tx_signed_raw_hex],
-            chain_str,
+            chain_spec,
            account_address,
            gas_budget,
            [tx_hash_hex],
            queue=queue,
            )
    s.apply_async()

    return [tx_signed_raw_hex]


@celery_app.task(bind=True)
-def have(self, account, chain_str):
+def have(self, account, chain_spec_dict):
    """Check whether the given account exists in keystore

    :param account: Account to check
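gift follows the same wiring as register, but calls the MinterFaucet's give_to and returns the raw signed transaction as a single-element list. Extending the hypothetical chain from the previous sketch by one link; the names s_create, s_register and chain_spec_dict are carried over from there.

    import celery

    s_gift = celery.signature(
            'cic_eth.eth.account.gift',
            [chain_spec_dict],   # account_address again comes from the preceding task's result
            queue='cic-eth',
            )
    celery.chain(s_create, s_register, s_gift).apply_async()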
@ -277,17 +217,38 @@ def have(self, account, chain_str):
    :returns: Account, or None if not exists
    :rtype: Varies
    """
-    c = RpcClient(account)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    o = sign_message(account, '0x2a')
    try:
-        c.w3.eth.sign(account, text='2a')
-        return account
+        conn = RPCConnection.connect(chain_spec, 'signer')
    except Exception as e:
        logg.debug('cannot sign with {}: {}'.format(account, e))
        return None

+    try:
+        conn.do(o)
+        conn.disconnect()
+        return account
+    except Exception as e:
+        logg.debug('cannot sign with {}: {}'.format(account, e))
+        conn.disconnect()
+        return None
+

-@celery_app.task(bind=True)
-def role(self, account, chain_str):
+@celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
+def set_role(self, tag, address, chain_spec_dict):
+    if not to_checksum_address(address):
+        raise ValueError('invalid checksum address {}'.format(address))
+    session = SessionBase.create_session()
+    role = AccountRole.set(tag, address, session=session)
+    session.add(role)
+    session.commit()
+    session.close()
+    return tag
+
+
+@celery_app.task(bind=True, base=BaseTask)
+def role(self, address, chain_spec_dict):
    """Return account role for address

    :param account: Account to check
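set_role is new in this commit and is what populates the lookups used above (ACCOUNT_REGISTRY_WRITER, DEFAULT). A hypothetical bootstrap sketch; the addresses and queue name are placeholders.

    import celery
    from chainlib.chain import ChainSpec

    chain_spec_dict = ChainSpec.from_chain_str('evm:bloxberg:8996').asdict()  # example chain
    roles = {
        'ACCOUNT_REGISTRY_WRITER': '0x' + '11' * 20,  # placeholder address
        'DEFAULT': '0x' + '22' * 20,                  # placeholder address
        }
    for tag, address in roles.items():
        s = celery.signature(
                'cic_eth.eth.account.set_role',
                [tag, address, chain_spec_dict],
                queue='cic-eth',
                )
        s.apply_async()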
@ -297,14 +258,18 @@ def role(self, account, chain_str):
    :returns: Account, or None if not exists
    :rtype: Varies
    """
-    return AccountRole.role_for(account)
+    session = self.create_session()
+    role_tag = AccountRole.role_for(address, session=session)
+    session.close()
+    return role_tag


-@celery_app.task()
+@celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
def cache_gift_data(
+        self,
        tx_hash_hex,
        tx_signed_raw_hex,
-        chain_str,
+        chain_spec_dict,
    ):
    """Generates and commits transaction cache metadata for a Faucet.giveTo transaction

@ -317,21 +282,20 @@ def cache_gift_data(
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
-    chain_spec = ChainSpec.from_chain_str(chain_str)
-    c = RpcClient(chain_spec)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)

-    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
-    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
-    tx_data = unpack_gift(tx['data'])
+    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
+    tx = unpack(tx_signed_raw_bytes, chain_spec.chain_id())
+    tx_data = Faucet.parse_give_to_request(tx['data'])

-    session = SessionBase.create_session()
+    session = self.create_session()

    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['to'],
-        zero_address,
-        zero_address,
+        ZERO_ADDRESS,
+        ZERO_ADDRESS,
        0,
        0,
        session=session,

@ -344,11 +308,12 @@ def cache_gift_data(
    return (tx_hash_hex, cache_id)


-@celery_app.task(base=CriticalSQLAlchemyTask)
+@celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
def cache_account_data(
+        self,
        tx_hash_hex,
        tx_signed_raw_hex,
-        chain_str,
+        chain_spec_dict,
    ):
    """Generates and commits transaction cache metadata for an AccountsIndex.add transaction

@ -361,21 +326,18 @@ def cache_account_data(
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
-    chain_spec = ChainSpec.from_chain_str(chain_str)
-    c = RpcClient(chain_spec)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)

    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
-    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
-    tx_data = unpack_register(tx['data'])
+    tx = unpack(tx_signed_raw_bytes, chain_id=chain_spec.chain_id())
+    tx_data = AccountRegistry.parse_add_request(tx['data'])

    session = SessionBase.create_session()
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx['to'],
-        zero_address,
-        zero_address,
+        ZERO_ADDRESS,
+        ZERO_ADDRESS,
        0,
        0,
        session=session,
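Both cache tasks build the TxCache record with the same positional layout; reading the call sites in this commit, the order appears to be hash, sender, recipient, source token, destination token, source value, destination value. An annotated re-reading of the call above, offered as an inference from usage rather than documented API.

    tx_cache = TxCache(
        tx_hash_hex,    # transaction hash
        tx['from'],     # sender address
        tx['to'],       # recipient address (the registry contract for an AccountsIndex.add)
        ZERO_ADDRESS,   # source token address; no token moves in a registration
        ZERO_ADDRESS,   # destination token address
        0,              # source token value
        0,              # destination token value
        session=session,
        )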
apps/cic-eth/cic_eth/eth/erc20.py (new file, 305 lines)
@ -0,0 +1,305 @@
+# standard imports
+import logging
+
+# external imports
+import celery
+from chainlib.eth.constant import ZERO_ADDRESS
+from chainlib.chain import ChainSpec
+from chainlib.connection import RPCConnection
+from chainlib.eth.erc20 import ERC20
+from chainlib.eth.tx import (
+        TxFormat,
+        unpack,
+        )
+from cic_eth_registry import CICRegistry
+from cic_eth_registry.erc20 import ERC20Token
+from hexathon import strip_0x
+
+# local imports
+from cic_eth.db.models.tx import TxCache
+from cic_eth.db.models.base import SessionBase
+from cic_eth.db.models.role import AccountRole
+from cic_eth.error import TokenCountError, PermanentTxError, OutOfGasError, NotLocalTxError
+from cic_eth.queue.tx import register_tx
+from cic_eth.eth.gas import (
+        create_check_gas_task,
+        MaxGasOracle,
+        )
+#from cic_eth.eth.factory import TxFactory
+from cic_eth.ext.address import translate_address
+from cic_eth.task import (
+        CriticalSQLAlchemyTask,
+        CriticalWeb3Task,
+        CriticalSQLAlchemyAndSignerTask,
+        )
+from cic_eth.eth.nonce import CustodialTaskNonceOracle
+
+celery_app = celery.current_app
+logg = logging.getLogger()
+
+
+@celery_app.task(base=CriticalWeb3Task)
+def balance(tokens, holder_address, chain_spec_dict):
+    """Return token balances for a list of tokens for given address
+
+    :param tokens: Token addresses
+    :type tokens: list of str, 0x-hex
+    :param holder_address: Token holder address
+    :type holder_address: str, 0x-hex
+    :param chain_spec_dict: Chain spec string representation
+    :type chain_spec_dict: str
+    :return: List of balances
+    :rtype: list of int
+    """
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    rpc = RPCConnection.connect(chain_spec, 'default')
+    caller_address = ERC20Token.caller_address
+
+    for t in tokens:
+        address = t['address']
+        token = ERC20Token(rpc, address)
+        c = ERC20()
+        o = c.balance_of(address, holder_address, sender_address=caller_address)
+        r = rpc.do(o)
+        t['balance_network'] = c.parse_balance(r)
+    rpc.disconnect()
+
+    return tokens
+
+
+@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
+def transfer(self, tokens, holder_address, receiver_address, value, chain_spec_dict):
+    """Transfer ERC20 tokens between addresses
+
+    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.
+
+    :raises TokenCountError: Either none or more then one tokens have been passed as tokens argument
+
+    :param tokens: Token addresses
+    :type tokens: list of str, 0x-hex
+    :param holder_address: Token holder address
+    :type holder_address: str, 0x-hex
+    :param receiver_address: Token receiver address
+    :type receiver_address: str, 0x-hex
+    :param value: Amount of token, in 'wei'
+    :type value: int
+    :param chain_str: Chain spec string representation
+    :type chain_str: str
+    :raises TokenCountError: More than one token is passed in tokens list
+    :return: Transaction hash for tranfer operation
+    :rtype: str, 0x-hex
+    """
+    # we only allow one token, one transfer
+    if len(tokens) != 1:
+        raise TokenCountError
+    t = tokens[0]
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    queue = self.request.delivery_info.get('routing_key')
+
+    rpc = RPCConnection.connect(chain_spec, 'default')
+    rpc_signer = RPCConnection.connect(chain_spec, 'signer')
+
+    session = self.create_session()
+    nonce_oracle = CustodialTaskNonceOracle(holder_address, self.request.root_id, session=session)
+    gas_oracle = self.create_gas_oracle(rpc, MaxGasOracle.gas)
+    c = ERC20(signer=rpc_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle, chain_id=chain_spec.chain_id())
+    (tx_hash_hex, tx_signed_raw_hex) = c.transfer(t['address'], holder_address, receiver_address, value, tx_format=TxFormat.RLP_SIGNED)
+
+    rpc_signer.disconnect()
+    rpc.disconnect()
+
+    cache_task = 'cic_eth.eth.erc20.cache_transfer_data'
+
+    register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session)
+    session.commit()
+    session.close()
+
+    gas_pair = gas_oracle.get_gas(tx_signed_raw_hex)
+    gas_budget = gas_pair[0] * gas_pair[1]
+    logg.debug('transfer tx {} {} {}'.format(tx_hash_hex, queue, gas_budget))
+
+    s = create_check_gas_task(
+            [tx_signed_raw_hex],
+            chain_spec,
+            holder_address,
+            gas_budget,
+            [tx_hash_hex],
+            queue,
+            )
+    s.apply_async()
+    return tx_hash_hex
+
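transfer expects the token list produced by resolve_tokens_by_symbol (defined further down in this file), which is why its first parameter is a list even though only one token is allowed. A hypothetical chained call; token symbol, addresses, value, queue name and chain string are placeholders.

    import celery
    from chainlib.chain import ChainSpec

    chain_spec_dict = ChainSpec.from_chain_str('evm:bloxberg:8996').asdict()  # example chain
    holder_address = '0x' + '11' * 20    # placeholder
    receiver_address = '0x' + '22' * 20  # placeholder

    s_resolve = celery.signature(
            'cic_eth.eth.erc20.resolve_tokens_by_symbol',
            [['GFT'], chain_spec_dict],
            queue='cic-eth',
            )
    s_transfer = celery.signature(
            'cic_eth.eth.erc20.transfer',
            [holder_address, receiver_address, 1024, chain_spec_dict],
            queue='cic-eth',
            )
    celery.chain(s_resolve, s_transfer).apply_async()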
+
+@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
+def approve(self, tokens, holder_address, spender_address, value, chain_spec_dict):
+    """Approve ERC20 transfer on behalf of holder address
+
+    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.
+
+    :raises TokenCountError: Either none or more then one tokens have been passed as tokens argument
+
+    :param tokens: Token addresses
+    :type tokens: list of str, 0x-hex
+    :param holder_address: Token holder address
+    :type holder_address: str, 0x-hex
+    :param receiver_address: Token receiver address
+    :type receiver_address: str, 0x-hex
+    :param value: Amount of token, in 'wei'
+    :type value: int
+    :param chain_str: Chain spec string representation
+    :type chain_str: str
+    :raises TokenCountError: More than one token is passed in tokens list
+    :return: Transaction hash for tranfer operation
+    :rtype: str, 0x-hex
+    """
+    # we only allow one token, one transfer
+    if len(tokens) != 1:
+        raise TokenCountError
+    t = tokens[0]
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    queue = self.request.delivery_info.get('routing_key')
+
+    rpc = RPCConnection.connect(chain_spec, 'default')
+    rpc_signer = RPCConnection.connect(chain_spec, 'signer')
+
+    session = self.create_session()
+    nonce_oracle = CustodialTaskNonceOracle(holder_address, self.request.root_id, session=session)
+    gas_oracle = self.create_gas_oracle(rpc, MaxGasOracle.gas)
+    c = ERC20(signer=rpc_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle, chain_id=chain_spec.chain_id())
+    (tx_hash_hex, tx_signed_raw_hex) = c.approve(t['address'], holder_address, spender_address, value, tx_format=TxFormat.RLP_SIGNED)
+
+    rpc_signer.disconnect()
+    rpc.disconnect()
+
+    cache_task = 'cic_eth.eth.erc20.cache_approve_data'
+
+    register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session)
+    session.commit()
+    session.close()
+
+    gas_pair = gas_oracle.get_gas(tx_signed_raw_hex)
+    gas_budget = gas_pair[0] * gas_pair[1]
+
+    s = create_check_gas_task(
+            [tx_signed_raw_hex],
+            chain_spec,
+            holder_address,
+            gas_budget,
+            [tx_hash_hex],
+            queue,
+            )
+    s.apply_async()
+    return tx_hash_hex
+
+
+@celery_app.task(bind=True, base=CriticalWeb3Task)
+def resolve_tokens_by_symbol(self, token_symbols, chain_spec_dict):
+    """Returns contract addresses of an array of ERC20 token symbols
+
+    :param token_symbols: Token symbols to resolve
+    :type token_symbols: list of str
+    :param chain_str: Chain spec string representation
+    :type chain_str: str
+
+    :return: Respective token contract addresses
+    :rtype: list of str, 0x-hex
+    """
+    tokens = []
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    rpc = RPCConnection.connect(chain_spec, 'default')
+    registry = CICRegistry(chain_spec, rpc)
+    session = self.create_session()
+    sender_address = AccountRole.get_address('DEFAULT', session)
+    session.close()
+    for token_symbol in token_symbols:
+        token_address = registry.by_name(token_symbol, sender_address=sender_address)
+        logg.debug('token {}'.format(token_address))
+        tokens.append({
+            'address': token_address,
+            'converters': [],
+            })
+    rpc.disconnect()
+    return tokens
+
+
+@celery_app.task(base=CriticalSQLAlchemyTask)
+def cache_transfer_data(
+        tx_hash_hex,
+        tx_signed_raw_hex,
+        chain_spec_dict,
+    ):
+    """Helper function for otx_cache_transfer
+
+    :param tx_hash_hex: Transaction hash
+    :type tx_hash_hex: str, 0x-hex
+    :param tx: Signed raw transaction
+    :type tx: str, 0x-hex
+    :returns: Transaction hash and id of cache element in storage backend, respectively
+    :rtype: tuple
+    """
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
+    tx = unpack(tx_signed_raw_bytes, chain_spec.chain_id())
+
+    tx_data = ERC20.parse_transfer_request(tx['data'])
+    recipient_address = tx_data[0]
+    token_value = tx_data[1]
+
+    session = SessionBase.create_session()
+    tx_cache = TxCache(
+        tx_hash_hex,
+        tx['from'],
+        recipient_address,
+        tx['to'],
+        tx['to'],
+        token_value,
+        token_value,
+        session=session,
+        )
+    session.add(tx_cache)
+    session.commit()
+    cache_id = tx_cache.id
+    session.close()
+    return (tx_hash_hex, cache_id)
+
+
+@celery_app.task(base=CriticalSQLAlchemyTask)
+def cache_approve_data(
+        tx_hash_hex,
+        tx_signed_raw_hex,
+        chain_spec_dict,
+    ):
+    """Helper function for otx_cache_approve
+
+    :param tx_hash_hex: Transaction hash
+    :type tx_hash_hex: str, 0x-hex
+    :param tx: Signed raw transaction
+    :type tx: str, 0x-hex
+    :returns: Transaction hash and id of cache element in storage backend, respectively
+    :rtype: tuple
+    """
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
+    tx = unpack(tx_signed_raw_bytes, chain_spec.chain_id())
+
+    tx_data = ERC20.parse_approve_request(tx['data'])
+    recipient_address = tx_data[0]
+    token_value = tx_data[1]
+
+    session = SessionBase.create_session()
+    tx_cache = TxCache(
+        tx_hash_hex,
+        tx['from'],
+        recipient_address,
+        tx['to'],
+        tx['to'],
+        token_value,
+        token_value,
+        session=session,
+        )
+    session.add(tx_cache)
+    session.commit()
+    cache_id = tx_cache.id
+    session.close()
+    return (tx_hash_hex, cache_id)

@ -1,41 +0,0 @@
-# standard imports
-import logging
-
-# local imports
-from cic_registry import CICRegistry
-from cic_eth.eth.nonce import NonceOracle
-from cic_eth.eth import RpcClient
-
-logg = logging.getLogger(__name__)
-
-
-class TxFactory:
-    """Base class for transaction factory classes.
-
-    :param from_address: Signer address to create transaction on behalf of
-    :type from_address: str, 0x-hex
-    :param rpc_client: RPC connection object to use to acquire account nonce if no record in nonce cache
-    :type rpc_client: cic_eth.eth.rpc.RpcClient
-    """
-
-    gas_price = 100
-    """Gas price, updated between batches"""
-
-    def __init__(self, from_address, rpc_client):
-        self.address = from_address
-
-        self.default_nonce = rpc_client.w3.eth.getTransactionCount(from_address, 'pending')
-        self.nonce_oracle = NonceOracle(from_address, self.default_nonce)
-
-        TxFactory.gas_price = rpc_client.gas_price()
-        logg.debug('txfactory instance address {} gas price'.format(self.address, self.gas_price))
-
-
-    def next_nonce(self, uuid, session=None):
-        """Returns the current reserved nonce value, and increments it for next transaction.
-
-        :returns: Nonce
-        :rtype: number
-        """
-        return self.nonce_oracle.next_by_task_uuid(uuid, session=session)
@ -1,75 +1,138 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from chainlib.eth.gas import price
|
||||||
|
from hexathon import strip_0x
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.db.models.role import AccountRole
|
from cic_eth.db.models.role import AccountRole
|
||||||
from cic_eth.db.models.base import SessionBase
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
#
|
||||||
|
#class GasOracle():
|
||||||
|
# """Provides gas pricing for transactions.
|
||||||
|
#
|
||||||
|
# :param w3: Web3 object
|
||||||
|
# :type w3: web3.Web3
|
||||||
|
# """
|
||||||
|
#
|
||||||
|
# __safe_threshold_amount_value = 2000000000 * 60000 * 3
|
||||||
|
# __refill_amount_value = __safe_threshold_amount_value * 5
|
||||||
|
# default_gas_limit = 21000
|
||||||
|
#
|
||||||
|
# def __init__(self, conn):
|
||||||
|
# o = price()
|
||||||
|
# r = conn.do(o)
|
||||||
|
# b = bytes.from_hex(strip_0x(r))
|
||||||
|
# self.gas_price_current = int.from_bytes(b, 'big')
|
||||||
|
#
|
||||||
|
# #self.w3 = w3
|
||||||
|
# #self.gas_price_current = w3.eth.gas_price()
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def safe_threshold_amount(self):
|
||||||
|
# """The gas balance threshold under which a new gas refill transaction should be initiated.
|
||||||
|
#
|
||||||
|
# :returns: Gas token amount
|
||||||
|
# :rtype: number
|
||||||
|
# """
|
||||||
|
# g = GasOracle.__safe_threshold_amount_value
|
||||||
|
# logg.warning('gas safe threshold is currently hardcoded to {}'.format(g))
|
||||||
|
# return g
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def refill_amount(self):
|
||||||
|
# """The amount of gas tokens to send in a gas refill transaction.
|
||||||
|
#
|
||||||
|
# :returns: Gas token amount
|
||||||
|
# :rtype: number
|
||||||
|
# """
|
||||||
|
# g = GasOracle.__refill_amount_value
|
||||||
|
# logg.warning('gas refill amount is currently hardcoded to {}'.format(g))
|
||||||
|
# return g
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def gas_provider(self):
|
||||||
|
# """Gas provider address.
|
||||||
|
#
|
||||||
|
# :returns: Etheerum account address
|
||||||
|
# :rtype: str, 0x-hex
|
||||||
|
# """
|
||||||
|
# session = SessionBase.create_session()
|
||||||
|
# a = AccountRole.get_address('GAS_GIFTER', session)
|
||||||
|
# logg.debug('gasgifter {}'.format(a))
|
||||||
|
# session.close()
|
||||||
|
# return a
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def gas_price(self, category='safe'):
|
||||||
|
# """Get projected gas price to use for a transaction at the current moment.
|
||||||
|
#
|
||||||
|
# When the category parameter is implemented, it can be used to control the priority of a transaction in the network.
|
||||||
|
#
|
||||||
|
# :param category: Bid level category to return price for. Currently has no effect.
|
||||||
|
# :type category: str
|
||||||
|
# :returns: Gas price
|
||||||
|
# :rtype: number
|
||||||
|
# """
|
||||||
|
# #logg.warning('gas price hardcoded to category "safe"')
|
||||||
|
# #g = 100
|
||||||
|
# #return g
|
||||||
|
# return self.gas_price_current
|
||||||
|
|
class GasOracle():
    """Provides gas pricing for transactions.

    :param w3: Web3 object
    :type w3: web3.Web3
    """

    __safe_threshold_amount_value = 2000000000 * 60000 * 3
    __refill_amount_value = __safe_threshold_amount_value * 5
    default_gas_limit = 21000

    def __init__(self, w3):
        self.w3 = w3
        self.gas_price_current = w3.eth.gas_price()


    def safe_threshold_amount(self):
        """The gas balance threshold under which a new gas refill transaction should be initiated.

        :returns: Gas token amount
        :rtype: number
        """
        g = GasOracle.__safe_threshold_amount_value
        logg.warning('gas safe threshold is currently hardcoded to {}'.format(g))
        return g


    def refill_amount(self):
        """The amount of gas tokens to send in a gas refill transaction.

        :returns: Gas token amount
        :rtype: number
        """
        g = GasOracle.__refill_amount_value
        logg.warning('gas refill amount is currently hardcoded to {}'.format(g))
        return g


    def gas_provider(self):
        """Gas provider address.

        :returns: Ethereum account address
        :rtype: str, 0x-hex
        """
        session = SessionBase.create_session()
        a = AccountRole.get_address('GAS_GIFTER', session)
        logg.debug('gasgifter {}'.format(a))
        session.close()
        return a


    def gas_price(self, category='safe'):
        """Get projected gas price to use for a transaction at the current moment.

        When the category parameter is implemented, it can be used to control the priority of a transaction in the network.

        :param category: Bid level category to return price for. Currently has no effect.
        :type category: str
        :returns: Gas price
        :rtype: number
        """
        #logg.warning('gas price hardcoded to category "safe"')
        #g = 100
        #return g
        return self.gas_price_current


class MaxGasOracle:

    def gas(code=None):
        return 8000000


def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
    """Creates a celery task signature for a check_gas task that adds the task to the outgoing queue to be processed by the dispatcher.

    If tx_hashes_hex is not specified, a preceding task chained to check_gas must supply the transaction hashes as its return value.

    :param tx_signed_raws_hex: Raw signed transaction data
    :type tx_signed_raws_hex: list of str, 0x-hex
    :param chain_spec: Chain spec of address to add check gas for
    :type chain_spec: chainlib.chain.ChainSpec
    :param holder_address: Address sending the transactions
    :type holder_address: str, 0x-hex
    :param gas: Gas budget hint for transactions
    :type gas: int
    :param tx_hashes_hex: Transaction hashes
    :type tx_hashes_hex: list of str, 0x-hex
    :param queue: Task queue
    :type queue: str
    :returns: Signature of task chain
    :rtype: celery.Signature
    """
    s_check_gas = None
    if tx_hashes_hex != None:
        s_check_gas = celery.signature(
                'cic_eth.eth.tx.check_gas',
                [
                    tx_hashes_hex,
                    chain_spec.asdict(),
                    tx_signed_raws_hex,
                    holder_address,
                    gas,
                ],
                queue=queue,
                )
    else:
        s_check_gas = celery.signature(
                'cic_eth.eth.tx.check_gas',
                [
                    chain_spec.asdict(),
                    tx_signed_raws_hex,
                    holder_address,
                    gas,
                ],
                queue=queue,
                )
    return s_check_gas
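For orientation, here is a minimal sketch of how the new create_check_gas_task helper might be driven from a calling task. The chain string, queue name and placeholder transaction values are illustrative assumptions, not values taken from this commit.

from chainlib.chain import ChainSpec

from cic_eth.eth.gas import create_check_gas_task

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')   # example chain string, an assumption
holder_address = '0x' + '11' * 20                            # placeholder sender
tx_hash_hex = '0x' + '00' * 32                               # placeholder transaction hash
tx_signed_raw_hex = '0x' + 'f8' + '00' * 100                 # placeholder raw signed transaction

# build the check_gas signature and hand it to the outgoing queue for the dispatcher
s_check_gas = create_check_gas_task(
        [tx_signed_raw_hex],
        chain_spec,
        holder_address,
        gas=8000000,
        tx_hashes_hex=[tx_hash_hex],
        queue='cic-eth',                                     # assumed queue name
        )
s_check_gas.apply_async()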
71	apps/cic-eth/cic_eth/eth/meta.py	Normal file
@ -0,0 +1,71 @@
# extended imports
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.status import Status as TxStatus


class ExtendedTx:

    _default_decimals = 6

    def __init__(self, rpc, tx_hash, chain_spec):
        self.rpc = rpc
        self.chain_spec = chain_spec
        self.hash = tx_hash
        self.sender = None
        self.sender_label = None
        self.recipient = None
        self.recipient_label = None
        self.source_token_value = 0
        self.destination_token_value = 0
        self.source_token = ZERO_ADDRESS
        self.destination_token = ZERO_ADDRESS
        self.source_token_symbol = ''
        self.destination_token_symbol = ''
        self.source_token_decimals = ExtendedTx._default_decimals
        self.destination_token_decimals = ExtendedTx._default_decimals
        self.status = TxStatus.PENDING.name
        self.status_code = TxStatus.PENDING.value


    def set_actors(self, sender, recipient, trusted_declarator_addresses=None):
        self.sender = sender
        self.recipient = recipient
        if trusted_declarator_addresses != None:
            self.sender_label = translate_address(sender, trusted_declarator_addresses, self.chain_spec)
            self.recipient_label = translate_address(recipient, trusted_declarator_addresses, self.chain_spec)


    def set_tokens(self, source, source_value, destination=None, destination_value=None):
        if destination == None:
            destination = source
        if destination_value == None:
            destination_value = source_value
        st = ERC20Token(self.rpc, source)
        dt = ERC20Token(self.rpc, destination)
        self.source_token = source
        self.source_token_symbol = st.symbol
        self.source_token_name = st.name
        self.source_token_decimals = st.decimals
        self.source_token_value = source_value
        self.destination_token = destination
        self.destination_token_symbol = dt.symbol
        self.destination_token_name = dt.name
        self.destination_token_decimals = dt.decimals
        self.destination_token_value = destination_value


    def set_status(self, n):
        if n:
            self.status = TxStatus.ERROR.name
        else:
            self.status = TxStatus.SUCCESS.name
        self.status_code = n


    def to_dict(self):
        o = {}
        for attr in dir(self):
            if attr[0] == '_' or attr in ['set_actors', 'set_tokens', 'set_status', 'to_dict']:
                continue
            o[attr] = getattr(self, attr)
        return o
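A hypothetical way to drive the new ExtendedTx helper is sketched below. The RPC tag, addresses, token address and tx hash are placeholders; the ERC20Token and translate_address imports used inside meta.py are assumed from the surrounding code rather than shown in this hunk.

from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection

from cic_eth.eth.meta import ExtendedTx

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')   # example chain string, an assumption
rpc = RPCConnection.connect(chain_spec, 'default')           # assumes a registered 'default' connection

extx = ExtendedTx(rpc, '0x' + '00' * 32, chain_spec)         # placeholder transaction hash
extx.set_actors('0x' + '11' * 20, '0x' + '22' * 20)          # placeholder sender and recipient
extx.set_tokens('0x' + '33' * 20, 1000000)                   # placeholder token, 1.0 with 6 decimals
extx.set_status(0)                                           # 0 maps to SUCCESS in set_status
tx_metadata = extx.to_dict()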
@ -4,7 +4,7 @@ from cic_eth.db.models.nonce import (
    NonceReservation,
    )


class NonceOracle():
class CustodialTaskNonceOracle():
    """Ensures atomic nonce increments for all transactions across all tasks and threads.

    :param address: Address to generate nonces for
@ -12,20 +12,21 @@ class NonceOracle():
    :param default_nonce: Initial nonce value to use if no nonce cache entry already exists
    :type default_nonce: number
    """
    def __init__(self, address, default_nonce):
    def __init__(self, address, uuid, session=None):
        self.address = address
        self.default_nonce = default_nonce
        self.uuid = uuid
        self.session = session


    def next(self):
    def get_nonce(self):
        return self.next_nonce()


    def next_nonce(self):
        """Get next unique nonce.

        :returns: Nonce
        :rtype: number
        """
        raise AttributeError('this should not be called')
        return Nonce.next(self.address, self.default_nonce)
        r = NonceReservation.release(self.address, self.uuid, session=self.session)
        return r[1]


    def next_by_task_uuid(self, uuid, session=None):
        return NonceReservation.release(uuid, session=session)
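A short sketch of how the renamed oracle is meant to be consumed inside a task, releasing the nonce that a preceding reserve_nonce task put aside under the task uuid; the address and uuid values here are placeholders, not values from this commit.

from cic_eth.db.models.base import SessionBase
from cic_eth.eth.nonce import CustodialTaskNonceOracle

holder_address = '0x' + '11' * 20          # placeholder custodial account
task_uuid = 'aaaa-bbbb-cccc'               # placeholder celery root task id

session = SessionBase.create_session()
nonce_oracle = CustodialTaskNonceOracle(holder_address, task_uuid, session=session)
nonce = nonce_oracle.get_nonce()           # resolves the reservation made for this task
session.close()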
@ -1,39 +0,0 @@
# standard imports
import logging

# local imports
from cic_eth.eth.gas import GasOracle

logg = logging.getLogger()


class RpcClient(GasOracle):
    """RPC wrapper for web3 enabling gas calculation helpers and signer middleware.

    :param chain_spec: Chain spec
    :type chain_spec: cic_registry.chain.ChainSpec
    :param holder_address: DEPRECATED Address of subject of the session.
    :type holder_address: str, 0x-hex
    """

    signer_ipc_path = None
    """Unix socket path to JSONRPC signer and keystore"""

    web3_constructor = None
    """Custom function to build a web3 object with middleware plugins"""


    def __init__(self, chain_spec, holder_address=None):
        (self.provider, w3) = RpcClient.web3_constructor()
        super(RpcClient, self).__init__(w3)
        self.chain_spec = chain_spec
        if holder_address != None:
            self.holder_address = holder_address
        logg.info('gasprice {}'.format(self.gas_price()))


    @staticmethod
    def set_constructor(web3_constructor):
        """Sets the constructor to use for building the web3 object.
        """
        RpcClient.web3_constructor = web3_constructor
@ -1,132 +0,0 @@
# standard imports
import logging

# third-party imports
import celery
from cic_registry.chain import ChainSpec

# local imports
from cic_eth.eth import RpcClient
from cic_eth.queue.tx import create as queue_create
from cic_eth.error import SignerError

celery_app = celery.current_app
logg = celery_app.log.get_default_logger()


@celery_app.task()
def sign_tx(tx, chain_str):
    """Sign a single transaction against the given chain specification.

    :param tx: Transaction in standard Ethereum format
    :type tx: dict
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash and raw signed transaction, respectively
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    c = RpcClient(chain_spec)
    tx_transfer_signed = None
    try:
        tx_transfer_signed = c.w3.eth.sign_transaction(tx)
    except FileNotFoundError:
        pass
    if tx_transfer_signed == None:
        raise SignerError('sign tx')
    logg.debug('tx_transfer_signed {}'.format(tx_transfer_signed))
    tx_hash = c.w3.keccak(hexstr=tx_transfer_signed['raw'])
    tx_hash_hex = tx_hash.hex()
    return (tx_hash_hex, tx_transfer_signed['raw'],)


def sign_and_register_tx(tx, chain_str, queue, cache_task=None, session=None):
    """Signs the provided transaction, and adds it to the transaction queue cache (with status PENDING).

    :param tx: Standard ethereum transaction data
    :type tx: dict
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    :param queue: Task queue
    :type queue: str
    :param cache_task: Cache task to call with signed transaction. If None, no task will be called.
    :type cache_task: str
    :raises: sqlalchemy.exc.DatabaseError
    :returns: Tuple; Transaction hash, signed raw transaction data
    :rtype: tuple
    """
    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, chain_str)

    logg.debug('adding queue tx {}'.format(tx_hash_hex))

    queue_create(
        tx['nonce'],
        tx['from'],
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        session=session,
        )

    if cache_task != None:
        logg.debug('adding cache task {} tx {}'.format(cache_task, tx_hash_hex))
        s_cache = celery.signature(
                cache_task,
                [
                    tx_hash_hex,
                    tx_signed_raw_hex,
                    chain_str,
                ],
                queue=queue,
                )
        s_cache.apply_async()

    return (tx_hash_hex, tx_signed_raw_hex,)


# TODO: rename as we will not be sending task in the chain, this is the responsibility of the dispatcher
def create_check_gas_and_send_task(tx_signed_raws_hex, chain_str, holder_address, gas, tx_hashes_hex=None, queue=None):
    """Creates a celery task signature for a check_gas task that adds the task to the outgoing queue to be processed by the dispatcher.

    If tx_hashes_hex is not specified, a preceding task chained to check_gas must supply the transaction hashes as its return value.

    :param tx_signed_raws_hex: Raw signed transaction data
    :type tx_signed_raws_hex: list of str, 0x-hex
    :param chain_str: Chain spec, string representation
    :type chain_str: str
    :param holder_address: Address sending the transactions
    :type holder_address: str, 0x-hex
    :param gas: Gas budget hint for transactions
    :type gas: int
    :param tx_hashes_hex: Transaction hashes
    :type tx_hashes_hex: list of str, 0x-hex
    :param queue: Task queue
    :type queue: str
    :returns: Signature of task chain
    :rtype: celery.Signature
    """
    s_check_gas = None
    if tx_hashes_hex != None:
        s_check_gas = celery.signature(
                'cic_eth.eth.tx.check_gas',
                [
                    tx_hashes_hex,
                    chain_str,
                    tx_signed_raws_hex,
                    holder_address,
                    gas,
                ],
                queue=queue,
                )
    else:
        s_check_gas = celery.signature(
                'cic_eth.eth.tx.check_gas',
                [
                    chain_str,
                    tx_signed_raws_hex,
                    holder_address,
                    gas,
                ],
                queue=queue,
                )
    return s_check_gas
@ -1,535 +0,0 @@
# standard imports
import logging

# third-party imports
import celery
import requests
import web3
from cic_registry import CICRegistry
from cic_registry import zero_address
from cic_registry.chain import ChainSpec
from hexathon import strip_0x
from chainlib.status import Status as TxStatus

# platform imports
from cic_eth.db.models.tx import TxCache
from cic_eth.db.models.base import SessionBase
from cic_eth.eth import RpcClient
from cic_eth.error import TokenCountError, PermanentTxError, OutOfGasError, NotLocalTxError
from cic_eth.eth.task import sign_and_register_tx
from cic_eth.eth.task import create_check_gas_and_send_task
from cic_eth.eth.factory import TxFactory
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.ext.address import translate_address
from cic_eth.task import (
        CriticalSQLAlchemyTask,
        CriticalWeb3Task,
        CriticalSQLAlchemyAndSignerTask,
        )

celery_app = celery.current_app
logg = logging.getLogger()

# TODO: fetch from cic-contracts instead when implemented
contract_function_signatures = {
        'transfer': 'a9059cbb',
        'approve': '095ea7b3',
        'transferfrom': '23b872dd',
        }
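The hardcoded selectors in the dict above are the first four bytes of the keccak256 hash of the canonical ERC20 method signatures. A small sketch recomputing them for illustration; it assumes the pysha3 keccak module is available, which is not a dependency stated anywhere in this diff.

import sha3  # pysha3, assumed available for this illustration only

def selector(signature):
    # keccak256 over the canonical signature string, first 4 bytes as hex
    h = sha3.keccak_256()
    h.update(signature.encode('utf-8'))
    return h.hexdigest()[:8]

assert selector('transfer(address,uint256)') == 'a9059cbb'
assert selector('approve(address,uint256)') == '095ea7b3'
assert selector('transferFrom(address,address,uint256)') == '23b872dd'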
class TokenTxFactory(TxFactory):
    """Factory for creating ERC20 token transactions.
    """
    def approve(
            self,
            token_address,
            spender_address,
            amount,
            chain_spec,
            uuid,
            session=None,
            ):
        """Create an ERC20 "approve" transaction

        :param token_address: ERC20 contract address
        :type token_address: str, 0x-hex
        :param spender_address: Address to approve spending for
        :type spender_address: str, 0x-hex
        :param amount: Amount of tokens to approve
        :type amount: int
        :param chain_spec: Chain spec
        :type chain_spec: cic_registry.chain.ChainSpec
        :returns: Unsigned "approve" transaction in standard Ethereum format
        :rtype: dict
        """
        source_token = CICRegistry.get_address(chain_spec, token_address)
        source_token_contract = source_token.contract
        tx_approve_buildable = source_token_contract.functions.approve(
            spender_address,
            amount,
            )
        source_token_gas = source_token.gas('transfer')

        tx_approve = tx_approve_buildable.buildTransaction({
            'from': self.address,
            'gas': source_token_gas,
            'gasPrice': self.gas_price,
            'chainId': chain_spec.chain_id(),
            'nonce': self.next_nonce(uuid, session=session),
            })
        return tx_approve


    def transfer(
            self,
            token_address,
            receiver_address,
            value,
            chain_spec,
            uuid,
            session=None,
            ):
        """Create an ERC20 "transfer" transaction

        :param token_address: ERC20 contract address
        :type token_address: str, 0x-hex
        :param receiver_address: Address to send tokens to
        :type receiver_address: str, 0x-hex
        :param amount: Amount of tokens to send
        :type amount: int
        :param chain_spec: Chain spec
        :type chain_spec: cic_registry.chain.ChainSpec
        :returns: Unsigned "transfer" transaction in standard Ethereum format
        :rtype: dict
        """
        source_token = CICRegistry.get_address(chain_spec, token_address)
        source_token_contract = source_token.contract
        transfer_buildable = source_token_contract.functions.transfer(
            receiver_address,
            value,
            )
        source_token_gas = source_token.gas('transfer')

        tx_transfer = transfer_buildable.buildTransaction(
            {
                'from': self.address,
                'gas': source_token_gas,
                'gasPrice': self.gas_price,
                'chainId': chain_spec.chain_id(),
                'nonce': self.next_nonce(uuid, session=session),
            })
        return tx_transfer
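For reference, this is roughly how the transfer and approve tasks further down in this removed module drove the factory; all of the names assigned here are placeholders added for illustration.

holder_address = '0x' + '11' * 20      # placeholder
token_address = '0x' + '22' * 20       # placeholder
spender_address = '0x' + '33' * 20     # placeholder
receiver_address = '0x' + '44' * 20    # placeholder
task_uuid = 'aaaa-bbbb-cccc'           # placeholder celery root id

# c (an RpcClient) and chain_spec are set up exactly as in the transfer/approve tasks below
txf = TokenTxFactory(holder_address, c)
tx_approve = txf.approve(token_address, spender_address, 1000, chain_spec, task_uuid)
tx_transfer = txf.transfer(token_address, receiver_address, 1000, chain_spec, task_uuid)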
def unpack_transfer(data):
    """Verifies that a transaction is an "ERC20.transfer" transaction, and extracts call parameters from it.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match AccountRegister.add
    :returns: Parsed parameters
    :rtype: dict
    """
    data = strip_0x(data)
    f = data[:8]
    if f != contract_function_signatures['transfer']:
        raise ValueError('Invalid transfer data ({})'.format(f))

    d = data[8:]
    return {
        'to': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
        'amount': int(d[64:], 16)
        }


def unpack_transferfrom(data):
    """Verifies that a transaction is an "ERC20.transferFrom" transaction, and extracts call parameters from it.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match AccountRegister.add
    :returns: Parsed parameters
    :rtype: dict
    """
    data = strip_0x(data)
    f = data[:8]
    if f != contract_function_signatures['transferfrom']:
        raise ValueError('Invalid transferFrom data ({})'.format(f))

    d = data[8:]
    return {
        'from': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
        'to': web3.Web3.toChecksumAddress('0x' + d[128-40:128]),
        'amount': int(d[128:], 16)
        }


def unpack_approve(data):
    """Verifies that a transaction is an "ERC20.approve" transaction, and extracts call parameters from it.

    :param data: Raw input data from Ethereum transaction.
    :type data: str, 0x-hex
    :raises ValueError: Function signature does not match AccountRegister.add
    :returns: Parsed parameters
    :rtype: dict
    """
    data = strip_0x(data)
    f = data[:8]
    if f != contract_function_signatures['approve']:
        raise ValueError('Invalid approval data ({})'.format(f))

    d = data[8:]
    return {
        'to': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
        'amount': int(d[64:], 16)
        }
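A worked example of the calldata layout these unpack helpers expect: the 4-byte selector, a 32-byte word holding the left-padded recipient, and a 32-byte big-endian value. The values are made up for illustration.

# uses unpack_transfer as defined above
recipient = '22' * 20                     # placeholder address, without 0x
value = 1024
data = '0x' + 'a9059cbb' + recipient.rjust(64, '0') + '{:064x}'.format(value)

parsed = unpack_transfer(data)
# parsed['to'] == web3.Web3.toChecksumAddress('0x' + recipient)
# parsed['amount'] == 1024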
@celery_app.task(base=CriticalWeb3Task)
def balance(tokens, holder_address, chain_str):
    """Return token balances for a list of tokens for given address

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :return: List of balances
    :rtype: list of int
    """
    #abi = ContractRegistry.abi('ERC20Token')
    chain_spec = ChainSpec.from_chain_str(chain_str)
    c = RpcClient(chain_spec)
    for t in tokens:
        #token = CICRegistry.get_address(t['address'])
        #abi = token.abi()
        #o = c.w3.eth.contract(abi=abi, address=t['address'])
        o = CICRegistry.get_address(chain_spec, t['address']).contract
        b = o.functions.balanceOf(holder_address).call()
        t['balance_network'] = b

    return tokens


@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
def transfer(self, tokens, holder_address, receiver_address, value, chain_str):
    """Transfer ERC20 tokens between addresses

    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.

    :raises TokenCountError: Either none or more than one token has been passed as tokens argument

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param receiver_address: Token receiver address
    :type receiver_address: str, 0x-hex
    :param value: Amount of token, in 'wei'
    :type value: int
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :raises TokenCountError: More than one token is passed in tokens list
    :return: Transaction hash for transfer operation
    :rtype: str, 0x-hex
    """
    # we only allow one token, one transfer
    if len(tokens) != 1:
        raise TokenCountError

    chain_spec = ChainSpec.from_chain_str(chain_str)

    queue = self.request.delivery_info['routing_key']

    # retrieve the token interface
    t = tokens[0]

    c = RpcClient(chain_spec, holder_address=holder_address)

    txf = TokenTxFactory(holder_address, c)

    session = SessionBase.create_session()
    tx_transfer = txf.transfer(t['address'], receiver_address, value, chain_spec, self.request.root_id, session=session)
    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_transfer, chain_str, queue, cache_task='cic_eth.eth.token.otx_cache_transfer', session=session)
    session.close()

    gas_budget = tx_transfer['gas'] * tx_transfer['gasPrice']

    s = create_check_gas_and_send_task(
            [tx_signed_raw_hex],
            chain_str,
            holder_address,
            gas_budget,
            [tx_hash_hex],
            queue,
            )
    s.apply_async()
    return tx_hash_hex
@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
def approve(self, tokens, holder_address, spender_address, value, chain_str):
    """Approve ERC20 transfer on behalf of holder address

    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.

    :raises TokenCountError: Either none or more than one token has been passed as tokens argument

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param receiver_address: Token receiver address
    :type receiver_address: str, 0x-hex
    :param value: Amount of token, in 'wei'
    :type value: int
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :raises TokenCountError: More than one token is passed in tokens list
    :return: Transaction hash for approve operation
    :rtype: str, 0x-hex
    """
    # we only allow one token, one transfer
    if len(tokens) != 1:
        raise TokenCountError

    chain_spec = ChainSpec.from_chain_str(chain_str)

    queue = self.request.delivery_info['routing_key']

    # retrieve the token interface
    t = tokens[0]

    c = RpcClient(chain_spec, holder_address=holder_address)

    txf = TokenTxFactory(holder_address, c)

    session = SessionBase.create_session()
    tx_transfer = txf.approve(t['address'], spender_address, value, chain_spec, self.request.root_id, session=session)
    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_transfer, chain_str, queue, cache_task='cic_eth.eth.token.otx_cache_approve', session=session)
    session.close()

    gas_budget = tx_transfer['gas'] * tx_transfer['gasPrice']

    s = create_check_gas_and_send_task(
            [tx_signed_raw_hex],
            chain_str,
            holder_address,
            gas_budget,
            [tx_hash_hex],
            queue,
            )
    s.apply_async()
    return tx_hash_hex
@celery_app.task(base=CriticalWeb3Task)
def resolve_tokens_by_symbol(token_symbols, chain_str):
    """Returns contract addresses of an array of ERC20 token symbols

    :param token_symbols: Token symbols to resolve
    :type token_symbols: list of str
    :param chain_str: Chain spec string representation
    :type chain_str: str

    :return: Respective token contract addresses
    :rtype: list of str, 0x-hex
    """
    tokens = []
    chain_spec = ChainSpec.from_chain_str(chain_str)
    for token_symbol in token_symbols:
        token = CICRegistry.get_token(chain_spec, token_symbol)
        tokens.append({
            'address': token.address(),
            'converters': [],
            })
    return tokens
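Since the transfer and approve tasks above take a token list as their first argument precisely so they can be chained behind this symbol resolver, a typical invocation looked roughly like the sketch below; the symbol, addresses, value, queue name and chain string are placeholders.

import celery

queue = 'cic-eth'                         # assumed queue name
chain_str = 'evm:bloxberg:8996'           # example chain spec string
holder_address = '0x' + '11' * 20         # placeholder
receiver_address = '0x' + '22' * 20       # placeholder

s_resolve = celery.signature(
        'cic_eth.eth.token.resolve_tokens_by_symbol',
        [
            ['GFT'],                      # placeholder token symbol
            chain_str,
        ],
        queue=queue,
        )
s_transfer = celery.signature(
        'cic_eth.eth.token.transfer',
        [
            holder_address,
            receiver_address,
            1000000,                      # placeholder value in token 'wei'
            chain_str,
        ],
        queue=queue,
        )
# the resolver's return value (the token list) becomes the first argument of transfer
s_resolve.link(s_transfer)
s_resolve.apply_async()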
@celery_app.task(base=CriticalSQLAlchemyTask)
def otx_cache_transfer(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        ):
    """Generates and commits transaction cache metadata for an ERC20.transfer or ERC20.transferFrom transaction

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
    (txc, cache_id) = cache_transfer_data(tx_hash_hex, tx)
    return txc


@celery_app.task(base=CriticalSQLAlchemyTask)
def cache_transfer_data(
        tx_hash_hex,
        tx,
        ):
    """Helper function for otx_cache_transfer

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx: Signed raw transaction
    :type tx: str, 0x-hex
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    tx_data = unpack_transfer(tx['data'])
    logg.debug('tx data {}'.format(tx_data))
    logg.debug('tx {}'.format(tx))

    session = SessionBase.create_session()
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx_data['to'],
        tx['to'],
        tx['to'],
        tx_data['amount'],
        tx_data['amount'],
        session=session,
        )
    session.add(tx_cache)
    session.commit()
    cache_id = tx_cache.id
    session.close()
    return (tx_hash_hex, cache_id)


@celery_app.task(base=CriticalSQLAlchemyTask)
def otx_cache_approve(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_str,
        ):
    """Generates and commits transaction cache metadata for an ERC20.approve transaction

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Raw signed transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
    (txc, cache_id) = cache_approve_data(tx_hash_hex, tx)
    return txc


@celery_app.task(base=CriticalSQLAlchemyTask)
def cache_approve_data(
        tx_hash_hex,
        tx,
        ):
    """Helper function for otx_cache_approve

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx: Signed raw transaction
    :type tx: str, 0x-hex
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    tx_data = unpack_approve(tx['data'])
    logg.debug('tx data {}'.format(tx_data))
    logg.debug('tx {}'.format(tx))

    session = SessionBase.create_session()
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        tx_data['to'],
        tx['to'],
        tx['to'],
        tx_data['amount'],
        tx_data['amount'],
        session=session,
        )
    session.add(tx_cache)
    session.commit()
    cache_id = tx_cache.id
    session.close()
    return (tx_hash_hex, cache_id)
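The cache tasks above were not usually called directly; they were passed by name through sign_and_register_tx's cache_task hook, which then queued something equivalent to the following. The queue name and argument values are placeholders for illustration.

import celery

s_cache = celery.signature(
        'cic_eth.eth.token.otx_cache_transfer',
        [
            '0x' + '00' * 32,              # placeholder tx hash
            '0x' + 'f8' + '00' * 100,      # placeholder raw signed tx
            'evm:bloxberg:8996',           # example chain spec string
        ],
        queue='cic-eth',                   # assumed queue name
        )
s_cache.apply_async()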
# TODO: Move to dedicated metadata package
class ExtendedTx:

    _default_decimals = 6

    def __init__(self, tx_hash, chain_spec):
        self._chain_spec = chain_spec
        self.chain = str(chain_spec)
        self.hash = tx_hash
        self.sender = None
        self.sender_label = None
        self.recipient = None
        self.recipient_label = None
        self.source_token_value = 0
        self.destination_token_value = 0
        self.source_token = zero_address
        self.destination_token = zero_address
        self.source_token_symbol = ''
        self.destination_token_symbol = ''
        self.source_token_decimals = ExtendedTx._default_decimals
        self.destination_token_decimals = ExtendedTx._default_decimals
        self.status = TxStatus.PENDING.name
        self.status_code = TxStatus.PENDING.value


    def set_actors(self, sender, recipient, trusted_declarator_addresses=None):
        self.sender = sender
        self.recipient = recipient
        if trusted_declarator_addresses != None:
            self.sender_label = translate_address(sender, trusted_declarator_addresses, self.chain)
            self.recipient_label = translate_address(recipient, trusted_declarator_addresses, self.chain)


    def set_tokens(self, source, source_value, destination=None, destination_value=None):
        if destination == None:
            destination = source
        if destination_value == None:
            destination_value = source_value
        st = CICRegistry.get_address(self._chain_spec, source)
        dt = CICRegistry.get_address(self._chain_spec, destination)
        self.source_token = source
        self.source_token_symbol = st.symbol()
        self.source_token_decimals = st.decimals()
        self.source_token_value = source_value
        self.destination_token = destination
        self.destination_token_symbol = dt.symbol()
        self.destination_token_decimals = dt.decimals()
        self.destination_token_value = destination_value


    def set_status(self, n):
        if n:
            self.status = TxStatus.ERROR.name
        else:
            self.status = TxStatus.SUCCESS.name
        self.status_code = n


    def to_dict(self):
        o = {}
        for attr in dir(self):
            if attr[0] == '_' or attr in ['set_actors', 'set_tokens', 'set_status', 'to_dict']:
                continue
            o[attr] = getattr(self, attr)
        return o
@ -4,13 +4,38 @@ import logging
# third-party imports
import celery
import requests
import web3
from cic_registry import zero_address
from cic_registry.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec
from chainlib.eth.address import is_checksum_address
from chainlib.eth.gas import balance
from chainlib.eth.error import (
        EthException,
        NotFoundEthException,
        )
from chainlib.eth.tx import (
        transaction,
        receipt,
        raw,
        TxFormat,
        unpack,
        )
from chainlib.connection import RPCConnection
from chainlib.hash import keccak256_hex_to_hex
from chainlib.eth.gas import Gas
from chainlib.eth.contract import (
        abi_decode_single,
        ABIContractType,
        )
from hexathon import (
        add_0x,
        strip_0x,
        )

# local imports
from .rpc import RpcClient
from cic_eth.db import Otx, SessionBase
from cic_eth.db import (
        Otx,
        SessionBase,
        )
from cic_eth.db.models.tx import TxCache
from cic_eth.db.models.nonce import NonceReservation
from cic_eth.db.models.lock import Lock
@ -22,17 +47,22 @@ from cic_eth.db.enum import (
from cic_eth.error import PermanentTxError
from cic_eth.error import TemporaryTxError
from cic_eth.error import NotLocalTxError
from cic_eth.queue.tx import create as queue_create
from cic_eth.queue.tx import get_tx
from cic_eth.queue.tx import get_nonce_tx
#from cic_eth.queue.tx import create as queue_create
from cic_eth.queue.tx import (
        get_tx,
        register_tx,
        get_nonce_tx,
        )
from cic_eth.error import OutOfGasError
from cic_eth.error import LockedError
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.eth.task import sign_and_register_tx, create_check_gas_and_send_task
from cic_eth.eth.task import sign_tx
from cic_eth.eth.nonce import NonceOracle
from cic_eth.error import AlreadyFillingGasError
from cic_eth.eth.util import tx_hex_string
from cic_eth.eth.gas import (
        create_check_gas_task,
        )
from cic_eth.eth.nonce import CustodialTaskNonceOracle
from cic_eth.error import (
        AlreadyFillingGasError,
        EthError,
        )
from cic_eth.admin.ctrl import lock_send
from cic_eth.task import (
        CriticalSQLAlchemyTask,
@ -50,7 +80,7 @@ MAX_NONCE_ATTEMPTS = 3

# TODO this function is too long
@celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyAndWeb3Task)
def check_gas(self, tx_hashes, chain_str, txs=[], address=None, gas_required=None):
def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=None):
    """Check the gas level of the sender address of a transaction.

    If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raised.
@ -59,8 +89,8 @@ def check_gas(self, tx_hashes, chain_str, txs=[], address=None, gas_required=None):
    :param tx_hashes: Transaction hashes due to be submitted
    :type tx_hashes: list of str, 0x-hex
    :param chain_str: Chain spec string representation
    :type chain_str: str
    :param chain_spec_dict: Chain spec dict representation
    :type chain_spec_dict: dict
    :param txs: Signed raw transaction data, corresponding to tx_hashes
    :type txs: list of str, 0x-hex
    :param address: Sender address
@ -77,38 +107,45 @@ def check_gas(self, tx_hashes, chain_str, txs=[], address=None, gas_required=None):
    if address == None:
        address = o['address']

    if not web3.Web3.isChecksumAddress(address):
    #if not web3.Web3.isChecksumAddress(address):
    if not is_checksum_address(address):
        raise ValueError('invalid address {}'.format(address))

    chain_spec = ChainSpec.from_chain_str(chain_str)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)

    queue = self.request.delivery_info['routing_key']
    queue = self.request.delivery_info.get('routing_key')

    #c = RpcClient(chain_spec, holder_address=address)
    c = RpcClient(chain_spec)
    conn = RPCConnection.connect(chain_spec)

    # TODO: it should not be necessary to pass address explicitly, if not passed should be derived from the tx
    balance = 0
    gas_balance = 0
    try:
        balance = c.w3.eth.getBalance(address)
    except ValueError as e:
        raise EthError('balance call for {}'.format())
        o = balance(address)
        r = conn.do(o)
        conn.disconnect()
        gas_balance = abi_decode_single(ABIContractType.UINT256, r)
    except EthException as e:
        conn.disconnect()
        raise EthError('gas_balance call for {}: {}'.format(address, e))

    logg.debug('address {} has gas {} needs {}'.format(address, balance, gas_required))
    logg.debug('address {} has gas {} needs {}'.format(address, gas_balance, gas_required))
    session = SessionBase.create_session()
    gas_provider = AccountRole.get_address('GAS_GIFTER', session=session)
    session.close()

    if gas_required > balance:
    if gas_required > gas_balance:
        s_nonce = celery.signature(
            'cic_eth.eth.tx.reserve_nonce',
            [
                address,
                c.gas_provider(),
                gas_provider,
            ],
            queue=queue,
            )
        s_refill_gas = celery.signature(
            'cic_eth.eth.tx.refill_gas',
            [
                chain_str,
                chain_spec_dict,
            ],
            queue=queue,
            )
@ -125,28 +162,28 @@ def check_gas(self, tx_hashes, chain_str, txs=[], address=None, gas_required=None):
            )
        wait_tasks.append(s)
        celery.group(wait_tasks)()
        raise OutOfGasError('need to fill gas, required {}, had {}'.format(gas_required, balance))
        raise OutOfGasError('need to fill gas, required {}, had {}'.format(gas_required, gas_balance))

    safe_gas = c.safe_threshold_amount()
    if balance < safe_gas:
    safe_gas = self.safe_gas_threshold_amount
    if gas_balance < safe_gas:
        s_nonce = celery.signature(
            'cic_eth.eth.tx.reserve_nonce',
            [
                address,
                c.gas_provider(),
                gas_provider,
            ],
            queue=queue,
            )
        s_refill_gas = celery.signature(
            'cic_eth.eth.tx.refill_gas',
            [
                chain_str,
                chain_spec_dict,
            ],
            queue=queue,
            )
        s_nonce.link(s_refill)
        s_nonce.link(s_refill_gas)
        s_nonce.apply_async()
        logg.debug('requested refill from {} to {}'.format(c.gas_provider(), address))
        logg.debug('requested refill from {} to {}'.format(gas_provider, address))
    ready_tasks = []
    for tx_hash in tx_hashes:
        s = celery.signature(
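The balance lookup in the rewritten check_gas goes through chainlib instead of web3. A minimal standalone sketch of that pattern follows; the chain string, address and the assumption that a connection is already registered for the chain are illustrative only.

from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection
from chainlib.eth.gas import balance
from chainlib.eth.contract import (
        abi_decode_single,
        ABIContractType,
        )

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')  # example chain string
address = '0x' + '11' * 20                                  # placeholder address

conn = RPCConnection.connect(chain_spec)   # assumes a connection has been registered for this chain
o = balance(address)                       # build the eth_getBalance jsonrpc request
r = conn.do(o)                             # execute it against the node
gas_balance = abi_decode_single(ABIContractType.UINT256, r)
conn.disconnect()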
@ -178,8 +215,6 @@ def hashes_to_txs(self, tx_hashes):

    queue = self.request.delivery_info['routing_key']

    #otxs = ','.format("'{}'".format(tx_hash) for tx_hash in tx_hashes)

    session = SessionBase.create_session()
    q = session.query(Otx.signed_tx)
    q = q.filter(Otx.tx_hash.in_(tx_hashes))
@ -196,172 +231,9 @@ def hashes_to_txs(self, tx_hashes):
    return txs


# TODO: Move this and send to subfolder submodule
class ParityNodeHandler:
    def __init__(self, chain_spec, queue):
        self.chain_spec = chain_spec
        self.chain_str = str(chain_spec)
        self.queue = queue

    def handle(self, exception, tx_hash_hex, tx_hex):
        meth = self.handle_default
        if isinstance(exception, (ValueError)):
            earg = exception.args[0]
            if earg['code'] == -32010:
                logg.debug('skipping lock for code {}'.format(earg['code']))
                meth = self.handle_invalid_parameters
            elif earg['code'] == -32602:
                meth = self.handle_invalid_encoding
            else:
                # TODO: move to status log db comment field
                meth = self.handle_invalid
        elif isinstance(exception, (requests.exceptions.ConnectionError)):
            meth = self.handle_connection
        (t, e_fn, message) = meth(tx_hash_hex, tx_hex, str(exception))
        return (t, e_fn, '{} {}'.format(message, exception))


    def handle_connection(self, tx_hash_hex, tx_hex, debugstr=None):
        s_set_sent = celery.signature(
            'cic_eth.queue.tx.set_sent_status',
            [
                tx_hash_hex,
                True,
            ],
            queue=self.queue,
            )
        t = s_set_sent.apply_async()
        return (t, TemporaryTxError, 'Sendfail {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))


    def handle_invalid_encoding(self, tx_hash_hex, tx_hex, debugstr=None):
        tx_bytes = bytes.fromhex(tx_hex[2:])
        tx = unpack_signed_raw_tx(tx_bytes, self.chain_spec.chain_id())
        s_lock = celery.signature(
            'cic_eth.admin.ctrl.lock_send',
            [
                tx_hash_hex,
                self.chain_str,
                tx['from'],
                tx_hash_hex,
            ],
            queue=self.queue,
            )
        s_set_reject = celery.signature(
            'cic_eth.queue.tx.set_rejected',
            [],
            queue=self.queue,
            )
        nonce_txs = get_nonce_tx(tx['nonce'], tx['from'], self.chain_spec.chain_id())
        attempts = len(nonce_txs)
        if attempts < MAX_NONCE_ATTEMPTS:
            logg.debug('nonce {} address {} retries {} < {}'.format(tx['nonce'], tx['from'], attempts, MAX_NONCE_ATTEMPTS))
            s_resend = celery.signature(
                'cic_eth.eth.tx.resend_with_higher_gas',
                [
                    self.chain_str,
                    None,
                    1.01,
                ],
                queue=self.queue,
                )
            s_unlock = celery.signature(
                'cic_eth.admin.ctrl.unlock_send',
                [
                    self.chain_str,
                    tx['from'],
                ],
                queue=self.queue,
                )
            s_resend.link(s_unlock)
            s_set_reject.link(s_resend)

        s_lock.link(s_set_reject)
        t = s_lock.apply_async()
        return (t, PermanentTxError, 'Reject invalid encoding {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))


    def handle_invalid_parameters(self, tx_hash_hex, tx_hex, debugstr=None):
        s_sync = celery.signature(
            'cic_eth.eth.tx.sync_tx',
            [
                tx_hash_hex,
                self.chain_str,
            ],
            queue=self.queue,
            )
        t = s_sync.apply_async()
        return (t, PermanentTxError, 'Reject invalid parameters {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))


    def handle_invalid(self, tx_hash_hex, tx_hex, debugstr=None):
        tx_bytes = bytes.fromhex(tx_hex[2:])
        tx = unpack_signed_raw_tx(tx_bytes, self.chain_spec.chain_id())
        s_lock = celery.signature(
            'cic_eth.admin.ctrl.lock_send',
            [
                tx_hash_hex,
                self.chain_str,
                tx['from'],
                tx_hash_hex,
            ],
            queue=self.queue,
            )
        s_set_reject = celery.signature(
            'cic_eth.queue.tx.set_rejected',
            [],
            queue=self.queue,
            )
        s_debug = celery.signature(
            'cic_eth.admin.debug.alert',
            [
                tx_hash_hex,
                debugstr,
            ],
            queue=self.queue,
            )
        s_set_reject.link(s_debug)
        s_lock.link(s_set_reject)
        t = s_lock.apply_async()
        return (t, PermanentTxError, 'Reject invalid {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))


    def handle_default(self, tx_hash_hex, tx_hex, debugstr):
        tx_bytes = bytes.fromhex(tx_hex[2:])
        tx = unpack_signed_raw_tx(tx_bytes, self.chain_spec.chain_id())
        s_lock = celery.signature(
            'cic_eth.admin.ctrl.lock_send',
            [
                tx_hash_hex,
                self.chain_str,
                tx['from'],
                tx_hash_hex,
            ],
            queue=self.queue,
            )
        s_set_fubar = celery.signature(
            'cic_eth.queue.tx.set_fubar',
            [],
            queue=self.queue,
            )
        s_debug = celery.signature(
            'cic_eth.admin.debug.alert',
            [
                tx_hash_hex,
                debugstr,
            ],
            queue=self.queue,
            )
        s_set_fubar.link(s_debug)
        s_lock.link(s_set_fubar)
        t = s_lock.apply_async()
        return (t, PermanentTxError, 'Fubar {} {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id()), debugstr))


# TODO: A lock should be introduced to ensure that the send status change and the transaction send is atomic.
@celery_app.task(bind=True, base=CriticalWeb3Task)
def send(self, txs, chain_str):
def send(self, txs, chain_spec_dict):
    """Send transactions to the network.

    If more than one transaction is passed to the task, it will spawn a new send task with the remaining transaction(s) after the first in the list has been processed.
@ -386,17 +258,16 @@ def send(self, txs, chain_str):
    if len(txs) == 0:
        raise ValueError('no transaction to send')

    chain_spec = ChainSpec.from_chain_str(chain_str)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)

    tx_hex = txs[0]
    logg.debug('send transaction {}'.format(tx_hex))

    tx_hash = web3.Web3.keccak(hexstr=tx_hex)
    tx_hash_hex = tx_hash.hex()

    queue = self.request.delivery_info.get('routing_key', None)

    c = RpcClient(chain_spec)
    tx_hash_hex = add_0x(keccak256_hex_to_hex(tx_hex))

    logg.debug('send transaction {} -> {}'.format(tx_hash_hex, tx_hex))

    queue = self.request.delivery_info.get('routing_key')

    r = None
    s_set_sent = celery.signature(
        'cic_eth.queue.tx.set_sent_status',
@ -406,14 +277,10 @@ def send(self, txs, chain_str):
        ],
        queue=queue,
        )
    try:
        r = c.w3.eth.send_raw_transaction(tx_hex)
    except requests.exceptions.ConnectionError as e:
        raise(e)
    except Exception as e:
        raiser = ParityNodeHandler(chain_spec, queue)
        (t, e, m) = raiser.handle(e, tx_hash_hex, tx_hex)
        raise e(m)
    o = raw(tx_hex)
    conn = RPCConnection.connect(chain_spec, 'default')
    conn.do(o)
    s_set_sent.apply_async()

    tx_tail = txs[1:]
@ -425,13 +292,13 @@ def send(self, txs, chain_str):
        )
    s.apply_async()

    return r.hex()
    return tx_hash_hex


# TODO: if this method fails the nonce will be out of sequence. session needs to be extended to include the queue create, so that nonce is rolled back if the second sql query fails. Better yet, split each state change into separate tasks.
# TODO: method is too long, factor out code for clarity
@celery_app.task(bind=True, throws=(web3.exceptions.TransactionNotFound,), base=CriticalWeb3AndSignerTask)
@celery_app.task(bind=True, throws=(NotFoundEthException,), base=CriticalWeb3AndSignerTask)
def refill_gas(self, recipient_address, chain_str):
def refill_gas(self, recipient_address, chain_spec_dict):
    """Executes a native token transaction to fund the recipient's gas expenditures.

    :param recipient_address: Recipient in need of gas
@ -442,8 +309,13 @@ def refill_gas(self, recipient_address, chain_str):
    :returns: Transaction hash.
    :rtype: str, 0x-hex
    """
    chain_spec = ChainSpec.from_chain_str(chain_str)
    # essentials
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    queue = self.request.delivery_info.get('routing_key')

    # Determine value of gas tokens to send
    # if an uncompleted gas refill for the same recipient already exists, we still need to spend the nonce
    # however, we will perform a 0-value transaction instead
    zero_amount = False
    session = SessionBase.create_session()
    status_filter = StatusBits.FINAL | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR
@ -454,63 +326,36 @@ def refill_gas(self, recipient_address, chain_str):
    q = q.filter(TxCache.recipient==recipient_address)
    c = q.count()
    if c > 0:
        #session.close()
        #raise AlreadyFillingGasError(recipient_address)
        logg.warning('already filling gas {}'.format(str(AlreadyFillingGasError(recipient_address))))
        zero_amount = True
    session.flush()

    queue = self.request.delivery_info['routing_key']

    c = RpcClient(chain_spec)
    clogg = celery_app.log.get_default_logger()
    logg.debug('refill gas from provider address {}'.format(c.gas_provider()))
    default_nonce = c.w3.eth.getTransactionCount(c.gas_provider(), 'pending')
    nonce_generator = NonceOracle(c.gas_provider(), default_nonce)
    #nonce = nonce_generator.next(session=session)
    nonce = nonce_generator.next_by_task_uuid(self.request.root_id, session=session)
    gas_price = c.gas_price()
    gas_limit = c.default_gas_limit
    refill_amount = 0
    if not zero_amount:
        refill_amount = c.refill_amount()
    logg.debug('tx send gas price {} nonce {}'.format(gas_price, nonce))

    # create and sign transaction
    tx_send_gas = {
        'from': c.gas_provider(),
        'to': recipient_address,
        'gas': gas_limit,
        'gasPrice': gas_price,
        'chainId': chain_spec.chain_id(),
        'nonce': nonce,
        'value': refill_amount,
        'data': '',
        }
    tx_send_gas_signed = c.w3.eth.sign_transaction(tx_send_gas)
    tx_hash = web3.Web3.keccak(hexstr=tx_send_gas_signed['raw'])
    tx_hash_hex = tx_hash.hex()

    # TODO: route this through sign_and_register_tx instead
    # finally determine the value to send
    refill_amount = 0
    if not zero_amount:
        refill_amount = self.safe_gas_refill_amount

    # determine sender
    gas_provider = AccountRole.get_address('GAS_GIFTER', session=session)
    session.flush()

    # set up evm RPC connection
    rpc = RPCConnection.connect(chain_spec, 'default')

    # set up transaction builder
    nonce_oracle = CustodialTaskNonceOracle(gas_provider, self.request.root_id, session=session)
    gas_oracle = self.create_gas_oracle(rpc)
    rpc_signer = RPCConnection.connect(chain_spec, 'signer')
    c = Gas(signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, chain_id=chain_spec.chain_id())

    # build and add transaction
    logg.debug('tx send gas amount {} from provider {} to {}'.format(refill_amount, gas_provider, recipient_address))
    (tx_hash_hex, tx_signed_raw_hex) = c.create(gas_provider, recipient_address, refill_amount, tx_format=TxFormat.RLP_SIGNED)
    logg.debug('adding queue refill gas tx {}'.format(tx_hash_hex))
    queue_create(
        nonce,
        c.gas_provider(),
        tx_hash_hex,
        tx_send_gas_signed['raw'],
        chain_str,
        session=session,
        )
    session.close()

    s_tx_cache = celery.signature(
        'cic_eth.eth.tx.cache_gas_refill_data',
        [
            tx_hash_hex,
            tx_send_gas,
        ],
        queue=queue,
        )
    cache_task = 'cic_eth.eth.tx.cache_gas_data'
    register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session)

    # add transaction to send queue
    s_status = celery.signature(
        'cic_eth.queue.tx.set_ready',
        [
@ -518,9 +363,9 @@ def refill_gas(self, recipient_address, chain_str):
        ],
        queue=queue,
        )
    celery.group(s_tx_cache, s_status)()
    t = s_status.apply_async()

    return tx_send_gas_signed['raw']
    return tx_signed_raw_hex


@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
@ -541,7 +386,6 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_str, gas=None, default_fa
|
|||||||
"""
|
"""
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
|
|
||||||
|
|
||||||
q = session.query(Otx)
|
q = session.query(Otx)
|
||||||
q = q.filter(Otx.tx_hash==txold_hash_hex)
|
q = q.filter(Otx.tx_hash==txold_hash_hex)
|
||||||
otx = q.first()
|
otx = q.first()
|
||||||
@ -553,7 +397,7 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_str, gas=None, default_fa
|
|||||||
c = RpcClient(chain_spec)
|
c = RpcClient(chain_spec)
|
||||||
|
|
||||||
tx_signed_raw_bytes = bytes.fromhex(otx.signed_tx[2:])
|
tx_signed_raw_bytes = bytes.fromhex(otx.signed_tx[2:])
|
||||||
tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
|
tx = unpack(tx_signed_raw_bytes, chain_spec.chain_id())
|
||||||
logg.debug('resend otx {} {}'.format(tx, otx.signed_tx))
|
logg.debug('resend otx {} {}'.format(tx, otx.signed_tx))
|
||||||
|
|
||||||
queue = self.request.delivery_info['routing_key']
|
queue = self.request.delivery_info['routing_key']
|
||||||
@ -600,34 +444,41 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_str, gas=None, default_fa
|
|||||||
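A minimal sketch of how a caller could dispatch the reworked gas refill over celery. The task name 'cic_eth.eth.tx.refill_gas', broker URL and queue name are assumptions inferred from the module layout in this diff, not confirmed API.

import celery

# Hypothetical broker/backend; replace with the deployment's CELERY_BROKER_URL values.
celery_app = celery.Celery(broker='redis://localhost:6379', backend='redis://localhost:6379')

def request_gas_refill(recipient_address, chain_spec_dict, queue='cic-eth'):
    # Build the task signature and submit it asynchronously; the worker resolves
    # the GAS_GIFTER role and signs the refill transaction as shown above.
    s_refill = celery.signature(
        'cic_eth.eth.tx.refill_gas',
        [recipient_address, chain_spec_dict],
        queue=queue,
        )
    return s_refill.apply_async()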


 @celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
-def reserve_nonce(self, chained_input, signer=None):
+def reserve_nonce(self, chained_input, signer_address=None):

+    self.log_banner()

     session = SessionBase.create_session()

     address = None
-    if signer == None:
+    if signer_address == None:
         address = chained_input
         logg.debug('non-explicit address for reserve nonce, using arg head {}'.format(chained_input))
     else:
-        if web3.Web3.isChecksumAddress(signer):
-            address = signer
-            logg.debug('explicit address for reserve nonce {}'.format(signer))
+        #if web3.Web3.isChecksumAddress(signer_address):
+        if is_checksum_address(signer_address):
+            address = signer_address
+            logg.debug('explicit address for reserve nonce {}'.format(signer_address))
         else:
-            address = AccountRole.get_address(signer, session=session)
-            logg.debug('role for reserve nonce {} -> {}'.format(signer, address))
+            address = AccountRole.get_address(signer_address, session=session)
+            logg.debug('role for reserve nonce {} -> {}'.format(signer_address, address))

-    if not web3.Web3.isChecksumAddress(address):
+    if not is_checksum_address(address):
         raise ValueError('invalid result when resolving address for nonce {}'.format(address))

     root_id = self.request.root_id
-    nonce = NonceReservation.next(address, root_id)
+    r = NonceReservation.next(address, root_id)
+    logg.debug('nonce {} reserved for address {} task {}'.format(r[1], address, r[0]))

+    session.commit()

     session.close()

     return chained_input


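The NonceReservation.next call above hands back a (task, nonce) pair keyed on the celery root task id, so a later signing step in the same chain can pick the nonce up again. The following stand-in is only an illustration of that bookkeeping pattern, not the cic_eth implementation.

class SimpleNonceReservation:
    # Illustrative in-memory stand-in; real reservations are persisted via SQLAlchemy.

    def __init__(self):
        self.next_nonce = {}   # address -> next unreserved nonce
        self.reserved = {}     # (address, root_id) -> reserved nonce

    def next(self, address, root_id, default_nonce=0):
        nonce = self.next_nonce.get(address, default_nonce)
        self.next_nonce[address] = nonce + 1
        self.reserved[(address, root_id)] = nonce
        return (root_id, nonce)

reservation = SimpleNonceReservation()
print(reservation.next('0xdeadbeef', 'task-uuid-1'))  # ('task-uuid-1', 0)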
-@celery_app.task(bind=True, throws=(web3.exceptions.TransactionNotFound,), base=CriticalWeb3Task)
-def sync_tx(self, tx_hash_hex, chain_str):
+@celery_app.task(bind=True, throws=(NotFoundEthException,), base=CriticalWeb3Task)
+def sync_tx(self, tx_hash_hex, chain_spec_dict):
     """Force update of network status of a simgle transaction

     :param tx_hash_hex: Transaction hash
@@ -636,16 +487,19 @@ def sync_tx(self, tx_hash_hex, chain_str):
     :type chain_str: str
     """

-    queue = self.request.delivery_info['routing_key']
+    queue = self.request.delivery_info.get('routing_key')

-    chain_spec = ChainSpec.from_chain_str(chain_str)
-    c = RpcClient(chain_spec)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    conn = RPCConnection.connect(chain_spec, 'default')
+    o = transaction(tx_hash_hex)
+    tx = conn.do(o)

-    tx = c.w3.eth.getTransaction(tx_hash_hex)
     rcpt = None
     try:
-        rcpt = c.w3.eth.getTransactionReceipt(tx_hash_hex)
-    except web3.exceptions.TransactionNotFound as e:
+        o = receipt(tx_hash_hex)
+        rcpt = conn.do(o)
+    except NotFoundEthException as e:
         pass

     if rcpt != None:
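For readers unfamiliar with what the sync_tx lookups amount to on the wire, this is roughly the underlying JSON-RPC exchange: fetch the transaction, then try for a receipt, tolerating the case where it is not yet mined. The endpoint URL is a placeholder and the error handling is deliberately reduced.

import json
import urllib.request

def rpc_call(url, method, params):
    # Plain JSON-RPC 2.0 POST; returns the 'result' member or None.
    payload = json.dumps({'jsonrpc': '2.0', 'id': 1, 'method': method, 'params': params})
    req = urllib.request.Request(url, data=payload.encode('utf-8'),
            headers={'Content-Type': 'application/json'})
    with urllib.request.urlopen(req) as response:
        return json.load(response).get('result')

def fetch_tx_state(url, tx_hash_hex):
    tx = rpc_call(url, 'eth_getTransactionByHash', [tx_hash_hex])
    rcpt = rpc_call(url, 'eth_getTransactionReceipt', [tx_hash_hex])  # None until mined
    return (tx, rcpt)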
@ -675,79 +529,54 @@ def sync_tx(self, tx_hash_hex, chain_str):
|
|||||||
s.apply_async()
|
s.apply_async()
|
||||||
|
|
||||||
|
|
||||||
|
#
|
||||||
@celery_app.task(bind=True)
|
#@celery_app.task(bind=True)
|
||||||
def resume_tx(self, txpending_hash_hex, chain_str):
|
#def resume_tx(self, txpending_hash_hex, chain_str):
|
||||||
"""Queue a suspended tranaction for (re)sending
|
# """Queue a suspended tranaction for (re)sending
|
||||||
|
#
|
||||||
:param txpending_hash_hex: Transaction hash
|
# :param txpending_hash_hex: Transaction hash
|
||||||
:type txpending_hash_hex: str, 0x-hex
|
# :type txpending_hash_hex: str, 0x-hex
|
||||||
:param chain_str: Chain spec, string representation
|
# :param chain_str: Chain spec, string representation
|
||||||
:type chain_str: str
|
# :type chain_str: str
|
||||||
:raises NotLocalTxError: Transaction does not exist in the local queue
|
# :raises NotLocalTxError: Transaction does not exist in the local queue
|
||||||
:returns: Transaction hash
|
# :returns: Transaction hash
|
||||||
:rtype: str, 0x-hex
|
# :rtype: str, 0x-hex
|
||||||
"""
|
# """
|
||||||
|
#
|
||||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
# chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||||
|
#
|
||||||
session = SessionBase.create_session()
|
# session = SessionBase.create_session()
|
||||||
q = session.query(Otx.signed_tx)
|
# q = session.query(Otx.signed_tx)
|
||||||
q = q.filter(Otx.tx_hash==txpending_hash_hex)
|
# q = q.filter(Otx.tx_hash==txpending_hash_hex)
|
||||||
r = q.first()
|
# r = q.first()
|
||||||
session.close()
|
# session.close()
|
||||||
if r == None:
|
# if r == None:
|
||||||
raise NotLocalTxError(txpending_hash_hex)
|
# raise NotLocalTxError(txpending_hash_hex)
|
||||||
|
#
|
||||||
tx_signed_raw_hex = r[0]
|
# tx_signed_raw_hex = r[0]
|
||||||
tx_signed_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
|
# tx_signed_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
|
||||||
tx = unpack_signed_raw_tx(tx_signed_bytes, chain_spec.chain_id())
|
# tx = unpack_signed_raw_tx(tx_signed_bytes, chain_spec.chain_id())
|
||||||
|
#
|
||||||
queue = self.request.delivery_info['routing_key']
|
# queue = self.request.delivery_info['routing_key']
|
||||||
|
#
|
||||||
s = create_check_gas_and_send_task(
|
# s = create_check_gas_and_send_task(
|
||||||
[tx_signed_raw_hex],
|
# [tx_signed_raw_hex],
|
||||||
chain_str,
|
# chain_str,
|
||||||
tx['from'],
|
# tx['from'],
|
||||||
tx['gasPrice'] * tx['gas'],
|
# tx['gasPrice'] * tx['gas'],
|
||||||
[txpending_hash_hex],
|
# [txpending_hash_hex],
|
||||||
queue=queue,
|
# queue=queue,
|
||||||
)
|
# )
|
||||||
s.apply_async()
|
# s.apply_async()
|
||||||
return txpending_hash_hex
|
# return txpending_hash_hex
|
||||||
|
|
||||||
|
|
||||||
|
# TODO: Move to cic_eth.eth.gas
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def otx_cache_parse_tx(
|
def cache_gas_data(
|
||||||
tx_hash_hex,
|
tx_hash_hex,
|
||||||
tx_signed_raw_hex,
|
tx_signed_raw_hex,
|
||||||
chain_str,
|
chain_spec_dict,
|
||||||
):
|
|
||||||
"""Generates and commits transaction cache metadata for a gas refill transaction
|
|
||||||
|
|
||||||
:param tx_hash_hex: Transaction hash
|
|
||||||
:type tx_hash_hex: str, 0x-hex
|
|
||||||
:param tx_signed_raw_hex: Raw signed transaction
|
|
||||||
:type tx_signed_raw_hex: str, 0x-hex
|
|
||||||
:param chain_str: Chain spec string representation
|
|
||||||
:type chain_str: str
|
|
||||||
:returns: Transaction hash and id of cache element in storage backend, respectively
|
|
||||||
:rtype: tuple
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
|
||||||
c = RpcClient(chain_spec)
|
|
||||||
tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
|
|
||||||
tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
|
|
||||||
(txc, cache_id) = cache_gas_refill_data(tx_hash_hex, tx)
|
|
||||||
return txc
|
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
|
||||||
def cache_gas_refill_data(
|
|
||||||
tx_hash_hex,
|
|
||||||
tx,
|
|
||||||
):
|
):
|
||||||
"""Helper function for otx_cache_parse_tx
|
"""Helper function for otx_cache_parse_tx
|
||||||
|
|
||||||
@ -758,12 +587,16 @@ def cache_gas_refill_data(
|
|||||||
:returns: Transaction hash and id of cache element in storage backend, respectively
|
:returns: Transaction hash and id of cache element in storage backend, respectively
|
||||||
:rtype: tuple
|
:rtype: tuple
|
||||||
"""
|
"""
|
||||||
|
chain_spec = ChainSpec.from_dict(chain_spec_dict)
|
||||||
|
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
|
||||||
|
tx = unpack(tx_signed_raw_bytes, chain_spec.chain_id())
|
||||||
|
|
||||||
tx_cache = TxCache(
|
tx_cache = TxCache(
|
||||||
tx_hash_hex,
|
tx_hash_hex,
|
||||||
tx['from'],
|
tx['from'],
|
||||||
tx['to'],
|
tx['to'],
|
||||||
zero_address,
|
ZERO_ADDRESS,
|
||||||
zero_address,
|
ZERO_ADDRESS,
|
||||||
tx['value'],
|
tx['value'],
|
||||||
tx['value'],
|
tx['value'],
|
||||||
)
|
)
|
||||||
|
@ -1,110 +0,0 @@
|
|||||||
# standard imports
|
|
||||||
import logging
|
|
||||||
import sha3
|
|
||||||
import web3
|
|
||||||
|
|
||||||
# external imports
|
|
||||||
from rlp import decode as rlp_decode
|
|
||||||
from rlp import encode as rlp_encode
|
|
||||||
from eth_keys import KeyAPI
|
|
||||||
from chainlib.eth.tx import unpack
|
|
||||||
|
|
||||||
logg = logging.getLogger()
|
|
||||||
|
|
||||||
field_debugs = [
|
|
||||||
'nonce',
|
|
||||||
'gasPrice',
|
|
||||||
'gas',
|
|
||||||
'to',
|
|
||||||
'value',
|
|
||||||
'data',
|
|
||||||
'v',
|
|
||||||
'r',
|
|
||||||
's',
|
|
||||||
]
|
|
||||||
|
|
||||||
unpack_signed_raw_tx = unpack
|
|
||||||
|
|
||||||
#def unpack_signed_raw_tx(tx_raw_bytes, chain_id):
|
|
||||||
# d = rlp_decode(tx_raw_bytes)
|
|
||||||
#
|
|
||||||
# logg.debug('decoding {} using chain id {}'.format(tx_raw_bytes.hex(), chain_id))
|
|
||||||
# j = 0
|
|
||||||
# for i in d:
|
|
||||||
# logg.debug('decoded {}: {}'.format(field_debugs[j], i.hex()))
|
|
||||||
# j += 1
|
|
||||||
# vb = chain_id
|
|
||||||
# if chain_id != 0:
|
|
||||||
# v = int.from_bytes(d[6], 'big')
|
|
||||||
# vb = v - (chain_id * 2) - 35
|
|
||||||
# while len(d[7]) < 32:
|
|
||||||
# d[7] = b'\x00' + d[7]
|
|
||||||
# while len(d[8]) < 32:
|
|
||||||
# d[8] = b'\x00' + d[8]
|
|
||||||
# s = b''.join([d[7], d[8], bytes([vb])])
|
|
||||||
# so = KeyAPI.Signature(signature_bytes=s)
|
|
||||||
#
|
|
||||||
# h = sha3.keccak_256()
|
|
||||||
# h.update(rlp_encode(d))
|
|
||||||
# signed_hash = h.digest()
|
|
||||||
#
|
|
||||||
# d[6] = chain_id
|
|
||||||
# d[7] = b''
|
|
||||||
# d[8] = b''
|
|
||||||
#
|
|
||||||
# h = sha3.keccak_256()
|
|
||||||
# h.update(rlp_encode(d))
|
|
||||||
# unsigned_hash = h.digest()
|
|
||||||
#
|
|
||||||
# p = so.recover_public_key_from_msg_hash(unsigned_hash)
|
|
||||||
# a = p.to_checksum_address()
|
|
||||||
# logg.debug('decoded recovery byte {}'.format(vb))
|
|
||||||
# logg.debug('decoded address {}'.format(a))
|
|
||||||
# logg.debug('decoded signed hash {}'.format(signed_hash.hex()))
|
|
||||||
# logg.debug('decoded unsigned hash {}'.format(unsigned_hash.hex()))
|
|
||||||
#
|
|
||||||
# to = d[3].hex() or None
|
|
||||||
# if to != None:
|
|
||||||
# to = web3.Web3.toChecksumAddress('0x' + to)
|
|
||||||
#
|
|
||||||
# return {
|
|
||||||
# 'from': a,
|
|
||||||
# 'nonce': int.from_bytes(d[0], 'big'),
|
|
||||||
# 'gasPrice': int.from_bytes(d[1], 'big'),
|
|
||||||
# 'gas': int.from_bytes(d[2], 'big'),
|
|
||||||
# 'to': to,
|
|
||||||
# 'value': int.from_bytes(d[4], 'big'),
|
|
||||||
# 'data': '0x' + d[5].hex(),
|
|
||||||
# 'v': chain_id,
|
|
||||||
# 'r': '0x' + s[:32].hex(),
|
|
||||||
# 's': '0x' + s[32:64].hex(),
|
|
||||||
# 'chainId': chain_id,
|
|
||||||
# 'hash': '0x' + signed_hash.hex(),
|
|
||||||
# 'hash_unsigned': '0x' + unsigned_hash.hex(),
|
|
||||||
# }
|
|
||||||
|
|
||||||
|
|
||||||
def unpack_signed_raw_tx_hex(tx_raw_hex, chain_id):
|
|
||||||
return unpack_signed_raw_tx(bytes.fromhex(tx_raw_hex[2:]), chain_id)
|
|
||||||
|
|
||||||
|
|
||||||
# TODO: consider moving tx string representation generation from api_admin to here
|
|
||||||
def tx_string(tx_raw_bytes, chain_id):
|
|
||||||
tx_unpacked = unpack_signed_raw_tx(tx_raw_bytes, chain_id)
|
|
||||||
return 'tx nonce {} from {} to {} hash {}'.format(
|
|
||||||
tx_unpacked['nonce'],
|
|
||||||
tx_unpacked['from'],
|
|
||||||
tx_unpacked['to'],
|
|
||||||
tx_unpacked['hash'],
|
|
||||||
)
|
|
||||||
|
|
||||||
def tx_hex_string(tx_hex, chain_id):
|
|
||||||
if len(tx_hex) < 2:
|
|
||||||
raise ValueError('invalid data length')
|
|
||||||
elif tx_hex[:2] == '0x':
|
|
||||||
tx_hex = tx_hex[2:]
|
|
||||||
|
|
||||||
tx_raw_bytes = bytes.fromhex(tx_hex)
|
|
||||||
return tx_string(tx_raw_bytes, chain_id)
|
|
||||||
|
|
||||||
|
|
@@ -3,20 +3,34 @@ import logging

 # third-party imports
 import celery
-from cic_registry.chain import ChainSpec
-from cic_registry import CICRegistry
+from chainlib.chain import ChainSpec
+from chainlib.connection import RPCConnection
+from chainlib.eth.constant import ZERO_ADDRESS
+from cic_eth_registry import CICRegistry
+from eth_address_declarator import AddressDeclarator

+# local imports
+from cic_eth.task import BaseTask

 celery_app = celery.current_app

 logg = logging.getLogger()


-def translate_address(address, trusted_addresses, chain_spec):
+def translate_address(address, trusted_addresses, chain_spec, sender_address=ZERO_ADDRESS):

+    rpc = RPCConnection.connect(chain_spec, 'default')
+    registry = CICRegistry(chain_spec, rpc)

+    declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
+    c = AddressDeclarator()

     for trusted_address in trusted_addresses:
-        o = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', 'Declarator')
-        fn = o.function('declaration')
-        declaration_hex = fn(trusted_address, address).call()
-        declaration_bytes = declaration_hex[0].rstrip(b'\x00')
+        o = c.declaration(declarator_address, trusted_address, address, sender_address=sender_address)
+        r = rpc.do(o)
+        declaration_hex = AddressDeclarator.parse_declaration(r)
+        declaration_hex = declaration_hex[0].rstrip('0')
+        declaration_bytes = bytes.fromhex(declaration_hex)
         declaration = None
         try:
             declaration = declaration_bytes.decode('utf-8', errors='strict')
@@ -25,19 +39,19 @@ def translate_address(address, trusted_addresses, chain_spec):
         return declaration


-@celery_app.task()
-def translate_tx_addresses(tx, trusted_addresses, chain_str):
+@celery_app.task(bind=True, base=BaseTask)
+def translate_tx_addresses(self, tx, trusted_addresses, chain_spec_dict):

-    chain_spec = ChainSpec.from_chain_str(chain_str)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)

     declaration = None
     if tx['sender_label'] == None:
-        declaration = translate_address(tx['sender'], trusted_addresses, chain_spec)
+        declaration = translate_address(tx['sender'], trusted_addresses, chain_spec, self.call_address)
         tx['sender_label'] = declaration

     declaration = None
     if tx['recipient_label'] == None:
-        declaration = translate_address(tx['recipient'], trusted_addresses, chain_spec)
+        declaration = translate_address(tx['recipient'], trusted_addresses, chain_spec, self.call_address)
         tx['recipient_label'] = declaration

     return tx
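The new declaration handling above strips trailing zero padding from the 32-byte declarator entry before decoding it as utf-8. A small, self-contained illustration of that step; the example value is made up.

def decode_declaration(declaration_hex):
    # Drop right-hand zero padding; rstrip can leave an odd nibble count, so pad back.
    declaration_hex = declaration_hex.rstrip('0')
    if len(declaration_hex) % 2:
        declaration_hex += '0'
    declaration_bytes = bytes.fromhex(declaration_hex)
    try:
        return declaration_bytes.decode('utf-8', errors='strict')
    except UnicodeDecodeError:
        return None

# 'Foo Token' padded to 32 bytes with zeroes, as a declarator entry might store it.
print(decode_declaration('466f6f20546f6b656e' + '0' * 46))  # 'Foo Token'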
|
@ -3,21 +3,28 @@ import logging
|
|||||||
import math
|
import math
|
||||||
|
|
||||||
# third-pary imports
|
# third-pary imports
|
||||||
import web3
|
|
||||||
import celery
|
import celery
|
||||||
import moolb
|
import moolb
|
||||||
from cic_registry.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
from cic_registry.registry import CICRegistry
|
from chainlib.connection import RPCConnection
|
||||||
|
from chainlib.eth.tx import (
|
||||||
|
unpack,
|
||||||
|
transaction_by_block,
|
||||||
|
receipt,
|
||||||
|
)
|
||||||
|
from chainlib.eth.block import block_by_number
|
||||||
|
from chainlib.eth.contract import abi_decode_single
|
||||||
|
from chainlib.eth.erc20 import ERC20
|
||||||
from hexathon import strip_0x
|
from hexathon import strip_0x
|
||||||
|
from cic_eth_registry import CICRegistry
|
||||||
|
from cic_eth_registry.erc20 import ERC20Token
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.eth.rpc import RpcClient
|
|
||||||
from cic_eth.db.models.otx import Otx
|
from cic_eth.db.models.otx import Otx
|
||||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
|
||||||
from cic_eth.db.enum import StatusEnum
|
from cic_eth.db.enum import StatusEnum
|
||||||
from cic_eth.eth.token import unpack_transfer
|
|
||||||
from cic_eth.queue.tx import get_tx_cache
|
from cic_eth.queue.tx import get_tx_cache
|
||||||
from cic_eth.queue.time import tx_times
|
from cic_eth.queue.time import tx_times
|
||||||
|
from cic_eth.task import BaseTask
|
||||||
|
|
||||||
celery_app = celery.current_app
|
celery_app = celery.current_app
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
@ -26,8 +33,8 @@ MAX_BLOCK_TX = 250
|
|||||||
|
|
||||||
|
|
||||||
# TODO: Make this method easier to read
|
# TODO: Make this method easier to read
|
||||||
@celery_app.task()
|
@celery_app.task(bind=True, base=BaseTask)
|
||||||
def list_tx_by_bloom(bloomspec, address, chain_str):
|
def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
|
||||||
"""Retrieve external transaction data matching the provided filter
|
"""Retrieve external transaction data matching the provided filter
|
||||||
|
|
||||||
The bloom filter representation with the following structure (the size of the filter will be inferred from the size of the provided filter data):
|
The bloom filter representation with the following structure (the size of the filter will be inferred from the size of the provided filter data):
|
||||||
@ -49,8 +56,11 @@ def list_tx_by_bloom(bloomspec, address, chain_str):
|
|||||||
:returns: dict of transaction data as dict, keyed by transaction hash
|
:returns: dict of transaction data as dict, keyed by transaction hash
|
||||||
:rtype: dict of dict
|
:rtype: dict of dict
|
||||||
"""
|
"""
|
||||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
chain_spec = ChainSpec.from_dict(chain_spec_dict)
|
||||||
c = RpcClient(chain_spec)
|
chain_str = str(chain_spec)
|
||||||
|
rpc = RPCConnection.connect(chain_spec, 'default')
|
||||||
|
registry = CICRegistry(chain_spec, rpc)
|
||||||
|
|
||||||
block_filter_data = bytes.fromhex(bloomspec['block_filter'])
|
block_filter_data = bytes.fromhex(bloomspec['block_filter'])
|
||||||
tx_filter_data = bytes.fromhex(bloomspec['blocktx_filter'])
|
tx_filter_data = bytes.fromhex(bloomspec['blocktx_filter'])
|
||||||
databitlen = len(block_filter_data)*8
|
databitlen = len(block_filter_data)*8
|
||||||
@ -62,47 +72,53 @@ def list_tx_by_bloom(bloomspec, address, chain_str):
|
|||||||
block_height_bytes = block_height.to_bytes(4, 'big')
|
block_height_bytes = block_height.to_bytes(4, 'big')
|
||||||
if block_filter.check(block_height_bytes):
|
if block_filter.check(block_height_bytes):
|
||||||
logg.debug('filter matched block {}'.format(block_height))
|
logg.debug('filter matched block {}'.format(block_height))
|
||||||
block = c.w3.eth.getBlock(block_height, True)
|
o = block_by_number(block_height)
|
||||||
|
block = rpc.do(o)
|
||||||
|
logg.debug('block {}'.format(block))
|
||||||
|
|
||||||
for tx_index in range(0, len(block.transactions)):
|
for tx_index in range(0, len(block['transactions'])):
|
||||||
composite = tx_index + block_height
|
composite = tx_index + block_height
|
||||||
tx_index_bytes = composite.to_bytes(4, 'big')
|
tx_index_bytes = composite.to_bytes(4, 'big')
|
||||||
if tx_filter.check(tx_index_bytes):
|
if tx_filter.check(tx_index_bytes):
|
||||||
logg.debug('filter matched block {} tx {}'.format(block_height, tx_index))
|
logg.debug('filter matched block {} tx {}'.format(block_height, tx_index))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
tx = c.w3.eth.getTransactionByBlock(block_height, tx_index)
|
#tx = c.w3.eth.getTransactionByBlock(block_height, tx_index)
|
||||||
except web3.exceptions.TransactionNotFound:
|
o = transaction_by_block(block['hash'], tx_index)
|
||||||
logg.debug('false positive on block {} tx {}'.format(block_height, tx_index))
|
tx = rpc.do(o)
|
||||||
|
except Exception as e:
|
||||||
|
logg.debug('false positive on block {} tx {} ({})'.format(block_height, tx_index, e))
|
||||||
continue
|
continue
|
||||||
tx_address = None
|
tx_address = None
|
||||||
tx_token_value = 0
|
tx_token_value = 0
|
||||||
try:
|
try:
|
||||||
transfer_data = unpack_transfer(tx['data'])
|
transfer_data = ERC20.parse_transfer_request(tx['data'])
|
||||||
tx_address = transfer_data['to']
|
tx_address = transfer_data[0]
|
||||||
tx_token_value = transfer_data['amount']
|
tx_token_value = transfer_data[1]
|
||||||
except ValueError:
|
except ValueError:
|
||||||
logg.debug('not a transfer transaction, skipping {}'.format(tx))
|
logg.debug('not a transfer transaction, skipping {}'.format(tx))
|
||||||
continue
|
continue
|
||||||
if address == tx_address:
|
if address == tx_address:
|
||||||
status = StatusEnum.SENT
|
status = StatusEnum.SENT
|
||||||
try:
|
try:
|
||||||
rcpt = c.w3.eth.getTransactionReceipt(tx.hash)
|
o = receipt(tx['hash'])
|
||||||
|
rcpt = rpc.do(o)
|
||||||
if rcpt['status'] == 0:
|
if rcpt['status'] == 0:
|
||||||
pending = StatusEnum.REVERTED
|
pending = StatusEnum.REVERTED
|
||||||
else:
|
else:
|
||||||
pending = StatusEnum.SUCCESS
|
pending = StatusEnum.SUCCESS
|
||||||
except web3.exceptions.TransactionNotFound:
|
except Exception as e:
|
||||||
|
logg.error('skipping receipt lookup for {}: {}'.format(tx['hash'], e))
|
||||||
pass
|
pass
|
||||||
|
|
||||||
tx_hash_hex = tx['hash'].hex()
|
# TODO: pass through registry to validate declarator entry of token
|
||||||
|
#token = registry.by_address(tx['to'], sender_address=self.call_address)
|
||||||
token = CICRegistry.get_address(chain_spec, tx['to'])
|
token = ERC20Token(rpc, tx['to'])
|
||||||
token_symbol = token.symbol()
|
token_symbol = token.symbol
|
||||||
token_decimals = token.decimals()
|
token_decimals = token.decimals
|
||||||
times = tx_times(tx_hash_hex, chain_str)
|
times = tx_times(tx['hash'], chain_spec)
|
||||||
tx_r = {
|
tx_r = {
|
||||||
'hash': tx_hash_hex,
|
'hash': tx['hash'],
|
||||||
'sender': tx['from'],
|
'sender': tx['from'],
|
||||||
'recipient': tx_address,
|
'recipient': tx_address,
|
||||||
'source_value': tx_token_value,
|
'source_value': tx_token_value,
|
||||||
@ -121,7 +137,7 @@ def list_tx_by_bloom(bloomspec, address, chain_str):
|
|||||||
tx_r['date_created'] = times['queue']
|
tx_r['date_created'] = times['queue']
|
||||||
else:
|
else:
|
||||||
tx_r['date_created'] = times['network']
|
tx_r['date_created'] = times['network']
|
||||||
txs[tx_hash_hex] = tx_r
|
txs[tx['hash']] = tx_r
|
||||||
break
|
break
|
||||||
return txs
|
return txs
|
||||||
|
|
||||||
@ -130,7 +146,7 @@ def list_tx_by_bloom(bloomspec, address, chain_str):
|
|||||||
# TODO: DRY this with callback filter in cic_eth/runnable/manager
|
# TODO: DRY this with callback filter in cic_eth/runnable/manager
|
||||||
# TODO: Remove redundant fields from end representation (timestamp, tx_hash)
|
# TODO: Remove redundant fields from end representation (timestamp, tx_hash)
|
||||||
@celery_app.task()
|
@celery_app.task()
|
||||||
def tx_collate(tx_batches, chain_str, offset, limit, newest_first=True):
|
def tx_collate(tx_batches, chain_spec_dict, offset, limit, newest_first=True):
|
||||||
"""Merges transaction data from multiple sources and sorts them in chronological order.
|
"""Merges transaction data from multiple sources and sorts them in chronological order.
|
||||||
|
|
||||||
:param tx_batches: Transaction data inputs
|
:param tx_batches: Transaction data inputs
|
||||||
@ -147,7 +163,7 @@ def tx_collate(tx_batches, chain_str, offset, limit, newest_first=True):
|
|||||||
:rtype: list
|
:rtype: list
|
||||||
"""
|
"""
|
||||||
txs_by_block = {}
|
txs_by_block = {}
|
||||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
chain_spec = ChainSpec.from_dict(chain_spec_dict)
|
||||||
|
|
||||||
if isinstance(tx_batches, dict):
|
if isinstance(tx_batches, dict):
|
||||||
tx_batches = [tx_batches]
|
tx_batches = [tx_batches]
|
||||||
@ -158,7 +174,7 @@ def tx_collate(tx_batches, chain_str, offset, limit, newest_first=True):
|
|||||||
k = None
|
k = None
|
||||||
try:
|
try:
|
||||||
hx = strip_0x(v)
|
hx = strip_0x(v)
|
||||||
tx = unpack_signed_raw_tx(bytes.fromhex(hx), chain_spec.chain_id())
|
tx = unpack(bytes.fromhex(hx), chain_spec.chain_id())
|
||||||
txc = get_tx_cache(tx['hash'])
|
txc = get_tx_cache(tx['hash'])
|
||||||
txc['timestamp'] = int(txc['date_created'].timestamp())
|
txc['timestamp'] = int(txc['date_created'].timestamp())
|
||||||
txc['hash'] = txc['tx_hash']
|
txc['hash'] = txc['tx_hash']
|
||||||
|
@@ -3,10 +3,10 @@ import logging

 # third-party imports
 import celery
+from chainlib.chain import ChainSpec
 from hexathon import strip_0x

 # local imports
-from cic_registry.chain import ChainSpec
 from cic_eth.db import SessionBase
 from cic_eth.db.models.otx import Otx
 from cic_eth.db.models.tx import TxCache
@@ -21,7 +21,7 @@ celery_app = celery.current_app
 logg = logging.getLogger()


-def __balance_outgoing_compatible(token_address, holder_address, chain_str):
+def __balance_outgoing_compatible(token_address, holder_address):
     session = SessionBase.create_session()
     q = session.query(TxCache.from_value)
     q = q.join(Otx)
@@ -37,7 +37,7 @@ def __balance_outgoing_compatible(token_address, holder_address, chain_str):


 @celery_app.task(base=CriticalSQLAlchemyTask)
-def balance_outgoing(tokens, holder_address, chain_str):
+def balance_outgoing(tokens, holder_address, chain_spec_dict):
     """Retrieve accumulated value of unprocessed transactions sent from the given address.

     :param tokens: list of token spec dicts with addresses to retrieve balances for
@@ -49,15 +49,15 @@ def balance_outgoing(tokens, holder_address, chain_str):
     :returns: Tokens dicts with outgoing balance added
     :rtype: dict
     """
-    chain_spec = ChainSpec.from_chain_str(chain_str)
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
     for t in tokens:
-        b = __balance_outgoing_compatible(t['address'], holder_address, chain_str)
+        b = __balance_outgoing_compatible(t['address'], holder_address)
         t['balance_outgoing'] = b

     return tokens
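balance_outgoing above sums the value of still-pending outgoing transactions per token for one holder. The following is a self-contained reduction of that idea over mocked rows; field names are illustrative, the real query runs over TxCache joined with Otx.

def balance_outgoing_pending(pending_txs, token_address, holder_address):
    # Accumulate the outgoing value of every pending tx matching holder and token.
    total = 0
    for tx in pending_txs:
        if tx['sender'] != holder_address:
            continue
        if tx['source_token'] != token_address:
            continue
        total += tx['from_value']
    return total

pending = [
    {'sender': '0xaa', 'source_token': '0x01', 'from_value': 100},
    {'sender': '0xaa', 'source_token': '0x01', 'from_value': 50},
    {'sender': '0xbb', 'source_token': '0x01', 'from_value': 9},
    ]
print(balance_outgoing_pending(pending, '0x01', '0xaa'))  # 150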
|
|
||||||
|
|
||||||
def __balance_incoming_compatible(token_address, receiver_address, chain_str):
|
def __balance_incoming_compatible(token_address, receiver_address):
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
q = session.query(TxCache.to_value)
|
q = session.query(TxCache.to_value)
|
||||||
q = q.join(Otx)
|
q = q.join(Otx)
|
||||||
@ -75,7 +75,7 @@ def __balance_incoming_compatible(token_address, receiver_address, chain_str):
|
|||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def balance_incoming(tokens, receipient_address, chain_str):
|
def balance_incoming(tokens, receipient_address, chain_spec_dict):
|
||||||
"""Retrieve accumulated value of unprocessed transactions to be received by the given address.
|
"""Retrieve accumulated value of unprocessed transactions to be received by the given address.
|
||||||
|
|
||||||
:param tokens: list of token spec dicts with addresses to retrieve balances for
|
:param tokens: list of token spec dicts with addresses to retrieve balances for
|
||||||
@ -87,9 +87,9 @@ def balance_incoming(tokens, receipient_address, chain_str):
|
|||||||
:returns: Tokens dicts with outgoing balance added
|
:returns: Tokens dicts with outgoing balance added
|
||||||
:rtype: dict
|
:rtype: dict
|
||||||
"""
|
"""
|
||||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
chain_spec = ChainSpec.from_dict(chain_spec_dict)
|
||||||
for t in tokens:
|
for t in tokens:
|
||||||
b = __balance_incoming_compatible(t['address'], receipient_address, chain_str)
|
b = __balance_incoming_compatible(t['address'], receipient_address)
|
||||||
t['balance_incoming'] = b
|
t['balance_incoming'] = b
|
||||||
|
|
||||||
return tokens
|
return tokens
|
||||||
@ -107,6 +107,7 @@ def assemble_balances(balances_collection):
|
|||||||
:rtype: list of dicts
|
:rtype: list of dicts
|
||||||
"""
|
"""
|
||||||
tokens = {}
|
tokens = {}
|
||||||
|
logg.debug('received collection {}'.format(balances_collection))
|
||||||
for c in balances_collection:
|
for c in balances_collection:
|
||||||
for b in c:
|
for b in c:
|
||||||
address = b['address']
|
address = b['address']
|
||||||
|
@@ -2,12 +2,13 @@
 import logging

 # third-party imports
-import web3
 import celery
-from cic_registry.chain import ChainSpec
+from chainlib.chain import ChainSpec
+from chainlib.connection import RPCConnection
+from chainlib.eth.block import block_by_hash
+from chainlib.eth.tx import receipt

 # local imports
-from cic_eth.eth.rpc import RpcClient
 from cic_eth.db.models.otx import Otx
 from cic_eth.error import NotLocalTxError
 from cic_eth.task import CriticalSQLAlchemyAndWeb3Task
@@ -17,21 +18,21 @@ celery_app = celery.current_app
 logg = logging.getLogger()


-# TODO: This method does not belong in the _queue_ module, it operates across queue and network
-@celery_app.task(base=CriticalSQLAlchemyAndWeb3Task)
-def tx_times(tx_hash, chain_str):
-    chain_spec = ChainSpec.from_chain_str(chain_str)
-    c = RpcClient(chain_spec)
+def tx_times(tx_hash, chain_spec):
+    rpc = RPCConnection.connect(chain_spec, 'default')
     time_pair = {
         'network': None,
         'queue': None,
     }
     try:
-        rcpt = c.w3.eth.getTransactionReceipt(tx_hash)
-        block = c.w3.eth.getBlock(rcpt['blockHash'])
+        o = receipt(tx_hash)
+        r = rpc.do(o)
+        o = block_by_hash(r['block_hash'])
+        block = rpc.do(o)
         logg.debug('rcpt {}'.format(block))
         time_pair['network'] = block['timestamp']
-    except web3.exceptions.TransactionNotFound:
+    except Exception as e:
+        logg.debug('error with getting timestamp details for {}: {}'.format(tx_hash, e))
         pass

     otx = Otx.load(tx_hash)
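tx_times returns a pair of timestamps: the block ('network') time once the transaction is mined and the local queue record time otherwise. One way a caller might collapse that pair into a single display value, with made-up inputs; the preference order here is an assumption, not taken from the source.

import datetime

def effective_time(times):
    # Use the on-chain block timestamp when present, otherwise the queue record time.
    if times.get('network') != None:
        return datetime.datetime.fromtimestamp(times['network'])
    return times.get('queue')

print(effective_time({'network': 1614000000, 'queue': None}))
print(effective_time({'network': None, 'queue': datetime.datetime(2021, 2, 22, 12, 0)}))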
|
@ -3,17 +3,16 @@ import logging
|
|||||||
import time
|
import time
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
from hexathon import strip_0x
|
from hexathon import strip_0x
|
||||||
from sqlalchemy import or_
|
from sqlalchemy import or_
|
||||||
from sqlalchemy import not_
|
from sqlalchemy import not_
|
||||||
from sqlalchemy import tuple_
|
from sqlalchemy import tuple_
|
||||||
from sqlalchemy import func
|
from sqlalchemy import func
|
||||||
|
from chainlib.eth.tx import unpack
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_registry import CICRegistry
|
|
||||||
from cic_registry.chain import ChainSpec
|
|
||||||
from cic_eth.db.models.otx import Otx
|
from cic_eth.db.models.otx import Otx
|
||||||
from cic_eth.db.models.otx import OtxStateLog
|
from cic_eth.db.models.otx import OtxStateLog
|
||||||
from cic_eth.db.models.tx import TxCache
|
from cic_eth.db.models.tx import TxCache
|
||||||
@ -27,7 +26,6 @@ from cic_eth.db.enum import (
|
|||||||
dead,
|
dead,
|
||||||
)
|
)
|
||||||
from cic_eth.task import CriticalSQLAlchemyTask
|
from cic_eth.task import CriticalSQLAlchemyTask
|
||||||
from cic_eth.eth.util import unpack_signed_raw_tx # TODO: should not be in same sub-path as package that imports queue.tx
|
|
||||||
from cic_eth.error import NotLocalTxError
|
from cic_eth.error import NotLocalTxError
|
||||||
from cic_eth.error import LockedError
|
from cic_eth.error import LockedError
|
||||||
from cic_eth.db.enum import status_str
|
from cic_eth.db.enum import status_str
|
||||||
@@ -37,7 +35,7 @@ celery_app = celery.current_app
 logg = logging.getLogger()


-def create(nonce, holder_address, tx_hash, signed_tx, chain_str, obsolete_predecessors=True, session=None):
+def create(nonce, holder_address, tx_hash, signed_tx, chain_spec, obsolete_predecessors=True, session=None):
     """Create a new transaction queue record.

     :param nonce: Transaction nonce
@@ -48,13 +46,13 @@ def create(nonce, holder_address, tx_hash, signed_tx, chain_str, obsolete_predec
     :type tx_hash: str, 0x-hex
     :param signed_tx: Signed raw transaction
     :type signed_tx: str, 0x-hex
-    :param chain_str: Chain spec string representation to create transaction for
-    :type chain_str: str
+    :param chain_spec: Chain spec to create transaction for
+    :type chain_spec: ChainSpec
     :returns: transaction hash
     :rtype: str, 0x-hash
     """
     session = SessionBase.bind_session(session)
-    lock = Lock.check_aggregate(chain_str, LockEnum.QUEUE, holder_address, session=session)
+    lock = Lock.check_aggregate(str(chain_spec), LockEnum.QUEUE, holder_address, session=session)
     if lock > 0:
         SessionBase.release_session(session)
         raise LockedError(lock)
@@ -69,17 +67,26 @@ def create(nonce, holder_address, tx_hash, signed_tx, chain_str, obsolete_predec
     session.flush()

     if obsolete_predecessors:
-        # TODO: obsolete previous txs from same holder with same nonce
         q = session.query(Otx)
         q = q.join(TxCache)
         q = q.filter(Otx.nonce==nonce)
         q = q.filter(TxCache.sender==holder_address)
         q = q.filter(Otx.tx_hash!=tx_hash)
-        q = q.filter(Otx.status<=StatusEnum.SENT)
+        q = q.filter(Otx.status.op('&')(StatusBits.FINAL)==0)

         for otx in q.all():
             logg.info('otx {} obsoleted by {}'.format(otx.tx_hash, tx_hash))
-            otx.cancel(confirmed=False, session=session)
+            try:
+                otx.cancel(confirmed=False, session=session)
+            except TxStateChangeError as e:
+                logg.exception('obsolete fail: {}'.format(e))
+                session.close()
+                raise(e)
+            except Exception as e:
+                logg.exception('obsolete UNEXPECTED fail: {}'.format(e))
+                session.close()
+                raise(e)


     session.commit()
     SessionBase.release_session(session)
@@ -87,6 +94,50 @@ def create(nonce, holder_address, tx_hash, signed_tx, chain_str, obsolete_predec
     return tx_hash


+def register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=None, session=None):
+    """Signs the provided transaction, and adds it to the transaction queue cache (with status PENDING).
+
+    :param tx: Standard ethereum transaction data
+    :type tx: dict
+    :param chain_spec: Chain spec of transaction to add to queue
+    :type chain_spec: chainlib.chain.ChainSpec
+    :param queue: Task queue
+    :type queue: str
+    :param cache_task: Cache task to call with signed transaction. If None, no task will be called.
+    :type cache_task: str
+    :raises: sqlalchemy.exc.DatabaseError
+    :returns: Tuple; Transaction hash, signed raw transaction data
+    :rtype: tuple
+    """
+    logg.debug('adding queue tx {}:{} -> {}'.format(chain_spec, tx_hash_hex, tx_signed_raw_hex))
+    tx_signed_raw = bytes.fromhex(strip_0x(tx_signed_raw_hex))
+    tx = unpack(tx_signed_raw, chain_id=chain_spec.chain_id())
+
+    create(
+        tx['nonce'],
+        tx['from'],
+        tx_hash_hex,
+        tx_signed_raw_hex,
+        chain_spec,
+        session=session,
+        )
+
+    if cache_task != None:
+        logg.debug('adding cache task {} tx {}'.format(cache_task, tx_hash_hex))
+        s_cache = celery.signature(
+            cache_task,
+            [
+                tx_hash_hex,
+                tx_signed_raw_hex,
+                chain_spec.asdict(),
+            ],
+            queue=queue,
+            )
+        s_cache.apply_async()
+
+    return (tx_hash_hex, tx_signed_raw_hex,)


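The obsoletion filter in create() above now keys on a status bit mask rather than an ordered enum comparison: a queued tx is only eligible for obsoletion while its FINAL bit is unset. A toy illustration of that check; the flag values here are hypothetical, the real ones live in cic_eth.db.enum.

import enum

class StatusBits(enum.IntFlag):
    # Hypothetical flag values for illustration only.
    QUEUED = 1
    SENT = 2
    FINAL = 4
    NETWORK_ERROR = 8

def is_obsoletable(status):
    # Mirrors the SQL filter Otx.status.op('&')(StatusBits.FINAL) == 0.
    return status & StatusBits.FINAL == 0

print(is_obsoletable(StatusBits.QUEUED | StatusBits.SENT))   # True
print(is_obsoletable(StatusBits.SENT | StatusBits.FINAL))    # False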
# TODO: Replace set_* with single task for set status
|
# TODO: Replace set_* with single task for set status
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def set_sent_status(tx_hash, fail=False):
|
def set_sent_status(tx_hash, fail=False):
|
||||||
@ -109,10 +160,20 @@ def set_sent_status(tx_hash, fail=False):
|
|||||||
session.close()
|
session.close()
|
||||||
return False
|
return False
|
||||||
|
|
||||||
if fail:
|
try:
|
||||||
o.sendfail(session=session)
|
if fail:
|
||||||
else:
|
o.sendfail(session=session)
|
||||||
o.sent(session=session)
|
else:
|
||||||
|
o.sent(session=session)
|
||||||
|
except TxStateChangeError as e:
|
||||||
|
logg.exception('set sent fail: {}'.format(e))
|
||||||
|
session.close()
|
||||||
|
raise(e)
|
||||||
|
except Exception as e:
|
||||||
|
logg.exception('set sent UNEXPECED fail: {}'.format(e))
|
||||||
|
session.close()
|
||||||
|
raise(e)
|
||||||
|
|
||||||
|
|
||||||
session.commit()
|
session.commit()
|
||||||
session.close()
|
session.close()
|
||||||
@ -156,10 +217,20 @@ def set_final_status(tx_hash, block=None, fail=False):
|
|||||||
q = q.filter(Otx.tx_hash==tx_hash)
|
q = q.filter(Otx.tx_hash==tx_hash)
|
||||||
o = q.first()
|
o = q.first()
|
||||||
|
|
||||||
if fail:
|
try:
|
||||||
o.minefail(block, session=session)
|
if fail:
|
||||||
else:
|
o.minefail(block, session=session)
|
||||||
o.success(block, session=session)
|
else:
|
||||||
|
o.success(block, session=session)
|
||||||
|
session.commit()
|
||||||
|
except TxStateChangeError as e:
|
||||||
|
logg.exception('set final fail: {}'.format(e))
|
||||||
|
session.close()
|
||||||
|
raise(e)
|
||||||
|
except Exception as e:
|
||||||
|
logg.exception('set final UNEXPECED fail: {}'.format(e))
|
||||||
|
session.close()
|
||||||
|
raise(e)
|
||||||
|
|
||||||
q = session.query(Otx)
|
q = session.query(Otx)
|
||||||
q = q.join(TxCache)
|
q = q.join(TxCache)
|
||||||
@ -168,8 +239,16 @@ def set_final_status(tx_hash, block=None, fail=False):
|
|||||||
q = q.filter(Otx.tx_hash!=tx_hash)
|
q = q.filter(Otx.tx_hash!=tx_hash)
|
||||||
|
|
||||||
for otwo in q.all():
|
for otwo in q.all():
|
||||||
otwo.cancel(True, session=session)
|
try:
|
||||||
|
otwo.cancel(True, session=session)
|
||||||
|
except TxStateChangeError as e:
|
||||||
|
logg.exception('cancel non-final fail: {}'.format(e))
|
||||||
|
session.close()
|
||||||
|
raise(e)
|
||||||
|
except Exception as e:
|
||||||
|
logg.exception('cancel non-final UNEXPECTED fail: {}'.format(e))
|
||||||
|
session.close()
|
||||||
|
raise(e)
|
||||||
session.commit()
|
session.commit()
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
@ -197,12 +276,16 @@ def set_cancel(tx_hash, manual=False):
|
|||||||
|
|
||||||
session.flush()
|
session.flush()
|
||||||
|
|
||||||
if manual:
|
try:
|
||||||
o.override(session=session)
|
if manual:
|
||||||
else:
|
o.override(session=session)
|
||||||
o.cancel(session=session)
|
else:
|
||||||
|
o.cancel(session=session)
|
||||||
session.commit()
|
session.commit()
|
||||||
|
except TxStateChangeError as e:
|
||||||
|
logg.exception('set cancel fail: {}'.format(e))
|
||||||
|
except Exception as e:
|
||||||
|
logg.exception('set cancel UNEXPECTED fail: {}'.format(e))
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
return tx_hash
|
return tx_hash
|
||||||
@ -513,7 +596,7 @@ def get_nonce_tx(nonce, sender, chain_id):
|
|||||||
txs = {}
|
txs = {}
|
||||||
for r in q.all():
|
for r in q.all():
|
||||||
tx_signed_bytes = bytes.fromhex(r.signed_tx[2:])
|
tx_signed_bytes = bytes.fromhex(r.signed_tx[2:])
|
||||||
tx = unpack_signed_raw_tx(tx_signed_bytes, chain_id)
|
tx = unpack(tx_signed_bytes, chain_id)
|
||||||
if sender == None or tx['from'] == sender:
|
if sender == None or tx['from'] == sender:
|
||||||
txs[r.tx_hash] = r.signed_tx
|
txs[r.tx_hash] = r.signed_tx
|
||||||
|
|
||||||
@ -558,7 +641,7 @@ def get_paused_txs(status=None, sender=None, chain_id=0, session=None):
|
|||||||
|
|
||||||
for r in q.all():
|
for r in q.all():
|
||||||
tx_signed_bytes = bytes.fromhex(r.signed_tx[2:])
|
tx_signed_bytes = bytes.fromhex(r.signed_tx[2:])
|
||||||
tx = unpack_signed_raw_tx(tx_signed_bytes, chain_id)
|
tx = unpack(tx_signed_bytes, chain_id)
|
||||||
if sender == None or tx['from'] == sender:
|
if sender == None or tx['from'] == sender:
|
||||||
#gas += tx['gas'] * tx['gasPrice']
|
#gas += tx['gas'] * tx['gasPrice']
|
||||||
txs[r.tx_hash] = r.signed_tx
|
txs[r.tx_hash] = r.signed_tx
|
||||||
@ -664,7 +747,7 @@ def get_upcoming_tx(status=StatusEnum.READYSEND, recipient=None, before=None, ch
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
tx_signed_bytes = bytes.fromhex(o.signed_tx[2:])
|
tx_signed_bytes = bytes.fromhex(o.signed_tx[2:])
|
||||||
tx = unpack_signed_raw_tx(tx_signed_bytes, chain_id)
|
tx = unpack(tx_signed_bytes, chain_id)
|
||||||
txs[o.tx_hash] = o.signed_tx
|
txs[o.tx_hash] = o.signed_tx
|
||||||
|
|
||||||
q = session.query(TxCache)
|
q = session.query(TxCache)
|
||||||
|
@ -1,86 +0,0 @@
|
|||||||
# standard imports
|
|
||||||
import logging
|
|
||||||
import copy
|
|
||||||
|
|
||||||
# external imports
|
|
||||||
from cic_registry import CICRegistry
|
|
||||||
from eth_token_index import TokenUniqueSymbolIndex
|
|
||||||
from eth_accounts_index import AccountRegistry
|
|
||||||
from chainlib.chain import ChainSpec
|
|
||||||
from cic_registry.chain import ChainRegistry
|
|
||||||
from cic_registry.helper.declarator import DeclaratorOracleAdapter
|
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class TokenOracle:
|
|
||||||
|
|
||||||
def __init__(self, conn, chain_spec, registry):
|
|
||||||
self.tokens = []
|
|
||||||
self.chain_spec = chain_spec
|
|
||||||
self.registry = registry
|
|
||||||
|
|
||||||
token_registry_contract = CICRegistry.get_contract(chain_spec, 'TokenRegistry', 'Registry')
|
|
||||||
self.getter = TokenUniqueSymbolIndex(conn, token_registry_contract.address())
|
|
||||||
|
|
||||||
|
|
||||||
def get_tokens(self):
|
|
||||||
token_count = self.getter.count()
|
|
||||||
if token_count == len(self.tokens):
|
|
||||||
return self.tokens
|
|
||||||
|
|
||||||
for i in range(len(self.tokens), token_count):
|
|
||||||
token_address = self.getter.get_index(i)
|
|
||||||
t = self.registry.get_address(self.chain_spec, token_address)
|
|
||||||
token_symbol = t.symbol()
|
|
||||||
self.tokens.append(t)
|
|
||||||
|
|
||||||
logg.debug('adding token idx {} symbol {} address {}'.format(i, token_symbol, token_address))
|
|
||||||
|
|
||||||
return copy.copy(self.tokens)
|
|
||||||
|
|
||||||
|
|
||||||
class AccountsOracle:
|
|
||||||
|
|
||||||
def __init__(self, conn, chain_spec, registry):
|
|
||||||
self.accounts = []
|
|
||||||
self.chain_spec = chain_spec
|
|
||||||
self.registry = registry
|
|
||||||
|
|
||||||
accounts_registry_contract = CICRegistry.get_contract(chain_spec, 'AccountRegistry', 'Registry')
|
|
||||||
self.getter = AccountRegistry(conn, accounts_registry_contract.address())
|
|
||||||
|
|
||||||
|
|
||||||
def get_accounts(self):
|
|
||||||
accounts_count = self.getter.count()
|
|
||||||
if accounts_count == len(self.accounts):
|
|
||||||
return self.accounts
|
|
||||||
|
|
||||||
for i in range(len(self.accounts), accounts_count):
|
|
||||||
account = self.getter.get_index(i)
|
|
||||||
self.accounts.append(account)
|
|
||||||
logg.debug('adding account {}'.format(account))
|
|
||||||
|
|
||||||
return copy.copy(self.accounts)
|
|
||||||
|
|
||||||
|
|
||||||
def init_registry(config, w3):
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
|
||||||
CICRegistry.init(w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
|
||||||
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
|
||||||
|
|
||||||
chain_registry = ChainRegistry(chain_spec)
|
|
||||||
CICRegistry.add_chain_registry(chain_registry, True)
|
|
||||||
|
|
||||||
declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', interface='Declarator')
|
|
||||||
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
|
||||||
if trusted_addresses_src == None:
|
|
||||||
raise ValueError('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
|
||||||
trusted_addresses = trusted_addresses_src.split(',')
|
|
||||||
for address in trusted_addresses:
|
|
||||||
logg.info('using trusted address {}'.format(address))
|
|
||||||
|
|
||||||
oracle = DeclaratorOracleAdapter(declarator.contract, trusted_addresses)
|
|
||||||
chain_registry.add_oracle(oracle, 'naive_erc20_oracle')
|
|
||||||
|
|
||||||
return CICRegistry
|
|
@@ -1,17 +1,19 @@
 #!/usr/bin/python
-#import socket
 import sys
 import os
 import logging
 import uuid
 import json
+import argparse

+# external imports
+import celery
+import confini
+import redis
 from xdg.BaseDirectory import xdg_config_home

-import celery
+# local imports
 from cic_eth.api import Api
-import confini
-import argparse
-import redis

 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger('create_account_script')
@@ -50,6 +52,7 @@ args_override = {
     'REDIS_DB': getattr(args, 'redis_db'),
 }
 config.dict_override(args_override, 'cli')

 celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))


@ -3,34 +3,28 @@ import argparse
|
|||||||
import sys
|
import sys
|
||||||
import os
|
import os
|
||||||
import logging
|
import logging
|
||||||
import re
|
|
||||||
|
|
||||||
# third-party imports
|
# third-party imports
|
||||||
import confini
|
import confini
|
||||||
import celery
|
import celery
|
||||||
import web3
|
from chainlib.chain import ChainSpec
|
||||||
from cic_registry.chain import ChainSpec
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
from cic_registry import zero_address
|
from chainlib.eth.address import is_checksum_address
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api import AdminApi
|
from cic_eth.api import AdminApi
|
||||||
from cic_eth.eth.rpc import RpcClient
|
|
||||||
from cic_eth.db.enum import LockEnum
|
from cic_eth.db.enum import LockEnum
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
logging.getLogger('web3').setLevel(logging.WARNING)
|
default_format = 'terminal'
|
||||||
logging.getLogger('urllib3').setLevel(logging.WARNING)
|
|
||||||
|
|
||||||
|
|
||||||
default_abi_dir = '/usr/share/local/cic/solidity/abi'
|
|
||||||
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
|
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser()
|
argparser = argparse.ArgumentParser()
|
||||||
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
|
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
|
||||||
argparser.add_argument('-r', '--registry-address', type=str, help='CIC registry address')
|
argparser.add_argument('-r', '--registry-address', type=str, help='CIC registry address')
|
||||||
argparser.add_argument('-f', '--format', dest='f', default='terminal', type=str, help='Output format')
|
argparser.add_argument('-f', '--format', dest='f', default=default_format, type=str, help='Output format')
|
||||||
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
|
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
|
||||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||||
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
@ -40,7 +34,7 @@ argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
|||||||
|
|
||||||
def process_lock_args(argparser):
|
def process_lock_args(argparser):
|
||||||
argparser.add_argument('flags', type=str, help='Flags to manipulate')
|
argparser.add_argument('flags', type=str, help='Flags to manipulate')
|
||||||
argparser.add_argument('address', default=zero_address, nargs='?', type=str, help='Ethereum address to unlock,')
|
argparser.add_argument('address', default=ZERO_ADDRESS, nargs='?', type=str, help='Ethereum address to unlock,')
|
||||||
|
|
||||||
sub = argparser.add_subparsers()
|
sub = argparser.add_subparsers()
|
||||||
sub.dest = "command"
|
sub.dest = "command"
|
||||||
@ -69,30 +63,12 @@ config.censor('PASSWORD', 'DATABASE')
|
|||||||
config.censor('PASSWORD', 'SSL')
|
config.censor('PASSWORD', 'SSL')
|
||||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
|
|
||||||
re_websocket = re.compile('^wss?://')
|
|
||||||
re_http = re.compile('^https?://')
|
|
||||||
blockchain_provider = config.get('ETH_PROVIDER')
|
|
||||||
if re.match(re_websocket, blockchain_provider) != None:
|
|
||||||
blockchain_provider = web3.Web3.WebsocketProvider(blockchain_provider)
|
|
||||||
elif re.match(re_http, blockchain_provider) != None:
|
|
||||||
blockchain_provider = web3.Web3.HTTPProvider(blockchain_provider)
|
|
||||||
else:
|
|
||||||
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
|
||||||
|
|
||||||
def web3_constructor():
|
|
||||||
w3 = web3.Web3(blockchain_provider)
|
|
||||||
return (blockchain_provider, w3)
|
|
||||||
RpcClient.set_constructor(web3_constructor)
|
|
||||||
|
|
||||||
|
|
||||||
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||||
|
|
||||||
queue = args.q
|
queue = args.q
|
||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
chain_str = str(chain_spec)
|
admin_api = AdminApi(None)
|
||||||
c = RpcClient(chain_spec)
|
|
||||||
admin_api = AdminApi(c)
|
|
||||||
|
|
||||||
|
|
||||||
def lock_names_to_flag(s):
|
def lock_names_to_flag(s):
|
||||||
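The lock and unlock subcommands take a comma-separated list of lock names and fold them into a single LockEnum bitmask through lock_names_to_flag. The helper body is not part of this hunk; the sketch below only illustrates the idea, assuming LockEnum is an IntEnum of single-bit members (the member names and values shown are placeholders, not necessarily those defined in cic_eth.db.enum).

import enum


# Illustrative stand-in for cic_eth.db.enum.LockEnum; names and bit values are
# assumptions made for this sketch only.
class LockEnum(enum.IntEnum):
    QUEUE = 1
    SEND = 2
    CREATE = 4
    ALL = 7


def lock_names_to_flag(s):
    # fold e.g. 'queue,send' into QUEUE | SEND
    flags = 0
    for name in s.split(','):
        flags |= LockEnum[name.strip().upper()]
    return flags


print(lock_names_to_flag('queue,send'))  # 3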
@ -108,14 +84,14 @@ def lock_names_to_flag(s):
|
|||||||
def main():
|
def main():
|
||||||
if args.command == 'unlock':
|
if args.command == 'unlock':
|
||||||
flags = lock_names_to_flag(args.flags)
|
flags = lock_names_to_flag(args.flags)
|
||||||
if not web3.Web3.isChecksumAddress(args.address):
|
if not is_checksum_address(args.address):
|
||||||
raise ValueError('Invalid checksum address {}'.format(args.address))
|
raise ValueError('Invalid checksum address {}'.format(args.address))
|
||||||
|
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.admin.ctrl.unlock',
|
'cic_eth.admin.ctrl.unlock',
|
||||||
[
|
[
|
||||||
None,
|
None,
|
||||||
chain_str,
|
chain_spec.asdict(),
|
||||||
args.address,
|
args.address,
|
||||||
flags,
|
flags,
|
||||||
],
|
],
|
||||||
@ -127,14 +103,14 @@ def main():
|
|||||||
|
|
||||||
if args.command == 'lock':
|
if args.command == 'lock':
|
||||||
flags = lock_names_to_flag(args.flags)
|
flags = lock_names_to_flag(args.flags)
|
||||||
if not web3.Web3.isChecksumAddress(args.address):
|
if not is_checksum_address(args.address):
|
||||||
raise ValueError('Invalid checksum address {}'.format(args.address))
|
raise ValueError('Invalid checksum address {}'.format(args.address))
|
||||||
|
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.admin.ctrl.lock',
|
'cic_eth.admin.ctrl.lock',
|
||||||
[
|
[
|
||||||
None,
|
None,
|
||||||
chain_str,
|
chain_spec.asdict(),
|
||||||
args.address,
|
args.address,
|
||||||
flags,
|
flags,
|
||||||
],
|
],
|
||||||
|
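Both CLI commands now hand the chain spec to the cic_eth.admin.ctrl tasks as a plain dict (chain_spec.asdict()) instead of the string form, and the tasks rebuild it with ChainSpec.from_dict. A hedged sketch of that round trip, assuming chainlib is installed; the chain string below is a placeholder for the deployment's CIC_CHAIN_SPEC value.

from chainlib.chain import ChainSpec

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')  # placeholder chain string

# what the CLI puts into the celery signature arguments
chain_spec_dict = chain_spec.asdict()

# what the task does on the receiving side before using the spec
restored = ChainSpec.from_dict(chain_spec_dict)
assert str(restored) == str(chain_spec)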
@ -11,16 +11,15 @@ import datetime
|
|||||||
# third-party imports
|
# third-party imports
|
||||||
import confini
|
import confini
|
||||||
import celery
|
import celery
|
||||||
import web3
|
from cic_eth_registry import CICRegistry
|
||||||
from web3 import HTTPProvider, WebsocketProvider
|
from chainlib.chain import ChainSpec
|
||||||
from cic_registry import CICRegistry
|
|
||||||
from cic_registry.chain import ChainSpec
|
|
||||||
from chainlib.eth.tx import unpack
|
from chainlib.eth.tx import unpack
|
||||||
|
from chainlib.connection import RPCConnection
|
||||||
|
from chainsyncer.error import SyncDone
|
||||||
from hexathon import strip_0x
|
from hexathon import strip_0x
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
import cic_eth
|
import cic_eth
|
||||||
from cic_eth.eth import RpcClient
|
|
||||||
from cic_eth.db import SessionBase
|
from cic_eth.db import SessionBase
|
||||||
from cic_eth.db.enum import StatusEnum
|
from cic_eth.db.enum import StatusEnum
|
||||||
from cic_eth.db.enum import StatusBits
|
from cic_eth.db.enum import StatusBits
|
||||||
@ -31,7 +30,6 @@ from cic_eth.queue.tx import (
|
|||||||
set_dequeue,
|
set_dequeue,
|
||||||
)
|
)
|
||||||
from cic_eth.admin.ctrl import lock_send
|
from cic_eth.admin.ctrl import lock_send
|
||||||
from cic_eth.sync.error import LoopDone
|
|
||||||
from cic_eth.eth.tx import send as task_tx_send
|
from cic_eth.eth.tx import send as task_tx_send
|
||||||
from cic_eth.error import (
|
from cic_eth.error import (
|
||||||
PermanentTxError,
|
PermanentTxError,
|
||||||
@ -42,16 +40,14 @@ from cic_eth.error import (
|
|||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
|
|
||||||
|
|
||||||
|
|
||||||
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||||
|
argparser.add_argument('-p', '--provider', default='http://localhost:8545', dest='p', type=str, help='rpc provider')
|
||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||||
|
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
argparser.add_argument('-v', help='be verbose', action='store_true')
|
argparser.add_argument('-v', help='be verbose', action='store_true')
|
||||||
@ -68,6 +64,11 @@ os.makedirs(config_dir, 0o777, True)
|
|||||||
config = confini.Config(config_dir, args.env_prefix)
|
config = confini.Config(config_dir, args.env_prefix)
|
||||||
config.process()
|
config.process()
|
||||||
# override args
|
# override args
|
||||||
|
args_override = {
|
||||||
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
|
'ETH_PROVIDER': getattr(args, 'p'),
|
||||||
|
}
|
||||||
|
config.dict_override(args_override, 'cli flag')
|
||||||
config.censor('PASSWORD', 'DATABASE')
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
config.censor('PASSWORD', 'SSL')
|
config.censor('PASSWORD', 'SSL')
|
||||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
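The dispatcher now exposes -p and -i flags and folds them over the confini-loaded values with config.dict_override, so a CLI flag, when given, wins over the config file. A stdlib-only sketch of that precedence; the dict stands in for the loaded config and the confini API is not reproduced here.

import argparse

# stand-in for the values confini loaded from the config directory
config = {
    'CIC_CHAIN_SPEC': 'evm:bloxberg:8996',
    'ETH_PROVIDER': 'http://localhost:8545',
}

argparser = argparse.ArgumentParser()
argparser.add_argument('-p', '--provider', dest='p', type=str)
argparser.add_argument('-i', '--chain-spec', dest='i', type=str)
args = argparser.parse_args(['-p', 'http://rpc.example.org:8545'])

args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
    'ETH_PROVIDER': getattr(args, 'p'),
}
# mimic config.dict_override: only flags that were actually set replace config entries
for k, v in args_override.items():
    if v is not None:
        config[k] = v

print(config['ETH_PROVIDER'])  # http://rpc.example.org:8545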
@ -79,25 +80,12 @@ queue = args.q
|
|||||||
dsn = dsn_from_config(config)
|
dsn = dsn_from_config(config)
|
||||||
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
|
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
|
|
||||||
re_websocket = re.compile('^wss?://')
|
RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, tag='default')
|
||||||
re_http = re.compile('^https?://')
|
|
||||||
blockchain_provider = config.get('ETH_PROVIDER')
|
|
||||||
if re.match(re_websocket, blockchain_provider) != None:
|
|
||||||
blockchain_provider = WebsocketProvider(blockchain_provider)
|
|
||||||
elif re.match(re_http, blockchain_provider) != None:
|
|
||||||
blockchain_provider = HTTPProvider(blockchain_provider)
|
|
||||||
else:
|
|
||||||
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
|
||||||
|
|
||||||
def web3_constructor():
|
|
||||||
w3 = web3.Web3(blockchain_provider)
|
|
||||||
return (blockchain_provider, w3)
|
|
||||||
RpcClient.set_constructor(web3_constructor)
|
|
||||||
|
|
||||||
run = True
|
run = True
|
||||||
|
|
||||||
|
|
||||||
class DispatchSyncer:
|
class DispatchSyncer:
|
||||||
|
|
||||||
yield_delay = 0.005
|
yield_delay = 0.005
|
||||||
@ -117,7 +105,6 @@ class DispatchSyncer:
|
|||||||
chain_str = str(self.chain_spec)
|
chain_str = str(self.chain_spec)
|
||||||
for k in txs.keys():
|
for k in txs.keys():
|
||||||
tx_raw = txs[k]
|
tx_raw = txs[k]
|
||||||
#tx = unpack_signed_raw_tx_hex(tx_raw, self.chain_spec.chain_id())
|
|
||||||
tx_raw_bytes = bytes.fromhex(strip_0x(tx_raw))
|
tx_raw_bytes = bytes.fromhex(strip_0x(tx_raw))
|
||||||
tx = unpack(tx_raw_bytes, self.chain_spec.chain_id())
|
tx = unpack(tx_raw_bytes, self.chain_spec.chain_id())
|
||||||
|
|
||||||
@ -131,7 +118,7 @@ class DispatchSyncer:
|
|||||||
'cic_eth.admin.ctrl.check_lock',
|
'cic_eth.admin.ctrl.check_lock',
|
||||||
[
|
[
|
||||||
[tx_raw],
|
[tx_raw],
|
||||||
chain_str,
|
self.chain_spec.asdict(),
|
||||||
LockEnum.QUEUE,
|
LockEnum.QUEUE,
|
||||||
tx['from'],
|
tx['from'],
|
||||||
],
|
],
|
||||||
@ -140,7 +127,7 @@ class DispatchSyncer:
|
|||||||
s_send = celery.signature(
|
s_send = celery.signature(
|
||||||
'cic_eth.eth.tx.send',
|
'cic_eth.eth.tx.send',
|
||||||
[
|
[
|
||||||
chain_str,
|
self.chain_spec.asdict(),
|
||||||
],
|
],
|
||||||
queue=queue,
|
queue=queue,
|
||||||
)
|
)
|
||||||
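For each raw transaction the DispatchSyncer now builds a check_lock signature that links into the send task, and both carry the chain spec as a dict. A hedged sketch of that chaining; the broker URL, chain spec keys, lock flag value and addresses are placeholders, not values taken from a running deployment.

import celery

celery_app = celery.Celery(broker='redis://localhost:6379', backend='redis://localhost:6379')

queue = 'cic-eth'
chain_spec_dict = {'arch': 'evm', 'fork': 'bloxberg', 'network_id': 8996}  # illustrative keys only
tx_raw = '0x00'              # placeholder signed raw tx
tx_sender = '0x' + '00' * 20

s_check = celery.signature(
    'cic_eth.admin.ctrl.check_lock',
    [
        [tx_raw],
        chain_spec_dict,
        8,               # stands in for LockEnum.QUEUE; the numeric value is an assumption
        tx_sender,
    ],
    queue=queue,
)
s_send = celery.signature(
    'cic_eth.eth.tx.send',
    [
        chain_spec_dict,
    ],
    queue=queue,
)
s_check.link(s_send)
# s_check.apply_async()  # would enqueue the chain once a broker and workers are available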
@ -165,17 +152,11 @@ class DispatchSyncer:
|
|||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
|
||||||
c = RpcClient(chain_spec)
|
|
||||||
|
|
||||||
CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
|
||||||
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
|
||||||
|
|
||||||
syncer = DispatchSyncer(chain_spec)
|
syncer = DispatchSyncer(chain_spec)
|
||||||
|
conn = RPCConnection.connect(chain_spec, 'default')
|
||||||
try:
|
try:
|
||||||
syncer.loop(c.w3, float(config.get('DISPATCHER_LOOP_INTERVAL')))
|
syncer.loop(conn, float(config.get('DISPATCHER_LOOP_INTERVAL')))
|
||||||
except LoopDone as e:
|
except SyncDone as e:
|
||||||
sys.stderr.write("dispatcher done at block {}\n".format(e))
|
sys.stderr.write("dispatcher done at block {}\n".format(e))
|
||||||
|
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
|
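The explicit web3 provider plumbing is gone from the dispatcher: chainlib's RPCConnection gets a location registered per chain spec at module load, and main() connects by tag when it needs a handle. A hedged sketch of the pattern; the URL and chain string are placeholders, and no network traffic happens until .do() is called on the handle.

from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')  # placeholder
RPCConnection.register_location('http://localhost:8545', chain_spec, tag='default')

# later, inside main() or a syncer loop
conn = RPCConnection.connect(chain_spec, 'default')
# requests built with chainlib helpers are then executed with conn.do(request)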
@ -2,29 +2,61 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
# third-party imports
|
# third-party imports
|
||||||
import web3
|
|
||||||
import celery
|
import celery
|
||||||
from cic_registry.error import UnknownContractError
|
from cic_eth_registry.error import UnknownContractError
|
||||||
from chainlib.status import Status as TxStatus
|
from chainlib.status import Status as TxStatus
|
||||||
from chainlib.eth.address import to_checksum
|
from chainlib.eth.address import to_checksum_address
|
||||||
|
from chainlib.eth.error import RequestMismatchException
|
||||||
from chainlib.eth.constant import ZERO_ADDRESS
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
|
from chainlib.eth.erc20 import ERC20
|
||||||
from hexathon import strip_0x
|
from hexathon import strip_0x
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from .base import SyncFilter
|
from .base import SyncFilter
|
||||||
from cic_eth.eth.token import (
|
from cic_eth.eth.meta import ExtendedTx
|
||||||
unpack_transfer,
|
|
||||||
unpack_transferfrom,
|
|
||||||
)
|
|
||||||
from cic_eth.eth.account import unpack_gift
|
|
||||||
from cic_eth.eth.token import ExtendedTx
|
|
||||||
from .base import SyncFilter
|
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
transfer_method_signature = 'a9059cbb' # keccak256(transfer(address,uint256))
|
|
||||||
transferfrom_method_signature = '23b872dd' # keccak256(transferFrom(address,address,uint256))
|
def parse_transfer(tx):
|
||||||
giveto_method_signature = '63e4bff4' # keccak256(giveTo(address))
|
r = ERC20.parse_transfer_request(tx.payload)
|
||||||
|
transfer_data = {}
|
||||||
|
transfer_data['to'] = r[0]
|
||||||
|
transfer_data['value'] = r[1]
|
||||||
|
transfer_data['from'] = tx['from']
|
||||||
|
transfer_data['token_address'] = tx['to']
|
||||||
|
return ('transfer', transfer_data)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_transferfrom(tx):
|
||||||
|
r = ERC20.parse_transfer_from_request(tx.payload)
|
||||||
|
transfer_data = {}
|
||||||
|
transfer_data['from'] = r[0]
|
||||||
|
transfer_data['to'] = r[1]
|
||||||
|
transfer_data['value'] = r[2]
|
||||||
|
transfer_data['token_address'] = tx['to']
|
||||||
|
return ('transferfrom', transfer_data)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_giftto(tx):
|
||||||
|
# TODO: broken
|
||||||
|
logg.error('broken')
|
||||||
|
return (None, None)
|
||||||
|
transfer_data = unpack_gift(tx.payload)
|
||||||
|
transfer_data['from'] = tx.inputs[0]
|
||||||
|
transfer_data['value'] = 0
|
||||||
|
transfer_data['token_address'] = ZERO_ADDRESS
|
||||||
|
# TODO: would be better to query the gift amount from the block state
|
||||||
|
for l in tx.logs:
|
||||||
|
topics = l['topics']
|
||||||
|
logg.debug('topics {}'.format(topics))
|
||||||
|
if strip_0x(topics[0]) == '45c201a59ac545000ead84f30b2db67da23353aa1d58ac522c48505412143ffa':
|
||||||
|
#transfer_data['value'] = web3.Web3.toInt(hexstr=strip_0x(l['data']))
|
||||||
|
transfer_data['value'] = int.from_bytes(bytes.fromhex(strip_0x(l['data'])), 'big')
|
||||||
|
#token_address_bytes = topics[2][32-20:]
|
||||||
|
token_address = strip_0x(topics[2])[64-40:]
|
||||||
|
transfer_data['token_address'] = to_checksum_address(token_address)
|
||||||
|
return ('tokengift', transfer_data)
|
||||||
|
|
||||||
|
|
||||||
class CallbackFilter(SyncFilter):
|
class CallbackFilter(SyncFilter):
|
||||||
@ -66,35 +98,23 @@ class CallbackFilter(SyncFilter):
|
|||||||
def parse_data(self, tx):
|
def parse_data(self, tx):
|
||||||
transfer_type = None
|
transfer_type = None
|
||||||
transfer_data = None
|
transfer_data = None
|
||||||
|
# TODO: what's with the mix of attributes and dict keys
|
||||||
logg.debug('have payload {}'.format(tx.payload))
|
logg.debug('have payload {}'.format(tx.payload))
|
||||||
method_signature = tx.payload[:8]
|
method_signature = tx.payload[:8]
|
||||||
|
|
||||||
logg.debug('tx status {}'.format(tx.status))
|
logg.debug('tx status {}'.format(tx.status))
|
||||||
if method_signature == transfer_method_signature:
|
|
||||||
transfer_data = unpack_transfer(tx.payload)
|
|
||||||
transfer_data['from'] = tx['from']
|
|
||||||
transfer_data['token_address'] = tx['to']
|
|
||||||
|
|
||||||
elif method_signature == transferfrom_method_signature:
|
for parser in [
|
||||||
transfer_type = 'transferfrom'
|
parse_transfer,
|
||||||
transfer_data = unpack_transferfrom(tx.payload)
|
parse_transferfrom,
|
||||||
transfer_data['token_address'] = tx['to']
|
parse_giftto,
|
||||||
|
]:
|
||||||
|
try:
|
||||||
|
(transfer_type, transfer_data) = parser(tx)
|
||||||
|
break
|
||||||
|
except RequestMismatchException:
|
||||||
|
continue
|
||||||
|
|
||||||
# TODO: do not rely on logs here
|
|
||||||
elif method_signature == giveto_method_signature:
|
|
||||||
transfer_type = 'tokengift'
|
|
||||||
transfer_data = unpack_gift(tx.payload)
|
|
||||||
transfer_data['from'] = tx.inputs[0]
|
|
||||||
transfer_data['value'] = 0
|
|
||||||
transfer_data['token_address'] = ZERO_ADDRESS
|
|
||||||
for l in tx.logs:
|
|
||||||
topics = l['topics']
|
|
||||||
logg.debug('topixx {}'.format(topics))
|
|
||||||
if strip_0x(topics[0]) == '45c201a59ac545000ead84f30b2db67da23353aa1d58ac522c48505412143ffa':
|
|
||||||
transfer_data['value'] = web3.Web3.toInt(hexstr=strip_0x(l['data']))
|
|
||||||
#token_address_bytes = topics[2][32-20:]
|
|
||||||
token_address = strip_0x(topics[2])[64-40:]
|
|
||||||
transfer_data['token_address'] = to_checksum(token_address)
|
|
||||||
|
|
||||||
logg.debug('resolved method {}'.format(transfer_type))
|
logg.debug('resolved method {}'.format(transfer_type))
|
||||||
|
|
||||||
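parse_data() now walks a list of parser callables and keeps the result of the first one that does not raise RequestMismatchException. A stdlib-only sketch of that dispatch pattern; the exception class below stands in for chainlib.eth.error.RequestMismatchException and the payload checks are reduced to bare selector prefixes.

class RequestMismatchException(Exception):
    pass


def parse_transfer(payload):
    # keccak256('transfer(address,uint256)') starts with a9059cbb
    if not payload.startswith('a9059cbb'):
        raise RequestMismatchException(payload[:8])
    return ('transfer', {'selector': payload[:8]})


def parse_transferfrom(payload):
    # keccak256('transferFrom(address,address,uint256)') starts with 23b872dd
    if not payload.startswith('23b872dd'):
        raise RequestMismatchException(payload[:8])
    return ('transferfrom', {'selector': payload[:8]})


def parse_data(payload):
    transfer_type = None
    transfer_data = None
    for parser in [parse_transfer, parse_transferfrom]:
        try:
            (transfer_type, transfer_data) = parser(payload)
            break
        except RequestMismatchException:
            continue
    return (transfer_type, transfer_data)


print(parse_data('23b872dd' + '00' * 96)[0])  # transferfrom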
@ -105,8 +125,6 @@ class CallbackFilter(SyncFilter):
|
|||||||
|
|
||||||
|
|
||||||
def filter(self, conn, block, tx, db_session=None):
|
def filter(self, conn, block, tx, db_session=None):
|
||||||
chain_str = str(self.chain_spec)
|
|
||||||
|
|
||||||
transfer_data = None
|
transfer_data = None
|
||||||
transfer_type = None
|
transfer_type = None
|
||||||
try:
|
try:
|
||||||
@ -122,11 +140,10 @@ class CallbackFilter(SyncFilter):
|
|||||||
logg.debug('checking callbacks filter input {}'.format(tx.payload[:8]))
|
logg.debug('checking callbacks filter input {}'.format(tx.payload[:8]))
|
||||||
|
|
||||||
if transfer_data != None:
|
if transfer_data != None:
|
||||||
logg.debug('wtfoo {}'.format(transfer_data))
|
|
||||||
token_symbol = None
|
token_symbol = None
|
||||||
result = None
|
result = None
|
||||||
try:
|
try:
|
||||||
tokentx = ExtendedTx(tx.hash, self.chain_spec)
|
tokentx = ExtendedTx(conn, tx.hash, self.chain_spec)
|
||||||
tokentx.set_actors(transfer_data['from'], transfer_data['to'], self.trusted_addresses)
|
tokentx.set_actors(transfer_data['from'], transfer_data['to'], self.trusted_addresses)
|
||||||
tokentx.set_tokens(transfer_data['token_address'], transfer_data['value'])
|
tokentx.set_tokens(transfer_data['token_address'], transfer_data['value'])
|
||||||
if transfer_data['status'] == 0:
|
if transfer_data['status'] == 0:
|
||||||
|
@ -2,7 +2,6 @@
|
|||||||
import logging
|
import logging
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
from cic_registry.chain import ChainSpec
|
|
||||||
from hexathon import add_0x
|
from hexathon import add_0x
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
@ -11,10 +10,10 @@ from cic_eth.db.models.base import SessionBase
|
|||||||
from cic_eth.db.models.tx import TxCache
|
from cic_eth.db.models.tx import TxCache
|
||||||
from cic_eth.db.models.otx import Otx
|
from cic_eth.db.models.otx import Otx
|
||||||
from cic_eth.queue.tx import get_paused_txs
|
from cic_eth.queue.tx import get_paused_txs
|
||||||
from cic_eth.eth.task import create_check_gas_and_send_task
|
from cic_eth.eth.gas import create_check_gas_task
|
||||||
from .base import SyncFilter
|
from .base import SyncFilter
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
|
|
||||||
class GasFilter(SyncFilter):
|
class GasFilter(SyncFilter):
|
||||||
@ -45,9 +44,9 @@ class GasFilter(SyncFilter):
|
|||||||
|
|
||||||
logg.info('resuming gas-in-waiting txs for {}'.format(r[0]))
|
logg.info('resuming gas-in-waiting txs for {}'.format(r[0]))
|
||||||
if len(txs) > 0:
|
if len(txs) > 0:
|
||||||
s = create_check_gas_and_send_task(
|
s = create_check_gas_task(
|
||||||
list(txs.values()),
|
list(txs.values()),
|
||||||
str(self.chain_spec),
|
self.chain_spec,
|
||||||
r[0],
|
r[0],
|
||||||
0,
|
0,
|
||||||
tx_hashes_hex=list(txs.keys()),
|
tx_hashes_hex=list(txs.keys()),
|
||||||
|
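GasFilter resumes gas-in-waiting transactions per sender: the paused txs of one address are collected and handed to a single gas check task, which now receives the chain spec object rather than its string form. A stdlib-only illustration of the per-sender batching; the data shapes are assumptions, not the cic_eth queue API.

# stand-in for get_paused_txs(): tx_hash -> (sender, signed_raw_tx)
paused_txs = {
    '0xhash1': ('0xSenderA', '0xraw1'),
    '0xhash2': ('0xSenderA', '0xraw2'),
    '0xhash3': ('0xSenderB', '0xraw3'),
}

by_sender = {}
for tx_hash, (sender, raw_tx) in paused_txs.items():
    by_sender.setdefault(sender, {})[tx_hash] = raw_tx

for sender, txs in by_sender.items():
    if len(txs) > 0:
        # this is the point where the real filter calls create_check_gas_task()
        print('gas check for {}: {}'.format(sender, list(txs.keys())))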
@ -3,7 +3,7 @@ import logging
|
|||||||
|
|
||||||
# third-party imports
|
# third-party imports
|
||||||
import celery
|
import celery
|
||||||
from chainlib.eth.address import to_checksum
|
from chainlib.eth.address import to_checksum_address
|
||||||
from hexathon import (
|
from hexathon import (
|
||||||
add_0x,
|
add_0x,
|
||||||
strip_0x,
|
strip_0x,
|
||||||
@ -12,9 +12,9 @@ from hexathon import (
|
|||||||
# local imports
|
# local imports
|
||||||
from .base import SyncFilter
|
from .base import SyncFilter
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd' # keccak256(AccountAdded(address,uint256))
|
account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd'
|
||||||
|
|
||||||
|
|
||||||
class RegistrationFilter(SyncFilter):
|
class RegistrationFilter(SyncFilter):
|
||||||
@ -32,7 +32,7 @@ class RegistrationFilter(SyncFilter):
|
|||||||
# TODO: use abi conversion method instead
|
# TODO: use abi conversion method instead
|
||||||
|
|
||||||
address_hex = strip_0x(l['topics'][1])[64-40:]
|
address_hex = strip_0x(l['topics'][1])[64-40:]
|
||||||
address = to_checksum(add_0x(address_hex))
|
address = to_checksum_address(add_0x(address_hex))
|
||||||
logg.info('request token gift to {}'.format(address))
|
logg.info('request token gift to {}'.format(address))
|
||||||
s_nonce = celery.signature(
|
s_nonce = celery.signature(
|
||||||
'cic_eth.eth.tx.reserve_nonce',
|
'cic_eth.eth.tx.reserve_nonce',
|
||||||
@ -44,7 +44,7 @@ class RegistrationFilter(SyncFilter):
|
|||||||
s_gift = celery.signature(
|
s_gift = celery.signature(
|
||||||
'cic_eth.eth.account.gift',
|
'cic_eth.eth.account.gift',
|
||||||
[
|
[
|
||||||
str(self.chain_spec),
|
self.chain_spec.asdict(),
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
|
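The registration filter pulls the registered account out of the event's indexed topic: the topic is a 32-byte word with the address left-padded, so the address is the last 40 hex characters, hence the [64-40:] slice before checksumming. A stdlib-only sketch of that slice arithmetic; the checksum step done by to_checksum_address is left out.

def strip_0x(s):
    # local helper standing in for hexathon.strip_0x
    return s[2:] if s.startswith('0x') else s


topic = '0x' + '00' * 12 + 'deadbeef' * 5    # 32-byte topic word, address in the low 20 bytes

address_hex = strip_0x(topic)[64 - 40:]      # keep the last 40 hex chars (20 bytes)
address = '0x' + address_hex                 # the real code wraps this in to_checksum_address(add_0x(...))
print(address)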
@ -7,41 +7,29 @@ from hexathon import (
|
|||||||
strip_0x,
|
strip_0x,
|
||||||
add_0x,
|
add_0x,
|
||||||
)
|
)
|
||||||
from chainlib.eth.address import to_checksum
|
from chainlib.eth.address import to_checksum_address
|
||||||
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
|
from chainlib.eth.contract import (
|
||||||
|
ABIContractType,
|
||||||
|
abi_decode_single,
|
||||||
|
)
|
||||||
|
from cic_eth_registry import CICRegistry
|
||||||
|
from erc20_transfer_authorization import TransferAuthorization
|
||||||
|
|
||||||
|
# local imports
|
||||||
from .base import SyncFilter
|
from .base import SyncFilter
|
||||||
|
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
transfer_request_signature = 'ed71262a'
|
|
||||||
|
|
||||||
def unpack_create_request(data):
|
|
||||||
|
|
||||||
data = strip_0x(data)
|
|
||||||
cursor = 0
|
|
||||||
f = data[cursor:cursor+8]
|
|
||||||
cursor += 8
|
|
||||||
|
|
||||||
if f != transfer_request_signature:
|
|
||||||
raise ValueError('Invalid create request data ({})'.format(f))
|
|
||||||
|
|
||||||
o = {}
|
|
||||||
o['sender'] = data[cursor+24:cursor+64]
|
|
||||||
cursor += 64
|
|
||||||
o['recipient'] = data[cursor+24:cursor+64]
|
|
||||||
cursor += 64
|
|
||||||
o['token'] = data[cursor+24:cursor+64]
|
|
||||||
cursor += 64
|
|
||||||
o['value'] = int(data[cursor:], 16)
|
|
||||||
return o
|
|
||||||
|
|
||||||
|
|
||||||
class TransferAuthFilter(SyncFilter):
|
class TransferAuthFilter(SyncFilter):
|
||||||
|
|
||||||
def __init__(self, registry, chain_spec, queue=None):
|
def __init__(self, registry, chain_spec, conn, queue=None, call_address=ZERO_ADDRESS):
|
||||||
self.queue = queue
|
self.queue = queue
|
||||||
self.chain_spec = chain_spec
|
self.chain_spec = chain_spec
|
||||||
self.transfer_request_contract = registry.get_contract(self.chain_spec, 'TransferAuthorization')
|
registry = CICRegistry(chain_spec, conn)
|
||||||
|
self.transfer_request_contract = registry.by_name('TransferAuthorization', sender_address=call_address)
|
||||||
|
|
||||||
|
|
||||||
def filter(self, conn, block, tx, session): #rcpt, chain_str, session=None):
|
def filter(self, conn, block, tx, session): #rcpt, chain_str, session=None):
|
||||||
@ -61,11 +49,13 @@ class TransferAuthFilter(SyncFilter):
|
|||||||
logg.debug('not our transfer auth contract address {}'.format(recipient))
|
logg.debug('not our transfer auth contract address {}'.format(recipient))
|
||||||
return False
|
return False
|
||||||
|
|
||||||
o = unpack_create_request(tx.payload)
|
r = TransferAuthorization.parse_create_request_request(tx.payload)
|
||||||
|
|
||||||
|
sender = abi_decode_single(ABIContractType.ADDRESS, r[0])
|
||||||
|
recipient = abi_decode_single(ABIContractType.ADDRESS, r[1])
|
||||||
|
token = abi_decode_single(ABIContractType.ADDRESS, r[2])
|
||||||
|
value = abi_decode_single(ABIContractType.UINT256, r[3])
|
||||||
|
|
||||||
sender = add_0x(to_checksum(o['sender']))
|
|
||||||
recipient = add_0x(to_checksum(recipient))
|
|
||||||
token = add_0x(to_checksum(o['token']))
|
|
||||||
token_data = {
|
token_data = {
|
||||||
'address': token,
|
'address': token,
|
||||||
}
|
}
|
||||||
@ -83,8 +73,8 @@ class TransferAuthFilter(SyncFilter):
|
|||||||
[
|
[
|
||||||
sender,
|
sender,
|
||||||
recipient,
|
recipient,
|
||||||
o['value'],
|
value,
|
||||||
str(self.chain_spec),
|
self.chain_spec.asdict(),
|
||||||
],
|
],
|
||||||
queue=self.queue,
|
queue=self.queue,
|
||||||
)
|
)
|
||||||
|
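TransferAuthFilter now lets the TransferAuthorization contract wrapper parse the createRequest payload and decodes the four returned words with chainlib's abi_decode_single. The stdlib sketch below shows what that decoding amounts to for the address and uint256 cases; it is an illustration only, the real code should keep using the chainlib decoder.

def decode_address(word_hex):
    # an address occupies the low 20 bytes of a 32-byte ABI word
    return '0x' + word_hex[24:]


def decode_uint256(word_hex):
    # a uint256 is the whole 32-byte word, big-endian
    return int(word_hex, 16)


sender_word = '00' * 12 + 'aa' * 20
value_word = '{:064x}'.format(1024)

print(decode_address(sender_word))   # 0xaaaa...aa
print(decode_uint256(value_word))    # 1024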
@ -13,7 +13,7 @@ from chainsyncer.db.models.base import SessionBase
|
|||||||
from chainlib.status import Status
|
from chainlib.status import Status
|
||||||
from .base import SyncFilter
|
from .base import SyncFilter
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
|
|
||||||
class TxFilter(SyncFilter):
|
class TxFilter(SyncFilter):
|
||||||
@ -31,6 +31,7 @@ class TxFilter(SyncFilter):
|
|||||||
logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
|
logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
|
||||||
return None
|
return None
|
||||||
logg.info('tx filter match on {}'.format(otx.tx_hash))
|
logg.info('tx filter match on {}'.format(otx.tx_hash))
|
||||||
|
db_session.flush()
|
||||||
SessionBase.release_session(db_session)
|
SessionBase.release_session(db_session)
|
||||||
s = celery.signature(
|
s = celery.signature(
|
||||||
'cic_eth.queue.tx.set_final_status',
|
'cic_eth.queue.tx.set_final_status',
|
||||||
|
@ -8,9 +8,8 @@ import datetime
|
|||||||
import web3
|
import web3
|
||||||
import confini
|
import confini
|
||||||
import celery
|
import celery
|
||||||
from web3 import HTTPProvider, WebsocketProvider
|
from cic_eth_registry import CICRegistry
|
||||||
from cic_registry import CICRegistry
|
from chainlib.chain import ChainSpec
|
||||||
from cic_registry.chain import ChainSpec
|
|
||||||
|
|
||||||
from cic_eth.db import dsn_from_config
|
from cic_eth.db import dsn_from_config
|
||||||
from cic_eth.db import SessionBase
|
from cic_eth.db import SessionBase
|
||||||
@ -25,19 +24,14 @@ from cic_eth.eth.util import unpack_signed_raw_tx_hex
|
|||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
|
|
||||||
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
|
|
||||||
|
|
||||||
|
|
||||||
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||||
|
argparser.add_argument('-p', '--provider', dest='p', type=str, help='rpc provider')
|
||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||||
argparser.add_argument('--retry-delay', dest='retry_delay', type=str, help='seconds to wait for retrying a transaction that is marked as sent')
|
argparser.add_argument('--retry-delay', dest='retry_delay', type=str, help='seconds to wait for retrying a transaction that is marked as sent')
|
||||||
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
|
|
||||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
argparser.add_argument('-v', help='be verbose', action='store_true')
|
argparser.add_argument('-v', help='be verbose', action='store_true')
|
||||||
@ -56,6 +50,7 @@ config = confini.Config(config_dir, args.env_prefix)
|
|||||||
config.process()
|
config.process()
|
||||||
# override args
|
# override args
|
||||||
args_override = {
|
args_override = {
|
||||||
|
'ETH_PROVIDER': getattr(args, 'p'),
|
||||||
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
||||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
'CIC_TX_RETRY_DELAY': getattr(args, 'retry_delay'),
|
'CIC_TX_RETRY_DELAY': getattr(args, 'retry_delay'),
|
||||||
@ -71,31 +66,15 @@ queue = args.q
|
|||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
|
|
||||||
|
RPCConnection.register_location(args.p, chain_spec, tag='default')
|
||||||
|
|
||||||
dsn = dsn_from_config(config)
|
dsn = dsn_from_config(config)
|
||||||
SessionBase.connect(dsn)
|
SessionBase.connect(dsn)
|
||||||
|
|
||||||
|
|
||||||
re_websocket = re.compile('^wss?://')
|
|
||||||
re_http = re.compile('^https?://')
|
|
||||||
blockchain_provider = config.get('ETH_PROVIDER')
|
|
||||||
if re.match(re_websocket, blockchain_provider) != None:
|
|
||||||
blockchain_provider = WebsocketProvider(blockchain_provider)
|
|
||||||
elif re.match(re_http, blockchain_provider) != None:
|
|
||||||
blockchain_provider = HTTPProvider(blockchain_provider)
|
|
||||||
else:
|
|
||||||
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
|
||||||
|
|
||||||
def web3_constructor():
|
|
||||||
w3 = web3.Web3(blockchain_provider)
|
|
||||||
return (blockchain_provider, w3)
|
|
||||||
RpcClient.set_constructor(web3_constructor)
|
|
||||||
|
|
||||||
|
|
||||||
straggler_delay = int(config.get('CIC_TX_RETRY_DELAY'))
|
straggler_delay = int(config.get('CIC_TX_RETRY_DELAY'))
|
||||||
|
|
||||||
# TODO: we already have the signed raw tx in get, so its a waste of cycles to get_tx here
|
# TODO: we already have the signed raw tx in get, so its a waste of cycles to get_tx here
|
||||||
def sendfail_filter(w3, tx_hash, rcpt, chain_str):
|
def sendfail_filter(w3, tx_hash, rcpt, chain_spec):
|
||||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
|
||||||
tx_dict = get_tx(tx_hash)
|
tx_dict = get_tx(tx_hash)
|
||||||
tx = unpack_signed_raw_tx_hex(tx_dict['signed_tx'], chain_spec.chain_id())
|
tx = unpack_signed_raw_tx_hex(tx_dict['signed_tx'], chain_spec.chain_id())
|
||||||
logg.debug('submitting tx {} for retry'.format(tx_hash))
|
logg.debug('submitting tx {} for retry'.format(tx_hash))
|
||||||
@ -137,7 +116,7 @@ def sendfail_filter(w3, tx_hash, rcpt, chain_str):
|
|||||||
|
|
||||||
|
|
||||||
# TODO: can we merely use the dispatcher instead?
|
# TODO: can we merely use the dispatcher instead?
|
||||||
def dispatch(chain_str):
|
def dispatch(conn, chain_spec):
|
||||||
txs = get_status_tx(StatusEnum.RETRY, before=datetime.datetime.utcnow())
|
txs = get_status_tx(StatusEnum.RETRY, before=datetime.datetime.utcnow())
|
||||||
if len(txs) == 0:
|
if len(txs) == 0:
|
||||||
logg.debug('no retry state txs found')
|
logg.debug('no retry state txs found')
|
||||||
@ -199,11 +178,49 @@ def dispatch(chain_str):
|
|||||||
# s_send.apply_async()
|
# s_send.apply_async()
|
||||||
|
|
||||||
|
|
||||||
def main():
|
class RetrySyncer(Syncer):
|
||||||
|
|
||||||
c = RpcClient(chain_spec)
|
def __init__(self, chain_spec, stalled_grace_seconds, failed_grace_seconds=None, final_func=None):
|
||||||
CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
self.chain_spec = chain_spec
|
||||||
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
if failed_grace_seconds == None:
|
||||||
|
failed_grace_seconds = stalled_grace_seconds
|
||||||
|
self.stalled_grace_seconds = stalled_grace_seconds
|
||||||
|
self.failed_grace_seconds = failed_grace_seconds
|
||||||
|
self.final_func = final_func
|
||||||
|
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
# before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.failed_grace_seconds)
|
||||||
|
# failed_txs = get_status_tx(
|
||||||
|
# StatusEnum.SENDFAIL.value,
|
||||||
|
# before=before,
|
||||||
|
# )
|
||||||
|
before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.stalled_grace_seconds)
|
||||||
|
stalled_txs = get_status_tx(
|
||||||
|
StatusBits.IN_NETWORK.value,
|
||||||
|
not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE,
|
||||||
|
before=before,
|
||||||
|
)
|
||||||
|
# return list(failed_txs.keys()) + list(stalled_txs.keys())
|
||||||
|
return stalled_txs
|
||||||
|
|
||||||
|
def process(self, conn, ref):
|
||||||
|
logg.debug('tx {}'.format(ref))
|
||||||
|
for f in self.filter:
|
||||||
|
f(conn, ref, None, self.chain_spec)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def loop(self, interval):
|
||||||
|
while self.running and Syncer.running_global:
|
||||||
|
rpc = RPCConnection.connect(self.chain_spec, 'default')
|
||||||
|
for tx in self.get():
|
||||||
|
self.process(rpc, tx)
|
||||||
|
if self.final_func != None:
|
||||||
|
self.final_func(rpc, self.chain_spec)
|
||||||
|
time.sleep(interval)
|
||||||
|
|
||||||
|
def main():
|
||||||
|
|
||||||
syncer = RetrySyncer(chain_spec, straggler_delay, final_func=dispatch)
|
syncer = RetrySyncer(chain_spec, straggler_delay, final_func=dispatch)
|
||||||
syncer.filter.append(sendfail_filter)
|
syncer.filter.append(sendfail_filter)
|
||||||
|
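RetrySyncer.get() picks stragglers by status bits and age: transactions still flagged IN_NETWORK, not yet FINAL, MANUAL or OBSOLETE, whose last activity predates the stalled grace period. A stdlib-only sketch of that selection; the bit values and data shapes are illustrative, not the cic_eth StatusBits definitions.

import datetime
import enum


class StatusBits(enum.IntFlag):
    # illustrative bit values only
    IN_NETWORK = 1
    FINAL = 2
    OBSOLETE = 4
    MANUAL = 8


def get_stalled(txs, stalled_grace_seconds):
    before = datetime.datetime.utcnow() - datetime.timedelta(seconds=stalled_grace_seconds)
    exclude = StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE
    stalled = {}
    for tx_hash, (status, sent_at) in txs.items():
        if status & StatusBits.IN_NETWORK and not status & exclude and sent_at < before:
            stalled[tx_hash] = (status, sent_at)
    return stalled


now = datetime.datetime.utcnow()
txs = {
    '0xaaa': (StatusBits.IN_NETWORK, now - datetime.timedelta(seconds=600)),
    '0xbbb': (StatusBits.IN_NETWORK | StatusBits.FINAL, now - datetime.timedelta(seconds=600)),
    '0xccc': (StatusBits.IN_NETWORK, now),
}
print(list(get_stalled(txs, 300).keys()))  # ['0xaaa']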
@ -8,28 +8,21 @@ import re
|
|||||||
import urllib
|
import urllib
|
||||||
import websocket
|
import websocket
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
import confini
|
import confini
|
||||||
from crypto_dev_signer.eth.web3ext import Web3 as Web3Ext
|
from chainlib.connection import RPCConnection
|
||||||
from web3 import HTTPProvider, WebsocketProvider
|
from chainlib.eth.connection import EthUnixSignerConnection
|
||||||
from gas_proxy.web3 import GasMiddleware
|
from chainlib.chain import ChainSpec
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_registry.registry import CICRegistry
|
from cic_eth_registry import CICRegistry
|
||||||
from cic_registry.registry import ChainRegistry
|
|
||||||
from cic_registry.registry import ChainSpec
|
|
||||||
from cic_registry.helper.declarator import DeclaratorOracleAdapter
|
|
||||||
|
|
||||||
from cic_bancor.bancor import BancorRegistryClient
|
from cic_eth.eth import erc20
|
||||||
from cic_eth.eth import bancor
|
|
||||||
from cic_eth.eth import token
|
|
||||||
from cic_eth.eth import tx
|
from cic_eth.eth import tx
|
||||||
from cic_eth.eth import account
|
from cic_eth.eth import account
|
||||||
from cic_eth.admin import debug
|
from cic_eth.admin import debug
|
||||||
from cic_eth.admin import ctrl
|
from cic_eth.admin import ctrl
|
||||||
from cic_eth.eth.rpc import RpcClient
|
|
||||||
from cic_eth.eth.rpc import GasOracle
|
|
||||||
from cic_eth.queue import tx
|
from cic_eth.queue import tx
|
||||||
from cic_eth.queue import balance
|
from cic_eth.queue import balance
|
||||||
from cic_eth.callbacks import Callback
|
from cic_eth.callbacks import Callback
|
||||||
@ -47,7 +40,7 @@ logg = logging.getLogger()
|
|||||||
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser()
|
argparser = argparse.ArgumentParser()
|
||||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='web3 provider')
|
argparser.add_argument('-p', '--provider', dest='p', type=str, help='rpc provider')
|
||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||||
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
|
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
|
||||||
argparser.add_argument('-r', type=str, help='CIC registry address')
|
argparser.add_argument('-r', type=str, help='CIC registry address')
|
||||||
@ -68,12 +61,12 @@ config = confini.Config(args.c, args.env_prefix)
|
|||||||
config.process()
|
config.process()
|
||||||
# override args
|
# override args
|
||||||
args_override = {
|
args_override = {
|
||||||
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
|
||||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||||
'ETH_PROVIDER': getattr(args, 'p'),
|
'ETH_PROVIDER': getattr(args, 'p'),
|
||||||
'TASKS_TRACE_QUEUE_STATUS': getattr(args, 'trace_queue_status'),
|
'TASKS_TRACE_QUEUE_STATUS': getattr(args, 'trace_queue_status'),
|
||||||
}
|
}
|
||||||
|
config.add(args.q, '_CELERY_QUEUE', True)
|
||||||
config.dict_override(args_override, 'cli flag')
|
config.dict_override(args_override, 'cli flag')
|
||||||
config.censor('PASSWORD', 'DATABASE')
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
config.censor('PASSWORD', 'SSL')
|
config.censor('PASSWORD', 'SSL')
|
||||||
@ -81,7 +74,7 @@ logg.debug('config loaded from {}:\n{}'.format(args.c, config))
|
|||||||
|
|
||||||
# connect to database
|
# connect to database
|
||||||
dsn = dsn_from_config(config)
|
dsn = dsn_from_config(config)
|
||||||
SessionBase.connect(dsn, pool_size=8, debug=config.true('DATABASE_DEBUG'))
|
SessionBase.connect(dsn, pool_size=50, debug=config.true('DATABASE_DEBUG'))
|
||||||
|
|
||||||
# verify database connection with minimal sanity query
|
# verify database connection with minimal sanity query
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
@ -122,68 +115,14 @@ else:
|
|||||||
'result_backend': result,
|
'result_backend': result,
|
||||||
})
|
})
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
# set up web3
|
RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
|
||||||
# TODO: web3 socket wrapping is now a lot of code. factor out
|
RPCConnection.register_location(config.get('SIGNER_SOCKET_PATH'), chain_spec, 'signer', constructor=EthUnixSignerConnection)
|
||||||
class JSONRPCHttpSocketAdapter:
|
|
||||||
|
|
||||||
def __init__(self, url):
|
|
||||||
self.response = None
|
|
||||||
self.url = url
|
|
||||||
|
|
||||||
def send(self, data):
|
|
||||||
logg.debug('redirecting socket send to jsonrpc http socket adapter {} {}'.format(self.url, data))
|
|
||||||
req = urllib.request.Request(self.url, method='POST')
|
|
||||||
req.add_header('Content-type', 'application/json')
|
|
||||||
req.add_header('Connection', 'close')
|
|
||||||
res = urllib.request.urlopen(req, data=data.encode('utf-8'))
|
|
||||||
self.response = res.read().decode('utf-8')
|
|
||||||
logg.debug('setting jsonrpc http socket adapter response to {}'.format(self.response))
|
|
||||||
|
|
||||||
def recv(self, n=0):
|
|
||||||
return self.response
|
|
||||||
|
|
||||||
|
|
||||||
re_websocket = re.compile('^wss?://')
|
|
||||||
re_http = re.compile('^https?://')
|
|
||||||
blockchain_provider = config.get('ETH_PROVIDER')
|
|
||||||
socket_constructor = None
|
|
||||||
if re.match(re_websocket, blockchain_provider) != None:
|
|
||||||
def socket_constructor_ws():
|
|
||||||
return websocket.create_connection(config.get('ETH_PROVIDER'))
|
|
||||||
socket_constructor = socket_constructor_ws
|
|
||||||
blockchain_provider = WebsocketProvider(blockchain_provider)
|
|
||||||
elif re.match(re_http, blockchain_provider) != None:
|
|
||||||
def socket_constructor_http():
|
|
||||||
return JSONRPCHttpSocketAdapter(config.get('ETH_PROVIDER'))
|
|
||||||
socket_constructor = socket_constructor_http
|
|
||||||
blockchain_provider = HTTPProvider(blockchain_provider)
|
|
||||||
else:
|
|
||||||
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
|
||||||
|
|
||||||
|
|
||||||
def web3ext_constructor():
|
|
||||||
w3 = Web3Ext(blockchain_provider, config.get('SIGNER_SOCKET_PATH'))
|
|
||||||
GasMiddleware.socket_constructor = socket_constructor
|
|
||||||
w3.middleware_onion.add(GasMiddleware)
|
|
||||||
|
|
||||||
def sign_transaction(tx):
|
|
||||||
r = w3.eth.signTransaction(tx)
|
|
||||||
d = r.__dict__
|
|
||||||
for k in d.keys():
|
|
||||||
if k == 'tx':
|
|
||||||
d[k] = d[k].__dict__
|
|
||||||
else:
|
|
||||||
d[k] = d[k].hex()
|
|
||||||
return d
|
|
||||||
|
|
||||||
setattr(w3.eth, 'sign_transaction', sign_transaction)
|
|
||||||
setattr(w3.eth, 'send_raw_transaction', w3.eth.sendRawTransaction)
|
|
||||||
return (blockchain_provider, w3)
|
|
||||||
RpcClient.set_constructor(web3ext_constructor)
|
|
||||||
|
|
||||||
Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS')
|
Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS')
|
||||||
|
|
||||||
|
CICRegistry.address = config.get('CIC_REGISTRY_ADDRESS')
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
argv = ['worker']
|
argv = ['worker']
|
||||||
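The worker registers two RPC locations under the same chain spec: the chain node under the 'default' tag and the key signer's Unix socket under 'signer', built with EthUnixSignerConnection. Tasks then pick a handle by tag. A hedged sketch of that setup; both locations are placeholders, and the signer socket path in particular is an assumption.

from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection
from chainlib.eth.connection import EthUnixSignerConnection

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')  # placeholder

RPCConnection.register_location('http://localhost:8545', chain_spec, 'default')
RPCConnection.register_location('/run/crypto-dev-signer/jsonrpc.ipc', chain_spec, 'signer', constructor=EthUnixSignerConnection)

rpc = RPCConnection.connect(chain_spec, 'default')
# signer_rpc = RPCConnection.connect(chain_spec, 'signer')  # opened when a task needs to sign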
@ -196,33 +135,19 @@ def main():
|
|||||||
argv.append('-n')
|
argv.append('-n')
|
||||||
argv.append(args.q)
|
argv.append(args.q)
|
||||||
|
|
||||||
if config.true('SSL_ENABLE_CLIENT'):
|
# if config.true('SSL_ENABLE_CLIENT'):
|
||||||
Callback.ssl = True
|
# Callback.ssl = True
|
||||||
Callback.ssl_cert_file = config.get('SSL_CERT_FILE')
|
# Callback.ssl_cert_file = config.get('SSL_CERT_FILE')
|
||||||
Callback.ssl_key_file = config.get('SSL_KEY_FILE')
|
# Callback.ssl_key_file = config.get('SSL_KEY_FILE')
|
||||||
Callback.ssl_password = config.get('SSL_PASSWORD')
|
# Callback.ssl_password = config.get('SSL_PASSWORD')
|
||||||
|
#
|
||||||
|
# if config.get('SSL_CA_FILE') != '':
|
||||||
|
# Callback.ssl_ca_file = config.get('SSL_CA_FILE')
|
||||||
|
|
||||||
if config.get('SSL_CA_FILE') != '':
|
rpc = RPCConnection.connect(chain_spec, 'default')
|
||||||
Callback.ssl_ca_file = config.get('SSL_CA_FILE')
|
registry = CICRegistry(chain_spec, rpc)
|
||||||
|
registry_address = registry.by_name('ContractRegistry')
|
||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
|
||||||
|
|
||||||
c = RpcClient(chain_spec)
|
|
||||||
CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
|
||||||
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
|
||||||
|
|
||||||
chain_registry = ChainRegistry(chain_spec)
|
|
||||||
CICRegistry.add_chain_registry(chain_registry, True)
|
|
||||||
try:
|
|
||||||
CICRegistry.get_contract(chain_spec, 'CICRegistry')
|
|
||||||
except Exception as e:
|
|
||||||
logg.exception('Eek, registry failure is baaad juju {}'.format(e))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
if config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER') != None:
|
|
||||||
CICRegistry.add_role(chain_spec, config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER'), 'AccountRegistry', True)
|
|
||||||
|
|
||||||
declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', interface='Declarator')
|
|
||||||
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
||||||
if trusted_addresses_src == None:
|
if trusted_addresses_src == None:
|
||||||
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
||||||
@ -230,15 +155,7 @@ def main():
|
|||||||
trusted_addresses = trusted_addresses_src.split(',')
|
trusted_addresses = trusted_addresses_src.split(',')
|
||||||
for address in trusted_addresses:
|
for address in trusted_addresses:
|
||||||
logg.info('using trusted address {}'.format(address))
|
logg.info('using trusted address {}'.format(address))
|
||||||
oracle = DeclaratorOracleAdapter(declarator.contract, trusted_addresses)
|
|
||||||
chain_registry.add_oracle(oracle, 'naive_erc20_oracle')
|
|
||||||
|
|
||||||
|
|
||||||
#chain_spec = CICRegistry.default_chain_spec
|
|
||||||
#bancor_registry_contract = CICRegistry.get_contract(chain_spec, 'BancorRegistry', interface='Registry')
|
|
||||||
#bancor_chain_registry = CICRegistry.get_chain_registry(chain_spec)
|
|
||||||
#bancor_registry = BancorRegistryClient(c.w3, bancor_chain_registry, config.get('ETH_ABI_DIR'))
|
|
||||||
#bancor_registry.load(True)
|
|
||||||
current_app.worker_main(argv)
|
current_app.worker_main(argv)
|
||||||
|
|
||||||
|
|
||||||
|
@ -11,18 +11,15 @@ import re
|
|||||||
import confini
|
import confini
|
||||||
import celery
|
import celery
|
||||||
import rlp
|
import rlp
|
||||||
import web3
|
|
||||||
from web3 import HTTPProvider, WebsocketProvider
|
|
||||||
import cic_base.config
|
import cic_base.config
|
||||||
import cic_base.log
|
import cic_base.log
|
||||||
import cic_base.argparse
|
import cic_base.argparse
|
||||||
import cic_base.rpc
|
import cic_base.rpc
|
||||||
from cic_registry import CICRegistry
|
from cic_eth_registry import CICRegistry
|
||||||
|
from cic_eth_registry.error import UnknownContractError
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
from cic_registry import zero_address
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
from cic_registry.chain import ChainRegistry
|
from chainlib.connection import RPCConnection
|
||||||
from cic_registry.error import UnknownContractError
|
|
||||||
from chainlib.eth.connection import HTTPConnection
|
|
||||||
from chainlib.eth.block import (
|
from chainlib.eth.block import (
|
||||||
block_latest,
|
block_latest,
|
||||||
)
|
)
|
||||||
@ -37,22 +34,7 @@ from chainsyncer.driver import (
|
|||||||
from chainsyncer.db.models.base import SessionBase
|
from chainsyncer.db.models.base import SessionBase
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.registry import init_registry
|
|
||||||
from cic_eth.eth import RpcClient
|
|
||||||
from cic_eth.db import Otx
|
|
||||||
from cic_eth.db import TxConvertTransfer
|
|
||||||
from cic_eth.db.models.tx import TxCache
|
|
||||||
from cic_eth.db.enum import StatusEnum
|
|
||||||
from cic_eth.db import dsn_from_config
|
from cic_eth.db import dsn_from_config
|
||||||
from cic_eth.queue.tx import get_paused_txs
|
|
||||||
#from cic_eth.sync import Syncer
|
|
||||||
#from cic_eth.sync.error import LoopDone
|
|
||||||
from cic_eth.db.error import UnknownConvertError
|
|
||||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
|
||||||
from cic_eth.eth.task import create_check_gas_and_send_task
|
|
||||||
from cic_eth.eth.token import unpack_transfer
|
|
||||||
from cic_eth.eth.token import unpack_transferfrom
|
|
||||||
from cic_eth.eth.account import unpack_gift
|
|
||||||
from cic_eth.runnable.daemons.filters import (
|
from cic_eth.runnable.daemons.filters import (
|
||||||
CallbackFilter,
|
CallbackFilter,
|
||||||
GasFilter,
|
GasFilter,
|
||||||
@ -75,27 +57,25 @@ config.add(args.q, '_CELERY_QUEUE', True)
|
|||||||
|
|
||||||
cic_base.config.log(config)
|
cic_base.config.log(config)
|
||||||
|
|
||||||
|
|
||||||
dsn = dsn_from_config(config)
|
dsn = dsn_from_config(config)
|
||||||
SessionBase.connect(dsn, pool_size=1, debug=config.true('DATABASE_DEBUG'))
|
|
||||||
|
SessionBase.connect(dsn, pool_size=16, debug=config.true('DATABASE_DEBUG'))
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
|
|
||||||
|
#RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
|
||||||
|
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
# parse chain spec object
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
|
||||||
|
|
||||||
# connect to celery
|
# connect to celery
|
||||||
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||||
|
|
||||||
# set up registry
|
|
||||||
w3 = cic_base.rpc.create(config.get('ETH_PROVIDER')) # replace with HTTPConnection when registry has been so refactored
|
|
||||||
registry = init_registry(config, w3)
|
|
||||||
|
|
||||||
# Connect to blockchain with chainlib
|
# Connect to blockchain with chainlib
|
||||||
conn = HTTPConnection(config.get('ETH_PROVIDER'))
|
rpc = RPCConnection.connect(chain_spec, 'default')
|
||||||
|
|
||||||
o = block_latest()
|
o = block_latest()
|
||||||
r = conn.do(o)
|
r = rpc.do(o)
|
||||||
block_offset = int(strip_0x(r), 16) + 1
|
block_offset = int(strip_0x(r), 16) + 1
|
||||||
|
|
||||||
logg.debug('starting at block {}'.format(block_offset))
|
logg.debug('starting at block {}'.format(block_offset))
|
||||||
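The tracker determines its starting point by asking the node for the latest block over the registered connection and starting one block after it. A hedged sketch of that handshake; it needs chainlib and hexathon installed and a reachable node at the placeholder URL.

from chainlib.chain import ChainSpec
from chainlib.connection import RPCConnection
from chainlib.eth.block import block_latest
from hexathon import strip_0x

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')  # placeholder
RPCConnection.register_location('http://localhost:8545', chain_spec, tag='default')
rpc = RPCConnection.connect(chain_spec, 'default')

o = block_latest()                        # JSON-RPC request object for the latest block number
r = rpc.do(o)                             # hex-encoded block number from the node
block_offset = int(strip_0x(r), 16) + 1   # start syncing from the next block
print(block_offset)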
@ -147,7 +127,7 @@ def main():
|
|||||||
|
|
||||||
gas_filter = GasFilter(chain_spec, config.get('_CELERY_QUEUE'))
|
gas_filter = GasFilter(chain_spec, config.get('_CELERY_QUEUE'))
|
||||||
|
|
||||||
transfer_auth_filter = TransferAuthFilter(registry, chain_spec, config.get('_CELERY_QUEUE'))
|
#transfer_auth_filter = TransferAuthFilter(registry, chain_spec, config.get('_CELERY_QUEUE'))
|
||||||
|
|
||||||
i = 0
|
i = 0
|
||||||
for syncer in syncers:
|
for syncer in syncers:
|
||||||
@ -156,17 +136,15 @@ def main():
|
|||||||
syncer.add_filter(registration_filter)
|
syncer.add_filter(registration_filter)
|
||||||
# TODO: the two following filter functions break the filter loop if return uuid. Pro: less code executed. Con: Possibly unintuitive flow break
|
# TODO: the two following filter functions break the filter loop if return uuid. Pro: less code executed. Con: Possibly unintuitive flow break
|
||||||
syncer.add_filter(tx_filter)
|
syncer.add_filter(tx_filter)
|
||||||
syncer.add_filter(transfer_auth_filter)
|
#syncer.add_filter(transfer_auth_filter)
|
||||||
for cf in callback_filters:
|
for cf in callback_filters:
|
||||||
syncer.add_filter(cf)
|
syncer.add_filter(cf)
|
||||||
|
|
||||||
r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), conn)
|
r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
|
||||||
sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
|
sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
|
||||||
|
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
main()
|
main()
|
||||||
|
@ -5,14 +5,14 @@ import logging
|
|||||||
import argparse
|
import argparse
|
||||||
import re
|
import re
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import web3
|
import celery
|
||||||
from web3 import HTTPProvider, WebsocketProvider
|
|
||||||
import confini
|
import confini
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from xdg.BaseDirectory import xdg_config_home
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api import AdminApi
|
from cic_eth.api import AdminApi
|
||||||
from cic_eth.eth import RpcClient
|
|
||||||
from cic_eth.db import dsn_from_config
|
from cic_eth.db import dsn_from_config
|
||||||
from cic_eth.db.models.base import SessionBase
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
@ -48,29 +48,14 @@ config.censor('PASSWORD', 'DATABASE')
|
|||||||
config.censor('PASSWORD', 'SSL')
|
config.censor('PASSWORD', 'SSL')
|
||||||
logg.debug('config loaded from {}\n{}'.format(args.c, config))
|
logg.debug('config loaded from {}\n{}'.format(args.c, config))
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(args.i)
|
||||||
|
|
||||||
dsn = dsn_from_config(config)
|
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||||
SessionBase.connect(dsn)
|
|
||||||
|
|
||||||
re_websocket = re.compile('^wss?://')
|
|
||||||
re_http = re.compile('^https?://')
|
|
||||||
blockchain_provider = config.get('ETH_PROVIDER')
|
|
||||||
if re.match(re_websocket, blockchain_provider) != None:
|
|
||||||
blockchain_provider = WebsocketProvider(blockchain_provider)
|
|
||||||
elif re.match(re_http, blockchain_provider) != None:
|
|
||||||
blockchain_provider = HTTPProvider(blockchain_provider)
|
|
||||||
else:
|
|
||||||
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
|
||||||
|
|
||||||
def web3_constructor():
|
|
||||||
w3 = web3.Web3(blockchain_provider)
|
|
||||||
return (blockchain_provider, w3)
|
|
||||||
RpcClient.set_constructor(web3_constructor)
|
|
||||||
c = RpcClient(config.get('CIC_CHAIN_SPEC'))
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
api = AdminApi(c)
|
api = AdminApi(None)
|
||||||
api.tag_account(args.tag, args.address)
|
api.tag_account(args.tag, args.address, chain_spec)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
|
@ -11,18 +11,17 @@ import sys
|
|||||||
@@ -11,18 +11,17 @@ import sys
 import re
 import datetime

-# third-party imports
+# external imports
 import confini
 import celery
-import web3
-from cic_registry import CICRegistry
-from cic_registry.chain import ChainSpec
-from cic_registry.chain import ChainRegistry
+from cic_eth_registry import CICRegistry
+from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup
+from chainlib.chain import ChainSpec
+from chainlib.eth.connection import EthHTTPConnection
 from hexathon import add_0x

 # local imports
 from cic_eth.api import AdminApi
-from cic_eth.eth.rpc import RpcClient
 from cic_eth.db.enum import (
     StatusEnum,
     status_str,
@@ -32,18 +31,14 @@ from cic_eth.db.enum import (
 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()

-logging.getLogger('web3').setLevel(logging.WARNING)
-logging.getLogger('urllib3').setLevel(logging.WARNING)
+default_format = 'terminal'


-default_abi_dir = '/usr/share/local/cic/solidity/abi'
 default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')

 argparser = argparse.ArgumentParser()
-argparser.add_argument('-p', '--provider', dest='p', type=str, help='Web3 provider url (http only)')
+argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
 argparser.add_argument('-r', '--registry-address', dest='r', type=str, help='CIC registry address')
-argparser.add_argument('-f', '--format', dest='f', default='terminal', type=str, help='Output format')
+argparser.add_argument('-f', '--format', dest='f', default=default_format, type=str, help='Output format')
-argparser.add_argument('--status-raw', dest='status_raw', action='store_true', help='Output statis bit enum names only')
+argparser.add_argument('--status-raw', dest='status_raw', action='store_true', help='Output status bit enum names only')
 argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
 argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
 argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
@@ -74,38 +69,30 @@ config.censor('PASSWORD', 'DATABASE')
 config.censor('PASSWORD', 'SSL')
 logg.debug('config loaded from {}:\n{}'.format(config_dir, config))

-config.add(add_0x(args.query), '_QUERY', True)
-
-re_websocket = re.compile('^wss?://')
-re_http = re.compile('^https?://')
-blockchain_provider = config.get('ETH_PROVIDER')
-if re.match(re_websocket, blockchain_provider) != None:
-    blockchain_provider = web3.Web3.WebsocketProvider(blockchain_provider)
-elif re.match(re_http, blockchain_provider) != None:
-    blockchain_provider = web3.Web3.HTTPProvider(blockchain_provider)
-else:
-    raise ValueError('unknown provider url {}'.format(blockchain_provider))
-
-def web3_constructor():
-    w3 = web3.Web3(blockchain_provider)
-    return (blockchain_provider, w3)
-RpcClient.set_constructor(web3_constructor)
+try:
+    config.add(add_0x(args.query), '_QUERY', True)
+except:
+    config.add(args.query, '_QUERY', True)

 celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))

 queue = args.q

 chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
-chain_str = str(chain_spec)
-c = RpcClient(chain_spec)
-admin_api = AdminApi(c)

-CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
-chain_registry = ChainRegistry(chain_spec)
-CICRegistry.add_chain_registry(chain_registry)
-CICRegistry.add_path(config.get('ETH_ABI_DIR'))
-CICRegistry.load_for(chain_spec)
+rpc = EthHTTPConnection(args.p)
+
+registry_address = config.get('CIC_REGISTRY_ADDRESS')
+
+admin_api = AdminApi(rpc)
+
+trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
+if trusted_addresses_src == None:
+    logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
+    sys.exit(1)
+trusted_addresses = trusted_addresses_src.split(',')
+for address in trusted_addresses:
+    logg.info('using trusted address {}'.format(address))

 fmt = 'terminal'
 if args.f[:1] == 'j':
@@ -155,19 +142,33 @@ def render_lock(o, **kwargs):

     return s


+def connect_registry(registry_address, chain_spec, rpc):
+    CICRegistry.address = registry_address
+    registry = CICRegistry(chain_spec, rpc)
+    declarator_address = registry.by_name('AddressDeclarator')
+    lookup = AddressDeclaratorLookup(declarator_address, trusted_addresses)
+    registry.add_lookup(lookup)
+    return registry


 # TODO: move each command to submodule
 def main():
     txs = []
     renderer = render_tx
     if len(config.get('_QUERY')) > 66:
-        txs = [admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'))]
+        registry = connect_registry(registry_address, chain_spec, rpc)
+        txs = [admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'), registry=registry)]
     elif len(config.get('_QUERY')) > 42:
-        txs = [admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'))]
+        registry = connect_registry(registry_address, chain_spec, rpc)
+        txs = [admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'), registry=registry)]
     elif len(config.get('_QUERY')) == 42:
+        registry = connect_registry(registry_address, chain_spec, rpc)
         txs = admin_api.account(chain_spec, config.get('_QUERY'), include_recipient=False)
         renderer = render_account
     elif len(config.get('_QUERY')) >= 4 and config.get('_QUERY')[:4] == 'lock':
-        txs = admin_api.get_lock()
+        t = admin_api.get_lock()
+        txs = t.get()
         renderer = render_lock
     else:
         raise ValueError('cannot parse argument {}'.format(config.get('_QUERY')))
@@ -1 +0,0 @@
-from .base import Syncer
@@ -1,201 +0,0 @@
-# standard imports
-import logging
-
-# local imports
-from cic_eth.db.models.sync import BlockchainSync
-from cic_eth.db.models.base import SessionBase
-
-logg = logging.getLogger()
-
-
-class SyncerBackend:
-    """Interface to block and transaction sync state.
-
-    :param chain_spec: Chain spec for the chain that syncer is running for.
-    :type chain_spec: cic_registry.chain.ChainSpec
-    :param object_id: Unique id for the syncer session.
-    :type object_id: number
-    """
-    def __init__(self, chain_spec, object_id):
-        self.db_session = None
-        self.db_object = None
-        self.chain_spec = chain_spec
-        self.object_id = object_id
-        self.connect()
-        self.disconnect()
-
-
-    def connect(self):
-        """Loads the state of the syncer session with the given id.
-        """
-        if self.db_session == None:
-            self.db_session = SessionBase.create_session()
-        q = self.db_session.query(BlockchainSync)
-        q = q.filter(BlockchainSync.id==self.object_id)
-        self.db_object = q.first()
-        if self.db_object == None:
-            self.disconnect()
-            raise ValueError('sync entry with id {} not found'.format(self.object_id))
-        return self.db_session
-
-
-    def disconnect(self):
-        """Commits state of sync to backend.
-        """
-        if self.db_session != None:
-            self.db_session.add(self.db_object)
-            self.db_session.commit()
-            self.db_session.close()
-        self.db_session = None
-
-
-    def chain(self):
-        """Returns chain spec for syncer
-
-        :returns: Chain spec
-        :rtype chain_spec: cic_registry.chain.ChainSpec
-        """
-        return self.chain_spec
-
-
-    def get(self):
-        """Get the current state of the syncer cursor.
-
-        :returns: Block and block transaction height, respectively
-        :rtype: tuple
-        """
-        self.connect()
-        pair = self.db_object.cursor()
-        self.disconnect()
-        return pair
-
-
-    def set(self, block_height, tx_height):
-        """Update the state of the syncer cursor
-        :param block_height: Block height of cursor
-        :type block_height: number
-        :param tx_height: Block transaction height of cursor
-        :type tx_height: number
-        :returns: Block and block transaction height, respectively
-        :rtype: tuple
-        """
-        self.connect()
-        pair = self.db_object.set(block_height, tx_height)
-        self.disconnect()
-        return pair
-
-
-    def start(self):
-        """Get the initial state of the syncer cursor.
-
-        :returns: Initial block and block transaction height, respectively
-        :rtype: tuple
-        """
-        self.connect()
-        pair = self.db_object.start()
-        self.disconnect()
-        return pair
-
-
-    def target(self):
-        """Get the target state (upper bound of sync) of the syncer cursor.
-
-        :returns: Target block height
-        :rtype: number
-        """
-        self.connect()
-        target = self.db_object.target()
-        self.disconnect()
-        return target
-
-
-    @staticmethod
-    def first(chain):
-        """Returns the model object of the most recent syncer in backend.
-
-        :param chain: Chain spec of chain that syncer is running for.
-        :type chain: cic_registry.chain.ChainSpec
-        :returns: Last syncer object
-        :rtype: cic_eth.db.models.BlockchainSync
-        """
-        return BlockchainSync.first(chain)
-
-
-    @staticmethod
-    def initial(chain, block_height):
-        """Creates a new syncer session and commit its initial state to backend.
-
-        :param chain: Chain spec of chain that syncer is running for.
-        :type chain: cic_registry.chain.ChainSpec
-        :param block_height: Target block height
-        :type block_height: number
-        :returns: New syncer object
-        :rtype: cic_eth.db.models.BlockchainSync
-        """
-        object_id = None
-        session = SessionBase.create_session()
-        o = BlockchainSync(chain, 0, 0, block_height)
-        session.add(o)
-        session.commit()
-        object_id = o.id
-        session.close()
-
-        return SyncerBackend(chain, object_id)
-
-
-    @staticmethod
-    def resume(chain, block_height):
-        """Retrieves and returns all previously unfinished syncer sessions.
-
-
-        :param chain: Chain spec of chain that syncer is running for.
-        :type chain: cic_registry.chain.ChainSpec
-        :param block_height: Target block height
-        :type block_height: number
-        :returns: Syncer objects of unfinished syncs
-        :rtype: list of cic_eth.db.models.BlockchainSync
-        """
-        syncers = []
-
-        session = SessionBase.create_session()
-
-        object_id = None
-
-        for object_id in BlockchainSync.get_unsynced(session=session):
-            logg.debug('block syncer resume added previously unsynced sync entry id {}'.format(object_id))
-            syncers.append(SyncerBackend(chain, object_id))
-
-        (block_resume, tx_resume) = BlockchainSync.get_last_live_height(block_height, session=session)
-        if block_height != block_resume:
-            o = BlockchainSync(chain, block_resume, tx_resume, block_height)
-            session.add(o)
-            session.commit()
-            object_id = o.id
-            syncers.append(SyncerBackend(chain, object_id))
-            logg.debug('block syncer resume added new sync entry from previous run id {}, start{}:{} target {}'.format(object_id, block_resume, tx_resume, block_height))
-
-        session.close()
-
-        return syncers
-
-
-    @staticmethod
-    def live(chain, block_height):
-        """Creates a new open-ended syncer session starting at the given block height.
-
-        :param chain: Chain spec of chain that syncer is running for.
-        :type chain: cic_registry.chain.ChainSpec
-        :param block_height: Target block height
-        :type block_height: number
-        :returns: "Live" syncer object
-        :rtype: cic_eth.db.models.BlockchainSync
-        """
-        object_id = None
-        session = SessionBase.create_session()
-        o = BlockchainSync(chain, block_height, 0, None)
-        session.add(o)
-        session.commit()
-        object_id = o.id
-        session.close()
-
-        return SyncerBackend(chain, object_id)
@@ -1,51 +0,0 @@
-# TODO: extend blocksync model
-class Syncer:
-    """Base class and interface for implementing a block sync poller routine.
-
-    :param bc_cache: Retrieves block cache cursors for chain head and latest processed block.
-    :type bc_cache: cic_eth.sync.SyncerBackend
-    """
-    w3 = None
-    running_global = True
-
-    def __init__(self, bc_cache):
-        self.cursor = None
-        self.bc_cache = bc_cache
-        self.filter = []
-        self.running = True
-
-
-    def chain(self):
-        """Returns the string representation of the chain spec for the chain the syncer is running on.
-
-        :returns: Chain spec string
-        :rtype: str
-        """
-        return self.bc_cache.chain()
-
-
-    def get(self):
-        """Get latest unprocessed blocks.
-
-        :returns: list of block hash strings
-        :rtype: list
-        """
-        raise NotImplementedError()
-
-
-    def process(self, w3, ref):
-        """Process transactions in a single block.
-
-        :param ref: Reference of object to process
-        :type ref: str, 0x-hex
-        """
-        raise NotImplementedError()
-
-
-    def loop(self, interval):
-        """Entry point for syncer loop
-
-        :param interval: Delay in seconds until next attempt if no new blocks are found.
-        :type interval: int
-        """
-        raise NotImplementedError()
@@ -1,4 +0,0 @@
-class LoopDone(Exception):
-    """Exception raised when a syncing is complete.
-    """
-    pass
@@ -1,51 +0,0 @@
-# standard imports
-import logging
-
-# third-party imports
-import web3
-
-# local imports
-from .mined import MinedSyncer
-from .base import Syncer
-
-logg = logging.getLogger()
-
-
-class HeadSyncer(MinedSyncer):
-    """Implements the get method in Syncer for retrieving every new mined block.
-
-    :param bc_cache: Retrieves block cache cursors for chain head and latest processed block.
-    :type bc_cache: Object implementing methods from cic_eth.sync.SyncerBackend
-    """
-    def __init__(self, bc_cache):
-        super(HeadSyncer, self).__init__(bc_cache)
-        # TODO: filter not returning all blocks, at least with ganache. kind of defeats the point, then
-        #self.w3_filter = rpc.w3.eth.filter({
-        #        'fromBlock': block_offset,
-        #    }) #'latest')
-        #self.bc_cache.set(block_offset, 0)
-        logg.debug('initialized head syncer with offset {}'.format(bc_cache.start()))
-
-    """Implements Syncer.get
-
-    :param w3: Web3 object
-    :type w3: web3.Web3
-    :returns: Block hash of newly mined blocks. if any
-    :rtype: list of str, 0x-hex
-    """
-    def get(self, w3):
-        # Of course, the filter doesn't return the same block dict format as getBlock() so we'll just waste some cycles getting the hashes instead.
-        #hashes = []
-        #for block in self.w3_filter.get_new_entries():
-        #    hashes.append(block['blockHash'])
-        #logg.debug('blocks {}'.format(hashes))
-        #return hashes
-        (block_number, tx_number) = self.bc_cache.get()
-        block_hash = []
-        try:
-            block = w3.eth.getBlock(block_number)
-            block_hash.append(block.hash)
-        except web3.exceptions.BlockNotFound:
-            pass
-
-        return block_hash
@@ -1,74 +0,0 @@
-# standard imports
-import logging
-
-# third-party imports
-from web3.exceptions import BlockNotFound
-from .error import LoopDone
-
-# local imports
-from .mined import MinedSyncer
-from .base import Syncer
-from cic_eth.db.models.base import SessionBase
-
-logg = logging.getLogger()
-
-
-class HistorySyncer(MinedSyncer):
-    """Implements the get method in Syncer for retrieving all blocks between last processed block before previous shutdown and block height at time of syncer start.
-
-    :param bc_cache: Retrieves block cache cursors for chain head and latest processed block.
-    :type bc_cache: Object implementing methods from cic_eth.sync.SyncerBackend
-    :param mx: Maximum number of blocks to return in one call
-    :type mx: int
-    """
-    def __init__(self, bc_cache, mx=500):
-        super(HistorySyncer, self).__init__(bc_cache)
-        self.max = mx
-
-        self.target = bc_cache.target()
-        logg.info('History syncer target block number {}'.format(self.target))
-
-        session_offset = self.bc_cache.get()
-
-        self.block_offset = session_offset[0]
-        self.tx_offset = session_offset[1]
-        logg.info('History syncer starting at {}:{}'.format(session_offset[0], session_offset[1]))
-
-        self.filter = []
-
-
-    """Implements Syncer.get
-
-    BUG: Should also raise LoopDone when block array is empty after loop.
-
-    :param w3: Web3 object
-    :type w3: web3.Web3
-    :raises LoopDone: If a block is not found.
-    :return: Return a batch of blocks to process
-    :rtype: list of str, 0x-hex
-    """
-    def get(self, w3):
-        sync_db = self.bc_cache
-        height = self.bc_cache.get()
-        logg.debug('height {}'.format(height))
-        block_last = height[0]
-        tx_last = height[1]
-        if not self.running:
-            raise LoopDone((block_last, tx_last))
-        b = []
-        block_target = block_last + self.max
-        if block_target > self.target:
-            block_target = self.target
-        logg.debug('target {} last {} max {}'.format(block_target, block_last, self.max))
-        for i in range(block_last, block_target):
-            if i == self.target:
-                logg.info('reached target {}, exiting'.format(i))
-                self.running = False
-                break
-            bhash = w3.eth.getBlock(i).hash
-            b.append(bhash)
-            logg.debug('appending block {} {}'.format(i, bhash.hex()))
-        if block_last == block_target:
-            logg.info('aleady reached target {}, exiting'.format(self.target))
-            self.running = False
-        return b
@@ -1,50 +0,0 @@
-class MemPoolSyncer(Syncer):
-
-
-    def __init__(self, bc_cache):
-        raise NotImplementedError('incomplete, needs web3 tx to raw transaction conversion')
-        super(MemPoolSyncer, self).__init__(bc_cache)
-#        self.w3_filter = Syncer.w3.eth.filter('pending')
-#        for tx in tx_cache.txs:
-#            self.txs.append(tx)
-#            logg.debug('add tx {} to mempoolsyncer'.format(tx))
-#
-#
-#    def get(self):
-#        return self.w3_filter.get_new_entries()
-#
-#
-#    def process(self, tx_hash):
-#        tx_hash_hex = tx_hash.hex()
-#        if tx_hash_hex in self.txs:
-#            logg.debug('syncer already watching {}, skipping'.format(tx_hash_hex))
-#        tx = self.w3.eth.getTransaction(tx_hash_hex)
-#        serialized_tx = rlp.encode({
-#            'nonce': tx.nonce,
-#            'from': getattr(tx, 'from'),
-#            })
-#        logg.info('add {} to syncer: {}'.format(tx, serialized_tx))
-#        otx = Otx(
-#            nonce=tx.nonce,
-#            address=getattr(tx, 'from'),
-#            tx_hash=tx_hash_hex,
-#            signed_tx=serialized_tx,
-#            )
-#        Otx.session.add(otx)
-#        Otx.session.commit()
-#
-#
-#    def loop(self, interval):
-#        while Syncer.running:
-#            logg.debug('loop execute')
-#            txs = self.get()
-#            logg.debug('got txs {}'.format(txs))
-#            for tx in txs:
-#                #block_number = self.process(block.hex())
-#                self.process(tx)
-#                #if block_number > self.bc_cache.head():
-#                #    self.bc_cache.head(block_number)
-#            time.sleep(interval)
-#            logg.info("Syncer no longer set to run, gracefully exiting")
-
-
@@ -1,109 +0,0 @@
-# standard imports
-import logging
-import time
-
-# third-party imports
-import celery
-
-# local impotes
-from .base import Syncer
-from cic_eth.queue.tx import set_final_status
-from cic_eth.eth import RpcClient
-
-app = celery.current_app
-logg = logging.getLogger()
-
-
-class MinedSyncer(Syncer):
-    """Base implementation of block processor for mined blocks.
-
-    Loops through all transactions,
-
-    :param bc_cache: Retrieves block cache cursors for chain head and latest processed block.
-    :type bc_cache: Object implementing methods from cic_eth.sync.SyncerBackend
-    """
-
-    yield_delay = 0.005
-
-    def __init__(self, bc_cache):
-        super(MinedSyncer, self).__init__(bc_cache)
-        self.block_offset = 0
-        self.tx_offset = 0
-
-
-    def process(self, w3, ref):
-        """Processes transactions in a single block, advancing transaction (and block) cursor accordingly.
-
-        :param w3: Web3 object
-        :type w3: web3.Web3
-        :param ref: Block reference (hash) to process
-        :type ref: str, 0x-hex
-        :returns: Block number of next unprocessed block
-        :rtype: number
-        """
-        b = w3.eth.getBlock(ref)
-        c = w3.eth.getBlockTransactionCount(ref)
-        s = 0
-        if self.block_offset == b.number:
-            s = self.tx_offset
-
-        logg.debug('processing {} (blocknumber {}, count {}, offset {})'.format(ref, b.number, c, s))
-
-        for i in range(s, c):
-            tx = w3.eth.getTransactionByBlock(ref, i)
-            tx_hash_hex = tx['hash'].hex()
-            rcpt = w3.eth.getTransactionReceipt(tx_hash_hex)
-            logg.debug('{}/{} processing tx {} from block {} {}'.format(i+1, c, tx_hash_hex, b.number, ref))
-            ours = False
-            # TODO: ensure filter loop can complete on graceful shutdown
-            for f in self.filter:
-                #try:
-                session = self.bc_cache.connect()
-                task_uuid = f(w3, tx, rcpt, self.chain(), session)
-                #except Exception as e:
-                #    logg.error('error in filter {} tx {}: {}'.format(f, tx_hash_hex, e))
-                #    continue
-                if task_uuid != None:
-                    logg.debug('tx {} passed to celery task {}'.format(tx_hash_hex, task_uuid))
-                    s = celery.signature(
-                            'set_final_status',
-                            [tx_hash_hex, rcpt['blockNumber'], not rcpt['status']],
-                            )
-                    s.apply_async()
-                    break
-            next_tx = i + 1
-            if next_tx == c:
-                self.bc_cache.set(b.number+1, 0)
-            else:
-                self.bc_cache.set(b.number, next_tx)
-        if c == 0:
-            logg.info('synced block {} has no transactions'.format(b.number))
-            #self.bc_cache.session(b.number+1, 0)
-            self.bc_cache.set(b.number+1, 0)
-        return b['number']
-
-
-    def loop(self, interval):
-        """Loop running until the "running" property of Syncer is set to False.
-
-        Retrieves latest unprocessed blocks and processes them.
-
-        :param interval: Delay in seconds until next attempt if no new blocks are found.
-        :type interval: int
-        """
-        while self.running and Syncer.running_global:
-            self.bc_cache.connect()
-            c = RpcClient(self.chain())
-            logg.debug('loop execute')
-            e = self.get(c.w3)
-            logg.debug('got blocks {}'.format(e))
-            for block in e:
-                block_number = self.process(c.w3, block.hex())
-                logg.debug('processed block {} {}'.format(block_number, block.hex()))
-            self.bc_cache.disconnect()
-            if len(e) > 0:
-                time.sleep(self.yield_delay)
-            else:
-                time.sleep(interval)
-        logg.info("Syncer no longer set to run, gracefully exiting")
@@ -1,75 +0,0 @@
-# standard imports
-import logging
-import datetime
-import time
-
-# third-party imports
-import celery
-
-# local imports
-from .base import Syncer
-from cic_eth.eth.rpc import RpcClient
-from cic_eth.db.enum import (
-        StatusEnum,
-        StatusBits,
-    )
-from cic_eth.queue.tx import get_status_tx
-
-logg = logging.getLogger()
-
-celery_app = celery.current_app
-
-
-class noop_cache:
-
-    def __init__(self, chain_spec):
-        self.chain_spec = chain_spec
-
-
-    def chain(self):
-        return self.chain_spec
-
-
-class RetrySyncer(Syncer):
-
-    def __init__(self, chain_spec, stalled_grace_seconds, failed_grace_seconds=None, final_func=None):
-        cache = noop_cache(chain_spec)
-        super(RetrySyncer, self).__init__(cache)
-        if failed_grace_seconds == None:
-            failed_grace_seconds = stalled_grace_seconds
-        self.stalled_grace_seconds = stalled_grace_seconds
-        self.failed_grace_seconds = failed_grace_seconds
-        self.final_func = final_func
-
-
-    def get(self, w3):
-#        before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.failed_grace_seconds)
-#        failed_txs = get_status_tx(
-#                StatusEnum.SENDFAIL.value,
-#                before=before,
-#                )
        before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.stalled_grace_seconds)
-        stalled_txs = get_status_tx(
-                StatusBits.IN_NETWORK.value,
-                not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE,
-                before=before,
-                )
-#        return list(failed_txs.keys()) + list(stalled_txs.keys())
-        return stalled_txs
-
-
-    def process(self, w3, ref):
-        logg.debug('tx {}'.format(ref))
-        for f in self.filter:
-            f(w3, ref, None, str(self.chain()))
-
-
-    def loop(self, interval):
-        chain_str = str(self.chain())
-        while self.running and Syncer.running_global:
-            c = RpcClient(self.chain())
-            for tx in self.get(c.w3):
-                self.process(c.w3, tx)
-            if self.final_func != None:
-                self.final_func(chain_str)
-            time.sleep(interval)
@@ -1,18 +1,45 @@
 # import
+import time
 import requests
+import logging
+import uuid

 # external imports
 import celery
 import sqlalchemy
+from chainlib.eth.constant import ZERO_ADDRESS
+from chainlib.eth.nonce import RPCNonceOracle
+from chainlib.eth.gas import RPCGasOracle

 # local imports
 from cic_eth.error import (
     SignerError,
     EthError,
 )
+from cic_eth.db.models.base import SessionBase
+
+logg = logging.getLogger(__name__)
+
+celery_app = celery.current_app
+

-class CriticalTask(celery.Task):
+class BaseTask(celery.Task):
+
+    session_func = SessionBase.create_session
+    call_address = ZERO_ADDRESS
+    create_nonce_oracle = RPCNonceOracle
+    create_gas_oracle = RPCGasOracle
+
+    def create_session(self):
+        return BaseTask.session_func()
+
+
+    def log_banner(self):
+        logg.debug('task {} root uuid {}'.format(self.__class__.__name__, self.request.root_id))
+        return
+
+
+class CriticalTask(BaseTask):
     retry_jitter = True
     retry_backoff = True
     retry_backoff_max = 8
@@ -22,6 +49,7 @@ class CriticalSQLAlchemyTask(CriticalTask):
     autoretry_for = (
         sqlalchemy.exc.DatabaseError,
         sqlalchemy.exc.TimeoutError,
+        sqlalchemy.exc.ResourceClosedError,
         )


@@ -29,6 +57,8 @@ class CriticalWeb3Task(CriticalTask):
     autoretry_for = (
         requests.exceptions.ConnectionError,
         )
+    safe_gas_threshold_amount = 2000000000 * 60000 * 3
+    safe_gas_refill_amount = safe_gas_threshold_amount * 5


 class CriticalSQLAlchemyAndWeb3Task(CriticalTask):
@@ -36,13 +66,18 @@ class CriticalSQLAlchemyAndWeb3Task(CriticalTask):
         sqlalchemy.exc.DatabaseError,
         sqlalchemy.exc.TimeoutError,
         requests.exceptions.ConnectionError,
+        sqlalchemy.exc.ResourceClosedError,
         EthError,
         )
+    safe_gas_threshold_amount = 2000000000 * 60000 * 3
+    safe_gas_refill_amount = safe_gas_threshold_amount * 5


 class CriticalSQLAlchemyAndSignerTask(CriticalTask):
     autoretry_for = (
         sqlalchemy.exc.DatabaseError,
         sqlalchemy.exc.TimeoutError,
+        sqlalchemy.exc.ResourceClosedError,
         SignerError,
         )

@@ -51,3 +86,11 @@ class CriticalWeb3AndSignerTask(CriticalTask):
         requests.exceptions.ConnectionError,
         SignerError,
         )
+    safe_gas_threshold_amount = 2000000000 * 60000 * 3
+    safe_gas_refill_amount = safe_gas_threshold_amount * 5
+
+
+@celery_app.task(bind=True, base=BaseTask)
+def hello(self):
+    time.sleep(0.1)
+    return id(SessionBase.create_session)
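Aside: the gas watermark constants introduced on the Critical*Task classes above are easier to sanity-check written out. A minimal sketch in Python; reading the factors as a 2 gwei gas price, a 60000 gas budget per transaction and a three-transaction cushion is an assumption, only the product itself is what the diff hard-codes.

# sketch only: re-derives the values assigned to safe_gas_threshold_amount / safe_gas_refill_amount
GAS_PRICE_WEI = 2000000000   # assumed: 2 gwei
GAS_PER_TX = 60000           # assumed: per-transaction gas budget
TX_CUSHION = 3               # assumed: number of transactions to keep covered

safe_gas_threshold_amount = GAS_PRICE_WEI * GAS_PER_TX * TX_CUSHION
safe_gas_refill_amount = safe_gas_threshold_amount * 5

assert safe_gas_threshold_amount == 360000000000000    # 3.6e14 wei, i.e. 0.00036 ETH
assert safe_gas_refill_amount == 1800000000000000      # 1.8e15 wei, i.e. 0.0018 ETH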
@@ -9,8 +9,8 @@ import semver
 version = (
     0,
     10,
-    0,
-    'alpha.41',
+    1,
+    'beta.1',
     )

 version_object = semver.VersionInfo(
@@ -1,5 +1,5 @@
 [cic]
 registry_address =
-chain_spec =
+chain_spec = evm:bloxberg:8996
 tx_retry_delay =
 trust_address =
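Aside: the chain_spec value filled in above is the string form that chainlib parses (roughly engine, chain name, network id). A minimal sketch of the round trip already used elsewhere in this commit via ChainSpec.from_chain_str, shown only as an illustration:

from chainlib.chain import ChainSpec

# parse the default added to the config; str() gives the canonical string back
chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')
chain_str = str(chain_spec)   # expected to round-trip to 'evm:bloxberg:8996'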
@@ -1,4 +1,4 @@
 [signer]
 secret = deadbeef
 #database = crypto-dev-signer
-socket_path = /run/crypto-dev-signer/jsonrpc.ipc
+socket_path = ipc:///run/crypto-dev-signer/jsonrpc.ipc
@@ -20,8 +16,16 @@ RUN apt-get update && \

 # Copy shared requirements from top of mono-repo
 RUN echo "copying root req file ${root_requirement_file}"
-COPY $root_requirement_file .
-RUN pip install -r $root_requirement_file $pip_extra_index_url_flag
+#COPY $root_requirement_file .
+#RUN pip install -r $root_requirement_file $pip_extra_index_url_flag
+RUN /usr/local/bin/python -m pip install --upgrade pip
+#RUN git clone https://gitlab.com/grassrootseconomics/cic-base.git && \
+# cd cic-base && \
+# git checkout 7ae1f02efc206b13a65873567b0f6d1c3b7f9bc0 && \
+# python merge_requirements.py | tee merged_requirements.txt
+#RUN cd cic-base && \
+# pip install $pip_extra_index_url_flag -r ./merged_requirements.txt
+RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a44

 COPY cic-eth/scripts/ scripts/
 COPY cic-eth/setup.cfg cic-eth/setup.py ./
@@ -2,5 +2,5 @@

 set -e
 >&2 echo executing database migration
-migrate.py -c /usr/local/etc/cic-eth --migrations-dir /usr/local/share/cic-eth/alembic -vv
+python scripts/migrate.py -c /usr/local/etc/cic-eth --migrations-dir /usr/local/share/cic-eth/alembic -vv
 set +e
@@ -6,7 +6,7 @@ set -e
 # set CONFINI_ENV_PREFIX to override the env prefix to override env vars

 echo "!!! starting signer"
-python /usr/local/bin/crypto-dev-daemon -c /usr/local/etc/crypto-dev-signer &
+python /usr/local/bin/crypto-dev-daemon -c /usr/local/etc/crypto-dev-signer -vv 2> /tmp/signer.log &

 echo "!!! starting tracker"
 /usr/local/bin/cic-eth-taskerd $@
@@ -1,24 +1,24 @@
-cic-base~=0.1.1a20
-web3==5.12.2
+cic-base~=0.1.2a46
 celery==4.4.7
-crypto-dev-signer~=0.4.13rc4
+crypto-dev-signer~=0.4.14a16
 confini~=0.3.6rc3
-cic-registry~=0.5.3a22
-cic-bancor~=0.0.6
+cic-eth-registry~=0.5.4a7
+#cic-bancor~=0.0.6
 redis==3.5.3
 alembic==1.4.2
 websockets==8.1
 requests~=2.24.0
-eth_accounts_index~=0.0.10a10
-erc20-transfer-authorization~=0.3.0a10
-erc20-single-shot-faucet~=0.2.0a6
-rlp==2.0.1
+eth_accounts_index~=0.0.11a3
+erc20-transfer-authorization~=0.3.1a2
+#simple-rlp==0.1.2
 uWSGI==2.0.19.1
 semver==2.13.0
-eth-gas-proxy==0.0.1a4
 websocket-client==0.57.0
 moolb~=0.1.1b2
-eth-address-index~=0.1.0a8
-chainlib~=0.0.1a20
-hexathon~=0.0.1a3
-chainsyncer~=0.0.1a19
+eth-address-index~=0.1.1a5
+chainlib~=0.0.1a42
+hexathon~=0.0.1a7
+chainsyncer~=0.0.1a20
+pysha3==1.0.2
+coincurve==15.0.0
+sarafu-faucet==0.0.2a13
@@ -32,7 +32,6 @@ packages =
     cic_eth.db
     cic_eth.db.models
     cic_eth.queue
-    cic_eth.sync
     cic_eth.ext
     cic_eth.runnable
     cic_eth.runnable.daemons
@@ -1,7 +1,7 @@
 pytest==6.0.1
 pytest-celery==0.0.0a1
 pytest-mock==3.3.1
-py-eth==0.1.1
 pytest-cov==2.10.1
 eth-tester==0.5.0b3
 py-evm==0.3.0a20
+giftable-erc20-token==0.0.8a4
@@ -1,47 +1,30 @@
 # standard imports
 import os
-import logging
 import sys
+import logging

-# third-party imports
-import pytest
-from cic_registry import CICRegistry
+# local imports
+from cic_eth.api import Api

 script_dir = os.path.dirname(os.path.realpath(__file__))
 root_dir = os.path.dirname(script_dir)
 sys.path.insert(0, root_dir)
-data_dir = os.path.join(script_dir, 'testdata', 'abi')
-CICRegistry.add_path(data_dir)

-# fixtures
-from tests.fixtures_registry import *
-from cic_registry.pytest import *
-from cic_bancor.pytest import *
+# assemble fixtures
 from tests.fixtures_config import *
-from tests.fixtures_celery import *
-from tests.fixtures_web3 import *
 from tests.fixtures_database import *
-from tests.fixtures_faucet import *
-from tests.fixtures_transferapproval import *
-from tests.fixtures_account import *
-
-logg = logging.getLogger()
-
-
-@pytest.fixture(scope='session')
-def init_registry(
-        init_w3_conn,
-        ):
-    return CICRegistry
+from tests.fixtures_celery import *
+from tests.fixtures_role import *
+from chainlib.eth.pytest import *
+from eth_contract_registry.pytest import *
+from cic_eth_registry.pytest.fixtures_contracts import *
+from cic_eth_registry.pytest.fixtures_tokens import *


 @pytest.fixture(scope='function')
-def eth_empty_accounts(
-        init_wallet_extension,
-        ):
-    a = []
-    for i in range(10):
-        address = init_wallet_extension.new_account()
-        a.append(address)
-        logg.info('added address {}'.format(a))
-    return a
+def api(
+        default_chain_spec,
+        custodial_roles,
+        ):
+    chain_str = str(default_chain_spec)
+    return Api(chain_str, queue=None, callback_param='foo')
@@ -1,30 +0,0 @@
-# standard imports
-import logging
-
-# third-party imports
-import pytest
-from eth_accounts_index import AccountRegistry
-from cic_registry import CICRegistry
-
-logg = logging.getLogger(__name__)
-
-
-@pytest.fixture(scope='session')
-def accounts_registry(
-        default_chain_spec,
-        cic_registry,
-        w3,
-        ):
-    abi = AccountRegistry.abi()
-    constructor = w3.eth.contract(abi=abi, bytecode=AccountRegistry.bytecode())
-    tx_hash = constructor.constructor().transact()
-    r = w3.eth.getTransactionReceipt(tx_hash)
-    logg.debug('accounts registry deployed {}'.format(r.contractAddress))
-    account_registry = AccountRegistry(w3, r.contractAddress)
-
-    c = w3.eth.contract(abi=abi, address=r.contractAddress)
-    c.functions.addWriter(w3.eth.accounts[0]).transact()
-
-    CICRegistry.add_contract(default_chain_spec, c, 'AccountRegistry')
-
-    return account_registry
@@ -1,234 +0,0 @@
-# standard imports
-import os
-import logging
-import json
-
-# third-party imports
-import pytest
-from cic_registry.bancor import contract_ids
-from cic_registry import bancor
-
-# local imports
-from cic_eth.eth import rpc
-
-script_dir = os.path.dirname(os.path.realpath(__file__))
-root_dir = os.path.dirname(script_dir)
-
-logg = logging.getLogger(__file__)
-
-
-class BancorContractLoader:
-
-    bancor_path = os.path.join(root_dir, 'bancor')
-    registry_contract = None
-
-    @staticmethod
-    def build_path():
-        return BancorContractLoader.bancor_path
-#        return os.path.join(BancorContractLoader.bancor_path, 'solidity', 'build', 'contracts')
-
-
-    @staticmethod
-    def contract(w3, bundle_id, registry_id=None):
-        if registry_id == None:
-            registry_id = bundle_id
-        contract_id_hex = w3.toHex(text=registry_id)
-        contract_address = BancorContractLoader.registry_contract.functions.addressOf(contract_id_hex).call()
-        contract_build_file = os.path.join(
-                BancorContractLoader.build_path(),
-                '{}.json'.format(bundle_id),
-                )
-        f = open(os.path.join(contract_build_file))
-        j = json.load(f)
-        f.close()
-        contract_abi = j['abi']
-        logg.debug('creating contract interface {} ({}) at address {}'.format(registry_id, bundle_id, contract_address))
-        contract = w3.eth.contract(abi=contract_abi, address=contract_address)
-        return contract
-
-
-# TODO: DRY
-@pytest.fixture(scope='session')
-def bancor_deploy(
-        load_config,
-        init_w3_conn,
-        ):
-    bancor_dir_default = os.path.join(root_dir, 'bancor')
-    logg.debug('bancor deploy "{}"'.format(bancor_dir_default))
-    BancorContractLoader.bancor_path = load_config.get('BANCOR_DIR', bancor_dir_default)
-    bancor_build_dir = BancorContractLoader.build_path()
-
-    # deploy registry
-    registry_build_file = os.path.join(bancor_build_dir, 'ContractRegistry.json')
-    f = open(os.path.join(registry_build_file))
-    j = json.load(f)
-    f.close()
-    registry_constructor = init_w3_conn.eth.contract(abi=j['abi'], bytecode=j['bytecode'])
-    tx = registry_constructor.constructor().transact()
-    rcpt = init_w3_conn.eth.getTransactionReceipt(tx)
-    registry_address = rcpt['contractAddress']
-    registry_contract = init_w3_conn.eth.contract(abi=j['abi'], address=registry_address)
-    BancorContractLoader.registry_contract = registry_contract
-
-    # deply reserve token
-    reservetoken_build_file = os.path.join(bancor_build_dir, 'EtherToken.json')
-    f = open(os.path.join(reservetoken_build_file))
-    j = json.load(f)
-    f.close()
-    reservetoken_constructor = init_w3_conn.eth.contract(abi=j['abi'], bytecode=j['bytecode'])
-    tx = reservetoken_constructor.constructor('Reserve', 'RSV').transact()
-    rcpt = init_w3_conn.eth.getTransactionReceipt(tx)
-    reservetoken_address = rcpt['contractAddress']
-    reservetoken_contract = init_w3_conn.eth.contract(abi=j['abi'], address=reservetoken_address)
-
-    # register reserve token as bancor hub token
-    key_hex = init_w3_conn.toHex(text='BNTToken')
-    registry_contract.functions.registerAddress(key_hex, reservetoken_address).transact()
-
-    # deposit balances for minting liquid tokens with reserve
-    init_w3_conn.eth.sendTransaction({
-        'from': init_w3_conn.eth.accounts[1],
-        'to': reservetoken_address,
-        'value': init_w3_conn.toWei('101', 'ether'),
-        'nonce': 0,
-        })
-    init_w3_conn.eth.sendTransaction({
-        'from': init_w3_conn.eth.accounts[2],
-        'to': reservetoken_address,
-        'value': init_w3_conn.toWei('101', 'ether'),
-        'nonce': 0,
-        })
-
-    # deploy converter factory contract for creating liquid token exchanges
-    build_file = os.path.join(bancor_build_dir, 'LiquidTokenConverterFactory.json')
-    f = open(build_file)
-    j = json.load(f)
-    f.close()
-    converterfactory_constructor = init_w3_conn.eth.contract(abi=j['abi'], bytecode=j['bytecode'])
-    tx = converterfactory_constructor.constructor().transact()
-    rcpt = init_w3_conn.eth.getTransactionReceipt(tx)
-    converter_factory_address = rcpt['contractAddress']
-
-    # deploy the remaining contracts managed by the registry
-    for k in contract_ids.keys():
-        build_file = os.path.join(bancor_build_dir, '{}.json'.format(k))
-        f = open(build_file)
-        j = json.load(f)
-        f.close()
-        contract_constructor = init_w3_conn.eth.contract(abi=j['abi'], bytecode=j['bytecode'])
-        tx = None
-
-        # include the registry address as constructor parameters for the contracts that require it
-        if k in ['ConverterRegistry', 'ConverterRegistryData', 'BancorNetwork', 'ConversionPathFinder']:
-            tx = contract_constructor.constructor(registry_address).transact()
-        else:
-            tx = contract_constructor.constructor().transact()
-        rcpt = init_w3_conn.eth.getTransactionReceipt(tx)
-        contract_address = rcpt['contractAddress']
-
-        # register contract in registry
-        key_hex = init_w3_conn.toHex(text=contract_ids[k])
-        registry_contract.functions.registerAddress(key_hex, contract_address).transact()
-        contract = init_w3_conn.eth.contract(abi=j['abi'], address=contract_address)
-
-        # bancor formula needs to be initialized before use
-        if k == 'BancorFormula':
-            logg.debug('init bancor formula {}'.format(contract_address))
-            contract.functions.init().transact()
-
-        # converter factory needs liquid token converter factory to be able to issue our liquid tokens
-        if k == 'ConverterFactory':
-            logg.debug('register converter factory {}'.format(converter_factory_address))
-            contract.functions.registerTypedConverterFactory(converter_factory_address).transact()
-
-    logg.info('deployed registry at address {}'.format(registry_address))
-    return registry_contract
-
-
-def __create_converter(w3, converterregistry_contract, reserve_address, owner_address, token_name, token_symbol):
-    converterregistry_contract.functions.newConverter(
-            0,
-            token_name,
-            token_symbol,
-            18,
-            100000,
-            [reserve_address],
-            [250000],
-            ).transact({
-                'from': owner_address,
-                })
-
-
-@pytest.fixture(scope='session')
-def tokens_to_deploy(
-        ):
-    return [
-            (1, 'Bert Token', 'BRT'), # account_index, token name, token symbol
-            (2, 'Ernie Token', 'RNI'),
-        ]
-
-
-@pytest.fixture(scope='session')
-def bancor_tokens(
-        init_w3_conn,
-        bancor_deploy,
-        tokens_to_deploy,
-        ):
-    registry_contract = bancor_deploy
-
-    reserve_contract = BancorContractLoader.contract(init_w3_conn, 'ERC20Token', 'BNTToken')
-    reserve_address = reserve_contract.address
-
-    network_id = init_w3_conn.toHex(text='BancorNetwork')
-    network_address = registry_contract.functions.addressOf(network_id).call()
-
-    converterregistry_contract = BancorContractLoader.contract(init_w3_conn, 'ConverterRegistry', 'BancorConverterRegistry')
-
-    for p in tokens_to_deploy:
-        __create_converter(init_w3_conn, converterregistry_contract, reserve_address, init_w3_conn.eth.accounts[p[0]], p[1], p[2])
-
-    tokens = converterregistry_contract.functions.getAnchors().call()
-
-    network_contract = BancorContractLoader.contract(init_w3_conn, 'BancorNetwork')
-
-    mint_amount = init_w3_conn.toWei('100', 'ether')
-    i = 0
-    for token in tokens:
-        i += 1
-        owner = init_w3_conn.eth.accounts[i]
-        logg.debug('owner {} is {}'.format(owner, token))
-        reserve_contract.functions.approve(network_address, 0).transact({
-            'from': owner
-            })
-        reserve_contract.functions.approve(network_address, mint_amount).transact({
-            'from': owner
-            })
-        logg.debug('convert {} {} {} {}'.format(reserve_address, token, mint_amount, owner))
-        network_contract.functions.convert([
-                reserve_address,
-                token,
-                token,
-            ],
-            mint_amount,
-            mint_amount,
-            ).transact({
-                'from': owner,
-                })
-
-    return tokens
-
-
-@pytest.fixture(scope='session')
-def bancor_load(
-        load_config,
-        init_w3_conn,
-        bancor_deploy,
-        bancor_tokens,
-        ):
-    registry_address = bancor_deploy.address
-    bancor_dir_default = os.path.join(root_dir, 'bancor')
-    bancor_dir = load_config.get('BANCOR_DIR', bancor_dir_default)
-    bancor.load(init_w3_conn, registry_address, bancor_dir)
@@ -1,18 +1,29 @@
-# third-party imports
+# external imports
 import pytest
 import tempfile
 import logging
 import shutil

-logg = logging.getLogger(__name__)
+# local impors
+from cic_eth.task import BaseTask
+
+#logg = logging.getLogger(__name__)
+logg = logging.getLogger()
+
+
+@pytest.fixture(scope='function')
+def init_celery_tasks(
+        contract_roles,
+        ):
+    BaseTask.call_address = contract_roles['DEFAULT']
+

 # celery fixtures
 @pytest.fixture(scope='session')
 def celery_includes():
     return [
-        'cic_eth.eth.bancor',
-        'cic_eth.eth.token',
+#        'cic_eth.eth.bancor',
+        'cic_eth.eth.erc20',
         'cic_eth.eth.tx',
         'cic_eth.ext.tx',
         'cic_eth.queue.tx',
@@ -52,7 +63,7 @@ def celery_config():
 @pytest.fixture(scope='session')
 def celery_worker_parameters():
     return {
-#        'queues': ('cic-eth'),
+#        'queues': ('celery'),
         }

 @pytest.fixture(scope='session')
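Aside: the new init_celery_tasks fixture only mutates class state on BaseTask. A minimal sketch of how a test could consume it; the test module and assertion are hypothetical, only the fixture names and BaseTask.call_address come from this commit:

# hypothetical test, not part of the change set
from cic_eth.task import BaseTask


def test_call_address_points_at_default_role(
        init_celery_tasks,
        contract_roles,
        ):
    # init_celery_tasks assigns the DEFAULT contract role address to BaseTask.call_address;
    # how the tasks use that address afterwards is outside this sketch
    assert BaseTask.call_address == contract_roles['DEFAULT']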
@@ -1,74 +0,0 @@
-# third-party imports
-import pytest
-from cic_registry.pytest import *
-from erc20_single_shot_faucet import Faucet
-from cic_registry import zero_address
-
-
-@pytest.fixture(scope='session')
-def faucet_amount():
-    return 50
-
-
-@pytest.fixture(scope='session')
-def faucet(
-        faucet_amount,
-        config,
-        default_chain_spec,
-        cic_registry,
-        bancor_tokens,
-        w3_account_roles,
-        w3_account_token_owners,
-        solidity_abis,
-        w3,
-        #accounts_registry,
-        ):
-
-    abi = Faucet.abi('storage')
-    bytecode = Faucet.bytecode('storage')
-
-    cs = w3.eth.contract(abi=abi, bytecode=bytecode)
-    tx_hash = cs.constructor().transact({'from': w3_account_roles['eth_account_faucet_owner']})
-    rcpt = w3.eth.getTransactionReceipt(tx_hash)
-    cs_address = rcpt.contractAddress
-
-    abi = Faucet.abi()
-    bytecode = Faucet.bytecode()
-    cf = w3.eth.contract(abi=abi, bytecode=bytecode)
-
-    tx_hash = cf.constructor(
-        [
-            w3_account_roles['eth_account_faucet_owner']
-            ],
-        bancor_tokens[0],
-        cs_address,
-        zero_address,
-        #accounts_registry,
-        ).transact({
-            'from': w3_account_roles['eth_account_faucet_owner']
-            }
-        )
-
-    rcpt = w3.eth.getTransactionReceipt(tx_hash)
-    cf_address = rcpt.contractAddress
-
-    c = w3.eth.contract(abi=abi, address=cf_address)
-    c.functions.setAmount(50).transact({
-        'from': w3_account_roles['eth_account_faucet_owner']
-        }
-        )
-
-    logg.debug('foo {} bar {}'.format(cf_address, w3_account_roles))
-
-    # fund the faucet with token balance
-    token = w3.eth.contract(abi=solidity_abis['ERC20'], address=bancor_tokens[0])
-    token_symbol = token.functions.symbol().call()
-    tx_hash = token.functions.transfer(cf_address, 100000).transact({
-        'from': w3_account_token_owners[token_symbol],
-        })
-
-    CICRegistry.add_contract(default_chain_spec, c, 'Faucet')
-
-    return cf_address
@@ -1,52 +0,0 @@
-# standard imports
-import os
-import json
-import logging
-
-# third-party imports
-import pytest
-from cic_eth.eth.rpc import RpcClient
-from crypto_dev_signer.keystore import ReferenceKeystore
-#from crypto_dev_signer.eth.web3ext import Web3 as Web3ext
-
-logg = logging.getLogger(__file__)
-
-
-# TODO: need mock for deterministic signatures
-# depends on mock blockchain (ganache) where private key is passed directly to this module
-@pytest.fixture(scope='session')
-def init_mock_keystore(
-        ):
-    raise NotImplementedError
-
-
-@pytest.fixture(scope='session')
-def init_keystore(
-        load_config,
-        database_engine,
-        ):
-    #symkey_hex = os.environ.get('CIC_SIGNER_SECRET')
-    symkey_hex = load_config.get('SIGNER_SECRET')
-    symkey = bytes.fromhex(symkey_hex)
-    opt = {
-        'symmetric_key': symkey,
-        }
-    k = ReferenceKeystore(database_engine, **opt)
-    k.db_session.execute('DELETE from ethereum')
-    k.db_session.commit()
-    keys_file = load_config.get('SIGNER_DEV_KEYS_PATH')
-    addresses = []
-    if keys_file:
-        logg.debug('loading keys from {}'.format(keys_file))
-        f = open(keys_file, 'r')
-        j = json.load(f)
-        f.close()
-        signer_password = load_config.get('SIGNER_PASSWORD')
-        for pk in j['private']:
-            address_hex = k.import_raw_key(bytes.fromhex(pk[2:]), signer_password)
-            addresses.append(address_hex)
-
-    RpcClient.set_provider_address(addresses[0])
-    return addresses
@@ -1,50 +0,0 @@
-# standard imports
-import os
-import json
-import logging
-
-# third-party imports
-import pytest
-from eth_address_declarator import AddressDeclarator
-
-# local imports
-from cic_registry import CICRegistry
-from cic_registry import to_identifier
-from cic_registry.contract import Contract
-from cic_registry.error import ChainExistsError
-
-logg = logging.getLogger()
-
-script_dir = os.path.dirname(__file__)
-
-
-@pytest.fixture(scope='session')
-def local_cic_registry(
-        cic_registry,
-        ):
-    path = os.path.realpath(os.path.join(script_dir, 'testdata', 'abi'))
-    CICRegistry.add_path(path)
-    return cic_registry
-
-
-@pytest.fixture(scope='function')
-def address_declarator(
-        bloxberg_config,
-        default_chain_spec,
-        default_chain_registry,
-        local_cic_registry,
-        init_rpc,
-        init_w3,
-        ):
-
-    c = init_rpc.w3.eth.contract(abi=AddressDeclarator.abi(), bytecode=AddressDeclarator.bytecode())
-    default_description = '0x{:<064s}'.format(b'test'.hex())
-    logg.debug('default_ {}'.format(default_description))
-    tx_hash = c.constructor(default_description).transact()
-    rcpt = init_rpc.w3.eth.getTransactionReceipt(tx_hash)
-
-    registry = init_rpc.w3.eth.contract(abi=CICRegistry.abi(), address=local_cic_registry)
-    chain_identifier = to_identifier(default_chain_registry.chain())
-    registry.functions.set(to_identifier('AddressDeclarator'), rcpt.contractAddress, chain_identifier, bloxberg_config['digest']).transact()
-
-    return rcpt.contractAddress
62 apps/cic-eth/tests/fixtures_role.py Normal file
@ -0,0 +1,62 @@
# standard imports
import logging

# external imports
import pytest
from hexathon import add_0x
from chainlib.eth.address import to_checksum_address

# local imports
from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.nonce import Nonce

#logg = logging.getLogger(__name__)
# FIXME: debug output is not shown even though the level is set explicitly
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()


@pytest.fixture(scope='function')
def init_custodial(
        contract_roles,
        token_roles,
        agent_roles,
        init_database,
        ):
    for roles in [contract_roles, token_roles, agent_roles]:
        for role in roles.values():
            Nonce.init(role, session=init_database)

    init_database.commit()


@pytest.fixture(scope='function')
def custodial_roles(
        init_custodial,
        contract_roles,
        token_roles,
        agent_roles,
        eth_accounts,
        eth_keystore,
        init_database,
        ):
    r = {}
    r.update(contract_roles)
    r.update(agent_roles)
    r.update({
        'GAS_GIFTER': eth_accounts[10],
        'FOO_TOKEN_GIFTER': token_roles['FOO_TOKEN_OWNER'],
        })
    for k in r.keys():
        role = AccountRole.set(k, r[k])
        init_database.add(role)
        logg.info('adding role {} -> {}'.format(k, r[k]))
    init_database.commit()
    return r


@pytest.fixture(scope='function')
def whoever(
        init_eth_tester,
        ):
    return init_eth_tester.new_account()
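The custodial_roles fixture above merges the contract, agent and gifter addresses into one role map and persists every entry through AccountRole.set. As a minimal sketch of how a test could consume it (illustrative only, not part of the commit; the test name is hypothetical, while 'GAS_GIFTER', init_database and the AccountRole.get_address call are taken from the surrounding diff):

    # illustrative sketch only, not part of the commit
    from cic_eth.db.models.role import AccountRole

    def test_gas_gifter_role(custodial_roles, init_database):
        # the fixture stores each tag with AccountRole.set, so the tag should
        # resolve back to the same address through the same session
        assert AccountRole.get_address('GAS_GIFTER', init_database) == custodial_roles['GAS_GIFTER']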
@ -1,27 +0,0 @@
# third-party imports
import pytest
from cic_registry import CICRegistry


@pytest.fixture(scope='session')
def token_registry(
        default_chain_spec,
        cic_registry,
        solidity_abis,
        evm_bytecodes,
        w3,
        ):

    abi = solidity_abis['TokenRegistry']
    bytecode = evm_bytecodes['TokenRegistry']

    c = w3.eth.contract(abi=abi, bytecode=bytecode)
    tx_hash = c.constructor().transact({'from': w3.eth.accounts[0]})
    rcpt = w3.eth.getTransactionReceipt(tx_hash)
    address = rcpt.contractAddress

    c = w3.eth.contract(abi=abi, address=address)

    CICRegistry.add_contract(default_chain_spec, c, 'TokenRegistry')

    return address
@ -1,30 +0,0 @@
# third-party imports
import pytest
from cic_registry.pytest import *
from erc20_approval_escrow import TransferApproval


@pytest.fixture(scope='session')
def transfer_approval(
        config,
        default_chain_spec,
        default_chain_registry,
        bancor_tokens,
        w3_account_roles,
        cic_registry,
        w3,
        ):

    abi = TransferApproval.abi()
    bytecode = TransferApproval.bytecode()

    c = w3.eth.contract(abi=abi, bytecode=bytecode)
    approvers = [w3_account_roles['eth_account_approval_owner']]
    tx_hash = c.constructor(approvers).transact({'from': w3_account_roles['eth_account_approval_owner']})
    rcpt = w3.eth.getTransactionReceipt(tx_hash)

    c = w3.eth.contract(abi=abi, address=rcpt.contractAddress)

    CICRegistry.add_contract(default_chain_spec, c, 'TransferApproval')

    return rcpt.contractAddress
@ -1,212 +0,0 @@
# standard imports
import os
import logging

# third-party imports
import hexbytes
import pytest
import web3
import eth_tester
from crypto_dev_signer.eth.transaction import EIP155Transaction
from crypto_dev_signer.eth.signer.defaultsigner import ReferenceSigner as EIP155Signer
from eth_keys import KeyAPI

# local imports
from cic_eth.eth import RpcClient
from cic_eth.eth.rpc import GasOracle
from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.nonce import Nonce

#logg = logging.getLogger(__name__)
logg = logging.getLogger()


@pytest.fixture(scope='session')
def init_w3_nokey(
        ):
    provider = 'http://localhost:8545'
    return web3.Web3(provider)


class ProviderWalletExtension:

    def __init__(self, provider, gas_price=1000000):
        self.provider = provider
        self.signer = EIP155Signer(provider)
        self.default_gas_price = gas_price


    def get(self, address, password=None):
        return self.provider.get(address, password)


    def new_account(self, password=None):
        keys = KeyAPI()
        pk = os.urandom(32)
        account = self.provider.add_account(pk.hex())
        self.provider.accounts[account] = keys.PrivateKey(pk)
        return account


    def sign_transaction(self, tx):
        tx['chainId'] = int(tx['chainId'])
        logg.debug('signing {}'.format(tx))
        signer_tx = EIP155Transaction(tx, tx['nonce'], tx['chainId'])
        tx_signed = self.signer.signTransaction(signer_tx)
        tx_signed_dict = signer_tx.serialize()
        tx_signed_dict['raw'] = '0x' + signer_tx.rlp_serialize().hex()
        return tx_signed_dict


    def sign(self, address, text=None, bytes=None):
        logg.debug('sign message {} {}'.format(address[2:], text))
        return self.signer.signEthereumMessage(address[2:], text)


    def send_raw_transaction(self, rlp_tx_hex):
        raw_tx = self.provider.backend.send_raw_transaction(bytes.fromhex(rlp_tx_hex[2:]))
        return raw_tx


    def gas_price(self):
        return self.default_gas_price


@pytest.fixture(scope='session')
def init_wallet_extension(
        init_eth_tester,
        eth_provider,
        ):

    x = ProviderWalletExtension(init_eth_tester)

    def _rpcclient_web3_constructor():
        w3 = web3.Web3(eth_provider)
        setattr(w3.eth, 'personal', x)
        setattr(w3.eth, 'sign_transaction', x.sign_transaction)
        setattr(w3.eth, 'send_raw_transaction', x.send_raw_transaction)
        setattr(w3.eth, 'sign', x.sign)
        setattr(w3.eth, 'gas_price', x.gas_price)
        return (init_eth_tester, w3)

    RpcClient.set_constructor(_rpcclient_web3_constructor)
    init_eth_tester.signer = EIP155Signer(x)
    return x


@pytest.fixture(scope='session')
def init_w3_conn(
        default_chain_spec,
        init_eth_tester,
        init_wallet_extension,
        ):

    c = RpcClient(default_chain_spec)
    x = ProviderWalletExtension(init_eth_tester)

    # a hack to make available missing rpc calls we need
    setattr(c.w3.eth, 'personal', x)
    setattr(c.w3.eth, 'sign_transaction', x.sign_transaction)
    setattr(c.w3.eth, 'send_raw_transaction', x.send_raw_transaction)
    setattr(c.w3.eth, 'sign', x.sign)
    return c.w3


@pytest.fixture(scope='function')
def init_w3(
        init_database,
        init_eth_tester,
        init_eth_account_roles,
        init_w3_conn,
        ):

    for address in init_w3_conn.eth.accounts:
        nonce = init_w3_conn.eth.getTransactionCount(address, 'pending')
        Nonce.init(address, nonce=nonce, session=init_database)
    init_database.commit()

    yield init_w3_conn
    logg.debug('mining om nom nom... {}'.format(init_eth_tester.mine_block()))


@pytest.fixture(scope='function')
def init_eth_account_roles(
        init_database,
        w3_account_roles,
        ):

    address = w3_account_roles.get('eth_account_gas_provider')
    role = AccountRole.set('GAS_GIFTER', address)
    init_database.add(role)

    return w3_account_roles


@pytest.fixture(scope='function')
def init_rpc(
        default_chain_spec,
        init_eth_account_roles,
        init_eth_tester,
        init_wallet_extension,
        ):

    c = RpcClient(default_chain_spec)
    x = ProviderWalletExtension(init_eth_tester)

    # a hack to make available missing rpc calls we need
    setattr(c.w3.eth, 'personal', x)
    setattr(c.w3.eth, 'sign_transaction', x.sign_transaction)
    setattr(c.w3.eth, 'send_raw_transaction', x.send_raw_transaction)
    setattr(c.w3.eth, 'sign', x.sign)
    yield c
    logg.debug('mining om nom nom... {}'.format(init_eth_tester.mine_block()))


@pytest.fixture(scope='session')
def w3_account_roles(
        config,
        w3,
        ):

    role_ids = [
        'eth_account_bancor_deployer',
        'eth_account_reserve_owner',
        'eth_account_reserve_minter',
        'eth_account_accounts_index_owner',
        'eth_account_accounts_index_writer',
        'eth_account_sarafu_owner',
        'eth_account_sarafu_gifter',
        'eth_account_approval_owner',
        'eth_account_faucet_owner',
        'eth_account_gas_provider',
        ]
    roles = {}

    i = 0
    for r in role_ids:
        a = w3.eth.accounts[i]
        try:
            a = config.get(r.upper())
        except KeyError:
            pass
        roles[r] = a
        i += 1

    return roles


@pytest.fixture(scope='session')
def w3_account_token_owners(
        tokens_to_deploy,
        w3,
        ):

    token_owners = {}

    i = 1
    for t in tokens_to_deploy:
        token_owners[t[2]] = w3.eth.accounts[i]
        i += 1

    return token_owners
@ -1,246 +0,0 @@
# standard imports
import os
import logging

# third-party imports
import celery
import pytest
import web3

# local imports
from cic_eth.api import AdminApi
from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.db.enum import (
        StatusEnum,
        StatusBits,
        status_str,
        )
from cic_eth.error import InitializationError
from cic_eth.eth.task import sign_and_register_tx
from cic_eth.eth.tx import cache_gas_refill_data
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.eth.rpc import RpcClient
from cic_eth.eth.task import sign_tx
from cic_eth.eth.tx import otx_cache_parse_tx
from cic_eth.queue.tx import create as queue_create
from cic_eth.queue.tx import get_tx

logg = logging.getLogger()


def test_resend_inplace(
        default_chain_spec,
        init_database,
        init_w3,
        celery_session_worker,
        ):

    chain_str = str(default_chain_spec)
    c = RpcClient(default_chain_spec)

    sigs = []

    gas_provider = c.gas_provider()

    s_nonce = celery.signature(
            'cic_eth.eth.tx.reserve_nonce',
            [
                init_w3.eth.accounts[0],
                gas_provider,
                ],
            queue=None,
            )
    s_refill = celery.signature(
            'cic_eth.eth.tx.refill_gas',
            [
                chain_str,
                ],
            queue=None,
            )
    s_nonce.link(s_refill)
    t = s_nonce.apply_async()
    t.get()
    for r in t.collect():
        pass
    assert t.successful()

    q = init_database.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.recipient==init_w3.eth.accounts[0])
    o = q.first()
    tx_raw = o.signed_tx

    tx_dict = unpack_signed_raw_tx(bytes.fromhex(tx_raw[2:]), default_chain_spec.chain_id())
    gas_price_before = tx_dict['gasPrice']

    s = celery.signature(
            'cic_eth.admin.ctrl.lock_send',
            [
                chain_str,
                init_w3.eth.accounts[0],
                ],
            queue=None,
            )
    t = s.apply_async()
    t.get()
    assert t.successful()

    api = AdminApi(c, queue=None)
    t = api.resend(tx_dict['hash'], chain_str, unlock=True)
    t.get()
    i = 0
    tx_hash_new_hex = None
    for r in t.collect():
        tx_hash_new_hex = r[1]
    assert t.successful()

    tx_raw_new = get_tx(tx_hash_new_hex)
    logg.debug('get {}'.format(tx_raw_new))
    tx_dict_new = unpack_signed_raw_tx(bytes.fromhex(tx_raw_new['signed_tx'][2:]), default_chain_spec.chain_id())
    assert tx_hash_new_hex != tx_dict['hash']
    assert tx_dict_new['gasPrice'] > gas_price_before

    tx_dict_after = get_tx(tx_dict['hash'])

    logg.debug('logggg {}'.format(status_str(tx_dict_after['status'])))
    assert tx_dict_after['status'] & StatusBits.MANUAL


#def test_check_fix_nonce(
#        default_chain_spec,
#        init_database,
#        init_eth_account_roles,
#        init_w3,
#        eth_empty_accounts,
#        celery_session_worker,
#        ):
#
#    chain_str = str(default_chain_spec)
#
#    sigs = []
#    for i in range(5):
#        s = celery.signature(
#                'cic_eth.eth.tx.refill_gas',
#                [
#                    eth_empty_accounts[i],
#                    chain_str,
#                    ],
#                queue=None,
#                )
#        sigs.append(s)
#
#    t = celery.group(sigs)()
#    txs = t.get()
#    assert t.successful()
#
#    tx_hash = web3.Web3.keccak(hexstr=txs[2])
#    c = RpcClient(default_chain_spec)
#    api = AdminApi(c, queue=None)
#    address = init_eth_account_roles['eth_account_gas_provider']
#    nonce_spec = api.check_nonce(address)
#    assert nonce_spec['nonce']['network'] == 0
#    assert nonce_spec['nonce']['queue'] == 4
#    assert nonce_spec['nonce']['blocking'] == None
#
#    s_set = celery.signature(
#            'cic_eth.queue.tx.set_rejected',
#            [
#                tx_hash.hex(),
#                ],
#            queue=None,
#            )
#    t = s_set.apply_async()
#    t.get()
#    t.collect()
#    assert t.successful()
#
#
#    nonce_spec = api.check_nonce(address)
#    assert nonce_spec['nonce']['blocking'] == 2
#    assert nonce_spec['tx']['blocking'] == tx_hash.hex()
#
#    t = api.fix_nonce(address, nonce_spec['nonce']['blocking'])
#    t.get()
#    t.collect()
#    assert t.successful()
#
#    for tx in txs[3:]:
#        tx_hash = web3.Web3.keccak(hexstr=tx)
#        tx_dict = get_tx(tx_hash.hex())
#        assert tx_dict['status'] == StatusEnum.OVERRIDDEN
#
#
#def test_tag_account(
#        init_database,
#        eth_empty_accounts,
#        init_rpc,
#        ):
#
#    api = AdminApi(init_rpc)
#
#    api.tag_account('foo', eth_empty_accounts[0])
#    api.tag_account('bar', eth_empty_accounts[1])
#    api.tag_account('bar', eth_empty_accounts[2])
#
#    assert AccountRole.get_address('foo') == eth_empty_accounts[0]
#    assert AccountRole.get_address('bar') == eth_empty_accounts[2]
#
#
#def test_ready(
#        init_database,
#        eth_empty_accounts,
#        init_rpc,
#        w3,
#        ):
#
#    api = AdminApi(init_rpc)
#
#    with pytest.raises(InitializationError):
#        api.ready()
#
#    bogus_account = os.urandom(20)
#    bogus_account_hex = '0x' + bogus_account.hex()
#
#    api.tag_account('ETH_GAS_PROVIDER_ADDRESS', web3.Web3.toChecksumAddress(bogus_account_hex))
#    with pytest.raises(KeyError):
#        api.ready()
#
#    api.tag_account('ETH_GAS_PROVIDER_ADDRESS', eth_empty_accounts[0])
#    api.ready()
#
#
#def test_tx(
#        default_chain_spec,
#        cic_registry,
#        init_database,
#        init_rpc,
#        init_w3,
#        celery_session_worker,
#        ):
#
#    tx = {
#        'from': init_w3.eth.accounts[0],
#        'to': init_w3.eth.accounts[1],
#        'nonce': 42,
#        'gas': 21000,
#        'gasPrice': 1000000,
#        'value': 128,
#        'chainId': default_chain_spec.chain_id(),
#        'data': '',
#        }
#
#    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
#    queue_create(
#        tx['nonce'],
#        tx['from'],
#        tx_hash_hex,
#        tx_signed_raw_hex,
#        str(default_chain_spec),
#        )
#    tx_recovered = unpack_signed_raw_tx(bytes.fromhex(tx_signed_raw_hex[2:]), default_chain_spec.chain_id())
#    cache_gas_refill_data(tx_hash_hex, tx_recovered)
#
#    api = AdminApi(init_rpc, queue=None)
#    tx = api.tx(default_chain_spec, tx_hash=tx_hash_hex)
@ -1,40 +0,0 @@
# standard imports
import os
import logging

# local imports
import web3
from cic_eth.api.api_task import Api

logg = logging.getLogger()


def test_balance_complex_api(
        default_chain_spec,
        init_database,
        init_w3,
        cic_registry,
        dummy_token,
        dummy_token_registered,
        celery_session_worker,
        init_eth_tester,
        ):

    chain_str = str(default_chain_spec)

    api = Api(chain_str, queue=None, callback_param='foo')

    a = web3.Web3.toChecksumAddress('0x' + os.urandom(20).hex())
    t = api.balance(a, 'DUM')
    t.get()
    r = None
    for c in t.collect():
        r = c[1]
    assert t.successful()
    logg.debug(r)

    assert r[0].get('balance_incoming') != None
    assert r[0].get('balance_outgoing') != None
    assert r[0].get('balance_network') != None

    logg.debug('r {}'.format(r))
@ -1,115 +0,0 @@
# standard imports
import logging

# local imports
from cic_eth.api.api_task import Api
from cic_eth.eth.token import TokenTxFactory
from cic_eth.eth.task import sign_tx
from tests.mock.filter import (
        block_filter,
        tx_filter,
        )
from cic_eth.db.models.nonce import (
        Nonce,
        NonceReservation,
        )


logg = logging.getLogger()


def test_list_tx(
        default_chain_spec,
        default_chain_registry,
        init_database,
        init_rpc,
        init_w3,
        init_eth_tester,
        dummy_token_gifted,
        cic_registry,
        celery_session_worker,
        ):

    tx_hashes = []
    # external tx
    nonce = init_w3.eth.getTransactionCount(init_w3.eth.accounts[0])
    q = init_database.query(Nonce)
    q = q.filter(Nonce.address_hex==init_w3.eth.accounts[0])
    o = q.first()
    o.nonce = nonce
    init_database.add(o)
    init_database.commit()

    NonceReservation.next(init_w3.eth.accounts[0], 'foo', session=init_database)
    init_database.commit()

    init_eth_tester.mine_blocks(13)
    txf = TokenTxFactory(init_w3.eth.accounts[0], init_rpc)
    tx = txf.transfer(dummy_token_gifted, init_w3.eth.accounts[1], 3000, default_chain_spec, 'foo')
    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
    tx_hashes.append(tx_hash_hex)
    init_w3.eth.sendRawTransaction(tx_signed_raw_hex)
    # add to filter
    rcpt = init_w3.eth.getTransactionReceipt(tx_hash_hex)
    a = rcpt['blockNumber']
    block_filter.add(a.to_bytes(4, 'big'))
    a = rcpt['blockNumber'] + rcpt['transactionIndex']
    tx_filter.add(a.to_bytes(4, 'big'))

    # external tx
    NonceReservation.next(init_w3.eth.accounts[0], 'bar', session=init_database)
    init_database.commit()

    init_eth_tester.mine_blocks(28)
    txf = TokenTxFactory(init_w3.eth.accounts[0], init_rpc)
    tx = txf.transfer(dummy_token_gifted, init_w3.eth.accounts[1], 4000, default_chain_spec, 'bar')
    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
    tx_hashes.append(tx_hash_hex)
    init_w3.eth.sendRawTransaction(tx_signed_raw_hex)
    # add to filter
    rcpt = init_w3.eth.getTransactionReceipt(tx_hash_hex)
    a = rcpt['blockNumber']
    block_filter.add(a.to_bytes(4, 'big'))
    a = rcpt['blockNumber'] + rcpt['transactionIndex']
    tx_filter.add(a.to_bytes(4, 'big'))

    # custodial tx
    #NonceReservation.next(init_w3.eth.accounts[0], 'blinky', session=init_database)
    #init_database.commit()

    init_eth_tester.mine_blocks(3)
    #txf = TokenTxFactory(init_w3.eth.accounts[0], init_rpc)
    api = Api(str(default_chain_spec), queue=None)
    t = api.transfer(init_w3.eth.accounts[0], init_w3.eth.accounts[1], 1000, 'DUM') #, 'blinky')
    t.get()
    tx_hash_hex = None
    for c in t.collect():
        tx_hash_hex = c[1]
    assert t.successful()
    tx_hashes.append(tx_hash_hex)

    # custodial tx
    #NonceReservation.next(init_w3.eth.accounts[0], 'clyde', session=init_database)
    init_database.commit()
    init_eth_tester.mine_blocks(6)
    api = Api(str(default_chain_spec), queue=None)
    t = api.transfer(init_w3.eth.accounts[0], init_w3.eth.accounts[1], 2000, 'DUM') #, 'clyde')
    t.get()
    tx_hash_hex = None
    for c in t.collect():
        tx_hash_hex = c[1]
    assert t.successful()
    tx_hashes.append(tx_hash_hex)

    # test the api
    t = api.list(init_w3.eth.accounts[1], external_task='tests.mock.filter.filter')
    r = t.get()
    for c in t.collect():
        r = c[1]
    assert t.successful()

    assert len(r) == 4
    for tx in r:
        logg.debug('have tx {}'.format(r))
        tx_hashes.remove(tx['hash'])
    assert len(tx_hashes) == 0
0 apps/cic-eth/tests/task/__init__.py Normal file
294 apps/cic-eth/tests/task/api/test_admin.py Normal file
@ -0,0 +1,294 @@
# standard imports
import os
import logging

# external imports
import celery
import pytest
from chainlib.eth.tx import (
        unpack,
        TxFormat,
        )
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.gas import Gas
from chainlib.eth.address import to_checksum_address
from hexathon import (
        strip_0x,
        add_0x,
        )

# local imports
from cic_eth.api import AdminApi
from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.db.enum import (
        StatusEnum,
        StatusBits,
        status_str,
        LockEnum,
        )
from cic_eth.error import InitializationError
from cic_eth.eth.tx import (
        cache_gas_data,
        )
#from cic_eth.eth.gas import cache_gas_tx
from cic_eth.queue.tx import (
        create as queue_create,
        get_tx,
        )

logg = logging.getLogger()


#def test_resend_inplace(
#        default_chain_spec,
#        init_database,
#        init_w3,
#        celery_session_worker,
#        ):
#
#    chain_str = str(default_chain_spec)
#    c = RpcClient(default_chain_spec)
#
#    sigs = []
#
#    gas_provider = c.gas_provider()
#
#    s_nonce = celery.signature(
#            'cic_eth.eth.tx.reserve_nonce',
#            [
#                init_w3.eth.accounts[0],
#                gas_provider,
#                ],
#            queue=None,
#            )
#    s_refill = celery.signature(
#            'cic_eth.eth.tx.refill_gas',
#            [
#                chain_str,
#                ],
#            queue=None,
#            )
#    s_nonce.link(s_refill)
#    t = s_nonce.apply_async()
#    t.get()
#    for r in t.collect():
#        pass
#    assert t.successful()
#
#    q = init_database.query(Otx)
#    q = q.join(TxCache)
#    q = q.filter(TxCache.recipient==init_w3.eth.accounts[0])
#    o = q.first()
#    tx_raw = o.signed_tx
#
#    tx_dict = unpack_signed_raw_tx(bytes.fromhex(tx_raw[2:]), default_chain_spec.chain_id())
#    gas_price_before = tx_dict['gasPrice']
#
#    s = celery.signature(
#            'cic_eth.admin.ctrl.lock_send',
#            [
#                chain_str,
#                init_w3.eth.accounts[0],
#                ],
#            queue=None,
#            )
#    t = s.apply_async()
#    t.get()
#    assert t.successful()
#
#    api = AdminApi(c, queue=None)
#    t = api.resend(tx_dict['hash'], chain_str, unlock=True)
#    t.get()
#    i = 0
#    tx_hash_new_hex = None
#    for r in t.collect():
#        tx_hash_new_hex = r[1]
#    assert t.successful()
#
#    tx_raw_new = get_tx(tx_hash_new_hex)
#    logg.debug('get {}'.format(tx_raw_new))
#    tx_dict_new = unpack_signed_raw_tx(bytes.fromhex(tx_raw_new['signed_tx'][2:]), default_chain_spec.chain_id())
#    assert tx_hash_new_hex != tx_dict['hash']
#    assert tx_dict_new['gasPrice'] > gas_price_before
#
#    tx_dict_after = get_tx(tx_dict['hash'])
#
#    logg.debug('logggg {}'.format(status_str(tx_dict_after['status'])))
#    assert tx_dict_after['status'] & StatusBits.MANUAL


#def test_check_fix_nonce(
#        default_chain_spec,
#        init_database,
#        init_eth_account_roles,
#        init_w3,
#        eth_empty_accounts,
#        celery_session_worker,
#        ):
#
#    chain_str = str(default_chain_spec)
#
#    sigs = []
#    for i in range(5):
#        s = celery.signature(
#                'cic_eth.eth.tx.refill_gas',
#                [
#                    eth_empty_accounts[i],
#                    chain_str,
#                    ],
#                queue=None,
#                )
#        sigs.append(s)
#
#    t = celery.group(sigs)()
#    txs = t.get()
#    assert t.successful()
#
#    tx_hash = web3.Web3.keccak(hexstr=txs[2])
#    c = RpcClient(default_chain_spec)
#    api = AdminApi(c, queue=None)
#    address = init_eth_account_roles['eth_account_gas_provider']
#    nonce_spec = api.check_nonce(address)
#    assert nonce_spec['nonce']['network'] == 0
#    assert nonce_spec['nonce']['queue'] == 4
#    assert nonce_spec['nonce']['blocking'] == None
#
#    s_set = celery.signature(
#            'cic_eth.queue.tx.set_rejected',
#            [
#                tx_hash.hex(),
#                ],
#            queue=None,
#            )
#    t = s_set.apply_async()
#    t.get()
#    t.collect()
#    assert t.successful()
#
#
#    nonce_spec = api.check_nonce(address)
#    assert nonce_spec['nonce']['blocking'] == 2
#    assert nonce_spec['tx']['blocking'] == tx_hash.hex()
#
#    t = api.fix_nonce(address, nonce_spec['nonce']['blocking'])
#    t.get()
#    t.collect()
#    assert t.successful()
#
#    for tx in txs[3:]:
#        tx_hash = web3.Web3.keccak(hexstr=tx)
#        tx_dict = get_tx(tx_hash.hex())
#        assert tx_dict['status'] == StatusEnum.OVERRIDDEN
#
#


def test_have_account(
        default_chain_spec,
        custodial_roles,
        init_celery_tasks,
        eth_rpc,
        celery_session_worker,
        ):

    api = AdminApi(None, queue=None)
    t = api.have_account(custodial_roles['ALICE'], default_chain_spec)
    assert t.get() != None

    bogus_address = add_0x(to_checksum_address(os.urandom(20).hex()))
    api = AdminApi(None, queue=None)
    t = api.have_account(bogus_address, default_chain_spec)
    assert t.get() == None


def test_locking(
        default_chain_spec,
        init_database,
        agent_roles,
        init_celery_tasks,
        celery_session_worker,
        ):

    api = AdminApi(None, queue=None)

    t = api.lock(default_chain_spec, agent_roles['ALICE'], LockEnum.SEND)
    t.get()
    t = api.get_lock()
    r = t.get()
    assert len(r) == 1

    t = api.unlock(default_chain_spec, agent_roles['ALICE'], LockEnum.SEND)
    t.get()
    t = api.get_lock()
    r = t.get()
    assert len(r) == 0


def test_tag_account(
        default_chain_spec,
        init_database,
        agent_roles,
        eth_rpc,
        init_celery_tasks,
        celery_session_worker,
        ):

    api = AdminApi(eth_rpc, queue=None)

    t = api.tag_account('foo', agent_roles['ALICE'], default_chain_spec)
    t.get()
    t = api.tag_account('bar', agent_roles['BOB'], default_chain_spec)
    t.get()
    t = api.tag_account('bar', agent_roles['CAROL'], default_chain_spec)
    t.get()

    assert AccountRole.get_address('foo', init_database) == agent_roles['ALICE']
    assert AccountRole.get_address('bar', init_database) == agent_roles['CAROL']


#def test_ready(
#        init_database,
#        agent_roles,
#        eth_rpc,
#        ):
#
#    api = AdminApi(eth_rpc)
#
#    with pytest.raises(InitializationError):
#        api.ready()
#
#    bogus_account = os.urandom(20)
#    bogus_account_hex = '0x' + bogus_account.hex()
#
#    api.tag_account('ETH_GAS_PROVIDER_ADDRESS', web3.Web3.toChecksumAddress(bogus_account_hex))
#    with pytest.raises(KeyError):
#        api.ready()
#
#    api.tag_account('ETH_GAS_PROVIDER_ADDRESS', eth_empty_accounts[0])
#    api.ready()


def test_tx(
        default_chain_spec,
        cic_registry,
        init_database,
        eth_rpc,
        eth_signer,
        agent_roles,
        contract_roles,
        celery_session_worker,
        ):

    chain_id = default_chain_spec.chain_id()
    nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc)
    c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=chain_id)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 1024, tx_format=TxFormat.RLP_SIGNED)
    tx = unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), chain_id)
    queue_create(tx['nonce'], agent_roles['ALICE'], tx_hash_hex, tx_signed_raw_hex, default_chain_spec, session=init_database)
    cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict())

    api = AdminApi(eth_rpc, queue=None, call_address=contract_roles['DEFAULT'])
    tx = api.tx(default_chain_spec, tx_hash=tx_hash_hex)
    logg.warning('code missing to verify tx contents {}'.format(tx))
@ -3,49 +3,50 @@ import os
import logging
import time

# third-party imports
# external imports
import pytest
import celery
from cic_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token
from chainlib.chain import ChainSpec

# platform imports
# local imports
from cic_eth.api import Api
from cic_eth.eth.factory import TxFactory

logg = logging.getLogger(__name__)


def test_account_api(
        default_chain_spec,
        init_w3,
        init_database,
        init_eth_account_roles,
        init_eth_rpc,
        account_registry,
        custodial_roles,
        celery_session_worker,
        ):
    api = Api(str(default_chain_spec), callback_param='accounts', callback_task='cic_eth.callbacks.noop.noop', queue=None)
    t = api.create_account('', register=False)
    t.get()
    t.get_leaf()
    for r in t.collect():
        print(r)
    assert t.successful()


def test_transfer_api(
        default_chain_spec,
        init_w3,
        eth_rpc,
        cic_registry,
        init_database,
        bancor_registry,
        foo_token,
        bancor_tokens,
        custodial_roles,
        agent_roles,
        cic_registry,
        register_tokens,
        celery_session_worker,
        ):

    token = CICRegistry.get_address(default_chain_spec, bancor_tokens[0])
    #token = CICRegistry.get_address(default_chain_spec, bancor_tokens[0])
    foo_token_cache = ERC20Token(eth_rpc, foo_token)

    api = Api(str(default_chain_spec), callback_param='transfer', callback_task='cic_eth.callbacks.noop.noop', queue=None)
    t = api.transfer(init_w3.eth.accounts[2], init_w3.eth.accounts[4], 111, token.symbol())
    t = api.transfer(custodial_roles['FOO_TOKEN_GIFTER'], agent_roles['ALICE'], 1024, foo_token_cache.symbol)
    t.get()
    t.get_leaf()
    for r in t.collect():
        print(r)
    assert t.successful()


@ -55,8 +56,8 @@ def test_convert_api(
        init_w3,
        cic_registry,
        init_database,
        bancor_registry,
        foo_token,
        bancor_tokens,
        bar_token,
        celery_session_worker,
        ):

@ -64,9 +65,8 @@ def test_convert_api(
    token_bob = CICRegistry.get_address(default_chain_spec, bancor_tokens[1])

    api = Api(str(default_chain_spec), callback_param='convert', callback_task='cic_eth.callbacks.noop.noop', queue=None)
    t = api.convert(init_w3.eth.accounts[2], 110, 100, token_alice.symbol(), token_bob.symbol())
    t = api.convert(custodial_roles['FOO_TOKEN_GIFTER'], 110, 100, foo_token_cache.symbol, bar_token_cache.symbol)
    for r in t.collect():
    t.get_leaf()
        print(r)
    assert t.successful()


@ -94,14 +94,14 @@ def test_convert_transfer_api(

def test_refill_gas(
        default_chain_spec,
        cic_registry,
        init_database,
        init_w3,
        celery_session_worker,
        eth_empty_accounts,
        init_eth_rpc,
        custodial_roles,
        celery_session_worker,
        ):

    api = Api(str(default_chain_spec), callback_param='convert_transfer', callback_task='cic_eth.callbacks.noop.noop', queue=None)
    api = Api(str(default_chain_spec), callback_param='refill_gas', callback_task='cic_eth.callbacks.noop.noop', queue=None)
    t = api.refill_gas(eth_empty_accounts[0])
    t.get()
    for r in t.collect():
55 apps/cic-eth/tests/task/api/test_balance.py Normal file
@ -0,0 +1,55 @@
# standard imports
import os
import logging

# external imports
from chainlib.eth.address import to_checksum_address

# local imports
from cic_eth.api.api_task import Api

logg = logging.getLogger()

def test_balance_simple_api(
        default_chain_spec,
        init_database,
        cic_registry,
        foo_token,
        register_tokens,
        api,
        celery_session_worker,
        ):

    chain_str = str(default_chain_spec)

    a = to_checksum_address('0x' + os.urandom(20).hex())
    t = api.balance(a, 'FOO', include_pending=False)
    r = t.get_leaf()
    assert t.successful()
    logg.debug(r)

    assert r[0].get('balance_network') != None


def test_balance_complex_api(
        default_chain_spec,
        init_database,
        cic_registry,
        foo_token,
        register_tokens,
        api,
        celery_session_worker,
        ):

    chain_str = str(default_chain_spec)

    a = to_checksum_address('0x' + os.urandom(20).hex())
    t = api.balance(a, 'FOO', include_pending=True)
    r = t.get_leaf()
    assert t.successful()
    logg.debug(r)

    assert r[0].get('balance_incoming') != None
    assert r[0].get('balance_outgoing') != None
    assert r[0].get('balance_network') != None
120 apps/cic-eth/tests/task/api/test_list.py Normal file
@ -0,0 +1,120 @@
# standard imports
import logging

# local imports
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.erc20 import ERC20
from chainlib.eth.tx import receipt
from cic_eth.api.api_task import Api
from tests.mock.filter import (
        block_filter,
        tx_filter,
        )
from cic_eth.db.models.nonce import (
        Nonce,
        NonceReservation,
        )

logg = logging.getLogger()


def test_list_tx(
        default_chain_spec,
        init_database,
        cic_registry,
        eth_rpc,
        eth_signer,
        custodial_roles,
        agent_roles,
        foo_token,
        register_tokens,
        init_eth_tester,
        celery_session_worker,
        ):

    chain_id = default_chain_spec.chain_id()

    tx_hashes = []

    # external tx
    nonce_oracle = RPCNonceOracle(custodial_roles['FOO_TOKEN_GIFTER'], eth_rpc)
    nonce = nonce_oracle.get_nonce()

    q = init_database.query(Nonce)
    q = q.filter(Nonce.address_hex==agent_roles['ALICE'])
    o = q.first()
    o.nonce = nonce
    init_database.add(o)
    init_database.commit()

    # TODO: implement cachenonceoracle instead, this is useless
    # external tx one
    Nonce.next(custodial_roles['FOO_TOKEN_GIFTER'], 'foo', session=init_database)
    init_database.commit()

    init_eth_tester.mine_blocks(13)
    c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=chain_id)
    (tx_hash_hex, o) = c.transfer(foo_token, custodial_roles['FOO_TOKEN_GIFTER'], agent_roles['ALICE'], 1024)
    eth_rpc.do(o)
    o = receipt(tx_hash_hex)
    r = eth_rpc.do(o)
    assert r['status'] == 1
    a = r['block_number']
    block_filter.add(a.to_bytes(4, 'big'))

    a = r['block_number'] + r['transaction_index']
    tx_filter.add(a.to_bytes(4, 'big'))

    tx_hashes.append(tx_hash_hex)

    # external tx two
    Nonce.next(agent_roles['ALICE'], 'foo', session=init_database)
    init_database.commit()

    init_eth_tester.mine_blocks(13)
    nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc)
    c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=chain_id)
    (tx_hash_hex, o) = c.transfer(foo_token, agent_roles['ALICE'], agent_roles['BOB'], 256)
    eth_rpc.do(o)
    o = receipt(tx_hash_hex)
    r = eth_rpc.do(o)
    assert r['status'] == 1
    a = r['block_number']
    block_filter.add(a.to_bytes(4, 'big'))

    a = r['block_number'] + r['transaction_index']
    tx_filter.add(a.to_bytes(4, 'big'))

    tx_hashes.append(tx_hash_hex)

    init_eth_tester.mine_blocks(28)

    # custodial tx 1
    api = Api(str(default_chain_spec), queue=None)
    t = api.transfer(agent_roles['ALICE'], agent_roles['CAROL'], 64, 'FOO') #, 'blinky')
    r = t.get_leaf()
    assert t.successful()
    tx_hashes.append(r)

    # custodial tx 2
    api = Api(str(default_chain_spec), queue=None)
    t = api.transfer(agent_roles['ALICE'], agent_roles['DAVE'], 16, 'FOO') #, 'blinky')
    r = t.get_leaf()
    assert t.successful()
    tx_hashes.append(r)

    logg.debug('r {}'.format(r))

    # test the api
    t = api.list(agent_roles['ALICE'], external_task='tests.mock.filter.filter')
    r = t.get_leaf()
    assert t.successful()

    assert len(r) == 3
    logg.debug('rrrr {}'.format(r))

    for tx in r:
        logg.debug('have tx {}'.format(tx))
        tx_hashes.remove(tx['hash'])
    assert len(tx_hashes) == 1
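The TODO note above ("implement cachenonceoracle instead") refers to replacing the per-transaction RPC nonce lookup with a locally tracked counter. A minimal sketch of such an oracle, assuming only that it is seeded once from an RPC-derived nonce (the class and method names here are hypothetical and not a chainlib API):

    # hypothetical helper, not part of the commit or of chainlib
    class CacheNonceOracle:

        def __init__(self, start_nonce):
            # seed with a nonce obtained once, e.g. from an RPC nonce oracle
            self.nonce = start_nonce

        def get_nonce(self):
            return self.nonce

        def next_nonce(self):
            # hand out the current value and advance locally,
            # avoiding an RPC round trip per transaction
            n = self.nonce
            self.nonce += 1
            return n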
1 apps/cic-eth/tests/task/conftest.py Normal file
@ -0,0 +1 @@
from tests.fixtures_celery import *
@ -5,8 +5,12 @@ import time

# third-party imports
import pytest
import web3
import celery
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import receipt
from eth_accounts_index import AccountRegistry
from hexathon import strip_0x

# local imports
from cic_eth.error import OutOfGasError
@ -16,33 +20,30 @@ from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import StatusEnum
from cic_eth.db.models.nonce import Nonce
from cic_eth.db.models.role import AccountRole
from cic_eth.eth.account import AccountTxFactory

logg = logging.getLogger() #__name__)
logg = logging.getLogger()


def test_create_account(
        default_chain_spec,
        init_w3,
        eth_rpc,
        init_database,
        celery_session_worker,
        caplog,
        ):

    s = celery.signature(
            'cic_eth.eth.account.create',
            [
                'foo',
                str(default_chain_spec),
                default_chain_spec.asdict(),
                ],
            )
    t = s.apply_async()
    r = t.get()
    logg.debug('got account {}'.format(r))

    session = SessionBase.create_session()
    q = session.query(Nonce).filter(Nonce.address_hex==r)
    o = q.first()
    logg.debug('oooo s {}'.format(o))
    session.close()
    assert o != None
    assert o.nonce == 0
@ -51,7 +52,7 @@ def test_create_account(
            'cic_eth.eth.account.have',
            [
                r,
                str(default_chain_spec),
                default_chain_spec.asdict(),
                ],
            )
    t = s.apply_async()
@ -60,38 +61,39 @@ def test_create_account(

def test_register_account(
        default_chain_spec,
        accounts_registry,
        account_registry,
        init_database,
        init_eth_tester,
        init_w3,
        eth_accounts,
        init_rpc,
        eth_rpc,
        cic_registry,
        celery_session_worker,
        eth_empty_accounts,
        custodial_roles,
        call_sender,
        celery_session_worker,
        ):

    logg.debug('chainspec {}'.format(str(default_chain_spec)))

    s_nonce = celery.signature(
            'cic_eth.eth.tx.reserve_nonce',
            [
                eth_empty_accounts[0],
                init_w3.eth.accounts[0],
                custodial_roles['ACCOUNT_REGISTRY_WRITER'],
                ],
            queue=None,
            )
    s_register = celery.signature(
            'cic_eth.eth.account.register',
            [
                str(default_chain_spec),
                default_chain_spec.asdict(),
                init_w3.eth.accounts[0],
                custodial_roles['ACCOUNT_REGISTRY_WRITER'],
                ],
            queue=None,
            )
    s_nonce.link(s_register)
    t = s_nonce.apply_async()
    address = t.get()
    for r in t.collect():
        pass
        logg.debug('r {}'.format(r))
    assert t.successful()

    session = SessionBase.create_session()
@ -103,8 +105,9 @@ def test_register_account(
            'cic_eth.eth.tx.send',
            [
                [tx_signed_hex],
                str(default_chain_spec),
                default_chain_spec.asdict(),
                ],
            queue=None,
            )
    t = s_send.apply_async()
    address = t.get()
@ -113,13 +116,16 @@ def test_register_account(

    init_eth_tester.mine_block()

    assert accounts_registry.have(eth_empty_accounts[0])
    c = AccountRegistry()
    o = c.have(account_registry, eth_empty_accounts[0], sender_address=call_sender)
    r = eth_rpc.do(o)
    assert int(strip_0x(r), 16) == 1


def test_role_task(
        default_chain_spec,
        init_database,
        celery_session_worker,
        default_chain_spec,
        ):

    address = '0x' + os.urandom(20).hex()
@ -130,9 +136,53 @@ def test_role_task(
            'cic_eth.eth.account.role',
            [
                address,
                str(default_chain_spec),
                default_chain_spec.asdict(),
                ],
            )
    t = s.apply_async()
    r = t.get()
    assert r == 'foo'


def test_gift(
        init_database,
        default_chain_spec,
        contract_roles,
        agent_roles,
        account_registry,
        faucet,
        eth_rpc,
        eth_signer,
        init_celery_tasks,
        cic_registry,
        celery_session_worker,
        ):

    nonce_oracle = RPCNonceOracle(contract_roles['ACCOUNT_REGISTRY_WRITER'], eth_rpc)
    c = AccountRegistry(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=default_chain_spec.chain_id())
    (tx_hash_hex, o) = c.add(account_registry, contract_roles['ACCOUNT_REGISTRY_WRITER'], agent_roles['ALICE'])
    eth_rpc.do(o)
    o = receipt(tx_hash_hex)
    r = eth_rpc.do(o)
    assert r['status'] == 1

    s_nonce = celery.signature(
            'cic_eth.eth.tx.reserve_nonce',
            [
                agent_roles['ALICE'],
                ],
            queue=None,
            )

    s_gift = celery.signature(
            'cic_eth.eth.account.gift',
            [
                default_chain_spec.asdict(),
                ],
            queue=None,
            )
    s_nonce.link(s_gift)
    t = s_nonce.apply_async()
    r = t.get_leaf()
    assert t.successful()
167 apps/cic-eth/tests/task/test_task_erc20.py Normal file
@ -0,0 +1,167 @@
# standard imports
import logging

# external imports
import pytest
import celery
from chainlib.eth.erc20 import ERC20
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import (
        receipt,
        TxFormat,
        )

# local imports
from cic_eth.queue.tx import register_tx

logg = logging.getLogger()


def test_otx_cache_transfer(
        default_chain_spec,
        foo_token,
        token_roles,
        agent_roles,
        eth_signer,
        eth_rpc,
        init_database,
        celery_session_worker,
        ):
    nonce_oracle = RPCNonceOracle(token_roles['FOO_TOKEN_OWNER'], eth_rpc)
    c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=default_chain_spec.chain_id())
    transfer_value = 100 * (10**6)
    (tx_hash_hex, tx_signed_raw_hex) = c.transfer(foo_token, token_roles['FOO_TOKEN_OWNER'], agent_roles['ALICE'], transfer_value, tx_format=TxFormat.RLP_SIGNED)
    register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database)

    s = celery.signature(
            'cic_eth.eth.erc20.cache_transfer_data',
            [
                tx_hash_hex,
                tx_signed_raw_hex,
                default_chain_spec.asdict(),
                ],
            queue=None,
            )
    t = s.apply_async()
    r = t.get()

    assert r[0] == tx_hash_hex


def test_erc20_balance_task(
        default_chain_spec,
        foo_token,
        token_roles,
        agent_roles,
        eth_signer,
        eth_rpc,
        celery_session_worker,
        ):

    nonce_oracle = RPCNonceOracle(token_roles['FOO_TOKEN_OWNER'], eth_rpc)
    c = ERC20(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=default_chain_spec.chain_id())
    transfer_value = 100 * (10**6)
    (tx_hash_hex, o) = c.transfer(foo_token, token_roles['FOO_TOKEN_OWNER'], agent_roles['ALICE'], transfer_value)
    eth_rpc.do(o)

    o = receipt(tx_hash_hex)
    r = eth_rpc.do(o)
    assert r['status'] == 1

    token_object = {
        'address': foo_token,
        }
    s = celery.signature(
            'cic_eth.eth.erc20.balance',
            [
                [token_object],
                agent_roles['ALICE'],
                default_chain_spec.asdict(),
                ],
            queue=None,
            )
    t = s.apply_async()
    r = t.get()
    assert r[0]['balance_network'] == transfer_value


def test_erc20_transfer_task(
        default_chain_spec,
        foo_token,
        agent_roles,
        custodial_roles,
        eth_signer,
        eth_rpc,
        init_database,
        celery_session_worker,
        ):

    token_object = {
        'address': foo_token,
        }
    transfer_value = 100 * (10 ** 6)

    s_nonce = celery.signature(
            'cic_eth.eth.tx.reserve_nonce',
            [
                [token_object],
                custodial_roles['FOO_TOKEN_GIFTER'],
                ],
            queue=None,
            )
    s_transfer = celery.signature(
            'cic_eth.eth.erc20.transfer',
            [
                custodial_roles['FOO_TOKEN_GIFTER'],
                agent_roles['ALICE'],
                transfer_value,
                default_chain_spec.asdict(),
                ],
            queue=None,
            )
    s_nonce.link(s_transfer)
    t = s_nonce.apply_async()
    r = t.get_leaf()

    logg.debug('result {}'.format(r))


def test_erc20_approve_task(
        default_chain_spec,
        foo_token,
        agent_roles,
        custodial_roles,
        eth_signer,
        eth_rpc,
        init_database,
        celery_session_worker,
        ):

    token_object = {
        'address': foo_token,
        }
    transfer_value = 100 * (10 ** 6)

    s_nonce = celery.signature(
            'cic_eth.eth.tx.reserve_nonce',
            [
                [token_object],
                custodial_roles['FOO_TOKEN_GIFTER'],
                ],
            queue=None,
            )
    s_transfer = celery.signature(
            'cic_eth.eth.erc20.approve',
            [
                custodial_roles['FOO_TOKEN_GIFTER'],
                agent_roles['ALICE'],
                transfer_value,
                default_chain_spec.asdict(),
                ],
            queue=None,
            )
    s_nonce.link(s_transfer)
    t = s_nonce.apply_async()
    r = t.get_leaf()

    logg.debug('result {}'.format(r))
69 apps/cic-eth/tests/task/test_task_tx.py Normal file
@ -0,0 +1,69 @@
# standard imports
import logging

# external imports
import pytest
import celery
from chainlib.eth.gas import Gas
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import (
    TxFormat,
    unpack,
    transaction,
    receipt,
)

# local imports
from cic_eth.queue.tx import register_tx
from cic_eth.eth.tx import cache_gas_data

logg = logging.getLogger()


@pytest.mark.skip()
def test_tx_send(
    init_database,
    default_chain_spec,
    eth_rpc,
    eth_signer,
    agent_roles,
    contract_roles,
    celery_session_worker,
):

    chain_id = default_chain_spec.chain_id()
    nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc)
    c = Gas(signer=eth_signer, nonce_oracle=nonce_oracle, chain_id=chain_id)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 1024, tx_format=TxFormat.RLP_SIGNED)
    #unpack(bytes.fromhex(strip_0x(tx_signed_raw_hex)), chain_id)
    register_tx(tx_hash_hex, tx_signed_raw_hex, default_chain_spec, None, session=init_database)
    cache_gas_data(tx_hash_hex, tx_signed_raw_hex, default_chain_spec.asdict())

    s_send = celery.signature(
        'cic_eth.eth.tx.send',
        [
            [tx_signed_raw_hex],
            default_chain_spec.asdict(),
        ],
        queue=None,
    )
    t = s_send.apply_async()
    r = t.get()
    assert t.successful()

    o = transaction(tx_hash_hex)
    tx = eth_rpc.do(o)
    assert r == tx['hash']

    o = receipt(tx_hash_hex)
    rcpt = eth_rpc.do(o)
    assert rcpt['status'] == 1


def test_sync_tx(
    default_chain_spec,
    eth_rpc,
    eth_signer,
    celery_worker,
):
    pass
@ -1,253 +0,0 @@
# standard imports
import logging

# third-party imports
from cic_registry import CICRegistry
import celery

# local imports
from cic_eth.eth.rpc import RpcClient
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.nonce import Nonce
from cic_eth.eth.util import unpack_signed_raw_tx

#logg = logging.getLogger(__name__)
logg = logging.getLogger()


def test_balance_complex(
    default_chain_spec,
    init_database,
    init_w3,
    cic_registry,
    dummy_token_gifted,
    celery_session_worker,
    init_eth_tester,
):

    chain_str = str(default_chain_spec)
    token_data = {
        'address': dummy_token_gifted,
        'converters': [],
    }

    tx_hashes = []

    # TODO: Temporary workaround for nonce db cache initialization being made before deployments.
    # Instead use different accounts than system ones for transfers for tests
    nonce = init_w3.eth.getTransactionCount(init_w3.eth.accounts[0])
    q = init_database.query(Nonce)
    q = q.filter(Nonce.address_hex==init_w3.eth.accounts[0])
    o = q.first()
    o.nonce = nonce
    init_database.add(o)
    init_database.commit()

    for i in range(3):
        s_nonce = celery.signature(
            'cic_eth.eth.tx.reserve_nonce',
            [
                [token_data],
                init_w3.eth.accounts[0],
            ],
            queue=None,
        )
        s_transfer = celery.signature(
            'cic_eth.eth.token.transfer',
            [
                init_w3.eth.accounts[0],
                init_w3.eth.accounts[1],
                1000*(i+1),
                chain_str,
            ],
            queue=None,
        )
        s_nonce.link(s_transfer)
        t = s_nonce.apply_async()
        t.get()
        r = None
        for c in t.collect():
            r = c[1]
        assert t.successful()
        tx_hashes.append(r)

        otx = Otx.load(r)

        s_send = celery.signature(
            'cic_eth.eth.tx.send',
            [
                [otx.signed_tx],
                chain_str,
            ],
        )
        t = s_send.apply_async()
        t.get()
        for r in t.collect():
            pass
        assert t.successful()
        init_eth_tester.mine_block()


    # here insert block sync to get state of balance

    s_balance_base = celery.signature(
        'cic_eth.eth.token.balance',
        [
            [token_data],
            init_w3.eth.accounts[0],
            chain_str,
        ],
    )

    s_balance_out = celery.signature(
        'cic_eth.queue.balance.balance_outgoing',
        [
            init_w3.eth.accounts[0],
            chain_str,
        ]
    )

    s_balance_in = celery.signature(
        'cic_eth.queue.balance.balance_incoming',
        [
            init_w3.eth.accounts[0],
            chain_str,
        ]
    )
    s_balance_out.link(s_balance_in)
    s_balance_base.link(s_balance_out)
    t = s_balance_base.apply_async()
    t.get()
    r = None
    for c in t.collect():
        r = c[1]
    assert t.successful()

    assert r[0]['balance_network'] > 0
    assert r[0]['balance_incoming'] == 0
    assert r[0]['balance_outgoing'] > 0

    s_balance_base = celery.signature(
        'cic_eth.eth.token.balance',
        [
            init_w3.eth.accounts[1],
            chain_str,
        ],
    )

    s_balance_out = celery.signature(
        'cic_eth.queue.balance.balance_outgoing',
        [
            [token_data],
            init_w3.eth.accounts[1],
            chain_str,
        ]
    )

    s_balance_in = celery.signature(
        'cic_eth.queue.balance.balance_incoming',
        [
            init_w3.eth.accounts[1],
            chain_str,
        ]
    )

    s_balance_base.link(s_balance_in)
    s_balance_out.link(s_balance_base)
    t = s_balance_out.apply_async()
    t.get()
    r = None
    for c in t.collect():
        r = c[1]
    assert t.successful()

    assert r[0]['balance_network'] > 0
    assert r[0]['balance_incoming'] > 0
    assert r[0]['balance_outgoing'] == 0

    # Set confirmed status in backend
    for tx_hash in tx_hashes:
        rcpt = init_w3.eth.getTransactionReceipt(tx_hash)
        assert rcpt['status'] == 1
        otx = Otx.load(tx_hash, session=init_database)
        otx.success(block=rcpt['blockNumber'], session=init_database)
        init_database.add(otx)
        init_database.commit()


    s_balance_base = celery.signature(
        'cic_eth.eth.token.balance',
        [
            init_w3.eth.accounts[1],
            chain_str,
        ],
    )

    s_balance_out = celery.signature(
        'cic_eth.queue.balance.balance_outgoing',
        [
            [token_data],
            init_w3.eth.accounts[1],
            chain_str,
        ]
    )

    s_balance_in = celery.signature(
        'cic_eth.queue.balance.balance_incoming',
        [
            init_w3.eth.accounts[1],
            chain_str,
        ]
    )

    s_balance_base.link(s_balance_in)
    s_balance_out.link(s_balance_base)
    t = s_balance_out.apply_async()
    t.get()
    r = None
    for c in t.collect():
        r = c[1]
    assert t.successful()
    assert r[0]['balance_network'] > 0
    assert r[0]['balance_incoming'] == 0
    assert r[0]['balance_outgoing'] == 0


    s_balance_base = celery.signature(
        'cic_eth.eth.token.balance',
        [
            init_w3.eth.accounts[0],
            chain_str,
        ],
    )

    s_balance_out = celery.signature(
        'cic_eth.queue.balance.balance_outgoing',
        [
            [token_data],
            init_w3.eth.accounts[0],
            chain_str,
        ]
    )

    s_balance_in = celery.signature(
        'cic_eth.queue.balance.balance_incoming',
        [
            init_w3.eth.accounts[0],
            chain_str,
        ]
    )

    s_balance_base.link(s_balance_in)
    s_balance_out.link(s_balance_base)
    t = s_balance_out.apply_async()
    t.get()
    r = None
    for c in t.collect():
        r = c[1]
    assert t.successful()
    assert r[0]['balance_network'] > 0
    assert r[0]['balance_incoming'] == 0
    assert r[0]['balance_outgoing'] == 0
@ -1,50 +0,0 @@
# standard imports
import logging
import os

# external imports
import pytest
import celery

# local imports
from cic_eth.db import TxConvertTransfer
from cic_eth.eth.bancor import BancorTxFactory

logg = logging.getLogger()


@pytest.mark.skip()
def test_transfer_after_convert(
    init_w3,
    init_database,
    cic_registry,
    bancor_tokens,
    bancor_registry,
    default_chain_spec,
    celery_session_worker,
):

    tx_hash = os.urandom(32).hex()
    txct = TxConvertTransfer(tx_hash, init_w3.eth.accounts[1], default_chain_spec)
    init_database.add(txct)
    init_database.commit()

    s = celery.signature(
        'cic_eth.eth.bancor.transfer_converted',
        [
            [
                {
                    'address': bancor_tokens[0],
                },
            ],
            init_w3.eth.accounts[0],
            init_w3.eth.accounts[1],
            1024,
            tx_hash,
            str(default_chain_spec),
        ],
    )
    t = s.apply_async()
    t.get()
    t.collect()
    assert t.successful()
@ -1,29 +0,0 @@
# external imports
import celery

# local imports
from cic_eth.db.models.debug import Debug


def test_debug_alert(
    init_database,
    celery_session_worker,
):

    s = celery.signature(
        'cic_eth.admin.debug.alert',
        [
            'foo',
            'bar',
            'baz',
        ],
        queue=None,
    )
    t = s.apply_async()
    r = t.get()
    assert r == 'foo'

    q = init_database.query(Debug)
    q = q.filter(Debug.tag=='bar')
    o = q.first()
    assert o.description == 'baz'
@ -1,81 +0,0 @@
# standard imports
import os
import json
import logging

# third-party imports
import celery

# local imports
from cic_eth.eth.account import unpack_gift
from cic_eth.eth.factory import TxFactory
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.db.models.nonce import Nonce
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache

logg = logging.getLogger()

script_dir = os.path.dirname(__file__)


def test_faucet(
    default_chain_spec,
    faucet_amount,
    faucet,
    eth_empty_accounts,
    bancor_tokens,
    w3_account_roles,
    w3_account_token_owners,
    init_w3,
    solidity_abis,
    init_eth_tester,
    cic_registry,
    celery_session_worker,
    init_database,
):

    s_nonce = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            init_w3.eth.accounts[7],
        ],
        queue=None,
    )
    s_gift = celery.signature(
        'cic_eth.eth.account.gift',
        [
            str(default_chain_spec),
        ],
    )
    s_send = celery.signature(
        'cic_eth.eth.tx.send',
        [
            str(default_chain_spec),
        ],
    )
    s_gift.link(s_send)
    s_nonce.link(s_gift)
    t = s_nonce.apply_async()
    t.get()
    for r in t.collect():
        logg.debug('result {}'.format(r))
    assert t.successful()

    q = init_database.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.sender==init_w3.eth.accounts[7])
    o = q.first()
    signed_tx = o.signed_tx

    tx = unpack_signed_raw_tx(bytes.fromhex(signed_tx[2:]), default_chain_spec.chain_id())
    giveto = unpack_gift(tx['data'])
    assert giveto['to'] == init_w3.eth.accounts[7]

    init_eth_tester.mine_block()

    token = init_w3.eth.contract(abi=solidity_abis['ERC20'], address=bancor_tokens[0])

    balance = token.functions.balanceOf(init_w3.eth.accounts[7]).call()

    assert balance == faucet_amount
@ -1,346 +0,0 @@
# standard imports
import logging
import time

# third-party imports
import pytest
import celery
from web3.exceptions import ValidationError

# local imports
from cic_eth.db.enum import StatusEnum
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.task import sign_and_register_tx
from cic_eth.eth.task import sign_tx
from cic_eth.eth.token import TokenTxFactory
from cic_eth.eth.token import TxFactory
from cic_eth.eth.token import cache_transfer_data
from cic_eth.eth.rpc import RpcClient
from cic_eth.queue.tx import create as queue_create
from cic_eth.error import OutOfGasError
from cic_eth.db.models.role import AccountRole
from cic_eth.error import AlreadyFillingGasError

logg = logging.getLogger()


def test_refill_gas(
    default_chain_spec,
    init_eth_tester,
    init_rpc,
    init_w3,
    init_database,
    cic_registry,
    init_eth_account_roles,
    celery_session_worker,
    eth_empty_accounts,
):

    provider_address = AccountRole.get_address('GAS_GIFTER', init_database)
    receiver_address = eth_empty_accounts[0]

    c = init_rpc
    refill_amount = c.refill_amount()

    balance = init_rpc.w3.eth.getBalance(receiver_address)
    s_nonce = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            eth_empty_accounts[0],
            provider_address,
        ],
        queue=None,
    )
    s_refill = celery.signature(
        'cic_eth.eth.tx.refill_gas',
        [
            str(default_chain_spec),
        ],
        queue=None,
    )

    s_nonce.link(s_refill)
    t = s_nonce.apply_async()
    r = t.get()
    for c in t.collect():
        pass
    assert t.successful()

    q = init_database.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.recipient==receiver_address)
    o = q.first()
    signed_tx = o.signed_tx

    s = celery.signature(
        'cic_eth.eth.tx.send',
        [
            [signed_tx],
            str(default_chain_spec),
        ],
    )
    t = s.apply_async()
    r = t.get()
    t.collect()
    assert t.successful()

    init_eth_tester.mine_block()
    balance_new = init_rpc.w3.eth.getBalance(receiver_address)
    assert balance_new == (balance + refill_amount)

    # Verify that entry is added in TxCache
    q = init_database.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.recipient==receiver_address)
    r = q.first()
    init_database.commit()

    assert r.status == StatusEnum.SENT


def test_refill_deduplication(
    default_chain_spec,
    init_rpc,
    init_w3,
    init_database,
    init_eth_account_roles,
    cic_registry,
    celery_session_worker,
    eth_empty_accounts,
):

    provider_address = AccountRole.get_address('ETH_GAS_PROVIDER_ADDRESS', init_database)
    receiver_address = eth_empty_accounts[0]

    c = init_rpc
    refill_amount = c.refill_amount()

    s_nonce = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            receiver_address,
            provider_address,
        ],
        queue=None,
    )
    s_refill = celery.signature(
        'cic_eth.eth.tx.refill_gas',
        [
            str(default_chain_spec),
        ],
        queue=None,
    )

    s_nonce.link(s_refill)
    t = s_nonce.apply_async()
    r = t.get()
    for e in t.collect():
        pass
    assert t.successful()

    s_nonce = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            receiver_address,
            provider_address,
        ],
        queue=None,
    )
    s_refill = celery.signature(
        'cic_eth.eth.tx.refill_gas',
        [
            str(default_chain_spec),
        ],
    )

    s_nonce.link(s_refill)
    t = s_nonce.apply_async()
    #with pytest.raises(AlreadyFillingGasError):
    t.get()
    for e in t.collect():
        pass
    assert t.successful()
    logg.warning('TODO: complete test by checking that second tx had zero value')


# TODO: check gas is part of the transfer chain, and we cannot create the transfer nonce by uuid before the task. Test is subsumed by transfer task test, but should be tested in isolation
#def test_check_gas(
#    default_chain_spec,
#    init_eth_tester,
#    init_w3,
#    init_rpc,
#    eth_empty_accounts,
#    init_database,
#    cic_registry,
#    celery_session_worker,
#    bancor_registry,
#    bancor_tokens,
#):
#
#    provider_address = init_w3.eth.accounts[0]
#    gas_receiver_address = eth_empty_accounts[0]
#    token_receiver_address = init_w3.eth.accounts[1]
#
##    c = init_rpc
##    txf = TokenTxFactory(gas_receiver_address, c)
##    tx_transfer = txf.transfer(bancor_tokens[0], token_receiver_address, 42, default_chain_spec, 'foo')
##
##    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_transfer, str(default_chain_spec), None)
#
#    token_data = [
#        {
#            'address': bancor_tokens[0],
#        },
#    ]
#
#    s_nonce = celery.signature(
#        'cic_eth.eth.tx.reserve_nonce',
#        [
#            token_data,
#            init_w3.eth.accounts[0],
#        ],
#        queue=None,
#    )
#    s_transfer = celery.signature(
#        'cic_eth.eth.token.transfer',
#        [
#            init_w3.eth.accounts[0],
#            init_w3.eth.accounts[1],
#            1024,
#            str(default_chain_spec),
#        ],
#        queue=None,
#    )
#
#    gas_price = c.gas_price()
#    gas_limit = tx_transfer['gas']
#
#    s = celery.signature(
#        'cic_eth.eth.tx.check_gas',
#        [
#            [tx_hash_hex],
#            str(default_chain_spec),
#            [],
#            gas_receiver_address,
#            gas_limit * gas_price,
#        ],
#    )
#    s_nonce.link(s_transfer)
#    t = s_nonce.apply_async()
#    with pytest.raises(OutOfGasError):
#        r = t.get()
#    #assert len(r) == 0
#
#    time.sleep(1)
#    t.collect()
#
#    session = SessionBase.create_session()
#    q = session.query(Otx)
#    q = q.filter(Otx.tx_hash==tx_hash_hex)
#    r = q.first()
#    session.close()
#    assert r.status == StatusEnum.WAITFORGAS


def test_resend_with_higher_gas(
    default_chain_spec,
    init_eth_tester,
    init_w3,
    init_rpc,
    init_database,
    cic_registry,
    celery_session_worker,
    bancor_registry,
    bancor_tokens,
):

    c = init_rpc

    token_data = {
        'address': bancor_tokens[0],
    }

    s_nonce = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            [token_data],
            init_w3.eth.accounts[0],
        ],
        queue=None,
    )
    s_transfer = celery.signature(
        'cic_eth.eth.token.transfer',
        [
            init_w3.eth.accounts[0],
            init_w3.eth.accounts[1],
            1024,
            str(default_chain_spec),
        ],
        queue=None,
    )

#    txf = TokenTxFactory(init_w3.eth.accounts[0], c)
#    tx_transfer = txf.transfer(bancor_tokens[0], init_w3.eth.accounts[1], 1024, default_chain_spec, 'foo')
#    logg.debug('txtransfer {}'.format(tx_transfer))
#    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx_transfer, str(default_chain_spec))
#    logg.debug('signed raw {}'.format(tx_signed_raw_hex))
#    queue_create(
#        tx_transfer['nonce'],
#        tx_transfer['from'],
#        tx_hash_hex,
#        tx_signed_raw_hex,
#        str(default_chain_spec),
#    )
#    logg.debug('create {}'.format(tx_transfer['from']))
#    cache_transfer_data(
#        tx_hash_hex,
#        tx_transfer, #_signed_raw_hex,
#    )
    s_nonce.link(s_transfer)
    t = s_nonce.apply_async()
    t.get()
    for r in t.collect():
        pass
    assert t.successful()

    q = init_database.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.recipient==init_w3.eth.accounts[1])
    o = q.first()
    tx_hash_hex = o.tx_hash

    s_resend = celery.signature(
        'cic_eth.eth.tx.resend_with_higher_gas',
        [
            tx_hash_hex,
            str(default_chain_spec),
        ],
        queue=None,
    )

    t = s_resend.apply_async()
    for r in t.collect():
        pass
    assert t.successful()

#
#def test_resume(
#    default_chain_spec,
#    init_eth_tester,
#    w3,
#    w3_account_roles,
#    init_database,
#    bancor_tokens,
#    celery_session_worker,
#    eth_empty_accounts,
#):
#
#    txf = TokenTxFactory()
#
#    tx_transfer = txf.transfer(bancor_tokens[0], eth_empty_accounts[1], 1024)
#    (tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_transfer)
#
#    resume_tx()
@ -1,355 +0,0 @@
# standard imports
import os

# third-party imports
import celery
import pytest

# local imports
from cic_eth.db.models.lock import Lock
from cic_eth.db.models.otx import Otx
from cic_eth.db.enum import LockEnum
from cic_eth.error import LockedError


@pytest.mark.parametrize(
    'task_postfix,flag_enum',
    [
        ('send', LockEnum.SEND),
        ('queue', LockEnum.QUEUE),
    ],
)
def test_lock_task(
    init_database,
    celery_session_worker,
    default_chain_spec,
    task_postfix,
    flag_enum,
):

    chain_str = str(default_chain_spec)
    address = '0x' + os.urandom(20).hex()

    s = celery.signature(
        'cic_eth.admin.ctrl.lock_{}'.format(task_postfix),
        [
            'foo',
            chain_str,
            address,
        ],
    )
    t = s.apply_async()
    r = t.get()
    assert t.successful()
    assert r == 'foo'

    q = init_database.query(Lock)
    q = q.filter(Lock.address==address)
    lock = q.first()
    assert lock != None
    assert lock.flags == flag_enum

    s = celery.signature(
        'cic_eth.admin.ctrl.unlock_{}'.format(task_postfix),
        [
            'foo',
            chain_str,
            address,
        ],
    )
    t = s.apply_async()
    r = t.get()
    assert t.successful()
    assert r == 'foo'

    q = init_database.query(Lock)
    q = q.filter(Lock.address==address)
    lock = q.first()
    assert lock == None


def test_lock_check_task(
    init_database,
    celery_session_worker,
    default_chain_spec,
):

    chain_str = str(default_chain_spec)
    address = '0x' + os.urandom(20).hex()

    s = celery.signature(
        'cic_eth.admin.ctrl.lock_send',
        [
            'foo',
            chain_str,
            address,
        ],
    )
    t = s.apply_async()
    r = t.get()

    s = celery.signature(
        'cic_eth.admin.ctrl.lock_queue',
        [
            'foo',
            chain_str,
            address,
        ],
    )
    t = s.apply_async()
    r = t.get()

    s = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            'foo',
            chain_str,
            LockEnum.SEND,
            address,
        ],
    )
    t = s.apply_async()

    with pytest.raises(LockedError):
        r = t.get()

    s = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            'foo',
            chain_str,
            LockEnum.CREATE,
            address,
        ],
    )
    t = s.apply_async()
    r = t.get()
    assert r == 'foo'


def test_lock_arbitrary_task(
    init_database,
    celery_session_worker,
    default_chain_spec,
):

    chain_str = str(default_chain_spec)
    address = '0x' + os.urandom(20).hex()

    s = celery.signature(
        'cic_eth.admin.ctrl.lock',
        [
            'foo',
            chain_str,
            address,
            LockEnum.SEND | LockEnum.QUEUE,
        ],
    )
    t = s.apply_async()
    r = t.get()
    assert r == 'foo'

    s = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            'foo',
            chain_str,
            LockEnum.SEND | LockEnum.QUEUE,
            address,
        ],
    )
    t = s.apply_async()
    with pytest.raises(LockedError):
        r = t.get()
    assert r == 'foo'

    s = celery.signature(
        'cic_eth.admin.ctrl.unlock',
        [
            'foo',
            chain_str,
            address,
            LockEnum.SEND,
        ],
    )
    t = s.apply_async()
    r = t.get()
    assert r == 'foo'


    s = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            'foo',
            chain_str,
            LockEnum.SEND,
            address,
        ],
    )
    t = s.apply_async()
    r = t.get()


    s = celery.signature(
        'cic_eth.admin.ctrl.unlock',
        [
            'foo',
            chain_str,
            address,
        ],
    )
    t = s.apply_async()
    r = t.get()
    assert r == 'foo'


    s = celery.signature(
        'cic_eth.admin.ctrl.check_lock',
        [
            'foo',
            chain_str,
            LockEnum.QUEUE,
            address,
        ],
    )
    t = s.apply_async()
    r = t.get()


def test_lock_list(
    default_chain_spec,
    init_database,
    celery_session_worker,
):

    chain_str = str(default_chain_spec)

    # Empty list of no lock set
    s = celery.signature(
        'cic_eth.queue.tx.get_lock',
        [],
    )
    t = s.apply_async()
    r = t.get()

    assert len(r) == 0

    # One element if lock set and no link with otx
    tx_hash = '0x' + os.urandom(32).hex()
    address_foo = '0x' + os.urandom(20).hex()
    s = celery.signature(
        'cic_eth.admin.ctrl.lock_send',
        [
            'foo',
            chain_str,
            address_foo,
            tx_hash,
        ],
    )
    t = s.apply_async()
    r = t.get()

    s = celery.signature(
        'cic_eth.queue.tx.get_lock',
        [],
    )
    t = s.apply_async()
    r = t.get()

    assert len(r) == 1
    assert r[0]['tx_hash'] == None
    assert r[0]['address'] == address_foo
    assert r[0]['flags'] == LockEnum.SEND

    # One element if lock set and link with otx, tx_hash now available
    signed_tx = '0x' + os.urandom(128).hex()
    otx = Otx.add(
        0,
        address_foo,
        tx_hash,
        signed_tx,
    )
    s = celery.signature(
        'cic_eth.admin.ctrl.unlock_send',
        [
            'foo',
            chain_str,
            address_foo,
        ],
    )
    t = s.apply_async()
    r = t.get()

    s = celery.signature(
        'cic_eth.admin.ctrl.lock_send',
        [
            'foo',
            chain_str,
            address_foo,
            tx_hash,
        ],
    )
    t = s.apply_async()
    r = t.get()

    s = celery.signature(
        'cic_eth.queue.tx.get_lock',
        [],
    )
    t = s.apply_async()
    r = t.get()

    assert r[0]['tx_hash'] == tx_hash


    # Two elements if two locks in place
    address_bar = '0x' + os.urandom(20).hex()
    tx_hash = '0x' + os.urandom(32).hex()
    s = celery.signature(
        'cic_eth.admin.ctrl.lock_queue',
        [
            'bar',
            chain_str,
            address_bar,
            tx_hash,
        ],
    )
    t = s.apply_async()
    r = t.get()

    s = celery.signature(
        'cic_eth.queue.tx.get_lock',
        [],
    )
    t = s.apply_async()
    r = t.get()

    assert len(r) == 2

    # One element if filtered by address
    s = celery.signature(
        'cic_eth.queue.tx.get_lock',
        [
            address_bar,
        ],
    )
    t = s.apply_async()
    r = t.get()

    assert len(r) == 1
    assert r[0]['tx_hash'] == None
    assert r[0]['address'] == address_bar
    assert r[0]['flags'] == LockEnum.QUEUE

    address_bogus = '0x' + os.urandom(20).hex()
    # No elements if filtered by non-existent address
    s = celery.signature(
        'cic_eth.queue.tx.get_lock',
        [
            address_bogus,
        ],
    )
    t = s.apply_async()
    r = t.get()
@ -1,136 +0,0 @@
# third-party imports
import pytest
import celery

# local imports
from cic_eth.admin.nonce import shift_nonce
from cic_eth.queue.tx import create as queue_create
from cic_eth.eth.tx import otx_cache_parse_tx
from cic_eth.eth.task import sign_tx
from cic_eth.db.models.nonce import (
    NonceReservation,
    Nonce
)
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache


@pytest.mark.skip()
def test_reserve_nonce_task(
    init_database,
    celery_session_worker,
    eth_empty_accounts,
):

    s = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            'foo',
            eth_empty_accounts[0],
        ],
        queue=None,
    )
    t = s.apply_async()
    r = t.get()

    assert r == 'foo'

    q = init_database.query(Nonce)
    q = q.filter(Nonce.address_hex==eth_empty_accounts[0])
    o = q.first()
    assert o != None

    q = init_database.query(NonceReservation)
    q = q.filter(NonceReservation.key==str(t))
    o = q.first()
    assert o != None


def test_reserve_nonce_chain(
    default_chain_spec,
    init_database,
    celery_session_worker,
    init_w3,
    init_rpc,
):

    provider_address = init_rpc.gas_provider()
    q = init_database.query(Nonce)
    q = q.filter(Nonce.address_hex==provider_address)
    o = q.first()
    o.nonce = 42
    init_database.add(o)
    init_database.commit()

    s_nonce = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            init_w3.eth.accounts[0],
            provider_address,
        ],
        queue=None,
    )
    s_gas = celery.signature(
        'cic_eth.eth.tx.refill_gas',
        [
            str(default_chain_spec),
        ],
        queue=None,
    )
    s_nonce.link(s_gas)
    t = s_nonce.apply_async()
    r = t.get()
    for c in t.collect():
        pass
    assert t.successful()

    q = init_database.query(Otx)
    Q = q.join(TxCache)
    q = q.filter(TxCache.recipient==init_w3.eth.accounts[0])
    o = q.first()

    assert o.nonce == 42


@pytest.mark.skip()
def test_shift_nonce(
    default_chain_spec,
    init_database,
    init_w3,
    celery_session_worker,
):

    chain_str = str(default_chain_spec)

    tx_hashes = []
    for i in range(5):
        tx = {
            'from': init_w3.eth.accounts[0],
            'to': init_w3.eth.accounts[i],
            'nonce': i,
            'gas': 21000,
            'gasPrice': 1000000,
            'value': 128,
            'chainId': default_chain_spec.chain_id(),
            'data': '',
        }

        (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, chain_str)
        queue_create(tx['nonce'], init_w3.eth.accounts[0], tx_hash_hex, tx_signed_raw_hex, chain_str)
        otx_cache_parse_tx(tx_hash_hex, tx_signed_raw_hex, chain_str)
        tx_hashes.append(tx_hash_hex)

    s = celery.signature(
        'cic_eth.admin.nonce.shift_nonce',
        [
            chain_str,
            tx_hashes[2],
        ],
        queue=None,
    )
    t = s.apply_async()
    r = t.get()
    for _ in t.collect():
        pass
    assert t.successful()
@ -1,180 +0,0 @@
# standard imports
import os
import logging

# third-party imports
import pytest
import celery
from cic_registry import zero_address

# local imports
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.db.enum import (
    StatusEnum,
    StatusBits,
)

logg = logging.getLogger()

# TODO: Refactor to use test vector decorator
def test_status_success(
    init_w3,
    init_database,
    celery_session_worker,
):

    tx_hash = '0x' + os.urandom(32).hex()
    signed_tx = '0x' + os.urandom(128).hex()
    account = '0x' + os.urandom(20).hex()

    otx = Otx(0, init_w3.eth.accounts[0], tx_hash, signed_tx)
    init_database.add(otx)
    init_database.commit()
    assert otx.status == StatusEnum.PENDING

    txc = TxCache(tx_hash, account, init_w3.eth.accounts[0], zero_address, zero_address, 13, 13)
    init_database.add(txc)
    init_database.commit()

    s = celery.signature(
        'cic_eth.queue.tx.set_sent_status',
        [tx_hash],
    )
    t = s.apply_async()
    t.get()
    assert t.successful()
    init_database.refresh(otx)
    assert otx.status == StatusEnum.SENT

    s = celery.signature(
        'cic_eth.queue.tx.set_final_status',
        [tx_hash, 13],
    )
    t = s.apply_async()
    t.get()
    assert t.successful()
    init_database.refresh(otx)
    assert otx.status == StatusEnum.SUCCESS


def test_status_tempfail_resend(
    init_w3,
    init_database,
    celery_session_worker,
):

    tx_hash = '0x' + os.urandom(32).hex()
    signed_tx = '0x' + os.urandom(128).hex()
    account = '0x' + os.urandom(20).hex()

    otx = Otx(0, init_w3.eth.accounts[0], tx_hash, signed_tx)
    init_database.add(otx)
    init_database.commit()

    txc = TxCache(tx_hash, account, init_w3.eth.accounts[0], zero_address, zero_address, 13, 13)
    init_database.add(txc)
    init_database.commit()

    s = celery.signature(
        'cic_eth.queue.tx.set_sent_status',
        [tx_hash, True],
    )
    t = s.apply_async()
    t.get()
    assert t.successful()
    init_database.refresh(otx)
    assert otx.status == StatusEnum.SENDFAIL

    s = celery.signature(
        'cic_eth.queue.tx.set_sent_status',
        [tx_hash],
    )
    t = s.apply_async()
    t.get()
    assert t.successful()
    init_database.refresh(otx)
    assert otx.status == StatusEnum.SENT



def test_status_fail(
    init_w3,
    init_database,
    celery_session_worker,
):

    tx_hash = '0x' + os.urandom(32).hex()
    signed_tx = '0x' + os.urandom(128).hex()
    account = '0x' + os.urandom(20).hex()

    otx = Otx(0, init_w3.eth.accounts[0], tx_hash, signed_tx)
    init_database.add(otx)
    init_database.commit()

    txc = TxCache(tx_hash, account, init_w3.eth.accounts[0], zero_address, zero_address, 13, 13)
    init_database.add(txc)
    init_database.commit()

    s = celery.signature(
        'cic_eth.queue.tx.set_sent_status',
        [tx_hash],
    )
    t = s.apply_async()
    t.get()
    assert t.successful()
    init_database.refresh(otx)
    assert otx.status == StatusEnum.SENT

    s = celery.signature(
        'cic_eth.queue.tx.set_final_status',
        [tx_hash, 13, True],
    )
    t = s.apply_async()
    t.get()
    assert t.successful()
    init_database.refresh(otx)
    assert otx.status == StatusEnum.REVERTED



def test_status_fubar(
    init_w3,
    init_database,
    celery_session_worker,
):

    tx_hash = '0x' + os.urandom(32).hex()
    signed_tx = '0x' + os.urandom(128).hex()
    account = '0x' + os.urandom(20).hex()

    otx = Otx(0, init_w3.eth.accounts[0], tx_hash, signed_tx)
    init_database.add(otx)
    init_database.commit()

    txc = TxCache(tx_hash, account, init_w3.eth.accounts[0], zero_address, zero_address, 13, 13)
    init_database.add(txc)
    init_database.commit()

    s = celery.signature(
        'cic_eth.queue.tx.set_sent_status',
        [tx_hash],
    )
    t = s.apply_async()
    t.get()
    assert t.successful()
    init_database.refresh(otx)
    assert otx.status == StatusEnum.SENT

    s = celery.signature(
        'cic_eth.queue.tx.set_fubar',
        [tx_hash],
    )
    t = s.apply_async()
    t.get()
    for n in t.collect():
        pass
    assert t.successful()

    otx = Otx.load(tx_hash)
    assert otx.status & StatusBits.UNKNOWN_ERROR
@ -1,133 +0,0 @@
# standard imports
import logging
import time

# third-party imports
import celery

# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.otx import Otx
from cic_eth.db.enum import (
    StatusEnum,
    StatusBits,
    is_error_status,
)
from cic_eth.eth.task import sign_and_register_tx

logg = logging.getLogger()


def test_states_initial(
    init_w3,
    init_database,
    init_eth_account_roles,
    celery_session_worker,
):
    tx = {
        'from': init_w3.eth.accounts[0],
        'to': init_w3.eth.accounts[1],
        'nonce': 13,
        'gas': 21000,
        'gasPrice': 1000000,
        'value': 128,
        'chainId': 42,
        'data': '',
    }
    (tx_hash_hex, tx_raw_signed_hex) = sign_and_register_tx(tx, 'foo:bar:42', None)

    otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
    assert otx.status == StatusEnum.PENDING.value

    s = celery.signature(
        'cic_eth.eth.tx.check_gas',
        [
            [tx_hash_hex],
            'foo:bar:42',
            [tx_raw_signed_hex],
            init_w3.eth.accounts[0],
            8000000,
        ],
        queue=None,
    )
    t = s.apply_async()
    r = t.get()
    for c in t.collect():
        pass
    assert t.successful()

    session = SessionBase.create_session()
    otx = session.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
    assert otx.status == StatusEnum.READYSEND.value

    otx.waitforgas(session=session)
    session.commit()

    s = celery.signature(
        'cic_eth.eth.tx.check_gas',
        [
            [tx_hash_hex],
            'foo:bar:42',
            [tx_raw_signed_hex],
            init_w3.eth.accounts[0],
            8000000,
        ],
        queue=None,
    )
    t = s.apply_async()
    r = t.get()
    for c in t.collect():
        pass
    assert t.successful()

    session = SessionBase.create_session()
    otx = session.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
    assert otx.status == StatusEnum.READYSEND.value


def test_states_failed(
    init_w3,
    init_database,
    init_eth_account_roles,
    celery_session_worker,
):
    tx = {
        'from': init_w3.eth.accounts[0],
        'to': init_w3.eth.accounts[1],
        'nonce': 13,
        'gas': 21000,
        'gasPrice': 1000000,
        'value': 128,
        'chainId': 42,
        'data': '',
    }
    (tx_hash_hex, tx_raw_signed_hex) = sign_and_register_tx(tx, 'foo:bar:42', None)

    otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
    otx.sendfail(session=init_database)

    init_database.commit()

    s = celery.signature(
        'cic_eth.eth.tx.check_gas',
        [
            [tx_hash_hex],
            'foo:bar:42',
            [tx_raw_signed_hex],
            init_w3.eth.accounts[0],
            8000000,
        ],
        queue=None,
    )
    t = s.apply_async()
    r = t.get()
    for c in t.collect():
        pass
    assert t.successful()

    init_database.commit()

    otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
    assert otx.status & StatusEnum.RETRY == StatusEnum.RETRY
    #assert otx.status & StatusBits.QUEUED
    assert is_error_status(otx.status)
@ -1,51 +0,0 @@
# standard imports
import logging

# third-party imports
import celery

# local imports
from cic_eth.eth.token import TokenTxFactory


logg = logging.getLogger()


def test_approve(
    init_rpc,
    default_chain_spec,
    celery_session_worker,
    bancor_tokens,
    bancor_registry,
    cic_registry,
):

    token_data = [
        {
            'address': bancor_tokens[0],
        },
    ]
    s_nonce = celery.signature(
        'cic_eth.eth.tx.reserve_nonce',
        [
            token_data,
            init_rpc.w3.eth.accounts[0],
        ],
        queue=None,
    )
    s_approve = celery.signature(
        'cic_eth.eth.token.approve',
        [
            init_rpc.w3.eth.accounts[0],
            init_rpc.w3.eth.accounts[1],
            1024,
            str(default_chain_spec),
        ],
    )
    s_nonce.link(s_approve)
    t = s_nonce.apply_async()
    t.get()
    for r in t.collect():
        logg.debug('result {}'.format(r))

    assert t.successful()
@ -1,168 +0,0 @@
# standard imports
import logging
import os

# third-party imports
import celery
import pytest

# local imports
import cic_eth
from cic_eth.db.models.lock import Lock
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import LockEnum
from cic_eth.error import LockedError
from cic_eth.queue.tx import create as queue_create
from cic_eth.queue.tx import set_sent_status
from cic_eth.eth.tx import cache_gas_refill_data
from cic_eth.error import PermanentTxError
from cic_eth.queue.tx import get_tx
from cic_eth.eth.task import sign_tx

logg = logging.getLogger()


# TODO: There is no
def test_send_reject(
    default_chain_spec,
    init_w3,
    mocker,
    init_database,
    celery_session_worker,
):

    nonce = init_w3.eth.getTransactionCount(init_w3.eth.accounts[0], 'pending')
    tx = {
        'from': init_w3.eth.accounts[0],
        'to': init_w3.eth.accounts[1],
        'nonce': nonce,
        'gas': 21000,
        'gasPrice': 1000000,
        'value': 128,
        'chainId': default_chain_spec.chain_id(),
        'data': '',
    }

    chain_str = str(default_chain_spec)

    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, chain_str)
    queue_create(tx['nonce'], tx['from'], tx_hash_hex, tx_signed_raw_hex, str(default_chain_spec))
    cache_gas_refill_data(tx_hash_hex, tx)
    s = celery.signature(
        'cic_eth.eth.tx.send',
        [
            [tx_signed_raw_hex],
            chain_str,
        ],
    )
    t = s.apply_async()
    r = t.get()


def test_sync_tx(
    default_chain_spec,
    init_database,
    init_w3,
    init_wallet_extension,
    init_eth_tester,
    celery_session_worker,
    eth_empty_accounts,
):

    nonce = init_w3.eth.getTransactionCount(init_w3.eth.accounts[0], 'pending')
    tx = {
        'from': init_w3.eth.accounts[0],
        'to': init_w3.eth.accounts[1],
        'nonce': nonce,
        'gas': 21000,
        'gasPrice': 1000000,
        'value': 128,
        'chainId': default_chain_spec.chain_id(),
        'data': '',
    }

    chain_str = str(default_chain_spec)

    (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, chain_str)
    queue_create(tx['nonce'], tx['from'], tx_hash_hex, tx_signed_raw_hex, str(default_chain_spec))
    cache_gas_refill_data(tx_hash_hex, tx)

    init_w3.eth.send_raw_transaction(tx_signed_raw_hex)

    s = celery.signature(
        'cic_eth.eth.tx.sync_tx',
        [
            tx_hash_hex,
            chain_str,
        ],
        queue=None
    )
    t = s.apply_async()
    r = t.get()
    for _ in t.collect():
        pass
    assert t.successful()

    tx_dict = get_tx(tx_hash_hex)
    assert tx_dict['status'] == StatusEnum.SENT

    init_eth_tester.mine_block()

    s = celery.signature(
        'cic_eth.eth.tx.sync_tx',
        [
            tx_hash_hex,
            chain_str,
        ],
        queue=None
    )
    t = s.apply_async()
    r = t.get()
    for _ in t.collect():
        pass
    assert t.successful()

    tx_dict = get_tx(tx_hash_hex)
    assert tx_dict['status'] == StatusEnum.SUCCESS



def test_resume_tx(
    default_chain_spec,
    init_database,
    init_w3,
    celery_session_worker,
):

    tx = {
        'from': init_w3.eth.accounts[0],
        'to': init_w3.eth.accounts[1],
        'nonce': 42,
        'gas': 21000,
        'gasPrice': 1000000,
        'value': 128,
        'chainId': default_chain_spec.chain_id(),
        'data': '',
    }
    tx_signed = init_w3.eth.sign_transaction(tx)
    tx_hash = init_w3.keccak(hexstr=tx_signed['raw'])
    tx_hash_hex = tx_hash.hex()
    queue_create(tx['nonce'], tx['from'], tx_hash_hex, tx_signed['raw'], str(default_chain_spec))
    cache_gas_refill_data(tx_hash_hex, tx)

    set_sent_status(tx_hash_hex, True)

    s = celery.signature(
        'cic_eth.eth.tx.resume_tx',
        [
            tx_hash_hex,
            str(default_chain_spec),
        ],
    )
    t = s.apply_async()
    t.get()
    for r in t.collect():
        logg.debug('collect {}'.format(r))
    assert t.successful()
@ -1,10 +0,0 @@

def test_default(
    init_database,
):
    pass

def test_w3(
    init_w3,
):
    a = init_w3.eth.accounts[0]
51 apps/cic-eth/tests/test_chainlib.py Normal file
@ -0,0 +1,51 @@
# standard imports
import logging

# external imports
from chainlib.connection import RPCConnection
from chainlib.eth.gas import (
    balance,
    price,
)
from chainlib.eth.tx import (
    count_pending,
    count_confirmed,
)
from chainlib.eth.sign import (
    sign_message,
)

logg = logging.getLogger(__name__)


def test_init_eth_tester(
    default_chain_spec,
    eth_accounts,
    init_eth_tester,
    eth_rpc,
):

    conn = RPCConnection.connect(default_chain_spec, 'default')
    o = balance(eth_accounts[0])
    conn.do(o)

    o = price()
    conn.do(o)

    o = count_pending(eth_accounts[0])
    conn.do(o)

    o = count_confirmed(eth_accounts[0])
    conn.do(o)


def test_signer(
    default_chain_spec,
    init_eth_tester,
    eth_rpc,
    eth_accounts,
):

    o = sign_message(eth_accounts[0], '0x2a')
    conn = RPCConnection.connect(default_chain_spec, 'signer')
    r = conn.do(o)
Some files were not shown because too many files have changed in this diff.