Merge branch 'master' into lash/import-scripts-refactor

nolash 2021-02-17 11:30:58 +01:00
commit 9dea78571d
70 changed files with 2863 additions and 955 deletions

@ -1 +1 @@
Subproject commit 06c5f0fb0dca5992a7ffb12874b5cb0c9fdcd706
Subproject commit d2cb3a45558d7ca3a412c97c6aea794d9ac6c6f5

View File

@ -16,7 +16,10 @@ from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.db.models.nonce import Nonce
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import (
StatusEnum,
is_alive,
)
from cic_eth.error import InitializationError
from cic_eth.db.error import TxStateChangeError
from cic_eth.eth.rpc import RpcClient
@ -98,6 +101,19 @@ class AdminApi:
session.close()
def have_account(self, address_hex, chain_str):
s_have = celery.signature(
'cic_eth.eth.account.have',
[
address_hex,
chain_str,
],
queue=self.queue,
)
t = s_have.apply_async()
return t.get()
def resend(self, tx_hash_hex, chain_str, in_place=True, unlock=False):
logg.debug('resend {}'.format(tx_hash_hex))
s_get_tx_cache = celery.signature(
@ -110,24 +126,32 @@ class AdminApi:
# TODO: This check should most likely be in resend task itself
tx_dict = s_get_tx_cache.apply_async().get()
if tx_dict['status'] in [StatusEnum.REVERTED, StatusEnum.SUCCESS, StatusEnum.CANCELLED, StatusEnum.OBSOLETED]:
#if tx_dict['status'] in [StatusEnum.REVERTED, StatusEnum.SUCCESS, StatusEnum.CANCELLED, StatusEnum.OBSOLETED]:
if not is_alive(getattr(StatusEnum, tx_dict['status']).value):
raise TxStateChangeError('Cannot resend mined or obsoleted transaction {}'.format(tx_hash_hex))
s = None
if in_place:
s = celery.signature(
'cic_eth.eth.tx.resend_with_higher_gas',
[
tx_hash_hex,
chain_str,
None,
1.01,
],
queue=self.queue,
)
else:
if not in_place:
raise NotImplementedError('resend as new not yet implemented')
s = celery.signature(
'cic_eth.eth.tx.resend_with_higher_gas',
[
chain_str,
None,
1.01,
],
queue=self.queue,
)
s_manual = celery.signature(
'cic_eth.queue.tx.set_manual',
[
tx_hash_hex,
],
queue=self.queue,
)
s_manual.link(s)
if unlock:
s_gas = celery.signature(
'cic_eth.admin.ctrl.unlock_send',
@ -139,7 +163,7 @@ class AdminApi:
)
s.link(s_gas)
return s.apply_async()
return s_manual.apply_async()
def check_nonce(self, address):
s = celery.signature(
@ -243,7 +267,9 @@ class AdminApi:
"""
s = celery.signature(
'cic_eth.queue.tx.get_account_tx',
[address],
[
address,
],
queue=self.queue,
)
txs = s.apply_async().get()

View File

@ -30,7 +30,7 @@ class Api:
:param queue: Name of worker queue to submit tasks to
:type queue: str
"""
def __init__(self, chain_str, queue='cic-eth', callback_param=None, callback_task='cic_eth.callbacks.noop', callback_queue=None):
def __init__(self, chain_str, queue='cic-eth', callback_param=None, callback_task='cic_eth.callbacks.noop.noop', callback_queue=None):
self.chain_str = chain_str
self.chain_spec = ChainSpec.from_chain_str(chain_str)
self.callback_param = callback_param
@ -301,13 +301,15 @@ class Api:
return t
def balance(self, address, token_symbol):
def balance(self, address, token_symbol, include_pending=True):
"""Calls the provided callback with the current token balance of the given address.
:param address: Ethereum address of holder
:type address: str, 0x-hex
:param token_symbol: ERC20 token symbol of token to send
:type token_symbol: str
:param include_pending: If set, will include transactions that have not yet been fully processed
:type include_pending: bool
:returns: uuid of root task
:rtype: celery.Task
"""
@ -330,14 +332,45 @@ class Api:
],
queue=self.queue,
)
if self.callback_param != None:
s_balance.link(self.callback_success)
s_tokens.link(s_balance).on_error(self.callback_error)
else:
s_tokens.link(s_balance)
s_result = celery.signature(
'cic_eth.queue.balance.assemble_balances',
[],
queue=self.queue,
)
t = s_tokens.apply_async(queue=self.queue)
last_in_chain = s_balance
if include_pending:
s_balance_incoming = celery.signature(
'cic_eth.queue.balance.balance_incoming',
[
address,
self.chain_str,
],
queue=self.queue,
)
s_balance_outgoing = celery.signature(
'cic_eth.queue.balance.balance_outgoing',
[
address,
self.chain_str,
],
queue=self.queue,
)
s_balance.link(s_balance_incoming)
s_balance_incoming.link(s_balance_outgoing)
last_in_chain = s_balance_outgoing
one = celery.chain(s_tokens, s_balance)
two = celery.chain(s_tokens, s_balance_incoming)
three = celery.chain(s_tokens, s_balance_outgoing)
t = None
if self.callback_param != None:
s_result.link(self.callback_success).on_error(self.callback_error)
t = celery.chord([one, two, three])(s_result)
else:
t = celery.chord([one, two, three])(s_result)
return t
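For reference, a minimal usage sketch of the pending-aware balance call added here. It assumes a configured Celery broker with cic-eth workers running; the import path, chain spec string, address and token symbol are placeholders rather than values taken from this diff.

from cic_eth.api import Api  # import path assumed

api = Api('evm:bloxberg:8996', queue='cic-eth')                  # placeholder chain spec string
t = api.balance('0x' + '00' * 20, 'GFT', include_pending=True)   # placeholder address and symbol
# The chord callback (assemble_balances) yields one dict per token, carrying
# balance_network, balance_incoming and balance_outgoing fields.
balances = t.get()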
@ -417,6 +450,73 @@ class Api:
return t
def list(self, address, limit=10, external_task=None, external_queue=None):
"""Retrieve an aggregate list of latest transactions of internal and (optionally) external origin in reverse chronological order.
The array of transactions returned has the same dict layout as those passed by the callback filter in cic_eth/runnable/manager.
If the external task is defined, it will be used to query external transactions; if it is not defined, no external transactions will be included. The task must accept (offset, limit, address) as input parameters and return a bloom filter that will be used to retrieve transaction data for the matching transactions. See cic_eth.ext.tx.list_tx_by_bloom for details on the bloom filter data format.
:param address: Ethereum address to list transactions for
:type address: str, 0x-hex
:param limit: Amount of results to return
:type limit: number
:param external_task: Celery task providing external transactions
:type external_task: str
:param external_queue: Celery queue of the task providing external transactions
:type external_queue: str
:returns: List of transactions
:rtype: list of dict
"""
offset = 0
s_local = celery.signature(
'cic_eth.queue.tx.get_account_tx',
[
address,
],
queue=self.queue,
)
s_brief = celery.signature(
'cic_eth.ext.tx.tx_collate',
[
self.chain_str,
offset,
limit
],
queue=self.queue,
)
if self.callback_param != None:
s_brief.link(self.callback_success).on_error(self.callback_error)
t = None
if external_task != None:
s_external_get = celery.signature(
external_task,
[
address,
offset,
limit,
],
queue=external_queue,
)
s_external_process = celery.signature(
'cic_eth.ext.tx.list_tx_by_bloom',
[
address,
self.chain_str,
],
queue=self.queue,
)
c = celery.chain(s_external_get, s_external_process)
t = celery.chord([s_local, c])(s_brief)
else:
t = s_local.apply_async()
return t
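A similar hedged sketch for the aggregate list call; the external task and queue names below are hypothetical stand-ins for whatever syncer exposes the bloom filter task described in the docstring.

from cic_eth.api import Api  # import path assumed

api = Api('evm:bloxberg:8996')                      # placeholder chain spec string
t = api.list(
    '0x' + '11' * 20,                               # placeholder address
    limit=20,
    external_task='sync.list_external_tx',          # hypothetical external bloom task
    external_queue='cic-eth-sync',                  # hypothetical queue name
)
txs = t.get()  # collated internal and external transactions, newest first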
def ping(self, r):
"""A noop callback ping for testing purposes.

View File

@ -18,4 +18,4 @@ def noop(self, result, param, status_code):
:rtype: bool
"""
logg.info('noop callback {} {} {}'.format(result, param, status_code))
return True
return result

View File

@ -1,6 +1,29 @@
# standard imports
import enum
@enum.unique
class StatusBits(enum.IntEnum):
"""Individual bit flags that are combined to define the state and legacy of a queued transaction
"""
QUEUED = 0x01 # transaction should be sent to network
IN_NETWORK = 0x08 # transaction is in network
DEFERRED = 0x10 # an attempt to send the transaction to network has failed
GAS_ISSUES = 0x20 # transaction is pending sender account gas funding
LOCAL_ERROR = 0x100 # errors that originate internally from the component
NODE_ERROR = 0x200 # errors originating in the node (invalid RLP input...)
NETWORK_ERROR = 0x400 # errors that originate from the network (REVERT)
UNKNOWN_ERROR = 0x800 # unclassified errors (these should not occur)
FINAL = 0x1000 # transaction processing has completed
OBSOLETE = 0x2000 # transaction has been replaced by a different transaction with higher fee
MANUAL = 0x8000 # transaction processing has been manually overridden
@enum.unique
class StatusEnum(enum.IntEnum):
"""
@ -22,21 +45,27 @@ class StatusEnum(enum.IntEnum):
* SUCCESS: The transaction was successfully mined. (Block number will be set)
"""
PENDING=-9
SENDFAIL=-8
RETRY=-7
READYSEND=-6
OBSOLETED=-2
WAITFORGAS=-1
SENT=0
FUBAR=1
CANCELLED=2
OVERRIDDEN=3
REJECTED=7
REVERTED=8
SUCCESS=9
PENDING = 0
SENDFAIL = StatusBits.DEFERRED | StatusBits.LOCAL_ERROR
RETRY = StatusBits.QUEUED | StatusBits.DEFERRED
READYSEND = StatusBits.QUEUED
OBSOLETED = StatusBits.OBSOLETE | StatusBits.IN_NETWORK
WAITFORGAS = StatusBits.GAS_ISSUES
SENT = StatusBits.IN_NETWORK
FUBAR = StatusBits.FINAL | StatusBits.UNKNOWN_ERROR
CANCELLED = StatusBits.IN_NETWORK | StatusBits.FINAL | StatusBits.OBSOLETE
OVERRIDDEN = StatusBits.FINAL | StatusBits.OBSOLETE | StatusBits.MANUAL
REJECTED = StatusBits.NODE_ERROR | StatusBits.FINAL
REVERTED = StatusBits.IN_NETWORK | StatusBits.FINAL | StatusBits.NETWORK_ERROR
SUCCESS = StatusBits.IN_NETWORK | StatusBits.FINAL
@enum.unique
class LockEnum(enum.IntEnum):
"""
STICKY: When set, reset is not possible
@ -48,4 +77,78 @@ class LockEnum(enum.IntEnum):
CREATE=2
SEND=4
QUEUE=8
QUERY=16
ALL=int(0xfffffffffffffffe)
def status_str(v, bits_only=False):
"""Render a human-readable string describing the status
If the bit field exactly matches a StatusEnum value, the StatusEnum label will be returned.
If a StatusEnum cannot be matched, the string will be postfixed with "*", unless explicitly instructed to return bit field labels only.
:param v: Status bit field
:type v: number
:param bits_only: Only render individual bit labels.
:type bits_only: bool
:returns: Status string
:rtype: str
"""
s = ''
if not bits_only:
try:
s = StatusEnum(v).name
return s
except ValueError:
pass
for i in range(16):
b = (1 << i)
if (b & 0xffff) & v:
n = StatusBits(b).name
if len(s) > 0:
s += ','
s += n
if not bits_only:
s += '*'
return s
def all_errors():
"""Bit mask of all error states
:returns: Error flags
:rtype: number
"""
return StatusBits.LOCAL_ERROR | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR
def is_error_status(v):
"""Check if value is an error state
:param v: Status bit field
:type v: number
:returns: True if error
:rtype: bool
"""
return bool(v & all_errors())
def dead():
"""Bit mask defining whether a transaction is still likely to be processed on the network.
:returns: Bit mask
:rtype: number
"""
return StatusBits.FINAL | StatusBits.OBSOLETE
def is_alive(v):
"""Check if transaction is still likely to be processed on the network.
The hedge "likely" covers the case where a transaction has been obsoleted after being sent to the network, but the network nonetheless confirms the obsoleted transaction. The return value of this method will not change as a result of that, but the state itself will, as the FINAL bit will then be set.
:returns: True if neither the FINAL nor the OBSOLETE bit is set
:rtype: bool
"""
return bool(v & dead() == 0)

View File

@ -27,8 +27,8 @@ def upgrade():
sa.Column('destination_token_address', sa.String(42), nullable=False),
sa.Column('sender', sa.String(42), nullable=False),
sa.Column('recipient', sa.String(42), nullable=False),
sa.Column('from_value', sa.String(), nullable=False),
sa.Column('to_value', sa.String(), nullable=True),
sa.Column('from_value', sa.NUMERIC(), nullable=False),
sa.Column('to_value', sa.NUMERIC(), nullable=True),
sa.Column('block_number', sa.BIGINT(), nullable=True),
sa.Column('tx_index', sa.Integer, nullable=True),
)

View File

@ -19,7 +19,6 @@ def upgrade():
op.create_table(
'tx_cache',
sa.Column('id', sa.Integer, primary_key=True),
# sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=True),
sa.Column('otx_id', sa.Integer, sa.ForeignKey('otx.id'), nullable=True),
sa.Column('date_created', sa.DateTime, nullable=False),
sa.Column('date_updated', sa.DateTime, nullable=False),
@ -27,8 +26,8 @@ def upgrade():
sa.Column('destination_token_address', sa.String(42), nullable=False),
sa.Column('sender', sa.String(42), nullable=False),
sa.Column('recipient', sa.String(42), nullable=False),
sa.Column('from_value', sa.BIGINT(), nullable=False),
sa.Column('to_value', sa.BIGINT(), nullable=True),
sa.Column('from_value', sa.NUMERIC(), nullable=False),
sa.Column('to_value', sa.NUMERIC(), nullable=True),
sa.Column('block_number', sa.BIGINT(), nullable=True),
sa.Column('tx_index', sa.Integer, nullable=True),
)

View File

@ -1,9 +1,14 @@
# standard imports
import logging
# third-party imports
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
logg = logging.getLogger()
Model = declarative_base(name='Model')
@ -21,7 +26,11 @@ class SessionBase(Model):
transactional = True
"""Whether the database backend supports query transactions. Should be explicitly set by initialization code"""
poolable = True
"""Whether the database backend supports query transactions. Should be explicitly set by initialization code"""
"""Whether the database backend supports connection pools. Should be explicitly set by initialization code"""
procedural = True
"""Whether the database backend supports stored procedures"""
localsessions = {}
"""Contains dictionary of sessions initiated by db model components"""
@staticmethod
@ -71,3 +80,23 @@ class SessionBase(Model):
"""
SessionBase.engine.dispose()
SessionBase.engine = None
@staticmethod
def bind_session(session=None):
localsession = session
if localsession == None:
localsession = SessionBase.create_session()
localsession_key = str(id(localsession))
logg.debug('creating new session {}'.format(localsession_key))
SessionBase.localsessions[localsession_key] = localsession
return localsession
@staticmethod
def release_session(session=None):
session_key = str(id(session))
if SessionBase.localsessions.get(session_key) != None:
logg.debug('destroying session {}'.format(session_key))
session.commit()
session.close()
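The intent of this pair is that model methods can either join a caller's session or transparently manage their own; a minimal sketch of the pattern, using a hypothetical helper that is not part of this diff:

def mark_example(otx, session=None):
    # join the caller's session if one was passed, otherwise create a local one
    session = SessionBase.bind_session(session)
    session.add(otx)
    session.flush()
    # commits and closes the session if it was registered by bind_session
    SessionBase.release_session(session)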

View File

@ -8,7 +8,12 @@ from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
# local imports
from .base import SessionBase
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import (
StatusEnum,
StatusBits,
status_str,
is_error_status,
)
from cic_eth.db.error import TxStateChangeError
#from cic_eth.eth.util import address_hex_from_signed_tx
@ -54,21 +59,24 @@ class Otx(SessionBase):
block = Column(Integer)
def __set_status(self, status, session=None):
localsession = session
if localsession == None:
localsession = SessionBase.create_session()
def __set_status(self, status, session):
self.status |= status
session.add(self)
session.flush()
self.status = status
localsession.add(self)
localsession.flush()
if self.tracing:
self.__state_log(session=localsession)
def __reset_status(self, status, session):
status_edit = ~status & self.status
self.status &= status_edit
session.add(self)
session.flush()
if session==None:
localsession.commit()
localsession.close()
def __status_already_set(self, status):
r = bool(self.status & status)
if r:
logg.warning('status bit {} already set on {}'.format(status.name, self.tx_hash))
return r
def set_block(self, block, session=None):
@ -102,9 +110,23 @@ class Otx(SessionBase):
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
"""
if self.status >= StatusEnum.SENT.value:
raise TxStateChangeError('WAITFORGAS cannot succeed final state, had {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.WAITFORGAS, session)
if self.__status_already_set(StatusBits.GAS_ISSUES):
return
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('GAS_ISSUES cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if self.status & StatusBits.IN_NETWORK:
raise TxStateChangeError('GAS_ISSUES cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.GAS_ISSUES, session)
self.__reset_status(StatusBits.QUEUED | StatusBits.DEFERRED, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def fubar(self, session=None):
@ -112,28 +134,89 @@ class Otx(SessionBase):
Only manipulates the object; does not start a transaction or commit to the backend.
"""
self.__set_status(StatusEnum.FUBAR, session)
if self.__status_already_set(StatusBits.UNKNOWN_ERROR):
return
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('FUBAR cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if is_error_status(self.status):
raise TxStateChangeError('FUBAR cannot be set on an entry with an error state already set ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.UNKNOWN_ERROR | StatusBits.FINAL, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def reject(self, session=None):
"""Marks transaction as "rejected," which means the node rejected sending the transaction to the network. The nonce has not been spent, and the transaction should be replaced.
Only manipulates the object; does not start a transaction or commit to the backend.
"""
if self.status >= StatusEnum.SENT.value:
raise TxStateChangeError('REJECTED cannot succeed SENT or final state, had {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.REJECTED, session)
if self.__status_already_set(StatusBits.NODE_ERROR):
return
def override(self, session=None):
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('REJECTED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if self.status & StatusBits.IN_NETWORK:
raise TxStateChangeError('REJECTED cannot be set on an entry already IN_NETWORK ({})'.format(status_str(self.status)))
if is_error_status(self.status):
raise TxStateChangeError('REJECTED cannot be set on an entry with an error state already set ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.NODE_ERROR | StatusBits.FINAL, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def override(self, manual=False, session=None):
"""Marks transaction as manually overridden.
Only manipulates the object; does not start a transaction or commit to the backend.
"""
if self.status >= StatusEnum.SENT.value:
raise TxStateChangeError('OVERRIDDEN cannot succeed SENT or final state, had {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.OVERRIDDEN, session)
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if self.status & StatusBits.IN_NETWORK:
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry already IN_NETWORK ({})'.format(status_str(self.status)))
if self.status & StatusBits.OBSOLETE:
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry already OBSOLETE ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.OBSOLETE, session)
#if manual:
# self.__set_status(StatusBits.MANUAL, session)
self.__reset_status(StatusBits.QUEUED | StatusBits.IN_NETWORK, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def manual(self, session=None):
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('OVERRIDDEN/OBSOLETED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.MANUAL, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def retry(self, session=None):
"""Marks transaction as ready to retry after a timeout following a sendfail or a completed gas funding.
@ -142,9 +225,23 @@ class Otx(SessionBase):
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
"""
if self.status != StatusEnum.SENT.value and self.status != StatusEnum.SENDFAIL.value:
raise TxStateChangeError('RETRY must follow SENT or SENDFAIL, but had {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.RETRY, session)
if self.__status_already_set(StatusBits.QUEUED):
return
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('RETRY cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if not is_error_status(self.status) and not StatusBits.IN_NETWORK & self.status > 0:
raise TxStateChangeError('RETRY cannot be set on an entry that has no error ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.QUEUED, session)
self.__reset_status(StatusBits.GAS_ISSUES, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def readysend(self, session=None):
@ -154,9 +251,23 @@ class Otx(SessionBase):
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
"""
if self.status != StatusEnum.PENDING.value and self.status != StatusEnum.WAITFORGAS.value:
raise TxStateChangeError('READYSEND must follow PENDING or WAITFORGAS, but had {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.READYSEND, session)
if self.__status_already_set(StatusBits.QUEUED):
return
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('READYSEND cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if is_error_status(self.status):
raise TxStateChangeError('READYSEND cannot be set on an errored state ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.QUEUED, session)
self.__reset_status(StatusBits.GAS_ISSUES, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def sent(self, session=None):
@ -166,9 +277,21 @@ class Otx(SessionBase):
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
"""
if self.status > StatusEnum.SENT:
raise TxStateChangeError('SENT after {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.SENT, session)
if self.__status_already_set(StatusBits.IN_NETWORK):
return
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('SENT cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.IN_NETWORK, session)
self.__reset_status(StatusBits.DEFERRED | StatusBits.QUEUED | StatusBits.LOCAL_ERROR | StatusBits.NODE_ERROR, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def sendfail(self, session=None):
@ -178,9 +301,23 @@ class Otx(SessionBase):
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
"""
if self.status not in [StatusEnum.PENDING, StatusEnum.SENT, StatusEnum.WAITFORGAS]:
raise TxStateChangeError('SENDFAIL must follow SENT or PENDING, but had {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.SENDFAIL, session)
if self.__status_already_set(StatusBits.NODE_ERROR):
return
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('SENDFAIL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if self.status & StatusBits.IN_NETWORK:
raise TxStateChangeError('SENDFAIL cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status)))
self.__set_status(StatusBits.LOCAL_ERROR | StatusBits.DEFERRED, session)
self.__reset_status(StatusBits.QUEUED | StatusBits.GAS_ISSUES, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def minefail(self, block, session=None):
@ -192,14 +329,25 @@ class Otx(SessionBase):
:type block: number
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
"""
if self.__status_already_set(StatusBits.NETWORK_ERROR):
return
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('REVERTED cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if not self.status & StatusBits.IN_NETWORK:
raise TxStateChangeError('REVERTED cannot be set on an entry without IN_NETWORK state set ({})'.format(status_str(self.status)))
if block != None:
self.block = block
if self.status != StatusEnum.SENT:
logg.warning('REVERTED should follow SENT, but had {}'.format(StatusEnum(self.status).name))
#if self.status != StatusEnum.PENDING and self.status != StatusEnum.OBSOLETED and self.status != StatusEnum.SENT:
#if self.status > StatusEnum.SENT:
# raise TxStateChangeError('REVERTED must follow OBSOLETED, PENDING or SENT, but had {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.REVERTED, session)
self.__set_status(StatusBits.NETWORK_ERROR | StatusBits.FINAL, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def cancel(self, confirmed=False, session=None):
@ -213,18 +361,36 @@ class Otx(SessionBase):
:type confirmed: bool
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
"""
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('CANCEL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if confirmed:
if self.status != StatusEnum.OBSOLETED:
logg.warning('CANCELLED must follow OBSOLETED, but had {}'.format(StatusEnum(self.status).name))
#raise TxStateChangeError('CANCELLED must follow OBSOLETED, but had {}'.format(StatusEnum(self.status).name))
if not self.status & StatusBits.OBSOLETE:
raise TxStateChangeError('CANCEL can only be set on an entry marked OBSOLETE ({})'.format(status_str(self.status)))
self.__set_status(StatusEnum.CANCELLED, session)
elif self.status != StatusEnum.OBSOLETED:
if self.status > StatusEnum.SENT:
logg.warning('OBSOLETED must follow PENDING, SENDFAIL or SENT, but had {}'.format(StatusEnum(self.status).name))
#raise TxStateChangeError('OBSOLETED must follow PENDING, SENDFAIL or SENT, but had {}'.format(StatusEnum(self.status).name))
else:
self.__set_status(StatusEnum.OBSOLETED, session)
# if confirmed:
# if self.status != StatusEnum.OBSOLETED:
# logg.warning('CANCELLED must follow OBSOLETED, but had {}'.format(StatusEnum(self.status).name))
# #raise TxStateChangeError('CANCELLED must follow OBSOLETED, but had {}'.format(StatusEnum(self.status).name))
# self.__set_status(StatusEnum.CANCELLED, session)
# elif self.status != StatusEnum.OBSOLETED:
# if self.status > StatusEnum.SENT:
# logg.warning('OBSOLETED must follow PENDING, SENDFAIL or SENT, but had {}'.format(StatusEnum(self.status).name))
# #raise TxStateChangeError('OBSOLETED must follow PENDING, SENDFAIL or SENT, but had {}'.format(StatusEnum(self.status).name))
# self.__set_status(StatusEnum.OBSOLETED, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
def success(self, block, session=None):
"""Marks that transaction was successfully mined.
@ -235,16 +401,27 @@ class Otx(SessionBase):
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
"""
session = SessionBase.bind_session(session)
if self.status & StatusBits.FINAL:
raise TxStateChangeError('SUCCESS cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
if not self.status & StatusBits.IN_NETWORK:
raise TxStateChangeError('SUCCESS cannot be set on an entry without IN_NETWORK state set ({})'.format(status_str(self.status)))
if is_error_status(self.status):
raise TxStateChangeError('SUCCESS cannot be set on an entry with error state set ({})'.format(status_str(self.status)))
if block != None:
self.block = block
if self.status != StatusEnum.SENT:
logg.error('SUCCESS should follow SENT, but had {}'.format(StatusEnum(self.status).name))
#raise TxStateChangeError('SUCCESS must follow SENT, but had {}'.format(StatusEnum(self.status).name))
self.__set_status(StatusEnum.SUCCESS, session)
if self.tracing:
self.__state_log(session=session)
SessionBase.release_session(session)
@staticmethod
def get(status=0, limit=4096, status_exact=True):
def get(status=0, limit=4096, status_exact=True, session=None):
"""Returns outgoing transaction lists by status.
Status may either be matched exactly, or be an upper bound of the integer value of the status enum.
@ -259,26 +436,32 @@ class Otx(SessionBase):
:rtype: tuple, where first element is transaction hash
"""
e = None
session = Otx.create_session()
session = SessionBase.bind_session(session)
if status_exact:
e = session.query(Otx.tx_hash).filter(Otx.status==status).order_by(Otx.date_created.asc()).limit(limit).all()
else:
e = session.query(Otx.tx_hash).filter(Otx.status<=status).order_by(Otx.date_created.asc()).limit(limit).all()
session.close()
SessionBase.release_session(session)
return e
@staticmethod
def load(tx_hash):
def load(tx_hash, session=None):
"""Retrieves the outgoing transaction record by transaction hash.
:param tx_hash: Transaction hash
:type tx_hash: str, 0x-hex
"""
session = Otx.create_session()
session = SessionBase.bind_session(session)
q = session.query(Otx)
q = q.filter(Otx.tx_hash==tx_hash)
session.close()
SessionBase.release_session(session)
return q.first()
@ -450,6 +633,3 @@ class OtxSync(SessionBase):
self.tx_height_session = 0
self.block_height_backlog = 0
self.tx_height_backlog = 0

View File

@ -2,7 +2,7 @@
import datetime
# third-party imports
from sqlalchemy import Column, String, Integer, DateTime, Enum, ForeignKey, Boolean
from sqlalchemy import Column, String, Integer, DateTime, Enum, ForeignKey, Boolean, NUMERIC
from sqlalchemy.ext.hybrid import hybrid_method, hybrid_property
#from sqlalchemy.orm import relationship, backref
#from sqlalchemy.ext.declarative import declarative_base
@ -55,8 +55,8 @@ class TxCache(SessionBase):
destination_token_address = Column(String(42))
sender = Column(String(42))
recipient = Column(String(42))
from_value = Column(String())
to_value = Column(String())
from_value = Column(NUMERIC())
to_value = Column(NUMERIC())
block_number = Column(Integer())
tx_index = Column(Integer())
date_created = Column(DateTime, default=datetime.datetime.utcnow)
@ -64,16 +64,6 @@ class TxCache(SessionBase):
date_checked = Column(DateTime, default=datetime.datetime.utcnow)
def values(self):
from_value_hex = bytes.fromhex(self.from_value)
from_value = int.from_bytes(from_value_hex, 'big')
to_value_hex = bytes.fromhex(self.to_value)
to_value = int.from_bytes(to_value_hex, 'big')
return (from_value, to_value)
def check(self):
"""Update the "checked" timestamp to current time.
@ -116,15 +106,14 @@ class TxCache(SessionBase):
if otx == None:
raise NotLocalTxError('new {}'.format(tx_hash_new))
values = txc.values()
txc_new = TxCache(
otx.tx_hash,
txc.sender,
txc.recipient,
txc.source_token_address,
txc.destination_token_address,
values[0],
values[1],
int(txc.from_value),
int(txc.to_value),
)
localsession.add(txc_new)
localsession.commit()
@ -141,17 +130,12 @@ class TxCache(SessionBase):
raise FileNotFoundError('outgoing transaction record unknown {} (add a Tx first)'.format(tx_hash))
self.otx_id = tx.id
# if tx == None:
# session.close()
# raise ValueError('tx hash {} (outgoing: {}) not found'.format(tx_hash, outgoing))
# session.close()
self.sender = sender
self.recipient = recipient
self.source_token_address = source_token_address
self.destination_token_address = destination_token_address
self.from_value = num_serialize(from_value).hex()
self.to_value = num_serialize(to_value).hex()
self.from_value = from_value
self.to_value = to_value
self.block_number = block_number
self.tx_index = tx_index
# not automatically set in sqlite, it seems:

View File

@ -6,6 +6,7 @@ import celery
import requests
import web3
from cic_registry import CICRegistry
from cic_registry import zero_address
from cic_registry.chain import ChainSpec
# platform imports
@ -17,6 +18,7 @@ from cic_eth.eth.task import sign_and_register_tx
from cic_eth.eth.task import create_check_gas_and_send_task
from cic_eth.eth.factory import TxFactory
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.ext.address import translate_address
celery_app = celery.current_app
logg = logging.getLogger()
@ -185,7 +187,6 @@ def balance(tokens, holder_address, chain_str):
"""
#abi = ContractRegistry.abi('ERC20Token')
chain_spec = ChainSpec.from_chain_str(chain_str)
balances = []
c = RpcClient(chain_spec)
for t in tokens:
#token = CICRegistry.get_address(t['address'])
@ -193,9 +194,9 @@ def balance(tokens, holder_address, chain_str):
#o = c.w3.eth.contract(abi=abi, address=t['address'])
o = CICRegistry.get_address(chain_spec, t['address']).contract
b = o.functions.balanceOf(holder_address).call()
logg.debug('balance {} for {}: {}'.format(t['address'], holder_address, b))
balances.append(b)
return b
t['balance_network'] = b
return tokens
@celery_app.task(bind=True)
@ -324,7 +325,7 @@ def resolve_tokens_by_symbol(token_symbols, chain_str):
token = CICRegistry.get_token(chain_spec, token_symbol)
tokens.append({
'address': token.address(),
#'converters': [],
'converters': [],
})
return tokens
@ -445,3 +446,59 @@ def cache_approve_data(
cache_id = tx_cache.id
session.close()
return (tx_hash_hex, cache_id)
class ExtendedTx:
_default_decimals = 6
def __init__(self, tx_hash, chain_spec):
self._chain_spec = chain_spec
self.chain = str(chain_spec)
self.hash = tx_hash
self.sender = None
self.sender_label = None
self.recipient = None
self.recipient_label = None
self.source_token_value = 0
self.destination_token_value = 0
self.source_token = zero_address
self.destination_token = zero_address
self.source_token_symbol = ''
self.destination_token_symbol = ''
self.source_token_decimals = ExtendedTx._default_decimals
self.destination_token_decimals = ExtendedTx._default_decimals
def set_actors(self, sender, recipient, trusted_declarator_addresses=None):
self.sender = sender
self.recipient = recipient
if trusted_declarator_addresses != None:
self.sender_label = translate_address(sender, trusted_declarator_addresses, self.chain)
self.recipient_label = translate_address(recipient, trusted_declarator_addresses, self.chain)
def set_tokens(self, source, source_value, destination=None, destination_value=None):
if destination == None:
destination = source
if destination_value == None:
destination_value = source_value
st = CICRegistry.get_address(self._chain_spec, source)
dt = CICRegistry.get_address(self._chain_spec, destination)
self.source_token = source
self.source_token_symbol = st.symbol()
self.source_token_decimals = st.decimals()
self.source_token_value = source_value
self.destination_token = destination
self.destination_token_symbol = dt.symbol()
self.destination_token_decimals = dt.decimals()
self.destination_token_value = destination_value
def to_dict(self):
o = {}
for attr in dir(self):
if attr[0] == '_' or attr in ['set_actors', 'set_tokens', 'to_dict']:
continue
o[attr] = getattr(self, attr)
return o
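A hedged sketch of how ExtendedTx is meant to be populated. It assumes an initialized CICRegistry so that the symbol() and decimals() lookups resolve; the transaction hash, addresses and chain spec string are placeholders.

from cic_registry.chain import ChainSpec

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')    # placeholder chain spec
etx = ExtendedTx('0x' + 'ab' * 32, chain_spec)                # placeholder tx hash
etx.set_actors('0x' + '11' * 20, '0x' + '22' * 20)            # sender, recipient (no declarator lookup)
etx.set_tokens('0x' + '33' * 20, 1024)                        # destination token/value default to source
data = etx.to_dict()   # flat dict of all public attributes, ready for a callback task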

View File

@ -13,7 +13,10 @@ from .rpc import RpcClient
from cic_eth.db import Otx, SessionBase
from cic_eth.db.models.tx import TxCache
from cic_eth.db.models.lock import Lock
from cic_eth.db.enum import LockEnum
from cic_eth.db.enum import (
LockEnum,
StatusBits,
)
from cic_eth.error import PermanentTxError
from cic_eth.error import TemporaryTxError
from cic_eth.error import NotLocalTxError
@ -341,7 +344,6 @@ def send(self, txs, chain_str):
chain_spec = ChainSpec.from_chain_str(chain_str)
tx_hex = txs[0]
logg.debug('send transaction {}'.format(tx_hex))
@ -399,9 +401,10 @@ def refill_gas(self, recipient_address, chain_str):
chain_spec = ChainSpec.from_chain_str(chain_str)
session = SessionBase.create_session()
status_filter = StatusBits.FINAL | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR
q = session.query(Otx.tx_hash)
q = q.join(TxCache)
q = q.filter(Otx.status<=0)
q = q.filter(Otx.status.op('&')(StatusBits.FINAL.value)==0)
q = q.filter(TxCache.from_value!=0)
q = q.filter(TxCache.recipient==recipient_address)
c = q.count()
@ -495,7 +498,7 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_str, gas=None, default_fa
tx_signed_raw_bytes = bytes.fromhex(otx.signed_tx[2:])
tx = unpack_signed_raw_tx(tx_signed_raw_bytes, chain_spec.chain_id())
logg.debug('otx {} {}'.format(tx, otx.signed_tx))
logg.debug('resend otx {} {}'.format(tx, otx.signed_tx))
queue = self.request.delivery_info['routing_key']

View File

@ -104,3 +104,5 @@ def tx_hex_string(tx_hex, chain_id):
tx_raw_bytes = bytes.fromhex(tx_hex)
return tx_string(tx_raw_bytes, chain_id)

View File

View File

@ -0,0 +1,43 @@
# standard imports
import logging
# third-party imports
import celery
from cic_registry.chain import ChainSpec
from cic_registry import CICRegistry
celery_app = celery.current_app
logg = logging.getLogger()
def translate_address(address, trusted_addresses, chain_spec):
for trusted_address in trusted_addresses:
o = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', 'Declarator')
fn = o.function('declaration')
declaration_hex = fn(trusted_address, address).call()
declaration_bytes = declaration_hex[0].rstrip(b'\x00')
declaration = None
try:
declaration = declaration_bytes.decode('utf-8', errors='strict')
except UnicodeDecodeError:
continue
return declaration
@celery_app.task()
def translate_tx_addresses(tx, trusted_addresses, chain_str):
chain_spec = ChainSpec.from_chain_str(chain_str)
declaration = None
if tx['sender_label'] == None:
declaration = translate_address(tx['sender'], trusted_addresses, chain_spec)
tx['sender_label'] = declaration
declaration = None
if tx['recipient_label'] == None:
declaration = translate_address(tx['recipient'], trusted_addresses, chain_spec)
tx['recipient_label'] = declaration
return tx
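The task mutates and returns the same tx dict; roughly (all addresses and the chain spec string are placeholders):

tx = {
    'sender': '0x' + '11' * 20,
    'sender_label': None,
    'recipient': '0x' + '22' * 20,
    'recipient_label': None,
}
# translate_tx_addresses(tx, ['0x' + 'dd' * 20], 'evm:bloxberg:8996') fills the *_label
# fields from the first trusted declarator entry that decodes as utf-8, and leaves them
# as None when no usable declaration is found.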

View File

@ -0,0 +1,176 @@
# standard imports
import logging
import math
# third-party imports
import web3
import celery
import moolb
from cic_registry.chain import ChainSpec
from cic_registry.registry import CICRegistry
from hexathon import strip_0x
# local imports
from cic_eth.eth.rpc import RpcClient
from cic_eth.db.models.otx import Otx
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.db.enum import StatusEnum
from cic_eth.eth.token import unpack_transfer
from cic_eth.queue.tx import get_tx_cache
from cic_eth.queue.time import tx_times
celery_app = celery.current_app
logg = logging.getLogger()
MAX_BLOCK_TX = 250
# TODO: Make this method easier to read
@celery_app.task()
def list_tx_by_bloom(bloomspec, address, chain_str):
"""Retrieve external transaction data matching the provided filter
The bloom filter representation has the following structure (the size of the filter is inferred from the size of the provided filter data):
{
'alg': <str; hashing algorithm, currently only "sha256" is understood>,
'high': <number; highest block number in matched set>,
'low': <number; lowest block number in matched set>,
'filter_rounds': <number; hashing rounds used to generate filter entry>,
'block_filter': <hex; bloom filter data with block matches>,
'blocktx_filter': <hex; bloom filter data with block+tx matches>,
}
:param bloomspec: Bloom filter data
:type bloomspec: dict (see description above)
:param address: Recipient address to use in matching
:type address: str, 0x-hex
:param chain_str: Chain spec string representation
:type chain_str: str
:returns: dict of transaction data as dict, keyed by transaction hash
:rtype: dict of dict
"""
chain_spec = ChainSpec.from_chain_str(chain_str)
c = RpcClient(chain_spec)
block_filter_data = bytes.fromhex(bloomspec['block_filter'])
tx_filter_data = bytes.fromhex(bloomspec['blocktx_filter'])
databitlen = len(block_filter_data)*8
block_filter = moolb.Bloom(databitlen, bloomspec['filter_rounds'], default_data=block_filter_data)
tx_filter = moolb.Bloom(databitlen, bloomspec['filter_rounds'], default_data=tx_filter_data)
txs = {}
for block_height in range(bloomspec['low'], bloomspec['high']):
block_height_bytes = block_height.to_bytes(4, 'big')
if block_filter.check(block_height_bytes):
logg.debug('filter matched block {}'.format(block_height))
block = c.w3.eth.getBlock(block_height, True)
for tx_index in range(0, len(block.transactions)):
composite = tx_index + block_height
tx_index_bytes = composite.to_bytes(4, 'big')
if tx_filter.check(tx_index_bytes):
logg.debug('filter matched block {} tx {}'.format(block_height, tx_index))
try:
tx = c.w3.eth.getTransactionByBlock(block_height, tx_index)
except web3.exceptions.TransactionNotFound:
logg.debug('false positive on block {} tx {}'.format(block_height, tx_index))
continue
tx_address = None
tx_token_value = 0
try:
transfer_data = unpack_transfer(tx['data'])
tx_address = transfer_data['to']
tx_token_value = transfer_data['amount']
except ValueError:
logg.debug('not a transfer transaction, skipping {}'.format(tx))
continue
if address == tx_address:
status = StatusEnum.SENT
try:
rcpt = c.w3.eth.getTransactionReceipt(tx.hash)
if rcpt['status'] == 0:
pending = StatusEnum.REVERTED
else:
pending = StatusEnum.SUCCESS
except web3.exceptions.TransactionNotFound:
pass
tx_hash_hex = tx['hash'].hex()
token = CICRegistry.get_address(chain_spec, tx['to'])
token_symbol = token.symbol()
token_decimals = token.decimals()
times = tx_times(tx_hash_hex, chain_str)
tx_r = {
'hash': tx_hash_hex,
'sender': tx['from'],
'recipient': tx_address,
'source_value': tx_token_value,
'destination_value': tx_token_value,
'source_token': tx['to'],
'destination_token': tx['to'],
'source_token_symbol': token_symbol,
'destination_token_symbol': token_symbol,
'source_token_decimals': token_decimals,
'destination_token_decimals': token_decimals,
'source_token_chain': chain_str,
'destination_token_chain': chain_str,
'nonce': tx['nonce'],
}
if times['queue'] != None:
tx_r['date_created'] = times['queue']
else:
tx_r['date_created'] = times['network']
txs[tx_hash_hex] = tx_r
break
return txs
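For orientation, the bloomspec argument described in the docstring looks roughly like this; all values are placeholders, and the two filters are hex-encoded byte strings produced by whichever external syncer fills this role:

bloomspec = {
    'alg': 'sha256',               # hashing algorithm used for the filter entries
    'low': 1024,                   # lowest block number covered by the filter
    'high': 1042,                  # highest block number covered by the filter
    'filter_rounds': 3,            # hashing rounds per entry
    'block_filter': '00' * 64,     # filter over block numbers
    'blocktx_filter': '00' * 64,   # filter over block number + tx index
}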
# TODO: Surely it must be possible to optimize this
# TODO: DRY this with callback filter in cic_eth/runnable/manager
# TODO: Remove redundant fields from end representation (timestamp, tx_hash)
@celery_app.task()
def tx_collate(tx_batches, chain_str, offset, limit, newest_first=True):
"""Merges transaction data from multiple sources and sorts them in chronological order.
:param tx_batches: Transaction data inputs
:type tx_batches: lists of lists of transaction data
:param chain_str: Chain spec string representation
:type chain_str: str
:param offset: Number of sorted results to skip (not yet implemented)
:type offset: number
:param limit: Maximum number of results to return (not yet implemented)
:type limit: number
:param newest_first: If True, returns results in reverse chronological order
:type newest_first: bool
:returns: Transactions
:rtype: list
"""
txs_by_block = {}
chain_spec = ChainSpec.from_chain_str(chain_str)
for b in tx_batches:
for v in b.values():
tx = None
k = None
try:
hx = strip_0x(v)
tx = unpack_signed_raw_tx(bytes.fromhex(hx), chain_spec.chain_id())
txc = get_tx_cache(tx['hash'])
txc['timestamp'] = int(txc['date_created'].timestamp())
txc['hash'] = txc['tx_hash']
tx = txc
except TypeError:
tx = v
tx['timestamp'] = tx['date_created']
k = '{}.{}.{}'.format(tx['timestamp'], tx['sender'], tx['nonce'])
txs_by_block[k] = tx
txs = []
ks = list(txs_by_block.keys())
ks.sort()
if newest_first:
ks.reverse()
for k in ks:
txs.append(txs_by_block[k])
return txs

View File

@ -0,0 +1,120 @@
# standard imports
import logging
# third-party imports
import celery
from hexathon import strip_0x
# local imports
from cic_registry.chain import ChainSpec
from cic_eth.db import SessionBase
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.db.enum import (
StatusBits,
dead,
)
celery_app = celery.current_app
logg = logging.getLogger()
def __balance_outgoing_compatible(token_address, holder_address, chain_str):
session = SessionBase.create_session()
q = session.query(TxCache.from_value)
q = q.join(Otx)
q = q.filter(TxCache.sender==holder_address)
status_compare = dead()
q = q.filter(Otx.status.op('&')(status_compare)==0)
q = q.filter(TxCache.source_token_address==token_address)
delta = 0
for r in q.all():
delta += int(r[0])
session.close()
return delta
@celery_app.task()
def balance_outgoing(tokens, holder_address, chain_str):
"""Retrieve accumulated value of unprocessed transactions sent from the given address.
:param tokens: list of token spec dicts with addresses to retrieve balances for
:type tokens: list of str, 0x-hex
:param holder_address: Sender address
:type holder_address: str, 0x-hex
:param chain_str: Chain spec string representation
:type chain_str: str
:returns: Tokens dicts with outgoing balance added
:rtype: dict
"""
chain_spec = ChainSpec.from_chain_str(chain_str)
for t in tokens:
b = __balance_outgoing_compatible(t['address'], holder_address, chain_str)
t['balance_outgoing'] = b
return tokens
def __balance_incoming_compatible(token_address, receiver_address, chain_str):
session = SessionBase.create_session()
q = session.query(TxCache.to_value)
q = q.join(Otx)
q = q.filter(TxCache.recipient==receiver_address)
status_compare = dead()
q = q.filter(Otx.status.op('&')(status_compare)==0)
# TODO: this can change the result for the recipient if tx is later obsoleted and resubmission is delayed.
q = q.filter(Otx.status.op('&')(StatusBits.IN_NETWORK)==StatusBits.IN_NETWORK)
q = q.filter(TxCache.destination_token_address==token_address)
delta = 0
for r in q.all():
delta += int(r[0])
session.close()
return delta
@celery_app.task()
def balance_incoming(tokens, recipient_address, chain_str):
"""Retrieve accumulated value of unprocessed transactions to be received by the given address.
:param tokens: list of token spec dicts with addresses to retrieve balances for
:type tokens: list of str, 0x-hex
:param recipient_address: Recipient address
:type recipient_address: str, 0x-hex
:param chain_str: Chain spec string representation
:type chain_str: str
:returns: Tokens dicts with incoming balance added
:rtype: dict
"""
chain_spec = ChainSpec.from_chain_str(chain_str)
for t in tokens:
b = __balance_incoming_compatible(t['address'], recipient_address, chain_str)
t['balance_incoming'] = b
return tokens
@celery_app.task()
def assemble_balances(balances_collection):
"""Combines token spec dicts with individual balances into a single token spec dict.
A "balance" means any field that is keyed with a string starting with "balance_"
:param balances_collection: Token spec dicts
:type balances_collection: list of lists of dicts
:returns: Single token spec dict per token with all balances
:rtype: list of dicts
"""
tokens = {}
for c in balances_collection:
for b in c:
address = b['address']
if tokens.get(address) == None:
tokens[address] = {
'address': address,
'converters': b['converters'],
}
for k in b.keys():
if k[:8] == 'balance_':
tokens[address][k] = b[k]
return list(tokens.values())
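Since assemble_balances is pure data shuffling, it can be exercised directly without a broker; a small sketch (the address is a placeholder):

addr = '0x' + 'aa' * 20
collection = [
    [{'address': addr, 'converters': [], 'balance_network': 1000}],
    [{'address': addr, 'converters': [], 'balance_incoming': 50}],
    [{'address': addr, 'converters': [], 'balance_outgoing': 20}],
]
# assemble_balances(collection) merges every 'balance_*' key into one dict per token:
# [{'address': addr, 'converters': [], 'balance_network': 1000,
#   'balance_incoming': 50, 'balance_outgoing': 20}]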

View File

@ -0,0 +1,40 @@
# standard imports
import logging
# third-party imports
import web3
import celery
from cic_registry.chain import ChainSpec
# local imports
from cic_eth.eth.rpc import RpcClient
from cic_eth.db.models.otx import Otx
from cic_eth.error import NotLocalTxError
celery_app = celery.current_app
logg = logging.getLogger()
# TODO: This method does not belong in the _queue_ module, it operates across queue and network
@celery_app.task()
def tx_times(tx_hash, chain_str):
chain_spec = ChainSpec.from_chain_str(chain_str)
c = RpcClient(chain_spec)
time_pair = {
'network': None,
'queue': None,
}
try:
rcpt = c.w3.eth.getTransactionReceipt(tx_hash)
block = c.w3.eth.getBlock(rcpt['blockHash'])
logg.debug('rcpt {}'.format(block))
time_pair['network'] = block['timestamp']
except web3.exceptions.TransactionNotFound:
pass
otx = Otx.load(tx_hash)
if otx != None:
time_pair['queue'] = int(otx.date_created.timestamp())
return time_pair
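The return value is a simple pair of timestamps; roughly (hash, chain spec and values are placeholders):

# tx_times('0x' + 'ab' * 32, 'evm:bloxberg:8996')
# -> {'network': 1613551858, 'queue': 1613551842}
# 'network' is None if the tx is not mined; 'queue' is None if it is not known locally.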

View File

@ -5,19 +5,27 @@ import datetime
# third-party imports
import celery
from hexathon import strip_0x
from sqlalchemy import or_
from sqlalchemy import not_
from sqlalchemy import tuple_
from sqlalchemy import func
# local imports
from cic_registry import CICRegistry
from cic_registry.chain import ChainSpec
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.otx import OtxStateLog
from cic_eth.db.models.tx import TxCache
from cic_eth.db.models.lock import Lock
from cic_eth.db import SessionBase
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import LockEnum
from cic_eth.db.enum import (
StatusEnum,
LockEnum,
StatusBits,
is_alive,
dead,
)
from cic_eth.eth.util import unpack_signed_raw_tx # TODO: should not be in same sub-path as package that imports queue.tx
from cic_eth.error import NotLocalTxError
from cic_eth.error import LockedError
@ -70,10 +78,7 @@ def create(nonce, holder_address, tx_hash, signed_tx, chain_str, obsolete_predec
for otx in q.all():
logg.info('otx {} obsoleted by {}'.format(otx.tx_hash, tx_hash))
if otx.status == StatusEnum.SENT:
otx.cancel(False, session=session)
elif otx.status != StatusEnum.OBSOLETED:
otx.override(session=session)
otx.cancel(confirmed=False, session=session)
session.commit()
session.close()
@ -167,6 +172,7 @@ def set_final_status(tx_hash, block=None, fail=False):
return tx_hash
@celery_app.task()
def set_cancel(tx_hash, manual=False):
"""Used to set the status when a transaction is cancelled.
@ -250,6 +256,33 @@ def set_fubar(tx_hash):
return tx_hash
@celery_app.task()
def set_manual(tx_hash):
Used to set the status when a queue item has been manually modified.
Will set the MANUAL status bit.
:param tx_hash: Transaction hash of record to modify
:type tx_hash: str, 0x-hex
:raises NotLocalTxError: If transaction not found in queue.
"""
session = SessionBase.create_session()
o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
if o == None:
session.close()
raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))
session.flush()
o.manual(session=session)
session.commit()
session.close()
return tx_hash
@celery_app.task()
def set_ready(tx_hash):
"""Used to mark a transaction as ready to be sent to network
@ -265,14 +298,11 @@ def set_ready(tx_hash):
raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))
session.flush()
if o.status == StatusEnum.WAITFORGAS or o.status == StatusEnum.PENDING:
if o.status & StatusBits.GAS_ISSUES or o.status == StatusEnum.PENDING:
o.readysend(session=session)
else:
o.retry(session=session)
logg.debug('set ready otx {} {}'.format(tx_hash, o))
session.add(o)
session.commit()
session.close()
@ -304,6 +334,7 @@ def set_waitforgas(tx_hash):
return tx_hash
@celery_app.task()
def get_state_log(tx_hash):
@ -350,7 +381,6 @@ def get_tx_cache(tx_hash):
session.close()
values = txc.values()
tx = {
'tx_hash': otx.tx_hash,
'signed_tx': otx.signed_tx,
@ -359,10 +389,12 @@ def get_tx_cache(tx_hash):
'status_code': otx.status,
'source_token': txc.source_token_address,
'destination_token': txc.destination_token_address,
'block_number': txc.block_number,
'tx_index': txc.tx_index,
'sender': txc.sender,
'recipient': txc.recipient,
'from_value': values[0],
'to_value': values[1],
'from_value': int(txc.from_value),
'to_value': int(txc.to_value),
'date_created': txc.date_created,
'date_updated': txc.date_updated,
'date_checked': txc.date_checked,
@ -483,13 +515,14 @@ def get_paused_txs(status=None, sender=None, chain_id=0):
q = session.query(Otx)
if status != None:
if status == StatusEnum.PENDING or status >= StatusEnum.SENT:
#if status == StatusEnum.PENDING or status >= StatusEnum.SENT:
if status == StatusEnum.PENDING or status & StatusBits.IN_NETWORK or not is_alive(status):
raise ValueError('not a valid paused tx value: {}'.format(status))
q = q.filter(Otx.status==status)
q = q.filter(Otx.status.op('&')(status.value)==status.value)
q = q.join(TxCache)
else:
q = q.filter(Otx.status>StatusEnum.PENDING)
q = q.filter(Otx.status<StatusEnum.SENT)
q = q.filter(Otx.status>StatusEnum.PENDING.value)
q = q.filter(not_(Otx.status.op('&')(StatusBits.IN_NETWORK.value)>0))
if sender != None:
q = q.filter(TxCache.sender==sender)
@ -508,7 +541,7 @@ def get_paused_txs(status=None, sender=None, chain_id=0):
return txs
def get_status_tx(status, before=None, limit=0):
def get_status_tx(status, before=None, exact=False, limit=0):
"""Retrieve transaction with a specific queue status.
:param status: Status to match transactions with
@ -525,7 +558,10 @@ def get_status_tx(status, before=None, limit=0):
q = session.query(Otx)
q = q.join(TxCache)
q = q.filter(TxCache.date_updated<before)
q = q.filter(Otx.status==status)
if exact:
q = q.filter(Otx.status==status.value)
else:
q = q.filter(Otx.status.op('&')(status.value)==status.value)
i = 0
for o in q.all():
if limit > 0 and i == limit:
@ -565,9 +601,12 @@ def get_upcoming_tx(status=StatusEnum.READYSEND, recipient=None, before=None, ch
q_outer = q_outer.join(Lock, isouter=True)
q_outer = q_outer.filter(or_(Lock.flags==None, Lock.flags.op('&')(LockEnum.SEND.value)==0))
if status >= StatusEnum.SENT:
raise ValueError('not a valid non-final tx value: {}'.format(s))
q_outer = q_outer.filter(Otx.status==status.value)
if not is_alive(status):
raise ValueError('not a valid non-final tx value: {}'.format(status))
if status == StatusEnum.PENDING:
q_outer = q_outer.filter(Otx.status==status.value)
else:
q_outer = q_outer.filter(Otx.status.op('&')(status.value)==status.value)
if recipient != None:
q_outer = q_outer.filter(TxCache.recipient==recipient)
@ -627,6 +666,7 @@ def get_account_tx(address, as_sender=True, as_recipient=True, counterpart=None)
"""
if not as_sender and not as_recipient:
raise ValueError('at least one of as_sender and as_recipient must be True')
txs = {}
session = SessionBase.create_session()
@ -642,10 +682,11 @@ def get_account_tx(address, as_sender=True, as_recipient=True, counterpart=None)
results = q.all()
for r in results:
if txs.get(r.tx_hash) != None:
logg.debug('tx {} already recorded'.format(r.tx_hash))
continue
txs[r.tx_hash] = r.signed_tx
session.close()
return txs

View File

@ -0,0 +1,4 @@
from .callback import CallbackFilter
from .tx import TxFilter
from .gas import GasFilter
from .register import RegistrationFilter

View File

@ -0,0 +1,2 @@
class SyncFilter:
pass

View File

@ -0,0 +1,107 @@
# standard imports
import logging
# third-party imports
import web3
import celery
from cic_registry.error import UnknownContractError
# local imports
from .base import SyncFilter
from cic_eth.eth.token import unpack_transfer
from cic_eth.eth.token import unpack_transferfrom
from cic_eth.eth.token import ExtendedTx
logg = logging.getLogger()
transfer_method_signature = '0xa9059cbb' # keccak256(transfer(address,uint256))
transferfrom_method_signature = '0x23b872dd' # keccak256(transferFrom(address,address,uint256))
giveto_method_signature = '0x63e4bff4' # keccak256(giveTo(address))
class CallbackFilter(SyncFilter):
trusted_addresses = []
def __init__(self, method, queue):
self.queue = queue
self.method = method
def call_back(self, transfer_type, result):
s = celery.signature(
self.method,
[
result,
transfer_type,
int(rcpt.status == 0),
],
queue=self.queue,
)
# s_translate = celery.signature(
# 'cic_eth.ext.address.translate',
# [
# result,
# self.trusted_addresses,
# chain_str,
# ],
# queue=self.queue,
# )
# s_translate.link(s)
# s_translate.apply_async()
s.apply_async()
def parse_data(self, tx, rcpt):
transfer_type = 'transfer'
transfer_data = None
method_signature = tx.input[:10]
if method_signature == transfer_method_signature:
transfer_data = unpack_transfer(tx.input)
transfer_data['from'] = tx['from']
transfer_data['token_address'] = tx['to']
elif method_signature == transferfrom_method_signature:
transfer_type = 'transferfrom'
transfer_data = unpack_transferfrom(tx.input)
transfer_data['token_address'] = tx['to']
# TODO: do not rely on logs here
elif method_signature == giveto_method_signature:
transfer_type = 'tokengift'
transfer_data = unpack_gift(tx.input)
for l in rcpt.logs:
if l.topics[0].hex() == '0x45c201a59ac545000ead84f30b2db67da23353aa1d58ac522c48505412143ffa':
transfer_data['value'] = web3.Web3.toInt(hexstr=l.data)
token_address_bytes = l.topics[2][32-20:]
transfer_data['token_address'] = web3.Web3.toChecksumAddress(token_address_bytes.hex())
transfer_data['from'] = rcpt.to
return (transfer_type, transfer_data)
def filter(self, w3, tx, rcpt, chain_spec):
logg.debug('applying callback filter "{}:{}"'.format(self.queue, self.method))
chain_str = str(chain_spec)
if len(tx.input) < 10:
logg.debug('callbacks filter data length not sufficient for method signature in tx {}, skipping'.format(tx['hash']))
return
logg.debug('checking callbacks filter input {}'.format(tx.input[:10]))
(transfer_type, transfer_data) = self.parse_data(tx, rcpt)
if transfer_data != None:
token_symbol = None
result = None
try:
tokentx = ExtendedTx(tx.hash.hex(), chain_spec)
tokentx.set_actors(transfer_data['from'], transfer_data['to'], self.trusted_addresses)
tokentx.set_tokens(transfer_data['token_address'], transfer_data['value'])
self.call_back(tokentx.to_dict())
except UnknownContractError:
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(tc.queue, tc.method, transfer_data['to'], tx.hash.hex()))
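For reference, a minimal sketch of the kind of task a TASKS_TRANSFER_CALLBACKS entry can point to, assuming the (result, transfer_type, status) argument order applied by call_back above; the task and module names here are illustrative only, not part of this changeset.
import celery
celery_app = celery.current_app
@celery_app.task()
def transfer_callback(result, transfer_type, status):
# result is the dict produced by ExtendedTx.to_dict()
# transfer_type is one of 'transfer', 'transferfrom', 'tokengift'
# status is non-zero when the observed transaction failed
return result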

View File

@ -0,0 +1,61 @@
#__convert_log_hash = '0x7154b38b5dd31bb3122436a96d4e09aba5b323ae1fd580025fab55074334c095' # keccak256(Conversion(address,address,address,uint256,uint256,address)
#def parse_convert_log(w3, entry):
# data = entry.data[2:]
# from_amount = int(data[:64], 16)
# to_amount = int(data[64:128], 16)
# holder_address_hex_raw = '0x' + data[-40:]
# holder_address_hex = w3.toChecksumAddress(holder_address_hex_raw)
# o = {
# 'from_amount': from_amount,
# 'to_amount': to_amount,
# 'holder_address': holder_address_hex
# }
# logg.debug('parsed convert log {}'.format(o))
# return o
#def convert_filter(w3, tx, rcpt, chain_spec):
# destination_token_address = None
# recipient_address = None
# amount = 0
# for l in rcpt['logs']:
# event_topic_hex = l['topics'][0].hex()
# if event_topic_hex == __convert_log_hash:
# tx_hash_hex = tx['hash'].hex()
# try:
# convert_transfer = TxConvertTransfer.get(tx_hash_hex)
# except UnknownConvertError:
# logg.warning('skipping unknown convert tx {}'.format(tx_hash_hex))
# continue
# if convert_transfer.transfer_tx_hash != None:
# logg.warning('convert tx {} cache record already has transfer hash {}, skipping'.format(tx_hash_hex, convert_transfer.transfer_hash))
# continue
# recipient_address = convert_transfer.recipient_address
# logg.debug('found convert event {} recipient'.format(tx_hash_hex, recipient_address))
# r = parse_convert_log(l)
# destination_token_address = l['topics'][3][-20:]
#
# if destination_token_address == zero_address or destination_token_address == None:
# return None
#
# destination_token_address_hex = destination_token_address.hex()
# s = celery.signature(
# 'cic_eth.eth.bancor.transfer_converted',
# [
# [{
# 'address': w3.toChecksumAddress(destination_token_address_hex),
# }],
# r['holder_address'],
# recipient_address,
# r['to_amount'],
# tx_hash_hex,
# str(chain_spec),
# ],
# queue=queue,
# )
# logg.info('sending tx signature {}'.format(s))
# t = s.apply_async()
# logg.debug('submitted transfer after convert task uuid {} {}'.format(t, t.successful()))
# return t

View File

@ -0,0 +1,54 @@
# standard imports
import logging
# third-party imports
from cic_registry.chain import ChainSpec
# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.tx import TxCache
from cic_eth.db import Otx
from cic_eth.db.enum import StatusEnum
from cic_eth.queue.tx import get_paused_txs
from cic_eth.eth.task import create_check_gas_and_send_task
from .base import SyncFilter
logg = logging.getLogger()
class GasFilter(SyncFilter):
def __init__(self, gas_provider, queue=None):
self.queue = queue
self.gas_provider = gas_provider
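# A mined tx with a non-zero value whose hash matches a locally queued gas
# refill means the recipient now has gas; resume any of that recipient's
# transactions parked in WAITFORGAS.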
def filter(self, w3, tx, rcpt, chain_str):
logg.debug('applying gas filter')
tx_hash_hex = tx.hash.hex()
if tx['value'] > 0:
logg.debug('gas refill tx {}'.format(tx_hash_hex))
session = SessionBase.create_session()
q = session.query(TxCache.recipient)
q = q.join(Otx)
q = q.filter(Otx.tx_hash==tx_hash_hex)
r = q.first()
session.close()
if r == None:
logg.warning('unsolicited gas refill tx {}'.format(tx_hash_hex))
return
chain_spec = ChainSpec.from_chain_str(chain_str)
txs = get_paused_txs(StatusEnum.WAITFORGAS, r[0], chain_spec.chain_id())
if len(txs) > 0:
logg.info('resuming gas-in-waiting txs for {}: {}'.format(r[0], txs.keys()))
s = create_check_gas_and_send_task(
list(txs.values()),
str(chain_str),
r[0],
0,
tx_hashes_hex=list(txs.keys()),
queue=self.queue,
)
s.apply_async()

View File

@ -0,0 +1,35 @@
# standard imports
import logging
# third-party imports
import celery
from chainlib.eth.address import to_checksum
# local imports
from .base import SyncFilter
logg = logging.getLogger()
account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd' # keccak256(AccountAdded(address,uint256))
class RegistrationFilter(SyncFilter):
def __init__(self, queue=None):
self.queue = queue
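# React to AccountAdded events from the account registry by queueing a token
# gift task for the newly registered address.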
def filter(self, w3, tx, rcpt, chain_spec):
logg.debug('applying registration filter')
registered_address = None
for l in rcpt['logs']:
event_topic_hex = l['topics'][0].hex()
if event_topic_hex == account_registry_add_log_hash:
address_bytes = l.topics[1][32-20:]
address = to_checksum(address_bytes.hex())
logg.debug('request token gift to {}'.format(address))
s = celery.signature(
'cic_eth.eth.account.gift',
[
address,
str(chain_spec),
],
queue=self.queue,
)
s.apply_async()

View File

@ -0,0 +1,38 @@
# standard imports
import logging
# third-party imports
import celery
# local imports
from cic_eth.db.models.otx import Otx
from .base import SyncFilter
logg = logging.getLogger()
class TxFilter(SyncFilter):
def __init__(self, queue):
self.queue = queue
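# Mark locally queued transactions as final when they appear in a confirmed
# block, using the receipt status to pick success or failure.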
def filter(self, w3, tx, rcpt, chain_spec):
logg.debug('applying tx filter')
tx_hash_hex = tx.hash.hex()
otx = Otx.load(tx_hash_hex)
if otx == None:
logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
return None
logg.info('otx found {}'.format(otx.tx_hash))
s = celery.signature(
'cic_eth.queue.tx.set_final_status',
[
tx_hash_hex,
rcpt.blockNumber,
rcpt.status == 0,
],
queue=self.queue,
)
t = s.apply_async()
return t

View File

@ -0,0 +1,207 @@
# standard imports
import os
import sys
import logging
import time
import argparse
import sys
import re
# third-party imports
import confini
import celery
import rlp
import web3
from web3 import HTTPProvider, WebsocketProvider
from cic_registry import CICRegistry
from cic_registry.chain import ChainSpec
from cic_registry import zero_address
from cic_registry.chain import ChainRegistry
from cic_registry.error import UnknownContractError
from cic_bancor.bancor import BancorRegistryClient
# local imports
import cic_eth
from cic_eth.eth import RpcClient
from cic_eth.db import SessionBase
from cic_eth.db import Otx
from cic_eth.db import TxConvertTransfer
from cic_eth.db.models.tx import TxCache
from cic_eth.db.enum import StatusEnum
from cic_eth.db import dsn_from_config
from cic_eth.queue.tx import get_paused_txs
from cic_eth.sync import Syncer
from cic_eth.sync.error import LoopDone
from cic_eth.db.error import UnknownConvertError
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.eth.task import create_check_gas_and_send_task
from cic_eth.sync.backend import SyncerBackend
from cic_eth.eth.token import unpack_transfer
from cic_eth.eth.token import unpack_transferfrom
from cic_eth.eth.account import unpack_gift
from cic_eth.runnable.daemons.filters import (
CallbackFilter,
GasFilter,
TxFilter,
RegistrationFilter,
)
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
config_dir = os.path.join('/usr/local/etc/cic-eth')
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
argparser.add_argument('mode', type=str, help='sync mode: (head|history)', default='head')
args = argparser.parse_args(sys.argv[1:])
if args.v == True:
logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
# override args
args_override = {
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
queue = args.q
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
re_websocket = re.compile('^wss?://')
re_http = re.compile('^https?://')
blockchain_provider = config.get('ETH_PROVIDER')
if re.match(re_websocket, blockchain_provider) != None:
blockchain_provider = WebsocketProvider(blockchain_provider)
elif re.match(re_http, blockchain_provider) != None:
blockchain_provider = HTTPProvider(blockchain_provider)
else:
raise ValueError('unknown provider url {}'.format(blockchain_provider))
def web3_constructor():
w3 = web3.Web3(blockchain_provider)
return (blockchain_provider, w3)
RpcClient.set_constructor(web3_constructor)
c = RpcClient(chain_spec)
CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
chain_registry = ChainRegistry(chain_spec)
CICRegistry.add_chain_registry(chain_registry, True)
declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', interface='Declarator')
dsn = dsn_from_config(config)
SessionBase.connect(dsn)
def main():
global chain_spec, c, queue
if config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER') != None:
CICRegistry.add_role(chain_spec, config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER'), 'AccountRegistry', True)
syncers = []
block_offset = c.w3.eth.blockNumber
chain = str(chain_spec)
if SyncerBackend.first(chain):
from cic_eth.sync.history import HistorySyncer
backend = SyncerBackend.initial(chain, block_offset)
syncer = HistorySyncer(backend)
syncers.append(syncer)
if args.mode == 'head':
from cic_eth.sync.head import HeadSyncer
block_sync = SyncerBackend.live(chain, block_offset+1)
syncers.append(HeadSyncer(block_sync))
elif args.mode == 'history':
from cic_eth.sync.history import HistorySyncer
backends = SyncerBackend.resume(chain, block_offset+1)
for backend in backends:
syncers.append(HistorySyncer(backend))
if len(syncers) == 0:
logg.info('found no unsynced history. terminating')
sys.exit(0)
else:
sys.stderr.write("unknown mode '{}'\n".format(args.mode))
sys.exit(1)
# bancor_registry_contract = CICRegistry.get_contract(chain_spec, 'BancorRegistry', interface='Registry')
# bancor_chain_registry = CICRegistry.get_chain_registry(chain_spec)
# bancor_registry = BancorRegistryClient(c.w3, bancor_chain_registry, config.get('ETH_ABI_DIR'))
# bancor_registry.load()
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
if trusted_addresses_src == None:
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
sys.exit(1)
trusted_addresses = trusted_addresses_src.split(',')
for address in trusted_addresses:
logg.info('using trusted address {}'.format(address))
CallbackFilter.trusted_addresses = trusted_addresses
callback_filters = []
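# each TASKS_TRANSFER_CALLBACKS entry is a celery task path, optionally prefixed with a queue as queue:task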
for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','):
task_split = cb.split(':')
task_queue = queue
if len(task_split) > 1:
task_queue = task_split[0]
callback_filter = CallbackFilter(task_split[1], task_queue)
callback_filters.append(callback_filter)
tx_filter = TxFilter(queue)
registration_filter = RegistrationFilter(queue)
gas_filter = GasFilter(c.gas_provider(), queue)
i = 0
for syncer in syncers:
logg.debug('running syncer index {}'.format(i))
syncer.filter.append(gas_filter.filter)
syncer.filter.append(registration_filter.filter)
# TODO: the following tx filter breaks the filter loop if it returns a uuid. Pro: less code executed. Con: possibly unintuitive flow break
syncer.filter.append(tx_filter.filter)
#syncer.filter.append(convert_filter)
for cf in callback_filters:
syncer.filter.append(cf.filter)
try:
syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')))
except LoopDone as e:
sys.stderr.write("sync '{}' done at block {}\n".format(args.mode, e))
i += 1
sys.exit(0)
if __name__ == '__main__':
main()

View File

@ -220,15 +220,16 @@ def main():
if config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER') != None:
CICRegistry.add_role(chain_spec, config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER'), 'AccountRegistry', True)
if config.get('CIC_DECLARATOR_ADDRESS') != None:
abi_path = os.path.join(config.get('ETH_ABI_DIR'), '{}.json'.format(interface))
f = open(abi_path)
abi = json.load(abi_path)
f.close()
c = w3.eth.contract(abi=abi, address=address)
trusted_addresses = config.get('CIC_TRUSTED_ADDRESSES', []).split(',')
oracle = DeclaratorOracleAdapter(contract, trusted_addresses)
chain_registry.add_oracle(oracle)
declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', interface='Declarator')
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
if trusted_addresses_src == None:
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
sys.exit(1)
trusted_addresses = trusted_addresses_src.split(',')
for address in trusted_addresses:
logg.info('using trusted address {}'.format(address))
oracle = DeclaratorOracleAdapter(declarator.contract, trusted_addresses)
chain_registry.add_oracle('naive_erc20_oracle', oracle)
#chain_spec = CICRegistry.default_chain_spec

View File

@ -1,410 +0,0 @@
# standard imports
import os
import sys
import logging
import time
import argparse
import sys
import re
# third-party imports
import confini
import celery
import rlp
import web3
from web3 import HTTPProvider, WebsocketProvider
from cic_registry import CICRegistry
from cic_registry.chain import ChainSpec
from cic_registry import zero_address
from cic_registry.chain import ChainRegistry
from cic_registry.error import UnknownContractError
from cic_bancor.bancor import BancorRegistryClient
# local imports
import cic_eth
from cic_eth.eth import RpcClient
from cic_eth.db import SessionBase
from cic_eth.db import Otx
from cic_eth.db import TxConvertTransfer
from cic_eth.db.models.tx import TxCache
from cic_eth.db.enum import StatusEnum
from cic_eth.db import dsn_from_config
from cic_eth.queue.tx import get_paused_txs
from cic_eth.sync import Syncer
from cic_eth.sync.error import LoopDone
from cic_eth.db.error import UnknownConvertError
from cic_eth.eth.util import unpack_signed_raw_tx
from cic_eth.eth.task import create_check_gas_and_send_task
from cic_eth.sync.backend import SyncerBackend
from cic_eth.eth.token import unpack_transfer
from cic_eth.eth.token import unpack_transferfrom
from cic_eth.eth.account import unpack_gift
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
config_dir = os.path.join('/usr/local/etc/cic-eth')
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
argparser.add_argument('mode', type=str, help='sync mode: (head|history)', default='head')
args = argparser.parse_args(sys.argv[1:])
if args.v == True:
logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
# override args
args_override = {
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
queue = args.q
dsn = dsn_from_config(config)
SessionBase.connect(dsn)
# TODO: There is too much code in this file, split it up
transfer_callbacks = []
for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','):
task_split = cb.split(':')
task_queue = queue
if len(task_split) > 1:
task_queue = task_split[0]
task_pair = (task_split[1], task_queue)
transfer_callbacks.append(task_pair)
# TODO: move to contract registry
__convert_log_hash = '0x7154b38b5dd31bb3122436a96d4e09aba5b323ae1fd580025fab55074334c095' # keccak256(Conversion(address,address,address,uint256,uint256,address)
__account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd' # keccak256(AccountAdded(address,uint256))
__transfer_method_signature = '0xa9059cbb' # keccak256(transfer(address,uint256))
__transferfrom_method_signature = '0x23b872dd' # keccak256(transferFrom(address,address,uint256))
__giveto_method_signature = '0x63e4bff4' # keccak256(giveTo(address))
# TODO: move to bancor package
def parse_convert_log(w3, entry):
data = entry.data[2:]
from_amount = int(data[:64], 16)
to_amount = int(data[64:128], 16)
holder_address_hex_raw = '0x' + data[-40:]
holder_address_hex = w3.toChecksumAddress(holder_address_hex_raw)
o = {
'from_amount': from_amount,
'to_amount': to_amount,
'holder_address': holder_address_hex
}
logg.debug('parsed convert log {}'.format(o))
return o
def registration_filter(w3, tx, rcpt, chain_spec):
registered_address = None
for l in rcpt['logs']:
event_topic_hex = l['topics'][0].hex()
if event_topic_hex == __account_registry_add_log_hash:
address_bytes = l.topics[1][32-20:]
address = web3.Web3.toChecksumAddress(address_bytes.hex())
logg.debug('request token gift to {}'.format(address))
s = celery.signature(
'cic_eth.eth.account.gift',
[
address,
str(chain_spec),
],
queue=queue,
)
s.apply_async()
def convert_filter(w3, tx, rcpt, chain_spec):
destination_token_address = None
recipient_address = None
amount = 0
for l in rcpt['logs']:
event_topic_hex = l['topics'][0].hex()
if event_topic_hex == __convert_log_hash:
tx_hash_hex = tx['hash'].hex()
try:
convert_transfer = TxConvertTransfer.get(tx_hash_hex)
except UnknownConvertError:
logg.warning('skipping unknown convert tx {}'.format(tx_hash_hex))
continue
if convert_transfer.transfer_tx_hash != None:
logg.warning('convert tx {} cache record already has transfer hash {}, skipping'.format(tx_hash_hex, convert_transfer.transfer_hash))
continue
recipient_address = convert_transfer.recipient_address
logg.debug('found convert event {} recipient'.format(tx_hash_hex, recipient_address))
r = parse_convert_log(l)
destination_token_address = l['topics'][3][-20:]
if destination_token_address == zero_address or destination_token_address == None:
return None
destination_token_address_hex = destination_token_address.hex()
s = celery.signature(
'cic_eth.eth.bancor.transfer_converted',
[
[{
'address': w3.toChecksumAddress(destination_token_address_hex),
}],
r['holder_address'],
recipient_address,
r['to_amount'],
tx_hash_hex,
str(chain_spec),
],
queue=queue,
)
logg.info('sending tx signature {}'.format(s))
t = s.apply_async()
logg.debug('submitted transfer after convert task uuid {} {}'.format(t, t.successful()))
return t
def tx_filter(w3, tx, rcpt, chain_spec):
tx_hash_hex = tx.hash.hex()
otx = Otx.load(tx_hash_hex)
if otx == None:
logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
return None
logg.info('otx found {}'.format(otx.tx_hash))
s = celery.signature(
'cic_eth.queue.tx.set_final_status',
[
tx_hash_hex,
rcpt.blockNumber,
rcpt.status == 0,
],
queue=queue,
)
t = s.apply_async()
return t
# TODO: replace with registry call instead
def get_token_symbol(w3, address):
#token = CICRegistry.get_address(CICRegistry.chain_spec, tx['to'])
logg.warning('token verification missing')
c = w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=address)
return c.functions.symbol().call()
# TODO: replace with registry call instead
def get_token_decimals(w3, address):
#token = CICRegistry.get_address(CICRegistry.chain_spec, tx['to'])
logg.warning('token verification missing')
c = w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=address)
return c.functions.decimals().call()
def callbacks_filter(w3, tx, rcpt, chain_spec):
transfer_data = None
if len(tx.input) < 10:
logg.debug('callbacks filter data length not sufficient for method signature in tx {}, skipping'.format(tx['hash']))
return
logg.debug('checking callbacks filter input {}'.format(tx.input[:10]))
transfer_type = 'transfer'
method_signature = tx.input[:10]
if method_signature == __transfer_method_signature:
transfer_data = unpack_transfer(tx.input)
transfer_data['from'] = tx['from']
transfer_data['token_address'] = tx['to']
elif method_signature == __transferfrom_method_signature:
transfer_type = 'transferfrom'
transfer_data = unpack_transferfrom(tx.input)
transfer_data['token_address'] = tx['to']
elif method_signature == __giveto_method_signature:
transfer_type = 'tokengift'
transfer_data = unpack_gift(tx.input)
for l in rcpt.logs:
if l.topics[0].hex() == '0x45c201a59ac545000ead84f30b2db67da23353aa1d58ac522c48505412143ffa':
transfer_data['amount'] = web3.Web3.toInt(hexstr=l.data)
token_address_bytes = l.topics[2][32-20:]
transfer_data['token_address'] = web3.Web3.toChecksumAddress(token_address_bytes.hex())
transfer_data['from'] = rcpt.to
if transfer_data != None:
for tc in transfer_callbacks:
token_symbol = None
try:
logg.debug('checking token {}'.format(transfer_data['token_address']))
token_symbol = get_token_symbol(w3, transfer_data['token_address'])
token_decimals = get_token_decimals(w3, transfer_data['token_address'])
logg.debug('calling transfer callback {}:{} for tx {}'.format(tc[1], tc[0], tx['hash']))
except UnknownContractError:
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(tc[1], tc[0], transfer_data['to'], tx.hash.hex()))
continue
result = {
'hash': tx.hash.hex(),
'sender': transfer_data['from'],
'recipient': transfer_data['to'],
'source_value': transfer_data['amount'],
'destination_value': transfer_data['amount'],
'source_token': transfer_data['token_address'],
'destination_token': transfer_data['token_address'],
'source_token_symbol': token_symbol,
'destination_token_symbol': token_symbol,
'source_token_decimals': token_decimals,
'destination_token_decimals': token_decimals,
'chain': str(chain_spec),
}
s = celery.signature(
tc[0],
[
result,
transfer_type,
int(rcpt.status == 0),
],
queue=tc[1],
)
s.apply_async()
class GasFilter:
def __init__(self, gas_provider):
self.gas_provider = gas_provider
def filter(self, w3, tx, rcpt, chain_str):
tx_hash_hex = tx.hash.hex()
if tx['value'] > 0:
logg.debug('gas refill tx {}'.format(tx_hash_hex))
session = SessionBase.create_session()
q = session.query(TxCache.recipient)
q = q.join(Otx)
q = q.filter(Otx.tx_hash==tx_hash_hex)
r = q.first()
session.close()
if r == None:
logg.warning('unsolicited gas refill tx {}'.format(tx_hash_hex))
return
chain_spec = ChainSpec.from_chain_str(chain_str)
txs = get_paused_txs(StatusEnum.WAITFORGAS, r[0], chain_spec.chain_id())
if len(txs) > 0:
logg.info('resuming gas-in-waiting txs for {}: {}'.format(r[0], txs.keys()))
s = create_check_gas_and_send_task(
list(txs.values()),
str(chain_str),
r[0],
0,
tx_hashes_hex=list(txs.keys()),
queue=queue,
)
s.apply_async()
re_websocket = re.compile('^wss?://')
re_http = re.compile('^https?://')
blockchain_provider = config.get('ETH_PROVIDER')
if re.match(re_websocket, blockchain_provider) != None:
blockchain_provider = WebsocketProvider(blockchain_provider)
elif re.match(re_http, blockchain_provider) != None:
blockchain_provider = HTTPProvider(blockchain_provider)
else:
raise ValueError('unknown provider url {}'.format(blockchain_provider))
def web3_constructor():
w3 = web3.Web3(blockchain_provider)
return (blockchain_provider, w3)
RpcClient.set_constructor(web3_constructor)
def main():
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
c = RpcClient(chain_spec)
CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
chain_registry = ChainRegistry(chain_spec)
CICRegistry.add_chain_registry(chain_registry)
if config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER') != None:
CICRegistry.add_role(chain_spec, config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER'), 'AccountRegistry', True)
syncers = []
block_offset = c.w3.eth.blockNumber
chain = str(chain_spec)
if SyncerBackend.first(chain):
from cic_eth.sync.history import HistorySyncer
backend = SyncerBackend.initial(chain, block_offset)
syncer = HistorySyncer(backend)
syncers.append(syncer)
if args.mode == 'head':
from cic_eth.sync.head import HeadSyncer
block_sync = SyncerBackend.live(chain, block_offset+1)
syncers.append(HeadSyncer(block_sync))
elif args.mode == 'history':
from cic_eth.sync.history import HistorySyncer
backends = SyncerBackend.resume(chain, block_offset+1)
for backend in backends:
syncers.append(HistorySyncer(backend))
if len(syncers) == 0:
logg.info('found no unsynced history. terminating')
sys.exit(0)
else:
sys.stderr.write("unknown mode '{}'\n".format(args.mode))
sys.exit(1)
# bancor_registry_contract = CICRegistry.get_contract(chain_spec, 'BancorRegistry', interface='Registry')
# bancor_chain_registry = CICRegistry.get_chain_registry(chain_spec)
# bancor_registry = BancorRegistryClient(c.w3, bancor_chain_registry, config.get('ETH_ABI_DIR'))
# bancor_registry.load()
i = 0
for syncer in syncers:
logg.debug('running syncer index {}'.format(i))
gas_filter = GasFilter(c.gas_provider()).filter
syncer.filter.append(gas_filter)
syncer.filter.append(registration_filter)
syncer.filter.append(callbacks_filter)
# TODO: the two following filter functions break the filter loop if return uuid. Pro: less code executed. Con: Possibly unintuitive flow break
syncer.filter.append(tx_filter)
syncer.filter.append(convert_filter)
try:
syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')))
except LoopDone as e:
sys.stderr.write("sync '{}' done at block {}\n".format(args.mode, e))
i += 1
sys.exit(0)
if __name__ == '__main__':
main()

View File

@ -10,7 +10,7 @@ version = (
0,
10,
0,
'alpha.25',
'alpha.26',
)
version_object = semver.VersionInfo(

View File

@ -1,8 +1,5 @@
[cic]
registry_address =
token_index_address =
accounts_index_address =
declarator_address =
approval_escrow_address =
chain_spec =
tx_retry_delay =
trust_address =

View File

@ -1,8 +1,4 @@
[cic]
registry_address =
token_index_address =
accounts_index_address =
declarator_address =
approval_escrow_address =
chain_spec =
trusted_addresses =
trust_address =

View File

@ -2,4 +2,4 @@
. ./db.sh
/usr/local/bin/cic-eth-dispatcher $@
/usr/local/bin/cic-eth-dispatcherd $@

View File

@ -2,4 +2,4 @@
. ./db.sh
/usr/local/bin/cic-eth-manager $@
/usr/local/bin/cic-eth-managerd $@

View File

@ -2,4 +2,4 @@
. ./db.sh
/usr/local/bin/cic-eth-retrier $@
/usr/local/bin/cic-eth-retrierd $@

View File

@ -9,7 +9,7 @@ echo "!!! starting signer"
python /usr/local/bin/crypto-dev-daemon -vv -c /usr/local/etc/crypto-dev-signer &
echo "!!! starting tracker"
/usr/local/bin/cic-eth-tasker $@
/usr/local/bin/cic-eth-taskerd $@
# thanks! https://docs.docker.com/config/containers/multi-service_container/
sleep 1;

View File

@ -2,7 +2,7 @@ web3==5.12.2
celery==4.4.7
crypto-dev-signer~=0.4.13rc2
confini~=0.3.6b1
cic-registry~=0.5.3a10
cic-registry~=0.5.3a18
cic-bancor~=0.0.6
redis==3.5.3
alembic==1.4.2
@ -16,3 +16,7 @@ uWSGI==2.0.19.1
semver==2.13.0
eth-gas-proxy==0.0.1a4
websocket-client==0.57.0
moolb~=0.1.1b2
eth-address-index~=0.1.0a8
chainlib~=0.0.1a12
hexathon~=0.0.1a3

View File

@ -33,7 +33,10 @@ packages =
cic_eth.db.models
cic_eth.queue
cic_eth.sync
cic_eth.ext
cic_eth.runnable
cic_eth.runnable.daemons
cic_eth.runnable.daemons.filters
cic_eth.callbacks
scripts =
./scripts/migrate.py
@ -41,10 +44,10 @@ scripts =
[options.entry_points]
console_scripts =
# daemons
cic-eth-tasker = cic_eth.runnable.tasker:main
cic-eth-manager = cic_eth.runnable.manager:main
cic-eth-dispatcher = cic_eth.runnable.dispatcher:main
cic-eth-retrier = cic_eth.runnable.retry:main
cic-eth-taskerd = cic_eth.runnable.daemons.tasker:main
cic-eth-managerd = cic_eth.runnable.daemons.manager:main
cic-eth-dispatcherd = cic_eth.runnable.daemons.dispatcher:main
cic-eth-retrierd = cic_eth.runnable.daemons.retry:main
# tools
cic-eth-create = cic_eth.runnable.create:main
cic-eth-inspect = cic_eth.runnable.view:main

View File

@ -15,12 +15,15 @@ def celery_includes():
'cic_eth.eth.token',
'cic_eth.eth.request',
'cic_eth.eth.tx',
'cic_eth.ext.tx',
'cic_eth.queue.tx',
'cic_eth.queue.balance',
'cic_eth.admin.ctrl',
'cic_eth.admin.nonce',
'cic_eth.eth.account',
'cic_eth.callbacks.noop',
'cic_eth.callbacks.http',
'tests.mock.filter',
]

View File

@ -1,10 +1,50 @@
# third-party imports
import pytest
# standard imports
import os
import json
import logging
# third-party imports
import pytest
from eth_address_declarator import AddressDeclarator
# local imports
from cic_registry import CICRegistry
from cic_registry import to_identifier
from cic_registry.contract import Contract
from cic_registry.error import ChainExistsError
logg = logging.getLogger()
script_dir = os.path.dirname(__file__)
@pytest.fixture(scope='session')
def local_cic_registry(
cic_registry,
):
path = os.path.realpath(os.path.join(script_dir, 'testdata', 'abi'))
CICRegistry.add_path(path)
return cic_registry
@pytest.fixture(scope='function')
def address_declarator(
bloxberg_config,
default_chain_spec,
default_chain_registry,
local_cic_registry,
init_rpc,
init_w3,
):
c = init_rpc.w3.eth.contract(abi=AddressDeclarator.abi(), bytecode=AddressDeclarator.bytecode())
default_description = '0x{:<064s}'.format(b'test'.hex())
logg.debug('default description {}'.format(default_description))
tx_hash = c.constructor(default_description).transact()
rcpt = init_rpc.w3.eth.getTransactionReceipt(tx_hash)
registry = init_rpc.w3.eth.contract(abi=CICRegistry.abi(), address=local_cic_registry)
chain_identifier = to_identifier(default_chain_registry.chain())
registry.functions.set(to_identifier('AddressDeclarator'), rcpt.contractAddress, chain_identifier, bloxberg_config['digest']).transact()
return rcpt.contractAddress

View File

@ -10,7 +10,11 @@ import web3
# local imports
from cic_eth.api import AdminApi
from cic_eth.db.models.role import AccountRole
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import (
StatusEnum,
StatusBits,
status_str,
)
from cic_eth.error import InitializationError
from cic_eth.eth.task import sign_and_register_tx
from cic_eth.eth.tx import cache_gas_refill_data
@ -64,7 +68,11 @@ def test_resend_inplace(
api = AdminApi(c, queue=None)
t = api.resend(tx_dict['hash'], chain_str, unlock=True)
tx_hash_new_hex = t.get()
t.get()
i = 0
tx_hash_new_hex = None
for r in t.collect():
tx_hash_new_hex = r[1]
assert t.successful()
tx_raw_new = get_tx(tx_hash_new_hex)
@ -74,142 +82,144 @@ def test_resend_inplace(
assert tx_dict_new['gasPrice'] > gas_price_before
tx_dict_after = get_tx(tx_dict['hash'])
assert tx_dict_after['status'] == StatusEnum.OVERRIDDEN
logg.debug('resent tx status {}'.format(status_str(tx_dict_after['status'])))
assert tx_dict_after['status'] & StatusBits.MANUAL
def test_check_fix_nonce(
default_chain_spec,
init_database,
init_eth_account_roles,
init_w3,
eth_empty_accounts,
celery_session_worker,
):
chain_str = str(default_chain_spec)
sigs = []
for i in range(5):
s = celery.signature(
'cic_eth.eth.tx.refill_gas',
[
eth_empty_accounts[i],
chain_str,
],
queue=None,
)
sigs.append(s)
t = celery.group(sigs)()
txs = t.get()
assert t.successful()
tx_hash = web3.Web3.keccak(hexstr=txs[2])
c = RpcClient(default_chain_spec)
api = AdminApi(c, queue=None)
address = init_eth_account_roles['eth_account_gas_provider']
nonce_spec = api.check_nonce(address)
assert nonce_spec['nonce']['network'] == 0
assert nonce_spec['nonce']['queue'] == 4
assert nonce_spec['nonce']['blocking'] == None
s_set = celery.signature(
'cic_eth.queue.tx.set_rejected',
[
tx_hash.hex(),
],
queue=None,
)
t = s_set.apply_async()
t.get()
t.collect()
assert t.successful()
nonce_spec = api.check_nonce(address)
assert nonce_spec['nonce']['blocking'] == 2
assert nonce_spec['tx']['blocking'] == tx_hash.hex()
t = api.fix_nonce(address, nonce_spec['nonce']['blocking'])
t.get()
t.collect()
assert t.successful()
for tx in txs[3:]:
tx_hash = web3.Web3.keccak(hexstr=tx)
tx_dict = get_tx(tx_hash.hex())
assert tx_dict['status'] == StatusEnum.OVERRIDDEN
def test_tag_account(
init_database,
eth_empty_accounts,
init_rpc,
):
api = AdminApi(init_rpc)
api.tag_account('foo', eth_empty_accounts[0])
api.tag_account('bar', eth_empty_accounts[1])
api.tag_account('bar', eth_empty_accounts[2])
assert AccountRole.get_address('foo') == eth_empty_accounts[0]
assert AccountRole.get_address('bar') == eth_empty_accounts[2]
def test_ready(
init_database,
eth_empty_accounts,
init_rpc,
w3,
):
api = AdminApi(init_rpc)
with pytest.raises(InitializationError):
api.ready()
bogus_account = os.urandom(20)
bogus_account_hex = '0x' + bogus_account.hex()
api.tag_account('ETH_GAS_PROVIDER_ADDRESS', web3.Web3.toChecksumAddress(bogus_account_hex))
with pytest.raises(KeyError):
api.ready()
api.tag_account('ETH_GAS_PROVIDER_ADDRESS', eth_empty_accounts[0])
api.ready()
def test_tx(
default_chain_spec,
cic_registry,
init_database,
init_rpc,
init_w3,
celery_session_worker,
):
tx = {
'from': init_w3.eth.accounts[0],
'to': init_w3.eth.accounts[1],
'nonce': 42,
'gas': 21000,
'gasPrice': 1000000,
'value': 128,
'chainId': default_chain_spec.chain_id(),
'data': '',
}
(tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
queue_create(
tx['nonce'],
tx['from'],
tx_hash_hex,
tx_signed_raw_hex,
str(default_chain_spec),
)
tx_recovered = unpack_signed_raw_tx(bytes.fromhex(tx_signed_raw_hex[2:]), default_chain_spec.chain_id())
cache_gas_refill_data(tx_hash_hex, tx_recovered)
api = AdminApi(init_rpc, queue=None)
tx = api.tx(default_chain_spec, tx_hash=tx_hash_hex)
#def test_check_fix_nonce(
# default_chain_spec,
# init_database,
# init_eth_account_roles,
# init_w3,
# eth_empty_accounts,
# celery_session_worker,
# ):
#
# chain_str = str(default_chain_spec)
#
# sigs = []
# for i in range(5):
# s = celery.signature(
# 'cic_eth.eth.tx.refill_gas',
# [
# eth_empty_accounts[i],
# chain_str,
# ],
# queue=None,
# )
# sigs.append(s)
#
# t = celery.group(sigs)()
# txs = t.get()
# assert t.successful()
#
# tx_hash = web3.Web3.keccak(hexstr=txs[2])
# c = RpcClient(default_chain_spec)
# api = AdminApi(c, queue=None)
# address = init_eth_account_roles['eth_account_gas_provider']
# nonce_spec = api.check_nonce(address)
# assert nonce_spec['nonce']['network'] == 0
# assert nonce_spec['nonce']['queue'] == 4
# assert nonce_spec['nonce']['blocking'] == None
#
# s_set = celery.signature(
# 'cic_eth.queue.tx.set_rejected',
# [
# tx_hash.hex(),
# ],
# queue=None,
# )
# t = s_set.apply_async()
# t.get()
# t.collect()
# assert t.successful()
#
#
# nonce_spec = api.check_nonce(address)
# assert nonce_spec['nonce']['blocking'] == 2
# assert nonce_spec['tx']['blocking'] == tx_hash.hex()
#
# t = api.fix_nonce(address, nonce_spec['nonce']['blocking'])
# t.get()
# t.collect()
# assert t.successful()
#
# for tx in txs[3:]:
# tx_hash = web3.Web3.keccak(hexstr=tx)
# tx_dict = get_tx(tx_hash.hex())
# assert tx_dict['status'] == StatusEnum.OVERRIDDEN
#
#
#def test_tag_account(
# init_database,
# eth_empty_accounts,
# init_rpc,
# ):
#
# api = AdminApi(init_rpc)
#
# api.tag_account('foo', eth_empty_accounts[0])
# api.tag_account('bar', eth_empty_accounts[1])
# api.tag_account('bar', eth_empty_accounts[2])
#
# assert AccountRole.get_address('foo') == eth_empty_accounts[0]
# assert AccountRole.get_address('bar') == eth_empty_accounts[2]
#
#
#def test_ready(
# init_database,
# eth_empty_accounts,
# init_rpc,
# w3,
# ):
#
# api = AdminApi(init_rpc)
#
# with pytest.raises(InitializationError):
# api.ready()
#
# bogus_account = os.urandom(20)
# bogus_account_hex = '0x' + bogus_account.hex()
#
# api.tag_account('ETH_GAS_PROVIDER_ADDRESS', web3.Web3.toChecksumAddress(bogus_account_hex))
# with pytest.raises(KeyError):
# api.ready()
#
# api.tag_account('ETH_GAS_PROVIDER_ADDRESS', eth_empty_accounts[0])
# api.ready()
#
#
#def test_tx(
# default_chain_spec,
# cic_registry,
# init_database,
# init_rpc,
# init_w3,
# celery_session_worker,
# ):
#
# tx = {
# 'from': init_w3.eth.accounts[0],
# 'to': init_w3.eth.accounts[1],
# 'nonce': 42,
# 'gas': 21000,
# 'gasPrice': 1000000,
# 'value': 128,
# 'chainId': default_chain_spec.chain_id(),
# 'data': '',
# }
#
# (tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
# queue_create(
# tx['nonce'],
# tx['from'],
# tx_hash_hex,
# tx_signed_raw_hex,
# str(default_chain_spec),
# )
# tx_recovered = unpack_signed_raw_tx(bytes.fromhex(tx_signed_raw_hex[2:]), default_chain_spec.chain_id())
# cache_gas_refill_data(tx_hash_hex, tx_recovered)
#
# api = AdminApi(init_rpc, queue=None)
# tx = api.tx(default_chain_spec, tx_hash=tx_hash_hex)

View File

@ -29,27 +29,6 @@ def test_account_api(
assert t.successful()
def test_balance_api(
default_chain_spec,
default_chain_registry,
init_w3,
cic_registry,
init_database,
bancor_tokens,
bancor_registry,
celery_session_worker,
):
token = CICRegistry.get_address(default_chain_spec, bancor_tokens[0])
api = Api(str(default_chain_spec), callback_param='balance', callback_task='cic_eth.callbacks.noop.noop', queue=None)
t = api.balance(init_w3.eth.accounts[2], token.symbol())
t.get()
for r in t.collect():
print(r)
assert t.successful()
def test_transfer_api(
default_chain_spec,
init_w3,

View File

@ -0,0 +1,40 @@
# standard imports
import os
import logging
# local imports
import web3
from cic_eth.api.api_task import Api
logg = logging.getLogger()
def test_balance_complex_api(
default_chain_spec,
init_database,
init_w3,
cic_registry,
dummy_token,
dummy_token_registered,
celery_session_worker,
init_eth_tester,
):
chain_str = str(default_chain_spec)
api = Api(chain_str, queue=None, callback_param='foo')
a = web3.Web3.toChecksumAddress('0x' + os.urandom(20).hex())
t = api.balance(a, 'DUM')
t.get()
r = None
for c in t.collect():
r = c[1]
assert t.successful()
logg.debug(r)
assert r[0].get('balance_incoming') != None
assert r[0].get('balance_outgoing') != None
assert r[0].get('balance_network') != None
logg.debug('r {}'.format(r))

View File

@ -0,0 +1,92 @@
# standard imports
import logging
# local imports
from cic_eth.api.api_task import Api
from cic_eth.eth.token import TokenTxFactory
from cic_eth.eth.task import sign_tx
from tests.mock.filter import (
block_filter,
tx_filter,
)
logg = logging.getLogger()
def test_list_tx(
default_chain_spec,
default_chain_registry,
init_database,
init_rpc,
init_w3,
init_eth_tester,
dummy_token_gifted,
cic_registry,
celery_session_worker,
):
tx_hashes = []
# external tx
init_eth_tester.mine_blocks(13)
txf = TokenTxFactory(init_w3.eth.accounts[0], init_rpc)
tx = txf.transfer(dummy_token_gifted, init_w3.eth.accounts[1], 3000, default_chain_spec)
(tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
tx_hashes.append(tx_hash_hex)
init_w3.eth.sendRawTransaction(tx_signed_raw_hex)
# add to filter
rcpt = init_w3.eth.getTransactionReceipt(tx_hash_hex)
a = rcpt['blockNumber']
block_filter.add(a.to_bytes(4, 'big'))
a = rcpt['blockNumber'] + rcpt['transactionIndex']
tx_filter.add(a.to_bytes(4, 'big'))
# external tx
init_eth_tester.mine_blocks(28)
txf = TokenTxFactory(init_w3.eth.accounts[0], init_rpc)
tx = txf.transfer(dummy_token_gifted, init_w3.eth.accounts[1], 4000, default_chain_spec)
(tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
tx_hashes.append(tx_hash_hex)
init_w3.eth.sendRawTransaction(tx_signed_raw_hex)
# add to filter
rcpt = init_w3.eth.getTransactionReceipt(tx_hash_hex)
a = rcpt['blockNumber']
block_filter.add(a.to_bytes(4, 'big'))
a = rcpt['blockNumber'] + rcpt['transactionIndex']
tx_filter.add(a.to_bytes(4, 'big'))
# custodial tx
init_eth_tester.mine_blocks(3)
txf = TokenTxFactory(init_w3.eth.accounts[0], init_rpc)
api = Api(str(default_chain_spec), queue=None)
t = api.transfer(init_w3.eth.accounts[0], init_w3.eth.accounts[1], 1000, 'DUM')
t.get()
tx_hash_hex = None
for c in t.collect():
tx_hash_hex = c[1]
assert t.successful()
tx_hashes.append(tx_hash_hex)
# custodial tx
init_eth_tester.mine_blocks(6)
api = Api(str(default_chain_spec), queue=None)
t = api.transfer(init_w3.eth.accounts[0], init_w3.eth.accounts[1], 2000, 'DUM')
t.get()
tx_hash_hex = None
for c in t.collect():
tx_hash_hex = c[1]
assert t.successful()
tx_hashes.append(tx_hash_hex)
# test the api
t = api.list(init_w3.eth.accounts[1], external_task='tests.mock.filter.filter')
r = t.get()
for c in t.collect():
r = c[1]
assert t.successful()
assert len(r) == 4
for tx in r:
logg.debug('have tx {}'.format(r))
tx_hashes.remove(tx['hash'])
assert len(tx_hashes) == 0

View File

@ -0,0 +1 @@
from .filter import *

View File

@ -0,0 +1,22 @@
# third-party imports
import celery
import moolb
celery_app = celery.current_app
block_filter = moolb.Bloom(1024, 3)
tx_filter = moolb.Bloom(1024, 3)
lo = 0
hi = 100
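# Mimics an external transaction list task: returns bloom filters over block
# numbers and block number + tx index, as consumed by api.list(..., external_task=...).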
@celery_app.task()
def filter(address, offset, limit):
return {
'alg': 'sha256',
'high': hi,
'low': lo,
'block_filter': block_filter.to_bytes().hex(),
'blocktx_filter': tx_filter.to_bytes().hex(),
'filter_rounds': 3,
}

View File

@ -0,0 +1,232 @@
# standard imports
import logging
# third-party imports
from cic_registry import CICRegistry
import celery
# local imports
from cic_eth.eth.rpc import RpcClient
from cic_eth.db.models.otx import Otx
from cic_eth.eth.util import unpack_signed_raw_tx
#logg = logging.getLogger(__name__)
logg = logging.getLogger()
def test_balance_complex(
default_chain_spec,
init_database,
init_w3,
cic_registry,
dummy_token_gifted,
celery_session_worker,
init_eth_tester,
):
chain_str = str(default_chain_spec)
token_data = {
'address': dummy_token_gifted,
'converters': [],
}
tx_hashes = []
for i in range(3):
s = celery.signature(
'cic_eth.eth.token.transfer',
[
[token_data],
init_w3.eth.accounts[0],
init_w3.eth.accounts[1],
1000*(i+1),
chain_str,
],
)
t = s.apply_async()
t.get()
r = None
for c in t.collect():
r = c[1]
assert t.successful()
tx_hashes.append(r)
otx = Otx.load(r)
s_send = celery.signature(
'cic_eth.eth.tx.send',
[
[otx.signed_tx],
chain_str,
],
)
t = s_send.apply_async()
t.get()
for r in t.collect():
pass
assert t.successful()
init_eth_tester.mine_block()
# here insert block sync to get state of balance
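# the three balance tasks are chained so a single result dict accumulates
# balance_network, balance_outgoing and balance_incoming for the same address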
s_balance_base = celery.signature(
'cic_eth.eth.token.balance',
[
[token_data],
init_w3.eth.accounts[0],
chain_str,
],
)
s_balance_out = celery.signature(
'cic_eth.queue.balance.balance_outgoing',
[
init_w3.eth.accounts[0],
chain_str,
]
)
s_balance_in = celery.signature(
'cic_eth.queue.balance.balance_incoming',
[
init_w3.eth.accounts[0],
chain_str,
]
)
s_balance_out.link(s_balance_in)
s_balance_base.link(s_balance_out)
t = s_balance_base.apply_async()
t.get()
r = None
for c in t.collect():
r = c[1]
assert t.successful()
assert r[0]['balance_network'] > 0
assert r[0]['balance_incoming'] == 0
assert r[0]['balance_outgoing'] > 0
s_balance_base = celery.signature(
'cic_eth.eth.token.balance',
[
init_w3.eth.accounts[1],
chain_str,
],
)
s_balance_out = celery.signature(
'cic_eth.queue.balance.balance_outgoing',
[
[token_data],
init_w3.eth.accounts[1],
chain_str,
]
)
s_balance_in = celery.signature(
'cic_eth.queue.balance.balance_incoming',
[
init_w3.eth.accounts[1],
chain_str,
]
)
s_balance_base.link(s_balance_in)
s_balance_out.link(s_balance_base)
t = s_balance_out.apply_async()
t.get()
r = None
for c in t.collect():
r = c[1]
assert t.successful()
assert r[0]['balance_network'] > 0
assert r[0]['balance_incoming'] > 0
assert r[0]['balance_outgoing'] == 0
# Set confirmed status in backend
for tx_hash in tx_hashes:
rcpt = init_w3.eth.getTransactionReceipt(tx_hash)
assert rcpt['status'] == 1
otx = Otx.load(tx_hash, session=init_database)
otx.success(block=rcpt['blockNumber'], session=init_database)
init_database.add(otx)
init_database.commit()
s_balance_base = celery.signature(
'cic_eth.eth.token.balance',
[
init_w3.eth.accounts[1],
chain_str,
],
)
s_balance_out = celery.signature(
'cic_eth.queue.balance.balance_outgoing',
[
[token_data],
init_w3.eth.accounts[1],
chain_str,
]
)
s_balance_in = celery.signature(
'cic_eth.queue.balance.balance_incoming',
[
init_w3.eth.accounts[1],
chain_str,
]
)
s_balance_base.link(s_balance_in)
s_balance_out.link(s_balance_base)
t = s_balance_out.apply_async()
t.get()
r = None
for c in t.collect():
r = c[1]
assert t.successful()
assert r[0]['balance_network'] > 0
assert r[0]['balance_incoming'] == 0
assert r[0]['balance_outgoing'] == 0
s_balance_base = celery.signature(
'cic_eth.eth.token.balance',
[
init_w3.eth.accounts[0],
chain_str,
],
)
s_balance_out = celery.signature(
'cic_eth.queue.balance.balance_outgoing',
[
[token_data],
init_w3.eth.accounts[0],
chain_str,
]
)
s_balance_in = celery.signature(
'cic_eth.queue.balance.balance_incoming',
[
init_w3.eth.accounts[0],
chain_str,
]
)
s_balance_base.link(s_balance_in)
s_balance_out.link(s_balance_base)
t = s_balance_out.apply_async()
t.get()
r = None
for c in t.collect():
r = c[1]
assert t.successful()
assert r[0]['balance_network'] > 0
assert r[0]['balance_incoming'] == 0
assert r[0]['balance_outgoing'] == 0

View File

@ -10,7 +10,10 @@ from cic_registry import zero_address
# local imports
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import (
StatusEnum,
StatusBits,
)
logg = logging.getLogger()
@ -169,6 +172,9 @@ def test_status_fubar(
)
t = s.apply_async()
t.get()
for n in t.collect():
pass
assert t.successful()
init_database.refresh(otx)
assert otx.status == StatusEnum.FUBAR
otx = Otx.load(tx_hash)
assert otx.status & StatusBits.UNKNOWN_ERROR

View File

@ -8,7 +8,11 @@ import celery
# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.otx import Otx
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import (
StatusEnum,
StatusBits,
is_error_status,
)
from cic_eth.eth.task import sign_and_register_tx
logg = logging.getLogger()
@ -101,7 +105,7 @@ def test_states_failed(
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
otx.sendfail(session=init_database)
init_database.add(otx)
init_database.commit()
s = celery.signature(
@ -121,5 +125,9 @@ def test_states_failed(
pass
assert t.successful()
init_database.commit()
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
assert otx.status == StatusEnum.RETRY.value
assert otx.status & StatusEnum.RETRY == StatusEnum.RETRY
#assert otx.status & StatusBits.QUEUED
assert is_error_status(otx.status)

View File

@ -0,0 +1 @@
[{"inputs":[{"internalType":"bytes32","name":"_initialDescription","type":"bytes32"}],"stateMutability":"nonpayable","type":"constructor"},{"inputs":[{"internalType":"address","name":"_subject","type":"address"},{"internalType":"bytes32","name":"_proof","type":"bytes32"}],"name":"addDeclaration","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"nonpayable","type":"function"},{"inputs":[{"internalType":"uint256","name":"","type":"uint256"},{"internalType":"uint256","name":"","type":"uint256"}],"name":"contents","outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_declarator","type":"address"},{"internalType":"address","name":"_subject","type":"address"}],"name":"declaration","outputs":[{"internalType":"bytes32[]","name":"","type":"bytes32[]"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_declarator","type":"address"},{"internalType":"uint256","name":"_idx","type":"uint256"}],"name":"declarationAddressAt","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_declarator","type":"address"}],"name":"declarationCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_subject","type":"address"},{"internalType":"uint256","name":"_idx","type":"uint256"}],"name":"declaratorAddressAt","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"address","name":"_subject","type":"address"}],"name":"declaratorCount","outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"owner","outputs":[{"internalType":"address","name":"","type":"address"}],"stateMutability":"view","type":"function"},{"inputs":[{"internalType":"bytes4","name":"interfaceID","type":"bytes4"}],"name":"supportsInterface","outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view","type":"function"},{"inputs":[],"name":"transferOwnership","outputs":[],"stateMutability":"nonpayable","type":"function"}]

View File

@ -24,7 +24,6 @@ class Response:
status = 200
@pytest.mark.skip()
def test_callback_http(
celery_session_worker,
mocker,
@ -43,7 +42,6 @@ def test_callback_http(
t.get()
@pytest.mark.skip()
def test_callback_tcp(
celery_session_worker,
):

View File

@ -0,0 +1,20 @@
from cic_eth.db.enum import (
StatusEnum,
StatusBits,
status_str,
)
def test_status_str():
# String representation for a status in StatusEnum
s = status_str(StatusEnum.REVERTED)
assert s == 'REVERTED'
# String representation for a status not in StatusEnum
s = status_str(StatusBits.LOCAL_ERROR | StatusBits.NODE_ERROR)
assert s == 'LOCAL_ERROR,NODE_ERROR*'
# String representation for a status in StatusEnum, but bits only representation bit set
s = status_str(StatusEnum.REVERTED, bits_only=True)
assert s == 'IN_NETWORK,NETWORK_ERROR,FINAL'

View File

@ -9,7 +9,11 @@ import pytest
from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.otx import OtxStateLog
from cic_eth.db.models.otx import Otx
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import (
StatusEnum,
StatusBits,
is_alive,
)
logg = logging.getLogger()
@ -70,15 +74,24 @@ def test_state_log(
otx = Otx.add(0, address, tx_hash, signed_tx, session=init_database)
otx.waitforgas(session=init_database)
init_database.commit()
otx.readysend(session=init_database)
init_database.commit()
otx.sent(session=init_database)
init_database.commit()
otx.success(1024, session=init_database)
init_database.commit()
q = init_database.query(OtxStateLog)
q = q.filter(OtxStateLog.otx_id==otx.id)
q = q.order_by(OtxStateLog.date.asc())
logs = q.all()
assert logs[0].status == StatusEnum.PENDING
assert logs[1].status == StatusEnum.WAITFORGAS
assert logs[2].status == StatusEnum.SENT
assert logs[3].status == StatusEnum.SUCCESS
assert logs[2].status & StatusBits.QUEUED
assert logs[3].status & StatusBits.IN_NETWORK
assert not is_alive(logs[4].status)

View File

@ -1,55 +0,0 @@
# standard imports
import logging
# third-party imports
import pytest
# local imports
from cic_eth.db import Otx
from cic_eth.db.error import TxStateChangeError
logg = logging.getLogger()
# Check that invalid transitions throw exceptions
# sent
def test_db_queue_states(
init_database,
):
session = init_database
# these values are completely arbitary
tx_hash = '0xF182DFA3AD48723E7E222FE7B4C2C44C23CD4D7FF413E8999DFA15ECE53F'
address = '0x38C5559D6EDDDA1F705D3AB1A664CA1B397EB119'
signed_tx = '0xA5866A5383249AE843546BDA46235A1CA1614F538FB486140693C2EF1956FC53213F6AEF0F99F44D7103871AF3A12B126DCF9BFB7AF11143FAB3ECE2B452EE35D1320C4C7C6F999C8DF4EB09E729715B573F6672ED852547F552C4AE99D17DCD14C810'
o = Otx(
nonce=42,
address=address[2:],
tx_hash=tx_hash[2:],
signed_tx=signed_tx[2:],
)
session.add(o)
session.commit()
o.sent(session=session)
session.commit()
# send after sent is ok
o.sent(session=session)
session.commit()
o.sendfail(session=session)
session.commit()
with pytest.raises(TxStateChangeError):
o.sendfail(session=session)
o.sent(session=session)
session.commit()
o.minefail(1234, session=session)
session.commit()
with pytest.raises(TxStateChangeError):
o.sent(session=session)

View File

@ -0,0 +1,97 @@
# standard imports
import os
# third-party imports
import pytest
# local imports
from cic_eth.db.models.otx import Otx
from cic_eth.db.enum import (
StatusEnum,
StatusBits,
is_alive,
)
@pytest.fixture(scope='function')
def otx(
init_database,
):
bogus_hash = '0x' + os.urandom(32).hex()
bogus_address = '0x' + os.urandom(20).hex()
bogus_tx_raw = '0x' + os.urandom(128).hex()
return Otx(0, bogus_address, bogus_hash, bogus_tx_raw)
def test_status_chain_gas(
init_database,
otx,
):
otx.waitforgas(init_database)
otx.readysend(init_database)
otx.sent(init_database)
otx.success(1024, init_database)
assert not is_alive(otx.status)
def test_status_chain_straight_success(
init_database,
otx,
):
otx.readysend(init_database)
otx.sent(init_database)
otx.success(1024, init_database)
assert not is_alive(otx.status)
def test_status_chain_straight_revert(
init_database,
otx,
):
otx.readysend(init_database)
otx.sent(init_database)
otx.minefail(1024, init_database)
assert not is_alive(otx.status)
def test_status_chain_nodeerror(
init_database,
otx,
):
otx.readysend(init_database)
otx.sendfail(init_database)
otx.retry(init_database)
otx.sent(init_database)
otx.success(1024, init_database)
assert not is_alive(otx.status)
def test_status_chain_nodeerror_multiple(
init_database,
otx,
):
otx.readysend(init_database)
otx.sendfail(init_database)
otx.retry(init_database)
otx.sendfail(init_database)
otx.retry(init_database)
otx.sent(init_database)
otx.success(1024, init_database)
assert not is_alive(otx.status)
def test_status_chain_nodeerror(
init_database,
otx,
):
otx.readysend(init_database)
otx.reject(init_database)
assert not is_alive(otx.status)

View File

@ -59,9 +59,8 @@ def test_set(
assert (tx_stored.recipient == tx_def['to'])
assert (tx_stored.source_token_address == bogus_from_token)
assert (tx_stored.destination_token_address == zero_address)
assert (tx_stored.from_value == '1b1ae4d6e2ef500000')
assert (tx_stored.to_value == '0d8d726b7177a80000')
assert (tx_stored.values() == (tx_def['value'], to_value))
assert (tx_stored.from_value == tx_def['value'])
assert (tx_stored.to_value == to_value)
assert (tx_stored.block_number == 666)
assert (tx_stored.tx_index == 13)

View File

@ -126,4 +126,5 @@ def test_queue_cache_convert(
assert txc.recipient == init_w3.eth.accounts[0]
assert txc.source_token_address == bancor_tokens[0]
assert txc.destination_token_address == bancor_tokens[1]
assert txc.values() == (amount, amount)
assert txc.from_value == amount
assert txc.to_value == amount

View File

@ -0,0 +1,58 @@
# standard imports
import os
import logging
# third-party imports
import web3
from cic_registry import CICRegistry
# local imports
from cic_eth.eth.token import ExtendedTx
logg = logging.getLogger()
def test_extended_token(
default_chain_spec,
dummy_token,
local_cic_registry,
address_declarator,
init_w3,
):
address_foo = web3.Web3.toChecksumAddress('0x' + os.urandom(20).hex())
label_foo = '0x{:<064s}'.format(b'foo'.hex())
address_bar = web3.Web3.toChecksumAddress('0x' + os.urandom(20).hex())
label_bar = '0x{:<064s}'.format(b'bar'.hex())
label_token = '0x{:<064s}'.format(b'toktoktok'.hex())
# TODO: still need to test results with two different tokens
token_contract = init_w3.eth.contract(abi=CICRegistry.abi('ERC20'), address=dummy_token)
token = CICRegistry.add_token(default_chain_spec, token_contract)
declarator = CICRegistry.get_contract(default_chain_spec, 'AddressDeclarator', 'Declarator')
fn = declarator.function('addDeclaration')
fn(address_foo, label_foo).transact({'from': init_w3.eth.accounts[1]})
fn(address_bar, label_bar).transact({'from': init_w3.eth.accounts[1]})
fn(dummy_token, label_token).transact({'from': init_w3.eth.accounts[1]})
tx_hash = '0x' + os.urandom(32).hex()
xtx = ExtendedTx(tx_hash, default_chain_spec)
xtx.set_actors(address_foo, address_bar, [init_w3.eth.accounts[1]])
xtx.set_tokens(dummy_token, 1024)
tx = xtx.to_dict()
logg.debug('tx {}'.format(tx))
assert tx['hash'] == tx_hash
assert tx['source_token'] == dummy_token
assert tx['destination_token'] == dummy_token
assert tx['source_token_symbol'] == token.symbol()
assert tx['destination_token_symbol'] == token.symbol()
assert tx['source_token_value'] == 1024
assert tx['destination_token_value'] == 1024
assert tx['source_token_decimals'] == token.decimals()
assert tx['destination_token_decimals'] == token.decimals()
assert tx['sender'] == address_foo
assert tx['sender_label'] == 'foo'
assert tx['recipient'] == address_bar
assert tx['recipient_label'] == 'bar'
assert tx['chain'] == str(default_chain_spec)
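# The labels above are bytes32 values built by left-padding the hex of the
# ASCII string with zeros. A short, dependency-free illustration of why the
# declarator entry for address_foo is recovered as the sender label 'foo'
# (plain Python, assuming only the padding format used in this test):
encoded_label = '0x{:<064s}'.format(b'foo'.hex())
label_bytes = bytes.fromhex(encoded_label[2:])
assert label_bytes.rstrip(b'\x00').decode('utf-8') == 'foo'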

View File

@ -85,4 +85,5 @@ def test_queue_cache_transfer(
assert txc.recipient == init_w3.eth.accounts[1]
assert txc.source_token_address == bancor_tokens[0]
assert txc.destination_token_address == bancor_tokens[0]
assert txc.values() == (value, value)
assert txc.from_value == value
assert txc.to_value == value

View File

@ -0,0 +1,33 @@
# third-party imports
from eth_address_declarator import AddressDeclarator
from cic_registry import CICRegistry
# local imports
from cic_eth.ext.address import translate_tx_addresses
def test_translate(
default_chain_spec,
address_declarator,
init_rpc,
init_w3,
):
chain_str = str(default_chain_spec)
c = init_rpc.w3.eth.contract(abi=AddressDeclarator.abi(), address=address_declarator)
description = '0x{:<064s}'.format(b'foo'.hex())
c.functions.addDeclaration(init_w3.eth.accounts[2], description).transact({'from': init_w3.eth.accounts[1]})
description = '0x{:<064s}'.format(b'bar'.hex())
c.functions.addDeclaration(init_w3.eth.accounts[3], description).transact({'from': init_w3.eth.accounts[1]})
tx = {
'sender': init_w3.eth.accounts[2],
'sender_label': None,
'recipient': init_w3.eth.accounts[3],
'recipient_label': None,
}
tx = translate_tx_addresses(tx, [init_w3.eth.accounts[1]], chain_str)
assert tx['sender_label'] == 'foo'
assert tx['recipient_label'] == 'bar'

View File

@ -0,0 +1,109 @@
# standard imports
import logging
# third-party imports
import celery
import moolb
# local imports
from cic_eth.eth.token import TokenTxFactory
from cic_eth.eth.task import sign_tx
logg = logging.getLogger()
# TODO: This test fails when not run alone. Identify which fixture leaves a dirty state
def test_filter_process(
init_rpc,
default_chain_spec,
default_chain_registry,
celery_session_worker,
init_eth_tester,
init_w3,
dummy_token_gifted,
cic_registry,
):
b = moolb.Bloom(1024, 3)
t = moolb.Bloom(1024, 3)
tx_hashes = []
# external tx
init_eth_tester.mine_blocks(13)
txf = TokenTxFactory(init_w3.eth.accounts[0], init_rpc)
tx = txf.transfer(dummy_token_gifted, init_w3.eth.accounts[1], 3000, default_chain_spec)
(tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
tx_hashes.append(tx_hash_hex)
init_w3.eth.sendRawTransaction(tx_signed_raw_hex)
# add to filter
rcpt = init_w3.eth.getTransactionReceipt(tx_hash_hex)
a = rcpt['blockNumber']
b.add(a.to_bytes(4, 'big'))
a = rcpt['blockNumber'] + rcpt['transactionIndex']
t.add(a.to_bytes(4, 'big'))
# external tx
init_eth_tester.mine_blocks(28)
txf = TokenTxFactory(init_w3.eth.accounts[0], init_rpc)
tx = txf.transfer(dummy_token_gifted, init_w3.eth.accounts[1], 4000, default_chain_spec)
(tx_hash_hex, tx_signed_raw_hex) = sign_tx(tx, str(default_chain_spec))
tx_hashes.append(tx_hash_hex)
init_w3.eth.sendRawTransaction(tx_signed_raw_hex)
# add to filter
rcpt = init_w3.eth.getTransactionReceipt(tx_hash_hex)
a = rcpt['blockNumber']
b.add(a.to_bytes(4, 'big'))
a = rcpt['blockNumber'] + rcpt['transactionIndex']
t.add(a.to_bytes(4, 'big'))
# init_eth_tester.mine_blocks(13)
# tx_hash_one = init_w3.eth.sendTransaction({
# 'from': init_w3.eth.accounts[2],
# 'to': init_w3.eth.accounts[1],
# 'value': 1024,
# })
# rcpt = init_w3.eth.getTransactionReceipt(tx_hash_one)
# a = rcpt['blockNumber']
# b.add(a.to_bytes(4, 'big'))
# a = rcpt['blockNumber'] + rcpt['transactionIndex']
# t.add(a.to_bytes(4, 'big'))
#
# init_eth_tester.mine_blocks(28)
# tx_hash_two = init_w3.eth.sendTransaction({
# 'from': init_w3.eth.accounts[3],
# 'to': init_w3.eth.accounts[1],
# 'value': 2048,
# })
# rcpt = init_w3.eth.getTransactionReceipt(tx_hash_two)
# a = rcpt['blockNumber']
# b.add(a.to_bytes(4, 'big'))
# a = rcpt['blockNumber'] + rcpt['transactionIndex']
# t.add(a.to_bytes(4, 'big'))
init_eth_tester.mine_blocks(10)
o = {
'alg': 'sha256',
'filter_rounds': 3,
'low': 0,
'high': 50,
'block_filter': b.to_bytes().hex(),
'blocktx_filter': t.to_bytes().hex(),
}
s = celery.signature(
'cic_eth.ext.tx.list_tx_by_bloom',
[
o,
init_w3.eth.accounts[1],
str(default_chain_spec),
],
queue=None
)
t = s.apply_async()
r = t.get()
assert len(r) == 2
for tx_hash in r.keys():
tx_hashes.remove(tx_hash)
assert len(tx_hashes) == 0
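# The test above builds two bloom filters from confirmed transactions: one
# keyed on the block number alone and one keyed on block number plus
# transaction index, so a consumer can first narrow down candidate blocks and
# then candidate positions within them. A condensed sketch of that
# construction, using only the moolb calls exercised above; the 1024-bit /
# 3-round parameters and the 'sha256' algorithm label are taken from this
# test, not from any cic_eth default.
import moolb

block_filter = moolb.Bloom(1024, 3)
blocktx_filter = moolb.Bloom(1024, 3)

def add_tx(block_number, tx_index):
    # record the block, and the (block, index) position of the tx within it
    block_filter.add(block_number.to_bytes(4, 'big'))
    blocktx_filter.add((block_number + tx_index).to_bytes(4, 'big'))

add_tx(13, 0)
add_tx(42, 2)

# serialized form, shaped like the payload passed to cic_eth.ext.tx.list_tx_by_bloom
payload = {
    'alg': 'sha256',
    'filter_rounds': 3,
    'low': 0,
    'high': 50,
    'block_filter': block_filter.to_bytes().hex(),
    'blocktx_filter': blocktx_filter.to_bytes().hex(),
}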

View File

@ -0,0 +1,158 @@
# standard imports
import os
import logging
# third-party imports
import pytest
# local imports
from cic_eth.db.models.otx import Otx
from cic_eth.db.models.tx import TxCache
from cic_eth.queue.balance import (
balance_outgoing,
balance_incoming,
assemble_balances,
)
logg = logging.getLogger()
def test_assemble():
token_foo = '0x' + os.urandom(20).hex()
token_bar = '0x' + os.urandom(20).hex()
b = [
[
{
'address': token_foo,
'converters': [],
'balance_foo': 42,
},
{
'address': token_bar,
'converters': [],
'balance_baz': 666,
},
],
[
{
'address': token_foo,
'converters': [],
'balance_bar': 13,
},
{
'address': token_bar,
'converters': [],
'balance_xyzzy': 1337,
}
]
]
r = assemble_balances(b)
logg.debug('r {}'.format(r))
assert r[0]['address'] == token_foo
assert r[1]['address'] == token_bar
assert r[0].get('balance_foo') != None
assert r[0].get('balance_bar') != None
assert r[1].get('balance_baz') != None
assert r[1].get('balance_xyzzy') != None
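# test_assemble implies that assemble_balances merges several per-token result
# lists into one entry per token address, keeping every balance_* field it
# finds. A rough approximation of that merge (hypothetical helper, not the
# cic_eth.queue.balance implementation):
def merge_balances(balance_lists):
    merged = {}
    order = []
    for balance_list in balance_lists:
        for entry in balance_list:
            address = entry['address']
            if address not in merged:
                merged[address] = {'address': address, 'converters': entry['converters']}
                order.append(address)
            for k, v in entry.items():
                if k.startswith('balance_'):
                    merged[address][k] = v
    return [merged[a] for a in order]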
@pytest.mark.skip()
def test_outgoing_balance(
default_chain_spec,
init_database,
):
chain_str = str(default_chain_spec)
recipient = '0x' + os.urandom(20).hex()
tx_hash = '0x' + os.urandom(32).hex()
signed_tx = '0x' + os.urandom(128).hex()
otx = Otx.add(0, recipient, tx_hash, signed_tx, session=init_database)
init_database.add(otx)
init_database.commit()
token_address = '0x' + os.urandom(20).hex()
sender = '0x' + os.urandom(20).hex()
txc = TxCache(
tx_hash,
sender,
recipient,
token_address,
token_address,
1000,
1000,
)
init_database.add(txc)
init_database.commit()
token_data = {
'address': token_address,
'converters': [],
}
b = balance_outgoing([token_data], sender, chain_str)
assert b[0]['balance_outgoing'] == 1000
otx.sent(session=init_database)
init_database.commit()
b = balance_outgoing([token_data], sender, chain_str)
assert b[0]['balance_outgoing'] == 1000
otx.success(block=1024, session=init_database)
init_database.commit()
b = balance_outgoing([token_data], sender, chain_str)
assert b[0]['balance_outgoing'] == 0
@pytest.mark.skip()
def test_incoming_balance(
default_chain_spec,
init_database,
):
chain_str = str(default_chain_spec)
recipient = '0x' + os.urandom(20).hex()
tx_hash = '0x' + os.urandom(32).hex()
signed_tx = '0x' + os.urandom(128).hex()
otx = Otx.add(0, recipient, tx_hash, signed_tx, session=init_database)
init_database.add(otx)
init_database.commit()
token_address = '0x' + os.urandom(20).hex()
sender = '0x' + os.urandom(20).hex()
txc = TxCache(
tx_hash,
sender,
recipient,
token_address,
token_address,
1000,
1000,
)
init_database.add(txc)
init_database.commit()
token_data = {
'address': token_address,
'converters': [],
}
b = balance_incoming([token_data], recipient, chain_str)
assert b[0]['balance_incoming'] == 0
otx.sent(session=init_database)
init_database.commit()
b = balance_incoming([token_data], recipient, chain_str)
assert b[0]['balance_incoming'] == 1000
otx.success(block=1024, session=init_database)
init_database.commit()
b = balance_incoming([token_data], recipient, chain_str)
assert b[0]['balance_incoming'] == 0
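# The two skipped tests encode the pending-balance accounting rule: an
# outgoing amount is reserved from the moment the tx is queued and released
# once it is final, while an incoming amount is only credited after the tx has
# actually been sent, and drops out again once it is final (the on-chain
# balance covers it from then on). A compact sketch of that rule, using
# hypothetical helpers rather than the cic_eth.queue.balance implementation:
def pending_outgoing(txs):
    # txs: iterable of dicts with 'value', 'sent', 'final' flags
    return sum(tx['value'] for tx in txs if not tx['final'])

def pending_incoming(txs):
    return sum(tx['value'] for tx in txs if tx['sent'] and not tx['final'])

txs = [{'value': 1000, 'sent': False, 'final': False}]
assert pending_outgoing(txs) == 1000
assert pending_incoming(txs) == 0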

View File

@ -16,8 +16,14 @@ from cic_eth.db.models.otx import OtxSync
from cic_eth.db.models.tx import TxCache
from cic_eth.db.models.lock import Lock
from cic_eth.db.models.base import SessionBase
from cic_eth.db.enum import StatusEnum
from cic_eth.db.enum import LockEnum
from cic_eth.db.enum import (
StatusEnum,
LockEnum,
StatusBits,
is_alive,
is_error_status,
status_str,
)
from cic_eth.queue.tx import create as queue_create
from cic_eth.queue.tx import set_final_status
from cic_eth.queue.tx import set_sent_status
@ -63,13 +69,14 @@ def test_finalize(
set_sent_status(tx_hash.hex())
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[0]).first()
assert otx.status == StatusEnum.OBSOLETED
assert otx.status & StatusBits.OBSOLETE
assert not is_alive(otx.status)
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[1]).first()
assert otx.status == StatusEnum.OBSOLETED
assert otx.status & StatusBits.OBSOLETE
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[2]).first()
assert otx.status == StatusEnum.OBSOLETED
assert otx.status & StatusBits.OBSOLETE
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[3]).first()
assert otx.status == StatusEnum.PENDING
@ -82,19 +89,22 @@ def test_finalize(
set_final_status(tx_hashes[3], 1024)
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[0]).first()
assert otx.status == StatusEnum.CANCELLED
assert otx.status & (StatusBits.OBSOLETE | StatusBits.FINAL)
assert not is_alive(otx.status)
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[1]).first()
assert otx.status == StatusEnum.CANCELLED
assert otx.status & (StatusBits.OBSOLETE | StatusBits.FINAL)
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[2]).first()
assert otx.status == StatusEnum.CANCELLED
assert otx.status & (StatusBits.OBSOLETE | StatusBits.FINAL)
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[3]).first()
assert otx.status == StatusEnum.SUCCESS
assert otx.status & (StatusBits.IN_NETWORK | StatusBits.FINAL)
assert not is_error_status(otx.status)
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hashes[4]).first()
assert otx.status == StatusEnum.SENT
assert otx.status & (StatusBits.IN_NETWORK | StatusBits.FINAL)
assert not is_error_status(otx.status)
def test_expired(
@ -404,7 +414,7 @@ def test_obsoletion(
session = SessionBase.create_session()
q = session.query(Otx)
q = q.filter(Otx.status==StatusEnum.OBSOLETED)
q = q.filter(Otx.status.op('&')(StatusEnum.OBSOLETED.value)==StatusEnum.OBSOLETED.value)
z = 0
for o in q.all():
z += o.nonce
@ -416,13 +426,13 @@ def test_obsoletion(
session = SessionBase.create_session()
q = session.query(Otx)
q = q.filter(Otx.status==StatusEnum.OBSOLETED)
q = q.filter(Otx.status.op('&')(StatusEnum.CANCELLED.value)==StatusEnum.OBSOLETED.value)
zo = 0
for o in q.all():
zo += o.nonce
q = session.query(Otx)
q = q.filter(Otx.status==StatusEnum.CANCELLED)
q = q.filter(Otx.status.op('&')(StatusEnum.CANCELLED.value)==StatusEnum.CANCELLED.value)
zc = 0
for o in q.all():
zc += o.nonce
@ -450,16 +460,20 @@ def test_retry(
q = q.filter(Otx.tx_hash==tx_hash)
otx = q.first()
assert otx.status == StatusEnum.RETRY
assert (otx.status & StatusEnum.RETRY.value) == StatusEnum.RETRY.value
assert is_error_status(otx.status)
set_sent_status(tx_hash, False)
set_ready(tx_hash)
init_database.commit()
q = init_database.query(Otx)
q = q.filter(Otx.tx_hash==tx_hash)
otx = q.first()
assert otx.status == StatusEnum.RETRY
assert (otx.status & StatusEnum.RETRY.value) == StatusBits.QUEUED.value
assert not is_error_status(otx.status)
def test_get_account_tx(
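# With statuses stored as bit fields, equality filters such as
# Otx.status == StatusEnum.OBSOLETED no longer match rows that carry extra
# bits, which is why the obsoletion tests above switch to bitwise-AND filters.
# A small sketch of the SQLAlchemy pattern used (hypothetical helper; Otx and
# StatusEnum are the imports at the top of this test module, and `mask` would
# typically be e.g. StatusEnum.CANCELLED.value):
def query_by_status_bits(session, mask):
    # returns every Otx row where all bits in `mask` are set on the status
    q = session.query(Otx)
    q = q.filter(Otx.status.op('&')(mask) == mask)
    return q.all()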

View File

@ -120,7 +120,7 @@ ARG cryptocurrency_cli_tools_version=0.0.4
RUN pip install --extra-index-url $pip_extra_index_url cryptocurrency-cli-tools==$cryptocurrency_cli_tools_version
RUN echo Install smart contract interface implementations, least frequently changed first
ARG giftable_erc20_token_version=0.0.7b7
ARG giftable_erc20_token_version=0.0.7b10
RUN pip install --extra-index-url $pip_extra_index_url giftable-erc20-token==$giftable_erc20_token_version
ARG eth_accounts_index_version=0.0.10a6
@ -129,14 +129,17 @@ RUN pip install --extra-index-url $pip_extra_index_url eth-accounts-index==$eth_
ARG erc20_approval_escrow_version=0.3.0a4
RUN pip install --extra-index-url $pip_extra_index_url erc20-approval-escrow==$erc20_approval_escrow_version
ARG erc20_single_shot_faucet_version=0.2.0a5
RUN pip install --extra-index-url $pip_extra_index_url erc20-single-shot-faucet==$erc20_single_shot_faucet_version
#ARG erc20_single_shot_faucet_version=0.2.0a5
#RUN pip install --extra-index-url $pip_extra_index_url erc20-single-shot-faucet==$erc20_single_shot_faucet_version
ARG sarafu_faucet_version=0.0.1a10
RUN pip install --extra-index-url $pip_extra_index_url sarafu-faucet==$sarafu_faucet_version
ARG eth_address_index_version=0.1.0a8
RUN pip install --extra-index-url $pip_extra_index_url eth-address-index==$eth_address_index_version
RUN echo Install cic specific python packages
ARG cic_registry_version=0.5.3a11
ARG cic_registry_version=0.5.3a18
RUN pip install --extra-index-url $pip_extra_index_url cic-registry==$cic_registry_version
RUN echo Install misc helpers
@ -147,6 +150,9 @@ RUN pip install --extra-index-url $pip_extra_index_url crypto-dev-signer==$cryp
ARG eth_gas_proxy_version=0.0.1a4
RUN pip install --extra-index-url $pip_extra_index_url eth-gas-proxy==$eth_gas_proxy_version
ARG cic_contracts_version=0.0.2a2
RUN pip install --extra-index-url $pip_extra_index_url cic-contracts==$cic_contracts_version
WORKDIR /root
COPY contract-migration/testdata/pgp testdata/pgp

View File

@ -29,7 +29,7 @@ if [[ -n "${ETH_PROVIDER}" ]]; then
echo "waiting for ${ETH_PROVIDER}..."
./wait-for-it.sh "${ETH_PROVIDER_HOST}:${ETH_PROVIDER_PORT}"
DEV_ETH_RESERVE_ADDRESS=`giftable-token-deploy -p $ETH_PROVIDER -y $keystore_file -i $CIC_CHAIN_SPEC --account $DEV_ETH_ACCOUNT_RESERVE_MINTER --minter $DEV_ETH_ACCOUNT_RESERVE_MINTER -v -w --name "Sarafu" --symbol "SRF" $DEV_ETH_RESERVE_AMOUNT`
DEV_ETH_RESERVE_ADDRESS=`giftable-token-deploy -p $ETH_PROVIDER -y $keystore_file -i $CIC_CHAIN_SPEC --account $DEV_ETH_ACCOUNT_RESERVE_MINTER --minter $DEV_ETH_ACCOUNT_RESERVE_MINTER --minter $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER -v -w --name "Sarafu" --symbol "SRF" --decimals 6 $DEV_ETH_RESERVE_AMOUNT`
#BANCOR_REGISTRY_ADDRESS=`cic-bancor-deploy --bancor-dir /usr/local/share/cic/bancor -z $DEV_ETH_RESERVE_ADDRESS -p $ETH_PROVIDER -o $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER`
@ -44,6 +44,7 @@ if [[ -n "${ETH_PROVIDER}" ]]; then
>&2 echo "deploy address declarator contract"
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
CIC_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -v $declarator_description`
cic-registry-set -y $keystore_file -r $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -k AddressDeclarator -p $ETH_PROVIDER $CIC_DECLARATOR_ADDRESS -vv
else
echo "\$ETH_PROVIDER not set!"
@ -59,6 +60,7 @@ export DEV_ETH_RESERVE_AMOUNT=$DEV_ETH_RESERVE_AMOUNT
export DEV_ETH_ACCOUNTS_INDEX_ADDRESS=$CIC_ACCOUNTS_INDEX_ADDRESS
export BANCOR_REGISTRY_ADDRESS=$BANCOR_REGISTRY_ADDRESS
export CIC_REGISTRY_ADDRESS=$CIC_REGISTRY_ADDRESS
export CIC_TRUST_ADDRESS=$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
EOF

View File

@ -14,7 +14,6 @@ DEV_DATABASE_NAME_CIC_ETH=${DEV_DATABASE_NAME_CIC_ETH:-"cic-eth"}
CIC_DATA_DIR=${CIC_DATA_DIR:-/tmp/cic}
# Debug flag
#debug='-v'
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
keystore_file=./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c
debug='-vv'
@ -31,7 +30,7 @@ truncate $env_out_file -s 0
set -e
set -a
pip install --extra-index-url $DEV_PIP_EXTRA_INDEX_URL cic-eth==0.10.0a25 chainlib==0.0.1a4
pip install --extra-index-url $DEV_PIP_EXTRA_INDEX_URL cic-eth==0.10.0a25 chainlib==0.0.1a11
>&2 echo "create account for gas gifter"
old_gas_provider=$DEV_ETH_ACCOUNT_GAS_PROVIDER
@ -49,12 +48,12 @@ DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=`cic-eth-create $debug --redis-host
echo DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=$DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER >> $env_out_file
cic-eth-tag TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER
>&2 echo "create account for faucet owner"
DEV_ETH_ACCOUNT_FAUCET_OWNER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_FAUCET_OWNER >> $env_out_file
cic-eth-tag FAUCET_GIFTER $DEV_ETH_ACCOUNT_FAUCET_OWNER
#>&2 echo "create account for faucet owner"
#DEV_ETH_ACCOUNT_FAUCET_OWNER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
#echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_FAUCET_OWNER >> $env_out_file
#cic-eth-tag FAUCET_GIFTER $DEV_ETH_ACCOUNT_FAUCET_OWNER
>&2 echo "create account for accounts index owner"
>&2 echo "create account for accounts index writer"
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=`cic-eth-create $debug --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=$DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER >> $env_out_file
cic-eth-tag ACCOUNTS_INDEX_WRITER $DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER
@ -100,7 +99,7 @@ export CIC_TRANSFER_AUTHORIZATION_ADDRESS=$CIC_TRANSFER_AUTHORIZATION_ADDRESS
# Deploy one-time token faucet for newly created token
>&2 echo "deploy faucet"
DEV_ETH_SARAFU_FAUCET_ADDRESS=`erc20-single-shot-faucet-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_ETH_SARAFU_TOKEN_ADDRESS --editor $DEV_ETH_ACCOUNT_FAUCET_OWNER --set-amount $faucet_amount -w $debug`
DEV_ETH_SARAFU_FAUCET_ADDRESS=`sarafu-faucet-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_ETH_SARAFU_TOKEN_ADDRESS --editor $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER --set-amount $faucet_amount --accounts-index-address $DEV_ETH_ACCOUNTS_INDEX_ADDRESS -w $debug`
echo DEV_ETH_SARAFU_FAUCET_ADDRESS=$DEV_ETH_SARAFU_FAUCET_ADDRESS >> $env_out_file
export DEV_ETH_SARAFU_FAUCET_ADDRESS=$DEV_ETH_SARAFU_FAUCET_ADDRESS
@ -112,6 +111,8 @@ export DEV_ETH_SARAFU_FAUCET_ADDRESS=$DEV_ETH_SARAFU_FAUCET_ADDRESS
>&2 echo "register faucet contract in registry"
>&2 cic-registry-set -y $keystore_file -r $CIC_REGISTRY_ADDRESS -k Faucet -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_SARAFU_FAUCET_ADDRESS
>&2 echo "add faucet contract as token minter"
>&2 giftable-token-add -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug --token-address $DEV_ETH_SARAFU_TOKEN_ADDRESS $DEV_ETH_SARAFU_FAUCET_ADDRESS
>&2 echo "deploy token symbol index contract"
CIC_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug`
@ -131,7 +132,7 @@ token_description_two=0x54686973206973207468652053617261667520746f6b656e00000000
# Register address declarator
>&2 echo "registry address declarator to registry"
>&2 echo "add address declarator to registry"
>&2 cic-registry-set -y $keystore_file -r $CIC_REGISTRY_ADDRESS -k AddressDeclarator -i $CIC_CHAIN_SPEC -w -p $ETH_PROVIDER $CIC_DECLARATOR_ADDRESS
# We're done with the registry at this point, seal it off

View File

@ -41,3 +41,5 @@ yaml-acl==0.0.1
rlp==2.0.1
cryptocurrency-cli-tools==0.0.4
giftable-erc20-token==0.0.7b7
hexathon==0.0.1a3
chainlib==0.0.1a12

View File

@ -117,9 +117,9 @@ services:
CELERY_RESULT_URL: ${CELERY_RESULT_URL:-redis://redis:6379}
DEV_PIP_EXTRA_INDEX_URL: ${DEV_PIP_EXTRA_INDEX_URL:-https://pip.grassrootseconomics.net:8433}
command: ["./seed_cic_eth.sh"]
deploy:
restart_policy:
condition: on-failure
# deploy:
#restart_policy:
# condition: on-failure
depends_on:
- eth
- postgres
@ -129,56 +129,71 @@ services:
- contract-config:/tmp/cic/config
# cic-cache-tracker:
# # image: registry.gitlab.com/grassrootseconomics/cic-cache:master-latest
# build: apps/cic-cache
# environment:
# CIC_REGISTRY_ADDRESS: $CIC_REGISTRY_ADDRESS # supplied at contract-config after contract provisioning
# ETH_PROVIDER: ${ETH_PROVIDER:-http://eth:8545}
# BANCOR_DIR: ${BANCOR_DIR:-/usr/local/share/cic/bancor}
# DATABASE_USER: ${DATABASE_USER:-grassroots}
# DATABASE_PASSWORD: ${DATABASE_PASSWORD:-tralala} # this is set at initdb see: postgres/initdb/create_db.sql
# DATABASE_HOST: ${DATABASE_HOST:-postgres}
# DATABASE_PORT: ${DATABASE_PORT:-5432}
# DATABASE_NAME: ${DATABASE_NAME_CIC_CACHE:-cic_cache}
# DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
# DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
# ETH_ABI_DIR: ${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
# deploy:
# restart_policy:
# condition: on-failure
# depends_on:
# - postgres
# - eth
# command:
# - /bin/sh
# - -c
# - |
# if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
# /usr/local/bin/cic-cache-tracker -vv
# volumes:
# - contract-config:/tmp/cic/config/:ro
# entrypoint: ["/usr/local/bin/cic-cache-tracker", "-vv"]
# command: "/usr/local/bin/cic-cache-tracker -vv"
cic-cache-tracker:
build:
context: apps/cic-cache/
dockerfile: docker/Dockerfile
environment:
CIC_REGISTRY_ADDRESS: $CIC_REGISTRY_ADDRESS # supplied at contract-config after contract provisioning
ETH_PROVIDER: ${ETH_PROVIDER:-http://eth:8545}
DATABASE_USER: ${DATABASE_USER:-grassroots}
DATABASE_PASSWORD: ${DATABASE_PASSWORD:-tralala} # this is set at initdb see: postgres/initdb/create_db.sql
DATABASE_HOST: ${DATABASE_HOST:-postgres}
DATABASE_PORT: ${DATABASE_PORT:-5432}
DATABASE_NAME: ${DATABASE_NAME_CIC_CACHE:-cic_cache}
DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
DATABASE_DEBUG: 1
ETH_ABI_DIR: ${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
CIC_TRUST_ADDRESS: ${DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER:-0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C}
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-Bloxberg:8996}
CELERY_BROKER_URL: redis://redis:6379
CELERY_RESULT_URL: redis://redis:6379
deploy:
restart_policy:
condition: on-failure
depends_on:
- redis
- postgres
- eth
command:
- /bin/bash
- -c
- |
if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
/usr/local/bin/cic-cache-tracker -vv
volumes:
- contract-config:/tmp/cic/config/:ro
# cic-cache-server:
# image: grassrootseconomics:cic-cache-uwsgi
# environment:
# DATABASE_USER: $DATABASE_USER
# DATABASE_HOST: $DATABASE_HOST
# DATABASE_PORT: $DATABASE_PORT
# DATABASE_PASSWORD: $DATABASE_PASSWORD
# DATABASE_NAME: $DATABASE_NAME_CIC_CACHE
# PGPASSWORD: $DATABASE_PASSWORD
# SERVER_PORT: 80
# ports:
# - ${HTTP_PORT_CIC_CACHE}:80
# depends_on:
# - postgres
# deploy:
# restart_policy:
# condition: on-failure
# command: "/root/start_uwsgi.sh"
cic-cache-server:
build:
context: apps/cic-cache/
dockerfile: docker/Dockerfile
environment:
DATABASE_USER: $DATABASE_USER
DATABASE_HOST: $DATABASE_HOST
DATABASE_PORT: $DATABASE_PORT
DATABASE_PASSWORD: $DATABASE_PASSWORD
DATABASE_NAME: $DATABASE_NAME_CIC_CACHE
DATABASE_DEBUG: 1
PGPASSWORD: $DATABASE_PASSWORD
SERVER_PORT: 8000
ports:
- ${HTTP_PORT_CIC_CACHE:-63313}:8000
depends_on:
- postgres
deploy:
restart_policy:
condition: on-failure
command:
- /bin/bash
- -c
- |
if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
"/usr/local/bin/uwsgi" \
--wsgi-file /usr/src/cic-cache/cic_cache/runnable/server.py \
--http :8000 \
--pyargv -vv
cic-eth-tasker:
# image: grassrootseconomics:cic-eth-service
@ -373,42 +388,42 @@ services:
# command: "/root/start_retry.sh -q cic-eth -vv"
cic-eth-server:
build:
context: apps/
dockerfile: cic-eth/docker/Dockerfile
environment:
CIC_CHAIN_SPEC: $CIC_CHAIN_SPEC
CELERY_BROKER_URL: $CELERY_BROKER_URL
CELERY_RESULT_URL: $CELERY_RESULT_URL
SERVER_PORT: 8000
depends_on:
- eth
- postgres
- redis
ports:
- ${HTTP_PORT_CIC_ETH:-63314}:8000
deploy:
restart_policy:
condition: on-failure
volumes:
- contract-config:/tmp/cic/config/:ro
command:
- /bin/bash
- -c
- |
if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
"/usr/local/bin/uwsgi" \
--wsgi-file /usr/src/cic-eth/cic_eth/runnable/server_agent.py \
--http :80 \
--pyargv -vv
# entrypoint:
# - "/usr/local/bin/uwsgi"
# - "--wsgi-file"
# - "/usr/src/cic-eth/cic_eth/runnable/server_agent.py"
# - "--http"
# - ":80"
# command: "--pyargv -vv"
# cic-eth-server:
# build:
# context: apps/
# dockerfile: cic-eth/docker/Dockerfile
# environment:
# CIC_CHAIN_SPEC: $CIC_CHAIN_SPEC
# CELERY_BROKER_URL: $CELERY_BROKER_URL
# CELERY_RESULT_URL: $CELERY_RESULT_URL
# SERVER_PORT: 8000
# depends_on:
# - eth
# - postgres
# - redis
# ports:
# - ${HTTP_PORT_CIC_ETH:-63314}:8000
# deploy:
# restart_policy:
# condition: on-failure
# volumes:
# - contract-config:/tmp/cic/config/:ro
# command:
# - /bin/bash
# - -c
# - |
# if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
# "/usr/local/bin/uwsgi" \
# --wsgi-file /usr/src/cic-eth/cic_eth/runnable/server_agent.py \
# --http :80 \
# --pyargv -vv
## entrypoint:
## - "/usr/local/bin/uwsgi"
## - "--wsgi-file"
## - "/usr/src/cic-eth/cic_eth/runnable/server_agent.py"
## - "--http"
## - ":80"
# # command: "--pyargv -vv"