Merge remote-tracking branch 'origin/master' into bvander/integration-tests-on-docker

Blair Vanderlugt 2021-08-18 11:40:15 -04:00
commit f738689096
145 changed files with 1817 additions and 917 deletions

View File

@@ -4,3 +4,4 @@ omit =
 scripts/*
 cic_cache/db/migrations/*
 cic_cache/version.py
+cic_cache/cli

View File

@@ -0,0 +1 @@
include *requirements.txt cic_cache/data/config/*

View File

@@ -55,15 +55,37 @@ class Api:
                 queue=callback_queue,
                 )

-    def list(self, offset, limit, address=None):
+    def list(self, offset=0, limit=100, address=None, oldest=False):
         s = celery.signature(
                 'cic_cache.tasks.tx.tx_filter',
                 [
-                    0,
-                    100,
+                    offset,
+                    limit,
                     address,
+                    oldest,
                 ],
-                queue=None
+                queue=self.queue,
                 )
+        if self.callback_param != None:
+            s.link(self.callback_success).on_error(self.callback_error)
+        t = s.apply_async()
+        return t
+
+
+    def list_content(self, offset=0, limit=100, address=None, block_offset=None, block_limit=None, oldest=False):
+        s = celery.signature(
+                'cic_cache.tasks.tx.tx_filter_content',
+                [
+                    offset,
+                    limit,
+                    address,
+                    block_offset,
+                    block_limit,
+                    oldest,
+                ],
+                queue=self.queue,
+                )
         if self.callback_param != None:
             s.link(self.callback_success).on_error(self.callback_error)
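
For orientation, a minimal sketch of how the reworked list call might be driven from client code. This is an assumption-laden example, not part of the commit: it presumes a running broker, a cic-cache tasker consuming the 'cic-cache' queue, and that Api can be constructed with defaults for its callback parameters.

    # Hypothetical client-side call; queue name and address are placeholders.
    from cic_cache.api import Api

    api = Api(queue='cic-cache')
    t = api.list(offset=0, limit=50, address='0xdeadbeef', oldest=True)
    print(t.get())  # blocks until the tx_filter task has run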

View File

@@ -10,12 +10,16 @@ from cic_cache.db.list import (
     list_transactions_mined,
     list_transactions_account_mined,
     list_transactions_mined_with_data,
+    list_transactions_mined_with_data_index,
+    list_transactions_account_mined_with_data_index,
+    list_transactions_account_mined_with_data,
     )

 logg = logging.getLogger()

 DEFAULT_FILTER_SIZE = 8192 * 8
+DEFAULT_LIMIT = 100

 class Cache:
@@ -32,7 +36,7 @@ class BloomCache(Cache):
         return n

-    def load_transactions(self, offset, limit):
+    def load_transactions(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
         """Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.

         Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.
@@ -49,7 +53,7 @@ class BloomCache(Cache):
         :return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
         :rtype: tuple
         """
-        rows = list_transactions_mined(self.session, offset, limit)
+        rows = list_transactions_mined(self.session, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)

         f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
         f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
@@ -59,6 +63,11 @@ class BloomCache(Cache):
             if highest_block == -1:
                 highest_block = r[0]
                 lowest_block = r[0]
+            else:
+                if oldest:
+                    highest_block = r[0]
+                else:
+                    lowest_block = r[0]
             block = r[0].to_bytes(4, byteorder='big')
             tx = r[1].to_bytes(4, byteorder='big')
             f_block.add(block)
@@ -67,7 +76,7 @@ class BloomCache(Cache):

         return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)

-    def load_transactions_account(self, address, offset, limit):
+    def load_transactions_account(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
         """Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.

         :param address: Address to retrieve transactions for.
@@ -79,7 +88,7 @@ class BloomCache(Cache):
         :return: Lowest block, bloom filter for blocks, bloom filter for blocks|tx
         :rtype: tuple
         """
-        rows = list_transactions_account_mined(self.session, address, offset, limit)
+        rows = list_transactions_account_mined(self.session, address, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)

         f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
         f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
@@ -89,6 +98,11 @@ class BloomCache(Cache):
             if highest_block == -1:
                 highest_block = r[0]
                 lowest_block = r[0]
+            else:
+                if oldest:
+                    highest_block = r[0]
+                else:
+                    lowest_block = r[0]
             block = r[0].to_bytes(4, byteorder='big')
             tx = r[1].to_bytes(4, byteorder='big')
             f_block.add(block)
@@ -99,8 +113,21 @@ class BloomCache(Cache):

 class DataCache(Cache):

-    def load_transactions_with_data(self, offset, end):
-        rows = list_transactions_mined_with_data(self.session, offset, end)
+    def load_transactions_with_data(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
+        if limit == 0:
+            limit = DEFAULT_LIMIT
+        rows = list_transactions_mined_with_data(self.session, offset, limit, block_offset, block_limit, oldest=oldest)
+        return self.__process_rows(rows, oldest)
+
+    def load_transactions_account_with_data(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
+        if limit == 0:
+            limit = DEFAULT_LIMIT
+        rows = list_transactions_account_mined_with_data(self.session, address, offset, limit, block_offset, block_limit, oldest=oldest)
+        return self.__process_rows(rows, oldest)
+
+    def __process_rows(self, rows, oldest):
         tx_cache = []
         highest_block = -1;
         lowest_block = -1;
@@ -109,6 +136,11 @@ class DataCache(Cache):
             if highest_block == -1:
                 highest_block = r['block_number']
                 lowest_block = r['block_number']
+            else:
+                if oldest:
+                    highest_block = r['block_number']
+                else:
+                    lowest_block = r['block_number']
             tx_type = 'unknown'

             if r['value'] != None:
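
The lowest/highest bookkeeping above is easy to misread, so here is the same range-tracking logic in isolation, as a standalone sketch: the first row seeds both bounds, and each later row only advances the trailing bound, because rows arrive pre-sorted (newest first by default, oldest first when oldest is set).

    def track_range(block_numbers, oldest=False):
        # Mirrors the loop above: the first element fixes one end of the
        # range, the last element seen fixes the other.
        highest_block = -1
        lowest_block = -1
        for n in block_numbers:
            if highest_block == -1:
                highest_block = n
                lowest_block = n
            else:
                if oldest:
                    highest_block = n
                else:
                    lowest_block = n
        return (lowest_block, highest_block)

    assert track_range([12, 11, 10]) == (10, 12)                # DESC input
    assert track_range([10, 11, 12], oldest=True) == (10, 12)   # ASC input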

View File

@@ -0,0 +1,15 @@
# local imports
from .base import *
from .chain import (
    EthChainInterface,
    chain_interface,
    )
from .rpc import RPC
from .arg import ArgumentParser
from .config import Config
from .celery import CeleryApp
from .registry import (
    connect_registry,
    connect_token_registry,
    connect_declarator,
    )

View File

@@ -0,0 +1,20 @@
# external imports
from chainlib.eth.cli import ArgumentParser as BaseArgumentParser

# local imports
from .base import (
    CICFlag,
    Flag,
    )


class ArgumentParser(BaseArgumentParser):

    def process_local_flags(self, local_arg_flags):
        if local_arg_flags & CICFlag.CELERY:
            self.add_argument('-q', '--celery-queue', dest='celery_queue', type=str, default='cic-cache', help='Task queue')
        if local_arg_flags & CICFlag.SYNCER:
            self.add_argument('--offset', type=int, default=0, help='Start block height for initial history sync')
            self.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
        if local_arg_flags & CICFlag.CHAIN:
            self.add_argument('-r', '--registry-address', type=str, dest='registry_address', help='CIC registry contract address')
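
A possible invocation of the parser above, assuming the chainlib base flags behave as in the rest of this commit:

    # Hypothetical: enable only the SYNCER and CHAIN argument groups.
    import cic_cache.cli

    argparser = cic_cache.cli.ArgumentParser()
    argparser.process_local_flags(cic_cache.cli.argflag_local_sync)
    args = argparser.parse_args(['--offset', '100', '-r', '0xdeadbeef'])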

View File

@@ -0,0 +1,31 @@
# standard imports
import enum

# external imports
from chainlib.eth.cli import (
    argflag_std_read,
    argflag_std_write,
    argflag_std_base,
    Flag,
    )


class CICFlag(enum.IntEnum):

    # celery - nibble 1
    CELERY = 1

    # redis - nibble 2
    # REDIS = 16
    # REDIS_CALLBACK = 32

    # chain - nibble 3
    CHAIN = 256

    # sync - nibble 4
    SYNCER = 4096


argflag_local_task = CICFlag.CELERY
#argflag_local_taskcallback = argflag_local_task | CICFlag.REDIS | CICFlag.REDIS_CALLBACK
argflag_local_chain = CICFlag.CHAIN
argflag_local_sync = CICFlag.SYNCER | CICFlag.CHAIN
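
Because each flag group sits in its own nibble, the masks compose by bitwise OR without collisions; a quick sanity check in the same namespace:

    # SYNCER (4096) and CHAIN (256) occupy different nibbles, so OR is safe.
    flags = argflag_local_sync
    assert flags & CICFlag.SYNCER
    assert flags & CICFlag.CHAIN
    assert not (flags & CICFlag.CELERY)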

View File

@@ -0,0 +1,24 @@
# standard imports
import logging

# external imports
import celery

logg = logging.getLogger(__name__)


class CeleryApp:

    @classmethod
    def from_config(cls, config):
        backend_url = config.get('CELERY_RESULT_URL')
        broker_url = config.get('CELERY_BROKER_URL')
        celery_app = None
        if backend_url != None:
            celery_app = celery.Celery(broker=broker_url, backend=backend_url)
            logg.info('creating celery app on {} with backend on {}'.format(broker_url, backend_url))
        else:
            celery_app = celery.Celery(broker=broker_url)
            logg.info('creating celery app without results backend on {}'.format(broker_url))

        return celery_app
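
from_config only reads two keys through get(), so any mapping works for a quick sketch (the broker URL below is illustrative):

    # A plain dict is enough; a CELERY_RESULT_URL of None selects the
    # no-results-backend branch above.
    config = {
            'CELERY_BROKER_URL': 'redis://localhost:6379',
            'CELERY_RESULT_URL': None,
            }
    celery_app = CeleryApp.from_config(config)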

View File

@@ -0,0 +1,21 @@
# external imports
from chainlib.eth.block import (
    block_by_number,
    Block,
    )
from chainlib.eth.tx import (
    receipt,
    Tx,
    )
from chainlib.interface import ChainInterface


class EthChainInterface(ChainInterface):

    def __init__(self):
        self._tx_receipt = receipt
        self._block_by_number = block_by_number
        self._block_from_src = Block.from_src
        self._src_normalize = Tx.src_normalize

chain_interface = EthChainInterface()

View File

@@ -0,0 +1,63 @@
# standard imports
import os
import logging

# external imports
from chainlib.eth.cli import (
    Config as BaseConfig,
    Flag,
    )

# local imports
from .base import CICFlag

script_dir = os.path.dirname(os.path.realpath(__file__))

logg = logging.getLogger(__name__)


class Config(BaseConfig):

    local_base_config_dir = os.path.join(script_dir, '..', 'data', 'config')

    @classmethod
    def from_args(cls, args, arg_flags, local_arg_flags, extra_args={}, default_config_dir=None, base_config_dir=None, default_fee_limit=None):
        expanded_base_config_dir = [cls.local_base_config_dir]
        if base_config_dir != None:
            if isinstance(base_config_dir, str):
                base_config_dir = [base_config_dir]
            for d in base_config_dir:
                expanded_base_config_dir.append(d)
        config = BaseConfig.from_args(args, arg_flags, extra_args=extra_args, default_config_dir=default_config_dir, base_config_dir=expanded_base_config_dir, load_callback=None)

        local_args_override = {}
#        if local_arg_flags & CICFlag.REDIS:
#            local_args_override['REDIS_HOST'] = getattr(args, 'redis_host')
#            local_args_override['REDIS_PORT'] = getattr(args, 'redis_port')
#            local_args_override['REDIS_DB'] = getattr(args, 'redis_db')
#            local_args_override['REDIS_TIMEOUT'] = getattr(args, 'redis_timeout')

        if local_arg_flags & CICFlag.CHAIN:
            local_args_override['CIC_REGISTRY_ADDRESS'] = getattr(args, 'registry_address')

        if local_arg_flags & CICFlag.CELERY:
            local_args_override['CELERY_QUEUE'] = getattr(args, 'celery_queue')

        if local_arg_flags & CICFlag.SYNCER:
            local_args_override['SYNCER_OFFSET'] = getattr(args, 'offset')
            local_args_override['SYNCER_NO_HISTORY'] = getattr(args, 'no_history')

        config.dict_override(local_args_override, 'local cli args')

#        if local_arg_flags & CICFlag.REDIS_CALLBACK:
#            config.add(getattr(args, 'redis_host_callback'), '_REDIS_HOST_CALLBACK')
#            config.add(getattr(args, 'redis_port_callback'), '_REDIS_PORT_CALLBACK')

        if local_arg_flags & CICFlag.CELERY:
            config.add(config.true('CELERY_DEBUG'), 'CELERY_DEBUG', exists_ok=True)

        logg.debug('config loaded:\n{}'.format(config))

        return config
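
Put together with the parser and flag modules above, daemon bootstrap reduces to the same few lines the tracker further down in this commit uses:

    # Same sequence as cic-cache-trackerd below; shown here for context.
    arg_flags = cic_cache.cli.argflag_std_read
    local_arg_flags = cic_cache.cli.argflag_local_sync
    argparser = cic_cache.cli.ArgumentParser(arg_flags)
    argparser.process_local_flags(local_arg_flags)
    args = argparser.parse_args()
    config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)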

View File

@@ -0,0 +1,33 @@
# standard imports
import logging

# external imports
from cic_eth_registry import CICRegistry
from cic_eth_registry.lookup.declarator import AddressDeclaratorLookup
from cic_eth_registry.lookup.tokenindex import TokenIndexLookup
from chainlib.eth.constant import ZERO_ADDRESS

logg = logging.getLogger()


def connect_token_registry(self, conn, chain_spec, sender_address=ZERO_ADDRESS):
    registry = CICRegistry(chain_spec, conn)
    token_registry_address = registry.by_name('TokenRegistry', sender_address=sender_address)
    logg.debug('using token registry address {}'.format(token_registry_address))
    lookup = TokenIndexLookup(chain_spec, token_registry_address)
    CICRegistry.add_lookup(lookup)


def connect_declarator(self, conn, chain_spec, trusted_addresses, sender_address=ZERO_ADDRESS):
    registry = CICRegistry(chain_spec, conn)
    declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
    logg.debug('using declarator address {}'.format(declarator_address))
    lookup = AddressDeclaratorLookup(chain_spec, declarator_address, trusted_addresses)
    CICRegistry.add_lookup(lookup)


def connect_registry(conn, chain_spec, registry_address, sender_address=ZERO_ADDRESS):
    CICRegistry.address = registry_address
    registry = CICRegistry(chain_spec, conn)
    registry_address = registry.by_name('ContractRegistry', sender_address=sender_address)
    return registry
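
A hedged usage sketch: connect_registry is called with an active RPC connection and the configured registry address, as the tracker daemon below does. Note that connect_token_registry and connect_declarator take a leading self argument, which suggests they are meant to be bound to another object before use.

    # conn, chain_spec and config come from the caller's bootstrap code.
    registry = connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))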

View File

@@ -0,0 +1,43 @@
# standard imports
import logging

# external imports
from chainlib.connection import (
    RPCConnection,
    ConnType,
    )
from chainlib.eth.connection import EthUnixSignerConnection
from chainlib.chain import ChainSpec

logg = logging.getLogger(__name__)


class RPC:

    def __init__(self, chain_spec, rpc_provider, signer_provider=None):
        self.chain_spec = chain_spec
        self.rpc_provider = rpc_provider
        self.signer_provider = signer_provider

    def get_default(self):
        return RPCConnection.connect(self.chain_spec, 'default')

    @staticmethod
    def from_config(config):
        chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
        RPCConnection.register_location(config.get('RPC_HTTP_PROVIDER'), chain_spec, 'default')
        if config.get('SIGNER_PROVIDER'):
            RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, tag='signer')
            RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, 'signer')
        rpc = RPC(chain_spec, config.get('RPC_HTTP_PROVIDER'), signer_provider=config.get('SIGNER_PROVIDER'))
        logg.info('set up rpc: {}'.format(rpc))
        return rpc

    def __str__(self):
        return 'RPC factory, chain {}, rpc {}, signer {}'.format(self.chain_spec, self.rpc_provider, self.signer_provider)
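
Minimal factory use, mirroring the test added later in this commit; the chain spec and provider values are placeholders:

    # No SIGNER_PROVIDER set, so only the 'default' location is registered.
    config = {
            'CHAIN_SPEC': 'evm:bloxberg:8996',
            'RPC_HTTP_PROVIDER': 'http://localhost:8545',
            'SIGNER_PROVIDER': None,
            }
    rpc = RPC.from_config(config)
    conn = rpc.get_default()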

View File

@@ -0,0 +1,5 @@
[celery]
broker_url = redis://localhost:6379
result_url =
queue = cic-cache
debug = 0

View File

@@ -0,0 +1,4 @@
[cic]
registry_address =
trust_address =
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas

View File

@@ -0,0 +1,10 @@
[database]
engine =
driver =
host =
port =
name = cic-cache
user =
password =
debug = 0
pool_size = 0

View File

@@ -0,0 +1,2 @@
[signer]
provider =

View File

@@ -0,0 +1,4 @@
[syncer]
loop_interval = 1
offset = 0
no_history = 0

View File

@@ -13,6 +13,9 @@ def list_transactions_mined(
         session,
         offset,
         limit,
+        block_offset,
+        block_limit,
+        oldest=False,
         ):
     """Executes db query to return all confirmed transactions according to the specified offset and limit.
@@ -23,15 +26,62 @@ def list_transactions_mined(
     :result: Result set
     :rtype: SQLAlchemy.ResultProxy
     """
-    s = "SELECT block_number, tx_index FROM tx ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(limit, offset)
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT block_number, tx_index FROM tx ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
+
     r = session.execute(s)
     return r

 def list_transactions_mined_with_data(
+        session,
+        offset,
+        limit,
+        block_offset,
+        block_limit,
+        oldest=False,
+        ):
+    """Executes db query to return all confirmed transactions according to the specified offset and limit.
+
+    :param block_offset: First block to include in search
+    :type block_offset: int
+    :param block_limit: Last block to include in search
+    :type block_limit: int
+    :result: Result set
+    :rtype: SQLAlchemy.ResultProxy
+    """
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, limit, offset)
+
+    r = session.execute(s)
+    return r
+
+
+def list_transactions_mined_with_data_index(
         session,
         offset,
         end,
+        block_offset,
+        block_limit,
+        oldest=False,
         ):
     """Executes db query to return all confirmed transactions according to the specified offset and limit.
@@ -42,7 +92,87 @@ def list_transactions_mined_with_data(
     :result: Result set
     :rtype: SQLAlchemy.ResultProxy
     """
-    s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} ORDER BY block_number ASC, tx_index ASC".format(offset, end)
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} and block_number <= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, order_by, order_by, offset, end)
+        else:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, order_by, order_by, offset, end)
+    else:
+        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(order_by, order_by, offset, end)
+
+    r = session.execute(s)
+    return r
+
+
+def list_transactions_account_mined_with_data_index(
+        session,
+        address,
+        offset,
+        limit,
+        block_offset,
+        block_limit,
+        oldest=False,
+        ):
+    """Executes db query to return all confirmed transactions according to the specified offset and limit, filtered by address
+
+    :param offset: Offset in data set to return transactions from
+    :type offset: int
+    :param limit: Max number of transactions to retrieve
+    :type limit: int
+    :result: Result set
+    :rtype: SQLAlchemy.ResultProxy
+    """
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
+
+    r = session.execute(s)
+    return r
+
+
+def list_transactions_account_mined_with_data(
+        session,
+        address,
+        offset,
+        limit,
+        block_offset,
+        block_limit,
+        oldest=False,
+        ):
+    """Executes db query to return all confirmed transactions according to the specified offset and limit.
+
+    :param block_offset: First block to include in search
+    :type block_offset: int
+    :param block_limit: Last block to include in search
+    :type block_limit: int
+    :result: Result set
+    :rtype: SQLAlchemy.ResultProxy
+    """
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT tx_hash, block_number, date_block, sender, recipient, from_value, to_value, source_token, destination_token, success, domain, value FROM tx LEFT JOIN tag_tx_link ON tx.id = tag_tx_link.tx_id LEFT JOIN tag ON tag_tx_link.tag_id = tag.id WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
+
+    r = session.execute(s)
+    return r
@@ -53,6 +183,9 @@ def list_transactions_account_mined(
         address,
         offset,
         limit,
+        block_offset,
+        block_limit,
+        oldest=False,
         ):
     """Same as list_transactions_mined(...), but only retrieves transaction where the specified account address is sender or recipient.
@@ -65,7 +198,20 @@ def list_transactions_account_mined(
     :result: Result set
     :rtype: SQLAlchemy.ResultProxy
     """
-    s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number DESC, tx_index DESC LIMIT {} OFFSET {}".format(address, address, limit, offset)
+    order_by = 'DESC'
+    if oldest:
+        order_by = 'ASC'
+
+    if block_offset:
+        if block_limit:
+            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND block_number <= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, block_limit, address, address, order_by, order_by, limit, offset)
+        else:
+            s = "SELECT block_number, tx_index FROM tx WHERE block_number >= {} AND (sender = '{}' OR recipient = '{}') ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(block_offset, address, address, order_by, order_by, limit, offset)
+    else:
+        s = "SELECT block_number, tx_index FROM tx WHERE sender = '{}' OR recipient = '{}' ORDER BY block_number {}, tx_index {} LIMIT {} OFFSET {}".format(address, address, order_by, order_by, limit, offset)
+
     r = session.execute(s)
     return r
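
All of these helpers interpolate their arguments straight into the SQL string, so callers are expected to pass trusted integers and addresses. An illustrative call, with values borrowed from the tests below:

    # Newest-first page of up to 100 confirmed txs at or above block 410000;
    # block_limit of None selects the lower-bound-only branch above.
    rows = list_transactions_mined(session, 0, 100, 410000, None)
    for block_number, tx_index in rows:
        print(block_number, tx_index)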

View File

@@ -7,7 +7,7 @@ Create Date: 2021-04-01 08:10:29.156243
 """
 from alembic import op
 import sqlalchemy as sa
-from chainsyncer.db.migrations.sqlalchemy import (
+from chainsyncer.db.migrations.default.export import (
     chainsyncer_upgrade,
     chainsyncer_downgrade,
     )

View File

@@ -91,13 +91,14 @@ def process_transactions_all_data(session, env):
     if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
         return None
+    logg.debug('got data request {}'.format(env))

-    offset = r[1]
-    end = r[2]
+    block_offset = r[1]
+    block_end = r[2]
     if int(r[2]) < int(r[1]):
         raise ValueError('cart before the horse, dude')

     c = DataCache(session)
-    (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, end)
+    (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(0, 0, block_offset, block_end, oldest=True) # oldest needs to be settable

     for r in tx_cache:
         r['date_block'] = r['date_block'].timestamp()

View File

@@ -8,15 +8,7 @@ import sys
 import re

 # external imports
-import confini
-import celery
 import sqlalchemy
-import rlp
-import cic_base.config
-import cic_base.log
-import cic_base.argparse
-import cic_base.rpc
-from cic_base.eth.syncer import chain_interface
 from cic_eth_registry import CICRegistry
 from cic_eth_registry.error import UnknownContractError
 from chainlib.chain import ChainSpec
@@ -34,6 +26,7 @@ from chainsyncer.driver.history import HistorySyncer
 from chainsyncer.db.models.base import SessionBase

 # local imports
+import cic_cache.cli
 from cic_cache.db import (
     dsn_from_config,
     add_tag,
@@ -43,32 +36,36 @@ from cic_cache.runnable.daemons.filters import (
     FaucetFilter,
     )

-script_dir = os.path.realpath(os.path.dirname(__file__))
-
-def add_block_args(argparser):
-    argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
-    argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
-    return argparser
-
-logg = cic_base.log.create()
-argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
-argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
-args = cic_base.argparse.parse(argparser, logg)
-config = cic_base.config.create(args.c, args, args.env_prefix)
-
-config.add(args.history_start, 'SYNCER_HISTORY_START', True)
-config.add(args.no_history, '_NO_HISTORY', True)
-
-cic_base.config.log(config)
+logging.basicConfig(level=logging.WARNING)
+logg = logging.getLogger()
+
+# process args
+arg_flags = cic_cache.cli.argflag_std_read
+local_arg_flags = cic_cache.cli.argflag_local_sync
+argparser = cic_cache.cli.ArgumentParser(arg_flags)
+argparser.process_local_flags(local_arg_flags)
+args = argparser.parse_args()
+
+# process config
+config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)

+# connect to database
 dsn = dsn_from_config(config)
 SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))

-chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
-
-cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
+# set up rpc
+rpc = cic_cache.cli.RPC.from_config(config)
+conn = rpc.get_default()
+
+# set up chain provisions
+chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
+registry = None
+try:
+    registry = cic_cache.cli.connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
+except UnknownContractError as e:
+    logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
+    sys.exit(1)
+logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))


 def register_filter_tags(filters, session):
@@ -95,14 +92,12 @@ def main():

     syncers = []

-    #if SQLBackend.first(chain_spec):
-    #    backend = SQLBackend.initial(chain_spec, block_offset)
     syncer_backends = SQLBackend.resume(chain_spec, block_offset)

     if len(syncer_backends) == 0:
-        initial_block_start = config.get('SYNCER_HISTORY_START')
+        initial_block_start = config.get('SYNCER_OFFSET')
         initial_block_offset = block_offset
-        if config.get('_NO_HISTORY'):
+        if config.get('SYNCER_NO_HISTORY'):
             initial_block_start = block_offset
             initial_block_offset += 1
         syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
@@ -112,10 +107,10 @@ def main():
         logg.info('resuming sync session {}'.format(syncer_backend))

     for syncer_backend in syncer_backends:
-        syncers.append(HistorySyncer(syncer_backend, chain_interface))
+        syncers.append(HistorySyncer(syncer_backend, cic_cache.cli.chain_interface))

     syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
-    syncers.append(HeadSyncer(syncer_backend, chain_interface))
+    syncers.append(HeadSyncer(syncer_backend, cic_cache.cli.chain_interface))

     trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
     if trusted_addresses_src == None:

View File

@@ -2,14 +2,17 @@
 import celery

 # local imports
-from cic_cache.cache import BloomCache
+from cic_cache.cache import (
+    BloomCache,
+    DataCache,
+    )
 from cic_cache.db.models.base import SessionBase

 celery_app = celery.current_app

 @celery_app.task(bind=True)
-def tx_filter(self, offset, limit, address=None, encoding='hex'):
+def tx_filter(self, offset, limit, address=None, oldest=False, encoding='hex'):
     queue = self.request.delivery_info.get('routing_key')

     session = SessionBase.create_session()
@@ -17,9 +20,9 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
     c = BloomCache(session)
     b = None
     if address == None:
-        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)
+        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit, oldest=oldest)
     else:
-        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
+        (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit, oldest=oldest)

     session.close()
@@ -35,4 +38,17 @@ def tx_filter(self, offset, limit, address=None, encoding='hex'):
     return o

+@celery_app.task(bind=True)
+def tx_filter_content(self, offset, limit, address=None, block_offset=None, block_limit=None, oldest=False, encoding='hex'):
+    session = SessionBase.create_session()
+
+    c = DataCache(session)
+    b = None
+    if address == None:
+        (lowest_block, highest_block, tx_cache) = c.load_transactions_with_data(offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
+    else:
+        (lowest_block, highest_block, tx_cache) = c.load_transactions_account_with_data_index(address, offset, limit, block_offset=block_offset, block_limit=block_limit)
+
+    session.close()
+
+    return (lowest_block, highest_block, tx_cache,)
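
The new task can also be driven directly over celery, which is what Api.list_content does; a sketch with placeholder arguments and an assumed queue name:

    # Hypothetical direct invocation of the task registered above.
    import celery

    s = celery.signature(
            'cic_cache.tasks.tx.tx_filter_content',
            [0, 10, None, 410000, 420000, True],
            queue='cic-cache',
            )
    t = s.apply_async()
    (lowest_block, highest_block, tx_cache) = t.get()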

View File

@@ -4,8 +4,8 @@ import semver
 version = (
     0,
     2,
-    0,
-    'alpha.2',
+    1,
+    'alpha.1',
     )

 version_object = semver.VersionInfo(

View File

@@ -1,2 +0,0 @@
-[bancor]
-dir =

View File

@@ -1,4 +1,3 @@
 [cic]
 registry_address =
-chain_spec =
 trust_address =

View File

@@ -1,3 +0,0 @@
-[bancor]
-registry_address =
-dir = /usr/local/share/bancor

View File

@@ -1,4 +1,3 @@
 [cic]
-chain_spec =
 registry_address =
 trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C

View File

@@ -1,2 +0,0 @@
-[eth]
-provider = http://localhost:63545

View File

@@ -1,3 +1,4 @@
 [syncer]
 loop_interval = 1
-history_start = 0
+offset = 0
+no_history = 0

View File

@@ -1,2 +0,0 @@
-[eth]
-provider = ws://localhost:8545

View File

@@ -1,3 +0,0 @@
-[syncer]
-loop_interval = 5
-history_start = 0

View File

@@ -1,13 +1,17 @@
-cic-base~=0.2.0a4
 alembic==1.4.2
-confini>=0.3.6rc3,<0.5.0
+confini>=0.3.6rc4,<0.5.0
 uwsgi==2.0.19.1
-moolb~=0.1.0
+moolb~=0.1.1b2
-cic-eth-registry~=0.5.6a2
+cic-eth-registry~=0.5.8a1
 SQLAlchemy==1.3.20
 semver==2.13.0
 psycopg2==2.8.6
 celery==4.4.7
 redis==3.5.3
-chainsyncer[sql]~=0.0.3a5
+chainsyncer[sql]>=0.0.6a1,<0.1.0
-erc20-faucet~=0.2.2a2
+erc20-faucet>=0.2.4a2, <0.3.0
+#chainlib-eth==0.0.7a5,<0.1.0
+chainlib-eth==0.0.7a5
+#chainlib==0.0.7a4,<0.1.0
+chainlib==0.0.7a4
+eth-address-index>=0.1.4a1,<0.2.0

View File

@@ -23,11 +23,13 @@ licence_files =
 [options]
 python_requires = >= 3.6
+include_package_data = True
 packages =
     cic_cache
     cic_cache.tasks
     cic_cache.db
     cic_cache.db.models
+    cic_cache.cli
     cic_cache.runnable
     cic_cache.runnable.daemons
     cic_cache.runnable.daemons.filters
@@ -39,3 +41,4 @@ console_scripts =
     cic-cache-trackerd = cic_cache.runnable.daemons.tracker:main
     cic-cache-serverd = cic_cache.runnable.daemons.server:main
     cic-cache-taskerd = cic_cache.runnable.daemons.tasker:main
+    cic-cache-list = cic_cache.runable.list:main

View File

@@ -6,5 +6,5 @@ sqlparse==0.4.1
 pytest-celery==0.0.0a1
 eth_tester==0.5.0b3
 py-evm==0.3.0a20
-cic_base[full]==0.1.3a3+build.984b5cff
-sarafu-faucet~=0.0.4a1
+sarafu-faucet~=0.0.5a2
+erc20-transfer-authorization>=0.3.4a1,<0.4.0

View File

@@ -0,0 +1,40 @@
# standard imports
import os

# external imports
import chainlib.cli

# local imports
import cic_cache.cli

script_dir = os.path.dirname(os.path.realpath(__file__))
config_dir = os.path.join(script_dir, '..', 'testdata', 'config')


def test_argumentparserto_config():
    argparser = cic_cache.cli.ArgumentParser()

    local_flags = 0xffff
    argparser.process_local_flags(local_flags)
    argparser.add_argument('--foo', type=str)
    args = argparser.parse_args([
        '-q', 'baz',
        '--offset', '13',
        '--no-history',
        '-r', '0xdeadbeef',
        '-vv',
        '--foo', 'bar',
        ])

    extra_args = {
            'foo': '_BARBARBAR',
            }
    config = cic_cache.cli.Config.from_args(args, chainlib.cli.argflag_std_base, local_flags, extra_args=extra_args, base_config_dir=config_dir)

    assert config.get('_BARBARBAR') == 'bar'
    assert config.get('CELERY_QUEUE') == 'baz'
    assert config.get('SYNCER_NO_HISTORY') == True
    assert config.get('SYNCER_OFFSET') == 13
    assert config.get('CIC_REGISTRY_ADDRESS') == '0xdeadbeef'

View File

@@ -0,0 +1,17 @@
# standard imports
import tempfile

# local imports
import cic_cache.cli


def test_cli_celery():
    cf = tempfile.mkdtemp()
    config = {
            'CELERY_RESULT_URL': 'filesystem://' + cf,
            }
    cic_cache.cli.CeleryApp.from_config(config)

    config['CELERY_BROKER_URL'] = 'filesystem://' + cf
    cic_cache.cli.CeleryApp.from_config(config)

View File

@@ -0,0 +1,68 @@
# external imports
import pytest
from chainlib.eth.gas import (
    Gas,
    RPCGasOracle,
    )
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.block import (
    block_latest,
    Block,
    )
from chainlib.eth.pytest.fixtures_chain import default_chain_spec
from chainlib.eth.pytest.fixtures_ethtester import *
from cic_eth_registry.pytest.fixtures_contracts import *
from hexathon import add_0x

# local imports
import cic_cache.cli


@pytest.mark.xfail()
def test_cli_rpc(
        eth_rpc,
        eth_signer,
        default_chain_spec,
        ):
    config = {
            'CHAIN_SPEC': str(default_chain_spec),
            'RPC_HTTP_PROVIDER': 'http://localhost:8545',
            }
    rpc = cic_cache.cli.RPC.from_config(config, default_label='foo')
    conn = rpc.get_by_label('foo')
    #o = block_latest()
    #conn.do(o)


def test_cli_chain(
        default_chain_spec,
        eth_rpc,
        eth_signer,
        contract_roles,
        ):
    ifc = cic_cache.cli.EthChainInterface()

    nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
    gas_oracle = RPCGasOracle(conn=eth_rpc)
    c = Gas(default_chain_spec, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle, signer=eth_signer)
    recipient = add_0x(os.urandom(20).hex())
    (tx_hash, o) = c.create(contract_roles['CONTRACT_DEPLOYER'], recipient, 1024)
    r = eth_rpc.do(o)

    o = ifc.tx_receipt(r)
    r = eth_rpc.do(o)
    assert r['status'] == 1

    o = ifc.block_by_number(1)
    block_src = eth_rpc.do(o)
    block = ifc.block_from_src(block_src)
    assert block.number == 1

    with pytest.raises(KeyError):
        assert block_src['gasUsed'] == 21000
        assert block_src['gas_used'] == 21000

    block_src = ifc.src_normalize(block_src)
    assert block_src['gasUsed'] == 21000
    assert block_src['gas_used'] == 21000

View File

@@ -64,7 +64,6 @@ def txs(
             dt.timestamp(),
             )

-
     tx_number = 42
     tx_hash_second = '0x' + os.urandom(32).hex()
     tx_signed_second = '0x' + os.urandom(128).hex()
@@ -93,6 +92,44 @@ def txs(
         ]


+@pytest.fixture(scope='function')
+def more_txs(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        txs,
+        ):
+    session = init_database
+
+    tx_number = 666
+    tx_hash = '0x' + os.urandom(32).hex()
+    tx_signed = '0x' + os.urandom(128).hex()
+    nonce = 3
+
+    dt = datetime.datetime.utcnow()
+    dt += datetime.timedelta(hours=1)
+
+    db.add_transaction(
+        session,
+        tx_hash,
+        list_defaults['block']+2,
+        tx_number,
+        list_actors['alice'],
+        list_actors['diane'],
+        list_tokens['bar'],
+        list_tokens['bar'],
+        2048,
+        4096,
+        False,
+        dt.timestamp(),
+        )
+
+    session.commit()
+
+    return [tx_hash] + txs
+
+
 @pytest.fixture(scope='function')
 def tag_txs(
         init_database,

View File

@@ -8,6 +8,7 @@ import json
 import pytest

 # local imports
+from cic_cache import db
 from cic_cache import BloomCache
 from cic_cache.cache import DataCache
@@ -18,7 +19,6 @@ def test_cache(
         init_database,
         list_defaults,
         list_actors,
-        list_tokens,
         txs,
         ):
@@ -37,9 +37,6 @@ def test_cache(

 def test_cache_data(
         init_database,
-        list_defaults,
-        list_actors,
-        list_tokens,
         txs,
         tag_txs,
         ):
@@ -47,10 +44,209 @@ def test_cache_data(
     session = init_database

     c = DataCache(session)
-    b = c.load_transactions_with_data(410000, 420000)
+    b = c.load_transactions_with_data(0, 3) #410000, 420000) #, 100, block_offset=410000, block_limit=420000, oldest=True)

     assert len(b[2]) == 2
-    assert b[2][0]['tx_hash'] == txs[1]
-    assert b[2][1]['tx_type'] == 'unknown'
-    assert b[2][0]['tx_type'] == 'test.taag'
+    assert b[2][0]['tx_hash'] == txs[0]
+    assert b[2][0]['tx_type'] == 'unknown'
+    assert b[2][1]['tx_type'] == 'test.taag'
+
+
+def test_cache_ranges(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        more_txs,
+        ):
+    session = init_database
+
+    oldest = list_defaults['block'] - 1
+    mid = list_defaults['block']
+    newest = list_defaults['block'] + 2
+
+    c = BloomCache(session)
+    b = c.load_transactions(0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+
+    b = c.load_transactions(1, 2)
+    assert b[0] == oldest
+    assert b[1] == mid
+
+    b = c.load_transactions(0, 2)
+    assert b[0] == mid
+    assert b[1] == newest
+
+    b = c.load_transactions(0, 1)
+    assert b[0] == newest
+    assert b[1] == newest
+
+    b = c.load_transactions(0, 100, oldest=True)
+    assert b[0] == oldest
+    assert b[1] == newest
+
+    b = c.load_transactions(0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+
+    b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == oldest
+    assert b[1] == mid
+
+    b = c.load_transactions(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
+    assert b[0] == oldest
+    assert b[1] == mid
+
+    # now check when supplying account
+    b = c.load_transactions_account(list_actors['alice'], 0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+
+    b = c.load_transactions_account(list_actors['bob'], 0, 100)
+    assert b[0] == mid
+    assert b[1] == mid
+
+    b = c.load_transactions_account(list_actors['diane'], 0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+
+    # add block filter to the mix
+    b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+
+    b = c.load_transactions_account(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+
+    b = c.load_transactions_account(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == mid
+
+    b = c.load_transactions_account(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == oldest
+    assert b[1] == oldest
+
+
+def test_cache_ranges_data(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        more_txs,
+        ):
+    session = init_database
+
+    oldest = list_defaults['block'] - 1
+    mid = list_defaults['block']
+    newest = list_defaults['block'] + 2
+
+    c = DataCache(session)
+
+    b = c.load_transactions_with_data(0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+    assert len(b[2]) == 3
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][2]['tx_hash'] == more_txs[2]
+
+    b = c.load_transactions_with_data(1, 2)
+    assert b[0] == oldest
+    assert b[1] == mid
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[1]
+    assert b[2][1]['tx_hash'] == more_txs[2]
+
+    b = c.load_transactions_with_data(0, 2)
+    assert b[0] == mid
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_with_data(0, 1)
+    assert b[0] == newest
+    assert b[1] == newest
+    assert len(b[2]) == 1
+    assert b[2][0]['tx_hash'] == more_txs[0]
+
+    b = c.load_transactions_with_data(0, 100, oldest=True)
+    assert b[0] == oldest
+    assert b[1] == newest
+    assert len(b[2]) == 3
+    assert b[2][0]['tx_hash'] == more_txs[2]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+    assert b[2][2]['tx_hash'] == more_txs[0]
+
+    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == oldest
+    assert b[1] == mid
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[1]
+    assert b[2][1]['tx_hash'] == more_txs[2]
+
+    b = c.load_transactions_with_data(0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'], oldest=True)
+    assert b[0] == oldest
+    assert b[1] == mid
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[2]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    # now check when supplying account
+    b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+    assert len(b[2]) == 3
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+    assert b[2][2]['tx_hash'] == more_txs[2]
+
+    b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100)
+    assert b[0] == mid
+    assert b[1] == mid
+    assert len(b[2]) == 1
+    assert b[2][0]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100)
+    assert b[0] == oldest
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[2]
+
+    # add block filter to the mix
+    b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_account_with_data(list_actors['alice'], 0, 100, block_offset=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == newest
+    assert len(b[2]) == 2
+    assert b[2][0]['tx_hash'] == more_txs[0]
+    assert b[2][1]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_account_with_data(list_actors['bob'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == mid
+    assert b[1] == mid
+    assert len(b[2]) == 1
+    assert b[2][0]['tx_hash'] == more_txs[1]
+
+    b = c.load_transactions_account_with_data(list_actors['diane'], 0, 100, block_offset=list_defaults['block'] - 1, block_limit=list_defaults['block'])
+    assert b[0] == oldest
+    assert b[1] == oldest
+    assert len(b[2]) == 1
+    assert b[2][0]['tx_hash'] == more_txs[2]

View File

@@ -0,0 +1,2 @@
[foo]
bar_baz = xyzzy

View File

@@ -46,7 +46,7 @@ def get_adjusted_balance(self, token_symbol, amount, timestamp):

 def aux_setup(rpc, config, sender_address=ZERO_ADDRESS):
-    chain_spec_str = config.get('CIC_CHAIN_SPEC')
+    chain_spec_str = config.get('CHAIN_SPEC')
     chain_spec = ChainSpec.from_chain_str(chain_spec_str)

     token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')

View File

@@ -1,5 +1,5 @@
 celery==4.4.7
-erc20-demurrage-token~=0.0.2a3
+erc20-demurrage-token~=0.0.3a1
-cic-eth-registry~=0.5.6a1
+cic-eth-registry~=0.5.8a1
-chainlib~=0.0.5a1
+chainlib~=0.0.7a1
-cic_eth~=0.12.0a2
+cic_eth~=0.12.2a4

View File

@@ -1,6 +1,6 @@
 [metadata]
 name = cic-eth-aux-erc20-demurrage-token
-version = 0.0.2a4
+version = 0.0.2a6
 description = cic-eth tasks supporting erc20 demurrage token
 author = Louis Holbrook
 author_email = dev@holbrook.no

View File

@@ -5,8 +5,7 @@ pytest-cov==2.10.1
 eth-tester==0.5.0b3
 py-evm==0.3.0a20
 SQLAlchemy==1.3.20
-cic-eth~=0.12.0a1
 liveness~=0.0.1a7
-eth-accounts-index==0.0.12a1
+eth-accounts-index==0.1.1a1
-eth-contract-registry==0.5.6a1
+eth-contract-registry==0.5.8a1
-eth-address-index==0.1.2a1
+eth-address-index==0.2.1a1

View File

@@ -6,4 +6,5 @@ omit =
 cic_eth/sync/head.py
 cic_eth/sync/mempool.py
 cic_eth/queue/state.py
+cic_eth/cli
 *redis*.py

View File

@@ -1,2 +1,2 @@
-include *requirements.txt config/test/*
+include *requirements.txt config/test/* cic_eth/data/config/*

View File

@@ -520,9 +520,9 @@ class Api(ApiBase):
         s_external_get = celery.signature(
                 external_task,
                 [
-                    address,
                     offset,
                     limit,
+                    address,
                 ],
                 queue=external_queue,
                 )

View File

@@ -21,7 +21,7 @@ def health(*args, **kwargs):
     session = SessionBase.create_session()

     config = kwargs['config']
-    chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
+    chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))

     logg.debug('check gas balance of gas gifter for chain {}'.format(chain_spec))

     try:
View File

@ -15,7 +15,7 @@ logg = logging.getLogger().getChild(__name__)
def health(*args, **kwargs): def health(*args, **kwargs):
blocked = True blocked = True
max_attempts = 5 max_attempts = 5
conn = RPCConnection.connect(kwargs['config'].get('CIC_CHAIN_SPEC'), tag='signer') conn = RPCConnection.connect(kwargs['config'].get('CHAIN_SPEC'), tag='signer')
for i in range(max_attempts): for i in range(max_attempts):
idx = i + 1 idx = i + 1
logg.debug('attempt signer connection check {}/{}'.format(idx, max_attempts)) logg.debug('attempt signer connection check {}/{}'.format(idx, max_attempts))

View File

@@ -0,0 +1,10 @@
# local imports
from .base import *
from .chain import (
    EthChainInterface,
    chain_interface,
    )
from .rpc import RPC
from .arg import ArgumentParser
from .config import Config
from .celery import CeleryApp

View File

@@ -0,0 +1,31 @@
# external imports
from chainlib.eth.cli import ArgumentParser as BaseArgumentParser

# local imports
from .base import (
    CICFlag,
    Flag,
    )


class ArgumentParser(BaseArgumentParser):

    def process_local_flags(self, local_arg_flags):
        if local_arg_flags & CICFlag.REDIS:
            self.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
            self.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
            self.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use')
        if local_arg_flags & CICFlag.REDIS_CALLBACK:
            self.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback')
            self.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback')
            self.add_argument('--redis-timeout', default=20.0, type=float, help='Redis callback timeout')
        if local_arg_flags & CICFlag.CELERY:
            self.add_argument('-q', '--celery-queue', dest='celery_queue', type=str, default='cic-eth', help='Task queue')
        if local_arg_flags & CICFlag.SYNCER:
            self.add_argument('--offset', type=int, default=0, help='Start block height for initial history sync')
            self.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
        if local_arg_flags & CICFlag.CHAIN:
            self.add_argument('-r', '--registry-address', type=str, dest='registry_address', help='CIC registry contract address')

View File

@@ -0,0 +1,31 @@
# standard imports
import enum

# external imports
from chainlib.eth.cli import (
    argflag_std_read,
    argflag_std_write,
    argflag_std_base,
    Flag,
    )


class CICFlag(enum.IntEnum):

    # celery - nibble 1
    CELERY = 1

    # redis - nibble 2
    REDIS = 16
    REDIS_CALLBACK = 32

    # chain - nibble 3
    CHAIN = 256

    # sync - nibble 4
    SYNCER = 4096


argflag_local_task = CICFlag.CELERY
argflag_local_taskcallback = argflag_local_task | CICFlag.REDIS | CICFlag.REDIS_CALLBACK
argflag_local_chain = CICFlag.CHAIN
argflag_local_sync = CICFlag.SYNCER | CICFlag.CHAIN

View File

@@ -0,0 +1,24 @@
# standard imports
import logging

# external imports
import celery

logg = logging.getLogger(__name__)


class CeleryApp:

    @classmethod
    def from_config(cls, config):
        backend_url = config.get('CELERY_RESULT_URL')
        broker_url = config.get('CELERY_BROKER_URL')
        celery_app = None
        if backend_url != None:
            celery_app = celery.Celery(broker=broker_url, backend=backend_url)
            logg.info('creating celery app on {} with backend on {}'.format(broker_url, backend_url))
        else:
            celery_app = celery.Celery(broker=broker_url)
            logg.info('creating celery app without results backend on {}'.format(broker_url))

        return celery_app

View File

@@ -0,0 +1,21 @@
# external imports
from chainlib.eth.block import (
    block_by_number,
    Block,
    )
from chainlib.eth.tx import (
    receipt,
    Tx,
    )
from chainlib.interface import ChainInterface


class EthChainInterface(ChainInterface):

    def __init__(self):
        self._tx_receipt = receipt
        self._block_by_number = block_by_number
        self._block_from_src = Block.from_src
        self._src_normalize = Tx.src_normalize

chain_interface = EthChainInterface()

View File

@@ -0,0 +1,63 @@
# standard imports
import os
import logging

# external imports
from chainlib.eth.cli import (
    Config as BaseConfig,
    Flag,
    )

# local imports
from .base import CICFlag

script_dir = os.path.dirname(os.path.realpath(__file__))

logg = logging.getLogger(__name__)


class Config(BaseConfig):

    local_base_config_dir = os.path.join(script_dir, '..', 'data', 'config')

    @classmethod
    def from_args(cls, args, arg_flags, local_arg_flags, extra_args={}, default_config_dir=None, base_config_dir=None, default_fee_limit=None):
        expanded_base_config_dir = [cls.local_base_config_dir]
        if base_config_dir != None:
            if isinstance(base_config_dir, str):
                base_config_dir = [base_config_dir]
            for d in base_config_dir:
                expanded_base_config_dir.append(d)
        config = BaseConfig.from_args(args, arg_flags, extra_args=extra_args, default_config_dir=default_config_dir, base_config_dir=expanded_base_config_dir, load_callback=None)

        local_args_override = {}
        if local_arg_flags & CICFlag.REDIS:
            local_args_override['REDIS_HOST'] = getattr(args, 'redis_host')
            local_args_override['REDIS_PORT'] = getattr(args, 'redis_port')
            local_args_override['REDIS_DB'] = getattr(args, 'redis_db')
            local_args_override['REDIS_TIMEOUT'] = getattr(args, 'redis_timeout')

        if local_arg_flags & CICFlag.CHAIN:
            local_args_override['CIC_REGISTRY_ADDRESS'] = getattr(args, 'registry_address')

        if local_arg_flags & CICFlag.CELERY:
            local_args_override['CELERY_QUEUE'] = getattr(args, 'celery_queue')

        if local_arg_flags & CICFlag.SYNCER:
            local_args_override['SYNCER_OFFSET'] = getattr(args, 'offset')
            local_args_override['SYNCER_NO_HISTORY'] = getattr(args, 'no_history')

        config.dict_override(local_args_override, 'local cli args')

        if local_arg_flags & CICFlag.REDIS_CALLBACK:
            config.add(getattr(args, 'redis_host_callback'), '_REDIS_HOST_CALLBACK')
            config.add(getattr(args, 'redis_port_callback'), '_REDIS_PORT_CALLBACK')

        if local_arg_flags & CICFlag.CELERY:
            config.add(config.true('CELERY_DEBUG'), 'CELERY_DEBUG', exists_ok=True)

        logg.debug('config loaded:\n{}'.format(config))

        return config

View File

@ -0,0 +1,90 @@
# standard imports
import logging
# external imports
from chainlib.connection import (
RPCConnection,
ConnType,
)
from chainlib.eth.connection import (
EthUnixSignerConnection,
EthHTTPSignerConnection,
)
from chainlib.chain import ChainSpec
logg = logging.getLogger(__name__)
class RPC:
def __init__(self, chain_spec, rpc_provider, signer_provider=None):
self.chain_spec = chain_spec
self.rpc_provider = rpc_provider
self.signer_provider = signer_provider
def get_default(self):
return self.get_by_label('default')
def get_by_label(self, label):
return RPCConnection.connect(self.chain_spec, label)
@staticmethod
def from_config(config, use_signer=False, default_label='default', signer_label='signer'):
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
RPCConnection.register_location(config.get('RPC_HTTP_PROVIDER'), chain_spec, default_label)
if use_signer:
RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, signer_label)
RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, signer_label)
RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, signer_label)
RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, signer_label)
rpc = RPC(chain_spec, config.get('RPC_HTTP_PROVIDER'), signer_provider=config.get('SIGNER_PROVIDER'))
logg.info('set up rpc: {}'.format(rpc))
return rpc
def __str__(self):
return 'RPC factory, chain {}, rpc {}, signer {}'.format(self.chain_spec, self.rpc_provider, self.signer_provider)
# TODO: re-implement file backend option from omitted code:
#broker = config.get('CELERY_BROKER_URL')
#if broker[:4] == 'file':
# bq = tempfile.mkdtemp()
# bp = tempfile.mkdtemp()
# conf_update = {
# 'broker_url': broker,
# 'broker_transport_options': {
# 'data_folder_in': bq,
# 'data_folder_out': bq,
# 'data_folder_processed': bp,
# },
# }
# if config.true('CELERY_DEBUG'):
# conf_update['result_extended'] = True
# current_app.conf.update(conf_update)
# logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp))
#else:
# conf_update = {
# 'broker_url': broker,
# }
# if config.true('CELERY_DEBUG'):
# conf_update['result_extended'] = True
# current_app.conf.update(conf_update)
#
#result = config.get('CELERY_RESULT_URL')
#if result[:4] == 'file':
# rq = tempfile.mkdtemp()
# current_app.conf.update({
# 'result_backend': 'file://{}'.format(rq),
# })
# logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(rq))
#else:
# current_app.conf.update({
# 'result_backend': result,
# })
#
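
A sketch of the factory in use, mirroring the daemons further down; config is the cic_eth.cli.Config loaded at startup, with CHAIN_SPEC and RPC_HTTP_PROVIDER set (and SIGNER_PROVIDER when signing is needed):

import cic_eth.cli

def setup_rpc(config, need_signer=False):
    # registers the providers under the 'default' (and optionally 'signer')
    # labels, then hands back a live connection for each
    rpc = cic_eth.cli.RPC.from_config(config, use_signer=need_signer)
    conn = rpc.get_default()
    signer_conn = rpc.get_by_label('signer') if need_signer else None
    return conn, signer_conn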

View File

@ -0,0 +1,5 @@
[celery]
broker_url = redis://localhost:6379
result_url =
queue = cic-eth
debug = 0
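
Per the confini convention used throughout, each section/option pair surfaces as an uppercase SECTION_OPTION config key; a sketch of reading the file above back out:

def celery_settings(config):
    # [celery] broker_url -> CELERY_BROKER_URL, and so on
    broker = config.get('CELERY_BROKER_URL')  # redis://localhost:6379
    queue = config.get('CELERY_QUEUE')        # cic-eth
    debug = config.true('CELERY_DEBUG')       # '0' -> False
    return broker, queue, debug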

View File

@ -1,8 +1,6 @@
[cic] [cic]
registry_address = registry_address =
chain_spec = evm:bloxberg:8996
tx_retry_delay =
trust_address = trust_address =
default_token_symbol = GFT default_token_symbol =
health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas
run_dir = /run run_dir = /run

View File

@ -0,0 +1,10 @@
[database]
engine =
driver =
host =
port =
name =
user =
password =
debug = 0
pool_size = 0
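
These options feed dsn_from_config, which every daemon below calls before touching the ORM; a sketch of that wiring:

from cic_eth.db import dsn_from_config
from cic_eth.db import SessionBase

def connect_db(config):
    dsn = dsn_from_config(config)
    SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))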

View File

@ -1,2 +1,2 @@
[SYNCER] [dispatcher]
loop_interval = 1 loop_interval = 1

View File

@ -1,3 +1,2 @@
[eth] [eth]
provider = http://localhost:8545
gas_gifter_minimum_balance = 10000000000000000000000 gas_gifter_minimum_balance = 10000000000000000000000

View File

@ -1,4 +1,5 @@
[redis] [redis]
host = localhost host = localhost
port = 63379 port = 6379
db = 0 db = 0
timeout = 20.0

View File

@ -0,0 +1,3 @@
[retry]
delay =
batch_size =

View File

@ -0,0 +1,2 @@
[signer]
provider =

View File

@ -0,0 +1,4 @@
[syncer]
loop_interval = 1
offset = 0
no_history = 0
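
The two new keys drive the history-sync bootstrap in the tx tracker further down; a sketch of how they are consumed there:

def initial_sync_window(config, block_offset):
    # offset: block height to start the initial history sync from
    initial_block_start = config.get('SYNCER_OFFSET')
    initial_block_offset = block_offset
    # no_history: skip history sync and start at the current head instead
    if config.true('SYNCER_NO_HISTORY'):
        initial_block_start = block_offset
        initial_block_offset += 1
    return initial_block_start, initial_block_offset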

View File

@ -12,9 +12,11 @@ from chainlib.eth.tx import (
transaction_by_block, transaction_by_block,
receipt, receipt,
) )
from chainlib.eth.error import RequestMismatchException
from chainlib.eth.block import block_by_number from chainlib.eth.block import block_by_number
from chainlib.eth.contract import abi_decode_single from chainlib.eth.contract import abi_decode_single
from chainlib.eth.constant import ZERO_ADDRESS from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.eth.tx import Tx
from hexathon import strip_0x from hexathon import strip_0x
from cic_eth_registry import CICRegistry from cic_eth_registry import CICRegistry
from cic_eth_registry.erc20 import ERC20Token from cic_eth_registry.erc20 import ERC20Token
@ -23,6 +25,8 @@ from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusEnum from chainqueue.db.enum import StatusEnum
from chainqueue.sql.query import get_tx_cache from chainqueue.sql.query import get_tx_cache
from eth_erc20 import ERC20 from eth_erc20 import ERC20
from erc20_faucet import Faucet
from potaahto.symbols import snake_and_camel
# local imports # local imports
from cic_eth.queue.time import tx_times from cic_eth.queue.time import tx_times
@ -35,6 +39,32 @@ logg = logging.getLogger()
MAX_BLOCK_TX = 250 MAX_BLOCK_TX = 250
def parse_transaction(chain_spec, rpc, tx, sender_address=None):
try:
transfer_data = ERC20.parse_transfer_request(tx['input'])
tx_address = transfer_data[0]
tx_token_value = transfer_data[1]
logg.debug('matched transfer transaction {} in block {} sender {} recipient {} value {}'.format(tx['hash'], tx['block_number'], tx['from'], tx_address, tx_token_value))
return (tx_address, tx_token_value)
except RequestMismatchException:
pass
try:
transfer_data = Faucet.parse_give_to_request(tx['input'])
tx_address = transfer_data[0]
c = Faucet(chain_spec)
o = c.token_amount(tx['to'], sender_address=sender_address, height=tx['block_number'])
r = rpc.do(o)
tx_token_value = Faucet.parse_token_amount(r)
logg.debug('matched giveto transaction {} in block {} sender {} recipient {} value {}'.format(tx['hash'], tx['block_number'], tx['from'], tx_address, tx_token_value))
return (tx_address, tx_token_value)
except RequestMismatchException:
pass
return None
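
A sketch of how the bloom filter task below consumes the helper; tx is the normalized dict produced by Tx(tx).src(), and parse_transaction is the function defined above:

def match_transfer(chain_spec, rpc, tx, address, sender_address=None):
    # returns (recipient, token_value) when tx is an ERC20 transfer or a
    # faucet giveto aimed at address, None otherwise
    transfer_data = parse_transaction(chain_spec, rpc, tx, sender_address=sender_address)
    if transfer_data == None:
        return None
    if transfer_data[0] != address:
        return None
    return transfer_data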
# TODO: Make this method easier to read # TODO: Make this method easier to read
@celery_app.task(bind=True, base=BaseTask) @celery_app.task(bind=True, base=BaseTask)
def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict): def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
@ -71,36 +101,39 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
tx_filter = moolb.Bloom(databitlen, bloomspec['filter_rounds'], default_data=tx_filter_data) tx_filter = moolb.Bloom(databitlen, bloomspec['filter_rounds'], default_data=tx_filter_data)
txs = {} txs = {}
logg.debug('processing filter with span low {} to high {}'.format(bloomspec['low'], bloomspec['high']))
for block_height in range(bloomspec['low'], bloomspec['high']): for block_height in range(bloomspec['low'], bloomspec['high']):
block_height_bytes = block_height.to_bytes(4, 'big') block_height_bytes = block_height.to_bytes(4, 'big')
if block_filter.check(block_height_bytes): if block_filter.check(block_height_bytes):
logg.debug('filter matched block {}'.format(block_height)) logg.debug('filter matched block {}'.format(block_height))
o = block_by_number(block_height) o = block_by_number(block_height)
block = rpc.do(o) block = rpc.do(o)
logg.debug('block {}'.format(block))
for tx_index in range(0, len(block['transactions'])): for tx_index in range(0, len(block['transactions'])):
composite = tx_index + block_height tx_index_bytes = tx_index.to_bytes(4, 'big')
tx_index_bytes = composite.to_bytes(4, 'big') composite = block_height_bytes + tx_index_bytes
if tx_filter.check(tx_index_bytes): if tx_filter.check(composite):
logg.debug('filter matched block {} tx {}'.format(block_height, tx_index)) logg.debug('filter matched block {} tx {}'.format(block_height, tx_index))
try:
#tx = c.w3.eth.getTransactionByBlock(block_height, tx_index)
o = transaction_by_block(block['hash'], tx_index) o = transaction_by_block(block['hash'], tx_index)
try:
tx = rpc.do(o) tx = rpc.do(o)
except Exception as e: except Exception as e:
logg.debug('false positive on block {} tx {} ({})'.format(block_height, tx_index, e)) logg.debug('false positive on block {} tx {} ({})'.format(block_height, tx_index, e))
continue continue
tx = Tx(tx).src()
logg.debug('got tx {}'.format(tx))
tx_address = None tx_address = None
tx_token_value = 0 tx_token_value = 0
try:
transfer_data = ERC20.parse_transfer_request(tx['data']) transfer_data = parse_transaction(chain_spec, rpc, tx, sender_address=BaseTask.call_address)
if transfer_data == None:
continue
tx_address = transfer_data[0] tx_address = transfer_data[0]
tx_token_value = transfer_data[1] tx_token_value = transfer_data[1]
except ValueError:
logg.debug('not a transfer transaction, skipping {}'.format(tx))
continue
if address == tx_address: if address == tx_address:
status = StatusEnum.SENT status = StatusEnum.SENT
try: try:
@ -136,6 +169,7 @@ def list_tx_by_bloom(self, bloomspec, address, chain_spec_dict):
return txs return txs
# TODO: Surely it must be possible to optimize this # TODO: Surely it must be possible to optimize this
# TODO: DRY this with callback filter in cic_eth/runnable/manager # TODO: DRY this with callback filter in cic_eth/runnable/manager
# TODO: Remove redundant fields from end representation (timestamp, tx_hash) # TODO: Remove redundant fields from end representation (timestamp, tx_hash)
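
A worked example of the corrected composite key above: concatenating the two 4-byte big-endian encodings is unambiguous, whereas the old integer sum collided (block 5, tx 3 and block 3, tx 5 both summed to 8):

block_height = 5
tx_index = 3
block_height_bytes = block_height.to_bytes(4, 'big')  # b'\x00\x00\x00\x05'
tx_index_bytes = tx_index.to_bytes(4, 'big')          # b'\x00\x00\x00\x03'
composite = block_height_bytes + tx_index_bytes       # 8 bytes, unique per (block, tx)
assert composite.hex() == '0000000500000003'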

View File

@ -7,54 +7,30 @@ import json
import argparse import argparse
# external imports # external imports
import celery
import confini
import redis import redis
from xdg.BaseDirectory import xdg_config_home from xdg.BaseDirectory import xdg_config_home
from chainlib.chain import ChainSpec
# local imports # local imports
import cic_eth.cli
from cic_eth.api import Api from cic_eth.api import Api
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger('create_account_script') logg = logging.getLogger()
logging.getLogger('confini').setLevel(logging.WARNING)
logging.getLogger('gnupg').setLevel(logging.WARNING)
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic') arg_flags = cic_eth.cli.argflag_std_base
local_arg_flags = cic_eth.cli.argflag_local_taskcallback
argparser = argparse.ArgumentParser() argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.add_argument('--no-register', dest='no_register', action='store_true', help='Do not register new account in on-chain accounts index') argparser.add_argument('--no-register', dest='no_register', action='store_true', help='Do not register new account in on-chain accounts index')
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file') argparser.process_local_flags(local_arg_flags)
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
argparser.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback')
argparser.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback')
argparser.add_argument('--timeout', default=20.0, type=float, help='Callback timeout')
argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
args = argparser.parse_args() args = argparser.parse_args()
if args.vv: extra_args = {
logg.setLevel(logging.DEBUG) 'no_register': None,
if args.v:
logg.setLevel(logging.INFO)
config_dir = args.c
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
config.process()
args_override = {
'CIC_CHAIN_SPEC': getattr(args, 'i'),
'REDIS_HOST': getattr(args, 'redis_host'),
'REDIS_PORT': getattr(args, 'redis_port'),
'REDIS_DB': getattr(args, 'redis_db'),
} }
config.dict_override(args_override, 'cli') config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
celery_app = cic_eth.cli.CeleryApp.from_config(config)
def main(): def main():
redis_host = config.get('REDIS_HOST') redis_host = config.get('REDIS_HOST')
@ -68,20 +44,20 @@ def main():
ps.get_message() ps.get_message()
api = Api( api = Api(
config.get('CIC_CHAIN_SPEC'), config.get('CHAIN_SPEC'),
queue=args.q, queue=config.get('CELERY_QUEUE'),
callback_param='{}:{}:{}:{}'.format(args.redis_host_callback, args.redis_port_callback, redis_db, redis_channel), callback_param='{}:{}:{}:{}'.format(config.get('_REDIS_HOST_CALLBACK'), config.get('_REDIS_PORT_CALLBACK'), config.get('REDIS_DB'), redis_channel),
callback_task='cic_eth.callbacks.redis.redis', callback_task='cic_eth.callbacks.redis.redis',
callback_queue=args.q, callback_queue=config.get('CELERY_QUEUE'),
) )
register = not args.no_register register = not config.get('_NO_REGISTER')
logg.debug('register {}'.format(register)) logg.debug('register {}'.format(register))
t = api.create_account(register=register) t = api.create_account(register=register)
ps.get_message() ps.get_message()
try: try:
o = ps.get_message(timeout=args.timeout) o = ps.get_message(timeout=config.get('REDIS_TIMEOUT'))
except TimeoutError as e: except TimeoutError as e:
sys.stderr.write('got no new address from cic-eth before timeout: {}\n'.format(e)) sys.stderr.write('got no new address from cic-eth before timeout: {}\n'.format(e))
sys.exit(1) sys.exit(1)
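
The callback parameter assembled above follows the host:port:db:channel format the redis callback task parses; a small sketch of that assembly:

def make_callback_param(config, redis_channel):
    # matches the '{}:{}:{}:{}' string passed to Api(callback_param=...)
    return '{}:{}:{}:{}'.format(
        config.get('_REDIS_HOST_CALLBACK'),
        config.get('_REDIS_PORT_CALLBACK'),
        config.get('REDIS_DB'),
        redis_channel,
    )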

View File

@ -12,64 +12,38 @@ from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.eth.address import is_checksum_address from chainlib.eth.address import is_checksum_address
# local imports # local imports
import cic_eth.cli
from cic_eth.api.admin import AdminApi from cic_eth.api.admin import AdminApi
from cic_eth.db.enum import LockEnum from cic_eth.db.enum import LockEnum
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
default_format = 'terminal'
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
argparser = argparse.ArgumentParser() arg_flags = cic_eth.cli.argflag_std_read
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)') local_arg_flags = cic_eth.cli.argflag_local_task | cic_eth.cli.argflag_local_chain
argparser.add_argument('-f', '--format', dest='f', default=default_format, type=str, help='Output format') argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use') argparser.add_argument('--no-register', dest='no_register', action='store_true', help='Do not register new account in on-chain accounts index')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec') argparser.process_local_flags(local_arg_flags)
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
def process_lock_args(argparser): def process_lock_args(argparser):
argparser.add_argument('flags', type=str, help='Flags to manipulate') argparser.add_argument('flags', type=str, help='Flags to manipulate')
argparser.add_argument('address', default=ZERO_ADDRESS, nargs='?', type=str, help='Ethereum address to unlock') argparser.add_argument('address', default=ZERO_ADDRESS, nargs='?', type=str, help='Ethereum address to unlock')
sub = argparser.add_subparsers() sub = argparser.add_subparsers(help='')
sub.dest = "command" sub.dest = "command"
sub_lock = sub.add_parser('lock', help='Set or reset locks') sub_lock = sub.add_parser('lock', help='Set or reset locks')
sub_unlock = sub.add_parser('unlock', help='Set or reset locks') sub_unlock = sub.add_parser('unlock', help='Set or reset locks')
process_lock_args(sub_lock) process_lock_args(sub_lock)
process_lock_args(sub_unlock) process_lock_args(sub_unlock)
args = argparser.parse_args() args = argparser.parse_args()
if args.v == True: config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)
logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config_dir = os.path.join(args.c) celery_app = cic_eth.cli.CeleryApp.from_config(config)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
args_override = {
'ETH_PROVIDER': getattr(args, 'p'),
'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
# override args
config.dict_override(args_override, 'cli')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
queue = args.q
chain_spec = None
if config.get('CIC_CHAIN_SPEC') != None and config.get('CIC_CHAIN_SPEC') != '::':
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
admin_api = AdminApi(None) admin_api = AdminApi(None)
@ -100,7 +74,7 @@ def main():
args.address, args.address,
flags, flags,
], ],
queue=queue, queue=config.get('CELERY_QUEUE'),
) )
t = s.apply_async() t = s.apply_async()
logg.debug('unlock {} on {} task {}'.format(flags, args.address, t)) logg.debug('unlock {} on {} task {}'.format(flags, args.address, t))
@ -119,7 +93,7 @@ def main():
args.address, args.address,
flags, flags,
], ],
queue=queue, queue=config.get('CELERY_QUEUE'),
) )
t = s.apply_async() t = s.apply_async()
logg.debug('lock {} on {} task {}'.format(flags, args.address, t)) logg.debug('lock {} on {} task {}'.format(flags, args.address, t))

View File

@ -8,8 +8,7 @@ import sys
import re import re
import datetime import datetime
# third-party imports # external imports
import confini
import celery import celery
from cic_eth_registry import CICRegistry from cic_eth_registry import CICRegistry
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
@ -24,7 +23,7 @@ from chainqueue.error import NotLocalTxError
from chainqueue.sql.state import set_reserved from chainqueue.sql.state import set_reserved
# local imports # local imports
import cic_eth import cic_eth.cli
from cic_eth.db import SessionBase from cic_eth.db import SessionBase
from cic_eth.db.enum import LockEnum from cic_eth.db.enum import LockEnum
from cic_eth.db import dsn_from_config from cic_eth.db import dsn_from_config
@ -39,51 +38,30 @@ from cic_eth.error import (
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_sync | cic_eth.cli.argflag_local_task
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()
config_dir = os.path.join('/usr/local/etc/cic-eth') config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') # connect to celery
argparser.add_argument('-p', '--provider', default='http://localhost:8545', dest='p', type=str, help='rpc provider') celery_app = cic_eth.cli.CeleryApp.from_config(config)
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args(sys.argv[1:])
if args.v == True:
logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
# override args
args_override = {
'CIC_CHAIN_SPEC': getattr(args, 'i'),
'ETH_PROVIDER': getattr(args, 'p'),
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
queue = args.q
# connect to database
dsn = dsn_from_config(config) dsn = dsn_from_config(config)
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, tag='default') # set up rpc
rpc = cic_eth.cli.RPC.from_config(config)
conn = rpc.get_default()
run = True run = True
class DispatchSyncer: class DispatchSyncer:
yield_delay = 0.0005 yield_delay = 0.0005

View File

@ -6,7 +6,6 @@ import argparse
import re import re
# external imports # external imports
import confini
import celery import celery
from cic_eth_registry import CICRegistry from cic_eth_registry import CICRegistry
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
@ -14,6 +13,7 @@ from chainlib.connection import RPCConnection
from chainsyncer.filter import SyncFilter from chainsyncer.filter import SyncFilter
# local imports # local imports
import cic_eth.cli
from cic_eth.db import dsn_from_config from cic_eth.db import dsn_from_config
from cic_eth.db import SessionBase from cic_eth.db import SessionBase
from cic_eth.admin.ctrl import lock_send from cic_eth.admin.ctrl import lock_send
@ -25,66 +25,41 @@ from cic_eth.stat import init_chain_stat
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
config_dir = os.path.join('/usr/local/etc/cic-eth') arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_sync | cic_eth.cli.argflag_local_task
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.add_argument('-p', '--provider', dest='p', type=str, help='rpc provider')
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--batch-size', dest='batch_size', type=int, default=50, help='max amount of txs to resend per iteration') argparser.add_argument('--batch-size', dest='batch_size', type=int, default=50, help='max amount of txs to resend per iteration')
argparser.add_argument('--retry-delay', dest='retry_delay', type=int, help='seconds to wait for retrying a transaction that is marked as sent') argparser.add_argument('--retry-delay', dest='retry_delay', type=int, default=20, help='seconds to wait for retrying a transaction that is marked as sent')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') argparser.process_local_flags(local_arg_flags)
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to') args = argparser.parse_args()
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args(sys.argv[1:])
extra_args = {
if args.v == True: 'retry_delay': 'RETRY_DELAY',
logging.getLogger().setLevel(logging.INFO) 'batch_size': 'RETRY_BATCH_SIZE',
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
# override args
args_override = {
'ETH_PROVIDER': getattr(args, 'p'),
'CIC_CHAIN_SPEC': getattr(args, 'i'),
'CIC_TX_RETRY_DELAY': getattr(args, 'retry_delay'),
} }
config.dict_override(args_override, 'cli flag') config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
config.add(args.batch_size, '_BATCH_SIZE', True)
app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
queue = args.q
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, tag='default')
# connect to database
dsn = dsn_from_config(config) dsn = dsn_from_config(config)
SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
# set up rpc
rpc = cic_eth.cli.RPC.from_config(config)
conn = rpc.get_default()
def main(): def main():
conn = RPCConnection.connect(chain_spec, 'default') straggler_delay = int(config.get('RETRY_DELAY'))
straggler_delay = int(config.get('CIC_TX_RETRY_DELAY'))
loop_interval = config.get('SYNCER_LOOP_INTERVAL') loop_interval = config.get('SYNCER_LOOP_INTERVAL')
if loop_interval == None: if loop_interval == None:
stat = init_chain_stat(conn) stat = init_chain_stat(conn)
loop_interval = stat.block_average() loop_interval = stat.block_average()
syncer = RetrySyncer(conn, chain_spec, straggler_delay, batch_size=config.get('_BATCH_SIZE')) syncer = RetrySyncer(conn, chain_spec, cic_eth.cli.chain_interface, straggler_delay, batch_size=config.get('RETRY_BATCH_SIZE'))
syncer.backend.set(0, 0) syncer.backend.set(0, 0)
fltr = StragglerFilter(chain_spec, queue=queue) fltr = StragglerFilter(chain_spec, queue=config.get('CELERY_QUEUE'))
syncer.add_filter(fltr) syncer.add_filter(fltr)
syncer.loop(int(loop_interval), conn) syncer.loop(int(loop_interval), conn)

View File

@ -21,14 +21,17 @@ from chainlib.eth.connection import (
EthUnixSignerConnection, EthUnixSignerConnection,
EthHTTPSignerConnection, EthHTTPSignerConnection,
) )
from chainlib.eth.address import to_checksum_address
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from cic_eth_registry.error import UnknownContractError from cic_eth_registry.error import UnknownContractError
from cic_eth_registry.erc20 import ERC20Token from cic_eth_registry.erc20 import ERC20Token
from hexathon import add_0x
import liveness.linux import liveness.linux
# local imports # local imports
import cic_eth.cli
from cic_eth.eth import ( from cic_eth.eth import (
erc20, erc20,
tx, tx,
@ -70,114 +73,53 @@ from cic_eth.task import BaseTask
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
script_dir = os.path.dirname(os.path.realpath(__file__)) arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_task
config_dir = os.path.join('/usr/local/etc/cic-eth') argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
argparser = argparse.ArgumentParser()
argparser.add_argument('-p', '--provider', dest='p', type=str, help='rpc provider')
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
argparser.add_argument('-r', type=str, help='CIC registry address')
argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use') argparser.add_argument('--default-token-symbol', dest='default_token_symbol', type=str, help='Symbol of default token to use')
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to persist all queue entry status changes to storage') argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to persist all queue entry status changes to storage')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--aux-all', action='store_true', help='include tasks from all submodules from the aux module path') argparser.add_argument('--aux-all', action='store_true', help='include tasks from all submodules from the aux module path')
argparser.add_argument('--aux', action='append', type=str, default=[], help='add single submodule from the aux module path') argparser.add_argument('--aux', action='append', type=str, default=[], help='add single submodule from the aux module path')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args() args = argparser.parse_args()
if args.vv: # process config
logging.getLogger().setLevel(logging.DEBUG) extra_args = {
elif args.v: 'default_token_symbol': 'CIC_DEFAULT_TOKEN_SYMBOL',
logging.getLogger().setLevel(logging.INFO) 'aux_all': None,
'aux': None,
config = confini.Config(args.c, args.env_prefix) 'trace_queue_status': 'TASKS_TRACE_QUEUE_STATUS',
config.process()
# override args
args_override = {
'CIC_CHAIN_SPEC': getattr(args, 'i'),
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
'CIC_DEFAULT_TOKEN_SYMBOL': getattr(args, 'default_token_symbol'),
'ETH_PROVIDER': getattr(args, 'p'),
'TASKS_TRACE_QUEUE_STATUS': getattr(args, 'trace_queue_status'),
} }
config.add(args.q, '_CELERY_QUEUE', True) config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
health_modules = config.get('CIC_HEALTH_MODULES', []) # connect to celery
if len(health_modules) != 0: celery_app = cic_eth.cli.CeleryApp.from_config(config)
health_modules = health_modules.split(',')
logg.debug('health mods {}'.format(health_modules))
# set up rpc
rpc = cic_eth.cli.RPC.from_config(config, use_signer=True)
conn = rpc.get_default()
# connect to database # connect to database
dsn = dsn_from_config(config) dsn = dsn_from_config(config)
SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
# set up celery
current_app = celery.Celery(__name__)
broker = config.get('CELERY_BROKER_URL')
if broker[:4] == 'file':
bq = tempfile.mkdtemp()
bp = tempfile.mkdtemp()
conf_update = {
'broker_url': broker,
'broker_transport_options': {
'data_folder_in': bq,
'data_folder_out': bq,
'data_folder_processed': bp,
},
}
if config.true('CELERY_DEBUG'):
conf_update['result_extended'] = True
current_app.conf.update(conf_update)
logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp))
else:
conf_update = {
'broker_url': broker,
}
if config.true('CELERY_DEBUG'):
conf_update['result_extended'] = True
current_app.conf.update(conf_update)
result = config.get('CELERY_RESULT_URL')
if result[:4] == 'file':
rq = tempfile.mkdtemp()
current_app.conf.update({
'result_backend': 'file://{}'.format(rq),
})
logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(rq))
else:
current_app.conf.update({
'result_backend': result,
})
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, 'signer')
RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, 'signer')
RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, 'signer')
RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
RPCConnection.register_location(config.get('SIGNER_SOCKET_PATH'), chain_spec, 'signer')
Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS') Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS')
#import cic_eth.checks.gas
#if not cic_eth.checks.gas.health(config=config): # execute health checks
# raise RuntimeError() # TODO: health should be separate service with endpoint that can be queried
health_modules = config.get('CIC_HEALTH_MODULES', [])
if len(health_modules) != 0:
health_modules = health_modules.split(',')
logg.debug('health mods {}'.format(health_modules))
liveness.linux.load(health_modules, rundir=config.get('CIC_RUN_DIR'), config=config, unit='cic-eth-tasker') liveness.linux.load(health_modules, rundir=config.get('CIC_RUN_DIR'), config=config, unit='cic-eth-tasker')
rpc = RPCConnection.connect(chain_spec, 'default')
# set up chain provisions
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
registry = None
try: try:
registry = connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS')) registry = connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
except UnknownContractError as e: except UnknownContractError as e:
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e)) logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
sys.exit(1) sys.exit(1)
@ -188,15 +130,15 @@ if trusted_addresses_src == None:
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS') logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
sys.exit(1) sys.exit(1)
trusted_addresses = trusted_addresses_src.split(',') trusted_addresses = trusted_addresses_src.split(',')
for address in trusted_addresses: for i, address in enumerate(trusted_addresses):
if config.get('_UNSAFE'):
trusted_addresses[i] = to_checksum_address(address)
logg.info('using trusted address {}'.format(address)) logg.info('using trusted address {}'.format(address))
connect_declarator(conn, chain_spec, trusted_addresses)
connect_token_registry(conn, chain_spec)
connect_declarator(rpc, chain_spec, trusted_addresses) # detect auxiliary task modules (plugins)
connect_token_registry(rpc, chain_spec)
# detect aux
# TODO: move to separate file # TODO: move to separate file
#aux_dir = os.path.join(script_dir, '..', '..', 'aux')
aux = [] aux = []
if args.aux_all: if args.aux_all:
if len(args.aux) > 0: if len(args.aux) > 0:
@ -249,36 +191,24 @@ elif len(args.aux) > 0:
for v in aux: for v in aux:
mname = 'cic_eth_aux.' + v mname = 'cic_eth_aux.' + v
mod = importlib.import_module(mname) mod = importlib.import_module(mname)
mod.aux_setup(rpc, config) mod.aux_setup(conn, config)
logg.info('loaded aux module {}'.format(mname)) logg.info('loaded aux module {}'.format(mname))
def main(): def main():
argv = ['worker'] argv = ['worker']
if args.vv: log_level = logg.getEffectiveLevel()
argv.append('--loglevel=DEBUG') log_level_name = logging.getLevelName(log_level)
elif args.v: argv.append('--loglevel=' + log_level_name)
argv.append('--loglevel=INFO')
argv.append('-Q') argv.append('-Q')
argv.append(args.q) argv.append(config.get('CELERY_QUEUE'))
argv.append('-n') argv.append('-n')
argv.append(args.q) argv.append(config.get('CELERY_QUEUE'))
# if config.true('SSL_ENABLE_CLIENT'):
# Callback.ssl = True
# Callback.ssl_cert_file = config.get('SSL_CERT_FILE')
# Callback.ssl_key_file = config.get('SSL_KEY_FILE')
# Callback.ssl_password = config.get('SSL_PASSWORD')
#
# if config.get('SSL_CA_FILE') != '':
# Callback.ssl_ca_file = config.get('SSL_CA_FILE')
rpc = RPCConnection.connect(chain_spec, 'default')
BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL') BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol) BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
default_token = ERC20Token(chain_spec, rpc, BaseTask.default_token_address) default_token = ERC20Token(chain_spec, conn, BaseTask.default_token_address)
default_token.load(rpc) default_token.load(conn)
BaseTask.default_token_decimals = default_token.decimals BaseTask.default_token_decimals = default_token.decimals
BaseTask.default_token_name = default_token.name BaseTask.default_token_name = default_token.name
@ -286,13 +216,13 @@ def main():
logg.info('default token set to {} {}'.format(BaseTask.default_token_symbol, BaseTask.default_token_address)) logg.info('default token set to {} {}'.format(BaseTask.default_token_symbol, BaseTask.default_token_address))
liveness.linux.set(rundir=config.get('CIC_RUN_DIR')) liveness.linux.set(rundir=config.get('CIC_RUN_DIR'))
current_app.worker_main(argv) celery_app.worker_main(argv)
liveness.linux.reset(rundir=config.get('CIC_RUN_DIR')) liveness.linux.reset(rundir=config.get('CIC_RUN_DIR'))
@celery.signals.eventlet_pool_postshutdown.connect @celery.signals.eventlet_pool_postshutdown.connect
def shutdown(sender=None, headers=None, body=None, **kwargs): def shutdown(sender=None, headers=None, body=None, **kwargs):
logg.warning('in shudown event hook') logg.warning('in shutdown event hook')
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -8,14 +8,6 @@ import sys
import re import re
# external imports # external imports
import confini
import celery
import rlp
import cic_base.config
import cic_base.log
import cic_base.argparse
import cic_base.rpc
from cic_base.eth.syncer import chain_interface
from cic_eth_registry.error import UnknownContractError from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS from chainlib.eth.constant import ZERO_ADDRESS
@ -30,8 +22,13 @@ from chainsyncer.backend.sql import SQLBackend
from chainsyncer.driver.head import HeadSyncer from chainsyncer.driver.head import HeadSyncer
from chainsyncer.driver.history import HistorySyncer from chainsyncer.driver.history import HistorySyncer
from chainsyncer.db.models.base import SessionBase from chainsyncer.db.models.base import SessionBase
from chainlib.eth.address import (
is_checksum_address,
to_checksum_address,
)
# local imports # local imports
import cic_eth.cli
from cic_eth.db import dsn_from_config from cic_eth.db import dsn_from_config
from cic_eth.runnable.daemons.filters import ( from cic_eth.runnable.daemons.filters import (
CallbackFilter, CallbackFilter,
@ -47,61 +44,50 @@ from cic_eth.registry import (
connect_token_registry, connect_token_registry,
) )
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
script_dir = os.path.realpath(os.path.dirname(__file__)) arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_sync
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()
def add_block_args(argparser): # process config
argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync') config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)
argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
return argparser
# connect to celery
cic_eth.cli.CeleryApp.from_config(config)
logg = cic_base.log.create() # set up database
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
args = cic_base.argparse.parse(argparser, logg)
config = cic_base.config.create(args.c, args, args.env_prefix)
config.add(args.y, '_KEYSTORE_FILE', True)
config.add(args.q, '_CELERY_QUEUE', True)
config.add(args.history_start, 'SYNCER_HISTORY_START', True)
config.add(args.no_history, '_NO_HISTORY', True)
cic_base.config.log(config)
dsn = dsn_from_config(config) dsn = dsn_from_config(config)
SessionBase.connect(dsn, pool_size=16, debug=config.true('DATABASE_DEBUG')) SessionBase.connect(dsn, pool_size=16, debug=config.true('DATABASE_DEBUG'))
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) # set up rpc
rpc = cic_eth.cli.RPC.from_config(config)
conn = rpc.get_default()
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER')) # set up chain provisions
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
rpc = RPCConnection.connect(chain_spec, 'default')
registry = None registry = None
try: try:
registry = connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS')) registry = connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
except UnknownContractError as e: except UnknownContractError as e:
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e)) logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
sys.exit(1) sys.exit(1)
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS'))) logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))
def main(): def main():
# connect to celery
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
# Connect to blockchain with chainlib # Connect to blockchain with chainlib
o = block_latest() o = block_latest()
r = rpc.do(o) r = conn.do(o)
block_current = int(r, 16) block_current = int(r, 16)
block_offset = block_current + 1 block_offset = block_current + 1
loop_interval = config.get('SYNCER_LOOP_INTERVAL') loop_interval = config.get('SYNCER_LOOP_INTERVAL')
if loop_interval == None: if loop_interval == None:
stat = init_chain_stat(rpc, block_start=block_current) stat = init_chain_stat(conn, block_start=block_current)
loop_interval = stat.block_average() loop_interval = stat.block_average()
logg.debug('current block height {}'.format(block_offset)) logg.debug('current block height {}'.format(block_offset))
@ -113,9 +99,9 @@ def main():
syncer_backends = SQLBackend.resume(chain_spec, block_offset) syncer_backends = SQLBackend.resume(chain_spec, block_offset)
if len(syncer_backends) == 0: if len(syncer_backends) == 0:
initial_block_start = config.get('SYNCER_HISTORY_START') initial_block_start = config.get('SYNCER_OFFSET')
initial_block_offset = block_offset initial_block_offset = block_offset
if config.get('_NO_HISTORY'): if config.true('SYNCER_NO_HISTORY'):
initial_block_start = block_offset initial_block_start = block_offset
initial_block_offset += 1 initial_block_offset += 1
syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start)) syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
@ -128,40 +114,45 @@ def main():
for syncer_backend in syncer_backends: for syncer_backend in syncer_backends:
try: try:
syncers.append(HistorySyncer(syncer_backend, chain_interface)) syncers.append(HistorySyncer(syncer_backend, cic_eth.cli.chain_interface))
logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend)) logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
except AttributeError: except AttributeError:
logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend)) logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
syncers.append(HeadSyncer(syncer_backend, chain_interface)) syncers.append(HeadSyncer(syncer_backend, cic_eth.cli.chain_interface))
connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS')) connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS') trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
if trusted_addresses_src == None: if trusted_addresses_src == None:
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS') logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
sys.exit(1) sys.exit(1)
trusted_addresses = trusted_addresses_src.split(',') trusted_addresses = trusted_addresses_src.split(',')
for address in trusted_addresses: for i, address in enumerate(trusted_addresses):
if not config.get('_UNSAFE'):
if not is_checksum_address(address):
raise ValueError('address {} is not a valid checksum address'.format(address))
else:
trusted_addresses[i] = to_checksum_address(address)
logg.info('using trusted address {}'.format(address)) logg.info('using trusted address {}'.format(address))
connect_declarator(rpc, chain_spec, trusted_addresses) connect_declarator(conn, chain_spec, trusted_addresses)
connect_token_registry(rpc, chain_spec) connect_token_registry(conn, chain_spec)
CallbackFilter.trusted_addresses = trusted_addresses CallbackFilter.trusted_addresses = trusted_addresses
callback_filters = [] callback_filters = []
for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','): for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','):
task_split = cb.split(':') task_split = cb.split(':')
task_queue = config.get('_CELERY_QUEUE') task_queue = config.get('CELERY_QUEUE')
if len(task_split) > 1: if len(task_split) > 1:
task_queue = task_split[0] task_queue = task_split[0]
callback_filter = CallbackFilter(chain_spec, task_split[1], task_queue) callback_filter = CallbackFilter(chain_spec, task_split[1], task_queue)
callback_filters.append(callback_filter) callback_filters.append(callback_filter)
tx_filter = TxFilter(chain_spec, config.get('_CELERY_QUEUE')) tx_filter = TxFilter(chain_spec, config.get('CELERY_QUEUE'))
account_registry_address = registry.by_name('AccountRegistry') account_registry_address = registry.by_name('AccountRegistry')
registration_filter = RegistrationFilter(chain_spec, account_registry_address, queue=config.get('_CELERY_QUEUE')) registration_filter = RegistrationFilter(chain_spec, account_registry_address, queue=config.get('CELERY_QUEUE'))
gas_filter = GasFilter(chain_spec, config.get('_CELERY_QUEUE')) gas_filter = GasFilter(chain_spec, config.get('CELERY_QUEUE'))
#transfer_auth_filter = TransferAuthFilter(registry, chain_spec, config.get('_CELERY_QUEUE')) #transfer_auth_filter = TransferAuthFilter(registry, chain_spec, config.get('_CELERY_QUEUE'))
@ -176,7 +167,7 @@ def main():
for cf in callback_filters: for cf in callback_filters:
syncer.add_filter(cf) syncer.add_filter(cf)
r = syncer.loop(int(loop_interval), rpc) r = syncer.loop(int(loop_interval), conn)
sys.stderr.write("sync {} done at block {}\n".format(syncer, r)) sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
i += 1 i += 1

View File

@ -12,50 +12,27 @@ import confini
import celery import celery
# local imports # local imports
import cic_eth.cli
from cic_eth.api import Api from cic_eth.api import Api
from cic_eth.api.admin import AdminApi from cic_eth.api.admin import AdminApi
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
default_format = 'terminal' arg_flags = cic_eth.cli.argflag_std_base
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic') local_arg_flags = cic_eth.cli.argflag_local_taskcallback
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
argparser = argparse.ArgumentParser()
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
args = argparser.parse_args() args = argparser.parse_args()
if args.v == True: config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)
logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config_dir = os.path.join(args.c) celery_app = cic_eth.cli.CeleryApp.from_config(config)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
args_override = {
'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
config.dict_override(args_override, 'cli args')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
api = Api(config.get('CHAIN_SPEC'), queue=config.get('CELERY_QUEUE'))
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
queue = args.q
api = Api(config.get('CIC_CHAIN_SPEC'), queue=queue)
admin_api = AdminApi(None) admin_api = AdminApi(None)
def main(): def main():
t = admin_api.registry() t = admin_api.registry()
registry_address = t.get() registry_address = t.get()

View File

@ -5,65 +5,38 @@ import re
import os import os
# third-party imports # third-party imports
import celery
import confini
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.connection import EthHTTPConnection from chainlib.eth.connection import EthHTTPConnection
# local imports # local imports
import cic_eth.cli
from cic_eth.api.admin import AdminApi from cic_eth.api.admin import AdminApi
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
logging.getLogger('web3').setLevel(logging.WARNING) arg_flags = cic_eth.cli.argflag_std_base
logging.getLogger('urllib3').setLevel(logging.WARNING) local_arg_flags = cic_eth.cli.argflag_local_taskcallback
argparser = cic_eth.cli.ArgumentParser(arg_flags)
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic') argparser.add_argument('--unlock', action='store_true', help='Unlock account after resend')
argparser.add_positional('tx_hash', type=str, help='Transaction hash')
argparser.process_local_flags(local_arg_flags)
argparser = argparse.ArgumentParser() extra_args = {
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use') 'unlock': None,
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)') 'tx_hash': None,
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, default='Ethereum:1', help='Chain specification string') }
argparser.add_argument('--unlock', action='store_true', help='Append task to unlock account')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
argparser.add_argument('tx_hash', type=str, help='Transaction hash')
args = argparser.parse_args() args = argparser.parse_args()
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
if args.vv: chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
logg.setLevel(logging.DEBUG)
elif args.v:
logg.setLevel(logging.INFO)
config_dir = os.path.join(args.c) celery_app = cic_eth.cli.CeleryApp.from_config(config)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process()
args_override = {
'ETH_PROVIDER': getattr(args, 'p'),
'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
# override args
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
config.add(args.tx_hash, '_TX_HASH', True)
config.add(args.unlock, '_UNLOCK', True)
chain_spec = ChainSpec.from_chain_str(args.i)
rpc = EthHTTPConnection(config.get('ETH_PROVIDER'))
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
def main(): def main():
api = AdminApi(rpc) api = AdminApi(None)
tx_details = api.tx(chain_spec, args.tx_hash) tx_details = api.tx(chain_spec, config.get('_TX_HASH'))
t = api.resend(args.tx_hash, chain_spec, unlock=config.get('_UNLOCK')) t = api.resend(args.tx_hash, chain_spec, unlock=config.get('_UNLOCK'))
print(t.get_leaf()) print(t.get_leaf())

View File

@ -6,8 +6,7 @@ import argparse
import re import re
# external imports # external imports
import celery import cic_eth.cli
import confini
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from xdg.BaseDirectory import xdg_config_home from xdg.BaseDirectory import xdg_config_home
@ -19,43 +18,28 @@ from cic_eth.db.models.base import SessionBase
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic') arg_flags = cic_eth.cli.argflag_std_base
local_arg_flags = cic_eth.cli.argflag_local_taskcallback
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.add_positional('tag', type=str, help='address tag')
argparser.add_positional('address', type=str, help='address')
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') celery_app = cic_eth.cli.CeleryApp.from_config(config)
argparser.add_argument('-p', '--provider', dest='p', type=str, help='Web3 provider url (http only)')
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
argparser.add_argument('tag', type=str, help='address tag')
argparser.add_argument('address', type=str, help='address')
args = argparser.parse_args(sys.argv[1:])
if args.v == True: admin_api = AdminApi(None)
logging.getLogger().setLevel(logging.INFO)
elif args.vv == True:
logging.getLogger().setLevel(logging.DEBUG)
config = confini.Config(args.c) chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
config.process()
args_override = {
'ETH_PROVIDER': getattr(args, 'p'),
'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}\n{}'.format(args.c, config))
chain_spec = ChainSpec.from_chain_str(args.i) celery_app = cic_eth.cli.CeleryApp.from_config(config)
api = AdminApi(None)
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
def main(): def main():
api = AdminApi(None) admin_api.tag_account(args.tag, args.address, chain_spec)
api.tag_account(args.tag, args.address, chain_spec)
if __name__ == '__main__': if __name__ == '__main__':

View File

@@ -7,88 +7,58 @@ import json
 import argparse

 # external imports
-import celery
-import confini
 import redis
 from xdg.BaseDirectory import xdg_config_home
 from chainlib.eth.address import to_checksum_address

 # local imports
+import cic_eth.cli
 from cic_eth.api import Api

 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger('create_account_script')
-logging.getLogger('confini').setLevel(logging.WARNING)
-logging.getLogger('gnupg').setLevel(logging.WARNING)

-default_config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
-
-argparser = argparse.ArgumentParser()
-argparser.add_argument('--no-register', dest='no_register', action='store_true', help='Do not register new account in on-chain accounts index')
-argparser.add_argument('-c', type=str, default=default_config_dir, help='config file')
-argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
-argparser.add_argument('--token-symbol', dest='token_symbol', type=str, help='Symbol of token to transfer')
-argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
-argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
-argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
-argparser.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback')
-argparser.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback')
-argparser.add_argument('--timeout', default=20.0, type=float, help='Callback timeout')
-argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue')
-argparser.add_argument('-v', action='store_true', help='Be verbose')
-argparser.add_argument('-vv', action='store_true', help='Be more verbose')
-argparser.add_argument('sender', type=str, help='Transaction sender')
-argparser.add_argument('recipient', type=str, help='Transaction recipient')
-argparser.add_argument('value', type=int, help='Transaction value with decimals')
+arg_flags = cic_eth.cli.argflag_std_base
+local_arg_flags = cic_eth.cli.argflag_local_taskcallback
+argparser = cic_eth.cli.ArgumentParser(arg_flags)
+argparser.add_argument('--token-symbol', dest='token_symbol', type=str, help='Token symbol')
+argparser.add_positional('sender', type=str, help='Token transaction sender')
+argparser.add_positional('recipient', type=str, help='Token transaction recipient')
+argparser.add_positional('value', type=int, help='Token transaction value')
+argparser.process_local_flags(local_arg_flags)
 args = argparser.parse_args()

-if args.vv:
-    logg.setLevel(logging.DEBUG)
-if args.v:
-    logg.setLevel(logging.INFO)
-
-config_dir = args.c
-config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
-config.process()
-args_override = {
-    'CIC_CHAIN_SPEC': getattr(args, 'i'),
-    'REDIS_HOST': getattr(args, 'redis_host'),
-    'REDIS_PORT': getattr(args, 'redis_port'),
-    'REDIS_DB': getattr(args, 'redis_db'),
+extra_args = {
+    'token_symbol': None,
+    'sender': None,
+    'recipient': None,
+    'value': None,
 }
-config.dict_override(args_override, 'cli')
-config.add(to_checksum_address(args.sender), '_SENDER', True)
-config.add(to_checksum_address(args.recipient), '_RECIPIENT', True)
-config.add(args.value, '_VALUE', True)
-config.add(args.token_symbol, '_SYMBOL', True)
-if config.get('_SYMBOL') == None:
-    raise ValueError('gas transfers not yet supported; token symbol required')
-
-celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
+config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags, extra_args=extra_args)
+
+celery_app = cic_eth.cli.CeleryApp.from_config(config)

 def main():
-    redis_host = config.get('REDIS_HOST')
-    redis_port = config.get('REDIS_PORT')
-    redis_db = config.get('REDIS_DB')
     redis_channel = str(uuid.uuid4())
-    r = redis.Redis(redis_host, redis_port, redis_db)
+    r = redis.Redis(config.get('REDIS_HOST'), config.get('REDIS_PORT'), config.get('REDIS_DB'))
     ps = r.pubsub()
     ps.subscribe(redis_channel)
     ps.get_message()

     api = Api(
-        config.get('CIC_CHAIN_SPEC'),
-        queue=args.q,
-        callback_param='{}:{}:{}:{}'.format(args.redis_host_callback, args.redis_port_callback, redis_db, redis_channel),
+        config.get('CHAIN_SPEC'),
+        queue=config.get('CELERY_QUEUE'),
+        callback_param='{}:{}:{}:{}'.format(config.get('_REDIS_HOST_CALLBACK'), config.get('_REDIS_PORT_CALLBACK'), config.get('REDIS_DB'), redis_channel),
         callback_task='cic_eth.callbacks.redis.redis',
-        callback_queue=args.q,
+        callback_queue=config.get('CELERY_QUEUE')
     )

-    t = api.transfer(config.get('_SENDER'), config.get('_RECIPIENT'), config.get('_VALUE'), config.get('_SYMBOL'))
+    t = api.transfer(config.get('_SENDER'), config.get('_RECIPIENT'), config.get('_VALUE'), config.get('_TOKEN_SYMBOL'))
     ps.get_message()
-    o = ps.get_message(timeout=args.timeout)
+    o = ps.get_message(timeout=config.get('REDIS_TIMEOUT'))
     m = json.loads(o['data'])
     print(m['result'])
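The transfer script keeps its redis round-trip: it subscribes to a throwaway pubsub channel, encodes host:port:db:channel into callback_param, and blocks until the worker-side callback task (cic_eth.callbacks.redis.redis) publishes the result. A minimal sketch of the client side of that handshake, assuming a local redis and a running cic-eth worker; host, port, db and timeout values are illustrative:

import json
import uuid

import redis

r = redis.Redis('localhost', 6379, 0)
redis_channel = str(uuid.uuid4())      # one-shot channel for this request
ps = r.pubsub()
ps.subscribe(redis_channel)
ps.get_message()                       # drain the subscribe confirmation

# ... submit the task with callback_param='localhost:6379:0:' + redis_channel ...

o = ps.get_message(timeout=20.0)       # block until the callback publishes
if o is None:
    raise RuntimeError('no callback received before timeout')
m = json.loads(o['data'])
print(m['result'])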

View File

@@ -3,7 +3,7 @@ import logging
 import datetime

 # external imports
-from chainsyncer.driver import HeadSyncer
+from chainsyncer.driver.head import HeadSyncer
 from chainsyncer.backend.memory import MemBackend
 from chainsyncer.error import NoBlockForYou
 from chainlib.eth.block import (
@@ -39,9 +39,9 @@ class DbSessionMemBackend(MemBackend):

 class RetrySyncer(HeadSyncer):

-    def __init__(self, conn, chain_spec, stalled_grace_seconds, batch_size=50, failed_grace_seconds=None):
+    def __init__(self, conn, chain_spec, chain_interface, stalled_grace_seconds, batch_size=50, failed_grace_seconds=None):
         backend = DbSessionMemBackend(chain_spec, None)
-        super(RetrySyncer, self).__init__(backend)
+        super(RetrySyncer, self).__init__(backend, chain_interface)
         self.chain_spec = chain_spec
         if failed_grace_seconds == None:
             failed_grace_seconds = stalled_grace_seconds
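RetrySyncer must now be constructed with the chain interface that HeadSyncer threads through to its driver. A sketch of the updated call site, assuming chainlib's EthHTTPConnection; the hunk does not show where the interface object comes from, so the helper below is hypothetical:

from chainlib.chain import ChainSpec
from chainlib.eth.connection import EthHTTPConnection

# RetrySyncer as defined in the file above
chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')
conn = EthHTTPConnection('http://localhost:8545')
chain_interface = get_chain_interface()  # hypothetical helper; any chainsyncer chain interface object works

syncer = RetrySyncer(conn, chain_spec, chain_interface, stalled_grace_seconds=60)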

View File

@@ -10,7 +10,7 @@ version = (
     0,
     12,
     2,
-    'alpha.3',
+    'alpha.4',
     )

 version_object = semver.VersionInfo(
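The bump changes the prerelease element of the version tuple, which semver renders after a hyphen. A quick check of the expected output, assuming the semver 2.x VersionInfo API:

import semver

version_object = semver.VersionInfo(major=0, minor=12, patch=2, prerelease='alpha.4')
print(str(version_object))  # 0.12.2-alpha.4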

View File

@@ -1,2 +0,0 @@
-[bancor]
-dir = /usr/local/share/cic/bancor

View File

@@ -1,4 +0,0 @@
-[celery]
-broker_url = redis://
-result_url = redis://
-debug = 0

View File

@@ -1,2 +0,0 @@
-[custody]
-account_index_address =

View File

@@ -1,10 +0,0 @@
-[database]
-NAME=cic-eth
-USER=postgres
-PASSWORD=
-HOST=localhost
-PORT=5432
-ENGINE=postgresql
-DRIVER=psycopg2
-POOL_SIZE=50
-DEBUG=0

View File

@@ -1,2 +0,0 @@
-[dispatcher]
-loop_interval = 0.9

View File

@@ -1,2 +0,0 @@
-[bancor]
-dir = /usr/local/share/cic/bancor

View File

@@ -1,4 +0,0 @@
-[celery]
-broker_url = redis://localhost:63379
-result_url = redis://localhost:63379
-debug = 0

View File

@@ -0,0 +1,2 @@
+[chain]
+spec = evm:bloxberg:8996
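The new [chain] file takes over the chain_spec key carried by the [cic] section deleted further down. The spec string round-trips through chainlib's ChainSpec, as used throughout this commit; a minimal sketch:

from chainlib.chain import ChainSpec

chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')
print(str(chain_spec))  # evm:bloxberg:8996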

View File

@@ -1,8 +0,0 @@
-[cic]
-registry_address =
-chain_spec = evm:bloxberg:8996
-trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
-tx_retry_delay = 20
-default_token_symbol = GFT
-health_modules = cic_eth.check.db,cic_eth.check.redis,cic_eth.check.signer,cic_eth.check.gas
-run_dir = /run

View File

@@ -1,2 +0,0 @@
-[custody]
-account_index_address =

View File

@@ -1,10 +0,0 @@
-[database]
-NAME=cic_eth
-USER=postgres
-PASSWORD=tralala
-HOST=localhost
-PORT=63432
-ENGINE=postgresql
-DRIVER=psycopg2
-POOL_SIZE=50
-DEBUG=0

View File

@@ -1,2 +0,0 @@
-[dispatcher]
-loop_interval = 0.9

View File

@@ -1,3 +0,0 @@
-[eth]
-provider = http://localhost:63545
-gas_gifter_minimum_balance = 10000000000000000000000

View File

@@ -1,5 +0,0 @@
-[signer]
-socket_path = ipc:///tmp/crypto-dev-signer/jsonrpc.ipc
-secret = deedbeef
-database_name = signer_test
-dev_keys_path =

View File

@@ -1,6 +0,0 @@
-[SSL]
-enable_client = false
-cert_file =
-key_file =
-password =
-ca_file =

View File

@@ -1,3 +0,0 @@
-[SYNCER]
-loop_interval =
-history_start = 0

View File

@@ -1,3 +0,0 @@
-[bancor]
-registry_address = 0xb708175e3f6Cd850643aAF7B32212AFad50e2549
-dir = /home/lash/src/ext/cic/grassrootseconomics/cic-platform/contrib/bancor_0.6

View File

@@ -1,8 +0,0 @@
-[database]
-NAME=cic-eth
-USER=postgres
-PASSWORD=
-HOST=localhost
-PORT=5432
-ENGINE=sqlite
-DRIVER=pysqlite

View File

@@ -1,3 +0,0 @@
-[eth]
-gas_provider_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
-provider = http://localhost:8545

View File

@@ -1,3 +0,0 @@
-[celery]
-broker_url = redis://
-result_url = file://

View File

@@ -1,2 +0,0 @@
-[signer]
-socket_path = /tmp/crypto-dev-signer/jsonrpc.ipc

Some files were not shown because too many files have changed in this diff.