# standard imports
import os
import sys
import logging
import time
import argparse
import re

# external imports
from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.connection import RPCConnection
from chainlib.eth.block import (
        block_latest,
        )
from hexathon import (
        strip_0x,
        )
from chainsyncer.backend.sql import SQLBackend
from chainsyncer.driver.head import HeadSyncer
from chainsyncer.driver.history import HistorySyncer
from chainsyncer.db.models.base import SessionBase
from chainlib.eth.address import (
        is_checksum_address,
        to_checksum_address,
        )

# local imports
import cic_eth.cli
from cic_eth.db import dsn_from_config
from cic_eth.runnable.daemons.filters import (
        CallbackFilter,
        GasFilter,
        TxFilter,
        RegistrationFilter,
        TransferAuthFilter,
        TokenFilter,
        )
from cic_eth.stat import init_chain_stat
from cic_eth.registry import (
        connect as connect_registry,
        connect_declarator,
        connect_token_registry,
        )

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_sync
argparser = cic_eth.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

# process config
config = cic_eth.cli.Config.from_args(args, arg_flags, local_arg_flags)

# connect to celery
cic_eth.cli.CeleryApp.from_config(config)

# set up database
dsn = dsn_from_config(config)
SessionBase.connect(dsn, pool_size=16, debug=config.true('DATABASE_DEBUG'))

# set up rpc
rpc = cic_eth.cli.RPC.from_config(config)
conn = rpc.get_default()

# set up chain provisions
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))

registry = None
try:
    registry = connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
except UnknownContractError as e:
    logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
    sys.exit(1)
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))

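
# everything above runs at module load; main() below probes the chain head,
# sets up the syncer backends and filters, and runs the syncers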

def main():
    # Connect to blockchain with chainlib
    o = block_latest()
    r = conn.do(o)
    block_current = int(r, 16)
    block_offset = block_current + 1

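    # if SYNCER_LOOP_INTERVAL is not set, fall back to the chain's recent
    # average block time (via init_chain_stat) as the polling interval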
    loop_interval = config.get('SYNCER_LOOP_INTERVAL')
    if loop_interval is None:
        stat = init_chain_stat(conn, block_start=block_current)
        loop_interval = stat.block_average()

    logg.debug('current block height {}, sync start offset {}'.format(block_current, block_offset))

    syncers = []

    #if SQLBackend.first(chain_spec):
    #    backend = SQLBackend.initial(chain_spec, block_offset)
    syncer_backends = SQLBackend.resume(chain_spec, block_offset)

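    # if no stored sync sessions could be resumed, create an initial backend
    # covering history from SYNCER_OFFSET up to the current head (practically
    # no history when SYNCER_NO_HISTORY is set)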
    if len(syncer_backends) == 0:
        initial_block_start = int(config.get('SYNCER_OFFSET'))
        initial_block_offset = int(block_offset)
        if config.true('SYNCER_NO_HISTORY'):
            initial_block_start = initial_block_offset
            initial_block_offset += 1
        syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
        logg.info('found no backends to resume, adding initial sync from history start {} end {}'.format(initial_block_start, initial_block_offset))
    else:
        for syncer_backend in syncer_backends:
            logg.info('resuming sync session {}'.format(syncer_backend))

    syncer_backends.append(SQLBackend.live(chain_spec, block_offset+1))

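    # a HistorySyncer is attempted for every backend; backends that raise
    # AttributeError on construction (presumably those without a bounded
    # history range, such as the live head backend just added) are driven
    # by a HeadSyncer instead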
    for syncer_backend in syncer_backends:
        try:
            syncers.append(HistorySyncer(syncer_backend, cic_eth.cli.chain_interface))
            logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
        except AttributeError:
            logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
            syncers.append(HeadSyncer(syncer_backend, cic_eth.cli.chain_interface))

    connect_registry(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))

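    # CIC_TRUST_ADDRESS holds a comma-separated list of trusted declarator
    # addresses; unless the _UNSAFE flag is set, each entry must already be
    # a valid checksum address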
    trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
    if trusted_addresses_src is None:
        logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
        sys.exit(1)
    trusted_addresses = trusted_addresses_src.split(',')
    for i, address in enumerate(trusted_addresses):
        if not config.get('_UNSAFE'):
            if not is_checksum_address(address):
                raise ValueError('address {} is not a valid checksum address'.format(address))
        else:
            trusted_addresses[i] = to_checksum_address(address)
        logg.info('using trusted address {}'.format(address))
    connect_declarator(conn, chain_spec, trusted_addresses)
    connect_token_registry(conn, chain_spec)
    CallbackFilter.trusted_addresses = trusted_addresses

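    # each entry in TASKS_TRANSFER_CALLBACKS is either a bare task name or a
    # 'queue:task' pair; without a queue prefix the default CELERY_QUEUE is used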
    callback_filters = []
    for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','):
        task_split = cb.split(':')
        task_queue = config.get('CELERY_QUEUE')
        if len(task_split) > 1:
            task_queue = task_split[0]
        callback_filter = CallbackFilter(chain_spec, task_split[-1], task_queue)
        callback_filters.append(callback_filter)

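    # per-concern filters, each bound to the configured celery queue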
    tx_filter = TxFilter(chain_spec, config.get('CELERY_QUEUE'))

    account_registry_address = registry.by_name('AccountRegistry')
    registration_filter = RegistrationFilter(chain_spec, account_registry_address, queue=config.get('CELERY_QUEUE'))

    gas_filter = GasFilter(chain_spec, config.get('CELERY_QUEUE'))

    token_gas_cache_filter = TokenFilter(chain_spec, config.get('CELERY_QUEUE'))

    #transfer_auth_filter = TransferAuthFilter(registry, chain_spec, config.get('_CELERY_QUEUE'))

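    # attach every filter to each syncer, then run the syncers one at a time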
    for i, syncer in enumerate(syncers):
        logg.debug('running syncer index {}'.format(i))
        syncer.add_filter(gas_filter)
        syncer.add_filter(registration_filter)
        # TODO: the two following filters break the filter loop if they return a uuid. Pro: less code executed. Con: possibly unintuitive flow break
        syncer.add_filter(tx_filter)
        syncer.add_filter(token_gas_cache_filter)
        #syncer.add_filter(transfer_auth_filter)
        for cf in callback_filters:
            syncer.add_filter(cf)

        r = syncer.loop(int(loop_interval), conn)
        sys.stderr.write("sync {} done at block {}\n".format(syncer, r))


if __name__ == '__main__':
    main()