Add mock filters and flag for manually adding filters

This commit is contained in:
lash 2022-01-23 18:07:58 +00:00
parent 8bfb91d7db
commit 83f26efd15
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746
5 changed files with 50 additions and 8 deletions

View File

@@ -6,6 +6,7 @@ import logging
from chainsyncer.backend.file import chain_dir_for
from leveldir.numeric import NumDir
from leveldir.hex import HexDir
from hexathon import strip_0x
logg = logging.getLogger(__name__)
@@ -45,6 +46,14 @@ class RuledFilter:
RuledFilter.tx_dir = HexDir(RuledFilter.tx_path, 32, levels=2)
@classmethod
def block_callback(cls, block, extra=None):
    """Record a seen block in the class-level cache directories.

    Stores the block's raw source keyed by its hash, and maps the
    block number to that hash for numeric lookup.
    """
    raw = str(block.src()).encode('utf-8')
    digest = bytes.fromhex(strip_0x(block.hash))
    cls.block_hash_dir.add(digest, raw)
    cls.block_num_dir.add(block.number, digest)
def filter(self, conn, block, tx, db_session=None):
if self.rules_filter != None:
if not self.rules_filter.apply_rules(tx):

View File

@@ -13,13 +13,6 @@ logg = logging.getLogger(__name__)
class CacheFilter(RuledFilter):
    """Rule-bound filter that persists blocks and matched txs to cache dirs."""

    def block_callback(self, block, extra=None):
        # Index the block two ways: hash -> raw source, number -> hash.
        raw = str(block.src()).encode('utf-8')
        digest = bytes.fromhex(strip_0x(block.hash))
        self.block_hash_dir.add(digest, raw)
        self.block_num_dir.add(block.number, digest)

    def ruled_filter(self, conn, block, tx, db_session=None):
        # Persist the matched transaction's raw source keyed by its hash.
        payload = str(tx.src()).encode('utf-8')
        self.tx_dir.add(bytes.fromhex(strip_0x(tx.hash)), payload)

View File

@@ -0,0 +1,15 @@
# standard imports
import logging

logg = logging.getLogger(__name__)


class Filter:
    """No-op mock filter: logs the block/tx pair it receives and nothing else."""

    def __init__(self, *args, **kwargs):
        # Accept and ignore any arguments so this mock is a drop-in
        # replacement for real filter modules loaded via --filter.
        pass

    def filter(self, conn, block, tx, db_session=None):
        logg.debug('PLAIN MOCK for {} {}'.format(block.number, tx.index))

View File

@@ -0,0 +1,17 @@
# standard imports
import logging

# local imports
from eth_monitor.filters import RuledFilter

logg = logging.getLogger(__name__)


class Filter(RuledFilter):
    """Mock rule-bound filter: logs rule matches instead of acting on them."""

    def __init__(self, *args, **kwargs):
        # Forward only the rules filter; rule application itself is
        # handled by the RuledFilter parent.
        super(Filter, self).__init__(rules_filter=kwargs.get('rules_filter'))

    def ruled_filter(self, conn, block, tx, db_session=None):
        logg.debug('RULE MOCK for {} {}'.format(block.number, tx.index))

View File

@@ -48,6 +48,7 @@ argparser.add_argument('--skip-history', action='store_true', dest='skip_history
# CLI options: include/exclude rule files, dynamically loaded filter
# modules (-f may be repeated), cache location, sync mode and verbosity.
argparser.add_argument('--includes-file', type=str, dest='includes_file', help='Load include rules from file')
argparser.add_argument('--include-default', action='store_true', help='Include all transactions by default')
argparser.add_argument('--excludes-file', type=str, dest='excludes_file', help='Load exclude rules from file')
# action='append' collects every -f occurrence into a list (None if unused).
argparser.add_argument('-f', '--filter', type=str, action='append', help='Add python module filter path')
argparser.add_argument('--cache-dir', dest='cache_dir', type=str, help='Directory to store tx data')
argparser.add_argument('--single', action='store_true', help='Execute a single sync, regardless of previous states')
argparser.add_argument('-v', action='store_true', help='Be verbose')
@@ -171,7 +172,7 @@ def setup_backend_resume(chain_spec, block_offset, state_dir, callback, sync_off
logg.info('resuming sync session {}'.format(syncer_backend))
for syncer_backend in syncer_backends:
syncers.append(HistorySyncer(syncer_backend, chain_interface, block_callback=cache_filter.block_callback))
syncers.append(HistorySyncer(syncer_backend, chain_interface, block_callback=RuledFilter.block_callback))
syncer_backend = FileBackend.live(chain_spec, block_offset+1, base_dir=state_dir)
syncers.append(HeadSyncer(syncer_backend, chain_interface, block_callback=cache_filter.block_callback))
@@ -210,6 +211,13 @@ if __name__ == '__main__':
cache_filter,
]
# Dynamically load any filter modules named via -f/--filter; each module
# must expose a Filter class accepting a rules_filter keyword argument.
# NOTE(review): prefer 'is not None' over '!= None' for the None check.
if args.filter != None:
import importlib
for fltr in args.filter:
m = importlib.import_module(fltr)
# Bind the loaded filter to the active address rules and register it.
fltr_object = m.Filter(rules_filter=address_rules)
filters.append(fltr_object)
# Pick single-shot vs resumable sync setup from the _SINGLE config flag.
syncer_setup_func = None
if config.true('_SINGLE'):
syncer_setup_func = setup_backend_single