Compare commits
18 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
909b85b2b7
|
||
|
|
2f9663a8f8
|
||
|
|
637ead1a38
|
||
|
|
412018fc64
|
||
|
|
95663621bc
|
||
|
|
0726f7a730
|
||
|
|
2c8ad85307
|
||
|
|
5df6656981
|
||
|
|
4af735e5d3
|
||
|
|
6b6b26f1ae
|
||
|
|
44bdda80bf
|
||
|
|
f039d6c9ad
|
||
|
|
58787b3884
|
||
|
|
b192dd6e95
|
||
|
|
ca1441d50d
|
||
|
|
ca82ea247f
|
||
|
|
384c79bed0
|
||
|
|
36acf3f09a
|
19
CHANGELOG
19
CHANGELOG
@@ -1,3 +1,22 @@
|
||||
* 0.4.8
|
||||
- Add unlock action description to info loglevel for unlock tool
|
||||
* 0.4.7
|
||||
- Upgrade shep to avoid sync in persist set
|
||||
* 0.4.6
|
||||
- Upgrade shep to handle filesystem list exception
|
||||
* 0.4.5
|
||||
- Upgrade chainlib
|
||||
- Upgrade shep to guarantee atomic state locks
|
||||
* 0.4.4
|
||||
- Reinstate tx index bump in sync state on filter execution complete
|
||||
* 0.4.3
|
||||
- Refactor lock code, move into module from runnable script
|
||||
* 0.4.2
|
||||
- Same as 0.4.1
|
||||
* 0.4.1
|
||||
- Correct runnable entry in setup
|
||||
* 0.4.0
|
||||
- Refactor on shep
|
||||
* 0.3.7
|
||||
- Remove hard eth dependency in settings rendering
|
||||
- Add unlock cli tool
|
||||
|
||||
@@ -141,3 +141,4 @@ class SyncDriver:
|
||||
|
||||
def get(self, conn):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@@ -31,16 +31,25 @@ class LockError(Exception):
|
||||
class FilterDone(Exception):
|
||||
"""Exception raised when all registered filters have been executed
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class InterruptError(FilterDone):
|
||||
"""Exception for interrupting or attempting to use an interrupted sync
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class IncompleteFilterError(Exception):
|
||||
"""Exception raised if filter reset is executed prematurely
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class FilterInitializationError(BackendError):
|
||||
"""Exception raised if filter state does not match the registered filters
|
||||
"""
|
||||
pass
|
||||
|
||||
#class AbortTx(Exception):
|
||||
# """
|
||||
|
||||
@@ -43,13 +43,17 @@ class FilterState:
|
||||
self.state_store.add('RESET')
|
||||
|
||||
self.state = self.state_store.state
|
||||
self.elements = self.state_store.elements
|
||||
self.put = self.state_store.put
|
||||
self.mask = self.state_store.mask
|
||||
self.name = self.state_store.name
|
||||
self.set = self.state_store.set
|
||||
self.next = self.state_store.next
|
||||
self.move = self.state_store.move
|
||||
self.unset = self.state_store.unset
|
||||
self.peek = self.state_store.peek
|
||||
self.from_name = self.state_store.from_name
|
||||
self.list = self.state_store.list
|
||||
self.state_store.sync()
|
||||
self.all = self.state_store.all
|
||||
self.started = False
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import logging
|
||||
import sys
|
||||
import importlib
|
||||
|
||||
# external imports
|
||||
import chainlib.cli
|
||||
|
||||
# local imports
|
||||
import chainsyncer.cli
|
||||
from chainsyncer.settings import ChainsyncerSettings
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
arg_flags = chainlib.cli.argflag_std_base | chainlib.cli.Flag.CHAIN_SPEC
|
||||
argparser = chainlib.cli.ArgumentParser(arg_flags)
|
||||
argparser.add_argument('--state-dir', type=str, dest='state_dir', help='State directory')
|
||||
argparser.add_argument('--session-id', type=str, dest='session_id', help='Session id for state')
|
||||
argparser.add_argument('--action', type=str, choices=['repeat', 'continue'], help='Action to take on lock. Repeat means re-run the locked filter. Continue means resume execution for next filter.')
|
||||
argparser.add_positional('tx', type=str, help='Transaction hash to unlock')
|
||||
|
||||
sync_flags = chainsyncer.cli.SyncFlag.RANGE | chainsyncer.cli.SyncFlag.HEAD
|
||||
chainsyncer.cli.process_flags(argparser, sync_flags)
|
||||
|
||||
args = argparser.parse_args()
|
||||
|
||||
base_config_dir = chainsyncer.cli.config_dir,
|
||||
config = chainlib.cli.Config.from_args(args, arg_flags, base_config_dir=base_config_dir)
|
||||
config = chainsyncer.cli.process_config(config, args, sync_flags)
|
||||
config.add(args.state_dir, '_STATE_DIR', False)
|
||||
config.add(args.session_id, '_SESSION_ID', False)
|
||||
logg.debug('config loaded:\n{}'.format(config))
|
||||
|
||||
settings = ChainsyncerSettings()
|
||||
settings.process_sync_backend(config)
|
||||
|
||||
|
||||
def main():
|
||||
state_dir = None
|
||||
if settings.get('SYNCER_BACKEND') == 'mem':
|
||||
raise ValueError('cannot unlock volatile state store')
|
||||
|
||||
if settings.get('SYNCER_BACKEND') == 'fs':
|
||||
syncer_store_module = importlib.import_module('chainsyncer.store.fs')
|
||||
syncer_store_class = getattr(syncer_store_module, 'SyncFsStore')
|
||||
elif settings.get('SYNCER_BACKEND') == 'rocksdb':
|
||||
syncer_store_module = importlib.import_module('chainsyncer.store.rocksdb')
|
||||
syncer_store_class = getattr(syncer_store_module, 'SyncRocksDbStore')
|
||||
else:
|
||||
syncer_store_module = importlib.import_module(settings.get('SYNCER_BACKEND'))
|
||||
syncer_store_class = getattr(syncer_store_module, 'SyncStore')
|
||||
|
||||
state_dir = os.path.join(config.get('_STATE_DIR'), settings.get('SYNCER_BACKEND'))
|
||||
sync_path = os.path.join(config.get('_SESSION_ID'), 'sync', 'filter')
|
||||
sync_store = syncer_store_class(state_dir, session_id=sync_path)
|
||||
|
||||
logg.info('session is {}'.format(sync_store.session_id))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
113
chainsyncer/runnable/unlock.py
Normal file
113
chainsyncer/runnable/unlock.py
Normal file
@@ -0,0 +1,113 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import logging
|
||||
import sys
|
||||
import importlib
|
||||
|
||||
# external imports
|
||||
import chainlib.cli
|
||||
from shep.persist import PersistedState
|
||||
|
||||
# local imports
|
||||
import chainsyncer.cli
|
||||
from chainsyncer.settings import ChainsyncerSettings
|
||||
from chainsyncer.store import SyncStore
|
||||
from chainsyncer.filter import (
|
||||
FilterState,
|
||||
SyncFilter,
|
||||
)
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
valid_fwd = [
|
||||
'fwd',
|
||||
'forward',
|
||||
'next',
|
||||
'continue',
|
||||
]
|
||||
|
||||
valid_rwd = [
|
||||
'rwd',
|
||||
'rewind',
|
||||
'current',
|
||||
'back',
|
||||
'repeat',
|
||||
'replay',
|
||||
]
|
||||
|
||||
action_is_forward = False
|
||||
|
||||
arg_flags = chainlib.cli.argflag_std_base | chainlib.cli.Flag.CHAIN_SPEC
|
||||
argparser = chainlib.cli.ArgumentParser(arg_flags)
|
||||
argparser.add_argument('--state-dir', type=str, dest='state_dir', help='State directory')
|
||||
argparser.add_positional('action', type=str, help='Action to take on lock. Repeat means re-run the locked filter. Continue means resume execution for next filter.')
|
||||
|
||||
sync_flags = chainsyncer.cli.SyncFlag.RANGE | chainsyncer.cli.SyncFlag.HEAD
|
||||
chainsyncer.cli.process_flags(argparser, sync_flags)
|
||||
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.action in valid_fwd:
|
||||
action_is_forward = True
|
||||
elif args.action not in valid_rwd:
|
||||
sys.stderr.write('action argument must be one of {} or {}\n'.format(valid_rwd, valid_fwd))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
base_config_dir = chainsyncer.cli.config_dir,
|
||||
config = chainlib.cli.Config.from_args(args, arg_flags, base_config_dir=base_config_dir)
|
||||
config = chainsyncer.cli.process_config(config, args, sync_flags)
|
||||
config.add(args.state_dir, '_STATE_DIR', False)
|
||||
logg.debug('config loaded:\n{}'.format(config))
|
||||
|
||||
settings = ChainsyncerSettings()
|
||||
settings.process_sync_backend(config)
|
||||
logg.debug('settings:\n{}'.format(str(settings)))
|
||||
|
||||
|
||||
class FilterInNameOnly(SyncFilter):
|
||||
|
||||
def __init__(self, k):
|
||||
self.k = k
|
||||
|
||||
|
||||
def common_name(self):
|
||||
return self.k
|
||||
|
||||
|
||||
def main():
|
||||
if settings.get('SYNCER_BACKEND') == 'mem':
|
||||
raise ValueError('cannot unlock volatile state store')
|
||||
|
||||
state_dir = config.get('_STATE_DIR')
|
||||
|
||||
if config.get('SYNCER_BACKEND') == 'fs':
|
||||
syncer_store_module = importlib.import_module('chainsyncer.store.fs')
|
||||
syncer_store_class = getattr(syncer_store_module, 'SyncFsStore')
|
||||
elif config.get('SYNCER_BACKEND') == 'rocksdb':
|
||||
syncer_store_module = importlib.import_module('chainsyncer.store.rocksdb')
|
||||
syncer_store_class = getattr(syncer_store_module, 'SyncRocksDbStore')
|
||||
else:
|
||||
syncer_store_module = importlib.import_module(config.get('SYNCER_BACKEND'))
|
||||
syncer_store_class = getattr(syncer_store_module, 'SyncStore')
|
||||
|
||||
logg.info('using engine {} module {}.{}'.format(config.get('SYNCER_BACKEND'), syncer_store_module.__file__, syncer_store_class.__name__))
|
||||
|
||||
store = syncer_store_class(state_dir)
|
||||
|
||||
filter_list = store.load_filter_list()
|
||||
for i, k in enumerate(filter_list):
|
||||
fltr = FilterInNameOnly(k)
|
||||
store.register(fltr)
|
||||
filter_list[i] = k.upper()
|
||||
|
||||
store.connect()
|
||||
store.start(ignore_lock=True)
|
||||
store.unlock_filter(not action_is_forward)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -13,12 +13,15 @@ class SyncSession:
|
||||
def __init__(self, session_store):
|
||||
self.session_store = session_store
|
||||
self.started = self.session_store.started
|
||||
self.get = self.session_store.get
|
||||
self.next = self.session_store.next_item
|
||||
self.item = None
|
||||
self.filters = self.session_store.filters
|
||||
|
||||
|
||||
|
||||
def get(self, k):
|
||||
return self.session_store.get(str(k))
|
||||
|
||||
|
||||
def start(self, offset=0, target=-1):
|
||||
self.session_store.start(offset=offset, target=target)
|
||||
self.item = self.session_store.next_item()
|
||||
|
||||
@@ -6,16 +6,12 @@ from hexathon import (
|
||||
to_int as hex_to_int,
|
||||
strip_0x,
|
||||
)
|
||||
from chainlib.settings import ChainSettings
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ChainsyncerSettings:
|
||||
|
||||
def __init__(self):
|
||||
self.o = {}
|
||||
self.get = self.o.get
|
||||
|
||||
class ChainsyncerSettings(ChainSettings):
|
||||
|
||||
def process_sync_backend(self, config):
|
||||
self.o['SYNCER_BACKEND'] = config.get('SYNCER_BACKEND')
|
||||
|
||||
@@ -13,6 +13,7 @@ from chainsyncer.error import (
|
||||
InterruptError,
|
||||
IncompleteFilterError,
|
||||
SyncDone,
|
||||
FilterInitializationError,
|
||||
)
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
@@ -35,19 +36,19 @@ def sync_state_deserialize(b):
|
||||
# NOT thread safe
|
||||
class SyncItem:
|
||||
|
||||
def __init__(self, offset, target, sync_state, filter_state, started=False, ignore_invalid=False):
|
||||
def __init__(self, offset, target, sync_state, filter_state, started=False, ignore_lock=False):
|
||||
self.offset = offset
|
||||
self.target = target
|
||||
self.sync_state = sync_state
|
||||
self.filter_state = filter_state
|
||||
self.state_key = str(offset)
|
||||
|
||||
logg.debug('get key {}'.format(self.state_key))
|
||||
v = self.sync_state.get(self.state_key)
|
||||
|
||||
(self.cursor, self.tx_cursor, self.target) = sync_state_deserialize(v)
|
||||
|
||||
if self.filter_state.state(self.state_key) & self.filter_state.from_name('LOCK') and not ignore_invalid:
|
||||
filter_state = self.filter_state.state(self.state_key)
|
||||
if filter_state & self.filter_state.from_name('LOCK') > 0 and not ignore_lock:
|
||||
raise LockError(self.state_key)
|
||||
|
||||
self.count = len(self.filter_state.all(pure=True)) - 4
|
||||
@@ -56,7 +57,7 @@ class SyncItem:
|
||||
self.skip_filter = True
|
||||
elif not started:
|
||||
self.filter_state.move(self.state_key, self.filter_state.from_name('RESET'))
|
||||
|
||||
|
||||
|
||||
def __check_done(self):
|
||||
if self.filter_state.state(self.state_key) & self.filter_state.from_name('INTERRUPT') > 0:
|
||||
@@ -65,11 +66,12 @@ class SyncItem:
|
||||
raise FilterDone(self.state_key)
|
||||
|
||||
|
||||
def reset(self):
|
||||
if self.filter_state.state(self.state_key) & self.filter_state.from_name('LOCK') > 0:
|
||||
raise LockError('reset attempt on {} when state locked'.format(self.state_key))
|
||||
if self.filter_state.state(self.state_key) & self.filter_state.from_name('DONE') == 0:
|
||||
raise IncompleteFilterError('reset attempt on {} when incomplete'.format(self.state_key))
|
||||
def reset(self, check_incomplete=True):
|
||||
if check_incomplete:
|
||||
if self.filter_state.state(self.state_key) & self.filter_state.from_name('LOCK') > 0:
|
||||
raise LockError('reset attempt on {} when state locked'.format(self.state_key))
|
||||
if self.filter_state.state(self.state_key) & self.filter_state.from_name('DONE') == 0:
|
||||
raise IncompleteFilterError('reset attempt on {} when incomplete'.format(self.state_key))
|
||||
self.filter_state.move(self.state_key, self.filter_state.from_name('RESET'))
|
||||
|
||||
|
||||
@@ -98,17 +100,16 @@ class SyncItem:
|
||||
self.sync_state.replace(self.state_key, b)
|
||||
|
||||
|
||||
def __find_advance(self):
|
||||
v = self.filter_state.state(self.state_key)
|
||||
|
||||
|
||||
def advance(self):
|
||||
def advance(self, ignore_lock=False):
|
||||
if self.skip_filter:
|
||||
raise FilterDone()
|
||||
self.__check_done()
|
||||
|
||||
if self.filter_state.state(self.state_key) & self.filter_state.from_name('LOCK') > 0:
|
||||
raise LockError('advance attempt on {} when state locked'.format(self.state_key))
|
||||
if ignore_lock:
|
||||
self.filter_state.unset(self.state_key, self.filter_state.from_name('LOCK'))
|
||||
else:
|
||||
raise LockError('advance attempt on {} when state locked'.format(self.state_key))
|
||||
done = False
|
||||
try:
|
||||
self.filter_state.next(self.state_key)
|
||||
@@ -148,7 +149,7 @@ class SyncItem:
|
||||
class SyncStore:
|
||||
|
||||
def __init__(self, path, session_id=None):
|
||||
self.session_id = None
|
||||
self.session_id = session_id
|
||||
self.session_path = None
|
||||
self.is_default = False
|
||||
self.first = False
|
||||
@@ -157,16 +158,7 @@ class SyncStore:
|
||||
self.item_keys = []
|
||||
self.started = False
|
||||
self.thresholds = []
|
||||
self.default_path = os.path.join(path, 'default')
|
||||
|
||||
if session_id == None:
|
||||
self.session_path = os.path.realpath(self.default_path)
|
||||
self.is_default = True
|
||||
else:
|
||||
if session_id == 'default':
|
||||
self.is_default = True
|
||||
given_path = os.path.join(path, session_id)
|
||||
self.session_path = os.path.realpath(given_path)
|
||||
self.session_path = path
|
||||
|
||||
|
||||
def setup_sync_state(self, factory=None, event_callback=None):
|
||||
@@ -201,20 +193,23 @@ class SyncStore:
|
||||
self.filter_state.register(fltr)
|
||||
|
||||
|
||||
def start(self, offset=0, target=-1):
|
||||
def start(self, offset=0, target=-1, ignore_lock=False):
|
||||
if self.started:
|
||||
return
|
||||
|
||||
self.load(target)
|
||||
self.save_filter_list()
|
||||
|
||||
self.load(target, ignore_lock=ignore_lock)
|
||||
|
||||
if self.first:
|
||||
state_bytes = sync_state_serialize(offset, 0, target)
|
||||
block_number_str = str(offset)
|
||||
self.state.put(block_number_str, contents=state_bytes)
|
||||
self.filter_state.put(block_number_str)
|
||||
o = SyncItem(offset, target, self.state, self.filter_state)
|
||||
self.items[offset] = o
|
||||
self.item_keys.append(offset)
|
||||
o = SyncItem(offset, target, self.state, self.filter_state, ignore_lock=ignore_lock)
|
||||
k = str(offset)
|
||||
self.items[k] = o
|
||||
self.item_keys.append(k)
|
||||
elif offset > 0:
|
||||
logg.warning('block number argument {} for start ignored for already initiated sync {}'.format(offset, self.session_id))
|
||||
self.started = True
|
||||
@@ -237,7 +232,7 @@ class SyncStore:
|
||||
self.state.put(str(item.cursor), contents=state_bytes)
|
||||
|
||||
|
||||
def load(self, target):
|
||||
def load(self, target, ignore_lock=False):
|
||||
self.state.sync(self.state.NEW)
|
||||
self.state.sync(self.state.SYNC)
|
||||
|
||||
@@ -261,12 +256,15 @@ class SyncStore:
|
||||
item_target = target
|
||||
if i < lim:
|
||||
item_target = thresholds[i+1]
|
||||
o = SyncItem(block_number, item_target, self.state, self.filter_state, started=True)
|
||||
self.items[block_number] = o
|
||||
self.item_keys.append(block_number)
|
||||
o = SyncItem(block_number, item_target, self.state, self.filter_state, started=True, ignore_lock=ignore_lock)
|
||||
k = str(block_number)
|
||||
self.items[k] = o
|
||||
self.item_keys.append(k)
|
||||
logg.info('added existing {}'.format(o))
|
||||
|
||||
self.get_target()
|
||||
v = self.get_target()
|
||||
if v != None:
|
||||
target = v
|
||||
|
||||
if len(thresholds) == 0:
|
||||
if self.target != None:
|
||||
@@ -295,3 +293,83 @@ class SyncStore:
|
||||
|
||||
def disconnect(self):
|
||||
self.filter_state.disconnect()
|
||||
|
||||
|
||||
def save_filter_list(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def load_filter_list(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
def __get_locked_item(self):
|
||||
locked_item = self.filter_state.list(self.filter_state.state_store.LOCK)
|
||||
|
||||
if len(locked_item) == 0:
|
||||
logg.error('Sync filter in store {} is not locked\n'.format(self))
|
||||
return None
|
||||
elif len(locked_item) > 1:
|
||||
raise FilterInitializationError('More than one locked filter item encountered in store {}. That should never happen, so I do not know what to do next.\n'.format(self))
|
||||
return locked_item[0]
|
||||
|
||||
|
||||
def __get_filter_index(self, k):
|
||||
i = -1
|
||||
fltrs = self.load_filter_list()
|
||||
for fltr in fltrs:
|
||||
i += 1
|
||||
if k == fltr.upper():
|
||||
logg.debug('lock filter match at filter list index {}'.format(i))
|
||||
return (i, fltrs,)
|
||||
|
||||
|
||||
def unlock_filter(self, revert=False):
|
||||
locked_item_key = self.__get_locked_item()
|
||||
if locked_item_key == None:
|
||||
return False
|
||||
locked_item = self.get(locked_item_key)
|
||||
state = self.filter_state.state(locked_item_key)
|
||||
locked_state = state - self.filter_state.state_store.LOCK
|
||||
locked_state_name = self.filter_state.name(locked_state)
|
||||
|
||||
logg.debug('found locked item {} in state {}'.format(locked_item, locked_state))
|
||||
|
||||
(i, fltrs) = self.__get_filter_index(locked_state_name)
|
||||
|
||||
if i == -1:
|
||||
raise FilterInitializationError('locked state {} ({}) found for item {}, but matching filter has not been registered'.format(locked_state_name, locked_state, locked_item))
|
||||
|
||||
direction = None
|
||||
if revert:
|
||||
self.__unlock_previous(locked_item, fltrs, i)
|
||||
new_state = self.filter_state.state(locked_item_key)
|
||||
direction = 'previous'
|
||||
else:
|
||||
self.__unlock_next(locked_item, fltrs, i)
|
||||
new_state = self.filter_state.state(locked_item_key)
|
||||
direction = 'next'
|
||||
|
||||
logg.info('chainstate unlock to {} {} ({}) -> {} ({})'.format(direction, self.filter_state.name(state), state, self.filter_state.name(new_state), new_state))
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def __unlock_next(self, item, lst, index):
|
||||
if index == len(lst) - 1:
|
||||
item.reset(check_incomplete=False)
|
||||
else:
|
||||
item.release()
|
||||
|
||||
|
||||
def __unlock_previous(self, item, lst, index):
|
||||
if index == 0:
|
||||
item.reset(check_incomplete=False)
|
||||
else:
|
||||
new_state_str = lst[index - 1]
|
||||
new_state = self.filter_state.state_store.from_name(new_state_str)
|
||||
self.filter_state.state_store.move(item.state_key, new_state)
|
||||
|
||||
|
||||
def peek_current_filter(self):
|
||||
pass
|
||||
|
||||
@@ -24,7 +24,8 @@ class SyncFsStore(SyncStore):
|
||||
create_path = True
|
||||
|
||||
if create_path:
|
||||
self.__create_path(base_path, self.default_path, session_id=session_id)
|
||||
#self.__create_path(base_path, self.default_path, session_id=session_id)
|
||||
os.makedirs(self.session_path)
|
||||
|
||||
self.session_id = os.path.basename(self.session_path)
|
||||
logg.info('session id {} resolved {} path {}'.format(session_id, self.session_id, self.session_path))
|
||||
@@ -74,3 +75,25 @@ class SyncFsStore(SyncStore):
|
||||
f.write(str(v))
|
||||
f.close()
|
||||
self.target = v
|
||||
|
||||
|
||||
def load_filter_list(self):
|
||||
fltr = []
|
||||
fp = os.path.join(self.session_path, 'filter_list')
|
||||
f = open(fp, 'r')
|
||||
while True:
|
||||
v = f.readline()
|
||||
if len(v) == 0:
|
||||
break
|
||||
v = v.rstrip()
|
||||
fltr.append(v)
|
||||
f.close()
|
||||
return fltr
|
||||
|
||||
|
||||
def save_filter_list(self):
|
||||
fp = os.path.join(self.session_path, 'filter_list')
|
||||
f = open(fp, 'w')
|
||||
for fltr in self.filters:
|
||||
f.write(fltr.common_name() + '\n')
|
||||
f.close()
|
||||
|
||||
@@ -14,10 +14,7 @@ logg = logging.getLogger(__name__)
|
||||
class SyncMemStore(SyncStore):
|
||||
|
||||
def __init__(self, session_id=None, state_event_callback=None, filter_state_event_callback=None):
|
||||
super(SyncMemStore, self).__init__('/dev/null', session_id=session_id)
|
||||
|
||||
self.session_id = os.path.basename(self.session_path)
|
||||
logg.info('session id {} resolved {} path {}'.format(session_id, self.session_id, self.session_path))
|
||||
super(SyncMemStore, self).__init__(None, session_id=session_id)
|
||||
|
||||
factory = None
|
||||
self.setup_sync_state(factory, state_event_callback)
|
||||
@@ -36,5 +33,13 @@ class SyncMemStore(SyncStore):
|
||||
|
||||
def stop(self, item):
|
||||
if item != None:
|
||||
super(SyncRocksDbStore, self).stop(item)
|
||||
super(SyncMemStore, self).stop(item)
|
||||
logg.info('I am an in-memory only state store. I am shutting down now, so all state will now be discarded.')
|
||||
|
||||
|
||||
def save_filter_list(self):
|
||||
pass
|
||||
|
||||
|
||||
def load_filter_list(self):
|
||||
return []
|
||||
|
||||
@@ -43,8 +43,8 @@ class SyncRocksDbStore(SyncStore):
|
||||
prefix_factory = RocksDbStoreAdder(self.factory, 'filter')
|
||||
self.setup_filter_state(prefix_factory, filter_state_event_callback)
|
||||
|
||||
self.session_id = os.path.basename(self.session_path)
|
||||
logg.info('session id {} resolved {} path {}'.format(session_id, self.session_id, self.session_path))
|
||||
#self.session_id = os.path.basename(self.session_path)
|
||||
#logg.info('session id {} resolved {} path {}'.format(session_id, self.session_id, self.session_path))
|
||||
|
||||
self.target_db = RocksDbStoreAdder(self.factory, '.stat').add('target')
|
||||
|
||||
@@ -64,3 +64,16 @@ class SyncRocksDbStore(SyncStore):
|
||||
if item != None:
|
||||
super(SyncRocksDbStore, self).stop(item)
|
||||
self.factory.close()
|
||||
|
||||
|
||||
def save_filter_list(self):
|
||||
fltr = []
|
||||
for v in self.filters:
|
||||
fltr.append(v.common_name())
|
||||
self.target_db.put('filter_list', ','.join(fltr))
|
||||
|
||||
|
||||
def load_filter_list(self):
|
||||
v = self.target_db.get('filter_list')
|
||||
v = v.decode('utf-8')
|
||||
return v.split(',')
|
||||
|
||||
@@ -13,7 +13,8 @@ from chainsyncer.error import NoBlockForYou
|
||||
from chainsyncer.driver import SyncDriver
|
||||
|
||||
logging.STATETRACE = 5
|
||||
logg = logging.getLogger().getChild(__name__)
|
||||
logging.addLevelName('STATETRACE', logging.STATETRACE)
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def state_event_handler(k, v_old, v_new):
|
||||
|
||||
@@ -5,6 +5,7 @@ import unittest
|
||||
import shutil
|
||||
import tempfile
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
# local imports
|
||||
from chainsyncer.session import SyncSession
|
||||
@@ -35,7 +36,10 @@ def filter_change_callback(k, old_state, new_state):
|
||||
class TestStoreBase(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.path = tempfile.mkdtemp()
|
||||
self.base_path = tempfile.mkdtemp()
|
||||
self.session_id = str(uuid.uuid4())
|
||||
self.path = os.path.join(self.base_path, self.session_id)
|
||||
os.makedirs(self.path)
|
||||
self.store_factory = None
|
||||
self.persist = True
|
||||
|
||||
@@ -46,6 +50,7 @@ class TestStoreBase(unittest.TestCase):
|
||||
"default",
|
||||
"store_start",
|
||||
"store_resume",
|
||||
"filter_list",
|
||||
"sync_process_nofilter",
|
||||
"sync_process_onefilter",
|
||||
"sync_process_outoforder",
|
||||
@@ -66,60 +71,25 @@ class TestStoreBase(unittest.TestCase):
|
||||
def t_default(self):
|
||||
bogus_item = MockItem(0, 0, 0, 0)
|
||||
store = self.store_factory()
|
||||
|
||||
fp = os.path.join(self.path, store.session_id)
|
||||
|
||||
if store.session_path == None:
|
||||
return
|
||||
|
||||
#fp = os.path.join(self.path, store.session_id)
|
||||
fp = self.path
|
||||
session_id = store.session_id
|
||||
st = None
|
||||
try:
|
||||
st = os.stat(fp)
|
||||
except FileNotFoundError as e:
|
||||
logg.warning('error {} persist {}'.format(e, self.persist))
|
||||
if self.persist:
|
||||
raise e
|
||||
st = os.stat(fp)
|
||||
|
||||
if st != None:
|
||||
self.assertTrue(stat.S_ISDIR(st.st_mode))
|
||||
self.assertTrue(store.is_default)
|
||||
|
||||
fpd = os.path.join(self.path, 'default')
|
||||
try:
|
||||
st = os.stat(fpd)
|
||||
except FileNotFoundError as e:
|
||||
logg.warning('error {} persist {}'.format(e, self.persist))
|
||||
if self.persist:
|
||||
raise e
|
||||
if st != None:
|
||||
self.assertTrue(stat.S_ISDIR(st.st_mode))
|
||||
self.assertTrue(store.is_default)
|
||||
|
||||
fpd = os.path.realpath(fpd)
|
||||
self.assertEqual(fpd, fp)
|
||||
#self.assertTrue(store.is_default)
|
||||
|
||||
store.stop(bogus_item)
|
||||
store = self.store_factory()
|
||||
fpr = os.path.join(self.path, session_id)
|
||||
self.assertEqual(fp, fpr)
|
||||
self.assertTrue(store.is_default)
|
||||
|
||||
store.stop(bogus_item)
|
||||
store = self.store_factory('default')
|
||||
fpr = os.path.join(self.path, session_id)
|
||||
self.assertEqual(fp, fpr)
|
||||
self.assertTrue(store.is_default)
|
||||
|
||||
store.stop(bogus_item)
|
||||
store = self.store_factory('foo')
|
||||
fpf = os.path.join(self.path, 'foo')
|
||||
try:
|
||||
st = os.stat(fpf)
|
||||
except FileNotFoundError as e:
|
||||
logg.warning('error {} persist {}'.format(e, self.persist))
|
||||
if self.persist:
|
||||
raise e
|
||||
if st != None:
|
||||
self.assertTrue(stat.S_ISDIR(st.st_mode))
|
||||
self.assertFalse(store.is_default)
|
||||
|
||||
fpr = os.path.join(self.path, self.session_id)
|
||||
self.assertEqual(fp, self.path)
|
||||
|
||||
|
||||
def t_store_start(self):
|
||||
bogus_item = MockItem(0, 0, 0, 0)
|
||||
@@ -262,7 +232,7 @@ class TestStoreBase(unittest.TestCase):
|
||||
if self.persist:
|
||||
store = self.store_factory('foo')
|
||||
store.start()
|
||||
o = store.get(2)
|
||||
o = store.get('2')
|
||||
|
||||
|
||||
def t_sync_history_interrupted(self):
|
||||
@@ -282,7 +252,7 @@ class TestStoreBase(unittest.TestCase):
|
||||
store.stop(bogus_item)
|
||||
store = self.store_factory('foo')
|
||||
store.start()
|
||||
o = store.get(0)
|
||||
o = store.get('0')
|
||||
self.assertEqual(o.cursor, 2)
|
||||
self.assertEqual(o.target, 13)
|
||||
o.next(advance_block=True)
|
||||
@@ -306,3 +276,26 @@ class TestStoreBase(unittest.TestCase):
|
||||
o.next(advance_block=True)
|
||||
with self.assertRaises(SyncDone):
|
||||
o.next(advance_block=True)
|
||||
|
||||
|
||||
def t_filter_list(self):
|
||||
bogus_item = MockItem(0, 0, 0, 0)
|
||||
store = self.store_factory()
|
||||
|
||||
if store.session_path == None:
|
||||
return
|
||||
|
||||
fltr_one = MockFilter('foo_bar')
|
||||
store.register(fltr_one)
|
||||
|
||||
fltr_two = MockFilter('bar_baz')
|
||||
store.register(fltr_two)
|
||||
|
||||
store.start()
|
||||
store.stop(bogus_item)
|
||||
|
||||
store = self.store_factory()
|
||||
r = store.load_filter_list()
|
||||
|
||||
self.assertEqual(r[0], 'foo_bar')
|
||||
self.assertEqual(r[1], 'bar_baz')
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
confini~=0.6.0
|
||||
semver==2.13.0
|
||||
hexathon~=0.1.5
|
||||
chainlib>=0.1.0b1,<0.2.0
|
||||
shep~=0.2.3
|
||||
hexathon~=0.1.6
|
||||
chainlib~=0.2.0
|
||||
shep~=0.2.9
|
||||
|
||||
11
setup.cfg
11
setup.cfg
@@ -1,6 +1,6 @@
|
||||
[metadata]
|
||||
name = chainsyncer
|
||||
version = 0.3.7
|
||||
version = 0.4.9
|
||||
description = Generic blockchain syncer driver
|
||||
author = Louis Holbrook
|
||||
author_email = dev@holbrook.no
|
||||
@@ -22,18 +22,19 @@ licence_files =
|
||||
|
||||
[options]
|
||||
include_package_data = True
|
||||
python_requires = >= 3.6
|
||||
python_requires = >= 3.7
|
||||
packages =
|
||||
chainsyncer
|
||||
chainsyncer.driver
|
||||
chainsyncer.unittest
|
||||
chainsyncer.store
|
||||
chainsyncer.cli
|
||||
chainsyncer.runnable
|
||||
|
||||
#[options.package_data]
|
||||
#* =
|
||||
# sql/*
|
||||
|
||||
#[options.entry_points]
|
||||
#console_scripts =
|
||||
# blocksync-celery = chainsyncer.runnable.tracker:main
|
||||
[options.entry_points]
|
||||
console_scripts =
|
||||
chainsyncer-unlock = chainsyncer.runnable.unlock:main
|
||||
|
||||
@@ -19,17 +19,22 @@ from chainsyncer.unittest import (
|
||||
MockConn,
|
||||
MockTx,
|
||||
MockBlock,
|
||||
MockFilterError,
|
||||
state_event_handler,
|
||||
filter_state_event_handler,
|
||||
)
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
|
||||
logging.basicConfig(level=logging.STATETRACE)
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
|
||||
class TestFilter(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.path = tempfile.mkdtemp()
|
||||
self.store = SyncFsStore(self.path)
|
||||
self.store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
self.session = SyncSession(self.store)
|
||||
self.conn = MockConn()
|
||||
|
||||
@@ -73,6 +78,279 @@ class TestFilter(unittest.TestCase):
|
||||
self.assertEqual(len(fltr_two.contents), 0)
|
||||
|
||||
|
||||
def test_filter_resume_single_revert(self):
|
||||
fltr_one = MockFilter('foo', brk_hard=True)
|
||||
self.store.register(fltr_one)
|
||||
|
||||
self.session.start()
|
||||
|
||||
item = self.store.get('0')
|
||||
item.next()
|
||||
|
||||
tx_hash = os.urandom(32).hex()
|
||||
tx = MockTx(42, tx_hash)
|
||||
block = MockBlock(13, [tx_hash])
|
||||
|
||||
with self.assertRaises(MockFilterError):
|
||||
self.session.filter(self.conn, block, tx)
|
||||
|
||||
# Unlock the state, reverting to previous filter
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
self.conn = MockConn()
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
store.connect()
|
||||
store.start(ignore_lock=True)
|
||||
store.unlock_filter(revert=True)
|
||||
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
session = SyncSession(store)
|
||||
self.conn = MockConn()
|
||||
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
|
||||
session.start()
|
||||
|
||||
session.filter(self.conn, block, tx)
|
||||
|
||||
|
||||
|
||||
def test_filter_resume_single_continue(self):
|
||||
fltr_one = MockFilter('foo', brk_hard=True)
|
||||
self.store.register(fltr_one)
|
||||
|
||||
self.session.start()
|
||||
|
||||
item = self.store.get('0')
|
||||
item.next()
|
||||
|
||||
tx_hash = os.urandom(32).hex()
|
||||
tx = MockTx(42, tx_hash)
|
||||
block = MockBlock(13, [tx_hash])
|
||||
|
||||
with self.assertRaises(MockFilterError):
|
||||
self.session.filter(self.conn, block, tx)
|
||||
|
||||
# Unlock the state, reverting to previous filter
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
self.conn = MockConn()
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
store.connect()
|
||||
store.start(ignore_lock=True)
|
||||
store.unlock_filter(revert=False)
|
||||
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
session = SyncSession(store)
|
||||
self.conn = MockConn()
|
||||
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
store.connect()
|
||||
|
||||
session.start()
|
||||
|
||||
session.filter(self.conn, block, tx)
|
||||
|
||||
|
||||
|
||||
def test_filter_resume_multi_revert_last(self):
|
||||
fltr_one = MockFilter('foo')
|
||||
self.store.register(fltr_one)
|
||||
|
||||
fltr_two = MockFilter('bar', brk_hard=True)
|
||||
self.store.register(fltr_two)
|
||||
|
||||
self.session.start()
|
||||
|
||||
item = self.store.get('0')
|
||||
item.next()
|
||||
|
||||
tx_hash = os.urandom(32).hex()
|
||||
tx = MockTx(42, tx_hash)
|
||||
block = MockBlock(13, [tx_hash])
|
||||
|
||||
with self.assertRaises(MockFilterError):
|
||||
self.session.filter(self.conn, block, tx)
|
||||
|
||||
# Unlock the state, reverting to previous filter
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
self.conn = MockConn()
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
fltr_bar = MockFilter('bar')
|
||||
store.register(fltr_bar)
|
||||
store.connect()
|
||||
store.start(ignore_lock=True)
|
||||
store.unlock_filter(revert=True)
|
||||
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
session = SyncSession(store)
|
||||
self.conn = MockConn()
|
||||
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
fltr_two = MockFilter('bar')
|
||||
store.register(fltr_two)
|
||||
|
||||
store.connect()
|
||||
|
||||
session.start()
|
||||
|
||||
session.filter(self.conn, block, tx)
|
||||
|
||||
|
||||
def test_filter_resume_multi_continue_last(self):
|
||||
fltr_one = MockFilter('foo')
|
||||
self.store.register(fltr_one)
|
||||
|
||||
fltr_two = MockFilter('bar', brk_hard=True)
|
||||
self.store.register(fltr_two)
|
||||
|
||||
self.session.start()
|
||||
|
||||
item = self.store.get('0')
|
||||
item.next()
|
||||
|
||||
tx_hash = os.urandom(32).hex()
|
||||
tx = MockTx(42, tx_hash)
|
||||
block = MockBlock(13, [tx_hash])
|
||||
|
||||
with self.assertRaises(MockFilterError):
|
||||
self.session.filter(self.conn, block, tx)
|
||||
|
||||
# Unlock the state, reverting to previous filter
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
self.conn = MockConn()
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
fltr_bar = MockFilter('bar')
|
||||
store.register(fltr_bar)
|
||||
store.connect()
|
||||
store.start(ignore_lock=True)
|
||||
store.unlock_filter(revert=False)
|
||||
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
session = SyncSession(store)
|
||||
self.conn = MockConn()
|
||||
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
fltr_two = MockFilter('bar')
|
||||
store.register(fltr_two)
|
||||
|
||||
session.start()
|
||||
|
||||
session.filter(self.conn, block, tx)
|
||||
|
||||
|
||||
def test_filter_resume_multi_revert_middle(self):
|
||||
fltr_one = MockFilter('foo')
|
||||
self.store.register(fltr_one)
|
||||
|
||||
fltr_two = MockFilter('bar', brk_hard=True)
|
||||
self.store.register(fltr_two)
|
||||
|
||||
fltr_three = MockFilter('baz')
|
||||
self.store.register(fltr_three)
|
||||
|
||||
self.session.start()
|
||||
|
||||
item = self.store.get('0')
|
||||
item.next()
|
||||
|
||||
tx_hash = os.urandom(32).hex()
|
||||
tx = MockTx(42, tx_hash)
|
||||
block = MockBlock(13, [tx_hash])
|
||||
|
||||
with self.assertRaises(MockFilterError):
|
||||
self.session.filter(self.conn, block, tx)
|
||||
|
||||
# Unlock the state, reverting to previous filter
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
self.conn = MockConn()
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
fltr_two = MockFilter('bar')
|
||||
store.register(fltr_two)
|
||||
fltr_three = MockFilter('baz')
|
||||
store.register(fltr_three)
|
||||
|
||||
store.connect()
|
||||
store.start(ignore_lock=True)
|
||||
store.unlock_filter(revert=True)
|
||||
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
session = SyncSession(store)
|
||||
self.conn = MockConn()
|
||||
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
fltr_two = MockFilter('bar')
|
||||
store.register(fltr_two)
|
||||
fltr_three = MockFilter('baz')
|
||||
store.register(fltr_three)
|
||||
|
||||
store.connect()
|
||||
|
||||
session.start()
|
||||
|
||||
session.filter(self.conn, block, tx)
|
||||
|
||||
|
||||
def test_filter_resume_multi_continue_middle(self):
|
||||
fltr_one = MockFilter('foo')
|
||||
self.store.register(fltr_one)
|
||||
|
||||
fltr_two = MockFilter('bar', brk_hard=True)
|
||||
self.store.register(fltr_two)
|
||||
|
||||
fltr_three = MockFilter('baz')
|
||||
self.store.register(fltr_three)
|
||||
|
||||
self.session.start()
|
||||
|
||||
item = self.store.get('0')
|
||||
item.next()
|
||||
|
||||
tx_hash = os.urandom(32).hex()
|
||||
tx = MockTx(42, tx_hash)
|
||||
block = MockBlock(13, [tx_hash])
|
||||
|
||||
with self.assertRaises(MockFilterError):
|
||||
self.session.filter(self.conn, block, tx)
|
||||
|
||||
# Unlock the state, reverting to previous filter
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
self.conn = MockConn()
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
fltr_two = MockFilter('bar')
|
||||
store.register(fltr_two)
|
||||
fltr_three = MockFilter('baz')
|
||||
store.register(fltr_three)
|
||||
|
||||
store.connect()
|
||||
store.start(ignore_lock=True)
|
||||
store.unlock_filter(revert=False)
|
||||
|
||||
store = SyncFsStore(self.path, state_event_callback=state_event_handler, filter_state_event_callback=filter_state_event_handler)
|
||||
session = SyncSession(store)
|
||||
self.conn = MockConn()
|
||||
|
||||
fltr_one = MockFilter('foo')
|
||||
store.register(fltr_one)
|
||||
fltr_two = MockFilter('bar')
|
||||
store.register(fltr_two)
|
||||
fltr_three = MockFilter('baz')
|
||||
store.register(fltr_three)
|
||||
|
||||
session.start()
|
||||
|
||||
session.filter(self.conn, block, tx)
|
||||
|
||||
|
||||
# Allow running this test module directly, outside of a test runner.
if __name__ == '__main__':
    unittest.main()
|
||||
|
||||
|
||||
Reference in New Issue
Block a user