WIP more work on lock cli tool

lash 2022-04-27 09:59:40 +00:00
parent 3a2317d253
commit 36acf3f09a
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746
1 changed file with 13 additions and 16 deletions


@@ -8,10 +8,12 @@ import importlib
 # external imports
 import chainlib.cli
+from shep.persist import PersistedState
 
 # local imports
 import chainsyncer.cli
 from chainsyncer.settings import ChainsyncerSettings
+from chainsyncer.filter import FilterState
 
 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()
@@ -20,9 +22,7 @@ logg = logging.getLogger()
 arg_flags = chainlib.cli.argflag_std_base | chainlib.cli.Flag.CHAIN_SPEC
 argparser = chainlib.cli.ArgumentParser(arg_flags)
 argparser.add_argument('--state-dir', type=str, dest='state_dir', help='State directory')
-argparser.add_argument('--session-id', type=str, dest='session_id', help='Session id for state')
-argparser.add_argument('--action', type=str, choices=['repeat', 'continue'], help='Action to take on lock. Repeat means re-run the locked filter. Continue means resume execution for next filter.')
-argparser.add_positional('tx', type=str, help='Transaction hash to unlock')
+argparser.add_positional('action', type=str, help='Action to take on lock. Repeat means re-run the locked filter. Continue means resume execution for next filter.')
 
 sync_flags = chainsyncer.cli.SyncFlag.RANGE | chainsyncer.cli.SyncFlag.HEAD
 chainsyncer.cli.process_flags(argparser, sync_flags)
@@ -33,7 +33,6 @@ base_config_dir = chainsyncer.cli.config_dir,
 config = chainlib.cli.Config.from_args(args, arg_flags, base_config_dir=base_config_dir)
 config = chainsyncer.cli.process_config(config, args, sync_flags)
 config.add(args.state_dir, '_STATE_DIR', False)
-config.add(args.session_id, '_SESSION_ID', False)
 logg.debug('config loaded:\n{}'.format(config))
 
 settings = ChainsyncerSettings()
@@ -41,26 +40,24 @@ settings.process_sync_backend(config)
 def main():
-    state_dir = None
     if settings.get('SYNCER_BACKEND') == 'mem':
         raise ValueError('cannot unlock volatile state store')
 
     if settings.get('SYNCER_BACKEND') == 'fs':
-        syncer_store_module = importlib.import_module('chainsyncer.store.fs')
-        syncer_store_class = getattr(syncer_store_module, 'SyncFsStore')
+        syncer_store_module = importlib.import_module('shep.store.file')
+        syncer_store_class = getattr(syncer_store_module, 'SimpleFileStoreFactory')
     elif settings.get('SYNCER_BACKEND') == 'rocksdb':
-        syncer_store_module = importlib.import_module('chainsyncer.store.rocksdb')
-        syncer_store_class = getattr(syncer_store_module, 'SyncRocksDbStore')
+        syncer_store_module = importlib.import_module('shep.store.rocksdb')
+        syncer_store_class = getattr(syncer_store_module, 'RocksdbStoreFactory')
     else:
-        syncer_store_module = importlib.import_module(settings.get('SYNCER_BACKEND'))
-        syncer_store_class = getattr(syncer_store_module, 'SyncStore')
+        raise NotImplementedError('cannot use backend: {}'.format(config.get('SYNCER_BACKEND')))
 
-    state_dir = os.path.join(config.get('_STATE_DIR'), settings.get('SYNCER_BACKEND'))
-    sync_path = os.path.join(config.get('_SESSION_ID'), 'sync', 'filter')
-    sync_store = syncer_store_class(state_dir, session_id=sync_path)
-    logg.info('session is {}'.format(sync_store.session_id))
+    state_dir = config.get('_STATE_DIR')
+    factory = syncer_store_class(state_dir)
+    base_state = PersistedState(factory.add, 0, check_alias=False)
+    state = FilterState(base_state, scan=True)
+    print(state)
 
 
 if __name__ == '__main__':
     main()
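
For reference, a minimal sketch of how the reworked 'fs' backend path in this commit wires the store together, using only the identifiers visible in the diff (shep's SimpleFileStoreFactory and PersistedState, chainsyncer's FilterState). The state directory value below is illustrative only; in the tool it comes from --state-dir via config.

# Minimal sketch (assumption): replicates the new 'fs' branch of main() above.
from shep.persist import PersistedState
from shep.store.file import SimpleFileStoreFactory
from chainsyncer.filter import FilterState

state_dir = '/tmp/chainsyncer-state'  # illustrative; the CLI reads this from --state-dir
factory = SimpleFileStoreFactory(state_dir)
# PersistedState is given the factory's add method, as in the diff
base_state = PersistedState(factory.add, 0, check_alias=False)
# FilterState scans existing filter state from the backing store
state = FilterState(base_state, scan=True)
print(state)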