Implement CLI processing, settings renderer
commit 7e78fd0da2
parent 99cbf20b64

chaind/cli/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from .base import *
from .arg import ArgumentParser
from .config import Config

chaind/cli/arg.py (new file, 17 lines)
@@ -0,0 +1,17 @@
# external imports
from chainlib.eth.cli import ArgumentParser as BaseArgumentParser

# local imports
from .base import SyncFlag, Flag


class ArgumentParser(BaseArgumentParser):

    def process_local_flags(self, local_arg_flags):
        if local_arg_flags & SyncFlag.SESSION:
            self.add_argument('--session-id', dest='session_id', type=str, help='Session to store state and data under')
            self.add_argument('--runtime-dir', dest='runtime_dir', type=str, help='Directory to store volatile data')
            self.add_argument('--data-dir', dest='data_dir', type=str, help='Directory to store persistent data')
        if local_arg_flags & SyncFlag.SYNCER:
            self.add_argument('--offset', type=int, help='Block to start sync from. Default is the latest block at first run.')
            self.add_argument('--until', type=int, default=-1, help='Block to stop sync on. Default is do not stop.')
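
The parser registers only the option groups selected by the flag word, so each chaind tool opts in per feature. A minimal driver sketch (hypothetical code, not part of this commit; it assumes the chainlib base parser constructs without arguments):

from chaind.cli import ArgumentParser
from chaind.cli.base import argflag_local_sync

argparser = ArgumentParser()
# argflag_local_sync has both SESSION and SYNCER set, so all five
# options above get registered
argparser.process_local_flags(argflag_local_sync)
args = argparser.parse_args(['--session-id', 'mysession', '--offset', '1024'])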

chaind/cli/base.py (new file, 22 lines)
@@ -0,0 +1,22 @@
# standard imports
import enum

# external imports
from chainlib.eth.cli import (
    argflag_std_read,
    argflag_std_write,
    argflag_std_base,
    Flag,
    )


class SyncFlag(enum.IntEnum):
    SESSION = 1
    SYNCER = 16
    QUEUE = 256
    DISPATCH = 512
    SOCKET = 4096


argflag_local_sync = argflag_std_base | Flag.CHAIN_SPEC | SyncFlag.SYNCER | SyncFlag.SESSION
argflag_local_queue = SyncFlag.QUEUE | Flag.CHAIN_SPEC | SyncFlag.SOCKET | SyncFlag.SESSION
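
SyncFlag is an IntEnum, so the presets compose and test with plain bit arithmetic; the checks below follow directly from the definitions above:

from chaind.cli.base import SyncFlag, argflag_local_sync, argflag_local_queue

flags = SyncFlag.QUEUE | SyncFlag.SOCKET | SyncFlag.SESSION
assert flags & SyncFlag.SESSION        # session options selected
assert not flags & SyncFlag.SYNCER     # sync range options not selected
assert argflag_local_sync & SyncFlag.SYNCER   # sync tools take the sync range options
assert argflag_local_queue & SyncFlag.SOCKET  # queue tools take a socket path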

chaind/cli/config.py (new file, 47 lines)
@@ -0,0 +1,47 @@
# standard imports
import logging
import os

# external imports
from chainlib.cli import (
        Config as BaseConfig,
        Flag,
        )
from .base import SyncFlag

script_dir = os.path.dirname(os.path.realpath(__file__))

logg = logging.getLogger(__name__)


class Config(BaseConfig):

    local_base_config_dir = os.path.join(script_dir, '..', 'data', 'config')

    @classmethod
    def from_args(cls, engine, args, arg_flags, local_arg_flags, extra_args={}, default_config_dir=None, base_config_dir=None, default_fee_limit=None):
        expanded_base_config_dir = [cls.local_base_config_dir]
        if base_config_dir != None:
            if isinstance(base_config_dir, str):
                base_config_dir = [base_config_dir]
            for d in base_config_dir:
                expanded_base_config_dir.append(d)
        config = BaseConfig.from_args(args, arg_flags, extra_args=extra_args, default_config_dir=default_config_dir, base_config_dir=expanded_base_config_dir, load_callback=None)

        local_args_override = {}
        if local_arg_flags & SyncFlag.SESSION:
            local_args_override['SESSION_ID'] = getattr(args, 'session_id')
            local_args_override['SESSION_RUNTIME_DIR'] = getattr(args, 'runtime_dir')
            local_args_override['SESSION_DATA_DIR'] = getattr(args, 'data_dir')
        if local_arg_flags & SyncFlag.SYNCER:
            local_args_override['SYNCER_OFFSET'] = getattr(args, 'offset')
            local_args_override['SYNCER_LIMIT'] = getattr(args, 'until')

        if local_arg_flags & SyncFlag.SOCKET:
            local_args_override['SESSION_SOCKET_PATH'] = getattr(args, 'socket')

        config.dict_override(local_args_override, 'local cli args')

        config.add(engine, 'CHAIND_ENGINE', False)

        return config
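
A usage sketch tying the parser and the config renderer together (hypothetical wiring; the engine tag, flag words, and setup of chainlib's own global options are illustrative assumptions):

from chaind.cli import ArgumentParser, Config
from chaind.cli.base import argflag_local_sync

argparser = ArgumentParser()
argparser.process_local_flags(argflag_local_sync)
args = argparser.parse_args()

config = Config.from_args('eth', args, 0, argflag_local_sync)
print(config.get('SESSION_ID'))     # --session-id override, or the config file value
print(config.get('CHAIND_ENGINE'))  # 'eth', added by from_args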

@@ -4,12 +4,6 @@ runtime_dir =
 id =
 data_dir =
 
-[database]
-engine =
-name = chaind
-driver =
-user =
-password =
-host =
-port =
-debug = 0
+[syncer]
+offset =
+limit =
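
The ini keys surface in code as SECTION_KEY lookups, following confini's flattening convention; assuming the enclosing section here is [session], consistent with the SESSION_* keys this commit reads:

config.get('SYNCER_OFFSET')     # [syncer] offset
config.get('SYNCER_LIMIT')      # [syncer] limit
config.get('SESSION_DATA_DIR')  # [session] data_dir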

@@ -117,3 +117,4 @@ class SessionController:
             logg.debug('{} bytes sent'.format(len(v)))
         except BrokenPipeError:
             logg.debug('they just hung up. how rude.')
+        srvs.close()
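
The added srvs.close() runs on both the success path and the broken-pipe path, but only because the except clause swallows the error. A try/finally form (a sketch, assuming srvs is the accepted client socket, v the response bytes, and an elided send call) would close the socket even if an unexpected exception escapes:

try:
    srvs.send(v)
    logg.debug('{} bytes sent'.format(len(v)))
except BrokenPipeError:
    logg.debug('they just hung up. how rude.')
finally:
    srvs.close()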

chaind/settings.py (new file, 128 lines)
@@ -0,0 +1,128 @@
# standard imports
import logging
import os
import uuid

# external imports
from chainlib.chain import ChainSpec
from chainlib.eth.block import block_latest
from hexathon import (
        to_int as hex_to_int,
        strip_0x,
        )

logg = logging.getLogger(__name__)


class ChaindSettings:

    def __init__(self):
        self.o = {}
        self.get = self.o.get


    def process_common(self, config):
        self.o['CHAIN_SPEC'] = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
        self.o['SOCKET_PATH'] = config.get('SESSION_SOCKET_PATH')


    def process_sync_range(self, config):
        o = block_latest()
        r = self.o['RPC'].do(o)
        block_offset = int(strip_0x(r), 16) + 1
        logg.info('network block height at startup is {}'.format(block_offset))

        keep_alive = False
        session_block_offset = 0
        block_limit = 0
        session_block_offset = int(config.get('SYNCER_OFFSET'))

        until = int(config.get('SYNCER_LIMIT'))
        if until > 0:
            if until <= session_block_offset:
                raise ValueError('sync termination block number must be later than offset ({} >= {})'.format(session_block_offset, until))
            block_limit = until
        else:
            keep_alive = True
            block_limit = -1

        if session_block_offset == -1:
            session_block_offset = block_offset
        elif not config.true('_KEEP_ALIVE'):
            if block_limit == 0:
                block_limit = block_offset

        self.o['SYNCER_OFFSET'] = session_block_offset
        self.o['SYNCER_LIMIT'] = block_limit


    def process_sync_session(self, config):
        session_id = config.get('SESSION_ID')

        base_dir = os.getcwd()
        data_dir = config.get('SESSION_DATA_DIR')
        if data_dir == None:
            data_dir = os.path.join(base_dir, '.chaind', 'chaind')
        data_engine_dir = os.path.join(data_dir, config.get('CHAIND_ENGINE'))
        os.makedirs(data_engine_dir, exist_ok=True)

        # check if existing session
        if session_id == None:
            fp = os.path.join(data_engine_dir, 'default')
            try:
                os.stat(fp)
                fp = os.path.realpath(fp)
            except FileNotFoundError:
                fp = None
            if fp != None:
                session_id = os.path.basename(fp)

        make_default = False
        if session_id == None:
            session_id = str(uuid.uuid4())
            make_default = True

        # create the session persistent dir
        session_dir = os.path.join(data_engine_dir, session_id)
        if make_default:
            fp = os.path.join(data_engine_dir, 'default')
            os.symlink(session_dir, fp)

        data_dir = os.path.join(session_dir, 'sync')
        os.makedirs(data_dir, exist_ok=True)

        # create volatile dir
        uid = os.getuid()
        runtime_dir = config.get('SESSION_RUNTIME_DIR')
        if runtime_dir == None:
            runtime_dir = os.path.join('/run', 'user', str(uid), 'chaind')
        runtime_dir = os.path.join(runtime_dir, config.get('CHAIND_ENGINE'), session_id, 'sync')
        os.makedirs(runtime_dir, exist_ok=True)

        self.o['SESSION_RUNTIME_DIR'] = runtime_dir
        self.o['SESSION_DATA_DIR'] = data_dir
        self.o['SESSION_ID'] = session_id


    def process_sync_interface(self, config):
        raise NotImplementedError('no sync interface implementation defined')


    def process_sync(self, config):
        self.process_sync_interface(config)
        self.process_sync_session(config)
        self.process_sync_range(config)


    def process(self, config):
        self.process_common(config)
        self.process_sync(config)


    def __str__(self):
        ks = list(self.o.keys())
        ks.sort()
        s = ''
        for k in ks:
            s += '{}: {}\n'.format(k, self.o.get(k))
        return s
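
process_sync_interface() is the deliberate extension point: a backend must install an RPC connection under the 'RPC' key before process_sync_range() queries the chain head. A minimal EVM-flavoured sketch (the connection class, config key, and subclass name are assumptions, not part of this commit):

from chainlib.eth.connection import EthHTTPConnection

from chaind.settings import ChaindSettings


class EthChaindSettings(ChaindSettings):

    def process_sync_interface(self, config):
        # process_sync_range() calls self.o['RPC'].do(block_latest())
        self.o['RPC'] = EthHTTPConnection(config.get('RPC_PROVIDER'))


settings = EthChaindSettings()
settings.process(config)  # config as produced by chaind.cli.Config.from_args
print(settings)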

@@ -1,80 +0,0 @@
#!/usr/bin/python
import os
import argparse
import logging

# external imports
import alembic
from alembic.config import Config as AlembicConfig
import confini
import chainqueue.db
import chainsyncer.db

# local imports
from chaind import Environment

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'chaind', 'db')
configdir = os.path.join(rootdir, 'chaind', 'data', 'config')
default_migrations_dir = os.path.join(dbdir, 'migrations')

env = Environment(env=os.environ)

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=env.config_dir, help='config directory')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--data-dir', dest='data_dir', type=str, help='data directory')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=default_migrations_dir, type=str, help='path to alembic migrations directory')
argparser.add_argument('--reset', action='store_true', help='reset existing database')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

# process config
logg.debug('loading config from {}'.format(args.c))
config = confini.Config(configdir, args.env_prefix, override_dirs=[args.c])
config.process()
args_override = {
            'SESSION_DATA_DIR': getattr(args, 'data_dir'),
        }
config.dict_override(args_override, 'cli args')

if config.get('DATABASE_ENGINE') == 'sqlite':
    config.add(os.path.join(config.get('SESSION_DATA_DIR'), config.get('DATABASE_NAME') + '.sqlite'), 'DATABASE_NAME', True)

config.censor('PASSWORD', 'DATABASE')

logg.debug('config loaded:\n{}'.format(config))
config.add(os.path.join(args.migrations_dir, config.get('DATABASE_ENGINE')), '_MIGRATIONS_DIR', True)
if not os.path.isdir(config.get('_MIGRATIONS_DIR')):
    logg.debug('migrations dir for engine {} not found, reverting to default'.format(config.get('DATABASE_ENGINE')))
    config.add(os.path.join(args.migrations_dir, 'default'), '_MIGRATIONS_DIR', True)

os.makedirs(config.get('SESSION_DATA_DIR'), exist_ok=True)

dsn = chainqueue.db.dsn_from_config(config)

def main():
    logg.info('using migrations dir {}'.format(config.get('_MIGRATIONS_DIR')))
    logg.info('using db {}'.format(dsn))
    ac = AlembicConfig(os.path.join(config.get('_MIGRATIONS_DIR'), 'alembic.ini'))
    ac.set_main_option('sqlalchemy.url', dsn)
    ac.set_main_option('script_location', config.get('_MIGRATIONS_DIR'))

    if args.reset:
        logg.debug('reset is set, purging existing content')
        alembic.command.downgrade(ac, 'base')

    alembic.command.upgrade(ac, 'head')


if __name__ == '__main__':
    main()
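
Read together with the removal of the [database] section from the default config above, dropping this alembic entrypoint suggests the commit retires direct database provisioning from chaind itself; the new session and settings machinery keeps its state under plain directories instead.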