Implement settings processing on chainlib 0.3.0 structure

lash 2022-05-13 09:40:18 +00:00
parent 0a5818ebf1
commit 139e2772af
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746


@@ -5,27 +5,29 @@ import uuid
 # external imports
 from chainlib.settings import ChainSettings
-from chainsyncer.settings import ChainsyncerSettings
-from chainqueue.settings import ChainqueueSettings

 logg = logging.getLogger(__name__)


-class ChaindSettings(ChainsyncerSettings, ChainqueueSettings):
+class ChaindSettings(ChainSettings):

-    def __init__(self, include_sync=False, include_queue=False):
-        super(ChaindSettings, self).__init__()
-        self.include_sync = include_sync
-        self.include_queue = include_queue
+    def __init__(settings, include_sync=False, include_queue=False):
+        super(ChaindSettings, settings).__init__()
+        settings.include_sync = include_sync
+        settings.include_queue = include_queue

+    def dir_for(self, k):
+        return os.path.join(self.o['SESSION_DIR'], k)

-    def process_session(self, config):
+
+def process_session(settings, config):
     session_id = config.get('SESSION_ID')
     base_dir = os.getcwd()
     data_dir = config.get('SESSION_DATA_DIR')
     if data_dir == None:
-        data_dir = os.path.join(base_dir, '.chaind', 'chaind', self.o.get('CHAIND_BACKEND'))
+        data_dir = os.path.join(base_dir, '.chaind', 'chaind', settings.o.get('CHAIND_BACKEND'))
     data_engine_dir = os.path.join(data_dir, config.get('CHAIND_ENGINE'))
     os.makedirs(data_engine_dir, exist_ok=True)
@@ -59,89 +61,100 @@ class ChaindSettings(ChainsyncerSettings, ChainqueueSettings):
     uid = os.getuid()

     runtime_dir = config.get('SESSION_RUNTIME_DIR')
     if runtime_dir == None:
-        runtime_dir = os.path.join('/run', 'user', str(uid), 'chaind', self.o.get('CHAIND_BACKEND'))
+        runtime_dir = os.path.join('/run', 'user', str(uid), 'chaind', settings.o.get('CHAIND_BACKEND'))
     #runtime_dir = os.path.join(runtime_dir, config.get('CHAIND_ENGINE'), session_id, config.get('CHAIND_COMPONENT'))
     runtime_dir = os.path.join(runtime_dir, config.get('CHAIND_ENGINE'), session_id)
     os.makedirs(runtime_dir, exist_ok=True)

-    self.o['SESSION_RUNTIME_DIR'] = runtime_dir
-    self.o['SESSION_DIR'] = session_dir
-    self.o['SESSION_DATA_DIR'] = data_dir
-    self.o['SESSION_ID'] = session_id
+    settings.set('SESSION_RUNTIME_DIR', runtime_dir)
+    settings.set('SESSION_DIR', session_dir)
+    settings.set('SESSION_DATA_DIR', data_dir)
+    settings.set('SESSION_ID', session_id)
+    return settings

-    def process_sync_interface(self, config):
+def process_sync_interface(settings, config):
     raise NotImplementedError('no sync interface implementation defined')

-    def process_sync(self, config):
-        self.process_sync_interface(config)
-        self.process_sync_range(config)
+def process_sync(settings, config):
+    settings = process_sync_interface(settings, config)
+    settings = process_sync_range(settings, config)
+    return settings

-    def process_socket(self, config):
+def process_socket(settings, config):
     socket_path = config.get('SESSION_SOCKET_PATH')
     if socket_path == None:
-        socket_path = os.path.join(self.o['SESSION_RUNTIME_DIR'], 'chaind.sock')
-    self.o['SESSION_SOCKET_PATH'] = socket_path
+        socket_path = os.path.join(settings.get('SESSION_RUNTIME_DIR'), 'chaind.sock')
+    settings.set('SESSION_SOCKET_PATH', socket_path)
+    return settings

-    def process_dispatch(self, config):
-        self.o['SESSION_DISPATCH_DELAY'] = 0.01
+def process_dispatch(settings, config):
+    settings.set('SESSION_DISPATCH_DELAY', 0.01)
+    return settings

-    def process_token(self, config):
-        self.o['TOKEN_MODULE'] = config.get('TOKEN_MODULE')
+def process_token(settings, config):
+    settings.set('TOKEN_MODULE', config.get('TOKEN_MODULE'))
+    return settings

-    def process_backend(self, config):
-        if self.include_sync and self.include_queue:
-            if self.o['QUEUE_BACKEND'] != self.o['SYNCER_BACKEND']:
-                raise ValueError('queue and syncer backends must match. queue "{}" != syncer "{}"'.format(self.o['QUEUE_BACKEND'], self.o['SYNCER_BACKEND']))
-            self.o['CHAIND_BACKEND'] = self.o['SYNCER_BACKEND']
-        elif self.include_sync:
-            self.o['CHAIND_BACKEND'] = self.o['SYNCER_BACKEND']
-        elif self.include_queue:
-            self.o['CHAIND_BACKEND'] = self.o['QUEUE_BACKEND']
+def process_backend(settings, config):
+    syncer_backend = settings.get('SYNCER_BACKEND')
+    queue_backend = settings.get('QUEUE_BACKEND')
+    backend = None
+    if settings.include_sync and settings.include_queue:
+        if queue_backend != syncer_backend:
+            raise ValueError('queue and syncer backends must match. queue "{}" != syncer "{}"'.format(queue_backend, syncer_backend))
+        backend = syncer_backend
+    elif settings.include_sync:
+        backend = syncer_backend
+    elif settings.include_queue:
+        backend = queue_backend
     else:
         raise ValueError('at least one backend must be set')
+    settings.set('CHAIND_BACKEND', backend)
+    return settings

-    def process_chaind_queue(self, config):
+def process_chaind_queue(settings, config):
     if config.get('QUEUE_STATE_PATH') == None:
-        queue_state_dir = self.dir_for('queue')
+        queue_state_dir = settings.dir_for('queue')
         config.add(queue_state_dir, 'QUEUE_STATE_PATH', False)
         logg.debug('setting queue state path {}'.format(queue_state_dir))

-    self.process_queue_tx(config)
-    self.process_queue_paths(config)
+    settings = process_queue_tx(settings, config)
+    settings = process_queue_paths(settings, config)
     if config.get('QUEUE_BACKEND') == 'fs':
-        self.process_queue_backend_fs(config)
-    self.process_queue_backend(config)
-    self.process_queue_store(config)
+        settings = process_queue_backend_fs(settings, config)
+    settings = process_queue_backend(settings, config)
+    settings = process_queue_store(settings, config)
+    return settings

-    def process(self, config):
-        #super(ChaindSettings, self).process(config)
-        self.process_common(config)
-        if self.include_queue:
-            self.process_queue_backend(config)
-        if self.include_sync:
-            self.process_sync_backend(config)
-        self.process_backend(config)
-        self.process_session(config)
-        if self.include_sync:
-            self.process_sync(config)
-        if self.include_queue:
-            self.process_chaind_queue(config)
-        self.process_dispatch(config)
-        self.process_token(config)
-        self.process_socket(config)
+def process_settings(settings, config):
+    settings = process_common(settings, config)
+    if settings.include_queue:
+        settings = process_queue_backend(settings, config)
+    if settings.include_sync:
+        settings = process_sync_backend(settings, config)
+    settings = process_backend(settings, config)
+    settings = process_session(settings, config)
+    if settings.include_sync:
+        settings = process_sync(settings, config)
+    if settings.include_queue:
+        settings = process_chaind_queue(settings, config)
+    settings = process_dispatch(settings, config)
+    settings = process_token(settings, config)
+    settings = process_socket(settings, config)
+    return settings

-    def dir_for(self, k):
-        return os.path.join(self.o['SESSION_DIR'], k)
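
A minimal usage sketch of the refactored flow (not part of the commit): the module path chaind.settings and the pre-loaded chainlib-style config object are assumptions; everything else follows the new code in the diff above.

# Sketch only: 'chaind.settings' as the module path and a pre-loaded
# chainlib-style `config` (exposing the get()/add() calls seen above)
# are assumptions, not taken from the commit.
from chaind.settings import ChaindSettings, process_settings

def build_settings(config):
    # queue only here; the sync path requires a chain-specific
    # process_sync_interface, which this file leaves unimplemented
    settings = ChaindSettings(include_sync=False, include_queue=True)
    # each process_* step stores derived values via settings.set() and
    # returns the settings object, so one call drives the whole chain
    return process_settings(settings, config)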