Update state config names, remove dead code

lash 2022-05-13 13:47:11 +00:00
parent ba894044d8
commit 9f2a791b1f
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746
3 changed files with 24 additions and 67 deletions


@@ -13,8 +13,6 @@ def apply_flag(flag):
def apply_arg(arg):
arg.add_long('session-id', 'session', help='Session to store state and data under')
arg.add_long('runtime-dir', 'session', help='Directory to store volatile data')
arg.add_long('data-dir', 'session', help='Directory to store persistent data')
arg.add_long('socket-path', 'socket', help='UNIX socket path')
arg.add_long('send-socket', 'socket_client', typ=bool, help='Send to UNIX socket')
arg.add_long('token-module', 'token', help='Python module path to resolve tokens from identifiers')


@@ -1,5 +1,5 @@
[session]
socket_path =
runtime_dir =
runtime_path =
id =
data_dir =
data_path =
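
For reference, this is how keys under the renamed [session] section line up with the SESSION_* names the settings code below asks config.get() for. A minimal sketch using plain configparser; chaind's actual config loader is not shown here, and the mapping convention is assumed from the names visible in this diff:

# Illustrative only: flatten "[session] runtime_path" style entries into
# SESSION_RUNTIME_PATH style names, matching the config.get() calls below.
import configparser

def load_flat_config(path):
    parser = configparser.ConfigParser(allow_no_value=True)
    parser.read(path)
    flat = {}
    for section in parser.sections():
        for k, v in parser.items(section):
            flat['{}_{}'.format(section, k).upper()] = v
    return flat

# flat['SESSION_RUNTIME_PATH'], flat['SESSION_DATA_PATH'], flat['SESSION_SOCKET_PATH'], ...

Leaving the values empty, as in the file above, is what lets the defaults computed in process_session and process_socket below take effect (assuming the real loader reports empty values as unset).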


@@ -5,6 +5,7 @@ import uuid
# external imports
from chainlib.settings import ChainSettings
from chainqueue.settings import *
logg = logging.getLogger(__name__)
@@ -18,14 +19,14 @@ class ChaindSettings(ChainSettings):
def dir_for(self, k):
return os.path.join(self.o['SESSION_DIR'], k)
return os.path.join(self.o['SESSION_PATH'], k)
def process_session(settings, config):
session_id = config.get('SESSION_ID')
base_dir = os.getcwd()
data_dir = config.get('SESSION_DATA_DIR')
data_dir = config.get('SESSION_DATA_PATH')
if data_dir == None:
data_dir = os.path.join(base_dir, '.chaind', 'chaind', settings.o.get('CHAIND_BACKEND'))
data_engine_dir = os.path.join(data_dir, config.get('CHAIND_ENGINE'))
@@ -48,42 +49,36 @@ def process_session(settings, config):
make_default = True
# create the session persistent dir
session_dir = os.path.join(data_engine_dir, session_id)
session_path = os.path.join(data_engine_dir, session_id)
if make_default:
fp = os.path.join(data_engine_dir, 'default')
os.symlink(session_dir, fp)
os.symlink(session_path, fp)
#data_dir = os.path.join(session_dir, config.get('CHAIND_COMPONENT'))
data_dir = session_dir
os.makedirs(data_dir, exist_ok=True)
data_path = session_path
os.makedirs(data_path, exist_ok=True)
# create volatile dir
uid = os.getuid()
runtime_dir = config.get('SESSION_RUNTIME_DIR')
if runtime_dir == None:
runtime_dir = os.path.join('/run', 'user', str(uid), 'chaind', settings.o.get('CHAIND_BACKEND'))
#runtime_dir = os.path.join(runtime_dir, config.get('CHAIND_ENGINE'), session_id, config.get('CHAIND_COMPONENT'))
runtime_dir = os.path.join(runtime_dir, config.get('CHAIND_ENGINE'), session_id)
os.makedirs(runtime_dir, exist_ok=True)
runtime_path = config.get('SESSION_RUNTIME_PATH')
if runtime_path == None:
runtime_path = os.path.join('/run', 'user', str(uid), 'chaind', settings.get('CHAIND_BACKEND'))
runtime_path = os.path.join(runtime_path, config.get('CHAIND_ENGINE'), session_id)
os.makedirs(runtime_path, exist_ok=True)
settings.set('SESSION_RUNTIME_DIR', runtime_dir)
settings.set('SESSION_DIR', session_dir)
settings.set('SESSION_DATA_DIR', data_dir)
settings.set('SESSION_RUNTIME_PATH', runtime_path)
settings.set('SESSION_PATH', session_path)
settings.set('SESSION_DATA_PATH', data_path)
settings.set('SESSION_ID', session_id)
return settings
def process_sync(settings, config):
settings = process_sync_range(settings, config)
return settings
def process_socket(settings, config):
socket_path = config.get('SESSION_SOCKET_PATH')
if socket_path == None:
socket_path = os.path.join(settings.get('SESSION_RUNTIME_DIR'), 'chaind.sock')
settings.get('SESSION_SOCKET_PATH', socket_path)
socket_path = os.path.join(settings.get('SESSION_RUNTIME_PATH'), 'chaind.sock')
settings.set('SESSION_SOCKET_PATH', socket_path)
return settings
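
To make the renamed path handling above concrete, a short sketch of where a session lands with the defaults from process_session and process_socket, plus a bare-bones UNIX socket client. The backend, engine and session id values are made-up examples, a stream socket is assumed, and the payload format the daemon expects is not covered by this diff:

# Illustrative only: reproduces the default path construction shown above.
import os
import socket

backend = 'fs'            # example value standing in for CHAIND_BACKEND
engine = 'eth'            # example value standing in for CHAIND_ENGINE
session_id = 'deadbeef'   # example session id

# persistent data: <cwd>/.chaind/chaind/<backend>/<engine>/<session id>
data_path = os.path.join(os.getcwd(), '.chaind', 'chaind', backend, engine, session_id)

# volatile data: /run/user/<uid>/chaind/<backend>/<engine>/<session id>
runtime_path = os.path.join('/run', 'user', str(os.getuid()), 'chaind', backend, engine, session_id)

# default socket location inside the runtime dir
socket_path = os.path.join(runtime_path, 'chaind.sock')

def send_to_session(payload):
    # connect, send raw bytes, read a single reply; protocol details are assumed
    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.connect(socket_path)
    s.send(payload)
    r = s.recv(4096)
    s.close()
    return r
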
@@ -98,56 +93,20 @@ def process_token(settings, config):
def process_backend(settings, config):
syncer_backend = settings.get('SYNCER_BACKEND')
queue_backend = settings.get('QUEUE_BACKEND')
backend = None
if settings.include_sync and settings.include_queue:
if queue_backend != syncer_backend:
raise ValueError('queue and syncer backends must match. queue "{}" != syncer "{}"'.format(queue_backend, syncer_backend))
backend = syncer_backend
elif settings.include_sync:
backend = syncer_backend
elif settings.include_queue:
backend = queue_backen
else:
raise ValueError('at least one backend must be set')
settings.set('CHAIND_BACKEND', backend)
settings.set('CHAIND_BACKEND', config.get('STATE_BACKEND')) #backend)
return settings
def process_chaind_queue(settings, config):
if config.get('QUEUE_STATE_PATH') == None:
def process_queue(settings, config):
if config.get('STATE_PATH') == None:
queue_state_dir = settings.dir_for('queue')
config.add(queue_state_dir, 'QUEUE_STATE_PATH', False)
config.add(queue_state_dir, 'STATE_PATH', False)
logg.debug('setting queue state path {}'.format(queue_state_dir))
settings = process_queue_tx(settings, config)
settings = process_queue_paths(settings, config)
if config.get('QUEUE_BACKEND') == 'fs':
if config.get('STATE_BACKEND') == 'fs':
settings = process_queue_backend_fs(settings, config)
settings = process_queue_backend(settings, config)
settings = process_queue_store(settings, config)
return settings
#def process_settings(settings, config):
# if settings = include_queue:
# settings = process_queue_backend(settings, config)
# if settings = include_sync:
# settings = process_sync_backend(settings, config)
#
# settings = process_backend(settings, config)
# settings = process_session(settings, config)
#
# if settings = include_sync:
# settings = process_sync(settings, config)
# if settings = include_queue:
# settings = process_chaind_queue(settings, config)
# settings = process_dispatch(settings, config)
# settings = process_token(settings, config)
#
# settings = process_socket(settings, config)
#
# return settings
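
Although the commented-out process_settings block is dropped here as dead code, it still documents the intended composition order of these helpers. Below is a rough, non-authoritative sketch of that pipeline using the names this commit settles on; the module path and the include_sync/include_queue toggles are taken from the code above, but wiring it up like this is an assumption, not the project's actual entry point. The backend-probing calls at the top of the old block are left out, since the backend is now read from STATE_BACKEND:

# Illustrative composition only; mirrors the removed comment block.
from chaind.settings import (   # assumed module path for the file shown above
    process_backend,
    process_session,
    process_sync,
    process_queue,
    process_dispatch,
    process_token,
    process_socket,
)

def process_settings(settings, config):
    settings = process_backend(settings, config)
    settings = process_session(settings, config)

    if settings.include_sync:
        settings = process_sync(settings, config)
    if settings.include_queue:
        settings = process_queue(settings, config)
        settings = process_dispatch(settings, config)
        settings = process_token(settings, config)

    settings = process_socket(settings, config)
    return settings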