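"""Celery worker daemon for cic-cache.

Parses CLI arguments and configuration, connects to the cache database,
configures a Celery application from the CELERY_* settings, imports the task
definitions from cic_cache.tasks.tx, and runs a worker on the configured
queue.
"""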
# standard imports
import logging
import os
import sys
import argparse
import tempfile

# third-party imports
import celery
import confini

# local imports
import cic_cache.cli
from cic_cache.db import dsn_from_config
from cic_cache.db.models.base import SessionBase
from cic_cache.tasks.tx import *

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
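# Note: the -v/-vv flags parsed below are only used in main() to set the
# Celery worker's --loglevel; the root logger configured here stays at
# WARNING unless raised elsewhere (e.g. by cic_cache.cli).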
# process args
arg_flags = cic_cache.cli.argflag_std_base
local_arg_flags = cic_cache.cli.argflag_local_task
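# arg_flags and local_arg_flags appear to be bit flags defined by
# cic_cache.cli that select which standard CLI options and which
# task-queue-specific options the parser below exposes.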
argparser = cic_cache.cli.ArgumentParser(arg_flags)
argparser.process_local_flags(local_arg_flags)
args = argparser.parse_args()

# process config
config = cic_cache.cli.Config.from_args(args, arg_flags, local_arg_flags)
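# Config.from_args presumably merges the parsed CLI arguments with values from
# the configuration directory and environment (via confini), producing the
# CELERY_* and database settings read below.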
# connect to database
dsn = dsn_from_config(config, 'cic_cache')
SessionBase.connect(dsn)
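# SessionBase.connect presumably initializes the shared SQLAlchemy engine and
# session factory that the imported cic_cache tasks use for database access.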
# set up celery
current_app = celery.Celery(__name__)

broker = config.get('CELERY_BROKER_URL')
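# A broker URL with the file:// scheme selects kombu's filesystem transport,
# which passes messages through directories on disk; the data_folder_* options
# below point it at freshly created temporary directories.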
if broker[:4] == 'file':
    bq = tempfile.mkdtemp()
    bp = tempfile.mkdtemp()
    current_app.conf.update({
        'broker_url': broker,
        'broker_transport_options': {
            'data_folder_in': bq,
            'data_folder_out': bq,
            'data_folder_processed': bp,
            },
        },
        )
    logg.warning('celery broker dirs queue i/o {} processed {}, will NOT be deleted on shutdown'.format(bq, bp))
else:
    current_app.conf.update({
        'broker_url': broker,
        })

result = config.get('CELERY_RESULT_URL')
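# Likewise, a file:// result URL stores task results in a temporary directory
# on disk; any other URL is passed through to Celery unchanged as the
# result_backend setting.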
if result[:4] == 'file':
    rq = tempfile.mkdtemp()
    current_app.conf.update({
        'result_backend': 'file://{}'.format(rq),
        })
    logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(rq))
else:
    current_app.conf.update({
        'result_backend': result,
        })


def main():
    argv = ['worker']
    if args.vv:
        argv.append('--loglevel=DEBUG')
    elif args.v:
        argv.append('--loglevel=INFO')
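    # Both the queue to consume from (-Q) and the worker node name (-n) are
    # taken from the same CELERY_QUEUE configuration value.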
    argv.append('-Q')
    argv.append(config.get('CELERY_QUEUE'))
    argv.append('-n')
    argv.append(config.get('CELERY_QUEUE'))

    current_app.worker_main(argv)


if __name__ == '__main__':
    main()