# standard imports
import argparse
import logging
import os
import tempfile
# third party imports
import celery
import i18n
import redis
from confini import Config
# local imports
from cic_ussd.db import dsn_from_config
from cic_ussd.db.models.base import SessionBase
from cic_ussd.metadata.signer import Signer
from cic_ussd.metadata.base import Metadata
from cic_ussd.redis import InMemoryStore
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
from cic_ussd.validator import validate_presence
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
logging.getLogger('gnupg').setLevel(logging.WARNING)

config_directory = '/usr/local/etc/cic-ussd/'
# define arguments
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-c', type=str, default=config_directory, help='config directory.')
arg_parser.add_argument('-q', type=str, default='cic-ussd', help='queue name for worker tasks')
arg_parser.add_argument('-v', action='store_true', help='be verbose')
arg_parser.add_argument('-vv', action='store_true', help='be more verbose')
arg_parser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
args = arg_parser.parse_args()
# define log levels
if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)
# parse config
config = Config(args.c, args.env_prefix)
config.process()
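# make the worker queue name (-q) available to the rest of the app as _CELERY_QUEUE
# (the boolean flag is assumed to allow overwriting an existing value)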
config.add(args.q, '_CELERY_QUEUE', True)
config.censor('PASSWORD', 'DATABASE')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
# connect to database
data_source_name = dsn_from_config(config)
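# pool size and debug flag come from the loaded config; DATABASE_DEBUG presumably toggles SQL statement echoing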
SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
# verify database connection with minimal sanity query
session = SessionBase.create_session()
session.execute('SELECT version_num FROM alembic_version')
session.close()
# define universal redis cache access
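# decode_responses=True makes the client return str instead of bytes; the same client instance
# is shared with the in-memory USSD session handler below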
InMemoryStore.cache = redis.StrictRedis(host=config.get('REDIS_HOSTNAME'),
                                        port=config.get('REDIS_PORT'),
                                        password=config.get('REDIS_PASSWORD'),
                                        db=config.get('REDIS_DATABASE'),
                                        decode_responses=True)
InMemoryUssdSession.redis_cache = InMemoryStore.cache
# define metadata URL
Metadata.base_url = config.get('CIC_META_URL')
# define signer values
export_dir = config.get('PGP_EXPORT_DIR')
if export_dir:
    validate_presence(path=export_dir)
Signer.gpg_path = export_dir
Signer.gpg_passphrase = config.get('PGP_PASSPHRASE')
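# PGP_KEYS_PATH and PGP_PRIVATE_KEYS are concatenated as-is, so PGP_KEYS_PATH is assumed to end
# with a trailing path separator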
key_file_path = f"{config.get('PGP_KEYS_PATH')}{config.get('PGP_PRIVATE_KEYS')}"
if key_file_path:
    validate_presence(path=key_file_path)
Signer.key_file_path = key_file_path
# set up translations
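# python-i18n reads translation files from APP_LOCALE_PATH; the 'fallback' locale is used when a
# key is missing for the active locale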
i18n.load_path.append(config.get('APP_LOCALE_PATH'))
i18n.set('fallback', config.get('APP_LOCALE_FALLBACK'))
# set up celery
current_app = celery.Celery(__name__)
# define celery configs
broker = config.get('CELERY_BROKER_URL')
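# a file:// broker uses the filesystem transport, typically only suitable for local development
# and testing; messages are exchanged through the temporary folders created below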
if broker[:4] == 'file':
    broker_queue = tempfile.mkdtemp()
    broker_processed = tempfile.mkdtemp()
    current_app.conf.update({
        'broker_url': broker,
        'broker_transport_options': {
            'data_folder_in': broker_queue,
            'data_folder_out': broker_queue,
            'data_folder_processed': broker_processed
        },
    })
    logg.warning(
        f'celery broker dirs queue i/o {broker_queue} processed {broker_processed}, will NOT be deleted on shutdown')
else:
    current_app.conf.update({
        'broker_url': broker
    })
result = config.get('CELERY_RESULT_URL')
if result[:4] == 'file':
    result_queue = tempfile.mkdtemp()
    current_app.conf.update({
        'result_backend': 'file://{}'.format(result_queue),
    })
    logg.warning('celery backend store dir {} created, will NOT be deleted on shutdown'.format(result_queue))
else:
    current_app.conf.update({
        'result_backend': result,
    })
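# import task definitions so their decorators register against the app configured above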
import cic_ussd.tasks


def main():
    argv = ['worker']
    if args.vv:
        argv.append('--loglevel=DEBUG')
    elif args.v:
        argv.append('--loglevel=INFO')
    argv.append('-Q')
    argv.append(args.q)
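    # -n sets the worker node name; reusing the queue name keeps workers on the same host distinguishable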
    argv.append('-n')
    argv.append(args.q)

    current_app.worker_main(argv)


if __name__ == '__main__':
    main()