Merge branch 'philip/bump-test-coverage' into 'master'

Rehabilitate test coverage in ussd and cic-notify

See merge request grassrootseconomics/cic-internal-integration!323

Commit b8cd7eec56
@@ -17,7 +17,7 @@ from cic_eth_registry.error import UnknownContractError
# local imports
from cic_eth.error import SeppukuError
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.util import CacheGasOracle
from cic_eth.eth.util import CacheGasOracle, MaxGasOracle

#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
@@ -41,21 +41,24 @@ class BaseTask(celery.Task):

def create_gas_oracle(self, conn, address=None, *args, **kwargs):
if address == None:
return RPCGasOracle(
x = None
if address is None:
x = RPCGasOracle(
conn,
code_callback=kwargs.get('code_callback', self.get_min_fee_limit),
min_price=self.min_fee_price,
id_generator=kwargs.get('id_generator'),
)
else:
return CacheGasOracle(
conn,
address,
method=kwargs.get('method'),
min_price=self.min_fee_price,
id_generator=kwargs.get('id_generator'),
)
x = MaxGasOracle(conn)
x.code_callback = x.get_fee_units

return x

def get_min_fee_limit(self, code):
return self.min_fee_limit

@@ -84,7 +87,7 @@ class BaseTask(celery.Task):
)
s.apply_async()

class CriticalTask(BaseTask):
retry_jitter = True
retry_backoff = True
@@ -96,7 +99,7 @@ class CriticalSQLAlchemyTask(CriticalTask):
sqlalchemy.exc.DatabaseError,
sqlalchemy.exc.TimeoutError,
sqlalchemy.exc.ResourceClosedError,
)
)

class CriticalWeb3Task(CriticalTask):
@@ -104,7 +107,7 @@ class CriticalWeb3Task(CriticalTask):
ConnectionError,
)
safe_gas_threshold_amount = 60000 * 3
safe_gas_refill_amount = safe_gas_threshold_amount * 5
safe_gas_gifter_balance = safe_gas_threshold_amount * 5 * 100

@@ -122,13 +125,13 @@ class CriticalSQLAlchemyAndSignerTask(CriticalTask):
sqlalchemy.exc.DatabaseError,
sqlalchemy.exc.TimeoutError,
sqlalchemy.exc.ResourceClosedError,
)
)

class CriticalWeb3AndSignerTask(CriticalWeb3Task):
autoretry_for = (
ConnectionError,
)

@celery_app.task()
def check_health(self):
pass
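For readability, here is a consolidated sketch of the gas-oracle selection the hunk above introduces. It is illustrative only; it assumes the RPCGasOracle and MaxGasOracle constructors from cic_eth.eth.util behave as their names suggest and is not the authoritative implementation.

    def create_gas_oracle(self, conn, address=None, *args, **kwargs):
        # no contract address: RPC-backed oracle with a minimum fee limit callback
        if address is None:
            return RPCGasOracle(
                conn,
                code_callback=kwargs.get('code_callback', self.get_min_fee_limit),
                min_price=self.min_fee_price,
                id_generator=kwargs.get('id_generator'),
            )
        # contract address given: cap fee units with the maximum-fee oracle
        x = MaxGasOracle(conn)
        x.code_callback = x.get_fee_units
        return x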
@@ -1,10 +1,9 @@
[DATABASE]
user = postgres
password =
host = localhost
port = 5432
name = /tmp/cic-notify.db
#engine = postgresql
#driver = psycopg2
engine = sqlite
driver = pysqlite
[database]
name=cic_notify_test
user=
password=
host=localhost
port=
engine=sqlite
driver=pysqlite
debug=0
apps/cic-notify/.coveragerc (new file, 7 lines)
@@ -0,0 +1,7 @@
[report]
omit =
venv/*
scripts/*
cic_notify/db/migrations/*
cic_notify/runnable/*
cic_notify/version.py
@@ -3,6 +3,7 @@ import logging
import re

# third-party imports
import cic_notify.tasks.sms.db
from celery.app.control import Inspect
import celery

@@ -13,45 +14,16 @@ app = celery.current_app
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()

sms_tasks_matcher = r"^(cic_notify.tasks.sms)(\.\w+)?"

re_q = r'^cic-notify'
def get_sms_queue_tasks(app, task_prefix='cic_notify.tasks.sms.'):
host_queues = []
i = Inspect(app=app)
qs = i.active_queues()
for host in qs.keys():
for q in qs[host]:
if re.match(re_q, q['name']):
host_queues.append((host, q['name'],))
task_prefix_len = len(task_prefix)
queue_tasks = []
for (host, queue) in host_queues:
i = Inspect(app=app, destination=[host])
for tasks in i.registered_tasks().values():
for task in tasks:
if len(task) >= task_prefix_len and task[:task_prefix_len] == task_prefix:
queue_tasks.append((queue, task,))
return queue_tasks

class Api:
# TODO: Implement callback strategy
def __init__(self, queue=None):
def __init__(self, queue: any = 'cic-notify'):
"""
:param queue: The queue on which to execute notification tasks
:type queue: str
"""
self.queue = queue
self.sms_tasks = get_sms_queue_tasks(app)
logg.debug('sms tasks {}'.format(self.sms_tasks))

def sms(self, message, recipient):
def sms(self, message: str, recipient: str):
"""This function chains all sms tasks in order to send a message, log and persist said data to disk
:param message: The message to be sent to the recipient.
:type message: str
@@ -60,24 +32,9 @@ class Api:
:return: a celery Task
:rtype: Celery.Task
"""
signatures = []
for q in self.sms_tasks:
if not self.queue:
queue = q[0]
else:
queue = self.queue
signature = celery.signature(
q[1],
[
message,
recipient,
],
queue=queue,
)
signatures.append(signature)
t = celery.group(signatures)()
return t
s_send = celery.signature('cic_notify.tasks.sms.africastalking.send', [message, recipient], queue=self.queue)
s_log = celery.signature('cic_notify.tasks.sms.log.log', [message, recipient], queue=self.queue)
s_persist_notification = celery.signature(
'cic_notify.tasks.sms.db.persist_notification', [message, recipient], queue=self.queue)
signatures = [s_send, s_log, s_persist_notification]
return celery.group(signatures)()
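A minimal usage sketch of the reworked Api (not part of the diff). It assumes a Celery worker is consuming the default 'cic-notify' queue, and the phone number is a placeholder value.

    from cic_notify.api import Api

    api = Api()  # queue now defaults to 'cic-notify'
    task_group = api.sms('Hello world.', '+254711000000')  # group of send, log and persist signatures
    task_group.get()  # wait for all three task results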
@@ -2,7 +2,7 @@

[alembic]
# path to migration scripts
script_location = migrations
script_location = .

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s
@@ -27,28 +27,17 @@ script_location = migrations
# sourceless = false

# version location specification; this defaults
# to migrations/versions. When using multiple version
# to ./versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat migrations/versions
# version_locations = %(here)s/bar %(here)s/bat ./versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = postgres+psycopg2://postgres@localhost/cic-notify
sqlalchemy.url = driver://user:pass@localhost/dbname

[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
@@ -11,7 +11,7 @@ config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
fileConfig(config.config_file_name, disable_existing_loggers=True)

# add your model's MetaData object here
# for 'autogenerate' support
@@ -56,11 +56,14 @@ def run_migrations_online():
and associate a connection with the context.

"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
connectable = context.config.attributes.get("connection", None)

if connectable is None:
connectable = engine_from_config(
context.config.get_section(context.config.config_ini_section),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)

with connectable.connect() as connection:
context.configure(
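The point of this env.py change is that a caller (for example a test harness such as pytest-alembic) can now inject an existing connection through config.attributes instead of letting env.py build its own engine from alembic.ini. A hedged sketch of that pattern; the ini path and DSN are hypothetical:

    from alembic import command
    from alembic.config import Config
    from sqlalchemy import create_engine

    config = Config('alembic.ini')                 # hypothetical ini path
    engine = create_engine('sqlite:///test.db')    # hypothetical test database
    with engine.connect() as connection:
        config.attributes['connection'] = connection  # picked up by the new branch in env.py
        command.upgrade(config, 'head')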
@@ -7,7 +7,7 @@ import celery

celery_app = celery.current_app
logg = celery_app.log.get_default_logger()
local_logg = logging.getLogger(__name__)
local_logg = logging.getLogger()

@celery_app.task
@@ -1,5 +1,9 @@
pytest~=6.0.1
pytest-celery~=0.0.0a1
pytest-mock~=3.3.1
pysqlite3~=0.4.3
pytest-cov==2.10.1
Faker==11.1.0
faker-e164==0.1.0
pytest==6.2.5
pytest-celery~=0.0.0
pytest-mock==3.6.1
pysqlite3~=0.4.6
pytest-cov==3.0.0
pytest-alembic==0.7.0
requests-mock==1.9.3
apps/cic-notify/tests/__init__.py (new empty file)
@@ -0,0 +1,28 @@
import pytest

def test_single_head_revision(alembic_runner):
heads = alembic_runner.heads
head_count = len(heads)
assert head_count == 1

def test_upgrade(alembic_runner):
try:
alembic_runner.migrate_up_to("head")
except RuntimeError:
pytest.fail('Failed to upgrade to the head revision.')

def test_up_down_consistency(alembic_runner):
try:
for revision in alembic_runner.history.revisions:
alembic_runner.migrate_up_to(revision)
except RuntimeError:
pytest.fail('Failed to upgrade through each revision individually.')

try:
for revision in reversed(alembic_runner.history.revisions):
alembic_runner.migrate_down_to(revision)
except RuntimeError:
pytest.fail('Failed to downgrade through each revision individually.')
@@ -0,0 +1,27 @@
# standard imports

# external imports
from faker import Faker
from faker_e164.providers import E164Provider

# local imports
from cic_notify.db.enum import NotificationStatusEnum, NotificationTransportEnum
from cic_notify.db.models.notification import Notification

# test imports
from tests.helpers.phone import phone_number

def test_notification(init_database):
message = 'Hello world'
recipient = phone_number()
notification = Notification(NotificationTransportEnum.SMS, recipient, message)
init_database.add(notification)
init_database.commit()

notification = init_database.query(Notification).get(1)
assert notification.status == NotificationStatusEnum.UNKNOWN
assert notification.recipient == recipient
assert notification.message == message
assert notification.transport == NotificationTransportEnum.SMS
apps/cic-notify/tests/cic_notify/db/test_db.py (new file, 38 lines)
@@ -0,0 +1,38 @@
# standard imports
import os

# third-party imports

# local imports
from cic_notify.db import dsn_from_config

def test_dsn_from_config(load_config):
"""
"""
# test dsn for other db formats
overrides = {
'DATABASE_PASSWORD': 'password',
'DATABASE_DRIVER': 'psycopg2',
'DATABASE_ENGINE': 'postgresql'
}
load_config.dict_override(dct=overrides, dct_description='Override values to test different db formats.')

scheme = f'{load_config.get("DATABASE_ENGINE")}+{load_config.get("DATABASE_DRIVER")}'

dsn = dsn_from_config(load_config)
assert dsn == f"{scheme}://{load_config.get('DATABASE_USER')}:{load_config.get('DATABASE_PASSWORD')}@{load_config.get('DATABASE_HOST')}:{load_config.get('DATABASE_PORT')}/{load_config.get('DATABASE_NAME')}"

# undoes overrides to revert engine and drivers to sqlite
overrides = {
'DATABASE_PASSWORD': '',
'DATABASE_DRIVER': 'pysqlite',
'DATABASE_ENGINE': 'sqlite'
}
load_config.dict_override(dct=overrides, dct_description='Override values to test different db formats.')

# test dsn for sqlite engine
dsn = dsn_from_config(load_config)
scheme = f'{load_config.get("DATABASE_ENGINE")}+{load_config.get("DATABASE_DRIVER")}'
assert dsn == f'{scheme}:///{load_config.get("DATABASE_NAME")}'
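Based only on the assertions above, dsn_from_config is expected to produce DSNs of the following shapes. This is a sketch, not the actual implementation in cic_notify.db:

    def dsn_from_config_sketch(c) -> str:
        scheme = f"{c.get('DATABASE_ENGINE')}+{c.get('DATABASE_DRIVER')}"
        if c.get('DATABASE_ENGINE') == 'sqlite':
            return f"{scheme}:///{c.get('DATABASE_NAME')}"  # e.g. sqlite+pysqlite:///cic_notify_test
        return (f"{scheme}://{c.get('DATABASE_USER')}:{c.get('DATABASE_PASSWORD')}"
                f"@{c.get('DATABASE_HOST')}:{c.get('DATABASE_PORT')}/{c.get('DATABASE_NAME')}")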
@@ -0,0 +1,75 @@
# standard imports
import logging
import os

# external imports
import pytest
import requests_mock

# local imports
from cic_notify.error import NotInitializedError, AlreadyInitializedError, NotificationSendError
from cic_notify.tasks.sms.africastalking import AfricasTalkingNotifier

# test imports
from tests.helpers.phone import phone_number

def test_africas_talking_notifier(africastalking_response, caplog):
caplog.set_level(logging.DEBUG)
with pytest.raises(NotInitializedError) as error:
AfricasTalkingNotifier()
assert str(error.value) == ''

api_key = os.urandom(24).hex()
sender_id = 'bar'
username = 'sandbox'
AfricasTalkingNotifier.initialize(username, api_key, sender_id)
africastalking_notifier = AfricasTalkingNotifier()
assert africastalking_notifier.sender_id == sender_id
assert africastalking_notifier.initiated is True

with pytest.raises(AlreadyInitializedError) as error:
AfricasTalkingNotifier.initialize(username, api_key, sender_id)
assert str(error.value) == ''

with requests_mock.Mocker(real_http=False) as request_mocker:
message = 'Hello world.'
recipient = phone_number()
africastalking_response.get('SMSMessageData').get('Recipients')[0]['number'] = recipient
request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert f'Africastalking response sender-id {africastalking_response}' in caplog.text
africastalking_notifier.sender_id = None
africastalking_notifier.send(message, recipient)
assert f'africastalking response no-sender-id {africastalking_response}' in caplog.text
with pytest.raises(NotificationSendError) as error:
status = 'InvalidPhoneNumber'
status_code = 403
africastalking_response.get('SMSMessageData').get('Recipients')[0]['status'] = status
africastalking_response.get('SMSMessageData').get('Recipients')[0]['statusCode'] = status_code

request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert str(error.value) == f'Sending notification failed due to: {status}'
with pytest.raises(NotificationSendError) as error:
recipients = []
status = 'InsufficientBalance'
africastalking_response.get('SMSMessageData')['Recipients'] = recipients
africastalking_response.get('SMSMessageData')['Message'] = status

request_mocker.register_uri(method='POST',
headers={'content-type': 'application/json'},
json=africastalking_response,
url='https://api.sandbox.africastalking.com/version1/messaging',
status_code=200)
africastalking_notifier.send(message, recipient)
assert str(error.value) == f'Unexpected number of recipients: {len(recipients)}. Status: {status}'
apps/cic-notify/tests/cic_notify/tasks/sms/test_db_tasks.py (new file, 26 lines)
@@ -0,0 +1,26 @@
# standard imports

# external imports
import celery

# local imports
from cic_notify.db.enum import NotificationStatusEnum, NotificationTransportEnum
from cic_notify.db.models.notification import Notification

# test imports
from tests.helpers.phone import phone_number

def test_persist_notification(celery_session_worker, init_database):
message = 'Hello world.'
recipient = phone_number()
s_persist_notification = celery.signature(
'cic_notify.tasks.sms.db.persist_notification', (message, recipient)
)
s_persist_notification.apply_async().get()

notification = Notification.session.query(Notification).filter_by(recipient=recipient).first()
assert notification.status == NotificationStatusEnum.UNKNOWN
assert notification.recipient == recipient
assert notification.message == message
assert notification.transport == NotificationTransportEnum.SMS
apps/cic-notify/tests/cic_notify/tasks/sms/test_log_tasks.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# standard imports
import logging

# external imports
import celery

# local imports

# test imports
from tests.helpers.phone import phone_number

def test_log(caplog, celery_session_worker):
message = 'Hello world.'
recipient = phone_number()
caplog.set_level(logging.INFO)
s_log = celery.signature(
'cic_notify.tasks.sms.log.log', [message, recipient]
)
s_log.apply_async().get()
assert f'message to {recipient}: {message}' in caplog.text
apps/cic-notify/tests/cic_notify/test_api.py (new file, 24 lines)
@@ -0,0 +1,24 @@
# standard imports

# external imports
import celery

# local imports
from cic_notify.api import Api

# test imports
from tests.helpers.phone import phone_number

def test_api(celery_session_worker, mocker):
mocked_group = mocker.patch('celery.group')
message = 'Hello world.'
recipient = phone_number()
s_send = celery.signature('cic_notify.tasks.sms.africastalking.send', [message, recipient], queue=None)
s_log = celery.signature('cic_notify.tasks.sms.log.log', [message, recipient], queue=None)
s_persist_notification = celery.signature(
'cic_notify.tasks.sms.db.persist_notification', [message, recipient], queue=None)
signatures = [s_send, s_log, s_persist_notification]
api = Api(queue=None)
api.sms(message, recipient)
mocked_group.assert_called_with(signatures)
@@ -1,31 +1,13 @@
# standard imports
import sys
import os
import pytest
import logging

# third party imports
import confini

script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
sys.path.insert(0, root_dir)

# local imports
from cic_notify.db.models.base import SessionBase
#from transport.notification import AfricastalkingNotification

# fixtures
from tests.fixtures_config import *
from tests.fixtures_celery import *
from tests.fixtures_database import *
# test imports

logg = logging.getLogger()

#@pytest.fixture(scope='session')
#def africastalking_notification(
# load_config,
# ):
# return AfricastalkingNotificationTransport(load_config)
#
from .fixtures.celery import *
from .fixtures.config import *
from .fixtures.database import *
from .fixtures.result import *
@@ -37,12 +37,6 @@ def celery_config():
shutil.rmtree(rq)

@pytest.fixture(scope='session')
def celery_worker_parameters():
return {
# 'queues': ('cic-notify'),
}

@pytest.fixture(scope='session')
def celery_enable_logging():
return True
apps/cic-notify/tests/fixtures/config.py (new vendored file, 32 lines)
@@ -0,0 +1,32 @@
# standard imports
import os
import logging

# external imports
import pytest
from confini import Config

logg = logging.getLogger(__file__)

fixtures_dir = os.path.dirname(__file__)
root_directory = os.path.dirname(os.path.dirname(fixtures_dir))

@pytest.fixture(scope='session')
def alembic_config():
migrations_directory = os.path.join(root_directory, 'cic_notify', 'db', 'migrations', 'default')
file = os.path.join(migrations_directory, 'alembic.ini')
return {
'file': file,
'script_location': migrations_directory
}

@pytest.fixture(scope='session')
def load_config():
config_directory = os.path.join(root_directory, '.config/test')
config = Config(default_dir=config_directory)
config.process()
logg.debug('config loaded\n{}'.format(config))
return config
apps/cic-notify/tests/fixtures/database.py (new vendored file, 54 lines)
@@ -0,0 +1,54 @@
# standard imports
import os

# third-party imports
import pytest
import alembic
from alembic.config import Config as AlembicConfig

# local imports
from cic_notify.db import dsn_from_config
from cic_notify.db.models.base import SessionBase, create_engine

from .config import root_directory

@pytest.fixture(scope='session')
def alembic_engine(load_config):
data_source_name = dsn_from_config(load_config)
return create_engine(data_source_name)

@pytest.fixture(scope='session')
def database_engine(load_config):
if load_config.get('DATABASE_ENGINE') == 'sqlite':
try:
os.unlink(load_config.get('DATABASE_NAME'))
except FileNotFoundError:
pass
dsn = dsn_from_config(load_config)
SessionBase.connect(dsn)
return dsn

@pytest.fixture(scope='function')
def init_database(load_config, database_engine):
db_directory = os.path.join(root_directory, 'cic_notify', 'db')
migrations_directory = os.path.join(db_directory, 'migrations', load_config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrations_directory):
migrations_directory = os.path.join(db_directory, 'migrations', 'default')

session = SessionBase.create_session()

alembic_config = AlembicConfig(os.path.join(migrations_directory, 'alembic.ini'))
alembic_config.set_main_option('sqlalchemy.url', database_engine)
alembic_config.set_main_option('script_location', migrations_directory)

alembic.command.downgrade(alembic_config, 'base')
alembic.command.upgrade(alembic_config, 'head')

yield session
session.commit()
session.close()
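These fixtures are what wire the new migration tests to pytest-alembic: the plugin is assumed to look up the user-supplied alembic_config and alembic_engine fixtures and expose an alembic_runner fixture built from them, which the migration tests earlier in this diff receive as an argument. A sketch of a test consuming it:

    def test_example_migration_roundtrip(alembic_runner):
        alembic_runner.migrate_up_to('head')   # runs against the engine from alembic_engine
        assert len(alembic_runner.heads) == 1  # single head revision expected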
apps/cic-notify/tests/fixtures/result.py (new vendored file, 24 lines)
@@ -0,0 +1,24 @@
# standard imports

# external imports
import pytest

# local imports

# test imports

@pytest.fixture(scope="function")
def africastalking_response():
return {
"SMSMessageData": {
"Message": "Sent to 1/1 Total Cost: KES 0.8000",
"Recipients": [{
"statusCode": 101,
"number": "+254711XXXYYY",
"status": "Success",
"cost": "KES 0.8000",
"messageId": "ATPid_SampleTxnId123"
}]
}
}
@@ -1,20 +0,0 @@
# standard imports
import os
import logging

# third-party imports
import pytest
import confini

script_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.dirname(script_dir)
logg = logging.getLogger(__file__)

@pytest.fixture(scope='session')
def load_config():
config_dir = os.path.join(root_dir, '.config/test')
conf = confini.Config(config_dir, 'CICTEST')
conf.process()
logg.debug('config {}'.format(conf))
return conf
@@ -1,48 +0,0 @@
# standard imports
import os

# third-party imports
import pytest
import alembic
from alembic.config import Config as AlembicConfig

# local imports
from cic_notify.db import SessionBase
from cic_notify.db import dsn_from_config

@pytest.fixture(scope='session')
def database_engine(
load_config,
):
dsn = dsn_from_config(load_config)
SessionBase.connect(dsn)
return dsn

@pytest.fixture(scope='function')
def init_database(
load_config,
database_engine,
):

rootdir = os.path.dirname(os.path.dirname(__file__))
dbdir = os.path.join(rootdir, 'cic_notify', 'db')
migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrationsdir):
migrationsdir = os.path.join(dbdir, 'migrations', 'default')

session = SessionBase.create_session()

ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', database_engine)
ac.set_main_option('script_location', migrationsdir)

alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head')

yield session
session.commit()
session.close()
apps/cic-notify/tests/helpers/phone.py (new file, 16 lines)
@@ -0,0 +1,16 @@
# standard imports

# external imports
from faker import Faker
from faker_e164.providers import E164Provider

# local imports

# test imports

fake = Faker()
fake.add_provider(E164Provider)

def phone_number() -> str:
    return fake.e164('KE')
@@ -1,34 +0,0 @@
# standard imports
import json

# third party imports
import pytest
import celery

# local imports
from cic_notify.tasks.sms import db
from cic_notify.tasks.sms import log

def test_log_notification(
celery_session_worker,
):

recipient = '+25412121212'
content = 'bar'
s_log = celery.signature('cic_notify.tasks.sms.log.log')
t = s_log.apply_async(args=[recipient, content])

r = t.get()

def test_db_notification(
init_database,
celery_session_worker,
):

recipient = '+25412121213'
content = 'foo'
s_db = celery.signature('cic_notify.tasks.sms.db.persist_notification')
t = s_db.apply_async(args=[recipient, content])

r = t.get()
@@ -3,4 +3,5 @@ omit =
venv/*
scripts/*
cic_ussd/db/migrations/*
cic_ussd/runnable/*
cic_ussd/version.py
@@ -14,7 +14,7 @@ class Cache:
store: Redis = None

def cache_data(key: str, data: str):
def cache_data(key: str, data: [bytes, float, int, str]):
"""
:param key:
:type key:
@@ -63,10 +63,7 @@ class Account(SessionBase):
def remove_guardian(self, phone_number: str):
set_guardians = self.guardians.split(',')
set_guardians.remove(phone_number)
if len(set_guardians) > 1:
self.guardians = ','.join(set_guardians)
else:
self.guardians = set_guardians[0]
self.guardians = ','.join(set_guardians)

def get_guardians(self) -> list:
return self.guardians.split(',') if self.guardians else []
@@ -7,3 +7,4 @@ from .custom import CustomMetadata
from .person import PersonMetadata
from .phone import PhonePointerMetadata
from .preferences import PreferencesMetadata
from .tokens import TokenMetadata
@@ -417,7 +417,7 @@ class MenuProcessor:
preferred_language = get_cached_preferred_language(self.account.blockchain_address)
if not preferred_language:
preferred_language = i18n.config.get('fallback')
return translation_for(self.display_key,preferred_language,token_symbol=token_symbol)
return translation_for(self.display_key, preferred_language, token_symbol=token_symbol)

def exit_successful_transaction(self):
"""
@@ -87,7 +87,7 @@ def is_valid_guardian_addition(state_machine_data: Tuple[str, dict, Account, Ses
guardianship = Guardianship()
is_system_guardian = guardianship.is_system_guardian(phone_number)
is_initiator = phone_number == account.phone_number
is_existent_guardian = phone_number in account.get_guardians()
is_existent_guardian = phone_number in account.get_guardians() or is_system_guardian

failure_reason = ''
if not is_valid_account:
@@ -6,3 +6,6 @@ password_pepper=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I=
[machine]
states=states/
transitions=transitions/

[system]
guardians_file = var/lib/sys/guardians.txt
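The guardians_file added here is read by the guardianship tests later in this diff as one phone number per line. Illustrative contents of var/lib/sys/guardians.txt; the numbers are placeholders, not values from the repository:

    +254700000000
    +254711000000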
@@ -1,2 +1,2 @@
[chain]
spec = 'evm:foo:1:bar'
spec = evm:foo:1:bar
@@ -1,3 +1,10 @@
[locale]
fallback=sw
path=var/lib/locale/
path=
file_builders=var/lib/sys/

[schema]
file_path = data/schema

[languages]
file = var/lib/sys/languages.json
@@ -1,12 +1,12 @@
cic-eth[services]~=0.12.4a13
Faker==8.1.2
cic-eth[services]~=0.12.7
Faker==11.1.0
faker-e164==0.1.0
pytest==6.2.4
pytest-alembic==0.2.5
pytest==6.2.5
pytest-alembic==0.7.0
pytest-celery==0.0.0a1
pytest-cov==2.10.1
pytest-mock==3.3.1
pytest-cov==3.0.0
pytest-mock==3.6.1
pytest-ordering==0.6
pytest-redis==2.0.0
requests-mock==1.8.0
tavern==1.14.2
pytest-redis==2.3.0
requests-mock==1.9.3
tavern==1.18.0
@@ -2,10 +2,16 @@

# external imports
import pytest
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.balance import calculate_available_balance, get_balances, get_cached_available_balance
from cic_ussd.account.balance import (calculate_available_balance,
get_balances,
get_cached_adjusted_balance,
get_cached_available_balance)
from cic_ussd.account.chain import Chain
from cic_ussd.account.tokens import get_cached_token_data_list
from cic_ussd.cache import cache_data_key, get_cached_data
from cic_ussd.error import CachedDataNotFoundError

# test imports
@@ -57,19 +63,45 @@ def test_calculate_available_balance(activated_account,
'balance_outgoing': balance_outgoing,
'balance_incoming': balance_incoming
}
assert calculate_available_balance(balances) == available_balance
assert calculate_available_balance(balances, 6) == available_balance

def test_get_cached_available_balance(activated_account,
balances,
cache_balances,
cache_default_token_data,
load_chain_spec):
cached_available_balance = get_cached_available_balance(activated_account.blockchain_address)
available_balance = calculate_available_balance(balances[0])
load_chain_spec,
token_symbol):
identifier = [bytes.fromhex(activated_account.blockchain_address), token_symbol.encode('utf-8')]
cached_available_balance = get_cached_available_balance(6, identifier)
available_balance = calculate_available_balance(balances[0], 6)
assert cached_available_balance == available_balance
address = blockchain_address()
with pytest.raises(CachedDataNotFoundError) as error:
cached_available_balance = get_cached_available_balance(address)
identifier = [bytes.fromhex(address), token_symbol.encode('utf-8')]
key = cache_data_key(identifier=identifier, salt=MetadataPointer.BALANCES)
cached_available_balance = get_cached_available_balance(6, identifier)
assert cached_available_balance is None
assert str(error.value) == f'No cached available balance for address: {address}'
assert str(error.value) == f'No cached available balance at {key}'

def test_get_cached_adjusted_balance(activated_account, cache_adjusted_balances, token_symbol):
identifier = bytes.fromhex(activated_account.blockchain_address)
balances_identifier = [identifier, token_symbol.encode('utf-8')]
key = cache_data_key(balances_identifier, MetadataPointer.BALANCES_ADJUSTED)
adjusted_balances = get_cached_data(key)
assert get_cached_adjusted_balance(balances_identifier) == adjusted_balances

def test_get_account_tokens_balance(activated_account,
cache_token_data_list,
celery_session_worker,
load_chain_spec,
load_config,
mock_async_balance_api_query,
token_symbol):
blockchain_address = activated_account.blockchain_address
chain_str = Chain.spec.__str__()
get_balances(blockchain_address, chain_str, token_symbol, asynchronous=True)
assert mock_async_balance_api_query.get('address') == blockchain_address
assert mock_async_balance_api_query.get('token_symbol') == token_symbol
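Call-shape sketch for the reworked balance helpers exercised above; the values are illustrative and the variable names are placeholders taken from the test:

    identifier = [bytes.fromhex(blockchain_address), token_symbol.encode('utf-8')]
    available = calculate_available_balance(balances[0], 6)  # token decimals now passed explicitly
    cached = get_cached_available_balance(6, identifier)     # cache key combines address and token symbol
    # when nothing is cached, CachedDataNotFoundError('No cached available balance at <key>') is raised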
apps/cic-ussd/tests/cic_ussd/account/test_guardianship.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# standard imports
import os

# external imports

# local imports
from cic_ussd.account.guardianship import Guardianship

# test imports
from tests.fixtures.config import root_directory

def test_guardianship(load_config, setup_guardianship):
guardians_file = os.path.join(root_directory, load_config.get('SYSTEM_GUARDIANS_FILE'))
with open(guardians_file, 'r') as system_guardians:
guardians = [line.strip() for line in system_guardians]
assert Guardianship.guardians == guardians

guardianship = Guardianship()
assert guardianship.is_system_guardian(Guardianship.guardians[0]) is True
assert guardianship.is_system_guardian('+254712345678') is False
@@ -11,8 +11,7 @@ from cic_ussd.account.statement import (filter_statement_transactions,
generate,
get_cached_statement,
parse_statement_transactions,
query_statement,
statement_transaction_set)
query_statement)
from cic_ussd.account.transaction import transaction_actors
from cic_ussd.cache import cache_data_key, get_cached_data

@@ -74,12 +73,3 @@ def test_query_statement(blockchain_address, limit, load_chain_spec, activated_a
query_statement(blockchain_address, limit)
assert mock_transaction_list_query.get('address') == blockchain_address
assert mock_transaction_list_query.get('limit') == limit

def test_statement_transaction_set(cache_default_token_data, load_chain_spec, preferences, set_locale_files, statement):
parsed_transactions = parse_statement_transactions(statement)
preferred_language = preferences.get('preferred_language')
transaction_set = statement_transaction_set(preferred_language, parsed_transactions)
transaction_set.startswith('Sent')
transaction_set = statement_transaction_set(preferred_language, [])
transaction_set.startswith('No')
@ -1,17 +1,80 @@
|
||||
# standard imports
|
||||
import hashlib
|
||||
import json
|
||||
|
||||
# external imports
|
||||
import pytest
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.chain import Chain
|
||||
from cic_ussd.account.tokens import get_cached_default_token, get_default_token_symbol, query_default_token
|
||||
from cic_ussd.account.tokens import (collate_token_metadata,
|
||||
create_account_tokens_list,
|
||||
get_active_token_symbol,
|
||||
get_default_token_symbol,
|
||||
get_cached_default_token,
|
||||
get_cached_token_data,
|
||||
get_cached_token_data_list,
|
||||
get_cached_token_symbol_list,
|
||||
hashed_token_proof,
|
||||
handle_token_symbol_list,
|
||||
order_account_tokens_list,
|
||||
parse_token_list,
|
||||
process_token_data,
|
||||
query_default_token,
|
||||
query_token_data,
|
||||
remove_from_account_tokens_list,
|
||||
set_active_token)
|
||||
from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
|
||||
from cic_ussd.error import CachedDataNotFoundError
|
||||
|
||||
|
||||
# test imports
|
||||
|
||||
|
||||
def test_collate_token_metadata(token_meta_symbol, token_proof_symbol):
|
||||
description = token_proof_symbol.get('description')
|
||||
issuer = token_proof_symbol.get('issuer')
|
||||
location = token_meta_symbol.get('location')
|
||||
contact = token_meta_symbol.get('contact')
|
||||
data = {
|
||||
'description': description,
|
||||
'issuer': issuer,
|
||||
'location': location,
|
||||
'contact': contact
|
||||
}
|
||||
assert collate_token_metadata(token_proof_symbol, token_meta_symbol) == data
|
||||
|
||||
|
||||
def test_create_account_tokens_list(activated_account,
|
||||
cache_balances,
|
||||
cache_token_data,
|
||||
cache_token_symbol_list,
|
||||
init_cache):
|
||||
create_account_tokens_list(activated_account.blockchain_address)
|
||||
key = cache_data_key(bytes.fromhex(activated_account.blockchain_address), MetadataPointer.TOKEN_DATA_LIST)
|
||||
cached_data_list = json.loads(get_cached_data(key))
|
||||
data = get_cached_token_data_list(activated_account.blockchain_address)
|
||||
assert cached_data_list == data
|
||||
|
||||
|
||||
def test_get_active_token_symbol(activated_account, set_active_token, valid_recipient):
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_ACTIVE)
|
||||
active_token_symbol = get_cached_data(key)
|
||||
assert active_token_symbol == get_active_token_symbol(activated_account.blockchain_address)
|
||||
with pytest.raises(CachedDataNotFoundError) as error:
|
||||
get_active_token_symbol(valid_recipient.blockchain_address)
|
||||
assert str(error.value) == 'No active token set.'
|
||||
|
||||
|
||||
def test_get_cached_token_data(activated_account, cache_token_data, token_symbol):
|
||||
identifier = [bytes.fromhex(activated_account.blockchain_address), token_symbol.encode('utf-8')]
|
||||
key = cache_data_key(identifier, MetadataPointer.TOKEN_DATA)
|
||||
token_data = json.loads(get_cached_data(key))
|
||||
assert token_data == get_cached_token_data(activated_account.blockchain_address, token_symbol)
|
||||
|
||||
|
||||
def test_get_cached_default_token(cache_default_token_data, default_token_data, load_chain_spec):
|
||||
chain_str = Chain.spec.__str__()
|
||||
cached_default_token = get_cached_default_token(chain_str)
|
||||
@ -27,6 +90,84 @@ def test_get_default_token_symbol_from_api(default_token_data, load_chain_spec,
|
||||
assert default_token_symbol == default_token_data['symbol']
|
||||
|
||||
|
||||
def test_get_cached_token_data_list(activated_account, cache_token_data_list):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
key = cache_data_key(identifier=bytes.fromhex(blockchain_address), salt=MetadataPointer.TOKEN_DATA_LIST)
|
||||
token_symbols_list = json.loads(get_cached_data(key))
|
||||
assert token_symbols_list == get_cached_token_data_list(blockchain_address)
|
||||
|
||||
|
||||
def test_get_cached_token_symbol_list(activated_account, cache_token_symbol_list):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
key = cache_data_key(identifier=bytes.fromhex(blockchain_address), salt=MetadataPointer.TOKEN_SYMBOLS_LIST)
|
||||
token_symbols_list = json.loads(get_cached_data(key))
|
||||
assert token_symbols_list == get_cached_token_symbol_list(blockchain_address)
|
||||
|
||||
|
||||
def test_hashed_token_proof(token_proof_symbol):
|
||||
hash_object = hashlib.new("sha256")
|
||||
token_proof = json.dumps(token_proof_symbol)
|
||||
hash_object.update(token_proof.encode('utf-8'))
|
||||
assert hash_object.digest().hex() == hashed_token_proof(token_proof_symbol)
|
||||
|
||||
|
||||
def test_handle_token_symbol_list(activated_account, init_cache):
|
||||
handle_token_symbol_list(activated_account.blockchain_address, 'GFT')
|
||||
cached_token_symbol_list = get_cached_token_symbol_list(activated_account.blockchain_address)
|
||||
assert len(cached_token_symbol_list) == 1
|
||||
handle_token_symbol_list(activated_account.blockchain_address, 'DET')
|
||||
cached_token_symbol_list = get_cached_token_symbol_list(activated_account.blockchain_address)
|
||||
assert len(cached_token_symbol_list) == 2
|
||||
|
||||
|
||||
def test_order_account_tokens_list(activated_account, token_list_entries):
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
last_sent_token_key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_LAST_SENT)
|
||||
cache_data(last_sent_token_key, 'FII')
|
||||
|
||||
last_received_token_key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_LAST_RECEIVED)
|
||||
cache_data(last_received_token_key, 'DET')
|
||||
|
||||
ordered_list = order_account_tokens_list(token_list_entries, identifier)
|
||||
assert ordered_list == [
|
||||
{
|
||||
'name': 'Fee',
|
||||
'symbol': 'FII',
|
||||
'issuer': 'Foo',
|
||||
'contact': {
|
||||
'phone': '+254712345678'
|
||||
},
|
||||
'location': 'Fum',
|
||||
'balance': 50.0
|
||||
},
|
||||
{
|
||||
'name': 'Demurrage Token',
|
||||
'symbol': 'DET',
|
||||
'issuer': 'Grassroots Economics',
|
||||
'contact': {
|
||||
'phone': '+254700000000',
|
||||
'email': 'info@grassrootseconomics.org'},
|
||||
'location': 'Fum',
|
||||
'balance': 49.99
|
||||
},
|
||||
{
|
||||
'name': 'Giftable Token',
|
||||
'symbol': 'GFT',
|
||||
'issuer': 'Grassroots Economics',
|
||||
'contact': {
|
||||
'phone': '+254700000000',
|
||||
'email': 'info@grassrootseconomics.org'},
|
||||
'location': 'Fum',
|
||||
'balance': 60.0
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
def test_parse_token_list(token_list_entries):
|
||||
parsed_token_list = ['1. FII 50.0', '2. GFT 60.0', '3. DET 49.99']
|
||||
assert parsed_token_list == parse_token_list(token_list_entries)
|
||||
|
||||
|
||||
def test_query_default_token(default_token_data, load_chain_spec, mock_sync_default_token_api_query):
|
||||
chain_str = Chain.spec.__str__()
|
||||
queried_default_token_data = query_default_token(chain_str)
|
||||
@ -40,3 +181,38 @@ def test_get_default_token_symbol_from_cache(cache_default_token_data, default_t
|
||||
default_token_symbol = get_default_token_symbol()
|
||||
assert default_token_symbol is not None
|
||||
assert default_token_symbol == default_token_data.get('symbol')
|
||||
|
||||
|
||||
def test_remove_from_account_tokens_list(token_list_entries):
|
||||
assert remove_from_account_tokens_list(token_list_entries, 'GFT') == ([{
|
||||
'name': 'Giftable Token',
|
||||
'symbol': 'GFT',
|
||||
'issuer': 'Grassroots Economics',
|
||||
'contact': {
|
||||
'phone': '+254700000000',
|
||||
'email': 'info@grassrootseconomics.org'
|
||||
},
|
||||
'location': 'Fum',
|
||||
'balance': 60.0
|
||||
}],
|
||||
[
|
||||
{
|
||||
'name': 'Fee',
|
||||
'symbol': 'FII',
|
||||
'issuer': 'Foo',
|
||||
'contact': {'phone': '+254712345678'},
|
||||
'location': 'Fum',
|
||||
'balance': 50.0
|
||||
},
|
||||
{
|
||||
'name': 'Demurrage Token',
|
||||
'symbol': 'DET',
|
||||
'issuer': 'Grassroots Economics',
|
||||
'contact': {
|
||||
'phone': '+254700000000',
|
||||
'email': 'info@grassrootseconomics.org'
|
||||
},
|
||||
'location': 'Fum',
|
||||
'balance': 49.99
|
||||
}
|
||||
])
|
||||
|
@@ -1,5 +1,4 @@
# standard imports
from decimal import Decimal

# external imports
import pytest
@@ -37,11 +36,11 @@ def test_aux_transaction_data(preferences, set_locale_files, transactions_list):

@pytest.mark.parametrize("value, expected_result", [
(50000000, Decimal('50.00')),
(100000, Decimal('0.10'))
(50000000, 50.0),
(100000, 0.1)
])
def test_from_wei(cache_default_token_data, expected_result, value):
assert from_wei(value) == expected_result
assert from_wei(6, value) == expected_result

@pytest.mark.parametrize("value, expected_result", [
@@ -49,7 +48,7 @@ def test_from_wei(cache_default_token_data, expected_result, value):
(0.10, 100000)
])
def test_to_wei(cache_default_token_data, expected_result, value):
assert to_wei(value) == expected_result
assert to_wei(6, value) == expected_result

@pytest.mark.parametrize("decimals, value, expected_result", [
@@ -108,8 +107,8 @@ def test_outgoing_transaction_processor(activated_account,
activated_account.blockchain_address,
valid_recipient.blockchain_address)

outgoing_tx_processor.transfer(amount, token_symbol)
outgoing_tx_processor.transfer(amount, 6, token_symbol)
assert mock_transfer_api.get('from_address') == activated_account.blockchain_address
assert mock_transfer_api.get('to_address') == valid_recipient.blockchain_address
assert mock_transfer_api.get('value') == to_wei(amount)
assert mock_transfer_api.get('value') == to_wei(6, amount)
assert mock_transfer_api.get('token_symbol') == token_symbol
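A hedged sketch of what from_wei and to_wei now look like with an explicit decimals argument, inferred only from the parametrized expectations above; the real implementations live in cic_ussd.account.transaction and may differ:

    def from_wei(decimals: int, value: int) -> float:
        return float(value / (10 ** decimals))   # from_wei(6, 50000000) == 50.0

    def to_wei(decimals: int, value) -> int:
        return int(value * (10 ** decimals))     # to_wei(6, 0.10) == 100000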
@@ -90,7 +90,7 @@ def test_standard_metadata_id(activated_account, cache_person_metadata, pending_

def test_account_create(init_cache, init_database, load_chain_spec, mock_account_creation_task_result, task_uuid):
chain_str = Chain.spec.__str__()
create(chain_str, phone_number(), init_database)
create(chain_str, phone_number(), init_database, 'en')
assert len(init_database.query(TaskTracker).all()) == 1
account_creation_data = get_cached_data(task_uuid)
assert json.loads(account_creation_data).get('status') == AccountStatus.PENDING.name
@@ -23,7 +23,7 @@ def test_ussd_metadata_handler(activated_account,
setup_metadata_signer):
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
cic_type = MetadataPointer.PERSON
metadata_client = UssdMetadataHandler(cic_type, identifier)
metadata_client = UssdMetadataHandler(cic_type=cic_type, identifier=identifier)
assert metadata_client.cic_type == cic_type
assert metadata_client.engine == 'pgp'
assert metadata_client.identifier == identifier
apps/cic-ussd/tests/cic_ussd/metadata/test_tokens_meta.py (new file, 72 lines)
@@ -0,0 +1,72 @@
# standard imports
import json

# external imports
import pytest
import requests_mock
from cic_types.condiments import MetadataPointer
from requests.exceptions import HTTPError

# local imports
from cic_ussd.cache import cache_data_key, get_cached_data
from cic_ussd.metadata import TokenMetadata
from cic_ussd.metadata.tokens import token_metadata_handler, query_token_metadata, query_token_info

# test imports

def test_token_metadata_handler(activated_account,
init_cache,
setup_metadata_request_handler,
setup_metadata_signer,
token_meta_symbol,
token_symbol):
with requests_mock.Mocker(real_http=False) as request_mocker:
with pytest.raises(HTTPError) as error:
metadata_client = TokenMetadata(identifier=b'foo', cic_type=MetadataPointer.TOKEN_META_SYMBOL)
reason = 'Not Found'
status_code = 401
request_mocker.register_uri('GET', metadata_client.url, status_code=status_code, reason=reason)
token_metadata_handler(metadata_client)
assert str(error.value) == f'Client Error: {status_code}, reason: {reason}'

identifier = token_symbol.encode('utf-8')
metadata_client = TokenMetadata(identifier, cic_type=MetadataPointer.TOKEN_META_SYMBOL)
request_mocker.register_uri('GET', metadata_client.url, json=token_meta_symbol, status_code=200, reason='OK')
token_metadata_handler(metadata_client)
key = cache_data_key(identifier, MetadataPointer.TOKEN_META_SYMBOL)
cached_token_meta_symbol = get_cached_data(key)
assert json.loads(cached_token_meta_symbol) == token_meta_symbol

def test_query_token_metadata(init_cache,
setup_metadata_request_handler,
setup_metadata_signer,
token_meta_symbol,
token_proof_symbol,
token_symbol):
with requests_mock.Mocker(real_http=False) as request_mocker:
identifier = token_symbol.encode('utf-8')
metadata_client = TokenMetadata(identifier, cic_type=MetadataPointer.TOKEN_META_SYMBOL)
request_mocker.register_uri('GET', metadata_client.url, json=token_meta_symbol, status_code=200, reason='OK')
query_token_metadata(identifier)
key = cache_data_key(identifier, MetadataPointer.TOKEN_META_SYMBOL)
cached_token_meta_symbol = get_cached_data(key)
assert json.loads(cached_token_meta_symbol) == token_meta_symbol

def test_query_token_info(init_cache,
setup_metadata_request_handler,
setup_metadata_signer,
token_meta_symbol,
token_proof_symbol,
token_symbol):
with requests_mock.Mocker(real_http=False) as request_mocker:
identifier = token_symbol.encode('utf-8')
metadata_client = TokenMetadata(identifier, cic_type=MetadataPointer.TOKEN_PROOF_SYMBOL)
request_mocker.register_uri('GET', metadata_client.url, json=token_proof_symbol, status_code=200, reason='OK')
query_token_info(identifier)
key = cache_data_key(identifier, MetadataPointer.TOKEN_PROOF_SYMBOL)
cached_token_proof_symbol = get_cached_data(key)
assert json.loads(cached_token_proof_symbol) == token_proof_symbol
@ -1,6 +1,6 @@
|
||||
# standard imports
|
||||
import json
|
||||
import datetime
|
||||
import os
|
||||
|
||||
# external imports
|
||||
from cic_types.condiments import MetadataPointer
|
||||
@ -10,195 +10,464 @@ from cic_ussd.account.balance import get_cached_available_balance
|
||||
from cic_ussd.account.metadata import get_cached_preferred_language
|
||||
from cic_ussd.account.statement import (
|
||||
get_cached_statement,
|
||||
parse_statement_transactions,
|
||||
statement_transaction_set
|
||||
parse_statement_transactions
|
||||
)
|
||||
from cic_ussd.account.tokens import get_default_token_symbol
|
||||
from cic_ussd.account.tokens import (get_active_token_symbol,
|
||||
get_cached_token_data)
|
||||
from cic_ussd.account.transaction import from_wei, to_wei
|
||||
from cic_ussd.cache import cache_data, cache_data_key
|
||||
from cic_ussd.menu.ussd_menu import UssdMenu
|
||||
from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
|
||||
from cic_ussd.metadata import PersonMetadata
|
||||
from cic_ussd.phone_number import Support
|
||||
from cic_ussd.processor.menu import response
|
||||
from cic_ussd.processor.util import parse_person_metadata
|
||||
from cic_ussd.processor.menu import response, MenuProcessor
|
||||
from cic_ussd.processor.util import parse_person_metadata, ussd_menu_list
|
||||
from cic_ussd.translation import translation_for
|
||||
|
||||
|
||||
# test imports
|
||||
|
||||
|
||||
def test_menu_processor(activated_account,
|
||||
balances,
|
||||
cache_balances,
|
||||
cache_default_token_data,
|
||||
cache_preferences,
|
||||
cache_person_metadata,
|
||||
cache_statement,
|
||||
celery_session_worker,
|
||||
generic_ussd_session,
|
||||
init_database,
|
||||
load_chain_spec,
|
||||
load_support_phone,
|
||||
load_ussd_menu,
|
||||
mock_get_adjusted_balance,
|
||||
mock_sync_balance_api_query,
|
||||
mock_transaction_list_query,
|
||||
valid_recipient):
|
||||
preferred_language = get_cached_preferred_language(activated_account.blockchain_address)
|
||||
available_balance = get_cached_available_balance(activated_account.blockchain_address)
|
||||
token_symbol = get_default_token_symbol()
|
||||
def test_account_balance(activated_account, cache_balances, cache_preferences, cache_token_data,
|
||||
generic_ussd_session, init_database, set_active_token):
|
||||
"""blockchain_address = activated_account.blockchain_address
|
||||
token_symbol = get_active_token_symbol(blockchain_address)
|
||||
token_data = get_cached_token_data(blockchain_address, token_symbol)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
decimals = token_data.get("decimals")
|
||||
identifier = bytes.fromhex(blockchain_address)
|
||||
balances_identifier = [identifier, token_symbol.encode('utf-8')]
|
||||
available_balance = get_cached_available_balance(decimals, balances_identifier)
|
||||
with_available_balance = 'ussd.account_balances.available_balance'
|
||||
with_fees = 'ussd.account_balances.with_fees'
|
||||
ussd_menu = UssdMenu.find_by_name('account_balances')
|
||||
name = ussd_menu.get('name')
|
||||
resp = response(activated_account, 'ussd.account_balances', name, init_database, generic_ussd_session)
|
||||
resp = response(activated_account, with_available_balance, with_available_balance[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(with_available_balance,
|
||||
preferred_language,
|
||||
available_balance=available_balance,
|
||||
token_symbol=token_symbol)
|
||||
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
|
||||
with_fees = 'ussd.account_balances.with_fees'
|
||||
key = cache_data_key(balances_identifier, MetadataPointer.BALANCES_ADJUSTED)
|
||||
adjusted_balance = 45931650.64654012
|
||||
cache_data(key, json.dumps(adjusted_balance))
|
||||
resp = response(activated_account, 'ussd.account_balances', name, init_database, generic_ussd_session)
|
||||
tax_wei = to_wei(int(available_balance)) - int(adjusted_balance)
|
||||
tax = from_wei(int(tax_wei))
|
||||
resp = response(activated_account, with_fees, with_fees[5:], init_database, generic_ussd_session)
|
||||
tax_wei = to_wei(decimals, int(available_balance)) - int(adjusted_balance)
|
||||
tax = from_wei(decimals, int(tax_wei))
|
||||
assert resp == translation_for(key=with_fees,
|
||||
preferred_language=preferred_language,
|
||||
available_balance=available_balance,
|
||||
tax=tax,
|
||||
token_symbol=token_symbol)
|
||||
token_symbol=token_symbol)"""
|
||||
pass
|
||||
|
||||
cached_statement = get_cached_statement(activated_account.blockchain_address)
|
||||
statement = json.loads(cached_statement)
|
||||
statement_transactions = parse_statement_transactions(statement)
|
||||
transaction_sets = [statement_transactions[tx:tx + 3] for tx in range(0, len(statement_transactions), 3)]
|
||||
first_transaction_set = []
|
||||
middle_transaction_set = []
|
||||
last_transaction_set = []
|
||||
if transaction_sets:
|
||||
first_transaction_set = statement_transaction_set(preferred_language, transaction_sets[0])
|
||||
if len(transaction_sets) >= 2:
|
||||
middle_transaction_set = statement_transaction_set(preferred_language, transaction_sets[1])
|
||||
if len(transaction_sets) >= 3:
|
||||
last_transaction_set = statement_transaction_set(preferred_language, transaction_sets[2])
|
||||
|
||||
display_key = 'ussd.first_transaction_set'
|
||||
ussd_menu = UssdMenu.find_by_name('first_transaction_set')
|
||||
name = ussd_menu.get('name')
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
def test_account_statement(activated_account,
|
||||
cache_preferences,
|
||||
cache_statement,
|
||||
generic_ussd_session,
|
||||
init_database,
|
||||
set_active_token,
|
||||
set_locale_files):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
cached_statement = get_cached_statement(blockchain_address)
|
||||
statement_list = parse_statement_transactions(statement=json.loads(cached_statement))
|
||||
first_transaction_set = 'ussd.first_transaction_set'
|
||||
middle_transaction_set = 'ussd.middle_transaction_set'
|
||||
last_transaction_set = 'ussd.last_transaction_set'
|
||||
fallback = translation_for('helpers.no_transaction_history', preferred_language)
|
||||
transaction_sets = ussd_menu_list(fallback=fallback, menu_list=statement_list, split=3)
|
||||
resp = response(activated_account, first_transaction_set, first_transaction_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(first_transaction_set, preferred_language, first_transaction_set=transaction_sets[0])
|
||||
resp = response(activated_account, middle_transaction_set, middle_transaction_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(middle_transaction_set, preferred_language,
|
||||
middle_transaction_set=transaction_sets[1])
|
||||
resp = response(activated_account, last_transaction_set, last_transaction_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(last_transaction_set, preferred_language, last_transaction_set=transaction_sets[2])
|
||||
|
||||
assert resp == translation_for(display_key, preferred_language, first_transaction_set=first_transaction_set)
|
||||
|
||||
display_key = 'ussd.middle_transaction_set'
|
||||
ussd_menu = UssdMenu.find_by_name('middle_transaction_set')
|
||||
name = ussd_menu.get('name')
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
def test_add_guardian_pin_authorization(activated_account,
|
||||
cache_preferences,
|
||||
guardian_account,
|
||||
generic_ussd_session,
|
||||
init_database):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
add_guardian_pin_authorization = 'ussd.add_guardian_pin_authorization'
|
||||
activated_account.add_guardian(guardian_account.phone_number)
|
||||
init_database.flush()
|
||||
generic_ussd_session['external_session_id'] = os.urandom(20).hex()
|
||||
generic_ussd_session['msisdn'] = guardian_account.phone_number
|
||||
generic_ussd_session['data'] = {'guardian_phone_number': guardian_account.phone_number}
|
||||
generic_ussd_session['state'] = 'add_guardian_pin_authorization'
|
||||
resp = response(activated_account,
|
||||
add_guardian_pin_authorization,
|
||||
add_guardian_pin_authorization[5:],
|
||||
init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(f'{add_guardian_pin_authorization}.first', preferred_language,
|
||||
guardian_information=guardian_account.standard_metadata_id())
|
||||
|
||||
assert resp == translation_for(display_key, preferred_language, middle_transaction_set=middle_transaction_set)
|
||||
|
||||
display_key = 'ussd.last_transaction_set'
|
||||
ussd_menu = UssdMenu.find_by_name('last_transaction_set')
|
||||
name = ussd_menu.get('name')
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
def test_guardian_list(activated_account,
|
||||
cache_preferences,
|
||||
generic_ussd_session,
|
||||
guardian_account,
|
||||
init_database):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
guardians_list = 'ussd.guardian_list'
|
||||
guardians_list_header = translation_for('helpers.guardians_list_header', preferred_language)
|
||||
guardian_information = guardian_account.standard_metadata_id()
|
||||
guardians = guardians_list_header + '\n' + f'{guardian_information}\n'
|
||||
activated_account.add_guardian(guardian_account.phone_number)
|
||||
init_database.flush()
|
||||
resp = response(activated_account, guardians_list, guardians_list[5:], init_database, generic_ussd_session)
|
||||
assert resp == translation_for(guardians_list, preferred_language, guardians_list=guardians)
|
||||
guardians = translation_for('helpers.no_guardians_list', preferred_language)
|
||||
identifier = bytes.fromhex(guardian_account.blockchain_address)
|
||||
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
|
||||
cache_data(key, json.dumps({'preferred_language': preferred_language}))
|
||||
resp = response(guardian_account, guardians_list, guardians_list[5:], init_database, generic_ussd_session)
|
||||
assert resp == translation_for(guardians_list, preferred_language, guardians_list=guardians)
|
||||
|
||||
assert resp == translation_for(display_key, preferred_language, last_transaction_set=last_transaction_set)
|
||||
|
||||
display_key = 'ussd.display_user_metadata'
|
||||
ussd_menu = UssdMenu.find_by_name('display_user_metadata')
|
||||
name = ussd_menu.get('name')
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
def test_account_tokens(activated_account, cache_token_data_list, celery_session_worker, generic_ussd_session,
|
||||
init_cache, init_database):
|
||||
"""blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
cached_token_data_list = get_cached_token_data_list(blockchain_address)
|
||||
token_data_list = ['1. GFT 50.0']
|
||||
fallback = translation_for('helpers.no_tokens_list', preferred_language)
|
||||
token_list_sets = ussd_menu_list(fallback=fallback, menu_list=token_data_list, split=3)
|
||||
first_account_tokens_set = 'ussd.first_account_tokens_set'
|
||||
middle_account_tokens_set = 'ussd.middle_account_tokens_set'
|
||||
last_account_tokens_set = 'ussd.last_account_tokens_set'
|
||||
resp = response(activated_account, first_account_tokens_set, first_account_tokens_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(first_account_tokens_set, preferred_language,
|
||||
first_account_tokens_set=token_list_sets[0])
|
||||
assert generic_ussd_session.get('data').get('account_tokens_list') == cached_token_data_list
|
||||
resp = response(activated_account, middle_account_tokens_set, middle_account_tokens_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(middle_account_tokens_set, preferred_language,
|
||||
middle_account_tokens_set=token_list_sets[1])
|
||||
resp = response(activated_account, last_account_tokens_set, last_account_tokens_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(last_account_tokens_set, preferred_language,
|
||||
last_account_tokens_set=token_list_sets[2])"""
|
||||
pass
|
||||
|
||||
|
||||
def test_help(activated_account, cache_preferences, generic_ussd_session, init_database):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
help = 'ussd.help'
|
||||
resp = response(activated_account, help, help[5:], init_database, generic_ussd_session)
|
||||
assert resp == translation_for(help, preferred_language, support_phone=Support.phone_number)
|
||||
|
||||
|
||||
def test_person_data(activated_account, cache_person_metadata, cache_preferences, cached_ussd_session,
|
||||
generic_ussd_session, init_database):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
identifier = bytes.fromhex(blockchain_address)
|
||||
display_user_metadata = 'ussd.display_user_metadata'
|
||||
person_metadata = PersonMetadata(identifier)
|
||||
cached_person_metadata = person_metadata.get_cached_metadata()
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
assert resp == parse_person_metadata(cached_person_metadata, display_key, preferred_language)
|
||||
resp = response(activated_account, display_user_metadata, display_user_metadata[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == parse_person_metadata(cached_person_metadata, display_user_metadata, preferred_language)
|
||||
|
||||
display_key = 'ussd.account_balances_pin_authorization'
|
||||
ussd_menu = UssdMenu.find_by_name('account_balances_pin_authorization')
|
||||
name = ussd_menu.get('name')
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
assert resp == translation_for(f'{display_key}.first', preferred_language)
|
||||
|
||||
activated_account.failed_pin_attempts = 1
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
retry_pin_entry = translation_for('ussd.retry_pin_entry', preferred_language, remaining_attempts=2)
|
||||
assert resp == translation_for(f'{display_key}.retry', preferred_language, retry_pin_entry=retry_pin_entry)
|
||||
activated_account.failed_pin_attempts = 0
|
||||
def test_guarded_account_metadata(activated_account, generic_ussd_session, init_database):
|
||||
reset_guarded_pin_authorization = 'ussd.reset_guarded_pin_authorization'
|
||||
generic_ussd_session['data'] = {'guarded_account_phone_number': activated_account.phone_number}
|
||||
menu_processor = MenuProcessor(activated_account, reset_guarded_pin_authorization,
|
||||
reset_guarded_pin_authorization[5:], init_database, generic_ussd_session)
|
||||
assert menu_processor.guarded_account_metadata() == activated_account.standard_metadata_id()
|
||||
|
||||
display_key = 'ussd.start'
|
||||
ussd_menu = UssdMenu.find_by_name('start')
|
||||
name = ussd_menu.get('name')
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
assert resp == translation_for(display_key,
|
||||
|
||||
def test_guardian_metadata(activated_account, generic_ussd_session, guardian_account, init_database):
|
||||
add_guardian_pin_authorization = 'ussd.add_guardian_pin_authorization'
|
||||
generic_ussd_session['data'] = {'guardian_phone_number': guardian_account.phone_number}
|
||||
menu_processor = MenuProcessor(activated_account, add_guardian_pin_authorization,
|
||||
add_guardian_pin_authorization[5:], init_database, generic_ussd_session)
|
||||
assert menu_processor.guardian_metadata() == guardian_account.standard_metadata_id()
|
||||
|
||||
|
||||
def test_language(activated_account, cache_preferences, generic_ussd_session, init_database, load_languages):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
initial_language_selection = 'ussd.initial_language_selection'
|
||||
select_preferred_language = 'ussd.select_preferred_language'
|
||||
initial_middle_language_set = 'ussd.initial_middle_language_set'
|
||||
middle_language_set = 'ussd.middle_language_set'
|
||||
initial_last_language_set = 'ussd.initial_last_language_set'
|
||||
last_language_set = 'ussd.last_language_set'
|
||||
|
||||
key = cache_data_key('system:languages'.encode('utf-8'), MetadataPointer.NONE)
|
||||
cached_system_languages = get_cached_data(key)
|
||||
language_list: list = json.loads(cached_system_languages)
|
||||
|
||||
fallback = translation_for('helpers.no_language_list', preferred_language)
|
||||
language_list_sets = ussd_menu_list(fallback=fallback, menu_list=language_list, split=3)
|
||||
|
||||
resp = response(activated_account, initial_language_selection, initial_language_selection[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(initial_language_selection, preferred_language,
|
||||
first_language_set=language_list_sets[0])
|
||||
|
||||
resp = response(activated_account, select_preferred_language, select_preferred_language[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(select_preferred_language, preferred_language,
|
||||
first_language_set=language_list_sets[0])
|
||||
|
||||
resp = response(activated_account, initial_middle_language_set, initial_middle_language_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(initial_middle_language_set, preferred_language,
|
||||
middle_language_set=language_list_sets[1])
|
||||
|
||||
resp = response(activated_account, initial_last_language_set, initial_last_language_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(initial_last_language_set, preferred_language,
|
||||
last_language_set=language_list_sets[2])
|
||||
|
||||
resp = response(activated_account, middle_language_set, middle_language_set[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(middle_language_set, preferred_language, middle_language_set=language_list_sets[1])
|
||||
|
||||
resp = response(activated_account, last_language_set, last_language_set[5:], init_database, generic_ussd_session)
|
||||
assert resp == translation_for(last_language_set, preferred_language, last_language_set=language_list_sets[2])
|
||||
|
||||
|
||||
def test_account_creation_prompt(activated_account, cache_preferences, generic_ussd_session, init_database,
|
||||
load_languages):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
user_input = ''
|
||||
if preferred_language == 'en':
|
||||
user_input = '1'
|
||||
elif preferred_language == 'sw':
|
||||
user_input = '2'
|
||||
account_creation_prompt = 'ussd.account_creation_prompt'
|
||||
generic_ussd_session['user_input'] = user_input
|
||||
resp = response(activated_account, account_creation_prompt, account_creation_prompt[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(account_creation_prompt, preferred_language)
|
||||
|
||||
|
||||
def test_reset_guarded_pin_authorization(activated_account, cache_preferences, generic_ussd_session, guardian_account,
|
||||
init_database):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
reset_guarded_pin_authorization = 'ussd.reset_guarded_pin_authorization'
|
||||
generic_ussd_session['external_session_id'] = os.urandom(20).hex()
|
||||
generic_ussd_session['msisdn'] = guardian_account.phone_number
|
||||
generic_ussd_session['data'] = {'guarded_account_phone_number': activated_account.phone_number}
|
||||
resp = response(activated_account,
|
||||
reset_guarded_pin_authorization,
|
||||
reset_guarded_pin_authorization[5:],
|
||||
init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(f'{reset_guarded_pin_authorization}.first', preferred_language,
|
||||
guarded_account_information=activated_account.phone_number)
|
||||
|
||||
|
||||
def test_start(activated_account, cache_balances, cache_preferences, cache_token_data, cache_token_data_list,
|
||||
cache_token_symbol_list, celery_session_worker, generic_ussd_session, init_database, load_chain_spec,
|
||||
mock_sync_balance_api_query, set_active_token):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
token_symbol = get_active_token_symbol(blockchain_address)
|
||||
token_data = get_cached_token_data(blockchain_address, token_symbol)
|
||||
decimals = token_data.get("decimals")
|
||||
identifier = bytes.fromhex(blockchain_address)
|
||||
balances_identifier = [identifier, token_symbol.encode('utf-8')]
|
||||
available_balance = get_cached_available_balance(decimals, balances_identifier)
|
||||
start = 'ussd.start'
|
||||
resp = response(activated_account, start, start[5:], init_database, generic_ussd_session)
|
||||
assert resp == translation_for(start,
|
||||
preferred_language,
|
||||
account_balance=available_balance,
|
||||
account_token_name=token_symbol)
|
||||
|
||||
display_key = 'ussd.start'
|
||||
ussd_menu = UssdMenu.find_by_name('start')
|
||||
name = ussd_menu.get('name')
|
||||
older_timestamp = (activated_account.created - datetime.timedelta(days=35))
|
||||
activated_account.created = older_timestamp
|
||||
init_database.flush()
|
||||
response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
assert mock_get_adjusted_balance['timestamp'] == int((datetime.datetime.now() - datetime.timedelta(days=30)).timestamp())
|
||||
|
||||
display_key = 'ussd.transaction_pin_authorization'
|
||||
ussd_menu = UssdMenu.find_by_name('transaction_pin_authorization')
|
||||
name = ussd_menu.get('name')
|
||||
def test_token_selection_pin_authorization(activated_account, cache_preferences, cache_token_data, generic_ussd_session,
|
||||
init_database, set_active_token):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
token_symbol = get_active_token_symbol(blockchain_address)
|
||||
token_data = get_cached_token_data(blockchain_address, token_symbol)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
token_selection_pin_authorization = 'ussd.token_selection_pin_authorization'
|
||||
generic_ussd_session['data'] = {'selected_token': token_data}
|
||||
resp = response(activated_account,
|
||||
token_selection_pin_authorization,
|
||||
token_selection_pin_authorization[5:],
|
||||
init_database,
|
||||
generic_ussd_session)
|
||||
token_name = token_data.get('name')
|
||||
token_symbol = token_data.get('symbol')
|
||||
token_issuer = token_data.get('issuer')
|
||||
token_contact = token_data.get('contact')
|
||||
token_location = token_data.get('location')
|
||||
data = f'{token_name} ({token_symbol})\n{token_issuer}\n{token_contact}\n{token_location}\n'
|
||||
assert resp == translation_for(f'{token_selection_pin_authorization}.first', preferred_language,
|
||||
token_data=data)
|
||||
|
||||
|
||||
def test_transaction_pin_authorization(activated_account, cache_preferences, cache_token_data, generic_ussd_session,
|
||||
init_database, set_active_token, valid_recipient):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
token_symbol = get_active_token_symbol(blockchain_address)
|
||||
token_data = get_cached_token_data(blockchain_address, token_symbol)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
decimals = token_data.get("decimals")
|
||||
transaction_pin_authorization = 'ussd.transaction_pin_authorization'
|
||||
generic_ussd_session['data'] = {
|
||||
'recipient_phone_number': valid_recipient.phone_number,
|
||||
'transaction_amount': '15'
|
||||
}
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
resp = response(activated_account, transaction_pin_authorization, transaction_pin_authorization[5:], init_database,
|
||||
generic_ussd_session)
|
||||
user_input = generic_ussd_session.get('data').get('transaction_amount')
|
||||
transaction_amount = to_wei(value=int(user_input))
|
||||
transaction_amount = to_wei(decimals, int(user_input))
|
||||
tx_recipient_information = valid_recipient.standard_metadata_id()
|
||||
tx_sender_information = activated_account.standard_metadata_id()
|
||||
assert resp == translation_for(f'{display_key}.first',
|
||||
assert resp == translation_for(f'{transaction_pin_authorization}.first',
|
||||
preferred_language,
|
||||
recipient_information=tx_recipient_information,
|
||||
transaction_amount=from_wei(transaction_amount),
|
||||
transaction_amount=from_wei(decimals, transaction_amount),
|
||||
token_symbol=token_symbol,
|
||||
sender_information=tx_sender_information)
|
||||
|
||||
display_key = 'ussd.exit_insufficient_balance'
|
||||
ussd_menu = UssdMenu.find_by_name('exit_insufficient_balance')
|
||||
name = ussd_menu.get('name')
|
||||
|
||||
def test_guardian_exits(activated_account, cache_preferences, cache_token_data, generic_ussd_session, guardian_account,
|
||||
init_database, set_active_token):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
generic_ussd_session['data'] = {'guardian_phone_number': guardian_account.phone_number}
|
||||
# testing exit guardian addition success
|
||||
exit_guardian_addition_success = 'ussd.exit_guardian_addition_success'
|
||||
resp = response(activated_account, exit_guardian_addition_success, exit_guardian_addition_success[5:],
|
||||
init_database, generic_ussd_session)
|
||||
assert resp == translation_for(exit_guardian_addition_success, preferred_language,
|
||||
guardian_information=guardian_account.standard_metadata_id())
|
||||
|
||||
# testing exit guardian removal success
|
||||
exit_guardian_removal_success = 'ussd.exit_guardian_removal_success'
|
||||
resp = response(activated_account, exit_guardian_removal_success, exit_guardian_removal_success[5:],
|
||||
init_database, generic_ussd_session)
|
||||
assert resp == translation_for(exit_guardian_removal_success, preferred_language,
|
||||
guardian_information=guardian_account.standard_metadata_id())
|
||||
|
||||
generic_ussd_session['data'] = {'failure_reason': 'foo'}
|
||||
# testing exit invalid guardian addition
|
||||
exit_invalid_guardian_addition = 'ussd.exit_invalid_guardian_addition'
|
||||
resp = response(activated_account, exit_invalid_guardian_addition, exit_invalid_guardian_addition[5:],
|
||||
init_database, generic_ussd_session)
|
||||
assert resp == translation_for(exit_invalid_guardian_addition, preferred_language, error_exit='foo')
|
||||
|
||||
# testing exit invalid guardian removal
|
||||
exit_invalid_guardian_removal = 'ussd.exit_invalid_guardian_removal'
|
||||
resp = response(activated_account, exit_invalid_guardian_removal, exit_invalid_guardian_removal[5:],
|
||||
init_database, generic_ussd_session)
|
||||
assert resp == translation_for(exit_invalid_guardian_removal, preferred_language, error_exit='foo')
|
||||
|
||||
|
||||
def test_exit_pin_reset_initiated_success(activated_account, cache_preferences, generic_ussd_session, init_database):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
exit_pin_reset_initiated_success = 'ussd.exit_pin_reset_initiated_success'
|
||||
generic_ussd_session['data'] = {'guarded_account_phone_number': activated_account.phone_number}
|
||||
resp = response(activated_account, exit_pin_reset_initiated_success, exit_pin_reset_initiated_success[5:],
|
||||
init_database, generic_ussd_session)
|
||||
assert resp == translation_for(exit_pin_reset_initiated_success,
|
||||
preferred_language,
|
||||
guarded_account_information=activated_account.standard_metadata_id())
|
||||
|
||||
|
||||
def test_exit_insufficient_balance(activated_account, cache_balances, cache_preferences, cache_token_data,
|
||||
generic_ussd_session, init_database, set_active_token, valid_recipient):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
token_symbol = get_active_token_symbol(blockchain_address)
|
||||
token_data = get_cached_token_data(blockchain_address, token_symbol)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
decimals = token_data.get("decimals")
|
||||
identifier = bytes.fromhex(blockchain_address)
|
||||
balances_identifier = [identifier, token_symbol.encode('utf-8')]
|
||||
available_balance = get_cached_available_balance(decimals, balances_identifier)
|
||||
tx_recipient_information = valid_recipient.standard_metadata_id()
|
||||
exit_insufficient_balance = 'ussd.exit_insufficient_balance'
|
||||
generic_ussd_session['data'] = {
|
||||
'recipient_phone_number': valid_recipient.phone_number,
|
||||
'transaction_amount': '85'
|
||||
}
|
||||
transaction_amount = generic_ussd_session.get('data').get('transaction_amount')
|
||||
transaction_amount = to_wei(value=int(transaction_amount))
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
assert resp == translation_for(display_key,
|
||||
transaction_amount = to_wei(decimals, int(transaction_amount))
|
||||
resp = response(activated_account, exit_insufficient_balance, exit_insufficient_balance[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(exit_insufficient_balance,
|
||||
preferred_language,
|
||||
amount=from_wei(transaction_amount),
|
||||
amount=from_wei(decimals, transaction_amount),
|
||||
token_symbol=token_symbol,
|
||||
recipient_information=tx_recipient_information,
|
||||
token_balance=available_balance)
|
||||
|
||||
display_key = 'ussd.exit_invalid_menu_option'
|
||||
ussd_menu = UssdMenu.find_by_name('exit_invalid_menu_option')
|
||||
name = ussd_menu.get('name')
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
assert resp == translation_for(display_key, preferred_language, support_phone=Support.phone_number)
|
||||
|
||||
display_key = 'ussd.exit_successful_transaction'
|
||||
ussd_menu = UssdMenu.find_by_name('exit_successful_transaction')
|
||||
name = ussd_menu.get('name')
|
||||
def test_exit_invalid_menu_option(activated_account, cache_preferences, generic_ussd_session, init_database,
|
||||
load_support_phone):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
exit_invalid_menu_option = 'ussd.exit_invalid_menu_option'
|
||||
resp = response(activated_account, exit_invalid_menu_option, exit_invalid_menu_option[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(exit_invalid_menu_option, preferred_language, support_phone=Support.phone_number)
|
||||
|
||||
|
||||
def test_exit_pin_blocked(activated_account, cache_preferences, generic_ussd_session, init_database,
|
||||
load_support_phone):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
exit_pin_blocked = 'ussd.exit_pin_blocked'
|
||||
resp = response(activated_account, exit_pin_blocked, exit_pin_blocked[5:], init_database, generic_ussd_session)
|
||||
assert resp == translation_for(exit_pin_blocked, preferred_language, support_phone=Support.phone_number)
|
||||
|
||||
|
||||
def test_exit_successful_token_selection(activated_account, cache_preferences, cache_token_data, generic_ussd_session,
|
||||
init_database, set_active_token):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
token_symbol = get_active_token_symbol(blockchain_address)
|
||||
token_data = get_cached_token_data(blockchain_address, token_symbol)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
exit_successful_token_selection = 'ussd.exit_successful_token_selection'
|
||||
generic_ussd_session['data'] = {'selected_token': token_data}
|
||||
resp = response(activated_account, exit_successful_token_selection, exit_successful_token_selection[5:],
|
||||
init_database, generic_ussd_session)
|
||||
assert resp == translation_for(exit_successful_token_selection, preferred_language, token_symbol=token_symbol)
|
||||
|
||||
|
||||
def test_exit_successful_transaction(activated_account, cache_preferences, cache_token_data, generic_ussd_session,
|
||||
init_database, set_active_token, valid_recipient):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
token_symbol = get_active_token_symbol(blockchain_address)
|
||||
token_data = get_cached_token_data(blockchain_address, token_symbol)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
decimals = token_data.get("decimals")
|
||||
tx_recipient_information = valid_recipient.standard_metadata_id()
|
||||
tx_sender_information = activated_account.standard_metadata_id()
|
||||
exit_successful_transaction = 'ussd.exit_successful_transaction'
|
||||
generic_ussd_session['data'] = {
|
||||
'recipient_phone_number': valid_recipient.phone_number,
|
||||
'transaction_amount': '15'
|
||||
}
|
||||
transaction_amount = generic_ussd_session.get('data').get('transaction_amount')
|
||||
transaction_amount = to_wei(value=int(transaction_amount))
|
||||
resp = response(activated_account, display_key, name, init_database, generic_ussd_session)
|
||||
assert resp == translation_for(display_key,
|
||||
transaction_amount = to_wei(decimals, int(transaction_amount))
|
||||
resp = response(activated_account, exit_successful_transaction, exit_successful_transaction[5:], init_database,
|
||||
generic_ussd_session)
|
||||
assert resp == translation_for(exit_successful_transaction,
|
||||
preferred_language,
|
||||
transaction_amount=from_wei(transaction_amount),
|
||||
transaction_amount=from_wei(decimals, transaction_amount),
|
||||
token_symbol=token_symbol,
|
||||
recipient_information=tx_recipient_information,
|
||||
sender_information=tx_sender_information)
|
||||
|
@ -10,13 +10,16 @@ from chainlib.hash import strip_0x
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.chain import Chain
|
||||
from cic_ussd.account.metadata import get_cached_preferred_language
|
||||
from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
|
||||
from cic_ussd.db.models.task_tracker import TaskTracker
|
||||
from cic_ussd.menu.ussd_menu import UssdMenu
|
||||
from cic_ussd.metadata import PersonMetadata
|
||||
from cic_ussd.processor.ussd import get_menu, handle_menu, handle_menu_operations
|
||||
from cic_ussd.processor.ussd import (get_menu,
|
||||
handle_menu,
|
||||
handle_menu_operations)
|
||||
from cic_ussd.processor.util import ussd_menu_list
|
||||
from cic_ussd.state_machine.logic.language import preferred_langauge_from_selection
|
||||
from cic_ussd.translation import translation_for
|
||||
|
||||
# test imports
|
||||
@ -43,7 +46,7 @@ def test_handle_menu(activated_account,
|
||||
ussd_menu = UssdMenu.find_by_name('exit_pin_blocked')
|
||||
assert menu_resp.get('name') == ussd_menu.get('name')
|
||||
menu_resp = handle_menu(pending_account, init_database)
|
||||
ussd_menu = UssdMenu.find_by_name('initial_language_selection')
|
||||
ussd_menu = UssdMenu.find_by_name('initial_pin_entry')
|
||||
assert menu_resp.get('name') == ussd_menu.get('name')
|
||||
identifier = bytes.fromhex(strip_0x(pending_account.blockchain_address))
|
||||
key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
|
||||
@ -75,38 +78,62 @@ def test_get_menu(activated_account,
|
||||
assert menu_resp.get('name') == ussd_menu.get('name')
|
||||
|
||||
|
||||
def test_handle_menu_operations(activated_account,
|
||||
cache_preferences,
|
||||
celery_session_worker,
|
||||
generic_ussd_session,
|
||||
init_database,
|
||||
init_cache,
|
||||
load_chain_spec,
|
||||
load_config,
|
||||
mock_account_creation_task_result,
|
||||
persisted_ussd_session,
|
||||
person_metadata,
|
||||
set_locale_files,
|
||||
setup_metadata_request_handler,
|
||||
setup_metadata_signer,
|
||||
task_uuid):
|
||||
# sourcery skip: extract-duplicate-method
|
||||
chain_str = Chain.spec.__str__()
|
||||
def test_handle_no_account_menu_operations(celery_session_worker,
|
||||
init_cache,
|
||||
init_database,
|
||||
load_chain_spec,
|
||||
load_config,
|
||||
load_languages,
|
||||
load_ussd_menu,
|
||||
mock_account_creation_task_result,
|
||||
pending_account,
|
||||
persisted_ussd_session,
|
||||
set_locale_files,
|
||||
task_uuid):
|
||||
initial_language_selection = 'ussd.initial_language_selection'
|
||||
phone = phone_number()
|
||||
external_session_id = os.urandom(20).hex()
|
||||
valid_service_codes = load_config.get('USSD_SERVICE_CODE').split(",")
|
||||
preferred_language = i18n.config.get('fallback')
|
||||
resp = handle_menu_operations(chain_str, external_session_id, phone, None, valid_service_codes[0], init_database, '4444')
|
||||
assert resp == translation_for('ussd.account_creation_prompt', preferred_language)
|
||||
key = cache_data_key('system:languages'.encode('utf-8'), MetadataPointer.NONE)
|
||||
cached_system_languages = get_cached_data(key)
|
||||
language_list: list = json.loads(cached_system_languages)
|
||||
fallback = translation_for('helpers.no_language_list', preferred_language)
|
||||
language_list_sets = ussd_menu_list(fallback=fallback, menu_list=language_list, split=3)
|
||||
resp = handle_menu_operations(external_session_id, phone, None, valid_service_codes[0], init_database, '')
|
||||
assert resp == translation_for(initial_language_selection, preferred_language,
|
||||
first_language_set=language_list_sets[0])
|
||||
cached_ussd_session = get_cached_data(external_session_id)
|
||||
ussd_session = json.loads(cached_ussd_session)
|
||||
assert ussd_session['msisdn'] == phone
|
||||
persisted_ussd_session.external_session_id = external_session_id
|
||||
persisted_ussd_session.msisdn = phone
|
||||
persisted_ussd_session.state = initial_language_selection[5:]
|
||||
init_database.add(persisted_ussd_session)
|
||||
init_database.commit()
|
||||
account_creation_prompt = 'ussd.account_creation_prompt'
|
||||
user_input = '2'
|
||||
resp = handle_menu_operations(external_session_id, phone, None, valid_service_codes[0], init_database, user_input)
|
||||
preferred_language = preferred_langauge_from_selection(user_input)
|
||||
assert resp == translation_for(account_creation_prompt, preferred_language)
|
||||
task_tracker = init_database.query(TaskTracker).filter_by(task_uuid=task_uuid).first()
|
||||
assert task_tracker.task_uuid == task_uuid
|
||||
cached_creation_task_uuid = get_cached_data(task_uuid)
|
||||
creation_task_uuid_data = json.loads(cached_creation_task_uuid)
|
||||
assert creation_task_uuid_data['status'] == 'PENDING'
|
||||
|
||||
|
||||
def test_handle_account_menu_operations(activated_account,
|
||||
cache_preferences,
|
||||
celery_session_worker,
|
||||
init_database,
|
||||
load_config,
|
||||
persisted_ussd_session,
|
||||
person_metadata,
|
||||
set_locale_files,
|
||||
setup_metadata_request_handler,
|
||||
setup_metadata_signer, ):
|
||||
valid_service_codes = load_config.get('USSD_SERVICE_CODE').split(",")
|
||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
||||
person_metadata_client = PersonMetadata(identifier)
|
||||
with requests_mock.Mocker(real_http=False) as request_mocker:
|
||||
@ -117,6 +144,5 @@ def test_handle_menu_operations(activated_account,
|
||||
phone = activated_account.phone_number
|
||||
preferred_language = get_cached_preferred_language(activated_account.blockchain_address)
|
||||
persisted_ussd_session.state = 'enter_transaction_recipient'
|
||||
resp = handle_menu_operations(chain_str, external_session_id, phone, None, valid_service_codes[0], init_database, '1')
|
||||
resp = handle_menu_operations(external_session_id, phone, None, valid_service_codes[0], init_database, '1')
|
||||
assert resp == translation_for('ussd.enter_transaction_recipient', preferred_language)
|
||||
|
||||
|
@ -10,7 +10,10 @@ from cic_types.models.person import get_contact_data_from_vcard
|
||||
# local imports
|
||||
from cic_ussd.account.metadata import get_cached_preferred_language
|
||||
from cic_ussd.metadata import PersonMetadata
|
||||
from cic_ussd.processor.util import latest_input, parse_person_metadata, resume_last_ussd_session
|
||||
from cic_ussd.processor.util import (latest_input,
|
||||
parse_person_metadata,
|
||||
resume_last_ussd_session,
|
||||
ussd_menu_list)
|
||||
from cic_ussd.translation import translation_for
|
||||
|
||||
|
||||
@ -60,3 +63,20 @@ def test_parse_person_metadata(activated_account, cache_person_metadata, cache_p
|
||||
])
|
||||
def test_resume_last_ussd_session(expected_menu_name, last_state, load_ussd_menu):
|
||||
assert resume_last_ussd_session(last_state).get('name') == expected_menu_name
|
||||
|
||||
|
||||
def test_ussd_menu_list(activated_account, cache_preferences, load_ussd_menu, set_locale_files):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
fallback = translation_for('helpers.no_transaction_history', preferred_language)
|
||||
menu_list_sets = ['1. FII 50.0', '2. GFT 60.0', '3. DET 49.99']
|
||||
split = 3
|
||||
menu_list = ussd_menu_list(fallback=fallback, menu_list=menu_list_sets, split=split)
|
||||
menu_list_sets = [menu_list_sets[item:item + split] for item in range(0, len(menu_list), split)]
|
||||
menu_list_reprs = []
|
||||
for i in range(split):
|
||||
try:
|
||||
menu_list_reprs.append(''.join(f'{list_set_item}\n' for list_set_item in menu_list_sets[i]).rstrip('\n'))
|
||||
except IndexError:
|
||||
menu_list_reprs.append(fallback)
|
||||
assert menu_list == menu_list_reprs
|
||||
|
@ -3,8 +3,7 @@ import json
|
||||
|
||||
# external imports
|
||||
import pytest
|
||||
import requests_mock
|
||||
from chainlib.hash import strip_0x
|
||||
|
||||
from cic_types.models.person import Person, get_contact_data_from_vcard
|
||||
|
||||
# local imports
|
||||
@ -12,9 +11,7 @@ from cic_ussd.cache import get_cached_data
|
||||
from cic_ussd.account.maps import gender
|
||||
from cic_ussd.account.metadata import get_cached_preferred_language
|
||||
from cic_ussd.db.enum import AccountStatus
|
||||
from cic_ussd.metadata import PreferencesMetadata
|
||||
from cic_ussd.state_machine.logic.account import (change_preferred_language,
|
||||
edit_user_metadata_attribute,
|
||||
from cic_ussd.state_machine.logic.account import (edit_user_metadata_attribute,
|
||||
parse_gender,
|
||||
parse_person_metadata,
|
||||
save_complete_person_metadata,
|
||||
@ -26,32 +23,6 @@ from cic_ussd.translation import translation_for
|
||||
# test imports
|
||||
|
||||
|
||||
@pytest.mark.parametrize('user_input, expected_preferred_language', [
|
||||
('1', 'en'),
|
||||
('2', 'sw')
|
||||
])
|
||||
def test_change_preferred_language(activated_account,
|
||||
celery_session_worker,
|
||||
expected_preferred_language,
|
||||
init_database,
|
||||
generic_ussd_session,
|
||||
mock_response,
|
||||
preferences,
|
||||
setup_metadata_request_handler,
|
||||
user_input):
|
||||
identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
|
||||
preferences_metadata_client = PreferencesMetadata(identifier)
|
||||
with requests_mock.Mocker(real_http=False) as requests_mocker:
|
||||
requests_mocker.register_uri(
|
||||
'POST', preferences_metadata_client.url, status_code=200, reason='OK', json=mock_response
|
||||
)
|
||||
state_machine_data = (user_input, generic_ussd_session, activated_account, init_database)
|
||||
res = change_preferred_language(state_machine_data)
|
||||
init_database.commit()
|
||||
assert res.id is not None
|
||||
assert activated_account.preferred_language == expected_preferred_language
|
||||
|
||||
|
||||
@pytest.mark.parametrize('user_input', [
|
||||
'1',
|
||||
'2',
|
||||
|
@ -0,0 +1,52 @@
|
||||
# standard imports
|
||||
import json
|
||||
|
||||
# external imports
|
||||
import requests_mock
|
||||
from cic_types.condiments import MetadataPointer
|
||||
|
||||
# local imports
|
||||
from cic_ussd.cache import cache_data_key, get_cached_data
|
||||
from cic_ussd.metadata import PreferencesMetadata
|
||||
from cic_ussd.state_machine.logic.language import (change_preferred_language,
|
||||
is_valid_language_selection,
|
||||
preferred_langauge_from_selection,
|
||||
process_language_selection)
|
||||
|
||||
# test imports
|
||||
|
||||
|
||||
def test_change_preferred_language(activated_account,
|
||||
cached_ussd_session,
|
||||
celery_session_worker,
|
||||
init_database,
|
||||
load_languages,
|
||||
mocker,
|
||||
setup_metadata_signer,
|
||||
setup_metadata_request_handler):
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
preferences = {
|
||||
'preferred_language': 'en'
|
||||
}
|
||||
ussd_session['data'] = preferences
|
||||
mock_add_preferences_metadata = mocker.patch('cic_ussd.tasks.metadata.add_preferences_metadata.apply_async')
|
||||
with requests_mock.Mocker(real_http=False) as request_mocker:
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
metadata_client = PreferencesMetadata(identifier=identifier)
|
||||
request_mocker.register_uri('POST', metadata_client.url, status_code=201, reason='CREATED', json=preferences)
|
||||
state_machine_data = ('1', ussd_session, activated_account, init_database)
|
||||
change_preferred_language(state_machine_data)
|
||||
mock_add_preferences_metadata.assert_called_with(
|
||||
(activated_account.blockchain_address, preferences), {}, queue='cic-ussd')
|
||||
|
||||
|
||||
def test_is_valid_language_selection(activated_account,
|
||||
generic_ussd_session,
|
||||
init_cache,
|
||||
init_database,
|
||||
load_languages):
|
||||
state_machine_data = ('1', generic_ussd_session, activated_account, init_database)
|
||||
assert is_valid_language_selection(state_machine_data) is True
|
||||
state_machine_data = ('12', generic_ussd_session, activated_account, init_database)
|
||||
assert is_valid_language_selection(state_machine_data) is False
|
@ -9,7 +9,10 @@ from cic_ussd.state_machine.logic.menu import (menu_one_selected,
|
||||
menu_four_selected,
|
||||
menu_five_selected,
|
||||
menu_six_selected,
|
||||
menu_nine_selected,
|
||||
menu_zero_zero_selected,
|
||||
menu_eleven_selected,
|
||||
menu_twenty_two_selected,
|
||||
menu_ninety_nine_selected)
|
||||
|
||||
# test imports
|
||||
@ -29,8 +32,14 @@ def test_menu_selection(init_database, pending_account, persisted_ussd_session):
|
||||
assert menu_five_selected(('e', ussd_session, pending_account, init_database)) is False
|
||||
assert menu_six_selected(('6', ussd_session, pending_account, init_database)) is True
|
||||
assert menu_six_selected(('8', ussd_session, pending_account, init_database)) is False
|
||||
assert menu_nine_selected(('9', ussd_session, pending_account, init_database)) is True
|
||||
assert menu_nine_selected(('-', ussd_session, pending_account, init_database)) is False
|
||||
assert menu_zero_zero_selected(('00', ussd_session, pending_account, init_database)) is True
|
||||
assert menu_zero_zero_selected(('/', ussd_session, pending_account, init_database)) is False
|
||||
assert menu_eleven_selected(('11', ussd_session, pending_account, init_database)) is True
|
||||
assert menu_eleven_selected(('*', ussd_session, pending_account, init_database)) is False
|
||||
assert menu_twenty_two_selected(('22', ussd_session, pending_account, init_database)) is True
|
||||
assert menu_twenty_two_selected(('5', ussd_session, pending_account, init_database)) is False
|
||||
assert menu_ninety_nine_selected(('99', ussd_session, pending_account, init_database)) is True
|
||||
assert menu_ninety_nine_selected(('d', ussd_session, pending_account, init_database)) is False
|
||||
|
||||
|
@ -0,0 +1,221 @@
|
||||
# standard imports
|
||||
import json
|
||||
|
||||
# external imports
|
||||
import requests_mock
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.guardianship import Guardianship
|
||||
from cic_ussd.account.metadata import get_cached_preferred_language
|
||||
from cic_ussd.cache import cache_data_key, get_cached_data
|
||||
from cic_ussd.db.models.account import Account
|
||||
from cic_ussd.metadata import PersonMetadata
|
||||
from cic_ussd.state_machine.logic.pin_guard import (add_pin_guardian,
|
||||
is_dialers_pin_guardian,
|
||||
is_others_pin_guardian,
|
||||
is_set_pin_guardian,
|
||||
remove_pin_guardian,
|
||||
initiate_pin_reset,
|
||||
save_guardian_to_session_data,
|
||||
save_guarded_account_session_data,
|
||||
retrieve_person_metadata,
|
||||
is_valid_guardian_addition)
|
||||
from cic_ussd.translation import translation_for
|
||||
|
||||
|
||||
def test_save_guardian_to_session_data(activated_account,
|
||||
cached_ussd_session,
|
||||
celery_session_worker,
|
||||
guardian_account,
|
||||
init_cache,
|
||||
init_database):
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
ussd_session['msisdn'] = activated_account.phone_number
|
||||
state_machine_data = (guardian_account.phone_number, ussd_session, activated_account, init_database)
|
||||
save_guardian_to_session_data(state_machine_data)
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
assert ussd_session.get('data').get('guardian_phone_number') == guardian_account.phone_number
|
||||
|
||||
|
||||
def test_save_guarded_account_session_data(activated_account,
|
||||
cached_ussd_session,
|
||||
celery_session_worker,
|
||||
guardian_account,
|
||||
init_cache,
|
||||
init_database):
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
ussd_session['msisdn'] = guardian_account.phone_number
|
||||
state_machine_data = (activated_account.phone_number, ussd_session, guardian_account, init_database)
|
||||
save_guarded_account_session_data(state_machine_data)
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
assert ussd_session.get('data').get('guarded_account_phone_number') == activated_account.phone_number
|
||||
|
||||
|
||||
def test_retrieve_person_metadata(activated_account,
|
||||
cached_ussd_session,
|
||||
celery_session_worker,
|
||||
guardian_account,
|
||||
init_cache,
|
||||
init_database,
|
||||
mocker,
|
||||
person_metadata,
|
||||
setup_metadata_request_handler,
|
||||
setup_metadata_signer):
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
ussd_session['msisdn'] = activated_account.phone_number
|
||||
state_machine_data = (guardian_account.phone_number, ussd_session, activated_account, init_database)
|
||||
mocker_query_person_metadata = mocker.patch('cic_ussd.tasks.metadata.query_person_metadata.apply_async')
|
||||
with requests_mock.Mocker(real_http=False) as request_mocker:
|
||||
identifier = bytes.fromhex(activated_account.blockchain_address)
|
||||
metadata_client = PersonMetadata(identifier)
|
||||
request_mocker.register_uri('GET', metadata_client.url, json=person_metadata, reason='OK', status_code=200)
|
||||
retrieve_person_metadata(state_machine_data)
|
||||
mocker_query_person_metadata.assert_called_with((guardian_account.blockchain_address,), {}, queue='cic-ussd')
|
||||
|
||||
|
||||
def test_is_valid_guardian_addition(activated_account,
|
||||
cache_preferences,
|
||||
cached_ussd_session,
|
||||
celery_session_worker,
|
||||
init_cache,
|
||||
init_database,
|
||||
guardian_account,
|
||||
load_languages,
|
||||
load_ussd_menu,
|
||||
set_locale_files,
|
||||
setup_guardianship):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
state_machine_data = (guardian_account.phone_number, ussd_session, activated_account, init_database)
|
||||
assert is_valid_guardian_addition(state_machine_data) is True
|
||||
|
||||
state_machine_data = (activated_account.phone_number, ussd_session, activated_account, init_database)
|
||||
assert is_valid_guardian_addition(state_machine_data) is False
|
||||
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
failure_reason = translation_for('helpers.error.is_initiator', preferred_language)
|
||||
assert ussd_session.get('data').get('failure_reason') == failure_reason
|
||||
|
||||
state_machine_data = (Guardianship.guardians[0], ussd_session, activated_account, init_database)
|
||||
assert is_valid_guardian_addition(state_machine_data) is False
|
||||
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
failure_reason = translation_for('helpers.error.is_existent_guardian', preferred_language)
|
||||
assert ussd_session.get('data').get('failure_reason') == failure_reason
|
||||
|
||||
|
||||
def test_add_pin_guardian(activated_account, generic_ussd_session, guardian_account, init_database):
|
||||
generic_ussd_session['data'] = {'guardian_phone_number': guardian_account.phone_number}
|
||||
state_machine_data = ('', generic_ussd_session, activated_account, init_database)
|
||||
add_pin_guardian(state_machine_data)
|
||||
account = Account.get_by_phone_number(activated_account.phone_number, init_database)
|
||||
assert account.get_guardians()[0] == guardian_account.phone_number
|
||||
|
||||
|
||||
def test_is_set_pin_guardian(activated_account,
|
||||
cache_preferences,
|
||||
cached_ussd_session,
|
||||
celery_session_worker,
|
||||
init_cache,
|
||||
init_database,
|
||||
guardian_account,
|
||||
load_languages,
|
||||
load_ussd_menu,
|
||||
set_locale_files,
|
||||
setup_guardianship):
|
||||
blockchain_address = activated_account.blockchain_address
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
preferred_language = get_cached_preferred_language(blockchain_address)
|
||||
assert is_set_pin_guardian(activated_account, guardian_account.phone_number, preferred_language, init_database,
|
||||
ussd_session) is False
|
||||
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
failure_reason = translation_for('helpers.error.is_not_existent_guardian', preferred_language)
|
||||
assert ussd_session.get('data').get('failure_reason') == failure_reason
|
||||
|
||||
assert is_set_pin_guardian(activated_account, Guardianship.guardians[0], preferred_language, init_database,
|
||||
ussd_session) is True
|
||||
|
||||
assert is_set_pin_guardian(activated_account, activated_account.phone_number, preferred_language, init_database,
|
||||
ussd_session) is False
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
failure_reason = translation_for('helpers.error.is_initiator', preferred_language)
|
||||
assert ussd_session.get('data').get('failure_reason') == failure_reason
|
||||
|
||||
|
||||
def test_is_dialers_pin_guardian(activated_account,
|
||||
cache_preferences,
|
||||
cached_ussd_session,
|
||||
celery_session_worker,
|
||||
init_database,
|
||||
guardian_account):
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
state_machine_data = (guardian_account.phone_number, ussd_session, activated_account, init_database)
|
||||
assert is_dialers_pin_guardian(state_machine_data) is False
|
||||
activated_account.add_guardian(guardian_account.phone_number)
|
||||
init_database.flush()
|
||||
state_machine_data = (guardian_account.phone_number, ussd_session, activated_account, init_database)
|
||||
assert is_dialers_pin_guardian(state_machine_data) is True
|
||||
|
||||
|
||||
def test_is_others_pin_guardian(activated_account,
|
||||
cache_preferences,
|
||||
cached_ussd_session,
|
||||
celery_session_worker,
|
||||
init_database,
|
||||
guardian_account):
|
||||
ussd_session = get_cached_data(cached_ussd_session.external_session_id)
|
||||
ussd_session = json.loads(ussd_session)
|
||||
state_machine_data = (activated_account.phone_number, ussd_session, guardian_account, init_database)
|
||||
assert is_others_pin_guardian(state_machine_data) is False
|
||||
activated_account.add_guardian(guardian_account.phone_number)
|
||||
init_database.flush()
|
||||
state_machine_data = (activated_account.phone_number, ussd_session, guardian_account, init_database)
|
||||
assert is_others_pin_guardian(state_machine_data) is True
|
||||
|
||||
|
||||
def test_remove_pin_guardian(activated_account, generic_ussd_session, guardian_account, init_database):
|
||||
generic_ussd_session['data'] = {'guardian_phone_number': guardian_account.phone_number}
|
||||
activated_account.add_guardian(guardian_account.phone_number)
|
||||
    init_database.flush()
    assert activated_account.get_guardians()[0] == guardian_account.phone_number
    state_machine_data = ('', generic_ussd_session, activated_account, init_database)
    remove_pin_guardian(state_machine_data)
    assert len(activated_account.get_guardians()) == 0


def test_initiate_pin_reset(activated_account,
                            cache_preferences,
                            celery_session_worker,
                            cached_ussd_session,
                            guardian_account,
                            init_cache,
                            init_database,
                            load_ussd_menu,
                            mock_notifier_api,
                            set_locale_files):
    ussd_session = get_cached_data(cached_ussd_session.external_session_id)
    ussd_session = json.loads(ussd_session)
    ussd_session['data'] = {'guarded_account_phone_number': activated_account.phone_number}
    state_machine_data = ('', ussd_session, guardian_account, init_database)
    initiate_pin_reset(state_machine_data)
    blockchain_address = activated_account.blockchain_address
    preferred_language = get_cached_preferred_language(blockchain_address)
    message = translation_for('sms.pin_reset_initiated', preferred_language, pin_initiator=guardian_account.standard_metadata_id())
    assert mock_notifier_api.get('message') == message
    assert mock_notifier_api.get('recipient') == activated_account.phone_number
@ -23,6 +23,7 @@ def test_upsell_unregistered_recipient(activated_account,
                                       load_support_phone,
                                       mock_notifier_api,
                                       set_locale_files,
                                       set_active_token,
                                       valid_recipient):
    cached_ussd_session.set_data('recipient_phone_number', valid_recipient.phone_number)
    state_machine_data = ('', cached_ussd_session.to_json(), activated_account, init_database)
@ -0,0 +1,69 @@
# standard imports
import json

# external imports
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.cache import cache_data_key, get_cached_data
from cic_ussd.state_machine.logic.tokens import (is_valid_token_selection,
                                                 process_token_selection,
                                                 set_selected_active_token)
from cic_ussd.account.tokens import get_cached_token_data_list


# test imports


def test_is_valid_token_selection(activated_account,
                                  cache_token_data_list,
                                  cache_token_symbol_list,
                                  cached_ussd_session,
                                  init_cache,
                                  init_database):
    cached_token_data_list = get_cached_token_data_list(activated_account.blockchain_address)
    ussd_session = get_cached_data(cached_ussd_session.external_session_id)
    ussd_session = json.loads(ussd_session)
    ussd_session['data'] = {'account_tokens_list': cached_token_data_list}
    state_machine_data = ('GFT', ussd_session, activated_account, init_database)
    assert is_valid_token_selection(state_machine_data) is True
    state_machine_data = ('1', ussd_session, activated_account, init_database)
    assert is_valid_token_selection(state_machine_data) is True
    state_machine_data = ('3', ussd_session, activated_account, init_database)
    assert is_valid_token_selection(state_machine_data) is False


def test_process_token_selection(activated_account,
                                 cache_token_data_list,
                                 cache_token_symbol_list,
                                 cached_ussd_session,
                                 celery_session_worker,
                                 init_cache,
                                 init_database):
    cached_token_data_list = get_cached_token_data_list(activated_account.blockchain_address)
    ussd_session = get_cached_data(cached_ussd_session.external_session_id)
    ussd_session = json.loads(ussd_session)
    ussd_session['data'] = {'account_tokens_list': cached_token_data_list}
    state_machine_data = ('GFT', ussd_session, activated_account, init_database)
    process_token_selection(state_machine_data)
    ussd_session = get_cached_data(cached_ussd_session.external_session_id)
    ussd_session = json.loads(ussd_session)
    assert ussd_session.get('data').get('selected_token').get('symbol') == 'GFT'


def test_set_selected_active_token(activated_account,
                                   cache_token_data_list,
                                   cache_token_symbol_list,
                                   cached_ussd_session,
                                   init_cache,
                                   init_database):
    cached_token_data_list = get_cached_token_data_list(activated_account.blockchain_address)
    ussd_session = get_cached_data(cached_ussd_session.external_session_id)
    ussd_session = json.loads(ussd_session)
    ussd_session['data'] = {'selected_token': cached_token_data_list[0]}
    state_machine_data = ('GFT', ussd_session, activated_account, init_database)
    set_selected_active_token(state_machine_data)
    identifier = bytes.fromhex(activated_account.blockchain_address)
    key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_ACTIVE)
    active_token = get_cached_data(key)
    assert active_token == 'GFT'
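
The assertions above pin down is_valid_token_selection only by example: a known symbol ('GFT') is accepted, an in-range 1-based index ('1') is accepted, and an out-of-range index ('3') is rejected. A minimal validator consistent with those assertions, assuming account_tokens_list holds dicts carrying a 'symbol' key (a sketch, not the cic_ussd implementation):

# Sketch only: a selection validator that satisfies the three assertions above.
# Assumes ussd_session['data']['account_tokens_list'] is a list of token dicts
# with a 'symbol' key, and that digit input is treated as a 1-based list index.
def is_valid_token_selection_sketch(state_machine_data):
    user_input, ussd_session, account, session = state_machine_data
    tokens = ussd_session.get('data', {}).get('account_tokens_list', [])
    if user_input.isdigit():
        return 0 < int(user_input) <= len(tokens)
    return any(token.get('symbol') == user_input for token in tokens)
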
@ -3,13 +3,12 @@ import json

# external imports
import pytest
import requests_mock
from chainlib.hash import strip_0x

# local imports
from cic_ussd.account.metadata import get_cached_preferred_language
from cic_ussd.account.tokens import get_active_token_symbol, get_cached_token_data
from cic_ussd.account.transaction import to_wei
from cic_ussd.cache import get_cached_data
from cic_ussd.metadata import PersonMetadata
from cic_ussd.state_machine.logic.transaction import (is_valid_recipient,
                                                      is_valid_transaction_amount,
                                                      has_sufficient_balance,
@ -18,7 +17,6 @@ from cic_ussd.state_machine.logic.transaction import (is_valid_recipient,
                                                      save_recipient_phone_to_session_data,
                                                      save_transaction_amount_to_session_data)


# test imports


@ -49,17 +47,18 @@ def test_is_valid_transaction_amount(activated_account, amount, expected_result,
])
def test_has_sufficient_balance(activated_account,
                                cache_balances,
                                cache_default_token_data,
                                cache_token_data,
                                expected_result,
                                generic_ussd_session,
                                init_database,
                                set_active_token,
                                value):
    state_machine_data = (value, generic_ussd_session, activated_account, init_database)
    assert has_sufficient_balance(state_machine_data=state_machine_data) == expected_result


def test_process_transaction_request(activated_account,
                                     cache_default_token_data,
                                     cache_token_data,
                                     cached_ussd_session,
                                     celery_session_worker,
                                     init_cache,
@ -67,7 +66,12 @@ def test_process_transaction_request(activated_account,
                                     load_chain_spec,
                                     load_config,
                                     mock_transfer_api,
                                     set_active_token,
                                     valid_recipient):
    blockchain_address = activated_account.blockchain_address
    token_symbol = get_active_token_symbol(blockchain_address)
    token_data = get_cached_token_data(blockchain_address, token_symbol)
    decimals = token_data.get("decimals")
    cached_ussd_session.set_data('recipient_phone_number', valid_recipient.phone_number)
    cached_ussd_session.set_data('transaction_amount', '50')
    ussd_session = get_cached_data(cached_ussd_session.external_session_id)
@ -76,7 +80,7 @@ def test_process_transaction_request(activated_account,
    process_transaction_request(state_machine_data)
    assert mock_transfer_api['from_address'] == activated_account.blockchain_address
    assert mock_transfer_api['to_address'] == valid_recipient.blockchain_address
    assert mock_transfer_api['value'] == to_wei(50)
    assert mock_transfer_api['value'] == to_wei(decimals, 50)
    assert mock_transfer_api['token_symbol'] == load_config.get('TEST_TOKEN_SYMBOL')
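
The expected value changes from to_wei(50) to to_wei(decimals, 50), and the notification test further down now calls from_wei(6, ...), so the conversion helpers evidently take the token's decimal count as their first argument instead of assuming a fixed precision. A minimal sketch of helpers with that assumed signature; the project's actual functions may differ:

# Sketch of decimals-aware conversions inferred from the call sites above;
# not the project's actual implementation.
def to_wei(decimals: int, value: float) -> int:
    """Scale a human-readable amount up to the token's smallest unit."""
    return int(value * (10 ** decimals))


def from_wei(decimals: int, value: int) -> float:
    """Scale a smallest-unit amount back down to a human-readable number."""
    return value / (10 ** decimals)


# e.g. to_wei(6, 50) == 50_000_000 and from_wei(6, 25_000_000) == 25.0
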
@ -6,8 +6,10 @@ def test_state_machine(activated_account_ussd_session,
                       celery_session_worker,
                       init_database,
                       init_state_machine,
                       pending_account):
                       load_languages,
                       pending_account,
                       set_locale_files):
    state_machine = UssdStateMachine(activated_account_ussd_session)
    state_machine.scan_data(('1', activated_account_ussd_session, pending_account, init_database))
    assert state_machine.__repr__() == f'<KenyaUssdStateMachine: {state_machine.state}>'
    assert state_machine.state == 'initial_pin_entry'
    assert state_machine.state == 'account_creation_prompt'
@ -4,15 +4,18 @@ import json
# external imports
import celery
import pytest
import requests_mock
from chainlib.hash import strip_0x
from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.statement import filter_statement_transactions
from cic_ussd.account.tokens import collate_token_metadata
from cic_ussd.account.transaction import transaction_actors
from cic_ussd.cache import cache_data_key, get_cached_data
from cic_ussd.db.models.account import Account
from cic_ussd.error import AccountCreationDataNotFound
from cic_ussd.metadata import TokenMetadata


# test imports
@ -22,11 +25,13 @@ from tests.helpers.accounts import blockchain_address
def test_account_creation_callback(account_creation_data,
                                   cache_account_creation_data,
                                   celery_session_worker,
                                   cache_default_token_data,
                                   custom_metadata,
                                   init_cache,
                                   init_database,
                                   load_chain_spec,
                                   mocker,
                                   preferences,
                                   setup_metadata_request_handler,
                                   setup_metadata_signer):
    phone_number = account_creation_data.get('phone_number')
@ -48,10 +53,12 @@ def test_account_creation_callback(account_creation_data,
    cached_account_creation_data = get_cached_data(task_uuid)
    cached_account_creation_data = json.loads(cached_account_creation_data)
    assert cached_account_creation_data.get('status') == account_creation_data.get('status')
    mock_add_preferences_metadata = mocker.patch('cic_ussd.tasks.metadata.add_preferences_metadata.apply_async')
    mock_add_phone_pointer = mocker.patch('cic_ussd.tasks.metadata.add_phone_pointer.apply_async')
    mock_add_custom_metadata = mocker.patch('cic_ussd.tasks.metadata.add_custom_metadata.apply_async')
    preferred_language = preferences.get('preferred_language')
    s_account_creation_callback = celery.signature(
        'cic_ussd.tasks.callback_handler.account_creation_callback', [result, '', 0]
        'cic_ussd.tasks.callback_handler.account_creation_callback', [result, preferred_language, 0]
    )
    s_account_creation_callback.apply_async().get()
    account = init_database.query(Account).filter_by(phone_number=phone_number).first()
@ -59,6 +66,7 @@ def test_account_creation_callback(account_creation_data,
    cached_account_creation_data = get_cached_data(task_uuid)
    cached_account_creation_data = json.loads(cached_account_creation_data)
    assert cached_account_creation_data.get('status') == 'CREATED'
    mock_add_preferences_metadata.assert_called_with((result, preferences), {}, queue='cic-ussd')
    mock_add_phone_pointer.assert_called_with((result, phone_number), {}, queue='cic-ussd')
    mock_add_custom_metadata.assert_called_with((result, custom_metadata), {}, queue='cic-ussd')

@ -117,12 +125,46 @@ def test_statement_callback(activated_account, mocker, transactions_list):
        (activated_account.blockchain_address, sender_transaction), {}, queue='cic-ussd')


def test_token_data_callback(activated_account,
                             cache_token_data,
                             cache_token_meta_symbol,
                             cache_token_proof_symbol,
                             celery_session_worker,
                             default_token_data,
                             init_cache,
                             token_meta_symbol,
                             token_symbol):
    blockchain_address = activated_account.blockchain_address
    identifier = token_symbol.encode('utf-8')
    status_code = 1
    with pytest.raises(ValueError) as error:
        s_token_data_callback = celery.signature(
            'cic_ussd.tasks.callback_handler.token_data_callback',
            [[default_token_data], blockchain_address, status_code])
        s_token_data_callback.apply_async().get()
    assert str(error.value) == f'Unexpected status code: {status_code}.'

    token_data_key = cache_data_key([bytes.fromhex(blockchain_address), identifier], MetadataPointer.TOKEN_DATA)
    token_meta_key = cache_data_key(identifier, MetadataPointer.TOKEN_META_SYMBOL)
    token_info_key = cache_data_key(identifier, MetadataPointer.TOKEN_PROOF_SYMBOL)
    token_meta = get_cached_data(token_meta_key)
    token_meta = json.loads(token_meta)
    token_info = get_cached_data(token_info_key)
    token_info = json.loads(token_info)
    token_data = collate_token_metadata(token_info=token_info, token_metadata=token_meta)
    token_data = {**token_data, **default_token_data}
    cached_token_data = json.loads(get_cached_data(token_data_key))
    for key, value in token_data.items():
        assert token_data[key] == cached_token_data[key]


def test_transaction_balances_callback(activated_account,
                                       balances,
                                       cache_balances,
                                       cache_default_token_data,
                                       cache_token_data,
                                       cache_person_metadata,
                                       cache_preferences,
                                       celery_session_worker,
                                       load_chain_spec,
                                       mocker,
                                       preferences,
@ -157,7 +199,16 @@ def test_transaction_balances_callback(activated_account,
    mocked_chain.assert_called()


def test_transaction_callback(load_chain_spec, mock_async_balance_api_query, transaction_result):
def test_transaction_callback(cache_token_data,
                              celery_session_worker,
                              default_token_data,
                              init_cache,
                              load_chain_spec,
                              mock_async_balance_api_query,
                              token_symbol,
                              token_meta_symbol,
                              token_proof_symbol,
                              transaction_result):
    status_code = 1
    with pytest.raises(ValueError) as error:
        s_transaction_callback = celery.signature(
@ -166,13 +217,19 @@ def test_transaction_callback(load_chain_spec, mock_async_balance_api_query, tra
        s_transaction_callback.apply_async().get()
    assert str(error.value) == f'Unexpected status code: {status_code}.'

    status_code = 0
    s_transaction_callback = celery.signature(
        'cic_ussd.tasks.callback_handler.transaction_callback',
        [transaction_result, 'transfer', status_code])
    s_transaction_callback.apply_async().get()
    recipient_transaction, sender_transaction = transaction_actors(transaction_result)
    assert mock_async_balance_api_query.get('address') == recipient_transaction.get('blockchain_address') or sender_transaction.get('blockchain_address')
    assert mock_async_balance_api_query.get('token_symbol') == recipient_transaction.get('token_symbol') or sender_transaction.get('token_symbol')
    with requests_mock.Mocker(real_http=False) as request_mocker:
        identifier = token_symbol.encode('utf-8')
        metadata_client = TokenMetadata(identifier, cic_type=MetadataPointer.TOKEN_META_SYMBOL)
        request_mocker.register_uri('GET', metadata_client.url, json=token_meta_symbol, status_code=200, reason='OK')
        metadata_client = TokenMetadata(identifier, cic_type=MetadataPointer.TOKEN_PROOF_SYMBOL)
        request_mocker.register_uri('GET', metadata_client.url, json=token_proof_symbol, status_code=200, reason='OK')
        status_code = 0
        s_transaction_callback = celery.signature(
            'cic_ussd.tasks.callback_handler.transaction_callback',
            [transaction_result, 'transfer', status_code])
        s_transaction_callback.apply_async().get()
        recipient_transaction, sender_transaction = transaction_actors(transaction_result)
        assert mock_async_balance_api_query.get('address') == recipient_transaction.get('blockchain_address') or sender_transaction.get('blockchain_address')
        assert mock_async_balance_api_query.get('token_symbol') == recipient_transaction.get('token_symbol') or sender_transaction.get('token_symbol')
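
All of these callback tests share one shape: build a celery.signature for the task under test by name, run it against the session worker with apply_async(), and block on .get() before asserting on cached side effects. A condensed version of the pattern, with placeholder task name and arguments:

import celery

# Placeholder task name and arguments; the shape mirrors the tests above.
s_task = celery.signature('cic_ussd.tasks.callback_handler.some_callback', [{'payload': 1}, 'transfer', 0])
result = s_task.apply_async().get()  # blocks until the celery_session_worker has run the task
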
@ -14,13 +14,14 @@ from cic_ussd.translation import translation_for


def test_transaction(cache_default_token_data,
                     cache_token_data,
                     celery_session_worker,
                     load_support_phone,
                     mock_notifier_api,
                     notification_data,
                     set_locale_files):
    notification_data['transaction_type'] = 'transfer'
    amount = from_wei(notification_data.get('token_value'))
    amount = from_wei(6, notification_data.get('token_value'))
    balance = notification_data.get('available_balance')
    phone_number = notification_data.get('phone_number')
    preferred_language = notification_data.get('preferred_language')
@ -52,6 +52,11 @@ def test_cache_statement(activated_account,
    cached_statement = get_cached_data(key)
    cached_statement = json.loads(cached_statement)
    assert len(cached_statement) == 1

    sender_transaction['token_value'] = 60.0
    s_parse_transaction = celery.signature(
        'cic_ussd.tasks.processor.parse_transaction', [sender_transaction])
    result = s_parse_transaction.apply_async().get()
    s_cache_statement = celery.signature(
        'cic_ussd.tasks.processor.cache_statement', [result, activated_account.blockchain_address]
    )
162
apps/cic-ussd/tests/fixtures/account.py
vendored
@ -8,6 +8,7 @@ from cic_types.condiments import MetadataPointer

# local imports
from cic_ussd.account.chain import Chain
from cic_ussd.account.tokens import set_active_token
from cic_ussd.cache import cache_data, cache_data_key
from cic_ussd.db.enum import AccountStatus
from cic_ussd.db.models.account import Account
@ -36,6 +37,16 @@ def activated_account(init_database, set_fernet_key):
    return account


@pytest.fixture(scope='function')
def guardian_account(init_database, set_fernet_key):
    account = Account(blockchain_address(), phone_number())
    account.create_password('0000')
    account.activate_account()
    init_database.add(account)
    init_database.commit()
    return account


@pytest.fixture(scope='function')
def balances():
    return [{
@ -53,13 +64,22 @@ def cache_account_creation_data(init_cache, account_creation_data):


@pytest.fixture(scope='function')
def cache_balances(activated_account, balances, init_cache):
    identifier = bytes.fromhex(activated_account.blockchain_address)
def cache_balances(activated_account, balances, init_cache, token_symbol):
    identifier = [bytes.fromhex(activated_account.blockchain_address), token_symbol.encode('utf-8')]
    balances = json.dumps(balances[0])
    key = cache_data_key(identifier, MetadataPointer.BALANCES)
    cache_data(key, balances)


@pytest.fixture(scope='function')
def cache_adjusted_balances(activated_account, balances, init_cache, token_symbol):
    identifier = bytes.fromhex(activated_account.blockchain_address)
    balances_identifier = [identifier, token_symbol.encode('utf-8')]
    key = cache_data_key(balances_identifier, MetadataPointer.BALANCES_ADJUSTED)
    adjusted_balance = 45931650.64654012
    cache_data(key, adjusted_balance)


@pytest.fixture(scope='function')
def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
    chain_str = Chain.spec.__str__()
@ -68,6 +88,113 @@ def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
    cache_data(key, data)


@pytest.fixture(scope='function')
def set_active_token(activated_account, init_cache, token_symbol):
    identifier = bytes.fromhex(activated_account.blockchain_address)
    key = cache_data_key(identifier, MetadataPointer.TOKEN_ACTIVE)
    cache_data(key=key, data=token_symbol)


@pytest.fixture(scope='function')
def cache_token_data(activated_account, init_cache, token_data):
    identifier = [bytes.fromhex(activated_account.blockchain_address), token_data.get('symbol').encode('utf-8')]
    key = cache_data_key(identifier, MetadataPointer.TOKEN_DATA)
    cache_data(key=key, data=json.dumps(token_data))


@pytest.fixture(scope='function')
def cache_token_symbol_list(activated_account, init_cache, token_symbol):
    identifier = bytes.fromhex(activated_account.blockchain_address)
    key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_SYMBOLS_LIST)
    token_symbols_list = [token_symbol]
    cache_data(key, json.dumps(token_symbols_list))


@pytest.fixture(scope='function')
def cache_token_data_list(activated_account, init_cache, token_data):
    identifier = bytes.fromhex(activated_account.blockchain_address)
    key = cache_data_key(identifier, MetadataPointer.TOKEN_DATA_LIST)
    token_data_list = [token_data]
    cache_data(key, json.dumps(token_data_list))


@pytest.fixture(scope='function')
def token_meta_symbol():
    return {
        "contact": {
            "phone": "+254700000000",
            "email": "info@grassrootseconomics.org"
        },
        "country_code": "KE",
        "location": "Kilifi",
        "name": "GRASSROOTS ECONOMICS"
    }


@pytest.fixture(scope='function')
def token_proof_symbol():
    return {
        "description": "Community support",
        "issuer": "Grassroots Economics",
        "namespace": "ge",
        "proofs": [
            "0x4746540000000000000000000000000000000000000000000000000000000000",
            "1f0f0e3e9db80eeaba22a9d4598e454be885855d6048545546fd488bb709dc2f"
        ],
        "version": 0
    }


@pytest.fixture(scope='function')
def token_list_entries():
    return [
        {
            'name': 'Fee',
            'symbol': 'FII',
            'issuer': 'Foo',
            'contact': {'phone': '+254712345678'},
            'location': 'Fum',
            'balance': 50.0
        },
        {
            'name': 'Giftable Token',
            'symbol': 'GFT',
            'issuer': 'Grassroots Economics',
            'contact': {
                'phone': '+254700000000',
                'email': 'info@grassrootseconomics.org'
            },
            'location': 'Fum',
            'balance': 60.0
        },
        {
            'name': 'Demurrage Token',
            'symbol': 'DET',
            'issuer': 'Grassroots Economics',
            'contact': {
                'phone': '+254700000000',
                'email': 'info@grassrootseconomics.org'
            },
            'location': 'Fum',
            'balance': 49.99
        }
    ]


@pytest.fixture(scope='function')
def cache_token_meta_symbol(token_meta_symbol, token_symbol):
    identifier = token_symbol.encode('utf-8')
    key = cache_data_key(identifier, MetadataPointer.TOKEN_META_SYMBOL)
    cache_data(key, json.dumps(token_meta_symbol))


@pytest.fixture(scope='function')
def cache_token_proof_symbol(token_proof_symbol, token_symbol):
    identifier = token_symbol.encode('utf-8')
    key = cache_data_key(identifier, MetadataPointer.TOKEN_PROOF_SYMBOL)
    cache_data(key, json.dumps(token_proof_symbol))


@pytest.fixture(scope='function')
def cache_person_metadata(activated_account, init_cache, person_metadata):
    identifier = bytes.fromhex(activated_account.blockchain_address)
@ -100,10 +227,33 @@ def custom_metadata():
@pytest.fixture(scope='function')
def default_token_data(token_symbol):
    return {
        'symbol': token_symbol,
        'address': blockchain_address(),
        'name': 'Giftable',
        'decimals': 6
        'symbol': token_symbol,
        'address': '32e860c2a0645d1b7b005273696905f5d6dc5d05',
        'name': 'Giftable Token',
        'decimals': 6,
        "converters": []
    }


@pytest.fixture(scope='function')
def token_data():
    return {
        "description": "Community support",
        "issuer": "Grassroots Economics",
        "location": "Kilifi",
        "contact": {
            "phone": "+254700000000",
            "email": "info@grassrootseconomics.org"
        },
        "decimals": 6,
        "name": "Giftable Token",
        "symbol": "GFT",
        "address": "32e860c2a0645d1b7b005273696905f5d6dc5d05",
        "proofs": [
            "0x4746540000000000000000000000000000000000000000000000000000000000",
            "1f0f0e3e9db80eeaba22a9d4598e454be885855d6048545546fd488bb709dc2f"
        ],
        "converters": []
    }
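
Several of the reworked fixtures now pass cache_data_key either a single bytes identifier or a list of identifiers (account address plus token symbol) together with a MetadataPointer salt, so cache entries are scoped per token as well as per account. A rough sketch of how such a composite key could be derived; this is an assumption for illustration, not the cic_ussd.cache implementation:

import hashlib

# Assumed behaviour only: hash one or more byte identifiers together with a salt.
def composite_cache_key(identifier, salt) -> str:
    parts = identifier if isinstance(identifier, list) else [identifier]
    hasher = hashlib.sha256()
    for part in parts:
        hasher.update(part)
    hasher.update(str(salt).encode('utf-8'))  # salt is a MetadataPointer member in the fixtures
    return hasher.hexdigest()

# usage (illustrative): composite_cache_key([address_bytes, b'GFT'], MetadataPointer.TOKEN_DATA)
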
12
apps/cic-ussd/tests/fixtures/cache.py
vendored
@ -2,14 +2,18 @@

# external imports
import pytest
from pytest_redis import factories

# local imports
from cic_ussd.cache import Cache
from cic_ussd.session.ussd_session import UssdSession

redis_test_proc = factories.redis_proc()
redis_db = factories.redisdb('redis_test_proc', decode=True)


@pytest.fixture(scope='function')
def init_cache(redisdb):
    Cache.store = redisdb
    UssdSession.store = redisdb
    return redisdb
def init_cache(redis_db):
    Cache.store = redis_db
    UssdSession.store = redis_db
    return redis_db
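
The fixture module now declares its own Redis process and client through pytest_redis factories instead of relying on the plugin's default redisdb fixture, which makes the external dependency explicit and requests decoded (str) responses. Any test that pulls in init_cache then talks to that per-test client; roughly as follows (the test name is illustrative):

# Illustrative use of the init_cache fixture defined above.
def test_cache_roundtrip(init_cache):
    init_cache.set('greeting', 'hello')
    assert init_cache.get('greeting') == 'hello'  # decode=True yields str rather than bytes
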
33
apps/cic-ussd/tests/fixtures/config.py
vendored
@ -10,11 +10,13 @@ from confini import Config

# local imports
from cic_ussd.account.chain import Chain
from cic_ussd.account.guardianship import Guardianship
from cic_ussd.encoder import PasswordEncoder
from cic_ussd.files.local_files import create_local_file_data_stores, json_file_parser
from cic_ussd.menu.ussd_menu import UssdMenu
from cic_ussd.phone_number import E164Format, Support
from cic_ussd.state_machine import UssdStateMachine
from cic_ussd.translation import generate_locale_files, Languages
from cic_ussd.validator import validate_presence

logg = logging.getLogger(__name__)
@ -39,6 +41,14 @@ def init_state_machine(load_config):
    UssdStateMachine.transitions = json_file_parser(filepath=load_config.get('MACHINE_TRANSITIONS'))


@pytest.fixture(scope='function')
def load_languages(init_cache, load_config):
    validate_presence(load_config.get('LANGUAGES_FILE'))
    Languages.load_languages_dict(load_config.get('LANGUAGES_FILE'))
    languages = Languages()
    languages.cache_system_languages()


@pytest.fixture(scope='function')
def load_chain_spec(load_config):
    chain_spec = ChainSpec.from_chain_str(load_config.get('CHAIN_SPEC'))
@ -75,8 +85,23 @@ def set_fernet_key(load_config):
    PasswordEncoder.set_key(load_config.get('APP_PASSWORD_PEPPER'))


@pytest.fixture
def set_locale_files(load_config):
    validate_presence(load_config.get('LOCALE_PATH'))
    i18n.load_path.append(load_config.get('LOCALE_PATH'))
@pytest.fixture(scope='function')
def setup_guardianship(load_config):
    guardians_file = os.path.join(root_directory, load_config.get('SYSTEM_GUARDIANS_FILE'))
    validate_presence(guardians_file)
    Guardianship.load_system_guardians(guardians_file)


@pytest.fixture(scope="session")
def set_locale_files(load_config, tmpdir_factory):
    tmpdir = tmpdir_factory.mktemp("var")
    tmpdir_path = str(tmpdir)
    validate_presence(tmpdir_path)
    import cic_translations
    package_path = cic_translations.__path__
    schema_files = os.path.join(package_path[0], load_config.get("SCHEMA_FILE_PATH"))
    generate_locale_files(locale_dir=tmpdir_path,
                          schema_file_path=schema_files,
                          translation_builder_path=load_config.get('LOCALE_FILE_BUILDERS'))
    i18n.load_path.append(tmpdir_path)
    i18n.set('fallback', load_config.get('LOCALE_FALLBACK'))
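
set_locale_files is now session-scoped: it generates the locale files from the cic_translations schema into a temporary directory, appends that directory to python-i18n's load path and sets the fallback locale. The tests then resolve messages through translation_for(key, language, **params); a plausible reduction of that helper to the i18n API it appears to wrap (an assumption, not the actual source):

import i18n

# Assumed shape of the translation lookup used throughout these tests.
def translation_for_sketch(key: str, preferred_language: str, **kwargs) -> str:
    return i18n.t(key, locale=preferred_language, **kwargs)

# e.g. translation_for_sketch('sms.pin_reset_initiated', 'en', pin_initiator='Jane D.')
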
3
apps/cic-ussd/tests/fixtures/transaction.py
vendored
@ -40,6 +40,7 @@ def statement(activated_account):
        'blockchain_address': activated_account.blockchain_address,
        'token_symbol': 'GFT',
        'token_value': 25000000,
        'token_decimals': 6,
        'role': 'sender',
        'action_tag': 'Sent',
        'direction_tag': 'To',
@ -63,7 +64,7 @@ def transaction_result(activated_account, load_config, valid_recipient):
        'destination_token_symbol': load_config.get('TEST_TOKEN_SYMBOL'),
        'source_token_decimals': 6,
        'destination_token_decimals': 6,
        'chain': 'evm:bloxberg:8996'
        'chain': load_config.get('CHAIN_SPEC')
    }
@ -0,0 +1 @@
+254700000000
@ -574,9 +574,9 @@ products_edit_pin_authorization.first,"CON Please enter your PIN
0. Dheebi"
products_edit_pin_authorization.retry,%{retry_pin_entry},%{retry_pin_entry},%{retry_pin_entry},%{retry_pin_entry},%{retry_pin_entry},%{retry_pin_entry},%{retry_pin_entry}
account_balances.available_balance,"CON Your balances are as follows:
balance: %{available_balance} %{token_symbol}
%{available_balance} %{token_symbol}
0. Back","CON Salio zako ni zifuatazo:
salio: %{available_balance} %{token_symbol}
%{available_balance} %{token_symbol}
0. Rudi","CON Utyalo waku ni uu:
utyalo: %{available_balance} %{token_symbol}
0. Syoka itina","CON Matigari maku ni maya:
@ -659,9 +659,11 @@ first_transaction_set,"CON %{first_transaction_set}
1. Dhuur
00. Bai"
middle_transaction_set,"CON %{middle_transaction_set}

11. Next
22. Previous
00. Exit","CON %{middle_transaction_set}

11. Mbele
22. Rudi
00. Ondoka","CON %{middle_transaction_set}
@ -681,8 +683,10 @@ middle_transaction_set,"CON %{middle_transaction_set}
2. Dheebi
00. Bai"
last_transaction_set,"CON %{last_transaction_set}

22. Previous
00. Exit","CON %{last_transaction_set}

22. Rudi
00. Ondoka","CON %{last_transaction_set}
2. Itina