From be31de0a8a4051fe620661feb6f8f93c4188823a Mon Sep 17 00:00:00 2001
From: PhilipWafula
Date: Mon, 22 Nov 2021 12:59:11 +0300
Subject: [PATCH] Adds auxiliary functions to handle multi-token operations.

---
 apps/cic-ussd/cic_ussd/account/tokens.py | 262 +++++++++++++++++++++--
 1 file changed, 243 insertions(+), 19 deletions(-)

diff --git a/apps/cic-ussd/cic_ussd/account/tokens.py b/apps/cic-ussd/cic_ussd/account/tokens.py
index e9388034..bc0fda0a 100644
--- a/apps/cic-ussd/cic_ussd/account/tokens.py
+++ b/apps/cic-ussd/cic_ussd/account/tokens.py
@@ -2,20 +2,22 @@
 import hashlib
 import json
 import logging
-from typing import Dict, Optional
+from typing import Optional, Union
 
 # external imports
 from cic_eth.api import Api
 from cic_types.condiments import MetadataPointer
 
 # local imports
+from cic_ussd.account.balance import get_cached_available_balance
 from cic_ussd.account.chain import Chain
-from cic_ussd.cache import cache_data_key, get_cached_data
+from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
 from cic_ussd.error import CachedDataNotFoundError, SeppukuError
 from cic_ussd.metadata.tokens import query_token_info, query_token_metadata
+from cic_ussd.processor.util import wait_for_cache
+from cic_ussd.translation import translation_for
 
-
-logg = logging.getLogger(__name__)
+logg = logging.getLogger(__file__)
 
 
 def collate_token_metadata(token_info: dict, token_metadata: dict) -> dict:
@@ -27,6 +29,7 @@ def collate_token_metadata(token_info: dict, token_metadata: dict) -> dict:
     :return:
     :rtype:
     """
+    logg.debug(f'Collating token info: {token_info} and token metadata: {token_metadata}')
     description = token_info.get('description')
     issuer = token_info.get('issuer')
     location = token_metadata.get('location')
@@ -39,6 +42,79 @@ def collate_token_metadata(token_info: dict, token_metadata: dict) -> dict:
     }
 
 
+def create_account_tokens_list(blockchain_address: str):
+    """Creates and caches an ordered list of token data for all tokens held by an account.
+    :param blockchain_address: hex encoded address of the account.
+    :type blockchain_address: str
+    """
+    token_symbols_list = get_cached_token_symbol_list(blockchain_address=blockchain_address)
+    token_list_entries = []
+    if token_symbols_list:
+        logg.debug(f'Token symbols: {token_symbols_list} for account: {blockchain_address}')
+        for token_symbol in token_symbols_list:
+            entry = {}
+            logg.debug(f'Processing token data for: {token_symbol}')
+            key = cache_data_key([bytes.fromhex(blockchain_address), token_symbol.encode('utf-8')], MetadataPointer.TOKEN_DATA)
+            token_data = get_cached_data(key)
+            token_data = json.loads(token_data)
+            logg.debug(f'Retrieved token data: {token_data} for: {token_symbol}')
+            token_name = token_data.get('name')
+            entry['name'] = token_name
+            token_symbol = token_data.get('symbol')
+            entry['symbol'] = token_symbol
+            token_issuer = token_data.get('issuer')
+            entry['issuer'] = token_issuer
+            token_contact = token_data['contact'].get('phone')
+            entry['contact'] = token_contact
+            token_location = token_data.get('location')
+            entry['location'] = token_location
+            decimals = token_data.get('decimals')
+            identifier = [bytes.fromhex(blockchain_address), token_symbol.encode('utf-8')]
+            wait_for_cache(identifier, f'Cached available balance for token: {token_symbol}', MetadataPointer.BALANCES)
+            token_balance = get_cached_available_balance(decimals=decimals, identifier=identifier)
+            entry['balance'] = token_balance
+            logg.debug(f'APPENDING: {entry}')
+            token_list_entries.append(entry)
+    logg.debug(f'TOKEN LIST ENTRIES: {token_list_entries}')
+    account_tokens_list = order_account_tokens_list(token_list_entries, bytes.fromhex(blockchain_address))
+    logg.debug(f'ORDERED ACCOUNT TOKENS LIST: {account_tokens_list}')
+    key = cache_data_key(bytes.fromhex(blockchain_address), MetadataPointer.TOKEN_DATA_LIST)
+    cache_data(key, json.dumps(account_tokens_list))
+
+
+def get_active_token_symbol(blockchain_address: str):
+    """Retrieves the cached active token symbol for an account.
+    :param blockchain_address: hex encoded address of the account.
+    :type blockchain_address: str
+    :raises CachedDataNotFoundError: no active token is set for the account.
+    :return: active token symbol.
+    :rtype: str
+    """
+    identifier = bytes.fromhex(blockchain_address)
+    key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_ACTIVE)
+    active_token_symbol = get_cached_data(key)
+    if not active_token_symbol:
+        raise CachedDataNotFoundError('No active token set.')
+    return active_token_symbol
+
+
+def get_cached_token_data(blockchain_address: str, token_symbol: str):
+    """Retrieves cached token data for a token associated with an account.
+    :param blockchain_address: hex encoded address of the account.
+    :type blockchain_address: str
+    :param token_symbol: the token's symbol.
+    :type token_symbol: str
+    :return: token data.
+    :rtype: dict
+    """
+    identifier = [bytes.fromhex(blockchain_address), token_symbol.encode('utf-8')]
+    key = cache_data_key(identifier, MetadataPointer.TOKEN_DATA)
+    logg.debug(f'Retrieving token data for: {token_symbol} at: {key}')
+    token_data = get_cached_data(key)
+    return json.loads(token_data)
+
+
 def get_cached_default_token(chain_str: str) -> Optional[str]:
     """This function attempts to retrieve the default token's data from the redis cache.
     :param chain_str: chain name and network id.
@@ -72,31 +148,138 @@ def get_default_token_symbol():
         raise SeppukuError(f'Could not retrieve default token for: {chain_str}')
 
 
-def hashed_token_proof(token_proof: dict) -> str:
+def get_cached_token_symbol_list(blockchain_address: str) -> Optional[list]:
+    """Retrieves the cached list of token symbols associated with an account.
+    :param blockchain_address: hex encoded address of the account.
+    :type blockchain_address: str
+    :return: list of token symbols, if any are cached.
+    :rtype: Optional[list]
+    """
+    key = cache_data_key(identifier=bytes.fromhex(blockchain_address), salt=MetadataPointer.TOKEN_SYMBOLS_LIST)
+    token_symbols_list = get_cached_data(key)
+    if token_symbols_list:
+        return json.loads(token_symbols_list)
+    return token_symbols_list
+
+
+def get_cached_token_data_list(blockchain_address: str) -> Optional[list]:
+    """Retrieves the cached, ordered list of token data entries for an account.
+    :param blockchain_address: hex encoded address of the account.
+    :type blockchain_address: str
+    :return: list of token data entries, if any are cached.
+    :rtype: Optional[list]
+    """
+    key = cache_data_key(bytes.fromhex(blockchain_address), MetadataPointer.TOKEN_DATA_LIST)
+    token_data_list = get_cached_data(key)
+    if token_data_list:
+        return json.loads(token_data_list)
+    return token_data_list
+
+
+def handle_token_symbol_list(blockchain_address: str, token_symbol: str):
+    """Adds a token symbol to an account's cached token symbol list, creating the list if it does not exist.
+    :param blockchain_address: hex encoded address of the account.
+    :type blockchain_address: str
+    :param token_symbol: token symbol to add to the list.
+    :type token_symbol: str
+    """
+    token_symbol_list = get_cached_token_symbol_list(blockchain_address)
+    if token_symbol_list:
+        if token_symbol not in token_symbol_list:
+            token_symbol_list.append(token_symbol)
+    else:
+        token_symbol_list = [token_symbol]
+
+    identifier = bytes.fromhex(blockchain_address)
+    key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_SYMBOLS_LIST)
+    data = json.dumps(token_symbol_list)
+    cache_data(key, data)
+
+
+def hashed_token_proof(token_proof: Union[dict, str]) -> str:
     """
     :param token_proof:
     :type token_proof:
     :return:
     :rtype:
     """
-
+    if isinstance(token_proof, dict):
+        token_proof = json.dumps(token_proof)
+    logg.debug(f'Hashing token proof: {token_proof}')
     hash_object = hashlib.new("sha256")
-    hash_object.update(json.dumps(token_proof).encode('utf-8'))
+    hash_object.update(token_proof.encode('utf-8'))
     return hash_object.digest().hex()
 
 
-def process_token_data(token_symbol: str):
+def order_account_tokens_list(account_tokens_list: list, identifier: bytes) -> list:
     """
+    :param account_tokens_list: list of token data entries associated with an account.
+    :type account_tokens_list: list
+    :param identifier: byte representation of the account's blockchain address.
+    :type identifier: bytes
+    :return: token data entries ordered by last sent, last received and then balance.
+    :rtype: list
+    """
+    logg.debug(f'RECEIVED ACCOUNT TOKENS LIST: {account_tokens_list}')
+    ordered_tokens_list = []
+    # get last sent token
+    key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_LAST_SENT)
+    last_sent_token_symbol = get_cached_data(key)
+    logg.debug(f'LAST SENT TOKEN: {last_sent_token_symbol}')
+
+    # get last received token
+    key = cache_data_key(identifier=identifier, salt=MetadataPointer.TOKEN_LAST_RECEIVED)
+    last_received_token_symbol = get_cached_data(key)
+    logg.debug(f'LAST RECEIVED TOKEN: {last_received_token_symbol}')
+
+    last_sent_token_data, remaining_accounts_token_list = remove_from_account_tokens_list(account_tokens_list, last_sent_token_symbol)
+    if last_sent_token_data:
+        ordered_tokens_list.append(last_sent_token_data[0])
+    logg.debug(f'ORDERED TOKEN LIST AFTER REMOVING SENT: {ordered_tokens_list}')
+    logg.debug(f'REMAINING TOKEN LIST AFTER REMOVING SENT: {remaining_accounts_token_list}')
+
+    last_received_token_data, remaining_accounts_token_list = remove_from_account_tokens_list(remaining_accounts_token_list, last_received_token_symbol)
+    if last_received_token_data:
+        ordered_tokens_list.append(last_received_token_data[0])
+    logg.debug(f'ORDERED TOKEN LIST AFTER REMOVING RECEIVED: {ordered_tokens_list}')
+    logg.debug(f'REMAINING TOKEN LIST AFTER REMOVING RECEIVED: {remaining_accounts_token_list}')
+
+    # order the remaining tokens by balance
+    ordered_by_balance = sorted(remaining_accounts_token_list, key=lambda d: d['balance'], reverse=True)
+    logg.debug(f'ORDERED BY BALANCE FOR REMAINING LIST: {ordered_by_balance}')
+    return ordered_tokens_list + ordered_by_balance
+
+
+def parse_token_list(account_token_list: list):
+    """Formats each token entry as a numbered '<symbol> <balance>' string for display.
+    :param account_token_list: ordered list of token data entries.
+    :type account_token_list: list
+    :return: list of token display strings.
+    :rtype: list
+    """
+    parsed_token_list = []
+    for i in range(len(account_token_list)):
+        token_symbol = account_token_list[i].get('symbol')
+        token_balance = account_token_list[i].get('balance')
+        token_data_repr = f'{i+1}. {token_symbol} {token_balance}'
+        parsed_token_list.append(token_data_repr)
+    return parsed_token_list
+
+
+def process_token_data(blockchain_address: str, token_symbol: str):
+    """Queries a token's metadata and info, then requests its token data from cic-eth.
+    :param blockchain_address: hex encoded address of the account.
+    :type blockchain_address: str
     :param token_symbol:
     :type token_symbol:
     :return:
     :rtype:
     """
+    logg.debug(f'Processing token data for token: {token_symbol}')
     identifier = token_symbol.encode('utf-8')
     query_token_metadata(identifier=identifier)
     token_info = query_token_info(identifier=identifier)
     hashed_token_info = hashed_token_proof(token_proof=token_info)
-    query_token_data(hashed_proofs=[hashed_token_info], token_symbols=[token_symbol])
+    query_token_data(blockchain_address=blockchain_address,
+                     hashed_proofs=[hashed_token_info],
+                     token_symbols=[token_symbol])
 
 
 def query_default_token(chain_str: str):
@@ -112,17 +295,58 @@ def query_default_token(chain_str: str):
     return default_token_request_task.get()
 
 
-def query_token_data(hashed_proofs: list, token_symbols: list):
-    """
-    :param hashed_proofs:
-    :type hashed_proofs:
-    :param token_symbols:
-    :type token_symbols:
-    :return:
-    :rtype:
-    """
-    api = Api(callback_param='',
+def query_token_data(blockchain_address: str, hashed_proofs: list, token_symbols: list):
+    """Requests token data from cic-eth; results are handled by the token data callback.
+    :param blockchain_address: hex encoded address of the account, passed as the callback param.
+    :type blockchain_address: str
+    :param hashed_proofs: hashed proofs associated with the tokens.
+    :type hashed_proofs: list
+    :param token_symbols: symbols of the tokens whose data is queried.
+    :type token_symbols: list
+    """
+    logg.debug(f'Retrieving token metadata for tokens: {", ".join(token_symbols)}')
+    api = Api(callback_param=blockchain_address,
               callback_queue='cic-ussd',
               chain_str=Chain.spec.__str__(),
              callback_task='cic_ussd.tasks.callback_handler.token_data_callback')
     api.tokens(token_symbols=token_symbols, proof=hashed_proofs)
+
+
+def remove_from_account_tokens_list(account_tokens_list: list, token_symbol: str):
+    """Splits out the entry matching a token symbol from an account's token list.
+    :param account_tokens_list: list of token data entries.
+    :type account_tokens_list: list
+    :param token_symbol: symbol of the token to remove.
+    :type token_symbol: str
+    :return: the removed entry (as a list) and the remaining token list.
+    :rtype: tuple
+    """
+    removed_token_data = []
+    for i in range(len(account_tokens_list)):
+        if account_tokens_list[i]['symbol'] == token_symbol:
+            removed_token_data.append(account_tokens_list[i])
+            del account_tokens_list[i]
+            break
+    return removed_token_data, account_tokens_list
+
+
+def set_active_token(blockchain_address: str, token_symbol: str):
+    """Sets an account's active token to the given token symbol.
+    :param blockchain_address: hex encoded address of the account.
+    :type blockchain_address: str
+    :param token_symbol: symbol of the token to set as active.
+    :type token_symbol: str
+    """
+    logg.info(f'Active token set to: {token_symbol}')
+    key = cache_data_key(identifier=bytes.fromhex(blockchain_address), salt=MetadataPointer.TOKEN_ACTIVE)
+    cache_data(key=key, data=token_symbol)
+
+
+def token_list_set(preferred_language: str, token_data_reprs: list):
+    """Builds the token list display text, falling back to a no-tokens message when the list is empty.
+    :param preferred_language: account's preferred language.
+    :type preferred_language: str
+    :param token_data_reprs: formatted token entries from parse_token_list.
+    :type token_data_reprs: list
+    :return: token list text.
+    :rtype: str
+    """
+    if not token_data_reprs:
+        return translation_for('helpers.no_tokens_list', preferred_language)
+    return ''.join(f'{token_data_repr}\n' for token_data_repr in token_data_reprs)
+
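
The ordering and menu-formatting behaviour introduced above can be sanity-checked in isolation. The snippet below is a minimal, standalone sketch and not part of the patch: it approximates the pure logic of remove_from_account_tokens_list, order_account_tokens_list and parse_token_list/token_list_set, with hard-coded sample symbols and balances standing in for the last-sent/last-received values and balances that the real helpers read from the redis cache.

# Standalone sketch: mirrors the ordering/formatting helpers in this patch with
# sample data in place of the redis cache lookups (symbols and balances are illustrative).
def remove_from_tokens_list(tokens: list, token_symbol: str) -> tuple:
    # split out the first entry matching token_symbol, keep the rest
    removed = [t for t in tokens if t['symbol'] == token_symbol][:1]
    remaining = [t for t in tokens if t['symbol'] != token_symbol]
    return removed, remaining

def order_tokens(tokens: list, last_sent: str, last_received: str) -> list:
    # last sent token first, last received second, the rest by balance (descending)
    ordered, remaining = remove_from_tokens_list(tokens, last_sent)
    received, remaining = remove_from_tokens_list(remaining, last_received)
    ordered += received
    return ordered + sorted(remaining, key=lambda t: t['balance'], reverse=True)

def render_token_list(tokens: list) -> str:
    # numbered '<symbol> <balance>' lines, as parse_token_list/token_list_set produce
    return ''.join(f"{i + 1}. {t['symbol']} {t['balance']}\n" for i, t in enumerate(tokens))

if __name__ == '__main__':
    sample_tokens = [
        {'symbol': 'AAA', 'balance': 50.0},
        {'symbol': 'BBB', 'balance': 200.0},
        {'symbol': 'CCC', 'balance': 125.0},
    ]
    print(render_token_list(order_tokens(sample_tokens, last_sent='CCC', last_received='AAA')))
    # 1. CCC 125.0
    # 2. AAA 50.0
    # 3. BBB 200.0

If the last-sent and last-received symbols are the same token, the second removal finds nothing and the entry appears only once, which matches the behaviour of order_account_tokens_list above.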