clicada/clicada/tx/tx.py
lash b0f8f39d15 usability: WIP Friendly progress output (#4)
This MR adds colorized progress statements when resolving and retrieving data for the user. It disables logging if the loglevel is set to the default (logging.WARNING at this time).

It also skips metadata lookups that have failed in the same session, speeding up retrievals when the same address repeatedly occurs in the transaction list.

closes #6
closes #5

Co-authored-by: lash <dev@holbrook.no>
Reviewed-on: #4
Co-authored-by: lash <accounts-grassrootseconomics@holbrook.no>
Co-committed-by: lash <accounts-grassrootseconomics@holbrook.no>
2022-01-21 11:11:23 +00:00

118 lines
3.7 KiB
Python
Raw Blame History

This file contains invisible Unicode characters

This file contains invisible Unicode characters that are indistinguishable to humans but may be processed differently by a computer. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

# standard imports
import urllib.request
import urllib.parse
import logging
import json
# external imports
from cic_types.models.tx import (
TokenTx,
TokenTxType,
)
# local imports
from clicada.encode import tx_normalize
from clicada.error import (
ExpiredRecordError,
MetadataNotFoundError,
)
logg = logging.getLogger(__name__)
class ResolvedTokenTx(TokenTx):
    """Token transaction enriched with human-readable labels.

    Extends TokenTx with token symbols/decimals and counterparty names,
    filled in by the resolve_* methods from the supplied token and user
    metadata stores.
    """

    def __init__(self):
        super(ResolvedTokenTx, self).__init__()
        self.source_token_name = None
        self.destination_token_name = None
        self.source_token_decimals = None
        self.destination_token_decimals = None
        # True while source and destination token are the same contract;
        # cleared in resolve_tokens when they differ
        self.symmetric = True
        self.sender_entity = None
        self.recipient_entity = None

    def resolve_tokens(self, token_store, show_decimals=False, update=False):
        """Look up token symbol and decimals for both legs of the transaction.

        :param token_store: object with by_address(address) -> (symbol, decimals)
        :param show_decimals: when False, value labels are rendered with 0 decimals
        :param update: accepted for interface symmetry; not used here
        """
        (token_symbol, token_decimals) = token_store.by_address(self.source_token)
        self.source_token_decimals = token_decimals
        self.source_token_label = token_symbol
        # NOTE(review): source-leg value is computed from to_value, not
        # from_value — looks intentional for symmetric transfers but verify
        # for the asymmetric case
        token_value = self.to_value / (10 ** token_decimals)
        show_token_decimals = token_decimals
        if not show_decimals:
            show_token_decimals = 0
        fmt = '{:.' + str(show_token_decimals) + 'f}'
        self.from_value_label = fmt.format(token_value)
        if self.destination_token != self.source_token:
            self.symmetric = False
            (token_symbol, token_decimals) = token_store.by_address(self.destination_token)
            show_token_decimals = token_decimals
        self.destination_token_label = token_symbol
        self.destination_token_decimals = token_decimals
        token_value = self.to_value / (10 ** token_decimals)
        if not show_decimals:
            show_token_decimals = 0
        fmt = '{:.' + str(show_token_decimals) + 'f}'
        self.to_value_label = fmt.format(token_value)

    def resolve_entity(self, user_store, address):
        """Resolve an address to a display name, falling back to the address
        itself when no metadata exists."""
        try:
            r = user_store.by_address(address)
        except MetadataNotFoundError:
            return address
        return str(r)

    def resolve_sender_entity(self, user_store, update=False):
        """Resolve the sender; faucet give-to transactions are labelled FAUCET."""
        if self.tx_type == TokenTxType.faucet_giveto.value:
            return 'FAUCET'
        return self.resolve_entity(user_store, self.sender)

    def resolve_recipient_entity(self, user_store, update=False):
        """Resolve the recipient address to a display name."""
        return self.resolve_entity(user_store, self.recipient)

    def resolve_entities(self, user_store, update=False):
        """Populate sender_label and recipient_label from the user store."""
        self.sender_label = self.resolve_sender_entity(user_store, update=update)
        self.recipient_label = self.resolve_recipient_entity(user_store, update=update)

    def resolve(self, token_store, user_store, show_decimals=False, update=False):
        """Resolve both token and entity metadata for this transaction."""
        self.resolve_tokens(token_store, show_decimals, update=update)
        self.resolve_entities(user_store, update=update)

    def __str__(self):
        if self.symmetric:
            return '{} {} => {} {} {}'.format(
                self.date_block_label,
                self.sender_label,
                self.recipient_label,
                self.destination_token_label,
                self.to_value_label,
            )
        # Fix: previously the asymmetric case fell through and returned None,
        # making str(tx) raise TypeError. Show both token legs explicitly.
        return '{} {} {} {} => {} {} {}'.format(
            self.date_block_label,
            self.sender_label,
            self.source_token_label,
            self.from_value_label,
            self.recipient_label,
            self.destination_token_label,
            self.to_value_label,
        )
class TxGetter:
    """Fetch paged transaction lists for an address from a cic-cache server."""

    def __init__(self, cache_url, limit=0):
        """
        :param cache_url: base URL of the cache server
        :param limit: page size; 0 means ask the server for its default limit
        """
        self.cache_url = cache_url
        self.limit = limit
        if limit == 0:
            url = urllib.parse.urljoin(self.cache_url, 'defaultlimit')
            r = urllib.request.urlopen(url)
            self.limit = json.load(r)
            logg.debug('set limit {} from {}'.format(self.limit, self.cache_url))

    def get(self, address, offset=0, limit=0):
        """Retrieve one page of transactions for the given address.

        :param address: account address to query
        :param offset: pagination offset
        :param limit: page size; 0 falls back to the default resolved at
            construction time
        :return: decoded JSON response
        """
        # Fix: the default limit fetched in __init__ was never applied;
        # a literal 0 was sent to the server instead.
        if limit == 0:
            limit = self.limit
        url = urllib.parse.urljoin(self.cache_url, '/txa/user/{}/{}/{}'.format(address, limit, offset))
        r = urllib.request.urlopen(url)
        return json.load(r)