Merge branch 'lash/token-checksum-address-fix' into 'master'

bug: Normalize token addresses in db for erc20 operations

See merge request grassrootseconomics/cic-internal-integration!304
Philip Wafula 2021-12-22 18:25:29 +00:00
commit 7d3ff89fe5
3 changed files with 14 additions and 8 deletions
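For context: the erc20 cache tasks changed below previously stored tx['to'] (the token contract address) verbatim, so a mixed-case (checksum-style) address and its normalized form could end up as different values in the db. A minimal sketch of that mismatch, assuming a plain lowercasing helper as a stand-in for the project's tx_normalize:

# Illustrative sketch only, not code from this repository.
# normalize_address() stands in for the project's tx_normalize helper.
def normalize_address(address):
    # Drop checksum casing, keep the 0x prefix.
    return '0x' + address[2:].lower()

checksummed = '0xDeAdBeEf00000000000000000000000000000001'  # as it may appear in tx['to']
normalized = normalize_address(checksummed)                  # the canonical db form

cache = {}

# Before the fix: rows were keyed on the raw tx['to'] value ...
cache[checksummed] = {'source_token': checksummed}

# ... so a lookup with the normalized form misses the row.
assert cache.get(normalized) is None

# After the fix: writes and reads agree on the normalized form.
cache[normalized] = {'source_token': normalized}
assert cache.get(normalized) == {'source_token': normalized}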

View File

@@ -515,7 +515,7 @@ class Api(ApiBase):
:param password: Password to encode the password with in the backend (careful, you will have to remember it)
:type password: str
:param register: Register the new account in accounts index backend
- :type password: bool
+ :type register: bool
:returns: uuid of root task
:rtype: celery.Task
"""

View File

@@ -395,6 +395,8 @@ def cache_transfer_data(
sender_address = tx_normalize.wallet_address(tx['from'])
recipient_address = tx_normalize.wallet_address(tx_data[0])
token_value = tx_data[1]
+ source_token_address = tx_normalize.executable_address(tx['to'])
+ destination_token_address = source_token_address
session = SessionBase.create_session()
@@ -402,8 +404,8 @@
'hash': tx_hash_hex,
'from': sender_address,
'to': recipient_address,
- 'source_token': tx['to'],
- 'destination_token': tx['to'],
+ 'source_token': source_token_address,
+ 'destination_token': destination_token_address,
'from_value': token_value,
'to_value': token_value,
}
@@ -435,14 +437,16 @@ def cache_transfer_from_data(
spender_address = tx_data[0]
recipient_address = tx_data[1]
token_value = tx_data[2]
+ source_token_address = tx_normalize.executable_address(tx['to'])
+ destination_token_address = source_token_address
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': tx['from'],
'to': recipient_address,
- 'source_token': tx['to'],
- 'destination_token': tx['to'],
+ 'source_token': source_token_address,
+ 'destination_token': destination_token_address,
'from_value': token_value,
'to_value': token_value,
}
@@ -474,14 +478,16 @@ def cache_approve_data(
sender_address = tx_normalize.wallet_address(tx['from'])
recipient_address = tx_normalize.wallet_address(tx_data[0])
token_value = tx_data[1]
+ source_token_address = tx_normalize.executable_address(tx['to'])
+ destination_token_address = source_token_address
session = SessionBase.create_session()
tx_dict = {
'hash': tx_hash_hex,
'from': sender_address,
'to': recipient_address,
- 'source_token': tx['to'],
- 'destination_token': tx['to'],
+ 'source_token': source_token_address,
+ 'destination_token': destination_token_address,
'from_value': token_value,
'to_value': token_value,
}
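Condensed, the pattern applied in the three cache_* tasks in this file is to normalize the ERC20 contract address once and reuse it for both token columns of the cached row. The sketch below is not the project's task code: the database session handling and celery wiring are omitted, and executable_address() is a hypothetical stand-in for tx_normalize.executable_address.

# Sketch of the shared pattern, not the actual cic-eth task code.
def executable_address(address):
    # Hypothetical normalizer standing in for tx_normalize.executable_address.
    return '0x' + address[2:].lower()

def build_erc20_cache_row(tx_hash_hex, tx, sender_address, recipient_address, token_value):
    # Normalize the token contract address (tx['to']) once ...
    source_token_address = executable_address(tx['to'])
    # ... and use the same normalized value for both token columns, since
    # transfer, transferFrom and approve each involve a single token.
    destination_token_address = source_token_address
    return {
        'hash': tx_hash_hex,
        'from': sender_address,
        'to': recipient_address,
        'source_token': source_token_address,
        'destination_token': destination_token_address,
        'from_value': token_value,
        'to_value': token_value,
    }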

View File

@@ -72,7 +72,7 @@ def __balance_incoming_compatible(token_address, receiver_address):
status_compare = dead()
q = q.filter(Otx.status.op('&')(status_compare)==0)
# TODO: this can change the result for the recipient if tx is later obsoleted and resubmission is delayed.
- q = q.filter(Otx.status.op('&')(StatusBits.IN_NETWORK)==StatusBits.IN_NETWORK)
+ #q = q.filter(Otx.status.op('&')(StatusBits.IN_NETWORK)==StatusBits.IN_NETWORK)
q = q.filter(TxCache.destination_token_address==token_address)
delta = 0
for r in q.all():
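The last hunk relaxes the incoming-balance query by commenting out the requirement that a transaction already carry the IN_NETWORK status bit, so incoming transfers no longer need to be observed on the network before they affect the balance; the TODO above notes the trade-off if such a transaction is later obsoleted. The bitwise filter itself uses SQLAlchemy's generic op() operator. Below is a self-contained toy of that filtering style; the model and status values are hypothetical stand-ins, not cic-eth's Otx/TxCache or StatusBits.

# Toy demonstration of bitwise status filtering with SQLAlchemy's op('&').
# Requires SQLAlchemy; the model and flag values here are made up for the example.
import enum

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class StatusBits(enum.IntEnum):
    # Hypothetical flag values.
    QUEUED = 0x01
    IN_NETWORK = 0x08
    OBSOLETE = 0x80

class Otx(Base):
    __tablename__ = 'otx'
    id = Column(Integer, primary_key=True)
    tx_hash = Column(String)
    status = Column(Integer)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Otx(tx_hash='0xaa', status=int(StatusBits.QUEUED)))
session.add(Otx(tx_hash='0xbb', status=int(StatusBits.QUEUED | StatusBits.IN_NETWORK)))
session.commit()

q = session.query(Otx)
# Keep only rows whose IN_NETWORK bit is set; this is the filter the commit
# disables for incoming balances.
q = q.filter(Otx.status.op('&')(int(StatusBits.IN_NETWORK)) == int(StatusBits.IN_NETWORK))
print([r.tx_hash for r in q.all()])  # ['0xbb']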