2021-02-18 06:04:30 +01:00
|
|
|
|
# standard imports
|
|
|
|
|
import logging
|
2021-05-18 19:13:57 +02:00
|
|
|
|
import datetime
|
2021-02-18 06:04:30 +01:00
|
|
|
|
|
2021-05-18 19:13:57 +02:00
|
|
|
|
# external imports
|
2021-02-18 06:04:30 +01:00
|
|
|
|
import moolb
|
|
|
|
|
|
|
|
|
|
# local imports
|
2021-05-18 19:13:57 +02:00
|
|
|
|
from cic_cache.db.list import (
|
|
|
|
|
list_transactions_mined,
|
|
|
|
|
list_transactions_account_mined,
|
|
|
|
|
list_transactions_mined_with_data,
|
2021-08-17 10:03:14 +02:00
|
|
|
|
list_transactions_mined_with_data_index,
|
|
|
|
|
list_transactions_account_mined_with_data_index,
|
|
|
|
|
list_transactions_account_mined_with_data,
|
2021-05-18 19:13:57 +02:00
|
|
|
|
)
|
2021-02-18 06:04:30 +01:00
|
|
|
|
|
|
|
|
|
# Module-wide logger. NOTE(review): this is the *root* logger, not
# getLogger(__name__) — presumably intentional so application-level
# handler config applies directly; confirm before changing.
logg = logging.getLogger()
|
|
|
|
|
|
|
|
|
|
|
2021-07-10 17:46:14 +02:00
|
|
|
|
# Size in bits of the bloom filters built by BloomCache (8192 bytes).
DEFAULT_FILTER_SIZE = 8192 * 8
|
2021-08-17 10:03:14 +02:00
|
|
|
|
# Fallback record count used by DataCache when the caller passes limit=0.
DEFAULT_LIMIT = 100
|
2021-07-10 17:46:14 +02:00
|
|
|
|
|
2021-05-18 19:13:57 +02:00
|
|
|
|
class Cache:
    """Base class for cache frontends, holding the shared database session."""

    def __init__(self, session):
        # Store the caller-supplied database session for use by subclasses.
        self.session = session
|
|
|
|
|
|
|
|
|
|
|
2021-05-18 19:13:57 +02:00
|
|
|
|
class BloomCache(Cache):
    """Cache frontend rendering mined transactions as bloom filters."""

    @staticmethod
    def __get_filter_size(n):
        # Filter size is (for now) fixed regardless of the requested entry
        # count; the parameter is kept for the eventual dynamic sizing.
        n = DEFAULT_FILTER_SIZE
        logg.warning('filter size hardcoded to {}'.format(n))
        return n


    def __merge_rows(self, rows, limit, oldest):
        """Fold (block_number, tx_index) rows into two bloom filters while
        tracking the lowest and highest block number seen.

        Shared backend for load_transactions and load_transactions_account,
        which previously duplicated this loop verbatim.

        :param rows: Iterable of (block_number, tx_index) tuples
        :param limit: Max number of entries, used to size the filters
        :param oldest: If True, rows are assumed ordered oldest-first;
            otherwise newest-first
        :return: Lowest block, highest block, bloom filter bytes for blocks,
            bloom filter bytes for blocks|tx
        :rtype: tuple
        """
        f_block = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        f_blocktx = moolb.Bloom(BloomCache.__get_filter_size(limit), 3)
        highest_block = -1
        lowest_block = -1
        for r in rows:
            if highest_block == -1:
                # First row is an extremum on both sides until more rows arrive.
                highest_block = r[0]
                lowest_block = r[0]
            else:
                # Row order determines which end the scan advances.
                if oldest:
                    highest_block = r[0]
                else:
                    lowest_block = r[0]
            block = r[0].to_bytes(4, byteorder='big')
            tx = r[1].to_bytes(4, byteorder='big')
            f_block.add(block)
            f_blocktx.add(block + tx)
            logg.debug('added block {} tx {} lo {} hi {}'.format(r[0], r[1], lowest_block, highest_block))
        return (lowest_block, highest_block, f_block.to_bytes(), f_blocktx.to_bytes(),)


    def load_transactions(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
        """Retrieves a list of transactions from cache and creates a bloom filter pointing to blocks and transactions.

        Block and transaction numbers are serialized as 32-bit big-endian numbers. The input to the second bloom filter is the concatenation of the serialized block number and transaction index.

        For example, if the block number is 13 and the transaction index is 42, the input are:

        block filter: 0x0d000000
        block+tx filter: 0x0d0000002a0000000

        :param offset: Offset in data set to return transactions from
        :type offset: int
        :param limit: Max number of transactions to retrieve
        :type limit: int
        :return: Lowest block, highest block, bloom filter for blocks, bloom filter for blocks|tx
        :rtype: tuple
        """
        rows = list_transactions_mined(self.session, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
        return self.__merge_rows(rows, limit, oldest)


    def load_transactions_account(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
        """Same as load_transactions(...), but only retrieves transactions where the specified account address is sender or recipient.

        :param address: Address to retrieve transactions for.
        :type address: str, 0x-hex
        :param offset: Offset in data set to return transactions from
        :type offset: int
        :param limit: Max number of transactions to retrieve
        :type limit: int
        :return: Lowest block, highest block, bloom filter for blocks, bloom filter for blocks|tx
        :rtype: tuple
        """
        rows = list_transactions_account_mined(self.session, address, offset, limit, block_offset=block_offset, block_limit=block_limit, oldest=oldest)
        return self.__merge_rows(rows, limit, oldest)
|
2021-05-18 19:13:57 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class DataCache(Cache):
    """Cache frontend returning full transaction records as dicts."""

    def load_transactions_with_data(self, offset, limit, block_offset=None, block_limit=None, oldest=False):
        """Retrieve transaction records with full data payloads.

        :param offset: Offset in data set to return transactions from
        :type offset: int
        :param limit: Max number of transactions to retrieve; 0 means
            DEFAULT_LIMIT
        :type limit: int
        :return: Lowest block, highest block, list of transaction dicts
        :rtype: tuple
        """
        if limit == 0:
            limit = DEFAULT_LIMIT
        rows = list_transactions_mined_with_data(self.session, offset, limit, block_offset, block_limit, oldest=oldest)
        return self.__process_rows(rows, oldest)


    def load_transactions_account_with_data(self, address, offset, limit, block_offset=None, block_limit=None, oldest=False):
        """Same as load_transactions_with_data(...), but only for transactions
        where the given account address is sender or recipient.

        :param address: Address to retrieve transactions for.
        :type address: str, 0x-hex
        :param offset: Offset in data set to return transactions from
        :type offset: int
        :param limit: Max number of transactions to retrieve; 0 means
            DEFAULT_LIMIT
        :type limit: int
        :return: Lowest block, highest block, list of transaction dicts
        :rtype: tuple
        """
        if limit == 0:
            limit = DEFAULT_LIMIT
        rows = list_transactions_account_mined_with_data(self.session, address, offset, limit, block_offset, block_limit, oldest=oldest)
        return self.__process_rows(rows, oldest)


    def __process_rows(self, rows, oldest):
        """Convert database rows into transaction dicts, tracking the lowest
        and highest block number seen.

        :param rows: Iterable of mapping rows with transaction fields
        :param oldest: If True, rows are assumed ordered oldest-first
        :return: Lowest block, highest block, list of transaction dicts
        :rtype: tuple
        """
        tx_cache = []
        highest_block = -1
        lowest_block = -1
        date_is_str = None  # detected from first row; TODO stick this in startup
        for r in rows:
            if highest_block == -1:
                # First row is an extremum on both sides until more rows arrive.
                highest_block = r['block_number']
                lowest_block = r['block_number']
            else:
                if oldest:
                    highest_block = r['block_number']
                else:
                    lowest_block = r['block_number']
            tx_type = 'unknown'
            if r['value'] is not None:
                tx_type = '{}.{}'.format(r['domain'], r['value'])
            if date_is_str is None:
                date_is_str = isinstance(r['date_block'], str)
            o = {
                'block_number': r['block_number'],
                'tx_hash': r['tx_hash'],
                'date_block': r['date_block'],
                'sender': r['sender'],
                'recipient': r['recipient'],
                'from_value': int(r['from_value']),
                'to_value': int(r['to_value']),
                'source_token': r['source_token'],
                'destination_token': r['destination_token'],
                'success': r['success'],
                'tx_type': tx_type,
            }
            if date_is_str:
                # Backend returned ISO-8601 strings; normalize to datetime.
                o['date_block'] = datetime.datetime.fromisoformat(r['date_block'])
            tx_cache.append(o)
        return (lowest_block, highest_block, tx_cache)
|