2021-05-18 19:13:57 +02:00
|
|
|
|
# standard imports
|
|
|
|
|
import logging
|
|
|
|
|
import json
|
|
|
|
|
import re
|
|
|
|
|
import base64
|
|
|
|
|
|
2021-07-10 17:46:14 +02:00
|
|
|
|
# external imports
|
2021-10-08 11:19:21 +02:00
|
|
|
|
from hexathon import (
|
|
|
|
|
add_0x,
|
|
|
|
|
strip_0x,
|
|
|
|
|
)
|
2021-11-08 09:58:22 +01:00
|
|
|
|
from chainlib.encode import TxHexNormalizer
|
2021-07-10 17:46:14 +02:00
|
|
|
|
|
2021-05-18 19:13:57 +02:00
|
|
|
|
# local imports
|
|
|
|
|
from cic_cache.cache import (
|
|
|
|
|
BloomCache,
|
|
|
|
|
DataCache,
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
logg = logging.getLogger(__name__)
#logg = logging.getLogger()

# Route patterns. Optional trailing path components are captured as
# regex groups; unmatched optional groups come back as None.

# /tx[/limit[/offset[/block_offset[/block_end]]]] -> all transactions, bloom filter
re_transactions_all_bloom = r'/tx/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'
# /tx/user/<address>[/limit[/offset]] -> per-account transactions, bloom filter
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
# /txa[/limit[/offset[/block_offset[/block_end]]]] -> all transactions, full data
re_transactions_all_data = r'/txa/?(\d+)?/?(\d+)?/?(\d+)?/?(\d+)?/?'
# /txa/user/<address>[/limit[/offset]] -> per-account transactions, full data
re_transactions_account_data = r'/txa/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
# /defaultlimit -> report the server-side default result limit
re_default_limit = r'/defaultlimit/?'

# Result count used when a request specifies no limit (or a limit of 0).
DEFAULT_LIMIT = 100

# Hex normalizer for addresses/tx hashes; only referenced from a
# commented-out call in parse_query_account at present.
tx_normalize = TxHexNormalizer()
|
2021-05-18 19:13:57 +02:00
|
|
|
|
|
2021-11-04 06:06:34 +01:00
|
|
|
|
def parse_query_account(r):
    """Extract query parameters from an account transaction route match.

    Group 1 of the match is the account address (with or without 0x
    prefix), group 4 the optional limit and group 6 the optional offset.

    :param r: Match from re_transactions_account_bloom or re_transactions_account_data
    :type r: re.Match
    :rtype: tuple
    :returns: (address, offset, limit) -- address stripped of 0x prefix
    """
    address = strip_0x(r[1])
    #address = tx_normalize.wallet_address(address)
    limit = DEFAULT_LIMIT
    offset = 0

    # r.groups() always has one entry per group in the pattern, with None
    # for unmatched optional groups -- so the group values themselves must
    # be tested. (The previous len(groups) checks were always true and
    # int(None) raised TypeError when limit/offset were omitted.)
    if r[4] != None:
        limit = int(r[4])
        if limit == 0:
            # a zero limit means "use the default", not "no results"
            limit = DEFAULT_LIMIT

    if r[6] != None:
        offset = int(r[6])

    logg.debug('account query is address {} offset {} limit {}'.format(address, offset, limit))

    return (address, offset, limit,)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def parse_query_any(r):
    """Extract pagination and block-range parameters from a route match.

    Match groups 1 through 4 are, in order: limit, offset, block_offset,
    block_end. All are optional; absent trailing components leave the
    corresponding defaults in place.

    :param r: Match from re_transactions_all_bloom or re_transactions_all_data
    :type r: re.Match
    :raises ValueError: if block_end is lower than block_offset
    :rtype: tuple
    :returns: (offset, limit, block_offset, block_end)
    """
    limit = DEFAULT_LIMIT
    offset = 0
    block_offset = None
    block_end = None

    # lastindex is the index of the last group that actually matched,
    # or None when no group matched at all.
    last = r.lastindex
    if last is not None:
        if last >= 1:
            limit = int(r[1])
        if last >= 2:
            offset = int(r[2])
        if last >= 3:
            block_offset = int(r[3])
        if last >= 4:
            block_end = int(r[4])
            if block_end < block_offset:
                raise ValueError('cart before the horse, dude')

    logg.debug('data query is offset {} limit {} block_offset {} block_end {}'.format(offset, limit, block_offset, block_end))

    return (offset, limit, block_offset, block_end,)
|
|
|
|
|
|
|
|
|
|
|
2021-11-04 07:59:38 +01:00
|
|
|
|
def process_default_limit(session, env):
    """Serve the server-side default result limit.

    :param session: Database session (unused by this route)
    :param env: WSGI environment dict; PATH_INFO is matched against re_default_limit
    :rtype: tuple or None
    :returns: ('application/json', body) on match, None otherwise
    """
    if re.match(re_default_limit, env.get('PATH_INFO')) is None:
        return None

    body = str(DEFAULT_LIMIT).encode('utf-8')
    return ('application/json', body,)
|
|
|
|
|
|
|
|
|
|
|
2021-05-18 19:13:57 +02:00
|
|
|
|
def process_transactions_account_bloom(session, env):
    """Render the bloom filter response for a single account's transactions.

    :param session: Database session passed to BloomCache
    :param env: WSGI environment dict; PATH_INFO is matched against re_transactions_account_bloom
    :rtype: tuple or None
    :returns: ('application/json', body) on match, None otherwise
    """
    r = re.match(re_transactions_account_bloom, env.get('PATH_INFO'))
    if r is None:
        return None
    logg.debug('match account bloom')

    (address, offset, limit,) = parse_query_account(r)

    cache = BloomCache(session)
    (low, high, f_block, f_tx) = cache.load_transactions_account(address, offset, limit)

    # filters are raw bytes; base64 them for the JSON payload
    o = {
        'alg': 'sha256',
        'low': low,
        'high': high,
        'block_filter': base64.b64encode(f_block).decode('utf-8'),
        'blocktx_filter': base64.b64encode(f_tx).decode('utf-8'),
        'filter_rounds': 3,
    }

    return ('application/json', json.dumps(o).encode('utf-8'),)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def process_transactions_all_bloom(session, env):
    """Render the bloom filter response covering all cached transactions.

    :param session: Database session passed to BloomCache
    :param env: WSGI environment dict; PATH_INFO is matched against re_transactions_all_bloom
    :rtype: tuple or None
    :returns: ('application/json', body) on match, None otherwise
    """
    r = re.match(re_transactions_all_bloom, env.get('PATH_INFO'))
    if not r:
        return None
    logg.debug('match all bloom')

    # parse_query_any returns (offset, limit, block_offset, block_end);
    # the previous unpack order (limit, offset, ...) swapped the two
    # values, so the requested limit was used as the offset and vice
    # versa. Sibling process_transactions_all_data unpacks correctly.
    (offset, limit, block_offset, block_end,) = parse_query_any(r)

    c = BloomCache(session)
    # block range is not (yet) honored by the bloom query
    (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions(offset, limit)

    # filters are raw bytes; base64 them for the JSON payload
    o = {
        'alg': 'sha256',
        'low': lowest_block,
        'high': highest_block,
        'block_filter': base64.b64encode(bloom_filter_block).decode('utf-8'),
        'blocktx_filter': base64.b64encode(bloom_filter_tx).decode('utf-8'),
        'filter_rounds': 3,
    }

    j = json.dumps(o)

    return ('application/json', j.encode('utf-8'),)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def process_transactions_all_data(session, env):
    """Render the full-data response covering all cached transactions.

    :param session: Database session passed to DataCache
    :param env: WSGI environment dict; PATH_INFO is matched against re_transactions_all_data
    :rtype: tuple or None
    :returns: ('application/json', body) on match, None otherwise
    """
    r = re.match(re_transactions_all_data, env.get('PATH_INFO'))
    if r is None:
        return None
    #if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
    #    return None
    logg.debug('match all data')
    logg.debug('got data request {}'.format(env))

    (offset, limit, block_offset, block_end) = parse_query_any(r)

    cache = DataCache(session)
    (low, high, tx_cache) = cache.load_transactions_with_data(offset, limit, block_offset, block_end, oldest=True) # oldest needs to be settable

    # JSON cannot carry datetime objects; flatten to unix timestamps
    for tx in tx_cache:
        tx['date_block'] = tx['date_block'].timestamp()

    o = {
        'low': low,
        'high': high,
        'data': tx_cache,
    }

    return ('application/json', json.dumps(o).encode('utf-8'),)
|
2021-10-08 11:19:21 +02:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def process_transactions_account_data(session, env):
    """Render the full-data response for a single account's transactions.

    :param session: Database session passed to DataCache
    :param env: WSGI environment dict; PATH_INFO is matched against re_transactions_account_data
    :rtype: tuple or None
    :returns: ('application/json', body) on match, None otherwise
    """
    r = re.match(re_transactions_account_data, env.get('PATH_INFO'))
    if r is None:
        return None
    logg.debug('match account data')
    #if env.get('HTTP_X_CIC_CACHE_MODE') != 'all':
    #    return None

    (address, offset, limit,) = parse_query_account(r)

    cache = DataCache(session)
    (low, high, tx_cache) = cache.load_transactions_account_with_data(address, offset, limit)

    # JSON cannot carry datetime objects; flatten to unix timestamps
    for tx in tx_cache:
        tx['date_block'] = tx['date_block'].timestamp()

    o = {
        'low': low,
        'high': high,
        'data': tx_cache,
    }

    return ('application/json', json.dumps(o).encode('utf-8'),)
|