Make process tx pass

nolash 2021-07-09 15:26:11 +02:00
parent e166a6737f
commit 167ff0f5e4
Signed by untrusted user who does not match committer: lash
GPG Key ID: 21D2E7BB88C2A746
2 changed files with 73 additions and 63 deletions

View File

@@ -4,6 +4,9 @@ import json
 import re
 import base64
 
+# external imports
+from hexathon import add_0x
+
 # local imports
 from cic_cache.cache import (
         BloomCache,
@@ -27,14 +30,13 @@ def process_transactions_account_bloom(session, env):
     address = r[1]
     if r[2] == None:
-        address = '0x' + address
+        address = add_0x(address)
 
     offset = 0
-    if r.lastindex > 3:
+    if r.lastindex > 2:
         offset = r[4]
     limit = DEFAULT_LIMIT
-    if r.lastindex > 5:
+    if r.lastindex > 4:
         limit = r[6]
-    raise ValueError('>>>>>>< offset {} limit {} path {}'.format(offset, limit, env.get('PATH_INFO')))
 
     c = BloomCache(session)
     (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
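The group-index arithmetic in this handler only lines up if the route regex captures the whole address in group 1, an optional 0x prefix in group 2, and wraps the optional offset and limit path segments so that their digits land in groups 4 and 6. The actual re_transactions_account_bloom pattern lives elsewhere in cic-cache; the pattern below is only a sketch under that assumption, to show why r.lastindex > 2 implies an offset is present and r.lastindex > 4 implies a limit is present.

# Hedged sketch, not the actual cic-cache pattern: a route regex whose groups
# line up with the handler above (group 1 = address, group 2 = optional 0x
# prefix, group 4 = offset digits, group 6 = limit digits).
import re

re_tx_user_sketch = re.compile(r'/tx/user/((0x)?[a-fA-F0-9]{40})(/(\d+))?(/(\d+))?/?$')

m = re_tx_user_sketch.match('/tx/user/' + 'ab' * 20)
assert m.lastindex <= 2                    # no offset, no limit captured

m = re_tx_user_sketch.match('/tx/user/0x' + 'ab' * 20 + '/42')
assert m.lastindex > 2 and m[4] == '42'    # offset present

m = re_tx_user_sketch.match('/tx/user/0x' + 'ab' * 20 + '/42/13')
assert m.lastindex > 4 and m[6] == '13'    # limit present too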

View File

@@ -20,7 +20,14 @@ logg = logging.getLogger()
         'query_path_prefix, query_role, query_address_index, query_offset, query_offset_index, query_limit, query_limit_index, match_re',
         [
             ('/tx/user/', 'alice', 0, None, 3, None, 5, re_transactions_account_bloom),
-            ('/tx/user/', 'alice', 0, 0, 3, None, 5, re_transactions_account_bloom),
+            ('/tx/user/', 'alice', 0, 42, 3, None, 5, re_transactions_account_bloom),
+            ('/tx/user/', 'alice', 0, 42, 3, 13, 5, re_transactions_account_bloom),
+            ('/tx/', None, 0, None, 3, None, 5, re_transactions_all_bloom),
+            ('/tx/', None, 0, 42, 3, None, 5, re_transactions_all_bloom),
+            ('/tx/', None, 0, 42, 3, 13, 5, re_transactions_all_bloom),
+            ('/txa/', None, 0, None, 3, None, 5, re_transactions_all_data),
+            ('/txa/', None, 0, 42, 3, None, 5, re_transactions_all_data),
+            ('/txa/', None, 0, 42, 3, 13, 5, re_transactions_all_data),
         ],
         )
 def test_query_regex(
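Read positionally against the long parametrize string above, each added row pairs a route prefix with an optional offset (42) and limit (13) and with the regex expected to match the resulting path. As an aid only, the last added row unpacks into the named fields like this; the group-index comments follow the index + 1 lookups in the test body.

# Positional reading of the added row ('/txa/', None, 0, 42, 3, 13, 5, re_transactions_all_data)
query_path_prefix = '/txa/'            # route prefix under test
query_role = None                      # no named actor, so no address appended
query_address_index = 0                # test reads the address from m[index + 1]
query_offset = 42
query_offset_index = 3                 # test reads the offset from m[index + 1]
query_limit = 13
query_limit_index = 5                  # test reads the limit from m[index + 1]
match_re = re_transactions_all_data    # regex expected to match the built path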
@@ -35,7 +42,6 @@ def test_query_regex(
         match_re,
         ):
-    logg.debug('type {}'.format(type(match_re)))
 
     paths = []
     path = query_path_prefix
     query_address = None
@@ -65,64 +71,66 @@
             assert query_offset == int(m[query_offset_index + 1])
         if m.lastindex >= query_limit_index:
             assert query_limit == int(m[query_limit_index + 1])
-        if query_address != None:
+        if query_address_index != None:
             match_address = strip_0x(m[query_address_index + 1])
             assert query_address == match_address
 
 
-#
-#@pytest.mark.parametrize(
-#        'role_name, query_offset, query_limit, query_match',
-#        [
-##            ('alice', None, None, [(420000, 13), (419999, 42)]),
-##            ('alice', None, 1, [(420000, 13)]),
-##            ('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
-#            ('alice', 2, None, []), # 420000 == list_defaults['block']
-#            ],
-#        )
-#def test_query_process_txs(
-#        init_database,
-#        list_defaults,
-#        list_actors,
-#        list_tokens,
-#        txs,
-#        zero_filter,
-#        role_name,
-#        query_offset,
-#        query_limit,
-#        query_match,
-#        ):
-#
-#    actor = None
-#    try:
-#        actor = list_actors[role_name]
-#    except KeyError:
-#        actor = os.urandom(20).hex()
-#    path_info = '/tx/user/0x' + strip_0x(actor)
-#    if query_offset != None:
-#        path_info += '/' + str(query_offset)
-#    if query_limit != None:
-#        path_info += '/' + str(query_limit)
-#    env = {
-#            'PATH_INFO': path_info,
-#            }
-#    logg.debug('using path {}'.format(path_info))
-#    r = process_transactions_account_bloom(init_database, env)
-#    assert r != None
-#
-#    o = json.loads(r[1])
-#    block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
-#    zero_filter_data = zero_filter.to_bytes()
-#    if len(query_match) == 0:
-#        assert block_filter_data == zero_filter_data
-#        return
-#
-#    assert block_filter_data != zero_filter_data
-#    block_filter = copy.copy(zero_filter)
-#    block_filter.merge(block_filter_data)
-#    block_filter_data = block_filter.to_bytes()
-#    assert block_filter_data != zero_filter_data
-#
-#    for (block, tx) in query_match:
-#        block = block.to_bytes(4, byteorder='big')
-#        assert block_filter.check(block)
+
+@pytest.mark.parametrize(
+        'role_name, query_offset, query_limit, query_match',
+        [
+            ('alice', None, None, [(420000, 13), (419999, 42)]),
+            ('alice', None, 1, [(420000, 13)]),
+            ('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
+            ('alice', 2, None, []), # 420000 == list_defaults['block']
+            ],
+        )
+def test_query_process_txs(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        txs,
+        zero_filter,
+        role_name,
+        query_offset,
+        query_limit,
+        query_match,
+        ):
+
+    actor = None
+    try:
+        actor = list_actors[role_name]
+    except KeyError:
+        actor = os.urandom(20).hex()
+    path_info = '/tx/user/0x' + strip_0x(actor)
+    if query_offset != None:
+        path_info += '/' + str(query_offset)
+    if query_limit != None:
+        if query_offset == None:
+            path_info += '/0'
+        path_info += '/' + str(query_limit)
+    env = {
+            'PATH_INFO': path_info,
+            }
+    logg.debug('using path {}'.format(path_info))
+    r = process_transactions_account_bloom(init_database, env)
+    assert r != None
+
+    o = json.loads(r[1])
+    block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
+    zero_filter_data = zero_filter.to_bytes()
+    if len(query_match) == 0:
+        assert block_filter_data == zero_filter_data
+        return
+
+    assert block_filter_data != zero_filter_data
+    block_filter = copy.copy(zero_filter)
+    block_filter.merge(block_filter_data)
+    block_filter_data = block_filter.to_bytes()
+    assert block_filter_data != zero_filter_data
+
+    for (block, tx) in query_match:
+        block = block.to_bytes(4, byteorder='big')
+        assert block_filter.check(block)
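Beyond exercising the handler, the revived test pins down the response shape: process_transactions_account_bloom returns a sequence whose second element is a JSON body, and that body carries a base64-encoded bloom filter under the block_filter key. A minimal decoding sketch follows; the field name and encoding are taken from the test above, while the merge()/check() calls mirror the zero_filter fixture and are only assumptions about that object's interface.

import base64
import json

def decode_block_filter(response_body):
    # response_body corresponds to r[1] in the test: a JSON string whose
    # 'block_filter' field is a base64-encoded bloom filter over block numbers.
    o = json.loads(response_body)
    return base64.b64decode(o['block_filter'].encode('utf-8'))

# Assumed usage, mirroring the test: merge the decoded bytes into a bloom
# filter object like the zero_filter fixture, then probe it with a big-endian
# 4-byte block number.
#
#   block_filter.merge(decode_block_filter(r[1]))
#   block_filter.check((420000).to_bytes(4, byteorder='big'))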