Add query test
commit e166a6737f
parent 31d700efe3
@@ -15,6 +15,8 @@ from cic_cache.db.list import (
 logg = logging.getLogger()
 
+DEFAULT_FILTER_SIZE = 8192 * 8
+
 
 class Cache:
 
     def __init__(self, session):
@@ -25,7 +27,7 @@ class BloomCache(Cache):
 
     @staticmethod
     def __get_filter_size(n):
-        n = 8192 * 8
+        n = DEFAULT_FILTER_SIZE
         logg.warning('filter size hardcoded to {}'.format(n))
         return n
 
@@ -11,9 +11,10 @@ from cic_cache.cache import (
 )
 
 logg = logging.getLogger(__name__)
+#logg = logging.getLogger()
 
 re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
-re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
+re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
 re_transactions_all_data = r'/txa/(\d+)/(\d+)/?'
 
 DEFAULT_LIMIT = 100
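For orientation (illustration only, not part of the diff): the widened account regex captures the address in group 1, the optional offset digits in group 4 and the optional limit digits in group 6, which is the layout the handler change below relies on. A minimal check with the standard re module:

import re

re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'

m = re.match(re_transactions_account_bloom, '/tx/user/0xdeadbeef/10/25')
print(m.groups())    # ('0xdeadbeef', '0x', '/10/25', '10', '/25', '25')

m = re.match(re_transactions_account_bloom, '/tx/user/deadbeef/10')
print(m.lastindex)   # 4: offset was given, the limit group (6) did not participate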
@@ -27,12 +28,13 @@ def process_transactions_account_bloom(session, env):
     address = r[1]
     if r[2] == None:
         address = '0x' + address
-    offset = DEFAULT_LIMIT
-    if r.lastindex > 2:
-        offset = r[3]
-    limit = 0
+    offset = 0
     if r.lastindex > 3:
-        limit = r[4]
+        offset = r[4]
+    limit = DEFAULT_LIMIT
+    if r.lastindex > 5:
+        limit = r[6]
+    raise ValueError('>>>>>>< offset {} limit {} path {}'.format(offset, limit, env.get('PATH_INFO')))
 
     c = BloomCache(session)
     (lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
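Traced end to end (a sketch only; parse_offset_limit and re_account are hypothetical names that simply mirror the lines added above), the new defaults are offset 0 and limit DEFAULT_LIMIT, read from groups 4 and 6 of the new regex:

import re

DEFAULT_LIMIT = 100
re_account = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'

def parse_offset_limit(path):
    # same branching as process_transactions_account_bloom after this commit
    r = re.match(re_account, path)
    offset = 0
    if r.lastindex > 3:
        offset = r[4]
    limit = DEFAULT_LIMIT
    if r.lastindex > 5:
        limit = r[6]
    return (offset, limit)

print(parse_offset_limit('/tx/user/0xdeadbeef'))        # (0, 100)
print(parse_offset_limit('/tx/user/0xdeadbeef/10'))     # ('10', 100)
print(parse_offset_limit('/tx/user/0xdeadbeef/10/25'))  # ('10', '25')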
@@ -5,9 +5,12 @@ import datetime
 
 # external imports
 import pytest
+import moolb
 
 # local imports
 from cic_cache import db
+from cic_cache import BloomCache
+from cic_cache.cache import DEFAULT_FILTER_SIZE
 
 script_dir = os.path.dirname(os.path.realpath(__file__))
 root_dir = os.path.dirname(script_dir)
@@ -101,3 +104,7 @@ def tag_txs(
 
     db.tag_transaction(init_database, txs[1], 'taag', domain='test')
 
+
+@pytest.fixture(scope='session')
+def zero_filter():
+    return moolb.Bloom(DEFAULT_FILTER_SIZE, 3)
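The zero_filter fixture gives the query tests a reference empty bloom filter to compare API responses against. A minimal usage sketch, assuming moolb's to_bytes() returns the raw filter contents (size in bits / 8 bytes), which is what the commented-out query test further down also relies on:

import base64
import moolb

DEFAULT_FILTER_SIZE = 8192 * 8                      # 65536 bits

zero_filter = moolb.Bloom(DEFAULT_FILTER_SIZE, 3)   # 3 hash rounds, same as the fixture
zero_filter_data = zero_filter.to_bytes()
print(len(zero_filter_data))                        # expected: 8192 bytes, all zero

# the cache API returns filters base64-encoded, so a response would be compared like:
# block_filter_data = base64.b64decode(response['block_filter'].encode('utf-8'))
# assert block_filter_data != zero_filter_data      # i.e. some transactions were recorded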
|
@ -10,6 +10,7 @@ from sqlalchemy import text
|
|||||||
from chainlib.eth.tx import Tx
|
from chainlib.eth.tx import Tx
|
||||||
from chainlib.eth.block import Block
|
from chainlib.eth.block import Block
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
|
from chainlib.eth.error import RequestMismatchException
|
||||||
from hexathon import (
|
from hexathon import (
|
||||||
strip_0x,
|
strip_0x,
|
||||||
add_0x,
|
add_0x,
|
||||||
@@ -18,10 +19,21 @@ from hexathon import (
 # local imports
 from cic_cache.db import add_tag
 from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
+from cic_cache.runnable.daemons.filters.base import TagSyncFilter
 
 logg = logging.getLogger()
 
 
+def test_base_filter_str(
+        init_database,
+        ):
+    f = TagSyncFilter('foo')
+    assert 'foo' == str(f)
+    f = TagSyncFilter('foo', domain='bar')
+    assert 'bar.foo' == str(f)
+
+
+
 def test_erc20_filter(
         eth_rpc,
         foo_token,
@@ -67,3 +79,95 @@ def test_erc20_filter(
     s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
     r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
     assert r[0] == tx.hash
+
+
+def test_erc20_filter_nocontract(
+        eth_rpc,
+        foo_token,
+        init_database,
+        list_defaults,
+        list_actors,
+        tags,
+        ):
+
+    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
+
+    fltr = ERC20TransferFilter(chain_spec)
+    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
+
+    # incomplete args
+    data = 'a9059cbb'
+    data += strip_0x(list_actors['alice'])
+    data += '1000'.ljust(64, '0')
+    block = Block({
+        'hash': os.urandom(32).hex(),
+        'number': 42,
+        'timestamp': datetime.datetime.utcnow().timestamp(),
+        'transactions': [],
+        })
+
+    tx = Tx({
+        'to': os.urandom(20).hex(),
+        'from': list_actors['bob'],
+        'data': data,
+        'value': 0,
+        'hash': os.urandom(32).hex(),
+        'nonce': 13,
+        'gasPrice': 10000000,
+        'gas': 123456,
+        })
+    block.txs.append(tx)
+    tx.block = block
+
+    assert not fltr.filter(eth_rpc, block, tx, db_session=init_database)
+
+
+@pytest.mark.parametrize(
+        'contract_method,contract_input,expected_exception',
+        [
+            ('a9059cbb', os.urandom(32).hex(), ValueError), # not enough args
+            ('a9059cbb', os.urandom(31).hex(), ValueError), # wrong arg boundary
+            ('a9059cbc', os.urandom(64).hex(), RequestMismatchException), # wrong method
+        ],
+        )
+def test_erc20_filter_bogus(
+        eth_rpc,
+        foo_token,
+        init_database,
+        list_defaults,
+        list_actors,
+        tags,
+        contract_method,
+        contract_input,
+        expected_exception,
+        ):
+
+    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
+
+    fltr = ERC20TransferFilter(chain_spec)
+    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
+
+    # incomplete args
+    data = contract_method
+    data += contract_input
+    block = Block({
+        'hash': os.urandom(32).hex(),
+        'number': 42,
+        'timestamp': datetime.datetime.utcnow().timestamp(),
+        'transactions': [],
+        })
+
+    tx = Tx({
+        'to': foo_token,
+        'from': list_actors['bob'],
+        'data': data,
+        'value': 0,
+        'hash': os.urandom(32).hex(),
+        'nonce': 13,
+        'gasPrice': 10000000,
+        'gas': 123456,
+        })
+    block.txs.append(tx)
+    tx.block = block
+
+    assert not fltr.filter(eth_rpc, block, tx, db_session=init_database)
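For context on the "bogus" inputs parametrized above (an illustration, not part of the diff): a well-formed ERC20 transfer(address,uint256) call is the 4-byte selector a9059cbb followed by two 32-byte ABI words, i.e. 8 + 64 + 64 hex characters, so truncating an argument or altering the selector is exactly what the filter is expected to reject:

import os

selector = 'a9059cbb'                             # transfer(address,uint256)
recipient = os.urandom(20).hex().rjust(64, '0')   # address, left-padded to a 32-byte word
value = hex(1024)[2:].rjust(64, '0')              # uint256, left-padded to a 32-byte word

well_formed = selector + recipient + value
print(len(well_formed))                           # 136 hex characters

truncated = selector + os.urandom(32).hex()       # only one 32-byte word follows
print(len(truncated))                             # 72: the "not enough args" case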
apps/cic-cache/tests/test_query.py (new file, 128 lines)
@@ -0,0 +1,128 @@
+# standard imports
+import logging
+import json
+import base64
+import copy
+import re
+
+# external imports
+import pytest
+from hexathon import strip_0x
+
+# local imports
+from cic_cache.runnable.daemons.query import *
+
+logg = logging.getLogger()
+
+
+
+@pytest.mark.parametrize(
+        'query_path_prefix, query_role, query_address_index, query_offset, query_offset_index, query_limit, query_limit_index, match_re',
+        [
+            ('/tx/user/', 'alice', 0, None, 3, None, 5, re_transactions_account_bloom),
+            ('/tx/user/', 'alice', 0, 0, 3, None, 5, re_transactions_account_bloom),
+        ],
+        )
+def test_query_regex(
+        list_actors,
+        query_path_prefix,
+        query_role,
+        query_address_index,
+        query_offset,
+        query_offset_index,
+        query_limit,
+        query_limit_index,
+        match_re,
+        ):
+
+    logg.debug('type {}'.format(type(match_re)))
+    paths = []
+    path = query_path_prefix
+    query_address = None
+    if query_role != None:
+        query_address = strip_0x(list_actors[query_role])
+        paths.append(path + '0x' + query_address)
+        paths.append(path + query_address)
+    if query_offset != None:
+        if query_limit != None:
+            for i in range(len(paths)-1):
+                paths[i] += '/{}/{}'.format(query_offset, query_limit)
+        else:
+            for i in range(len(paths)-1):
+                paths[i] += '/' + str(query_offset)
+
+    for i in range(len(paths)):
+        paths.append(paths[i] + '/')
+
+    for p in paths:
+        logg.debug('testing path {} against {}'.format(p, match_re))
+        m = re.match(match_re, p)
+        l = len(m.groups())
+        logg.debug('laast index match {} groups {}'.format(m.lastindex, l))
+        for i in range(l+1):
+            logg.debug('group {} {}'.format(i, m[i]))
+        if m.lastindex >= query_offset_index:
+            assert query_offset == int(m[query_offset_index + 1])
+        if m.lastindex >= query_limit_index:
+            assert query_limit == int(m[query_limit_index + 1])
+        if query_address != None:
+            match_address = strip_0x(m[query_address_index + 1])
+            assert query_address == match_address
+
+
+#
+#@pytest.mark.parametrize(
+#        'role_name, query_offset, query_limit, query_match',
+#        [
+##            ('alice', None, None, [(420000, 13), (419999, 42)]),
+##            ('alice', None, 1, [(420000, 13)]),
+##            ('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
+#            ('alice', 2, None, []), # 420000 == list_defaults['block']
+#        ],
+#        )
+#def test_query_process_txs(
+#        init_database,
+#        list_defaults,
+#        list_actors,
+#        list_tokens,
+#        txs,
+#        zero_filter,
+#        role_name,
+#        query_offset,
+#        query_limit,
+#        query_match,
+#        ):
+#
+#    actor = None
+#    try:
+#        actor = list_actors[role_name]
+#    except KeyError:
+#        actor = os.urandom(20).hex()
+#    path_info = '/tx/user/0x' + strip_0x(actor)
+#    if query_offset != None:
+#        path_info += '/' + str(query_offset)
+#    if query_limit != None:
+#        path_info += '/' + str(query_limit)
+#    env = {
+#            'PATH_INFO': path_info,
+#            }
+#    logg.debug('using path {}'.format(path_info))
+#    r = process_transactions_account_bloom(init_database, env)
+#    assert r != None
+#
+#    o = json.loads(r[1])
+#    block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
+#    zero_filter_data = zero_filter.to_bytes()
+#    if len(query_match) == 0:
+#        assert block_filter_data == zero_filter_data
+#        return
+#
+#    assert block_filter_data != zero_filter_data
+#    block_filter = copy.copy(zero_filter)
+#    block_filter.merge(block_filter_data)
+#    block_filter_data = block_filter.to_bytes()
+#    assert block_filter_data != zero_filter_data
+#
+#    for (block, tx) in query_match:
+#        block = block.to_bytes(4, byteorder='big')
+#        assert block_filter.check(block)
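To exercise just the new regex test, a typical invocation (assuming the fixtures from the repository's conftest are importable from the checkout root) would be something like:

pytest apps/cic-cache/tests/test_query.py -k test_query_regex -v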