Add cache handling

lash 2022-03-12 08:48:19 +00:00
parent b763d11eff
commit bd77706d1a
Signed by: lash
GPG Key ID: 21D2E7BB88C2A746
7 changed files with 174 additions and 28 deletions


@@ -1,6 +1,66 @@
+class CacheTx:
+
+    def __init__(self):
+        self.v_sender = None
+        self.v_recipient = None
+        self.v_nonce = None
+        self.v_value = None
+
+        self.block_number = None
+        self.tx_index = None
+        self.timestamp = None
+
+
+    def confirm(self, block_number, tx_index, timestamp):
+        self.block_number = block_number
+        self.tx_index = tx_index
+        self.timestamp = timestamp
+
+
+    def init(self, nonce, sender, recipient, value):
+        self.v_sender = sender
+        self.v_recipient = recipient
+        self.v_nonce = nonce
+        self.v_value = value
+
+
+    def deserialize(self, signed_tx):
+        raise NotImplementedError()
+
+
+    def set(self, k, v):
+        k = 'v_' + k
+        setattr(self, k, v)
+
+
+    def __str__(self):
+        return '{} -> {} : {}'.format(self.v_sender, self.v_recipient, self.v_value)
+
+
+class CacheTokenTx(CacheTx):
+
+    def __init__(self): #, nonce, sender, recipient, src_token, dst_token, src_value, dst_value):
+        super(CacheTokenTx, self).__init__()
+        self.v_src_token = None
+        self.v_src_value = None
+        self.v_dst_token = None
+        self.v_dst_value = None
+
+
 class Cache:

-    def put(self, chain_spec, tx):
+    def __init__(self, translator):
+        self.translator = translator
+
+
+    def put_serialized(self, chain_spec, signed_tx):
+        cache_tx = self.translator.translate(chain_spec, signed_tx)
+        return self.put(chain_spec, cache_tx)
+
+
+    def put(self, chain_spec, cache_tx):
         raise NotImplementedError()
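The new Cache is driven through put_serialized: the injected translator turns a raw signed tx into a CacheTx, and a concrete subclass decides where to put it. Below is a minimal sketch of that flow; DictTranslator and DictCache are hypothetical illustrations, and only the Cache, CacheTx and CacheTokenTx interfaces come from this commit.

# hypothetical usage sketch, not part of the commit
from chainqueue.cache import Cache, CacheTokenTx


class DictTranslator:
    # assumed translator interface: translate(chain_spec, signed_tx) -> CacheTx
    def translate(self, chain_spec, signed_tx):
        tx = CacheTokenTx()
        tx.init(nonce=0, sender='00' * 20, recipient='11' * 20, value=1024)
        tx.set('src_token', '22' * 20)
        tx.set('dst_token', '22' * 20)
        tx.set('src_value', 1024)
        tx.set('dst_value', 1024)
        return tx


class DictCache(Cache):
    # keeps translated txs in memory, keyed by sender and nonce
    def __init__(self, translator):
        super(DictCache, self).__init__(translator)
        self.txs = {}

    def put(self, chain_spec, cache_tx):
        self.txs[(cache_tx.v_sender, cache_tx.v_nonce)] = cache_tx
        return cache_tx


cache = DictCache(DictTranslator())
cache.put_serialized('evm:foo:1', b'\x01\x02\x03')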


@@ -27,10 +27,9 @@ class QueueEntry:
         self.synced = False


-    def create(self, seq, signed_tx):
-        n = str(seq)
+    def create(self, signed_tx):
         signed_tx = normalize_hex(signed_tx)
-        self.k = self.store.put(self.tx_hash, n, signed_tx)
+        self.k = self.store.put(self.tx_hash, signed_tx)
         self.synced = True


@@ -1,26 +1,28 @@
 # standard imports
 import logging
 import re
+import datetime

 logg = logging.getLogger(__name__)


-def to_key(k, v):
-    return '{:>010s}_{}'.format(k, v)
+def to_key(t, n, k):
+    return '{}_{}_{}'.format(t, n, k)


 def from_key(k):
-    (seq_str, tx_hash) = k.split('_')
-    return (int(seq_str), tx_hash,)
+    (ts_str, seq_str, tx_hash) = k.split('_')
+    return (float(ts_str), int(seq_str), tx_hash, )


 re_u = r'^[^_][_A-Z]+$'


 class Store:

-    def __init__(self, state_store, index_store):
+    def __init__(self, state_store, index_store, counter, cache=None):
+        self.cache = cache
         self.state_store = state_store
         self.index_store = index_store
+        self.counter = counter
         for s in dir(self.state_store):
             if not re.match(re_u, s):
                 continue
@@ -30,16 +32,20 @@ class Store:
             setattr(self, v, getattr(self.state_store, v))


-    def put(self, k, n, v):
-        self.index_store.put(k, n)
-        k = to_key(n, k)
-        self.state_store.put(k, v)
+    def put(self, k, v):
+        n = self.counter.next()
+        t = datetime.datetime.now().timestamp()
+        s = to_key(t, n, k)
+        self.state_store.put(s, v)
+        self.index_store.put(k, s)
+        if self.cache != None:
+            self.cache.put_serialized(v)


     def get(self, k):
-        n = self.index_store.get(k)
-        k = to_key(n, k)
-        return (k, self.state_store.get(k))
+        s = self.index_store.get(k)
+        v = self.state_store.get(s)
+        return (s, v,)


     def list(self, state=0, limit=4096, strict=False):
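The store key scheme changes from seq_txhash to timestamp_seq_txhash, with the sequence number drawn from the injected counter. A small round-trip sketch of the new key helpers, assuming to_key and from_key are imported from the store module patched above (its module path is not shown in this diff); the values are illustrative only:

import datetime

# illustrative values only
t = datetime.datetime.now().timestamp()
n = 42
tx_hash = 'aabbccdd'

s = to_key(t, n, tx_hash)          # '<timestamp>_<seq>_<txhash>'
(ts, seq, k) = from_key(s)

assert seq == n
assert k == tx_hash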


@@ -12,7 +12,7 @@ from chainqueue import (
     )


-class MockIndexStore:
+class MockContentStore:

     def __init__(self):
         self.store = {}
@@ -26,10 +26,24 @@ class MockIndexStore:
         return self.store.get(k)


+class MockCounter:
+
+    def __init__(self):
+        self.c = 0
+
+
+    def next(self):
+        c = self.c
+        self.c += 1
+        return c
+
+
 class TestShepBase(unittest.TestCase):

     def setUp(self):
         self.path = tempfile.mkdtemp()
         factory = SimpleFileStoreFactory(self.path).add
         self.state = Status(factory)
-        self.store = Store(self.state, MockIndexStore())
+        content_store = MockContentStore()
+        counter = MockCounter()
+        self.store = Store(self.state, content_store, counter)
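Store also accepts an optional cache argument. A hedged sketch of passing one into this fixture, reusing the hypothetical DictCache and DictTranslator from the earlier sketch; the commit's own tests do not exercise this path:

# hypothetical: same fixture, with a write-through cache attached
cache = DictCache(DictTranslator())
self.store = Store(self.state, content_store, counter, cache=cache)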

tests/test_cache.py (new file, 71 lines)

@@ -0,0 +1,71 @@
+# standard imports
+import os
+import logging
+import unittest
+import hashlib
+
+# external imports
+from hexathon import add_0x
+
+# local imports
+from chainqueue import QueueEntry
+from chainqueue.cache import (
+        CacheTokenTx,
+        )
+
+# test imports
+from tests.base_shep import TestShepBase
+
+logging.basicConfig(level=logging.DEBUG)
+logg = logging.getLogger()
+
+
+class MockCacheTokenTx(CacheTokenTx):
+
+    def deserialize(self, signed_tx):
+        h = hashlib.sha1()
+        h.update(signed_tx + b'\x01')
+        z = h.digest()
+
+        nonce = int.from_bytes(z[:4], 'big')
+        token_value = int.from_bytes(z[4:8], 'big')
+        value = int.from_bytes(z[8:12], 'big')
+
+        h = hashlib.sha1()
+        h.update(z)
+        z = h.digest()
+        sender = z.hex()
+
+        h = hashlib.sha1()
+        h.update(z)
+        z = h.digest()
+        recipient = z.hex()
+
+        h = hashlib.sha1()
+        h.update(z)
+        z = h.digest()
+        token = z.hex()
+
+        tx = CacheTokenTx()
+        tx.init(nonce, sender, recipient, value)
+        tx.set('src_token', token)
+        tx.set('dst_token', token)
+        tx.set('src_value', token_value)
+        tx.set('dst_value', token_value)
+
+        return tx
+
+
+class TestCache(TestShepBase):
+
+    def setUp(self):
+        super(TestCache, self).setUp()
+        self.tx = MockCacheTokenTx()
+
+
+    def test_basic_translator(self):
+        a = b'foo'
+        tx = self.tx.deserialize(a)
+        print(tx)
+
+
+if __name__ == '__main__':
+    unittest.main()


@@ -16,24 +16,20 @@ logging.basicConfig(level=logging.DEBUG)
 logg = logging.getLogger()


-class MockTranslator:
-
-    pass
-
-
-class TestShep(TestShepBase):
+class TestEntry(TestShepBase):

     def test_entry_get(self):
         tx_hash_one = add_0x(os.urandom(32).hex())
         signed_tx = add_0x(os.urandom(128).hex())
-        nonce = 42
         entry = QueueEntry(self.store, tx_hash_one)
-        entry.create(nonce, signed_tx)
+        entry.create(signed_tx)

         tx_hash_two = add_0x(os.urandom(32).hex())
         signed_tx = add_0x(os.urandom(128).hex())
-        nonce = 42
         entry = QueueEntry(self.store, tx_hash_two)
-        entry.create(nonce, signed_tx)
+        entry.create(signed_tx)

         txs = self.store.list()
         self.assertEqual(len(txs), 2)


@@ -31,7 +31,7 @@ class TestShep(TestShepBase):
         signed_tx = add_0x(os.urandom(128).hex())
         nonce = 42
         tx = QueueEntry(self.store, tx_hash)
-        tx.create(nonce, signed_tx)
+        tx.create(signed_tx)

         tx_retrieved = QueueEntry(self.store, tx_hash)
         tx_retrieved.load()