Compare commits

No commits in common. "0c9b42d0866e4e3a0230e6518b9bf0fd783f0c11" and "68f50246d2c729428b477fda07b29f50df9370d6" have entirely different histories.

0c9b42d086 ... 68f50246d2

chainqueue/adapters/base.py (new normal file, 117 lines)
@@ -0,0 +1,117 @@
+# standard imports
+import datetime
+
+# local imports
+from chainqueue.enum import StatusBits
+
+
+class Adapter:
+    """Base class defining interface to be implemented by chainqueue adapters.
+
+    The chainqueue adapter collects the following actions:
+
+    - add: add a transaction to the queue
+    - upcoming: get queued transactions ready to be sent to network
+    - dispatch: send a queued transaction to the network
+    - translate: decode details of a transaction
+    - create_session, release_session: session management to control queue state integrity
+
+    :param backend: Chainqueue backend
+    :type backend: TODO - abstract backend class. Must implement get, create_session, release_session
+    :param pending_retry_threshold: seconds delay before retrying a transaction stalled in the network
+    :type pending_retry_threshold: int
+    :param error_retry_threshold: seconds delay before retrying a transaction that incurred a recoverable error state
+    :type error_retry_threshold: int
+    """
+
+    def __init__(self, backend, pending_retry_threshold=0, error_retry_threshold=0):
+        self.backend = backend
+        self.pending_retry_threshold = datetime.timedelta(pending_retry_threshold)
+        self.error_retry_threshold = datetime.timedelta(error_retry_threshold)
+
+
+    def add(self, bytecode, chain_spec, session=None):
+        """Add a transaction to the queue.
+
+        :param bytecode: Transaction wire format bytecode, in hex
+        :type bytecode: str
+        :param chain_spec: Chain spec to use for transaction decode
+        :type chain_spec: chainlib.chain.ChainSpec
+        :param session: Backend state integrity session
+        :type session: varies
+        """
+        raise NotImplementedError()
+
+
+    def translate(self, bytecode, chain_spec):
+        """Decode details of a transaction.
+
+        :param bytecode: Transaction wire format bytecode, in hex
+        :type bytecode: str
+        :param chain_spec: Chain spec to use for transaction decode
+        :type chain_spec: chainlib.chain.ChainSpec
+        """
+        raise NotImplementedError()
+
+
+    def get(self, tx_hash, chain_spec, session=None):
+        """Retrieve serialized transaction represented by the given transaction hash.
+
+        :param chain_spec: Chain spec to use for transaction decode
+        :type chain_spec: chainlib.chain.ChainSpec
+        :param tx_hash: Transaction hash, in hex
+        :type tx_hash: str
+        :param session: Backend state integrity session
+        :type session: varies
+        """
+        raise NotImplementedError()
+
+
+    def dispatch(self, chain_spec, rpc, tx_hash, signed_tx, session=None):
+        """Send a queued transaction to the network.
+
+        :param chain_spec: Chain spec to use to identify the transaction network
+        :type chain_spec: chainlib.chain.ChainSpec
+        :param rpc: RPC connection to use for transaction send
+        :type rpc: chainlib.connection.RPCConnection
+        :param tx_hash: Transaction hash (checksum of transaction), in hex
+        :type tx_hash: str
+        :param signed_tx: Transaction wire format bytecode, in hex
+        :type signed_tx: str
+        :param session: Backend state integrity session
+        :type session: varies
+        """
+        raise NotImplementedError()
+
+
+    def upcoming(self, chain_spec, session=None):
+        """Get queued transactions ready to be sent to the network.
+
+        The transactions will be a combination of newly submitted transactions, previously sent but stalled transactions, and transactions that could temporarily not be submitted.
+
+        :param chain_spec: Chain spec to use to identify the transaction network
+        :type chain_spec: chainlib.chain.ChainSpec
+        :param session: Backend state integrity session
+        :type session: varies
+        """
+        raise NotImplementedError()
+
+
+    def create_session(self, session=None):
+        """Create a session context to guarantee atomic state change in backend.
+
+        :param session: If specified, session will be used instead of creating a new one
+        :type session: varies
+        """
+        return self.backend.create_session(session)
+
+
+    def release_session(self, session=None):
+        """Release a session context created by create_session.
+
+        If session parameter is defined, final session destruction will be deferred to the initial provider of the session. In other words: if create_session was called with a session, release_session should symmetrically be called with the same session.
+
+        :param session: Session context.
+        :type session: varies
+        """
+        return self.backend.release_session(session)
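The Adapter docstrings above describe the queue-facing contract (add, upcoming, dispatch, translate, and the symmetric create_session/release_session pair), but the commit adds no concrete implementation. As an illustration only, a minimal subclass wired to a hypothetical dict-backed backend could look like the sketch below; the DictBackend class and the way translate and dispatch are stubbed out are assumptions, not part of this diff.

# Illustrative sketch, not part of the commit: a minimal Adapter subclass over a
# hypothetical in-memory backend, showing the intended call pattern.
from chainqueue.adapters.base import Adapter


class DictBackend:
    # hypothetical stand-in; a real chainqueue backend also tracks status bits
    def __init__(self):
        self.txs = {}

    def create_session(self, session=None):
        return session if session != None else object()

    def release_session(self, session=None):
        return session


class DictAdapter(Adapter):

    def translate(self, bytecode, chain_spec):
        # a real adapter would decode the wire-format transaction for chain_spec here
        return {'hash': bytecode[:8], 'src': bytecode}

    def add(self, bytecode, chain_spec, session=None):
        tx = self.translate(bytecode, chain_spec)
        self.backend.txs[tx['hash']] = bytecode

    def get(self, tx_hash, chain_spec, session=None):
        return self.backend.txs[tx_hash]

    def upcoming(self, chain_spec, session=None):
        return list(self.backend.txs.keys())

    def dispatch(self, chain_spec, rpc, tx_hash, signed_tx, session=None):
        # a real adapter would submit signed_tx over the rpc connection here
        raise NotImplementedError()


# symmetric session use, as described by the create_session/release_session docstrings
adapter = DictAdapter(DictBackend())
session = adapter.create_session()
adapter.add('deadbeef01', None, session=session)
adapter.release_session(session)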
@@ -1,35 +1,15 @@
 # standard imports
 import enum
-import logging

-logg = logging.getLogger(__name__)
-
-
-class NoopNormalizer:
-
-    def __init__(self):
-        self.address = self.noop
-        self.hash = self.noop
-        self.value = self.noop
-
-
-    def noop(self, v):
-        return v
-
-
-noop_normalizer = NoopNormalizer()
-

 class CacheTx:

-    def __init__(self, normalizer=noop_normalizer):
-        self.normalizer = normalizer
-        self.sender = None
-        self.recipient = None
-        self.nonce = None
-        self.value = None
+    def __init__(self):
+        self.v_sender = None
+        self.v_recipient = None
+        self.v_nonce = None
+        self.v_value = None

-        self.tx_hash = None
         self.block_number = None
         self.tx_index = None
         self.timestamp = None
@@ -41,12 +21,11 @@ class CacheTx:
         self.timestamp = timestamp


-    def init(self, tx_hash, nonce, sender, recipient, value):
-        self.tx_hash = self.normalizer.hash(tx_hash)
-        self.sender = self.normalizer.address(sender)
-        self.recipient = self.normalizer.address(recipient)
-        self.nonce = nonce
-        self.value = self.normalizer.value(value)
+    def init(self, nonce, sender, recipient, value):
+        self.v_sender = sender
+        self.v_recipient = recipient
+        self.v_nonce = nonce
+        self.v_value = value


     def deserialize(self, signed_tx):
@@ -59,14 +38,14 @@ class CacheTx:


     def __str__(self):
-        return '{}: {} ({}) -> {} = {}'.format(self.tx_hash, self.sender, self.nonce, self.recipient, self.value)
+        return '{} -> {} : {}'.format(self.v_sender, self.v_recipient, self.v_value)



 class CacheTokenTx(CacheTx):

-    def __init__(self, normalizer=noop_normalizer):
-        super(CacheTokenTx, self).__init__(normalizer=normalizer)
+    def __init__(self): #, nonce, sender, recipient, src_token, dst_token, src_value, dst_value):
+        super(CacheTokenTx, self).__init__()
         self.v_src_token = None
         self.v_src_value = None
         self.v_dst_token = None
@@ -80,10 +59,9 @@ class CacheSort(enum.Enum):

 class CacheFilter:

-    def __init__(self, normalizer=noop_normalizer, nonce=None, before=None, after=None, sort=CacheSort.DATE, reverse=False):
-        self.normalizer = normalizer
-        self.senders = None
-        self.recipients = None
+    def __init__(self, senders=None, recipients=None, nonce=None, before=None, after=None, sort=CacheSort.DATE, reverse=False):
+        self.senders = senders
+        self.recipients = recipients
         self.nonce = nonce
         self.before = before
         self.after = after
@@ -91,28 +69,6 @@ class CacheFilter:
         self.reverse = reverse


-    def add_senders(self, senders):
-        if self.senders == None:
-            self.senders = []
-        if isinstance(senders, str):
-            senders = [senders]
-        for sender in senders:
-            if self.normalizer != None:
-                sender = self.normalizer.address(sender)
-            self.senders.append(sender)
-
-
-    def add_recipients(self, recipients):
-        if self.recipients == None:
-            self.recipients = []
-        if isinstance(recipients, str):
-            recipients = [recipients]
-        for recipient in recipients:
-            if self.normalizer != None:
-                recipient = self.normalizer.address(recipient)
-            self.recipients.append(recipient)
-
-
 class Cache:

     def put(self, chain_spec, cache_tx):
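The hunk above changes how CacheFilter is populated: the old constructor took a normalizer and exposed add_senders/add_recipients, while the new one accepts the sender and recipient lists directly. As an illustration only (assuming the new chainqueue version is importable), an equivalent filter could be built as in the sketch below; the old-style calls it replaces are kept as comments.

# Illustrative sketch, not part of the commit: building an equivalent filter
# against the new constructor-argument API shown in the hunk above.
import os

from chainqueue.cache import CacheFilter

sender = os.urandom(20).hex()
recipient = os.urandom(20).hex()

# old API (0c9b42d086), with optional address normalization:
#   fltr = CacheFilter(normalizer=noop_normalizer)
#   fltr.add_senders(sender)
#   fltr.add_recipients([recipient])

# new API (68f50246d2): plain lists handed over at construction time
fltr = CacheFilter(senders=[sender], recipients=[recipient], nonce=42)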
@@ -3,10 +3,6 @@ import logging
 import re
 import datetime

-# local imports
-from chainqueue.cache import CacheTx
-
-
 logg = logging.getLogger(__name__)


@@ -22,8 +18,7 @@ def from_key(k):
 re_u = r'^[^_][_A-Z]+$'
 class Store:

-    def __init__(self, chain_spec, state_store, index_store, counter, cache=None):
-        self.chain_spec = chain_spec
+    def __init__(self, state_store, index_store, counter, cache=None):
         self.cache = cache
         self.state_store = state_store
         self.index_store = index_store
@@ -37,16 +32,14 @@ class Store:
             setattr(self, v, getattr(self.state_store, v))


-    def put(self, k, v, cache_adapter=CacheTx()):
+    def put(self, k, v):
         n = self.counter.next()
         t = datetime.datetime.now().timestamp()
         s = to_key(t, n, k)
         self.state_store.put(s, v)
         self.index_store.put(k, s)
         if self.cache != None:
-            tx = cache_adapter()
-            tx.deserialize(v)
-            self.cache.put(self.chain_spec, tx)
+            self.cache.put_serialized(v)


     def get(self, k):
@@ -55,7 +48,7 @@ class Store:
         return (s, v,)


-    def by_state(self, state=0, limit=4096, strict=False):
+    def list(self, state=0, limit=4096, strict=False):
         hashes = []
         i = 0

@@ -75,7 +68,3 @@ class Store:
             pair = from_key(h)
             hashes_out.append(pair[1])
         return hashes_out
-
-
-    def upcoming(self, limit=4096):
-        return self.by_state(state=self.QUEUED, limit=limit)
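The Store hunks above drop the chain_spec constructor argument, rename by_state() to list(), remove upcoming(), and replace the per-call cache_adapter hook in put() with a single put_serialized() call on the cache object. A minimal sketch of a cache shaped to satisfy both call sites is shown below; the RecordingCache name and its list attributes are hypothetical, and only the two method signatures are taken from the diff.

# Illustrative sketch, not part of the commit: a cache object matching the
# two Store.put() flows shown above.
class RecordingCache:

    def __init__(self):
        self.txs = []          # filled by the old flow
        self.serialized = []   # filled by the new flow

    # old flow (0c9b42d086): Store.put() instantiated cache_adapter, deserialized the
    # raw value into a CacheTx and handed it over together with the store's chain_spec
    def put(self, chain_spec, cache_tx):
        self.txs.append((chain_spec, cache_tx))

    # new flow (68f50246d2): Store.put() hands the raw serialized transaction straight over
    def put_serialized(self, v):
        self.serialized.append(v)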
@@ -4,7 +4,6 @@ import unittest

 # external imports
 from shep.store.file import SimpleFileStoreFactory
-from chainlib.chain import ChainSpec

 # local imports
 from chainqueue import (
@@ -12,10 +11,6 @@ from chainqueue import (
     Status,
 )


-# test imports
-from tests.common import MockCounter
-
-
 class MockContentStore:

@@ -31,6 +26,18 @@ class MockContentStore:
         return self.store.get(k)


+class MockCounter:
+
+    def __init__(self):
+        self.c = 0
+
+
+    def next(self):
+        c = self.c
+        self.c += 1
+        return c
+
+
 class TestShepBase(unittest.TestCase):

     def setUp(self):
@@ -39,5 +46,4 @@ class TestShepBase(unittest.TestCase):
         self.state = Status(factory)
         content_store = MockContentStore()
         counter = MockCounter()
-        chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
-        self.store = Store(chain_spec, self.state, content_store, counter)
+        self.store = Store(self.state, content_store, counter)
@@ -1,40 +0,0 @@
-# local imports
-from chainqueue.cache import Cache
-
-
-class MockCounter:
-
-    def __init__(self):
-        self.c = 0
-
-
-    def next(self):
-        c = self.c
-        self.c += 1
-        return c
-
-
-class MockTokenCache(Cache):
-
-    def __init__(self):
-        self.db = {}
-        self.last_filter = None
-
-    def put(self, chain_spec, cache_tx):
-        self.db[cache_tx.tx_hash] = cache_tx
-
-
-    def get(self, chain_spec, tx_hash):
-        return self.db[tx_hash]
-
-
-    def by_nonce(self, cache_filter):
-        self.last_filter = cache_filter
-
-
-    def by_date(self, cache_filter=None):
-        self.last_filter = cache_filter
-
-
-    def count(self, cache_filter):
-        self.last_filter = cache_filter
@@ -3,22 +3,18 @@ import os
 import logging
 import unittest
 import hashlib
-import math

 # external imports
 from hexathon import add_0x
-from chainlib.chain import ChainSpec

 # local imports
 from chainqueue import QueueEntry
 from chainqueue.cache import (
     CacheTokenTx,
-    CacheFilter,
 )

 # test imports
 from tests.base_shep import TestShepBase
-from tests.common import MockTokenCache

 logging.basicConfig(level=logging.DEBUG)
 logg = logging.getLogger()
@@ -49,85 +45,26 @@ class MockCacheTokenTx(CacheTokenTx):
         z = h.digest()
         token = z.hex()

-        h = hashlib.sha256()
-        h.update(z)
-        z = h.digest()
-        tx_hash = z.hex()
-
-        #tx = CacheTokenTx(normalizer=self.normalizer)
-        self.init(tx_hash, nonce, sender, recipient, value)
-        self.set('src_token', token)
-        self.set('dst_token', token)
-        self.set('src_value', token_value)
-        self.set('dst_value', token_value)
-        self.confirm(42, 13, 1024000)
-
-        return self
-
-
-class MockNormalizer:
-
-    def address(self, v):
-        return 'address' + v
-
-
-    def value(self, v):
-        dv = int(math.log10(v) + 1)
-        return float(v / (10 ** dv))
-
-
-    def hash(self, v):
-        return 'ashbashhash' + v
+        tx = CacheTokenTx()
+        tx.init(nonce, sender, recipient, value)
+        tx.set('src_token', token)
+        tx.set('dst_token', token)
+        tx.set('src_value', token_value)
+        tx.set('dst_value', token_value)
+
+        return tx


 class TestCache(TestShepBase):

     def setUp(self):
         super(TestCache, self).setUp()
-        self.chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
-        self.cache = MockTokenCache()
+        self.tx = MockCacheTokenTx()


-    def test_cache_instance(self):
-        normalizer = MockNormalizer()
-        a = b'foo'
-        tx = MockCacheTokenTx(normalizer=normalizer)
-        tx.deserialize(a)
-        self.assertTrue(isinstance(tx.value, float))
-        self.assertEqual(tx.sender[:4], 'addr')
-        self.assertEqual(tx.recipient[:4], 'addr')
-        self.assertEqual(tx.tx_hash[:11], 'ashbashhash')
-
-
-    def test_cache_putget(self):
-        a = b'foo'
-        tx = MockCacheTokenTx()
-        tx.deserialize(a)
-        self.cache.put(self.chain_spec, tx)
-        tx_retrieved = self.cache.get(self.chain_spec, tx.tx_hash)
-        self.assertEqual(tx, tx_retrieved)
-
-
-    def test_cache_filter(self):
-        normalizer = MockNormalizer()
-        fltr = CacheFilter(normalizer=normalizer)
-
-        sender = os.urandom(20).hex()
-        fltr.add_senders(sender)
-
-        recipient_one = os.urandom(20).hex()
-        recipient_two = os.urandom(20).hex()
-        fltr.add_recipients([recipient_one, recipient_two])
-
-        self.assertEqual(fltr.senders[0][:4], 'addr')
-        self.assertEqual(fltr.recipients[1][:4], 'addr')
-
-
-    def test_cache_query(self):
-        a = os.urandom(20).hex()
-        fltr = CacheFilter(nonce=42)
-        self.cache.count(fltr)
-        self.assertEqual(self.cache.last_filter, fltr)
+    def test_basic_translator(self):
+        a = b'foo'
+        tx = self.tx.deserialize(a)
+        print(tx)


 if __name__ == '__main__':
@@ -31,31 +31,31 @@ class TestEntry(TestShepBase):
         entry = QueueEntry(self.store, tx_hash_two)
         entry.create(signed_tx)

-        txs = self.store.by_state()
+        txs = self.store.list()
         self.assertEqual(len(txs), 2)

         entry = QueueEntry(self.store, tx_hash_one)
         entry.load()
         entry.sent()

-        txs = self.store.by_state()
+        txs = self.store.list()
         self.assertEqual(len(txs), 1)

-        txs = self.store.by_state(state=self.store.IN_NETWORK)
+        txs = self.store.list(state=self.store.IN_NETWORK)
         self.assertEqual(len(txs), 1)

         entry.succeed(0)
-        txs = self.store.by_state()
+        txs = self.store.list()
         self.assertEqual(len(txs), 1)

         entry = QueueEntry(self.store, tx_hash_two)
         entry.load()
         entry.sent()

-        txs = self.store.by_state(state=self.store.IN_NETWORK)
+        txs = self.store.list(state=self.store.IN_NETWORK)
         self.assertEqual(len(txs), 2)

-        txs = self.store.by_state(state=self.store.IN_NETWORK, strict=True)
+        txs = self.store.list(state=self.store.IN_NETWORK, strict=True)
         self.assertEqual(len(txs), 1)

@@ -1,54 +0,0 @@
-# standard imports
-import tempfile
-import unittest
-
-# external imports
-from shep.store.file import SimpleFileStoreFactory
-from chainlib.chain import ChainSpec
-
-# local imports
-from chainqueue import (
-    Store,
-    Status,
-)
-
-# test imports
-from tests.common import (
-    MockCounter,
-    MockTokenCache
-)
-
-
-class MockContentStore:
-
-    def __init__(self):
-        self.store = {}
-
-
-    def put(self, k, v):
-        self.store[k] = v
-
-
-    def get(self, k):
-        return self.store.get(k)
-
-
-class TestShepBase(unittest.TestCase):
-
-    def setUp(self):
-        self.path = tempfile.mkdtemp()
-        factory = SimpleFileStoreFactory(self.path).add
-        self.state = Status(factory)
-        content_store = MockContentStore()
-        counter = MockCounter()
-        chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
-        self.cache = MockTokenCache()
-        self.store = Store(chain_spec, self.state, content_store, counter, cache=self.cache)
-
-
-    def test_basic(self):
-        pass
-
-
-if __name__ == '__main__':
-    unittest.main()
@@ -49,11 +49,7 @@ class TestShep(TestShepBase):
         self.state.set('foo', self.state.FINAL)
         with self.assertRaises(StateTransitionInvalid):
             self.state.move('foo', self.state.INSUFFICIENT_FUNDS)


-    def test_shep_cache(self):
-        self.store.put('foo', 'bar')
-
-
 if __name__ == '__main__':
     unittest.main()