Compare commits

...

10 Commits

Author SHA1 Message Date
lash
84b8eb10e6 Remove spam logline 2022-05-01 07:40:32 +00:00
lash
532ff230b4 Remove race waits (defer to client layer) 2022-05-01 06:44:33 +00:00
lash
f7c09acfe2 Add race delay 2022-05-01 06:27:52 +00:00
lash
04d9901f0d Allow backend objects to move between sync and get 2022-04-30 18:31:02 +00:00
lash
b8c2b1b86a Sort statewise results 2022-04-30 16:43:55 +00:00
lash
c94b291d39 Add upcoming tests, event callback pass to shep 2022-04-30 05:42:44 +00:00
lash
6c360ca2e5 Add reserved check method 2022-04-29 06:28:01 +00:00
lash
ff74679de8 Remove unneeded deps 2022-04-28 15:37:06 +00:00
lash
94bd5c8cdf Add cli handling and settings 2022-04-28 12:37:08 +00:00
lash
ccbbcc2157 Sync chainqueue state store on get 2022-04-27 06:23:58 +00:00
14 changed files with 155 additions and 36 deletions

View File

@@ -1,3 +1,12 @@
- 0.1.6
* Sort upcoming queue item chronologically
* Add unit testing for upcoming query method
- 0.1.5
* Add reserved state check method
- 0.1.4
* Dependency cleanups
- 0.1.3
* Add CLI args and config handling, settings object
- 0.1.2
* Add CLI inspection tools
- 0.1.1

View File

@@ -0,0 +1,11 @@
# standard imports
import os

# local imports
from .arg import process_flags
from .config import process_config

# Absolute directory of this module file (dunder-prefixed to keep it out
# of the package's public namespace).
__script_dir = os.path.dirname(os.path.realpath(__file__))
# Package-relative data directory, one level above this cli subpackage.
# NOTE(review): assumes a 'data' directory is shipped next to the package
# sources — confirm against the package layout / setup config.
data_dir = os.path.join(os.path.dirname(__script_dir), 'data')
# Default location of bundled configuration files.
config_dir = os.path.join(data_dir, 'config')

2
chainqueue/cli/arg.py Normal file
View File

@@ -0,0 +1,2 @@
def process_flags(argparser, flags):
    """Register chainqueue CLI arguments on the given argument parser.

    :param argparser: argparse.ArgumentParser (or compatible) to extend
    :param flags: argument-selection flags (currently unused)
    """
    backend_help = 'Backend to use for state store'
    argparser.add_argument('--backend', type=str, help=backend_help)

8
chainqueue/cli/config.py Normal file
View File

@@ -0,0 +1,8 @@
def process_config(config, args, flags):
    """Overlay CLI argument values onto the configuration object.

    :param config: configuration object supporting dict_override()
    :param args: parsed CLI arguments (argparse namespace)
    :param flags: argument-selection flags (currently unused)
    :return: the same config object, with overrides applied
    """
    override = {
        'QUEUE_BACKEND': args.backend,
    }
    config.dict_override(override, 'local cli args')
    return config

View File

@@ -1,9 +1,2 @@
[database]
name =
engine =
driver =
host =
port =
user =
password =
debug = 0
[queue]
backend = mem

View File

@@ -134,6 +134,10 @@ class QueueEntry:
self.store.cache.set_block(self.tx_hash, block, tx)
def test(self, state):
return self.__match_state(state)
def __str__(self):
v = self.store.get(self.tx_hash)
n = self.store.state(v[0])

View File

@@ -24,12 +24,6 @@ class CacheIntegrityError(ChainQueueException):
pass
class BackendIntegrityError(ChainQueueException):
"""Raised when queue backend has invalid state
"""
pass
class DuplicateTxError(ChainQueueException):
"""Backend already knows transaction
"""

8
chainqueue/settings.py Normal file
View File

@@ -0,0 +1,8 @@
# external imports
from chainlib.settings import ChainSettings
class ChainqueueSettings(ChainSettings):
    """Chainqueue-specific settings container extending chainlib's ChainSettings."""

    def process_queue_backend(self, config):
        """Copy the configured queue backend into the settings store.

        :param config: configuration object supporting get()
        """
        backend = config.get('QUEUE_BACKEND')
        self.o['QUEUE_BACKEND'] = backend

View File

@@ -106,10 +106,10 @@ class Verify:
class Status(shep.persist.PersistedState):
def __init__(self, store_factory):
def __init__(self, store_factory, allow_invalid=False, event_callback=None):
verify = Verify().verify
self.set_default_state('PENDING')
super(Status, self).__init__(store_factory, 12, verifier=verify)
super(Status, self).__init__(store_factory, 12, verifier=verify, check_alias=not allow_invalid, event_callback=event_callback)
self.add('QUEUED')
self.add('RESERVED')
self.add('IN_NETWORK')

View File

@@ -2,12 +2,15 @@
import re
import datetime
import logging
import time
# local imports
from chainqueue.cache import CacheTx
from chainqueue.entry import QueueEntry
from chainqueue.error import (
NotLocalTxError,
from chainqueue.error import NotLocalTxError
from chainqueue.enum import (
StatusBits,
all_errors,
)
logg = logging.getLogger(__name__)
@@ -21,6 +24,7 @@ def from_key(k):
(ts_str, seq_str, tx_hash) = k.split('_')
return (float(ts_str), int(seq_str), tx_hash, )
all_local_errors = all_errors() - StatusBits.NETWORK_ERROR
re_u = r'^[^_][_A-Z]+$'
class Store:
@@ -45,7 +49,15 @@ class Store:
'modified',
]:
setattr(self, v, getattr(self.state_store, v))
self.state_store.sync()
sync_err = None
try:
self.state_store.sync()
except Exception as e:
sync_err = e
if sync_err != None:
raise FileNotFoundError(sync_err)
def put(self, v, cache_adapter=CacheTx):
@@ -63,29 +75,40 @@ class Store:
def get(self, k):
v = None
s = self.index_store.get(k)
err = None
try:
s = self.index_store.get(k)
except FileNotFoundError:
raise NotLocalTxError(k)
v = self.state_store.get(s)
self.state_store.sync()
v = self.state_store.get(s)
except FileNotFoundError as e:
err = e
if v == None:
raise NotLocalTxError('could not find tx {}: {}'.format(k, err))
return (s, v,)
def by_state(self, state=0, limit=4096, strict=False, threshold=None):
def by_state(self, state=0, not_state=0, limit=4096, strict=False, threshold=None):
hashes = []
i = 0
refs_state = self.state_store.list(state)
refs_state.sort()
for ref in refs_state:
v = from_key(ref)
hsh = v[2]
item_state = self.state_store.state(ref)
if strict:
item_state = self.state_store.state(ref)
if item_state & state != item_state:
continue
logg.info('state {} {}'.format(ref, item_state))
if item_state & not_state > 0:
continue
if threshold != None:
v = self.state_store.modified(ref)
if v > threshold:
@@ -93,7 +116,9 @@ class Store:
hashes.append(hsh)
i += 1
if limit > 0 and i == limit:
break
hashes.sort()
return hashes
@@ -107,6 +132,17 @@ class Store:
return self.by_state(state=self.DEFERRED, limit=limit, threshold=threshold)
def failed(self, limit=4096):
#return self.by_state(state=all_local_errors, limit=limit)
r = []
r += self.by_state(state=self.LOCAL_ERROR, limit=limit)
r += self.by_state(state=self.NODE_ERROR, limit=limit)
r.sort()
if len(r) > limit:
r = r[:limit]
return r
def pending(self, limit=4096):
return self.by_state(state=0, limit=limit, strict=True)
@@ -129,6 +165,7 @@ class Store:
def fail(self, k):
entry = QueueEntry(self, k)
entry.load()
logg.debug('fail {}'.format(k))
entry.sendfail()
@@ -152,3 +189,13 @@ class Store:
entry = QueueEntry(self, k)
entry.load()
entry.sent()
def is_reserved(self, k):
entry = QueueEntry(self, k)
entry.load()
return entry.test(self.RESERVED)
def sync(self):
self.state_store.sync()

View File

@@ -64,7 +64,7 @@ class CounterStore:
v = f.read(8)
self.count = int.from_bytes(v, byteorder='big')
logg.info('counter starts at {}'.format(self.count))
logg.debug('counter starts at {}'.format(self.count))
f.seek(0)

View File

@@ -1,9 +1,9 @@
pysha3==1.0.2
#pysha3==1.0.2
hexathon~=0.1.5
leveldir~=0.3.0
alembic==1.4.2
SQLAlchemy==1.3.20
#alembic==1.4.2
#SQLAlchemy==1.3.20
confini~=0.6.0
pyxdg~=0.27
chainlib>=0.1.0b1,<=0.1.0
shep>=0.1.1rc1,<=0.3.0
#pyxdg~=0.27
chainlib~=0.1.1
shep~=0.2.3

View File

@@ -1,6 +1,6 @@
[metadata]
name = chainqueue
version = 0.1.2
version = 0.1.8
description = Generic blockchain transaction queue control
author = Louis Holbrook
author_email = dev@holbrook.no
@@ -25,7 +25,7 @@ licence_files =
LICENSE.txt
[options]
python_requires = >= 3.6
python_requires = >= 3.7
include_package_data = True
packages =
chainqueue
@@ -33,6 +33,7 @@ packages =
chainqueue.unittest
chainqueue.store
chainqueue.runnable
chainqueue.cli
#[options.entry_points]
#console_scripts =

View File

@@ -6,14 +6,23 @@ import logging
import shutil
# external imports
from chainlib.chain import ChainSpec
from shep.store.noop import NoopStoreFactory
# local imports
from chainqueue.store.fs import (
IndexStore,
CounterStore,
)
from chainqueue.store.base import Store
from chainqueue.error import DuplicateTxError
from chainqueue.state import Status
# tests imports
from tests.common import (
MockTokenCache,
MockCacheTokenTx,
)
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
@@ -58,5 +67,38 @@ class TestStoreImplementations(unittest.TestCase):
store.put(hx, data)
def test_upcoming_limit(self):
index_store = IndexStore(self.path)
counter_store = CounterStore(self.path)
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
factory = NoopStoreFactory().add
state_store = Status(factory)
cache_store = MockTokenCache()
queue_store = Store(chain_spec, state_store, index_store, counter_store, cache=cache_store)
txs = []
for i in range(3):
tx_src = os.urandom(128).hex()
tx = queue_store.put(tx_src, cache_adapter=MockCacheTokenTx)
txs.append(tx)
r = queue_store.upcoming(limit=3)
self.assertEqual(len(r), 0)
for tx in txs:
queue_store.enqueue(tx[1])
r = queue_store.upcoming(limit=3)
self.assertEqual(len(r), 3)
queue_store.send_start(txs[0][1])
r = queue_store.upcoming(limit=3)
self.assertEqual(len(r), 2)
queue_store.send_end(txs[0][1])
r = queue_store.upcoming(limit=3)
self.assertEqual(len(r), 2)
if __name__ == '__main__':
unittest.main()