Compare commits

..

37 Commits

Author SHA1 Message Date
lash
ff74679de8
Remove unneeded deps 2022-04-28 15:37:06 +00:00
lash
94bd5c8cdf
Add cli handling and settings 2022-04-28 12:37:08 +00:00
lash
ccbbcc2157
Sync chainqueue state store on get 2022-04-27 06:23:58 +00:00
lash
57191ea378
Move outputter module to explicit module path in cli 2022-04-26 21:35:20 +00:00
lash
e646edecca
Upgrade shep 2022-04-26 09:21:30 +00:00
lash
95930ef7de
Handle duplicate tx attempts 2022-04-12 13:44:29 +00:00
lash
c22fafad53
Update setup 2022-04-10 15:31:58 +00:00
lash
b5f513b63a
Ignore missing txs, sync store on start 2022-04-10 15:30:08 +00:00
lash
01b674d09e
Add change test for chainqueue entry 2022-04-10 14:00:01 +00:00
lash
0fa12adfa1
Add tx src member 2022-03-15 09:32:41 +00:00
lash
c094ca2198
Implement chainspec in entry, cache tx 2022-03-15 09:00:15 +00:00
lash
e4cc7061f0
Force hashing of tx inside puts 2022-03-15 08:06:39 +00:00
lash
92cb5d1978
Add state finalizers 2022-03-14 21:17:00 +00:00
lash
f8b256b51b
Add reserve, send enclosure 2022-03-14 19:53:54 +00:00
lash
1f7ca28647
Remove log spam 2022-03-13 17:24:25 +00:00
lash
d19fbf005e
Add date modified to state dirs 2022-03-13 17:22:39 +00:00
lash
485b33866b
Harden query tests 2022-03-13 16:10:40 +00:00
lash
04dfb185ce
Implement upcoming query on store 2022-03-13 15:45:48 +00:00
lash
a6e48d93a8
Implement upcoming query on store 2022-03-13 15:40:45 +00:00
lash
51c8124a28
Add store test, move store to subdir module 2022-03-13 14:58:26 +00:00
lash
bdebeb6010
Add missing provision in cache test 2022-03-12 14:22:55 +00:00
lash
d5f19248da
handle strings in tx inputs in test token cache tx object 2022-03-12 14:19:56 +00:00
lash
e457275128
WIP crossroads on hex vs bytes interpretation 2022-03-12 14:12:02 +00:00
lash
0c9b42d086
Prepare integration test 2022-03-12 13:48:40 +00:00
lash
69ad3711cd
Add embedded normalization to filter, cache tx 2022-03-12 12:49:38 +00:00
lash
ed75502f46
Add network token value to core cache tx object 2022-03-12 12:02:45 +00:00
lash
68f50246d2
Add cache interface methods, move old tests 2022-03-12 09:14:23 +00:00
lash
bd77706d1a
Add cache handling 2022-03-12 08:48:19 +00:00
lash
b763d11eff
Simplify queueentry 2022-03-11 21:49:23 +00:00
lash
790c9ddf13
Normalize hex in queueentry 2022-03-11 20:16:42 +00:00
lash
3880249683
Implement strict get match 2022-03-11 19:43:00 +00:00
lash
c4caab6a3a
implement get tx by state 2022-03-11 19:38:12 +00:00
lash
539d3384a6
Add addrses to nonce index on new tx object 2022-03-11 14:10:16 +00:00
lash
38aae6f8c0
WIP Implement transition setters on shep 2022-03-11 12:15:44 +00:00
lash
0682dd8ed3
Rename gas state name 2022-03-11 11:05:56 +00:00
lash
fce9bce6fc
Initial shep provisions 2022-03-11 11:03:05 +00:00
lash
5a92058e74
Bump version 2022-03-11 07:35:18 +00:00
38 changed files with 1454 additions and 145 deletions

View File

@ -1,3 +1,9 @@
- 0.1.2
* Add CLI inspection tools
- 0.1.1
*
- 0.1.0
* Replace state transitions with shep
- 0.0.3 - 0.0.3
* cli tool for listing queue by address * cli tool for listing queue by address
* ensure lowercase hex input in db * ensure lowercase hex input in db

3
chainqueue/__init__.py Normal file
View File

@ -0,0 +1,3 @@
from .state import Status
from .entry import QueueEntry
from .store import Store

View File

@ -1,117 +0,0 @@
# standard imports
import datetime
# local imports
from chainqueue.enum import StatusBits
class Adapter:
    """Base class defining interface to be implemented by chainqueue adapters.

    The chainqueue adapter collects the following actions:

    - add: add a transaction to the queue
    - upcoming: get queued transactions ready to be sent to network
    - dispatch: send a queued transaction to the network
    - translate: decode details of a transaction
    - create_session, release_session: session management to control queue state integrity

    :param backend: Chainqueue backend
    :type backend: TODO - abstract backend class. Must implement get, create_session, release_session
    :param pending_retry_threshold: seconds delay before retrying a transaction stalled in the network
    :type pending_retry_threshold: int
    :param error_retry_threshold: seconds delay before retrying a transaction that incurred a recoverable error state
    :type error_retry_threshold: int
    """

    def __init__(self, backend, pending_retry_threshold=0, error_retry_threshold=0):
        self.backend = backend
        # Fix: the thresholds are documented as seconds, but timedelta's first
        # positional argument is days; pass seconds explicitly.
        self.pending_retry_threshold = datetime.timedelta(seconds=pending_retry_threshold)
        self.error_retry_threshold = datetime.timedelta(seconds=error_retry_threshold)

    def add(self, bytecode, chain_spec, session=None):
        """Add a transaction to the queue.

        :param bytecode: Transaction wire format bytecode, in hex
        :type bytecode: str
        :param chain_spec: Chain spec to use for transaction decode
        :type chain_spec: chainlib.chain.ChainSpec
        :param session: Backend state integrity session
        :type session: varies
        """
        raise NotImplementedError()

    def translate(self, bytecode, chain_spec):
        """Decode details of a transaction.

        :param bytecode: Transaction wire format bytecode, in hex
        :type bytecode: str
        :param chain_spec: Chain spec to use for transaction decode
        :type chain_spec: chainlib.chain.ChainSpec
        """
        raise NotImplementedError()

    def get(self, tx_hash, chain_spec, session=None):
        """Retrieve serialized transaction represented by the given transaction hash.

        :param chain_spec: Chain spec to use for transaction decode
        :type chain_spec: chainlib.chain.ChainSpec
        :param tx_hash: Transaction hash, in hex
        :type tx_hash: str
        :param session: Backend state integrity session
        :type session: varies
        """
        raise NotImplementedError()

    def dispatch(self, chain_spec, rpc, tx_hash, signed_tx, session=None):
        """Send a queued transaction to the network.

        :param chain_spec: Chain spec to use to identify the transaction network
        :type chain_spec: chainlib.chain.ChainSpec
        :param rpc: RPC connection to use for transaction send
        :type rpc: chainlib.connection.RPCConnection
        :param tx_hash: Transaction hash (checksum of transaction), in hex
        :type tx_hash: str
        :param signed_tx: Transaction wire format bytecode, in hex
        :type signed_tx: str
        :param session: Backend state integrity session
        :type session: varies
        """
        raise NotImplementedError()

    def upcoming(self, chain_spec, session=None):
        """Get queued transactions ready to be sent to the network.

        The transactions will be a combination of newly submitted transactions, previously sent but stalled transactions, and transactions that could temporarily not be submitted.

        :param chain_spec: Chain spec to use to identify the transaction network
        :type chain_spec: chainlib.chain.ChainSpec
        :param session: Backend state integrity session
        :type session: varies
        """
        raise NotImplementedError()

    def create_session(self, session=None):
        """Create a session context to guarantee atomic state change in backend.

        :param session: If specified, session will be used instead of creating a new one
        :type session: varies
        """
        return self.backend.create_session(session)

    def release_session(self, session=None):
        """Release a session context created by create_session.

        If session parameter is defined, final session destruction will be deferred to the initial provider of the session. In other words; if create_session was called with a session, release_session should symmetrically be called with the same session.

        :param session: Session context.
        :type session: varies
        """
        return self.backend.release_session(session)

1
chainqueue/cache/__init__.py vendored Normal file
View File

@ -0,0 +1 @@
from .base import *

141
chainqueue/cache/base.py vendored Normal file
View File

@ -0,0 +1,141 @@
# standard imports
import enum
import logging
logg = logging.getLogger(__name__)
class NoopNormalizer:
    """Identity normalizer: address, hash and value pass through unchanged."""

    def __init__(self):
        # All three normalizer hooks share the same identity implementation.
        for hook in ('address', 'hash', 'value'):
            setattr(self, hook, self.noop)

    def noop(self, v):
        """Return *v* unchanged."""
        return v


# Shared module-level instance used as the default normalizer.
noop_normalizer = NoopNormalizer()
class CacheTx:
    """Normalized, chain-agnostic view of a queued transaction.

    :param chain_spec: Chain spec the transaction belongs to
    :type chain_spec: chainlib.chain.ChainSpec
    :param normalizer: Normalizer applied to addresses, hash and value
    :type normalizer: implements address, hash, value
    """

    def __init__(self, chain_spec, normalizer=noop_normalizer):
        self.normalizer = normalizer
        self.chain_spec = chain_spec
        # Transaction fields stay unset until init() / confirm() are called.
        for field in (
                'sender',
                'recipient',
                'nonce',
                'value',
                'hash',
                'block_number',
                'tx_index',
                'timestamp',
                'src',
                ):
            setattr(self, field, None)

    def confirm(self, block_number, tx_index, timestamp):
        """Record block inclusion details for the transaction."""
        self.block_number = block_number
        self.tx_index = tx_index
        self.timestamp = timestamp

    def init(self, tx_hash, nonce, sender, recipient, value):
        """Populate core fields, passing each through the normalizer."""
        normalize = self.normalizer
        self.hash = normalize.hash(tx_hash)
        self.sender = normalize.address(sender)
        self.recipient = normalize.address(recipient)
        self.nonce = nonce
        self.value = normalize.value(value)

    def deserialize(self, signed_tx):
        """Parse a serialized transaction; must be implemented by subclasses."""
        raise NotImplementedError()

    def set(self, k, v):
        """Store an auxiliary value under the attribute name 'v_' + k."""
        setattr(self, 'v_' + k, v)

    def __str__(self):
        return '{}: {} ({}) -> {} = {}'.format(self.hash, self.sender, self.nonce, self.recipient, self.value)
class CacheTokenTx(CacheTx):
    """CacheTx extended with token source/destination fields (set via set())."""

    def __init__(self, chain_spec, normalizer=noop_normalizer):
        super().__init__(chain_spec, normalizer=normalizer)
        # Token transfer details; populated by set('src_token', ...) etc.
        for field in ('v_src_token', 'v_src_value', 'v_dst_token', 'v_dst_value'):
            setattr(self, field, None)
class CacheSort(enum.Enum):
    """Sort order selector for cache queries (see CacheFilter.sort)."""
    DATE = 1
    NONCE = 2
class CacheFilter:
    """Query filter for cache lookups.

    :param normalizer: Normalizer applied to addresses added to the filter
    :type normalizer: implements address, hash, value
    :param nonce: Match only transactions with this nonce
    :type nonce: int
    :param before: Match only transactions before this time
    :type before: varies
    :param after: Match only transactions after this time
    :type after: varies
    :param sort: Sort order for results
    :type sort: CacheSort
    :param reverse: Reverse the sort order
    :type reverse: bool
    """

    def __init__(self, normalizer=noop_normalizer, nonce=None, before=None, after=None, sort=CacheSort.DATE, reverse=False):
        self.normalizer = normalizer
        self.senders = None
        self.recipients = None
        self.nonce = nonce
        self.before = before
        self.after = after
        self.sort = sort
        self.reverse = reverse

    def __add_addresses(self, bucket, addresses):
        # Shared logic for add_senders / add_recipients: lazily create the
        # list, accept a single string or an iterable, and normalize each
        # address when a normalizer is set.
        if bucket is None:
            bucket = []
        if isinstance(addresses, str):
            addresses = [addresses]
        for address in addresses:
            if self.normalizer is not None:
                address = self.normalizer.address(address)
            bucket.append(address)
        return bucket

    def add_senders(self, senders):
        """Add one or more sender addresses to match.

        :param senders: Single address or iterable of addresses
        :type senders: str or iterable
        """
        self.senders = self.__add_addresses(self.senders, senders)

    def add_recipients(self, recipients):
        """Add one or more recipient addresses to match.

        :param recipients: Single address or iterable of addresses
        :type recipients: str or iterable
        """
        self.recipients = self.__add_addresses(self.recipients, recipients)
class Cache:
    """Abstract interface for transaction detail caches.

    Implementations store CacheTx objects and answer filtered queries.
    """

    def put(self, chain_spec, cache_tx):
        """Store a cache transaction object."""
        raise NotImplementedError()

    def get(self, chain_spec, tx_hash):
        """Retrieve the cache transaction stored under the given hash."""
        raise NotImplementedError()

    def by_nonce(self, cache_filter):
        """Query cached transactions ordered by nonce."""
        raise NotImplementedError()

    def by_date(self, cache_filter=None):
        """Query cached transactions ordered by date."""
        raise NotImplementedError()

    def count(self, cache_filter=None):
        """Count cached transactions matching the filter."""
        raise NotImplementedError()

    def set_block(self, tx_hash, block, tx):
        """Register block inclusion details for a cached transaction.

        Fix: callers (QueueEntry.fail / QueueEntry.succeed) invoke this as
        set_block(tx_hash, block, tx); the previous two-parameter signature
        (block, tx) could never be satisfied by them.
        """
        raise NotImplementedError()

10
chainqueue/cache/fs.py vendored Normal file
View File

@ -0,0 +1,10 @@
# local imports
from .base import Cache
class FsCache(Cache):
    """Filesystem-backed cache stub.

    NOTE(review): only the root path is stored; none of the Cache interface
    methods are implemented here yet.

    :param path: Filesystem root path for cache content
    :type path: str
    """
    def __init__(self, path):
        self.path = path

View File

@ -0,0 +1,11 @@
# standard imports
import os
# local imports
from .arg import process_flags
from .config import process_config
# Absolute directory containing this module.
__script_dir = os.path.dirname(os.path.realpath(__file__))
# Package data directory: the "data" dir one level above this module.
data_dir = os.path.join(os.path.dirname(__script_dir), 'data')
# Default configuration directory shipped with the package.
config_dir = os.path.join(data_dir, 'config')

2
chainqueue/cli/arg.py Normal file
View File

@ -0,0 +1,2 @@
def process_flags(argparser, flags):
    """Register chainqueue CLI arguments on the given argument parser.

    :param argparser: Argument parser to add arguments to
    :type argparser: argparse.ArgumentParser
    :param flags: Flag vector selecting argument groups (currently unused here)
    :type flags: varies
    """
    argparser.add_argument('--backend', type=str, help='Backend to use for state store')

8
chainqueue/cli/config.py Normal file
View File

@ -0,0 +1,8 @@
def process_config(config, args, flags):
    """Override configuration values with local CLI arguments.

    :param config: Configuration object to update
    :type config: confini.Config
    :param args: Parsed CLI arguments (must provide a 'backend' attribute)
    :type args: argparse.Namespace
    :param flags: Flag vector selecting argument groups (currently unused here)
    :type flags: varies
    :rtype: confini.Config
    :returns: The updated configuration object
    """
    args_override = {
        # getattr(args, 'backend') with no default is plain attribute access.
        'QUEUE_BACKEND': args.backend,
    }
    config.dict_override(args_override, 'local cli args')
    return config

153
chainqueue/cli/output.py Normal file
View File

@ -0,0 +1,153 @@
# standard imports
import logging
import enum
# external imports
from hexathon import add_0x
# local imports
from chainqueue.enum import (
StatusBits,
all_errors,
is_alive,
is_error_status,
status_str,
)
logg = logging.getLogger(__name__)
class OutputCol(enum.Enum):
    """Columns available for listing output.

    Values index into the row value list built by Outputter.decode_single.
    """
    chainspec = 0
    hash = 1
    statustext = 2
    statuscode = 3
    signedtx = 4
class Outputter:
    """Output helper for chainqueue cli listings tools.

    :param chain_spec: Chain spec to use as getter context
    :type chain_spec: chainlib.chain.ChainSpec
    :param writer: Writer to write output to. Will automatically flush.
    :type writer: Writer
    :param getter: Transaction getter
    :type getter: See chainqueue.sql.backend.get_otx
    :param session_method: Backend session generator method
    :type session_method: varies
    :param decode_status: Print status bit details
    :type decode_status: bool
    :param cols: Names of OutputCol members to print; defaults to default_cols
    :type cols: list of str
    """

    # Every known column, in output order.
    all_cols = [
            OutputCol.chainspec,
            OutputCol.hash,
            OutputCol.signedtx,
            OutputCol.statustext,
            OutputCol.statuscode,
            ]

    # Columns used when the caller does not request specific ones.
    default_cols = [
            OutputCol.chainspec,
            OutputCol.hash,
            OutputCol.statustext,
            OutputCol.statuscode,
            ]

    def __init__(self, chain_spec, writer, getter, session_method=None, decode_status=True, cols=None):
        self.decode_status = decode_status
        self.writer = writer
        self.getter = getter
        self.chain_spec = chain_spec
        self.chain_spec_str = str(chain_spec)
        self.session = None
        if session_method is not None:
            self.session = session_method()
        # Running tally of transactions, split by liveness and error state.
        self.results = {
            'pending_error': 0,
            'final_error': 0,
            'pending': 0,
            'final': 0,
        }
        if cols is None:
            self.cols = Outputter.default_cols
        else:
            self.cols = [getattr(OutputCol, col) for col in cols]
        logg.debug('outputter initialized with cols: {}'.format(','.join([col.name for col in self.cols])))

    def __del__(self):
        # Close any session we created; guard against partially-constructed
        # instances where __init__ raised before setting self.session.
        if getattr(self, 'session', None) is not None:
            self.session.close()

    def add(self, tx_hash):
        """Retrieve a transaction by hash and add it for summary output generation.

        :param tx_hash: Transaction hash
        :type tx_hash: str
        """
        tx = self.getter(self.chain_spec, tx_hash, session=self.session)
        self.__add(tx)

    def __add(self, tx):
        # Classify as pending (still alive) or final, and tally error states.
        category = 'pending' if is_alive(tx['status_code']) else 'final'
        self.results[category] += 1
        if is_error_status(tx['status_code']):
            logg.debug('registered {} as {} with error'.format(tx['tx_hash'], category))
            self.results[category + '_error'] += 1
        else:
            logg.debug('registered {} as {}'.format(tx['tx_hash'], category))

    def decode_summary(self):
        """Writes summary to the registered writer.
        """
        self.writer.write('pending\t{}\t{}\n'.format(self.results['pending'], self.results['pending_error']))
        self.writer.write('final\t{}\t{}\n'.format(self.results['final'], self.results['final_error']))
        self.writer.write('total\t{}\t{}\n'.format(self.results['final'] + self.results['pending'], self.results['final_error'] + self.results['pending_error']))

    def decode_single(self, tx_hash):
        """Retrieves the transaction with the given hash and writes the details to the underlying writer.

        Registers the transaction with the summary generator.

        :param tx_hash: Transaction hash
        :type tx_hash: str
        """
        tx = self.getter(self.chain_spec, tx_hash, session=self.session)
        self.__add(tx)
        status = tx['status']
        if self.decode_status:
            status = status_str(tx['status_code'], bits_only=True)
        # Row values indexed by OutputCol value.
        vals = [
                self.chain_spec_str,
                add_0x(tx_hash),
                status,
                str(tx['status_code']),
                add_0x(tx['signed_tx']),
                ]
        self.writer.write('\t'.join([vals[col.value] for col in self.cols]) + '\n')

View File

@ -1,9 +1,2 @@
[database] [queue]
name = backend = mem
engine =
driver =
host =
port =
user =
password =
debug = 0

141
chainqueue/entry.py Normal file
View File

@ -0,0 +1,141 @@
# standard imports
import logging
# external imports
from hexathon import (
add_0x,
strip_0x,
uniform,
)
# local imports
from chainqueue.cache import CacheTx
logg = logging.getLogger(__name__)
def normalize_hex(k):
    """Return the hex string stripped of any 0x prefix and in uniform case."""
    return uniform(strip_0x(k))
class QueueEntry:
    """A single queued transaction and its lifecycle state transitions.

    The store provides content storage as well as the state bit symbols
    (QUEUED, RESERVED, IN_NETWORK, ...) referenced by the transition methods.

    :param store: Queue store holding this entry
    :type store: chainqueue.store.base.Store
    :param tx_hash: Transaction hash identifying the entry
    :type tx_hash: str
    :param cache_adapter: Transaction parser class used when creating entries
    :type cache_adapter: chainqueue.cache.CacheTx
    """
    def __init__(self, store, tx_hash=None, cache_adapter=CacheTx):
        self.store = store
        #self.tx_hash = normalize_hex(tx_hash)
        self.tx_hash = tx_hash
        self.signed_tx = None
        self.seq = None
        # Full queue key; set by create() or load().
        self.k = None
        # True once the entry reflects backend state.
        self.synced = False
        self.cache_adapter = cache_adapter

    def serialize(self):
        """Return the signed transaction data (None until load())."""
        return self.signed_tx

    def create(self, signed_tx):
        """Add a new transaction to the store.

        :param signed_tx: Signed transaction wire format, in hex
        :type signed_tx: str
        :rtype: str
        :returns: Transaction hash
        """
        signed_tx = normalize_hex(signed_tx)
        (s, tx_hash) = self.store.put(signed_tx, cache_adapter=self.cache_adapter)
        self.k = s
        self.synced = True
        return tx_hash

    def load(self):
        """Populate queue key and signed tx data from the store."""
        (self.k, self.signed_tx) = self.store.get(self.tx_hash)
        self.synced = True

    def __match_state(self, state):
        # True if any bit in the given mask is already set on the entry.
        return bool(self.store.state(self.k) & state)

    def waitforfunds(self):
        """Mark the entry as blocked on insufficient funds."""
        if self.__match_state(self.store.INSUFFICIENT_FUNDS):
            return
        # NOTE(review): uses store.move while sibling methods use store.set;
        # Store only proxies state/change/set/unset/name/modified from the
        # state store, so confirm 'move' is actually available on the store.
        self.store.move(self.k, self.store.INSUFFICIENT_FUNDS)

    def fubar(self):
        """Mark the entry as failed with an unknown error."""
        if self.__match_state(self.store.UNKNOWN_ERROR):
            return
        self.store.set(self.k, self.store.UNKNOWN_ERROR)

    def reject(self):
        """Mark the entry as rejected by the node."""
        if self.__match_state(self.store.NODE_ERROR):
            return
        self.store.set(self.k, self.store.NODE_ERROR)

    def override(self, manual=False):
        """Mark the entry as obsoleted, optionally by manual intervention."""
        if manual:
            self.store.set(self.k, self.store.OBSOLETE | self.store.MANUAL)
        else:
            self.store.set(self.k, self.store.OBSOLETE)

    def manual(self):
        """Flag the entry as manually operated."""
        self.store.set(self.k, self.store.MANUAL)

    def retry(self):
        """Re-queue the entry: set QUEUED, clear INSUFFICIENT_FUNDS."""
        if self.__match_state(self.store.QUEUED):
            return
        self.store.change(self.k, self.store.QUEUED, self.store.INSUFFICIENT_FUNDS)

    def readysend(self):
        """Mark the entry ready for (first) send.

        NOTE(review): body is identical to retry(); Store.enqueue relies on
        them diverging (retry first, readysend when the transition fails) —
        confirm whether readysend should set/clear a different bit set.
        """
        if self.__match_state(self.store.QUEUED):
            return
        self.store.change(self.k, self.store.QUEUED, self.store.INSUFFICIENT_FUNDS)

    def sent(self):
        """Mark the entry as submitted to the network, clearing send bits."""
        if self.__match_state(self.store.IN_NETWORK):
            return
        self.store.change(self.k, self.store.IN_NETWORK, self.store.RESERVED | self.store.DEFERRED | self.store.QUEUED | self.store.LOCAL_ERROR | self.store.NODE_ERROR)

    def sendfail(self):
        """Mark the entry as failed to send, deferring it for retry.

        NOTE(review): the guard checks NODE_ERROR while the transition sets
        LOCAL_ERROR | DEFERRED — confirm the guard bit is intended.
        """
        if self.__match_state(self.store.NODE_ERROR):
            return
        self.store.change(self.k, self.store.LOCAL_ERROR | self.store.DEFERRED, self.store.RESERVED | self.store.QUEUED | self.store.INSUFFICIENT_FUNDS)

    def reserve(self):
        """Reserve the entry for exclusive processing (QUEUED -> RESERVED)."""
        if self.__match_state(self.store.RESERVED):
            return
        self.store.change(self.k, self.store.RESERVED, self.store.QUEUED)

    def fail(self, block, tx):
        """Mark the entry as failed on-chain and cache block details."""
        if self.__match_state(self.store.NETWORK_ERROR):
            return
        v = self.store.state(self.k)
        self.store.change(self.k, v | self.store.NETWORK_ERROR, self.store.QUEUED)
        if self.store.cache:
            self.store.cache.set_block(self.tx_hash, block, tx)

    def cancel(self, confirmed=False):
        """Obsolete the entry; finalize it too when cancellation is confirmed."""
        if confirmed:
            self.store.change(self.k, self.store.OBSOLETE | self.store.FINAL, self.store.RESERVED | self.store.QUEUED)
        else:
            self.store.change(self.k, self.store.OBSOLETE, self.store.RESERVED | self.store.QUEUED)

    def succeed(self, block, tx):
        """Finalize the entry as successfully mined and cache block details."""
        self.store.set(self.k, self.store.FINAL)
        if self.store.cache:
            self.store.cache.set_block(self.tx_hash, block, tx)

    def __str__(self):
        v = self.store.get(self.tx_hash)
        n = self.store.state(v[0])
        s = self.store.name(n)
        return '{}: {}'.format(self.tx_hash, s)

View File

@ -42,7 +42,6 @@ class StatusEnum(enum.IntEnum):
""" """
PENDING = 0 PENDING = 0
"""Transaction has been added but no processing has been performed""" """Transaction has been added but no processing has been performed"""
SENDFAIL = StatusBits.DEFERRED | StatusBits.LOCAL_ERROR SENDFAIL = StatusBits.DEFERRED | StatusBits.LOCAL_ERROR
"""Temporary error occurred when sending transaction to node""" """Temporary error occurred when sending transaction to node"""
RETRY = StatusBits.QUEUED | StatusBits.DEFERRED RETRY = StatusBits.QUEUED | StatusBits.DEFERRED

View File

@ -29,3 +29,8 @@ class BackendIntegrityError(ChainQueueException):
""" """
pass pass
class DuplicateTxError(ChainQueueException):
    """Backend already knows transaction

    Raised when a transaction hash is added to a store that already holds
    an entry for it.
    """
    pass

View File

@ -12,7 +12,7 @@ from chainlib.chain import ChainSpec
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
# local imports # local imports
from chainqueue.cli import Outputter from chainqueue.cli.output import Outputter
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)

8
chainqueue/settings.py Normal file
View File

@ -0,0 +1,8 @@
# external imports
from chainlib.settings import ChainSettings
class ChainqueueSettings(ChainSettings):
    """Chainqueue-specific settings processor."""

    def process_queue_backend(self, config):
        # Copy the configured queue backend into the settings dict.
        self.o['QUEUE_BACKEND'] = config.get('QUEUE_BACKEND')

138
chainqueue/state.py Normal file
View File

@ -0,0 +1,138 @@
# standard imports
import logging
# external imports
import shep.persist
logg = logging.getLogger(__name__)
class Verify:
    """State transition guard used as the shep verifier for Status.

    Each upper-case method checks whether a transition into the state of the
    same name is allowed from the given from_state; it returns an error
    message string when the transition is invalid, None when it is allowed.
    """

    def verify(self, state_store, from_state, to_state):
        """Dispatch to the check method named after the target state.

        :rtype: str or None
        :returns: Error description when invalid, None when allowed
        """
        to_state_name = state_store.name(to_state)
        m = None
        try:
            m = getattr(self, to_state_name)
        except AttributeError:
            # No check defined for this target state; allow the transition.
            return None
        r = m(state_store, from_state)
        if r != None:
            from_state_name = state_store.name(from_state)
            r = '{} -> {}: {}'.format(from_state_name, to_state_name, r)
        return r

    def INSUFFICIENT_FUNDS(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            return 'already in network'

    def UNKNOWN_ERROR(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        # Fix: the guard previously read `if from_state & RESERVED` while
        # returning 'not reserved', contradicting both its own message and
        # the equivalent check in NODE_ERROR; the RESERVED bit is required.
        if not from_state & state_store.RESERVED:
            return 'not reserved'
        if from_state & state_store.mask_error:
            return 'already in error state'

    def NODE_ERROR(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            return 'already in network'
        if not from_state & state_store.RESERVED:
            return 'not reserved'
        if from_state & state_store.mask_error:
            return 'already in error state'

    def NETWORK_ERROR(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            return 'already in network'

    def OBSOLETE(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            return 'already in network'
        if from_state & state_store.OBSOLETE:
            return 'already obsolete'

    def MANUAL(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'

    def QUEUED(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'
        if from_state & state_store.IN_NETWORK:
            # Re-queueing something already in the network requires an error.
            if not from_state & state_store.mask_error:
                return 'not in error state'
        elif from_state & state_store.mask_error:
            return 'no first send on error state'

    def SENDFAIL(self, state_store, from_state):
        return self.NODE_ERROR(state_store, from_state)

    def FINAL(self, state_store, from_state):
        if from_state & state_store.FINAL:
            return 'already finalized'

    def _MINEFAIL(self, state_store, from_state):
        return self.NETWORK_ERROR(state_store, from_state)

    def _CANCEL(self, state_store, from_state):
        if from_state:
            if from_state & state_store.FINAL:
                return 'already finalized'
            if not from_state & (state_store.OBSOLETE | state_store.IN_NETWORK):
                return 'can only cancel state having OBSOLETE and/or IN_NETWORK'
class Status(shep.persist.PersistedState):
    """Queue transaction state machine built on shep's persisted state store.

    Declares the pure state bits, composite aliases for common combinations,
    and installs the Verify transition checker.

    :param store_factory: Factory producing per-state backing stores for shep
    :type store_factory: varies
    """

    def __init__(self, store_factory):
        verify = Verify().verify
        # NOTE(review): default state is registered before the superclass is
        # initialized; confirm shep supports this ordering.
        self.set_default_state('PENDING')
        # 12 pure state bits, guarded by the Verify transition checker.
        super(Status, self).__init__(store_factory, 12, verifier=verify)
        self.add('QUEUED')
        self.add('RESERVED')
        self.add('IN_NETWORK')
        self.add('DEFERRED')
        self.add('INSUFFICIENT_FUNDS')
        self.add('LOCAL_ERROR')
        self.add('NODE_ERROR')
        self.add('NETWORK_ERROR')
        self.add('UNKNOWN_ERROR')
        self.add('FINAL')
        self.add('OBSOLETE')
        self.add('MANUAL')
        # Composite aliases for common bit combinations.
        self.alias('SENDFAIL', self.DEFERRED | self.LOCAL_ERROR)
        self.alias('RETRY', self.DEFERRED | self.QUEUED)
        self.alias('OBSOLETED', self.OBSOLETE | self.IN_NETWORK)
        self.alias('FUBAR', self.FINAL | self.UNKNOWN_ERROR)
        self.alias('CANCELLED', self.IN_NETWORK | self.FINAL | self.OBSOLETE)
        self.alias('OVERRIDDEN', self.FINAL | self.OBSOLETE | self.MANUAL)
        self.alias('REJECTED', self.NODE_ERROR | self.FINAL)
        self.alias('REVERTED', self.IN_NETWORK | self.FINAL | self.NETWORK_ERROR)
        self.alias('SUCCESS', self.IN_NETWORK | self.FINAL)
        self.alias('_MINEFAIL', self.FINAL | self.NETWORK_ERROR)
        self.alias('_CANCEL', self.FINAL | self.OBSOLETE)
        # Bits that mark any kind of error state.
        self.mask_error = self.LOCAL_ERROR | self.NODE_ERROR | self.NETWORK_ERROR | self.UNKNOWN_ERROR

View File

@ -0,0 +1,5 @@
from .base import (
to_key,
from_key,
Store,
)

155
chainqueue/store/base.py Normal file
View File

@ -0,0 +1,155 @@
# standard imports
import re
import datetime
import logging

# external imports
from shep.error import StateTransitionInvalid

# local imports
from chainqueue.cache import CacheTx
from chainqueue.entry import QueueEntry
from chainqueue.error import (
    NotLocalTxError,
)
logg = logging.getLogger(__name__)
def to_key(t, n, k):
    """Compose a queue entry key from timestamp, sequence number and tx hash."""
    return '_'.join((str(t), str(n), str(k)))
def from_key(k):
    """Split a queue entry key into (timestamp, sequence number, tx hash)."""
    timestamp_part, sequence_part, tx_hash = k.split('_')
    return (float(timestamp_part), int(sequence_part), tx_hash)
# Matches upper-case state attribute names (e.g. QUEUED, IN_NETWORK) while
# excluding names that start with an underscore.
re_u = r'^[^_][_A-Z]+$'
class Store:
    """Queue store facade combining state, index, counter and cache backends.

    On construction the state store's state bit symbols (QUEUED, RESERVED,
    ...) and a subset of its methods are mirrored onto this object.

    :param chain_spec: Chain spec context for stored transactions
    :type chain_spec: chainlib.chain.ChainSpec
    :param state_store: State machine store (see chainqueue.state.Status)
    :type state_store: shep.persist.PersistedState
    :param index_store: Maps tx hashes to queue entry keys
    :type index_store: implements put, get
    :param counter: Monotonic sequence number source
    :type counter: implements next
    :param cache: Optional transaction detail cache
    :type cache: chainqueue.cache.Cache
    """

    def __init__(self, chain_spec, state_store, index_store, counter, cache=None):
        self.chain_spec = chain_spec
        self.cache = cache
        self.state_store = state_store
        self.index_store = index_store
        self.counter = counter
        # Mirror the upper-case state bit symbols onto this object.
        for s in dir(self.state_store):
            if not re.match(re_u, s):
                continue
            v = self.state_store.from_name(s)
            setattr(self, s, v)
        # Proxy commonly used state store methods.
        for v in [
                'state',
                'change',
                'set',
                'unset',
                'name',
                'modified',
                ]:
            setattr(self, v, getattr(self.state_store, v))
        self.state_store.sync()

    def put(self, v, cache_adapter=CacheTx):
        """Add a serialized transaction to the queue.

        :param v: Serialized transaction
        :type v: str
        :param cache_adapter: Parser used to derive the tx hash and cache data
        :type cache_adapter: chainqueue.cache.CacheTx
        :rtype: tuple
        :returns: (queue entry key, tx hash)
        """
        tx = cache_adapter(self.chain_spec)
        tx.deserialize(v)
        k = tx.hash
        n = self.counter.next()
        t = datetime.datetime.now().timestamp()
        s = to_key(t, n, k)
        self.index_store.put(k, s)
        self.state_store.put(s, v)
        if self.cache is not None:
            self.cache.put(self.chain_spec, tx)
        return (s, k,)

    def get(self, k):
        """Retrieve (queue entry key, serialized tx) for a tx hash.

        :raises NotLocalTxError: when the hash is unknown to the index
        """
        try:
            s = self.index_store.get(k)
        except FileNotFoundError:
            raise NotLocalTxError(k)
        self.state_store.sync()
        v = self.state_store.get(s)
        return (s, v,)

    def by_state(self, state=0, limit=4096, strict=False, threshold=None):
        """List tx hashes matching a state, sorted by hash.

        :param state: State bits to match
        :type state: int
        :param limit: Maximum number of hashes returned
        :type limit: int
        :param strict: Only match entries whose state has no bits outside the mask
        :type strict: bool
        :param threshold: Skip entries modified after this time
        :type threshold: float
        :rtype: list of str
        """
        hashes = []
        refs_state = self.state_store.list(state)
        for ref in refs_state:
            hsh = from_key(ref)[2]
            if strict:
                item_state = self.state_store.state(ref)
                if item_state & state != item_state:
                    continue
            if threshold is not None:
                if self.state_store.modified(ref) > threshold:
                    continue
            hashes.append(hsh)
        hashes.sort()
        # Fix: the limit argument was previously accepted but never applied.
        return hashes[:limit]

    def upcoming(self, limit=4096):
        """List queued (sendable) tx hashes."""
        return self.by_state(state=self.QUEUED, limit=limit)

    def deferred(self, limit=4096, threshold=None):
        """List deferred (retryable) tx hashes."""
        return self.by_state(state=self.DEFERRED, limit=limit, threshold=threshold)

    def pending(self, limit=4096):
        """List tx hashes with no state bits set at all."""
        return self.by_state(state=0, limit=limit, strict=True)

    def reserve(self, k):
        """Reserve the tx for exclusive processing."""
        entry = QueueEntry(self, k)
        entry.load()
        entry.reserve()

    def enqueue(self, k):
        """Queue the tx for sending, whether first send or retry."""
        entry = QueueEntry(self, k)
        entry.load()
        try:
            entry.retry()
        except StateTransitionInvalid:
            entry.readysend()

    def fail(self, k):
        """Register a failed send attempt for the tx."""
        entry = QueueEntry(self, k)
        entry.load()
        entry.sendfail()

    def final(self, k, block, tx, error=False):
        """Finalize the tx with its block result."""
        entry = QueueEntry(self, k)
        entry.load()
        if error:
            entry.fail(block, tx)
        else:
            entry.succeed(block, tx)

    def send_start(self, k):
        """Reserve the tx for sending and return its entry."""
        entry = QueueEntry(self, k)
        entry.load()
        entry.reserve()
        return entry

    def send_end(self, k):
        """Mark the tx as submitted to the network."""
        entry = QueueEntry(self, k)
        entry.load()
        entry.sent()

84
chainqueue/store/fs.py Normal file
View File

@ -0,0 +1,84 @@
# standard imports
import os
import logging
# external imports
from leveldir.hex import HexDir
# local imports
from chainqueue.error import DuplicateTxError
logg = logging.getLogger(__name__)
class IndexStore(HexDir):
    """Filesystem index mapping transaction hashes to queue entry keys.

    NOTE(review): subclasses HexDir but delegates all storage to a composed
    HexDir instance and never calls the base constructor — confirm the
    inheritance is intentional.

    :param root_path: Filesystem directory for the index
    :type root_path: str
    :param digest_bytes: Byte length of the hash digests used as keys
    :type digest_bytes: int
    """

    def __init__(self, root_path, digest_bytes=32):
        # Fix: the original computed os.path.join(root_path, 'contents') and
        # discarded the result; the dead statement has been removed.
        self.store = HexDir(root_path, digest_bytes)

    def __exists(self, k):
        # EAFP probe: a missing entry surfaces as FileNotFoundError in get().
        try:
            self.get(k)
        except FileNotFoundError:
            return False
        return True

    def put(self, k, v):
        """Store queue key v under transaction hash k.

        :raises DuplicateTxError: when the hash is already indexed
        """
        kb = bytes.fromhex(k)
        vb = v.encode('utf-8')
        if self.__exists(k):
            raise DuplicateTxError(k)
        self.store.add(kb, vb)

    def get(self, k):
        """Return the queue key stored under transaction hash k.

        :raises FileNotFoundError: when the hash is not indexed
        """
        fp = self.store.to_filepath(k)
        # Fix: use a context manager so the handle is closed even when
        # read() raises.
        with open(fp, 'rb') as f:
            v = f.read()
        return v.decode('utf-8')
class CounterStore:
    """Monotonic sequence counter persisted as a big-endian 8-byte file.

    The backing file stores the next value to issue, so the sequence
    survives process restarts.

    :param root_path: Directory holding the '.counter' file; created if missing
    :type root_path: str
    """
    def __init__(self, root_path):
        try:
            os.stat(root_path)
        except FileNotFoundError:
            os.makedirs(root_path)
        fp = os.path.join(root_path, '.counter')
        f = None
        try:
            f = open(fp, 'rb+')
        except FileNotFoundError:
            # First run: seed the counter file with zero.
            logg.debug('counter not found, creating new in {}'.format(fp))
            f = open(fp, 'wb+')
            f.write(b'\x00' * 8)
            f.close()
            f = open(fp, 'rb+')
        v = f.read(8)
        self.count = int.from_bytes(v, byteorder='big')
        logg.info('counter starts at {}'.format(self.count))
        # Rewind so the next write in next() overwrites the stored value.
        f.seek(0)
        self.f = f

    def __del__(self):
        # NOTE(review): raises AttributeError if __init__ failed before
        # setting self.f — confirm acceptable.
        self.f.close()

    def next(self):
        """Return the current counter value and persist the incremented one.

        :rtype: int
        :returns: Counter value before increment
        """
        c = self.count
        self.count += 1
        # Persist the post-increment value at offset 0, then rewind for the
        # next call; the file position must be 0 on entry (set in __init__).
        v = self.count.to_bytes(8, 'big')
        self.f.write(v)
        self.f.seek(0)
        return c

View File

@ -1,8 +1,9 @@
pysha3==1.0.2 #pysha3==1.0.2
hexathon~=0.1.0 hexathon~=0.1.5
leveldir~=0.3.0 leveldir~=0.3.0
alembic==1.4.2 #alembic==1.4.2
SQLAlchemy==1.3.20 #SQLAlchemy==1.3.20
confini~=0.5.1 confini~=0.6.0
pyxdg~=0.27 #pyxdg~=0.27
chainlib~=0.0.12 chainlib~=0.1.1
shep~=0.2.3

View File

@ -1,10 +1,10 @@
[metadata] [metadata]
name = chainqueue name = chainqueue
version = 0.0.6rc3 version = 0.1.3
description = Generic blockchain transaction queue control description = Generic blockchain transaction queue control
author = Louis Holbrook author = Louis Holbrook
author_email = dev@holbrook.no author_email = dev@holbrook.no
url = https://gitlab.com/chaintools/chainqueue url = https://gitlab.com/chaintool/chainqueue
keywords = keywords =
cic cic
cryptocurrency cryptocurrency
@ -25,17 +25,16 @@ licence_files =
LICENSE.txt LICENSE.txt
[options] [options]
python_requires = >= 3.6 python_requires = >= 3.7
include_package_data = True include_package_data = True
packages = packages =
chainqueue chainqueue
chainqueue.db chainqueue.cache
chainqueue.db.models
chainqueue.sql
chainqueue.adapters
chainqueue.unittest chainqueue.unittest
chainqueue.store
chainqueue.runnable chainqueue.runnable
chainqueue.cli
[options.entry_points] #[options.entry_points]
console_scripts = #console_scripts =
chainqueue-list = chainqueue.runnable.list:main # chainqueue-list = chainqueue.runnable.list:main

39
tests/base_shep.py Normal file
View File

@ -0,0 +1,39 @@
# standard imports
import tempfile
import unittest
import shutil
import logging
# external imports
from shep.store.file import SimpleFileStoreFactory
from chainlib.chain import ChainSpec
# local imports
from chainqueue import (
Store,
Status,
)
# test imports
from tests.common import (
MockCounter,
MockContentStore,
)
logg = logging.getLogger(__name__)
class TestShepBase(unittest.TestCase):
    """Base fixture wiring a file-backed shep state store into a queue Store."""

    def setUp(self):
        # Fresh state directory per test, torn down afterwards.
        self.path = tempfile.mkdtemp()
        state_factory = SimpleFileStoreFactory(self.path).add
        self.state = Status(state_factory)
        self.store = Store(
                ChainSpec('foo', 'bar', 42, 'baz'),
                self.state,
                MockContentStore(),
                MockCounter(),
                )
        logg.debug('using path {}'.format(self.path))

    def tearDown(self):
        shutil.rmtree(self.path)

103
tests/common.py Normal file
View File

@ -0,0 +1,103 @@
# standard imports
import hashlib
# local imports
from chainqueue.cache import (
Cache,
CacheTokenTx,
)
class MockCounter:
    """In-memory monotonic counter starting at zero."""

    def __init__(self):
        self.c = 0

    def next(self):
        """Return the current value, then advance by one."""
        issued = self.c
        self.c = issued + 1
        return issued
class MockTokenCache(Cache):
    """In-memory Cache implementation that records the last filter used."""

    def __init__(self):
        self.db = {}
        self.last_filter = None

    def __remember(self, cache_filter):
        # All query methods only record the filter for later inspection.
        self.last_filter = cache_filter

    def put(self, chain_spec, cache_tx):
        self.db[cache_tx.hash] = cache_tx

    def get(self, chain_spec, tx_hash):
        return self.db[tx_hash]

    def by_nonce(self, cache_filter):
        self.__remember(cache_filter)

    def by_date(self, cache_filter=None):
        self.__remember(cache_filter)

    def count(self, cache_filter):
        self.__remember(cache_filter)
class MockCacheTokenTx(CacheTokenTx):
    """Derives deterministic pseudo transaction fields from the input bytes."""

    def deserialize(self, signed_tx):
        def sha1_of(data):
            h = hashlib.sha1()
            h.update(data)
            return h.digest()

        # Accept both bytes and str input.
        try:
            payload = signed_tx + b'\x01'
        except TypeError:
            payload = signed_tx.encode('utf-8') + b'\x01'

        z = sha1_of(payload)
        nonce = int.from_bytes(z[:4], 'big')
        token_value = int.from_bytes(z[4:8], 'big')
        value = int.from_bytes(z[8:12], 'big')

        # Chain sha1 digests to derive distinct sender/recipient/token ids.
        z = sha1_of(z)
        sender = z.hex()
        z = sha1_of(z)
        recipient = z.hex()
        z = sha1_of(z)
        token = z.hex()

        # Transaction hash is the sha256 of the final sha1 digest.
        h = hashlib.sha256()
        h.update(z)
        tx_hash = h.digest().hex()

        self.init(tx_hash, nonce, sender, recipient, value)
        self.set('src_token', token)
        self.set('dst_token', token)
        self.set('src_value', token_value)
        self.set('dst_value', token_value)
        self.confirm(42, 13, 1024000)
        return self
class MockContentStore:
    """Minimal in-memory stand-in for the queue index store, mapping
    transaction hashes to queue entry keys.
    """
    def __init__(self):
        # Backing dict; keys and values stored as-is.
        self.store = {}

    def put(self, k, v):
        self.store[k] = v

    def get(self, k):
        # Returns None for unknown keys (dict.get), unlike the fs-backed
        # index store which raises FileNotFoundError.
        return self.store.get(k)

95
tests/test_cache.py Normal file
View File

@ -0,0 +1,95 @@
# standard imports
import os
import logging
import unittest
import math
# external imports
from hexathon import add_0x
from chainlib.chain import ChainSpec
# local imports
from chainqueue import QueueEntry
from chainqueue.cache import (
CacheTokenTx,
CacheFilter,
)
# test imports
from tests.base_shep import TestShepBase
from tests.common import (
MockTokenCache,
MockCacheTokenTx,
)
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
class MockNormalizer:
    """Normalizer mock that tags values so tests can verify it was applied."""

    def address(self, v):
        # Prefix marks the address as having been normalized.
        return 'address' + v

    def value(self, v):
        # Scale v below 1.0 by its decimal digit count.
        num_digits = int(math.log10(v) + 1)
        scaled = v / (10 ** num_digits)
        return float(scaled)

    def hash(self, v):
        # Prefix marks the hash as having been normalized.
        return 'ashbashhash' + v
class TestCache(TestShepBase):
    """Tests for the token cache tx object and the cache filter."""

    def setUp(self):
        super(TestCache, self).setUp()
        self.chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
        self.cache = MockTokenCache()

    def test_cache_instance(self):
        # A normalizer passed to the tx must be applied to value,
        # addresses and hash during deserialization.
        normalizer = MockNormalizer()
        a = b'foo'
        tx = MockCacheTokenTx(self.chain_spec, normalizer=normalizer)
        tx.deserialize(a)
        self.assertTrue(isinstance(tx.value, float))
        self.assertEqual(tx.sender[:4], 'addr')
        self.assertEqual(tx.recipient[:4], 'addr')
        self.assertEqual(tx.hash[:11], 'ashbashhash')

    def test_cache_putget(self):
        # A tx stored in the cache must come back out unchanged.
        a = b'foo'
        tx = MockCacheTokenTx(self.chain_spec)
        tx.deserialize(a)
        self.cache.put(self.chain_spec, tx)
        tx_retrieved = self.cache.get(self.chain_spec, tx.hash)
        self.assertEqual(tx, tx_retrieved)

    def test_cache_filter(self):
        # Senders and recipients added to the filter are normalized.
        normalizer = MockNormalizer()
        fltr = CacheFilter(normalizer=normalizer)
        sender = os.urandom(20).hex()
        fltr.add_senders(sender)
        recipient_one = os.urandom(20).hex()
        recipient_two = os.urandom(20).hex()
        fltr.add_recipients([recipient_one, recipient_two])
        self.assertEqual(fltr.senders[0][:4], 'addr')
        self.assertEqual(fltr.recipients[1][:4], 'addr')

    def test_cache_query(self):
        # Count queries must pass the filter through to the cache backend.
        # (removed an unused random address local that served no purpose)
        fltr = CacheFilter(nonce=42)
        self.cache.count(fltr)
        self.assertEqual(self.cache.last_filter, fltr)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()

85
tests/test_entry.py Normal file
View File

@ -0,0 +1,85 @@
# standard imports
import os
import logging
import unittest
# external imports
from hexathon import add_0x
from chainlib.tx import Tx
from chainlib.block import Block
# local imports
from chainqueue import QueueEntry
# test imports
from tests.base_shep import TestShepBase
from tests.common import MockCacheTokenTx
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
class TestEntry(TestShepBase):
    """Tests for queue entry creation and state transitions."""

    def test_entry_get(self):
        # Create two queued txs. (Removed dead `nonce = 42` locals that
        # were never read.)
        signed_tx = add_0x(os.urandom(128).hex())
        entry = QueueEntry(self.store, cache_adapter=MockCacheTokenTx)
        tx_hash_one = entry.create(signed_tx)

        signed_tx = add_0x(os.urandom(128).hex())
        entry = QueueEntry(self.store, cache_adapter=MockCacheTokenTx)
        tx_hash_two = entry.create(signed_tx)

        txs = self.store.by_state()
        self.assertEqual(len(txs), 2)

        logg.debug('tx hash one {}'.format(tx_hash_one))
        entry = QueueEntry(self.store, tx_hash=tx_hash_one, cache_adapter=MockCacheTokenTx)
        entry.load()
        entry.sent()

        # Sending moves the first tx out of the default state set ...
        txs = self.store.by_state()
        self.assertEqual(len(txs), 1)

        # ... and into IN_NETWORK.
        txs = self.store.by_state(state=self.store.IN_NETWORK)
        self.assertEqual(len(txs), 1)

        entry.succeed(None, None)
        txs = self.store.by_state()
        self.assertEqual(len(txs), 1)

        entry = QueueEntry(self.store, tx_hash_two)
        entry.load()
        entry.sent()

        # Non-strict match includes the succeeded tx; strict match does not.
        txs = self.store.by_state(state=self.store.IN_NETWORK)
        self.assertEqual(len(txs), 2)
        txs = self.store.by_state(state=self.store.IN_NETWORK, strict=True)
        self.assertEqual(len(txs), 1)

    def test_entry_change(self):
        signed_tx = add_0x(os.urandom(128).hex())
        entry = QueueEntry(self.store, cache_adapter=MockCacheTokenTx)
        tx_hash = entry.create(signed_tx)

        # NOTE(review): block and tx are constructed but never used below;
        # presumably staged for a future confirm step — confirm or remove.
        block = Block()
        block.number = 13
        tx = Tx(None)
        tx.index = 666

        entry.readysend()
        entry.reserve()
        entry.sendfail()

        # Reloading the entry must reflect the SENDFAIL state.
        entry = QueueEntry(self.store, tx_hash, cache_adapter=MockCacheTokenTx)
        entry.load()
        self.assertEqual(str(entry), tx_hash + ': SENDFAIL')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()

119
tests/test_integrate.py Normal file
View File

@ -0,0 +1,119 @@
# standard imports
import os
import tempfile
import unittest
import logging
import time
# external imports
from shep.store.file import SimpleFileStoreFactory
from chainlib.chain import ChainSpec
# local imports
from chainqueue import (
Store,
Status,
)
# test imports
from tests.common import (
MockCounter,
MockTokenCache,
MockCacheTokenTx,
MockContentStore,
)
from tests.base_shep import TestShepBase
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
class TestIntegrateBase(TestShepBase):
    """Integration tests of Store over a file-backed shep state store."""

    def setUp(self):
        self.path = tempfile.mkdtemp()
        factory = SimpleFileStoreFactory(self.path).add
        self.state = Status(factory)
        content_store = MockContentStore()
        counter = MockCounter()
        chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
        self.cache = MockTokenCache()
        self.store = Store(chain_spec, self.state, content_store, counter, cache=self.cache)

    def tearDown(self):
        # Remove the temporary state directory created in setUp; it was
        # previously leaked after every test (cf. tests/test_store.py,
        # which already cleans up with shutil.rmtree).
        import shutil
        shutil.rmtree(self.path)

    def test_integration_valid(self):
        # A put with a cache adapter must succeed.
        self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)

    def test_state_default(self):
        # A freshly stored tx shows up as pending.
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        v = self.store.pending()
        self.assertEqual(len(v), 1)
        self.assertEqual(v[0], hx)

    def test_state_enqueue(self):
        # Enqueueing moves the tx from pending to upcoming.
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.get(hx)
        self.store.enqueue(hx)
        v = self.store.upcoming()
        self.assertEqual(len(v), 1)
        v = self.store.pending()
        self.assertEqual(len(v), 0)

    def test_state_defer(self):
        # A failed send lands the tx in the deferred list.
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.reserve(hx)
        self.store.fail(hx)
        v = self.store.deferred()
        self.assertEqual(len(v), 1)
        self.assertEqual(v[0], hx)

    def test_state_multiple(self):
        # All failed txs are reported as deferred.
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.reserve(hx)
        self.store.fail(hx)
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.reserve(hx)
        self.store.fail(hx)
        v = self.store.deferred()
        self.assertEqual(len(v), 2)

    def test_state_multiple_sort(self):
        # Only failed txs appear as deferred; enqueued and pending ones do not.
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.reserve(hx)
        self.store.fail(hx)
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.enqueue(hx)
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.reserve(hx)
        self.store.fail(hx)
        self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        v = self.store.deferred()
        self.assertEqual(len(v), 2)

    def test_state_date_threshold(self):
        # Only txs modified at or before the threshold are returned.
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.reserve(hx)
        self.store.fail(hx)
        then = self.store.modified(s)
        time.sleep(0.1)
        (s, hx) = self.store.put(os.urandom(8).hex(), cache_adapter=MockCacheTokenTx)
        self.store.reserve(hx)
        self.store.fail(hx)
        v = self.store.deferred(threshold=then)
        self.assertEqual(len(v), 1)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()

60
tests/test_shep.py Normal file
View File

@ -0,0 +1,60 @@
# standard imports
import os
import logging
import unittest
# external imports
from hexathon import (
add_0x,
strip_0x,
)
from shep.error import StateTransitionInvalid
# local imports
from chainqueue import QueueEntry
# test imports
from tests.base_shep import TestShepBase
from tests.common import MockCacheTokenTx
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
class TestShep(TestShepBase):
    """Tests for shep-backed state handling of queue entries."""

    def test_shep_setup(self):
        # Base-class setup alone must succeed.
        pass

    def test_shep_tx(self):
        # Removed a dead `nonce = 42` local that was never read.
        signed_tx = add_0x(os.urandom(128).hex())
        tx = QueueEntry(self.store, cache_adapter=MockCacheTokenTx)
        tx_hash = tx.create(signed_tx)
        tx_retrieved = QueueEntry(self.store, tx_hash=tx_hash)
        tx_retrieved.load()
        # The stored signed tx is normalized without the 0x prefix.
        self.assertEqual(tx_retrieved.signed_tx, strip_0x(signed_tx))

    def test_shep_valid(self):
        # IN_NETWORK -> FINAL is a legal transition sequence.
        self.state.put('foo', 'bar')
        self.state.set('foo', self.state.IN_NETWORK)
        self.state.set('foo', self.state.FINAL)

    def test_shep_invalid(self):
        # No transition is allowed out of FINAL.
        self.state.put('foo', 'bar')
        self.state.set('foo', self.state.FINAL)
        with self.assertRaises(StateTransitionInvalid):
            self.state.move('foo', self.state.INSUFFICIENT_FUNDS)

    def test_shep_cache(self):
        # A put with a cache adapter must succeed.
        self.store.put('bar', cache_adapter=MockCacheTokenTx)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()

62
tests/test_store.py Normal file
View File

@ -0,0 +1,62 @@
# standard imports
import os
import tempfile
import unittest
import logging
import shutil
# external imports
# local imports
from chainqueue.store.fs import (
IndexStore,
CounterStore,
)
from chainqueue.error import DuplicateTxError
logging.basicConfig(level=logging.DEBUG)
logg = logging.getLogger()
class TestStoreImplementations(unittest.TestCase):
    """Tests for the filesystem-backed index and counter stores."""

    def setUp(self):
        # Each test gets its own scratch directory.
        self.path = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.path)

    def test_basic_index(self):
        # A stored entry must round-trip through the index.
        store = IndexStore(self.path)
        tx_hash = os.urandom(32).hex()
        content = 'foo_bar_baz'
        store.put(tx_hash, content)
        self.assertEqual(store.get(tx_hash), content)

    def test_basic_counter(self):
        # The counter increments across calls and persists across instances.
        store = CounterStore(self.path)
        self.assertEqual(store.next(), 0)
        self.assertEqual(store.next(), 1)
        store = CounterStore(self.path)
        self.assertEqual(store.next(), 2)

    def test_duplicate(self):
        # Storing the same hash twice must raise DuplicateTxError.
        store = IndexStore(self.path)
        tx_hash = os.urandom(32).hex()
        content = 'foo_bar_baz'
        store.put(tx_hash, content)
        with self.assertRaises(DuplicateTxError):
            store.put(tx_hash, content)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()