Merge branch 'lash/external-chain-queue' into 'master'

cic-eth: Rehabilitate retrier with chainqueue

See merge request grassrootseconomics/cic-internal-integration!85
Louis Holbrook 2021-04-05 15:07:09 +00:00
commit 9e6bb2acb2
26 changed files with 412 additions and 218 deletions

View File

@@ -32,7 +32,7 @@ class ERC20TransferFilter(SyncFilter):
        logg.debug('filter {} {}'.format(block, tx))
        token = None
        try:
-            token = ERC20Token(conn, tx.inputs[0])
+            token = ERC20Token(self.chain_spec, conn, tx.inputs[0])
        except NotAContractError:
            logg.debug('not a contract {}'.format(tx.inputs[0]))
            return False

View File

@@ -17,7 +17,7 @@ RUN apt-get update && \
# Copy shared requirements from top of mono-repo
RUN echo "copying root req file ${root_requirement_file}"
-RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a53
+RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a58
COPY cic-cache/requirements.txt ./
COPY cic-cache/setup.cfg \

View File

@@ -1,13 +1,13 @@
-cic-base~=0.1.2a53
+cic-base~=0.1.2a58
alembic==1.4.2
confini~=0.3.6rc3
uwsgi==2.0.19.1
moolb~=0.1.0
-cic-eth-registry~=0.5.4a9
+cic-eth-registry~=0.5.4a10
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
-chainlib~=0.0.1a44
-chainsyncer~=0.0.1a20
+chainlib~=0.0.2a2
+chainsyncer~=0.0.1a21

View File

@@ -528,7 +528,7 @@ class AdminApi:
        r = self.rpc.do(o)
        tx['recipient_gas_balance'] = r

-        tx_unpacked = unpack(bytes.fromhex(tx['signed_tx'][2:]), chain_spec.chain_id())
+        tx_unpacked = unpack(bytes.fromhex(strip_0x(tx['signed_tx'])), chain_spec)
        tx['gas_price'] = tx_unpacked['gasPrice']
        tx['gas_limit'] = tx_unpacked['gas']
        tx['data'] = tx_unpacked['data']

View File

@@ -37,7 +37,7 @@ class Api:
        self.callback_param = callback_param
        self.callback_task = callback_task
        self.queue = queue
-        logg.info('api using queue {}'.format(self.queue))
+        logg.debug('api using queue {}'.format(self.queue))
        self.callback_success = None
        self.callback_error = None
        if callback_queue == None:

View File

@@ -387,7 +387,7 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_spec_dict, gas=None, defa
    r = rpc.do(o)
    current_gas_price = int(r, 16)
    if tx['gasPrice'] > current_gas_price:
-        logg.info('Network gas price {} is lower than overdue tx gas price {}'.format(curent_gas_price, tx['gasPrice']))
+        logg.info('Network gas price {} is lower than overdue tx gas price {}'.format(current_gas_price, tx['gasPrice']))
        #tx['gasPrice'] = int(tx['gasPrice'] * default_factor)
        new_gas_price = tx['gasPrice'] + 1
    else:

View File

@@ -98,3 +98,12 @@ def get_state_log(chain_spec_dict, tx_hash):
    r = chainqueue.state.get_state_log(chain_spec, tx_hash, session=session)
    session.close()
    return r
+
+
+@celery_app.task(base=CriticalSQLAlchemyTask)
+def obsolete(chain_spec_dict, tx_hash, final):
+    chain_spec = ChainSpec.from_dict(chain_spec_dict)
+    session = SessionBase.create_session()
+    r = chainqueue.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session)
+    session.close()
+    return r
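The new task is a thin wrapper around chainqueue's obsolete_by_cache and is dispatched like the other queue state tasks. A minimal sketch of submitting it from a client, mirroring the signature the tx filter in this merge request uses; the helper name and default queue are illustrative assumptions:

import celery
from hexathon import add_0x


def submit_obsolete(chain_spec, tx_hash_hex, final=True, queue='cic-eth'):
    # chain_spec is a chainlib ChainSpec; the third task argument is the
    # "final" flag passed through to chainqueue.state.obsolete_by_cache
    s_obsolete = celery.signature(
            'cic_eth.queue.state.obsolete',
            [
                chain_spec.asdict(),
                add_0x(tx_hash_hex),
                final,
                ],
            queue=queue,
            )
    return s_obsolete.apply_async()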

View File

@@ -0,0 +1,34 @@
# standard imports
import logging

# external imports
import celery
from chainqueue.state import obsolete_by_cache

logg = logging.getLogger()


class StragglerFilter:

    def __init__(self, chain_spec, queue='cic-eth'):
        self.chain_spec = chain_spec
        self.queue = queue


    def filter(self, conn, block, tx, db_session=None):
        logg.debug('tx {}'.format(tx))
        obsolete_by_cache(self.chain_spec, tx.hash, False, session=db_session)
        s_send = celery.signature(
                'cic_eth.eth.gas.resend_with_higher_gas',
                [
                    tx.hash,
                    self.chain_spec.asdict(),
                ],
                queue=self.queue,
                )
        return s_send.apply_async()


    def __str__(self):
        return 'stragglerfilter'
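A brief usage sketch: the retrier daemon in this merge request registers the filter on a RetrySyncer, and the new straggler test calls it directly. The wrapper below is illustrative only; the syncer, conn, block and tx objects come from chainsyncer/chainlib as elsewhere in this changeset:

from cic_eth.runnable.daemons.filters.straggler import StragglerFilter


def attach_straggler_filter(syncer, chain_spec, queue='cic-eth'):
    # register the filter so each syncer pass obsoletes stalled txs and
    # queues a resend with higher gas; direct invocation, as in the new test,
    # is fltr.filter(conn, block, tx, db_session=session)
    fltr = StragglerFilter(chain_spec, queue=queue)
    syncer.add_filter(fltr)
    return fltr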

View File

@@ -33,7 +33,7 @@ class TxFilter(SyncFilter):
        logg.info('tx filter match on {}'.format(otx.tx_hash))
        db_session.flush()
        SessionBase.release_session(db_session)
-        s = celery.signature(
+        s_final_state = celery.signature(
            'cic_eth.queue.state.set_final',
            [
                self.chain_spec.asdict(),
@@ -43,7 +43,17 @@ class TxFilter(SyncFilter):
            ],
            queue=self.queue,
            )
-        t = s.apply_async()
+        s_obsolete_state = celery.signature(
+            'cic_eth.queue.state.obsolete',
+            [
+                self.chain_spec.asdict(),
+                add_0x(tx_hash_hex),
+                True,
+                ],
+            queue=self.queue,
+            )
+        t = celery.group(s_obsolete_state, s_final_state)()

        return t
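With this change the filter returns a celery group result covering both state tasks instead of a single AsyncResult. A hedged sketch of how a caller can block on it, following what the new tx filter test does; the function name and timeout are illustrative:

def wait_for_tx_filter(t, timeout=10.0):
    # t is the group result returned by TxFilter.filter(); get() waits for
    # both the obsolete and set_final child tasks and returns their results
    results = t.get(timeout=timeout)
    assert t.successful()
    return results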

View File

@@ -4,38 +4,22 @@ import sys
import logging
import argparse
import re
-import datetime

# external imports
import confini
import celery
from cic_eth_registry import CICRegistry
from chainlib.chain import ChainSpec
-from chainlib.eth.tx import unpack
from chainlib.connection import RPCConnection
-from chainlib.eth.block import (
-        block_latest,
-        block_by_number,
-        Block,
-        )
-from chainsyncer.driver import HeadSyncer
-from chainsyncer.backend import MemBackend
-from chainsyncer.error import NoBlockForYou
+from chainsyncer.filter import SyncFilter

# local imports
from cic_eth.db import dsn_from_config
from cic_eth.db import SessionBase
-from cic_eth.queue.tx import (
-        get_status_tx,
-        get_tx,
-        # get_upcoming_tx,
-        )
from cic_eth.admin.ctrl import lock_send
-from cic_eth.db.enum import (
-        StatusEnum,
-        StatusBits,
-        LockEnum,
-        )
+from cic_eth.db.enum import LockEnum
+from cic_eth.runnable.daemons.filters.straggler import StragglerFilter
+from cic_eth.sync.retry import RetrySyncer

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
@@ -89,189 +73,54 @@ SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
straggler_delay = int(config.get('CIC_TX_RETRY_DELAY'))

-# TODO: we already have the signed raw tx in get, so its a waste of cycles to get_tx here
-def sendfail_filter(w3, tx_hash, rcpt, chain_spec):
-    tx_dict = get_tx(tx_hash)
-    tx = unpack(tx_dict['signed_tx'], chain_spec)
-    logg.debug('submitting tx {} for retry'.format(tx_hash))
-    s_check = celery.signature(
-            'cic_eth.admin.ctrl.check_lock',
-            [
-                tx_hash,
-                chain_str,
-                LockEnum.QUEUE,
-                tx['from'],
-                ],
-            queue=queue,
-            )
-#    s_resume = celery.signature(
-#            'cic_eth.eth.tx.resume_tx',
-#            [
-#                chain_str,
-#                ],
-#            queue=queue,
-#            )
-
-#    s_retry_status = celery.signature(
-#            'cic_eth.queue.state.set_ready',
-#            [],
-#            queue=queue,
-#            )
-    s_resend = celery.signature(
-            'cic_eth.eth.gas.resend_with_higher_gas',
-            [
-                chain_str,
-                ],
-            queue=queue,
-            )
-
-    #s_resume.link(s_retry_status)
-    #s_check.link(s_resume)
-    s_check.link(s_resend)
-    s_check.apply_async()
-
-
-# TODO: can we merely use the dispatcher instead?
-def dispatch(conn, chain_spec):
-    txs = get_status_tx(StatusEnum.RETRY, before=datetime.datetime.utcnow())
-    if len(txs) == 0:
-        logg.debug('no retry state txs found')
-        return
-    #signed_txs = list(txs.values())
-    #logg.debug('signed txs {} chain {}'.format(signed_txs, chain_str))
-    #for tx in signed_txs:
-    for k in txs.keys():
-        #tx_cache = get_tx_cache(k)
-        tx_raw = txs[k]
-        tx = unpack(tx_raw, chain_spec)
-        s_check = celery.signature(
-                'cic_eth.admin.ctrl.check_lock',
-                [
-                    [tx_raw],
-                    chain_str,
-                    LockEnum.QUEUE,
-                    tx['from'],
-                    ],
-                queue=queue,
-                )
-        s_send = celery.signature(
-                'cic_eth.eth.tx.send',
-                [
-                    chain_str,
-                    ],
-                queue=queue,
-                )
-        s_check.link(s_send)
-        t = s_check.apply_async()
-#        try:
-#            r = t.get()
-#            logg.debug('submitted as {} result {} with queue task {}'.format(t, r, t.children[0].get()))
-#        except PermanentTxError as e:
-#            logg.error('tx {} permanently failed: {}'.format(tx, e))
-#        except TemporaryTxError as e:
-#            logg.error('tx {} temporarily failed: {}'.format(tx, e))
-#
-#
-#def straggler_filter(w3, tx, rcpt, chain_str):
-#    before = datetime.datetime.utcnow() - datetime.timedelta(seconds=straggler_delay)
-#    txs = get_status_tx(StatusEnum.SENT, before)
-#    if len(txs) == 0:
-#        logg.debug('no straggler txs found')
-#        return
-#    txs = list(txs.keys())
-#    logg.debug('straggler txs {} chain {}'.format(signed_txs, chain_str))
-#    s_send = celery.signature(
-#            'cic_eth.eth.gas.resend_with_higher_gas',
-#            [
-#                txs,
-#                chain_str,
-#            ],
-#            queue=queue,
-#            )
-#    s_send.apply_async()
-
-
-class StragglerFilter:
-
-    def __init__(self, chain_spec, queue='cic-eth'):
-        self.chain_spec = chain_spec
-        self.queue = queue
-
-
-    def filter(self, conn, block, tx, db_session=None):
-        logg.debug('tx {}'.format(tx))
-        s_send = celery.signature(
-                'cic_eth.eth.gas.resend_with_higher_gas',
-                [
-                    tx,
-                    self.chain_spec.asdict(),
-                ],
-                queue=self.queue,
-                )
-        return s_send.apply_async()
-#        return s_send
-
-
-    def __str__(self):
-        return 'stragglerfilter'
-
-
-class RetrySyncer(HeadSyncer):
-
-    def __init__(self, conn, chain_spec, stalled_grace_seconds, batch_size=50, failed_grace_seconds=None):
-        backend = MemBackend(chain_spec, None)
-        super(RetrySyncer, self).__init__(backend)
-        self.chain_spec = chain_spec
-        if failed_grace_seconds == None:
-            failed_grace_seconds = stalled_grace_seconds
-        self.stalled_grace_seconds = stalled_grace_seconds
-        self.failed_grace_seconds = failed_grace_seconds
-        self.batch_size = batch_size
-        self.conn = conn
-
-
-    def get(self, conn):
-        o = block_latest()
-        r = conn.do(o)
-        (pair, flags) = self.backend.get()
-        n = int(r, 16)
-        if n == pair[0]:
-            raise NoBlockForYou('block {} already checked'.format(n))
-        o = block_by_number(n)
-        r = conn.do(o)
-        b = Block(r)
-        return b
-
-
-    def process(self, conn, block):
-        before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.stalled_grace_seconds)
-        stalled_txs = get_status_tx(
-                StatusBits.IN_NETWORK.value,
-                not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE,
-                before=before,
-                limit=self.batch_size,
-                )
-#        stalled_txs = get_upcoming_tx(
-#                status=StatusBits.IN_NETWORK.value,
-#                not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE,
-#                before=before,
-#                limit=self.batch_size,
-#                )
-        for tx in stalled_txs:
-            self.filter.apply(self.conn, block, tx)
-        self.backend.set(block.number, 0)
+## TODO: we already have the signed raw tx in get, so its a waste of cycles to get_tx here
+#def sendfail_filter(w3, tx_hash, rcpt, chain_spec):
+#    tx_dict = get_tx(tx_hash)
+#    tx = unpack(tx_dict['signed_tx'], chain_spec)
+#    logg.debug('submitting tx {} for retry'.format(tx_hash))
+#    s_check = celery.signature(
+#            'cic_eth.admin.ctrl.check_lock',
+#            [
+#                tx_hash,
+#                chain_str,
+#                LockEnum.QUEUE,
+#                tx['from'],
+#                ],
+#            queue=queue,
+#            )
+##    s_resume = celery.signature(
+##            'cic_eth.eth.tx.resume_tx',
+##            [
+##                chain_str,
+##                ],
+##            queue=queue,
+##            )
+#
+##    s_retry_status = celery.signature(
+##            'cic_eth.queue.state.set_ready',
+##            [],
+##            queue=queue,
+##            )
+#    s_resend = celery.signature(
+#            'cic_eth.eth.gas.resend_with_higher_gas',
+#            [
+#                chain_str,
+#                ],
+#            queue=queue,
+#            )
+#
+#    #s_resume.link(s_retry_status)
+#    #s_check.link(s_resume)
+#    s_check.link(s_resend)
+#    s_check.apply_async()


def main():
-    #o = block_latest()
    conn = RPCConnection.connect(chain_spec, 'default')
-    #block = conn.do(o)
    syncer = RetrySyncer(conn, chain_spec, straggler_delay, batch_size=config.get('_BATCH_SIZE'))
    syncer.backend.set(0, 0)
-    syncer.add_filter(StragglerFilter(chain_spec, queue=queue))
+    fltr = StragglerFilter(chain_spec, queue=queue)
+    syncer.add_filter(fltr)
    syncer.loop(float(straggler_delay), conn)

View File

@@ -0,0 +1,92 @@
# standard imports
import logging
import datetime

# external imports
from chainsyncer.driver import HeadSyncer
from chainsyncer.backend import MemBackend
from chainsyncer.error import NoBlockForYou
from chainlib.eth.block import (
        block_by_number,
        block_latest,
        Block,
        )
from chainlib.eth.tx import (
        unpack,
        Tx,
        )
from cic_eth.queue.query import get_status_tx
from chainqueue.db.enum import StatusBits
from hexathon import strip_0x

# local imports
from cic_eth.db import SessionBase

logg = logging.getLogger()


class DbSessionMemBackend(MemBackend):

    def connect(self):
        self.db_session = SessionBase.create_session()
        return self.db_session


    def disconnect(self):
        self.db_session.close()
        self.db_session = None


class RetrySyncer(HeadSyncer):

    def __init__(self, conn, chain_spec, stalled_grace_seconds, batch_size=50, failed_grace_seconds=None):
        backend = DbSessionMemBackend(chain_spec, None)
        super(RetrySyncer, self).__init__(backend)
        self.chain_spec = chain_spec
        if failed_grace_seconds == None:
            failed_grace_seconds = stalled_grace_seconds
        self.stalled_grace_seconds = stalled_grace_seconds
        self.failed_grace_seconds = failed_grace_seconds
        self.batch_size = batch_size
        self.conn = conn


    def get(self, conn):
        o = block_latest()
        r = conn.do(o)
        (pair, flags) = self.backend.get()
        n = int(r, 16)
        if n == pair[0]:
            raise NoBlockForYou('block {} already checked'.format(n))
        o = block_by_number(n)
        r = conn.do(o)
        b = Block(r)
        return b


    def process(self, conn, block):
        before = datetime.datetime.utcnow() - datetime.timedelta(seconds=self.stalled_grace_seconds)
        session = SessionBase.create_session()
        stalled_txs = get_status_tx(
                self.chain_spec,
                StatusBits.IN_NETWORK.value,
                not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE,
                before=before,
                limit=self.batch_size,
                session=session,
                )
        session.close()
#        stalled_txs = get_upcoming_tx(
#                status=StatusBits.IN_NETWORK.value,
#                not_status=StatusBits.FINAL | StatusBits.MANUAL | StatusBits.OBSOLETE,
#                before=before,
#                limit=self.batch_size,
#                )
        for tx_signed_raw_hex in stalled_txs.values():
            tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
            tx_src = unpack(tx_signed_raw_bytes, self.chain_spec)
            tx = Tx(tx_src)
            self.filter.apply(self.conn, block, tx)
        self.backend.set(block.number, 0)
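A hedged sketch of how the syncer is wired together, following the retrier daemon earlier in this merge request; the wrapper name is illustrative, and chain_spec, straggler_delay and queue are expected to come from configuration:

from chainlib.connection import RPCConnection

from cic_eth.sync.retry import RetrySyncer
from cic_eth.runnable.daemons.filters.straggler import StragglerFilter


def run_retrier(chain_spec, straggler_delay, batch_size=50, queue='cic-eth'):
    conn = RPCConnection.connect(chain_spec, 'default')
    syncer = RetrySyncer(conn, chain_spec, straggler_delay, batch_size=batch_size)
    syncer.backend.set(0, 0)
    syncer.add_filter(StragglerFilter(chain_spec, queue=queue))
    # poll once per straggler_delay seconds, resubmitting stalled txs each pass
    syncer.loop(float(straggler_delay), conn)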

View File

@@ -10,7 +10,7 @@ version = (
        0,
        11,
        0,
-        'alpha.3',
+        'alpha.4',
        )

version_object = semver.VersionInfo(

View File

@@ -29,7 +29,7 @@ RUN /usr/local/bin/python -m pip install --upgrade pip
# python merge_requirements.py | tee merged_requirements.txt
#RUN cd cic-base && \
# pip install $pip_extra_index_url_flag -r ./merged_requirements.txt
-RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a57
+RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a60

COPY cic-eth/scripts/ scripts/
COPY cic-eth/setup.cfg cic-eth/setup.py ./

View File

@ -1,4 +1,4 @@
cic-base~=0.1.2a57 cic-base~=0.1.2a60
celery==4.4.7 celery==4.4.7
crypto-dev-signer~=0.4.14a17 crypto-dev-signer~=0.4.14a17
confini~=0.3.6rc3 confini~=0.3.6rc3
@ -16,10 +16,10 @@ semver==2.13.0
websocket-client==0.57.0 websocket-client==0.57.0
moolb~=0.1.1b2 moolb~=0.1.1b2
eth-address-index~=0.1.1a7 eth-address-index~=0.1.1a7
chainlib~=0.0.2a1 chainlib~=0.0.2a4
hexathon~=0.0.1a7 hexathon~=0.0.1a7
chainsyncer~=0.0.1a21 chainsyncer~=0.0.1a21
chainqueue~=0.0.1a3 chainqueue~=0.0.1a5
pysha3==1.0.2 pysha3==1.0.2
coincurve==15.0.0 coincurve==15.0.0
sarafu-faucet~=0.0.2a16 sarafu-faucet~=0.0.2a16

View File

@@ -37,6 +37,7 @@ packages =
    cic_eth.runnable.daemons
    cic_eth.runnable.daemons.filters
    cic_eth.callbacks
+    cic_eth.sync
scripts =
    ./scripts/migrate.py

View File

@@ -0,0 +1,89 @@
# external imports
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import OverrideNonceOracle
from chainlib.eth.tx import (
        TxFormat,
        unpack,
        Tx,
        )
from chainlib.eth.gas import (
        Gas,
        OverrideGasOracle,
        )
from chainlib.eth.block import (
        block_latest,
        block_by_number,
        Block,
        )
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits
from chainqueue.tx import create as queue_create
from chainqueue.state import (
        set_reserved,
        set_ready,
        set_sent,
        )
from hexathon import strip_0x

# local imports
from cic_eth.runnable.daemons.filters.straggler import StragglerFilter
from cic_eth.eth.gas import cache_gas_data


def test_tx(
        default_chain_spec,
        init_database,
        eth_rpc,
        eth_signer,
        agent_roles,
        celery_worker,
        ):

    rpc = RPCConnection.connect(default_chain_spec, 'default')
    nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
    gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
            default_chain_spec,
            42,
            agent_roles['ALICE'],
            tx_hash_hex,
            tx_signed_raw_hex,
            session=init_database,
            )
    cache_gas_data(
            tx_hash_hex,
            tx_signed_raw_hex,
            default_chain_spec.asdict(),
            )

    set_ready(default_chain_spec, tx_hash_hex, session=init_database)
    set_reserved(default_chain_spec, tx_hash_hex, session=init_database)
    set_sent(default_chain_spec, tx_hash_hex, session=init_database)

    fltr = StragglerFilter(default_chain_spec, None)

    o = block_latest()
    r = eth_rpc.do(o)
    o = block_by_number(r, include_tx=False)
    r = eth_rpc.do(o)
    block = Block(r)
    block.txs = [tx_hash_hex]

    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx_src = unpack(tx_signed_raw_bytes, default_chain_spec)
    tx = Tx(tx_src, block=block)

    t = fltr.filter(None, block, tx, db_session=init_database)
    tx_hash_hex_successor = t.get_leaf()
    assert t.successful()
    assert tx_hash_hex_successor != tx_hash_hex

    otx = Otx.load(tx_hash_hex, session=init_database)
    assert otx.status & StatusBits.OBSOLETE > 0
    assert otx.status & (StatusBits.FINAL | StatusBits.QUEUED | StatusBits.RESERVED) == 0

    otx = Otx.load(tx_hash_hex_successor, session=init_database)
    assert otx.status == StatusBits.QUEUED

View File

@@ -0,0 +1,110 @@
# external imports
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import OverrideNonceOracle
from chainlib.eth.tx import (
        TxFormat,
        unpack,
        Tx,
        )
from chainlib.eth.gas import (
        Gas,
        OverrideGasOracle,
        )
from chainlib.eth.block import (
        block_latest,
        block_by_number,
        Block,
        )
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits
from chainqueue.tx import create as queue_create
from chainqueue.state import (
        set_reserved,
        set_ready,
        set_sent,
        )
from hexathon import strip_0x

# local imports
from cic_eth.runnable.daemons.filters.tx import TxFilter
from cic_eth.eth.gas import cache_gas_data


def test_tx(
        default_chain_spec,
        init_database,
        eth_rpc,
        eth_signer,
        agent_roles,
        celery_worker,
        ):

    rpc = RPCConnection.connect(default_chain_spec, 'default')
    nonce_oracle = OverrideNonceOracle(agent_roles['ALICE'], 42)
    gas_oracle = OverrideGasOracle(price=1000000000, limit=21000)
    c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
            default_chain_spec,
            42,
            agent_roles['ALICE'],
            tx_hash_hex,
            tx_signed_raw_hex,
            session=init_database,
            )
    cache_gas_data(
            tx_hash_hex,
            tx_signed_raw_hex,
            default_chain_spec.asdict(),
            )
    set_ready(default_chain_spec, tx_hash_hex, session=init_database)
    set_reserved(default_chain_spec, tx_hash_hex, session=init_database)
    set_sent(default_chain_spec, tx_hash_hex, session=init_database)

    tx_hash_hex_orig = tx_hash_hex

    gas_oracle = OverrideGasOracle(price=1100000000, limit=21000)
    (tx_hash_hex, tx_signed_raw_hex) = c.create(agent_roles['ALICE'], agent_roles['BOB'], 100 * (10 ** 6), tx_format=TxFormat.RLP_SIGNED)
    queue_create(
            default_chain_spec,
            42,
            agent_roles['ALICE'],
            tx_hash_hex,
            tx_signed_raw_hex,
            session=init_database,
            )
    cache_gas_data(
            tx_hash_hex,
            tx_signed_raw_hex,
            default_chain_spec.asdict(),
            )
    set_ready(default_chain_spec, tx_hash_hex, session=init_database)
    set_reserved(default_chain_spec, tx_hash_hex, session=init_database)
    set_sent(default_chain_spec, tx_hash_hex, session=init_database)

    fltr = TxFilter(default_chain_spec, None)

    o = block_latest()
    r = eth_rpc.do(o)
    o = block_by_number(r, include_tx=False)
    r = eth_rpc.do(o)
    block = Block(r)
    block.txs = [tx_hash_hex]

    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx_src = unpack(tx_signed_raw_bytes, default_chain_spec)
    tx = Tx(tx_src, block=block)

    t = fltr.filter(eth_rpc, block, tx, db_session=init_database)
    t.get()
    assert t.successful()

    otx = Otx.load(tx_hash_hex_orig, session=init_database)
    assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
    assert otx.status & StatusBits.FINAL == StatusBits.FINAL

    otx = Otx.load(tx_hash_hex, session=init_database)
    assert otx.status & StatusBits.OBSOLETE == 0
    assert otx.status & StatusBits.FINAL == StatusBits.FINAL

View File

@@ -1,7 +1,7 @@
# standard imports
import semver

-version = (0, 3, 0, 'alpha.7')
+version = (0, 3, 0, 'alpha.8')

version_object = semver.VersionInfo(
        major=version[0],

View File

@ -1,4 +1,4 @@
cic_base[full_graph]~=0.1.2a46 cic_base[full_graph]~=0.1.2a58
cic-eth~=0.10.1b1 cic-eth~=0.11.0a4
cic-notify~=0.4.0a3 cic-notify~=0.4.0a3
cic-types~=0.1.0a8 cic-types~=0.1.0a10

View File

@@ -57,8 +57,8 @@ WORKDIR /home/grassroots
USER grassroots

ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433
-ARG cic_base_version=0.1.2a57
-ARG cic_eth_version=0.11.0a3
+ARG cic_base_version=0.1.2a60
+ARG cic_eth_version=0.11.0a4
ARG sarafu_faucet_version=0.0.2a16
ARG cic_contracts_version=0.0.2a2
RUN pip install --user --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version \

View File

@@ -96,7 +96,6 @@ def genId(addr, typ):
def genDate():
-    logg.info(ts_then)
    ts = random.randint(ts_then, ts_now)
    return datetime.datetime.fromtimestamp(ts).timestamp()

View File

@@ -117,7 +117,7 @@ class Handler:
        self.user_dir = user_dir
        self.balances = balances
        self.chain_spec = chain_spec
-        self.tx_factory = ERC20(signer, gas_oracle, nonce_oracle, chain_spec.network_id())
+        self.tx_factory = ERC20(chain_spec, signer, gas_oracle, nonce_oracle)


    def name(self):

View File

@@ -36,7 +36,7 @@ argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
argparser.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback')
argparser.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback')
-argparser.add_argument('--batch-size', dest='batch_size', default=80, type=int, help='burst size of sending transactions to node')
+argparser.add_argument('--batch-size', dest='batch_size', default=100, type=int, help='burst size of sending transactions to node') # batch size should be slightly below cumulative gas limit worth, eg 80000 gas txs with 8000000 limit is a bit less than 100 batch size
argparser.add_argument('--batch-delay', dest='batch_delay', default=2, type=int, help='seconds delay between batches')
argparser.add_argument('--timeout', default=60.0, type=float, help='Callback timeout')
argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue')
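A quick sketch of the sizing arithmetic in the inline comment above: with the 8,000,000 block gas limit and roughly 80,000 gas per transaction cited there, one block fits at most 100 such transactions, so the batch default sits at or just below that. The helper name and headroom factor are illustrative:

def suggest_batch_size(block_gas_limit=8000000, gas_per_tx=80000, headroom=0.95):
    # 8000000 // 80000 == 100; keep a margin below a full block's worth
    return int((block_gas_limit // gas_per_tx) * headroom)

# suggest_batch_size() -> 95, i.e. "a bit less than 100"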

View File

@ -1,5 +1,5 @@
cic-base[full_graph]==0.1.2a57 cic-base[full_graph]==0.1.2a60
sarafu-faucet==0.0.2a16 sarafu-faucet==0.0.2a16
cic-eth==0.11.0a3 cic-eth==0.11.0a4
cic-types==0.1.0a10 cic-types==0.1.0a10
crypto-dev-signer==0.4.14a17 crypto-dev-signer==0.4.14a17

View File

@@ -256,6 +256,7 @@ class Verifier:
    def verify_gas(self, address, balance_token=None):
        o = balance(address)
        r = self.conn.do(o)
+        logg.debug('wtf {}'.format(r))
        actual_balance = int(strip_0x(r), 16)
        if actual_balance == 0:
            raise VerifierError((address, actual_balance), 'gas')

View File

@@ -358,7 +358,7 @@ services:
      CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis}
      CELERY_RESULT_URL: ${CELERY_RESULT_URL:-redis://redis}
      TASKS_TRANSFER_CALLBACKS: $TASKS_TRANSFER_CALLBACKS
-      CIC_TX_RETRY_DELAY: 15
+      CIC_TX_RETRY_DELAY: 60
      BATCH_SIZE: ${RETRIER_BATCH_SIZE:-50}
      #DATABASE_DEBUG: 1
    depends_on: