Compare commits
23 Commits
lash/impor
...
lash/fix-t
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
db36ba997d
|
||
|
|
ae1502a651 | ||
|
|
5001113267 | ||
| 451079d004 | |||
| ba8a0b1953 | |||
| bbc948757f | |||
| ca8c1b1f27 | |||
| 854753f120 | |||
| daadbc27e9 | |||
| f37fa1dbcf | |||
|
|
ac264314c0 | ||
|
|
84c1d11b48 | ||
|
|
6fe87652ce | ||
|
|
8f65be16b1 | ||
|
|
34a48a6c6c | ||
|
|
c68d9d8404 | ||
| dd8d4b01e2 | |||
| 4d3cc85573 | |||
| 10717df7d1 | |||
| 3f2e0f5b2e | |||
|
|
42ae8e5ed3 | ||
|
|
96b4ad4a72 | ||
| 1274958493 |
@@ -8,6 +8,7 @@ from cic_registry import zero_address
|
||||
|
||||
# local imports
|
||||
from cic_eth.db.enum import LockEnum
|
||||
from cic_eth.db.models.base import SessionBase
|
||||
from cic_eth.db.models.lock import Lock
|
||||
from cic_eth.error import LockedError
|
||||
|
||||
@@ -116,9 +117,10 @@ def unlock_queue(chained_input, chain_str, address=zero_address):
|
||||
|
||||
@celery_app.task()
|
||||
def check_lock(chained_input, chain_str, lock_flags, address=None):
|
||||
r = Lock.check(chain_str, lock_flags, address=zero_address)
|
||||
session = SessionBase.create_session()
|
||||
r = Lock.check(chain_str, lock_flags, address=zero_address, session=session)
|
||||
if address != None:
|
||||
r |= Lock.check(chain_str, lock_flags, address=address)
|
||||
r |= Lock.check(chain_str, lock_flags, address=address, session=session)
|
||||
if r > 0:
|
||||
logg.debug('lock check {} has match {} for {}'.format(lock_flags, r, address))
|
||||
raise LockedError(r)
|
||||
|
||||
@@ -457,6 +457,7 @@ class AdminApi:
|
||||
tx_unpacked = unpack_signed_raw_tx(bytes.fromhex(tx['signed_tx'][2:]), chain_spec.chain_id())
|
||||
tx['gas_price'] = tx_unpacked['gasPrice']
|
||||
tx['gas_limit'] = tx_unpacked['gas']
|
||||
tx['data'] = tx_unpacked['data']
|
||||
|
||||
s = celery.signature(
|
||||
'cic_eth.queue.tx.get_state_log',
|
||||
|
||||
@@ -6,10 +6,13 @@
|
||||
# standard imports
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
# external imports
|
||||
import celery
|
||||
from cic_registry.chain import ChainSpec
|
||||
#from cic_registry.chain import ChainSpec
|
||||
from cic_registry import CICRegistry
|
||||
from chainlib.chain import ChainSpec
|
||||
|
||||
# local imports
|
||||
from cic_eth.eth.factory import TxFactory
|
||||
from cic_eth.db.enum import LockEnum
|
||||
|
||||
|
||||
@@ -18,7 +18,11 @@ logg = celery_app.log.get_default_logger()
|
||||
def redis(self, result, destination, status_code):
|
||||
(host, port, db, channel) = destination.split(':')
|
||||
r = redis_interface.Redis(host=host, port=port, db=db)
|
||||
s = json.dumps(result)
|
||||
data = {
|
||||
'root_id': self.request.root_id,
|
||||
'status': status_code,
|
||||
'result': result,
|
||||
}
|
||||
logg.debug('redis callback on host {} port {} db {} channel {}'.format(host, port, db, channel))
|
||||
r.publish(channel, s)
|
||||
r.publish(channel, json.dumps(data))
|
||||
r.close()
|
||||
|
||||
@@ -20,5 +20,10 @@ def tcp(self, result, destination, status_code):
|
||||
(host, port) = destination.split(':')
|
||||
logg.debug('tcp callback to {} {}'.format(host, port))
|
||||
s.connect((host, int(port)))
|
||||
s.send(json.dumps(result).encode('utf-8'))
|
||||
data = {
|
||||
'root_id': self.request.root_id,
|
||||
'status': status_code,
|
||||
'result': result,
|
||||
}
|
||||
s.send(json.dumps(data).encode('utf-8'))
|
||||
s.close()
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
"""Add chain syncer
|
||||
|
||||
Revision ID: ec40ac0974c1
|
||||
Revises: 6ac7a1dadc46
|
||||
Create Date: 2021-02-23 06:10:19.246304
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from chainsyncer.db.migrations.sqlalchemy import (
|
||||
chainsyncer_upgrade,
|
||||
chainsyncer_downgrade,
|
||||
)
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'ec40ac0974c1'
|
||||
down_revision = '6ac7a1dadc46'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
chainsyncer_upgrade(0, 0, 1)
|
||||
|
||||
|
||||
def downgrade():
|
||||
chainsyncer_downgrade(0, 0, 1)
|
||||
@@ -0,0 +1,28 @@
|
||||
"""Add chain syncer
|
||||
|
||||
Revision ID: ec40ac0974c1
|
||||
Revises: 6ac7a1dadc46
|
||||
Create Date: 2021-02-23 06:10:19.246304
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from chainsyncer.db.migrations.sqlalchemy import (
|
||||
chainsyncer_upgrade,
|
||||
chainsyncer_downgrade,
|
||||
)
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'ec40ac0974c1'
|
||||
down_revision = '6ac7a1dadc46'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
chainsyncer_upgrade(0, 0, 1)
|
||||
|
||||
|
||||
def downgrade():
|
||||
chainsyncer_downgrade(0, 0, 1)
|
||||
@@ -55,11 +55,9 @@ class Lock(SessionBase):
|
||||
:returns: New flag state of entry
|
||||
:rtype: number
|
||||
"""
|
||||
localsession = session
|
||||
if localsession == None:
|
||||
localsession = SessionBase.create_session()
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
q = localsession.query(Lock)
|
||||
q = session.query(Lock)
|
||||
#q = q.join(TxCache, isouter=True)
|
||||
q = q.filter(Lock.address==address)
|
||||
q = q.filter(Lock.blockchain==chain_str)
|
||||
@@ -71,7 +69,8 @@ class Lock(SessionBase):
|
||||
lock.address = address
|
||||
lock.blockchain = chain_str
|
||||
if tx_hash != None:
|
||||
q = localsession.query(Otx)
|
||||
session.flush()
|
||||
q = session.query(Otx)
|
||||
q = q.filter(Otx.tx_hash==tx_hash)
|
||||
otx = q.first()
|
||||
if otx != None:
|
||||
@@ -80,12 +79,11 @@ class Lock(SessionBase):
|
||||
lock.flags |= flags
|
||||
r = lock.flags
|
||||
|
||||
localsession.add(lock)
|
||||
localsession.commit()
|
||||
session.add(lock)
|
||||
session.commit()
|
||||
|
||||
SessionBase.release_session(session)
|
||||
|
||||
if session == None:
|
||||
localsession.close()
|
||||
|
||||
return r
|
||||
|
||||
|
||||
@@ -110,11 +108,9 @@ class Lock(SessionBase):
|
||||
:returns: New flag state of entry
|
||||
:rtype: number
|
||||
"""
|
||||
localsession = session
|
||||
if localsession == None:
|
||||
localsession = SessionBase.create_session()
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
q = localsession.query(Lock)
|
||||
q = session.query(Lock)
|
||||
#q = q.join(TxCache, isouter=True)
|
||||
q = q.filter(Lock.address==address)
|
||||
q = q.filter(Lock.blockchain==chain_str)
|
||||
@@ -124,14 +120,13 @@ class Lock(SessionBase):
|
||||
if lock != None:
|
||||
lock.flags &= ~flags
|
||||
if lock.flags == 0:
|
||||
localsession.delete(lock)
|
||||
session.delete(lock)
|
||||
else:
|
||||
localsession.add(lock)
|
||||
session.add(lock)
|
||||
r = lock.flags
|
||||
localsession.commit()
|
||||
session.commit()
|
||||
|
||||
if session == None:
|
||||
localsession.close()
|
||||
SessionBase.release_session(session)
|
||||
|
||||
return r
|
||||
|
||||
@@ -156,22 +151,20 @@ class Lock(SessionBase):
|
||||
:rtype: number
|
||||
"""
|
||||
|
||||
localsession = session
|
||||
if localsession == None:
|
||||
localsession = SessionBase.create_session()
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
q = localsession.query(Lock)
|
||||
q = session.query(Lock)
|
||||
#q = q.join(TxCache, isouter=True)
|
||||
q = q.filter(Lock.address==address)
|
||||
q = q.filter(Lock.blockchain==chain_str)
|
||||
q = q.filter(Lock.flags.op('&')(flags)==flags)
|
||||
lock = q.first()
|
||||
if session == None:
|
||||
localsession.close()
|
||||
|
||||
r = 0
|
||||
if lock != None:
|
||||
r = lock.flags & flags
|
||||
|
||||
SessionBase.release_session(session)
|
||||
return r
|
||||
|
||||
|
||||
|
||||
@@ -21,12 +21,9 @@ class Nonce(SessionBase):
|
||||
|
||||
@staticmethod
|
||||
def get(address, session=None):
|
||||
localsession = session
|
||||
if localsession == None:
|
||||
localsession = SessionBase.create_session()
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
|
||||
q = localsession.query(Nonce)
|
||||
q = session.query(Nonce)
|
||||
q = q.filter(Nonce.address_hex==address)
|
||||
nonce = q.first()
|
||||
|
||||
@@ -34,28 +31,29 @@ class Nonce(SessionBase):
|
||||
if nonce != None:
|
||||
nonce_value = nonce.nonce;
|
||||
|
||||
if session == None:
|
||||
localsession.close()
|
||||
SessionBase.release_session(session)
|
||||
|
||||
return nonce_value
|
||||
|
||||
|
||||
@staticmethod
|
||||
def __get(conn, address):
|
||||
r = conn.execute("SELECT nonce FROM nonce WHERE address_hex = '{}'".format(address))
|
||||
def __get(session, address):
|
||||
r = session.execute("SELECT nonce FROM nonce WHERE address_hex = '{}'".format(address))
|
||||
nonce = r.fetchone()
|
||||
session.flush()
|
||||
if nonce == None:
|
||||
return None
|
||||
return nonce[0]
|
||||
|
||||
|
||||
@staticmethod
|
||||
def __set(conn, address, nonce):
|
||||
conn.execute("UPDATE nonce set nonce = {} WHERE address_hex = '{}'".format(nonce, address))
|
||||
def __set(session, address, nonce):
|
||||
session.execute("UPDATE nonce set nonce = {} WHERE address_hex = '{}'".format(nonce, address))
|
||||
session.flush()
|
||||
|
||||
|
||||
@staticmethod
|
||||
def next(address, initial_if_not_exists=0):
|
||||
def next(address, initial_if_not_exists=0, session=None):
|
||||
"""Generate next nonce for the given address.
|
||||
|
||||
If there is no previous nonce record for the address, the nonce may be initialized to a specified value, or 0 if no value has been given.
|
||||
@@ -67,20 +65,32 @@ class Nonce(SessionBase):
|
||||
:returns: Nonce
|
||||
:rtype: number
|
||||
"""
|
||||
conn = Nonce.engine.connect()
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
SessionBase.release_session(session)
|
||||
|
||||
session.begin_nested()
|
||||
#conn = Nonce.engine.connect()
|
||||
if Nonce.transactional:
|
||||
conn.execute('BEGIN')
|
||||
conn.execute('LOCK TABLE nonce IN SHARE ROW EXCLUSIVE MODE')
|
||||
nonce = Nonce.__get(conn, address)
|
||||
#session.execute('BEGIN')
|
||||
session.execute('LOCK TABLE nonce IN SHARE ROW EXCLUSIVE MODE')
|
||||
session.flush()
|
||||
nonce = Nonce.__get(session, address)
|
||||
logg.debug('get nonce {} for address {}'.format(nonce, address))
|
||||
if nonce == None:
|
||||
nonce = initial_if_not_exists
|
||||
conn.execute("INSERT INTO nonce (nonce, address_hex) VALUES ({}, '{}')".format(nonce, address))
|
||||
session.execute("INSERT INTO nonce (nonce, address_hex) VALUES ({}, '{}')".format(nonce, address))
|
||||
session.flush()
|
||||
logg.debug('setting default nonce to {} for address {}'.format(nonce, address))
|
||||
Nonce.__set(conn, address, nonce+1)
|
||||
if Nonce.transactional:
|
||||
conn.execute('COMMIT')
|
||||
conn.close()
|
||||
Nonce.__set(session, address, nonce+1)
|
||||
#if Nonce.transactional:
|
||||
#session.execute('COMMIT')
|
||||
#session.execute('UNLOCK TABLE nonce')
|
||||
#conn.close()
|
||||
session.commit()
|
||||
session.commit()
|
||||
|
||||
SessionBase.release_session(session)
|
||||
return nonce
|
||||
|
||||
|
||||
|
||||
@@ -79,6 +79,13 @@ class Otx(SessionBase):
|
||||
return r
|
||||
|
||||
|
||||
def __status_not_set(self, status):
|
||||
r = not(self.status & status)
|
||||
if r:
|
||||
logg.warning('status bit {} not set on {}'.format(status.name, self.tx_hash))
|
||||
return r
|
||||
|
||||
|
||||
def set_block(self, block, session=None):
|
||||
"""Set block number transaction was mined in.
|
||||
|
||||
@@ -320,6 +327,32 @@ class Otx(SessionBase):
|
||||
SessionBase.release_session(session)
|
||||
|
||||
|
||||
def dequeue(self, session=None):
|
||||
"""Marks that a process to execute send attempt is underway
|
||||
|
||||
Only manipulates object, does not transaction or commit to backend.
|
||||
|
||||
:raises cic_eth.db.error.TxStateChangeError: State change represents a sequence of events that should not exist.
|
||||
"""
|
||||
if self.__status_not_set(StatusBits.QUEUED):
|
||||
return
|
||||
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
if self.status & StatusBits.FINAL:
|
||||
raise TxStateChangeError('SENDFAIL cannot be set on an entry with FINAL state set ({})'.format(status_str(self.status)))
|
||||
if self.status & StatusBits.IN_NETWORK:
|
||||
raise TxStateChangeError('SENDFAIL cannot be set on an entry with IN_NETWORK state set ({})'.format(status_str(self.status)))
|
||||
|
||||
self.__reset_status(StatusBits.QUEUED, session)
|
||||
|
||||
if self.tracing:
|
||||
self.__state_log(session=session)
|
||||
|
||||
SessionBase.release_session(session)
|
||||
|
||||
|
||||
|
||||
def minefail(self, block, session=None):
|
||||
"""Marks that transaction was mined but code execution did not succeed.
|
||||
|
||||
@@ -373,18 +406,6 @@ class Otx(SessionBase):
|
||||
else:
|
||||
self.__set_status(StatusEnum.OBSOLETED, session)
|
||||
|
||||
|
||||
# if confirmed:
|
||||
# if self.status != StatusEnum.OBSOLETED:
|
||||
# logg.warning('CANCELLED must follow OBSOLETED, but had {}'.format(StatusEnum(self.status).name))
|
||||
# #raise TxStateChangeError('CANCELLED must follow OBSOLETED, but had {}'.format(StatusEnum(self.status).name))
|
||||
# self.__set_status(StatusEnum.CANCELLED, session)
|
||||
# elif self.status != StatusEnum.OBSOLETED:
|
||||
# if self.status > StatusEnum.SENT:
|
||||
# logg.warning('OBSOLETED must follow PENDING, SENDFAIL or SENT, but had {}'.format(StatusEnum(self.status).name))
|
||||
# #raise TxStateChangeError('OBSOLETED must follow PENDING, SENDFAIL or SENT, but had {}'.format(StatusEnum(self.status).name))
|
||||
# self.__set_status(StatusEnum.OBSOLETED, session)
|
||||
|
||||
if self.tracing:
|
||||
self.__state_log(session=session)
|
||||
|
||||
|
||||
@@ -24,9 +24,10 @@ class AccountRole(SessionBase):
|
||||
tag = Column(Text)
|
||||
address_hex = Column(String(42))
|
||||
|
||||
|
||||
|
||||
# TODO:
|
||||
@staticmethod
|
||||
def get_address(tag):
|
||||
def get_address(tag, session):
|
||||
"""Get Ethereum address matching the given tag
|
||||
|
||||
:param tag: Tag
|
||||
@@ -34,14 +35,26 @@ class AccountRole(SessionBase):
|
||||
:returns: Ethereum address, or zero-address if tag does not exist
|
||||
:rtype: str, 0x-hex
|
||||
"""
|
||||
role = AccountRole.get_role(tag)
|
||||
if role == None:
|
||||
return zero_address
|
||||
return role.address_hex
|
||||
if session == None:
|
||||
raise ValueError('nested bind session calls will not succeed as the first call to release_session in the stack will leave the db object detached further down the stack. We will need additional reference count.')
|
||||
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
role = AccountRole.__get_role(tag, session)
|
||||
|
||||
r = zero_address
|
||||
if role != None:
|
||||
r = role.address_hex
|
||||
|
||||
session.flush()
|
||||
|
||||
SessionBase.release_session(session)
|
||||
|
||||
return r
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_role(tag):
|
||||
def get_role(tag, session=None):
|
||||
"""Get AccountRole model object matching the given tag
|
||||
|
||||
:param tag: Tag
|
||||
@@ -49,20 +62,27 @@ class AccountRole(SessionBase):
|
||||
:returns: Role object, if found
|
||||
:rtype: cic_eth.db.models.role.AccountRole
|
||||
"""
|
||||
session = AccountRole.create_session()
|
||||
role = AccountRole.__get_role(session, tag)
|
||||
session.close()
|
||||
#return role.address_hex
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
role = AccountRole.__get_role(tag, session)
|
||||
|
||||
session.flush()
|
||||
|
||||
SessionBase.release_session(session)
|
||||
|
||||
return role
|
||||
|
||||
|
||||
@staticmethod
|
||||
def __get_role(session, tag):
|
||||
return session.query(AccountRole).filter(AccountRole.tag==tag).first()
|
||||
def __get_role(tag, session):
|
||||
q = session.query(AccountRole)
|
||||
q = q.filter(AccountRole.tag==tag)
|
||||
r = q.first()
|
||||
return r
|
||||
|
||||
|
||||
@staticmethod
|
||||
def set(tag, address_hex):
|
||||
def set(tag, address_hex, session=None):
|
||||
"""Persist a tag to Ethereum address association.
|
||||
|
||||
This will silently overwrite the existing value.
|
||||
@@ -74,16 +94,18 @@ class AccountRole(SessionBase):
|
||||
:returns: Role object
|
||||
:rtype: cic_eth.db.models.role.AccountRole
|
||||
"""
|
||||
#session = AccountRole.create_session()
|
||||
#role = AccountRole.__get(session, tag)
|
||||
role = AccountRole.get_role(tag) #session, tag)
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
role = AccountRole.__get_role(tag, session)
|
||||
if role == None:
|
||||
role = AccountRole(tag)
|
||||
role.address_hex = address_hex
|
||||
#session.add(role)
|
||||
#session.commit()
|
||||
#session.close()
|
||||
return role #address_hex
|
||||
|
||||
session.flush()
|
||||
|
||||
SessionBase.release_session(session)
|
||||
|
||||
return role
|
||||
|
||||
|
||||
@staticmethod
|
||||
@@ -95,20 +117,17 @@ class AccountRole(SessionBase):
|
||||
:returns: Role tag, or None if no match
|
||||
:rtype: str or None
|
||||
"""
|
||||
localsession = session
|
||||
if localsession == None:
|
||||
localsession = SessionBase.create_session()
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
q = localsession.query(AccountRole)
|
||||
q = session.query(AccountRole)
|
||||
q = q.filter(AccountRole.address_hex==address)
|
||||
role = q.first()
|
||||
tag = None
|
||||
if role != None:
|
||||
tag = role.tag
|
||||
|
||||
if session == None:
|
||||
localsession.close()
|
||||
|
||||
SessionBase.release_session(session)
|
||||
|
||||
return tag
|
||||
|
||||
|
||||
|
||||
@@ -85,26 +85,27 @@ class TxCache(SessionBase):
|
||||
:param tx_hash_new: tx hash to associate the copied entry with
|
||||
:type tx_hash_new: str, 0x-hex
|
||||
"""
|
||||
localsession = SessionBase.bind_session(session)
|
||||
session = SessionBase.bind_session(session)
|
||||
|
||||
q = localsession.query(TxCache)
|
||||
q = session.query(TxCache)
|
||||
q = q.join(Otx)
|
||||
q = q.filter(Otx.tx_hash==tx_hash_original)
|
||||
txc = q.first()
|
||||
|
||||
if txc == None:
|
||||
SessionBase.release_session(localsession)
|
||||
SessionBase.release_session(session)
|
||||
raise NotLocalTxError('original {}'.format(tx_hash_original))
|
||||
if txc.block_number != None:
|
||||
SessionBase.release_session(localsession)
|
||||
SessionBase.release_session(session)
|
||||
raise TxStateChangeError('cannot clone tx cache of confirmed tx {}'.format(tx_hash_original))
|
||||
|
||||
q = localsession.query(Otx)
|
||||
session.flush()
|
||||
q = session.query(Otx)
|
||||
q = q.filter(Otx.tx_hash==tx_hash_new)
|
||||
otx = q.first()
|
||||
|
||||
if otx == None:
|
||||
SessionBase.release_session(localsession)
|
||||
SessionBase.release_session(session)
|
||||
raise NotLocalTxError('new {}'.format(tx_hash_new))
|
||||
|
||||
txc_new = TxCache(
|
||||
@@ -115,18 +116,21 @@ class TxCache(SessionBase):
|
||||
txc.destination_token_address,
|
||||
int(txc.from_value),
|
||||
int(txc.to_value),
|
||||
session=session,
|
||||
)
|
||||
localsession.add(txc_new)
|
||||
localsession.commit()
|
||||
session.add(txc_new)
|
||||
session.commit()
|
||||
|
||||
SessionBase.release_session(localsession)
|
||||
SessionBase.release_session(session)
|
||||
|
||||
|
||||
def __init__(self, tx_hash, sender, recipient, source_token_address, destination_token_address, from_value, to_value, block_number=None, tx_index=None, session=None):
|
||||
localsession = SessionBase.bind_session(session)
|
||||
tx = localsession.query(Otx).filter(Otx.tx_hash==tx_hash).first()
|
||||
session = SessionBase.bind_session(session)
|
||||
q = session.query(Otx)
|
||||
q = q.filter(Otx.tx_hash==tx_hash)
|
||||
tx = q.first()
|
||||
if tx == None:
|
||||
SessionBase.release_session(localsession)
|
||||
SessionBase.release_session(session)
|
||||
raise FileNotFoundError('outgoing transaction record unknown {} (add a Tx first)'.format(tx_hash))
|
||||
self.otx_id = tx.id
|
||||
|
||||
@@ -143,5 +147,5 @@ class TxCache(SessionBase):
|
||||
self.date_updated = self.date_created
|
||||
self.date_checked = self.date_created
|
||||
|
||||
SessionBase.release_session(localsession)
|
||||
SessionBase.release_session(session)
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@ from cic_registry import CICRegistry
|
||||
from cic_registry.chain import ChainSpec
|
||||
from erc20_single_shot_faucet import Faucet
|
||||
from cic_registry import zero_address
|
||||
from hexathon import strip_0x
|
||||
|
||||
# local import
|
||||
from cic_eth.eth import RpcClient
|
||||
@@ -21,6 +22,7 @@ from cic_eth.db.models.role import AccountRole
|
||||
from cic_eth.db.models.tx import TxCache
|
||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||
from cic_eth.error import RoleMissingError
|
||||
from cic_eth.task import CriticalSQLAlchemyTask
|
||||
|
||||
#logg = logging.getLogger(__name__)
|
||||
logg = logging.getLogger()
|
||||
@@ -34,6 +36,7 @@ class AccountTxFactory(TxFactory):
|
||||
self,
|
||||
address,
|
||||
chain_spec,
|
||||
session=None,
|
||||
):
|
||||
"""Register an Ethereum account address with the on-chain account registry
|
||||
|
||||
@@ -56,7 +59,7 @@ class AccountTxFactory(TxFactory):
|
||||
'gas': gas,
|
||||
'gasPrice': self.gas_price,
|
||||
'chainId': chain_spec.chain_id(),
|
||||
'nonce': self.next_nonce(),
|
||||
'nonce': self.next_nonce(session=session),
|
||||
'value': 0,
|
||||
})
|
||||
return tx_add
|
||||
@@ -66,6 +69,7 @@ class AccountTxFactory(TxFactory):
|
||||
self,
|
||||
address,
|
||||
chain_spec,
|
||||
session=None,
|
||||
):
|
||||
"""Trigger the on-chain faucet to disburse tokens to the provided Ethereum account
|
||||
|
||||
@@ -86,7 +90,7 @@ class AccountTxFactory(TxFactory):
|
||||
'gas': gas,
|
||||
'gasPrice': self.gas_price,
|
||||
'chainId': chain_spec.chain_id(),
|
||||
'nonce': self.next_nonce(),
|
||||
'nonce': self.next_nonce(session=session),
|
||||
'value': 0,
|
||||
})
|
||||
return tx_add
|
||||
@@ -101,11 +105,12 @@ def unpack_register(data):
|
||||
:returns: Parsed parameters
|
||||
:rtype: dict
|
||||
"""
|
||||
f = data[2:10]
|
||||
data = strip_0x(data)
|
||||
f = data[:8]
|
||||
if f != '0a3b0a4f':
|
||||
raise ValueError('Invalid account index register data ({})'.format(f))
|
||||
|
||||
d = data[10:]
|
||||
d = data[8:]
|
||||
return {
|
||||
'to': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
|
||||
}
|
||||
@@ -120,17 +125,19 @@ def unpack_gift(data):
|
||||
:returns: Parsed parameters
|
||||
:rtype: dict
|
||||
"""
|
||||
f = data[2:10]
|
||||
data = strip_0x(data)
|
||||
f = data[:8]
|
||||
if f != '63e4bff4':
|
||||
raise ValueError('Invalid account index register data ({})'.format(f))
|
||||
raise ValueError('Invalid gift data ({})'.format(f))
|
||||
|
||||
d = data[10:]
|
||||
d = data[8:]
|
||||
return {
|
||||
'to': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
|
||||
}
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
# TODO: Separate out nonce initialization task
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def create(password, chain_str):
|
||||
"""Creates and stores a new ethereum account in the keystore.
|
||||
|
||||
@@ -149,9 +156,13 @@ def create(password, chain_str):
|
||||
logg.debug('created account {}'.format(a))
|
||||
|
||||
# Initialize nonce provider record for account
|
||||
# TODO: this can safely be set to zero, since we are randomly creating account
|
||||
n = c.w3.eth.getTransactionCount(a, 'pending')
|
||||
session = SessionBase.create_session()
|
||||
o = session.query(Nonce).filter(Nonce.address_hex==a).first()
|
||||
q = session.query(Nonce)
|
||||
q = q.filter(Nonce.address_hex==a)
|
||||
o = q.first()
|
||||
session.flush()
|
||||
if o == None:
|
||||
o = Nonce()
|
||||
o.address_hex = a
|
||||
@@ -162,7 +173,7 @@ def create(password, chain_str):
|
||||
return a
|
||||
|
||||
|
||||
@celery_app.task(bind=True, throws=(RoleMissingError,))
|
||||
@celery_app.task(bind=True, throws=(RoleMissingError,), base=CriticalSQLAlchemyTask)
|
||||
def register(self, account_address, chain_str, writer_address=None):
|
||||
"""Creates a transaction to add the given address to the accounts index.
|
||||
|
||||
@@ -178,20 +189,22 @@ def register(self, account_address, chain_str, writer_address=None):
|
||||
"""
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
|
||||
session = SessionBase.create_session()
|
||||
if writer_address == None:
|
||||
writer_address = AccountRole.get_address('ACCOUNTS_INDEX_WRITER')
|
||||
writer_address = AccountRole.get_address('ACCOUNTS_INDEX_WRITER', session=session)
|
||||
|
||||
if writer_address == zero_address:
|
||||
session.close()
|
||||
raise RoleMissingError(account_address)
|
||||
|
||||
|
||||
logg.debug('adding account address {} to index; writer {}'.format(account_address, writer_address))
|
||||
queue = self.request.delivery_info['routing_key']
|
||||
|
||||
c = RpcClient(chain_spec, holder_address=writer_address)
|
||||
txf = AccountTxFactory(writer_address, c)
|
||||
|
||||
tx_add = txf.add(account_address, chain_spec)
|
||||
tx_add = txf.add(account_address, chain_spec, session=session)
|
||||
session.close()
|
||||
(tx_hash_hex, tx_signed_raw_hex) = sign_and_register_tx(tx_add, chain_str, queue, 'cic_eth.eth.account.cache_account_data')
|
||||
|
||||
gas_budget = tx_add['gas'] * tx_add['gasPrice']
|
||||
@@ -209,7 +222,7 @@ def register(self, account_address, chain_str, writer_address=None):
|
||||
return account_address
|
||||
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
@celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
|
||||
def gift(self, account_address, chain_str):
|
||||
"""Creates a transaction to invoke the faucet contract for the given address.
|
||||
|
||||
@@ -324,7 +337,7 @@ def cache_gift_data(
|
||||
return (tx_hash_hex, cache_id)
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def cache_account_data(
|
||||
tx_hash_hex,
|
||||
tx_signed_raw_hex,
|
||||
|
||||
@@ -32,10 +32,10 @@ class TxFactory:
|
||||
logg.debug('txfactory instance address {} gas price'.format(self.address, self.gas_price))
|
||||
|
||||
|
||||
def next_nonce(self):
|
||||
def next_nonce(self, session=None):
|
||||
"""Returns the current cached nonce value, and increments it for next transaction.
|
||||
|
||||
:returns: Nonce
|
||||
:rtype: number
|
||||
"""
|
||||
return self.nonce_oracle.next()
|
||||
return self.nonce_oracle.next(session=session)
|
||||
|
||||
@@ -52,7 +52,10 @@ class GasOracle():
|
||||
:returns: Etheerum account address
|
||||
:rtype: str, 0x-hex
|
||||
"""
|
||||
return AccountRole.get_address('GAS_GIFTER')
|
||||
session = SessionBase.create_session()
|
||||
a = AccountRole.get_address('GAS_GIFTER', session)
|
||||
session.close()
|
||||
return a
|
||||
|
||||
|
||||
def gas_price(self, category='safe'):
|
||||
|
||||
@@ -14,10 +14,10 @@ class NonceOracle():
|
||||
self.default_nonce = default_nonce
|
||||
|
||||
|
||||
def next(self):
|
||||
def next(self, session=None):
|
||||
"""Get next unique nonce.
|
||||
|
||||
:returns: Nonce
|
||||
:rtype: number
|
||||
"""
|
||||
return Nonce.next(self.address, self.default_nonce)
|
||||
return Nonce.next(self.address, self.default_nonce, session=session)
|
||||
|
||||
@@ -33,7 +33,7 @@ def sign_tx(tx, chain_str):
|
||||
return (tx_hash_hex, tx_transfer_signed['raw'],)
|
||||
|
||||
|
||||
def sign_and_register_tx(tx, chain_str, queue, cache_task=None):
|
||||
def sign_and_register_tx(tx, chain_str, queue, cache_task=None, session=None):
|
||||
"""Signs the provided transaction, and adds it to the transaction queue cache (with status PENDING).
|
||||
|
||||
:param tx: Standard ethereum transaction data
|
||||
@@ -44,6 +44,7 @@ def sign_and_register_tx(tx, chain_str, queue, cache_task=None):
|
||||
:type queue: str
|
||||
:param cache_task: Cache task to call with signed transaction. If None, no task will be called.
|
||||
:type cache_task: str
|
||||
:raises: sqlalchemy.exc.DatabaseError
|
||||
:returns: Tuple; Transaction hash, signed raw transaction data
|
||||
:rtype: tuple
|
||||
"""
|
||||
@@ -51,25 +52,13 @@ def sign_and_register_tx(tx, chain_str, queue, cache_task=None):
|
||||
|
||||
logg.debug('adding queue tx {}'.format(tx_hash_hex))
|
||||
|
||||
# s = celery.signature(
|
||||
# 'cic_eth.queue.tx.create',
|
||||
# [
|
||||
# tx['nonce'],
|
||||
# tx['from'],
|
||||
# tx_hash_hex,
|
||||
# tx_signed_raw_hex,
|
||||
# chain_str,
|
||||
# ],
|
||||
# queue=queue,
|
||||
# )
|
||||
|
||||
# TODO: consider returning this as a signature that consequtive tasks can be linked to
|
||||
queue_create(
|
||||
tx['nonce'],
|
||||
tx['from'],
|
||||
tx_hash_hex,
|
||||
tx_signed_raw_hex,
|
||||
chain_str,
|
||||
session=session,
|
||||
)
|
||||
|
||||
if cache_task != None:
|
||||
|
||||
@@ -8,6 +8,8 @@ import web3
|
||||
from cic_registry import CICRegistry
|
||||
from cic_registry import zero_address
|
||||
from cic_registry.chain import ChainSpec
|
||||
from hexathon import strip_0x
|
||||
from chainlib.status import Status as TxStatus
|
||||
|
||||
# platform imports
|
||||
from cic_eth.db.models.tx import TxCache
|
||||
@@ -19,6 +21,10 @@ from cic_eth.eth.task import create_check_gas_and_send_task
|
||||
from cic_eth.eth.factory import TxFactory
|
||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||
from cic_eth.ext.address import translate_address
|
||||
from cic_eth.task import (
|
||||
CriticalSQLAlchemyTask,
|
||||
CriticalWeb3Task,
|
||||
)
|
||||
|
||||
celery_app = celery.current_app
|
||||
logg = logging.getLogger()
|
||||
@@ -120,11 +126,12 @@ def unpack_transfer(data):
|
||||
:returns: Parsed parameters
|
||||
:rtype: dict
|
||||
"""
|
||||
f = data[2:10]
|
||||
data = strip_0x(data)
|
||||
f = data[:8]
|
||||
if f != contract_function_signatures['transfer']:
|
||||
raise ValueError('Invalid transfer data ({})'.format(f))
|
||||
|
||||
d = data[10:]
|
||||
d = data[8:]
|
||||
return {
|
||||
'to': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
|
||||
'amount': int(d[64:], 16)
|
||||
@@ -140,11 +147,12 @@ def unpack_transferfrom(data):
|
||||
:returns: Parsed parameters
|
||||
:rtype: dict
|
||||
"""
|
||||
f = data[2:10]
|
||||
data = strip_0x(data)
|
||||
f = data[:8]
|
||||
if f != contract_function_signatures['transferfrom']:
|
||||
raise ValueError('Invalid transferFrom data ({})'.format(f))
|
||||
|
||||
d = data[10:]
|
||||
d = data[8:]
|
||||
return {
|
||||
'from': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
|
||||
'to': web3.Web3.toChecksumAddress('0x' + d[128-40:128]),
|
||||
@@ -161,18 +169,19 @@ def unpack_approve(data):
|
||||
:returns: Parsed parameters
|
||||
:rtype: dict
|
||||
"""
|
||||
f = data[2:10]
|
||||
data = strip_0x(data)
|
||||
f = data[:8]
|
||||
if f != contract_function_signatures['approve']:
|
||||
raise ValueError('Invalid approval data ({})'.format(f))
|
||||
|
||||
d = data[10:]
|
||||
d = data[8:]
|
||||
return {
|
||||
'to': web3.Web3.toChecksumAddress('0x' + d[64-40:64]),
|
||||
'amount': int(d[64:], 16)
|
||||
}
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalWeb3Task)
|
||||
def balance(tokens, holder_address, chain_str):
|
||||
"""Return token balances for a list of tokens for given address
|
||||
|
||||
@@ -199,7 +208,7 @@ def balance(tokens, holder_address, chain_str):
|
||||
return tokens
|
||||
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
@celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
|
||||
def transfer(self, tokens, holder_address, receiver_address, value, chain_str):
|
||||
"""Transfer ERC20 tokens between addresses
|
||||
|
||||
@@ -253,7 +262,7 @@ def transfer(self, tokens, holder_address, receiver_address, value, chain_str):
|
||||
return tx_hash_hex
|
||||
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
@celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
|
||||
def approve(self, tokens, holder_address, spender_address, value, chain_str):
|
||||
"""Approve ERC20 transfer on behalf of holder address
|
||||
|
||||
@@ -307,7 +316,7 @@ def approve(self, tokens, holder_address, spender_address, value, chain_str):
|
||||
return tx_hash_hex
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalWeb3Task)
|
||||
def resolve_tokens_by_symbol(token_symbols, chain_str):
|
||||
"""Returns contract addresses of an array of ERC20 token symbols
|
||||
|
||||
@@ -330,7 +339,7 @@ def resolve_tokens_by_symbol(token_symbols, chain_str):
|
||||
return tokens
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def otx_cache_transfer(
|
||||
tx_hash_hex,
|
||||
tx_signed_raw_hex,
|
||||
@@ -354,7 +363,7 @@ def otx_cache_transfer(
|
||||
return txc
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def cache_transfer_data(
|
||||
tx_hash_hex,
|
||||
tx,
|
||||
@@ -390,7 +399,7 @@ def cache_transfer_data(
|
||||
return (tx_hash_hex, cache_id)
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def otx_cache_approve(
|
||||
tx_hash_hex,
|
||||
tx_signed_raw_hex,
|
||||
@@ -414,7 +423,7 @@ def otx_cache_approve(
|
||||
return txc
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def cache_approve_data(
|
||||
tx_hash_hex,
|
||||
tx,
|
||||
@@ -470,6 +479,8 @@ class ExtendedTx:
|
||||
self.destination_token_symbol = ''
|
||||
self.source_token_decimals = ExtendedTx._default_decimals
|
||||
self.destination_token_decimals = ExtendedTx._default_decimals
|
||||
self.status = TxStatus.PENDING.name
|
||||
self.status_code = TxStatus.PENDING.value
|
||||
|
||||
|
||||
def set_actors(self, sender, recipient, trusted_declarator_addresses=None):
|
||||
@@ -497,10 +508,18 @@ class ExtendedTx:
|
||||
self.destination_token_value = destination_value
|
||||
|
||||
|
||||
def set_status(self, n):
|
||||
if n:
|
||||
self.status = TxStatus.ERROR.name
|
||||
else:
|
||||
self.status = TxStatus.SUCCESS.name
|
||||
self.status_code = n
|
||||
|
||||
|
||||
def to_dict(self):
|
||||
o = {}
|
||||
for attr in dir(self):
|
||||
if attr[0] == '_' or attr in ['set_actors', 'set_tokens', 'to_dict']:
|
||||
if attr[0] == '_' or attr in ['set_actors', 'set_tokens', 'set_status', 'to_dict']:
|
||||
continue
|
||||
o[attr] = getattr(self, attr)
|
||||
return o
|
||||
|
||||
@@ -32,6 +32,10 @@ from cic_eth.eth.nonce import NonceOracle
|
||||
from cic_eth.error import AlreadyFillingGasError
|
||||
from cic_eth.eth.util import tx_hex_string
|
||||
from cic_eth.admin.ctrl import lock_send
|
||||
from cic_eth.task import (
|
||||
CriticalSQLAlchemyTask,
|
||||
CriticalWeb3Task,
|
||||
)
|
||||
|
||||
celery_app = celery.current_app
|
||||
logg = logging.getLogger()
|
||||
@@ -40,7 +44,7 @@ MAX_NONCE_ATTEMPTS = 3
|
||||
|
||||
|
||||
# TODO this function is too long
|
||||
@celery_app.task(bind=True, throws=(OutOfGasError))
|
||||
@celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyTask)
|
||||
def check_gas(self, tx_hashes, chain_str, txs=[], address=None, gas_required=None):
|
||||
"""Check the gas level of the sender address of a transaction.
|
||||
|
||||
@@ -131,7 +135,7 @@ def check_gas(self, tx_hashes, chain_str, txs=[], address=None, gas_required=Non
|
||||
|
||||
|
||||
# TODO: chain chainable transactions that use hashes as inputs may be chained to this function to output signed txs instead.
|
||||
@celery_app.task(bind=True)
|
||||
@celery_app.task(bind=True, base=CriticalSQLAlchemyTask)
|
||||
def hashes_to_txs(self, tx_hashes):
|
||||
"""Return a list of raw signed transactions from the local transaction queue corresponding to a list of transaction hashes.
|
||||
|
||||
@@ -313,7 +317,8 @@ class ParityNodeHandler:
|
||||
return (t, PermanentTxError, 'Fubar {}'.format(tx_hex_string(tx_hex, self.chain_spec.chain_id())))
|
||||
|
||||
|
||||
@celery_app.task(bind=True)
|
||||
# TODO: A lock should be introduced to ensure that the send status change and the transaction send is atomic.
|
||||
@celery_app.task(bind=True, base=CriticalWeb3Task)
|
||||
def send(self, txs, chain_str):
|
||||
"""Send transactions to the network.
|
||||
|
||||
@@ -351,13 +356,6 @@ def send(self, txs, chain_str):
|
||||
|
||||
c = RpcClient(chain_spec)
|
||||
r = None
|
||||
try:
|
||||
r = c.w3.eth.send_raw_transaction(tx_hex)
|
||||
except Exception as e:
|
||||
raiser = ParityNodeHandler(chain_spec, queue)
|
||||
(t, e, m) = raiser.handle(e, tx_hash_hex, tx_hex)
|
||||
raise e(m)
|
||||
|
||||
s_set_sent = celery.signature(
|
||||
'cic_eth.queue.tx.set_sent_status',
|
||||
[
|
||||
@@ -366,6 +364,14 @@ def send(self, txs, chain_str):
|
||||
],
|
||||
queue=queue,
|
||||
)
|
||||
try:
|
||||
r = c.w3.eth.send_raw_transaction(tx_hex)
|
||||
except requests.exceptions.ConnectionError as e:
|
||||
raise(e)
|
||||
except Exception as e:
|
||||
raiser = ParityNodeHandler(chain_spec, queue)
|
||||
(t, e, m) = raiser.handle(e, tx_hash_hex, tx_hex)
|
||||
raise e(m)
|
||||
s_set_sent.apply_async()
|
||||
|
||||
tx_tail = txs[1:]
|
||||
@@ -380,7 +386,8 @@ def send(self, txs, chain_str):
|
||||
return r.hex()
|
||||
|
||||
|
||||
@celery_app.task(bind=True, throws=(AlreadyFillingGasError))
|
||||
# TODO: if this method fails the nonce will be out of sequence. session needs to be extended to include the queue create, so that nonce is rolled back if the second sql query fails. Better yet, split each state change into separate tasks.
|
||||
@celery_app.task(bind=True, throws=(web3.exceptions.TransactionNotFound,), base=CriticalWeb3Task)
|
||||
def refill_gas(self, recipient_address, chain_str):
|
||||
"""Executes a native token transaction to fund the recipient's gas expenditures.
|
||||
|
||||
@@ -399,7 +406,7 @@ def refill_gas(self, recipient_address, chain_str):
|
||||
q = session.query(Otx.tx_hash)
|
||||
q = q.join(TxCache)
|
||||
q = q.filter(Otx.status.op('&')(StatusBits.FINAL.value)==0)
|
||||
q = q.filter(TxCache.from_value!='0x00')
|
||||
q = q.filter(TxCache.from_value!=0)
|
||||
q = q.filter(TxCache.recipient==recipient_address)
|
||||
c = q.count()
|
||||
session.close()
|
||||
@@ -537,7 +544,7 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_str, gas=None, default_fa
|
||||
return tx_hash_hex
|
||||
|
||||
|
||||
@celery_app.task(bind=True, throws=(web3.exceptions.TransactionNotFound,))
|
||||
@celery_app.task(bind=True, throws=(web3.exceptions.TransactionNotFound,), base=CriticalWeb3Task)
|
||||
def sync_tx(self, tx_hash_hex, chain_str):
|
||||
|
||||
queue = self.request.delivery_info['routing_key']
|
||||
@@ -621,7 +628,7 @@ def resume_tx(self, txpending_hash_hex, chain_str):
|
||||
return txpending_hash_hex
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def otx_cache_parse_tx(
|
||||
tx_hash_hex,
|
||||
tx_signed_raw_hex,
|
||||
@@ -648,7 +655,7 @@ def otx_cache_parse_tx(
|
||||
return txc
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def cache_gas_refill_data(
|
||||
tx_hash_hex,
|
||||
tx,
|
||||
|
||||
@@ -149,6 +149,9 @@ def tx_collate(tx_batches, chain_str, offset, limit, newest_first=True):
|
||||
txs_by_block = {}
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
|
||||
if isinstance(tx_batches, dict):
|
||||
tx_batches = [tx_batches]
|
||||
|
||||
for b in tx_batches:
|
||||
for v in b.values():
|
||||
tx = None
|
||||
|
||||
@@ -14,6 +14,7 @@ from cic_eth.db.enum import (
|
||||
StatusBits,
|
||||
dead,
|
||||
)
|
||||
from cic_eth.task import CriticalSQLAlchemyTask
|
||||
|
||||
celery_app = celery.current_app
|
||||
|
||||
@@ -35,7 +36,7 @@ def __balance_outgoing_compatible(token_address, holder_address, chain_str):
|
||||
return delta
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def balance_outgoing(tokens, holder_address, chain_str):
|
||||
"""Retrieve accumulated value of unprocessed transactions sent from the given address.
|
||||
|
||||
@@ -73,7 +74,7 @@ def __balance_incoming_compatible(token_address, receiver_address, chain_str):
|
||||
return delta
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def balance_incoming(tokens, receipient_address, chain_str):
|
||||
"""Retrieve accumulated value of unprocessed transactions to be received by the given address.
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ from cic_registry.chain import ChainSpec
|
||||
from cic_eth.eth.rpc import RpcClient
|
||||
from cic_eth.db.models.otx import Otx
|
||||
from cic_eth.error import NotLocalTxError
|
||||
from cic_eth.task import CriticalSQLAlchemyAndWeb3Task
|
||||
|
||||
celery_app = celery.current_app
|
||||
|
||||
@@ -17,7 +18,7 @@ logg = logging.getLogger()
|
||||
|
||||
|
||||
# TODO: This method does not belong in the _queue_ module, it operates across queue and network
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyAndWeb3Task)
|
||||
def tx_times(tx_hash, chain_str):
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
c = RpcClient(chain_spec)
|
||||
|
||||
@@ -26,6 +26,7 @@ from cic_eth.db.enum import (
|
||||
is_alive,
|
||||
dead,
|
||||
)
|
||||
from cic_eth.task import CriticalSQLAlchemyTask
|
||||
from cic_eth.eth.util import unpack_signed_raw_tx # TODO: should not be in same sub-path as package that imports queue.tx
|
||||
from cic_eth.error import NotLocalTxError
|
||||
from cic_eth.error import LockedError
|
||||
@@ -86,7 +87,7 @@ def create(nonce, holder_address, tx_hash, signed_tx, chain_str, obsolete_predec
|
||||
|
||||
|
||||
# TODO: Replace set_* with single task for set status
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_sent_status(tx_hash, fail=False):
|
||||
"""Used to set the status after a send attempt
|
||||
|
||||
@@ -118,7 +119,7 @@ def set_sent_status(tx_hash, fail=False):
|
||||
return tx_hash
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_final_status(tx_hash, block=None, fail=False):
|
||||
"""Used to set the status of an incoming transaction result.
|
||||
|
||||
@@ -174,7 +175,7 @@ def set_final_status(tx_hash, block=None, fail=False):
|
||||
return tx_hash
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_cancel(tx_hash, manual=False):
|
||||
"""Used to set the status when a transaction is cancelled.
|
||||
|
||||
@@ -206,7 +207,7 @@ def set_cancel(tx_hash, manual=False):
|
||||
return tx_hash
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_rejected(tx_hash):
|
||||
"""Used to set the status when the node rejects sending a transaction to network
|
||||
|
||||
@@ -232,7 +233,7 @@ def set_rejected(tx_hash):
|
||||
return tx_hash
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_fubar(tx_hash):
|
||||
"""Used to set the status when an unexpected error occurs.
|
||||
|
||||
@@ -258,7 +259,7 @@ def set_fubar(tx_hash):
|
||||
return tx_hash
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_manual(tx_hash):
|
||||
"""Used to set the status when queue is manually changed
|
||||
|
||||
@@ -284,7 +285,7 @@ def set_manual(tx_hash):
|
||||
return tx_hash
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_ready(tx_hash):
|
||||
"""Used to mark a transaction as ready to be sent to network
|
||||
|
||||
@@ -310,7 +311,25 @@ def set_ready(tx_hash):
|
||||
return tx_hash
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_dequeue(tx_hash):
|
||||
session = SessionBase.create_session()
|
||||
o = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
|
||||
if o == None:
|
||||
session.close()
|
||||
raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))
|
||||
|
||||
session.flush()
|
||||
|
||||
o.dequeue(session=session)
|
||||
session.commit()
|
||||
session.close()
|
||||
|
||||
return tx_hash
|
||||
|
||||
|
||||
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def set_waitforgas(tx_hash):
|
||||
"""Used to set the status when a transaction must be deferred due to gas refill
|
||||
|
||||
@@ -336,7 +355,7 @@ def set_waitforgas(tx_hash):
|
||||
return tx_hash
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def get_state_log(tx_hash):
|
||||
|
||||
logs = []
|
||||
@@ -355,7 +374,7 @@ def get_state_log(tx_hash):
|
||||
return logs
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def get_tx_cache(tx_hash):
|
||||
"""Returns an aggregate dictionary of outgoing transaction data and metadata
|
||||
|
||||
@@ -404,7 +423,7 @@ def get_tx_cache(tx_hash):
|
||||
return tx
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def get_lock(address=None):
|
||||
"""Retrieve all active locks
|
||||
|
||||
@@ -442,7 +461,7 @@ def get_lock(address=None):
|
||||
return locks
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def get_tx(tx_hash):
|
||||
"""Retrieve a transaction queue record by transaction hash
|
||||
|
||||
@@ -453,7 +472,9 @@ def get_tx(tx_hash):
|
||||
:rtype: dict
|
||||
"""
|
||||
session = SessionBase.create_session()
|
||||
tx = session.query(Otx).filter(Otx.tx_hash==tx_hash).first()
|
||||
q = session.query(Otx)
|
||||
q = q.filter(Otx.tx_hash==tx_hash)
|
||||
tx = q.first()
|
||||
if tx == None:
|
||||
session.close()
|
||||
raise NotLocalTxError('queue does not contain tx hash {}'.format(tx_hash))
|
||||
@@ -469,7 +490,7 @@ def get_tx(tx_hash):
|
||||
return o
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def get_nonce_tx(nonce, sender, chain_id):
|
||||
"""Retrieve all transactions for address with specified nonce
|
||||
|
||||
@@ -652,7 +673,7 @@ def get_upcoming_tx(status=StatusEnum.READYSEND, recipient=None, before=None, ch
|
||||
return txs
|
||||
|
||||
|
||||
@celery_app.task()
|
||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||
def get_account_tx(address, as_sender=True, as_recipient=True, counterpart=None):
|
||||
"""Returns all local queue transactions for a given Ethereum address
|
||||
|
||||
|
||||
86
apps/cic-eth/cic_eth/registry.py
Normal file
86
apps/cic-eth/cic_eth/registry.py
Normal file
@@ -0,0 +1,86 @@
|
||||
# standard imports
|
||||
import logging
|
||||
import copy
|
||||
|
||||
# external imports
|
||||
from cic_registry import CICRegistry
|
||||
from eth_token_index import TokenUniqueSymbolIndex
|
||||
from eth_accounts_index import AccountRegistry
|
||||
from chainlib.chain import ChainSpec
|
||||
from cic_registry.chain import ChainRegistry
|
||||
from cic_registry.helper.declarator import DeclaratorOracleAdapter
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TokenOracle:
|
||||
|
||||
def __init__(self, conn, chain_spec, registry):
|
||||
self.tokens = []
|
||||
self.chain_spec = chain_spec
|
||||
self.registry = registry
|
||||
|
||||
token_registry_contract = CICRegistry.get_contract(chain_spec, 'TokenRegistry', 'Registry')
|
||||
self.getter = TokenUniqueSymbolIndex(conn, token_registry_contract.address())
|
||||
|
||||
|
||||
def get_tokens(self):
|
||||
token_count = self.getter.count()
|
||||
if token_count == len(self.tokens):
|
||||
return self.tokens
|
||||
|
||||
for i in range(len(self.tokens), token_count):
|
||||
token_address = self.getter.get_index(i)
|
||||
t = self.registry.get_address(self.chain_spec, token_address)
|
||||
token_symbol = t.symbol()
|
||||
self.tokens.append(t)
|
||||
|
||||
logg.debug('adding token idx {} symbol {} address {}'.format(i, token_symbol, token_address))
|
||||
|
||||
return copy.copy(self.tokens)
|
||||
|
||||
|
||||
class AccountsOracle:
|
||||
|
||||
def __init__(self, conn, chain_spec, registry):
|
||||
self.accounts = []
|
||||
self.chain_spec = chain_spec
|
||||
self.registry = registry
|
||||
|
||||
accounts_registry_contract = CICRegistry.get_contract(chain_spec, 'AccountRegistry', 'Registry')
|
||||
self.getter = AccountRegistry(conn, accounts_registry_contract.address())
|
||||
|
||||
|
||||
def get_accounts(self):
|
||||
accounts_count = self.getter.count()
|
||||
if accounts_count == len(self.accounts):
|
||||
return self.accounts
|
||||
|
||||
for i in range(len(self.accounts), accounts_count):
|
||||
account = self.getter.get_index(i)
|
||||
self.accounts.append(account)
|
||||
logg.debug('adding account {}'.format(account))
|
||||
|
||||
return copy.copy(self.accounts)
|
||||
|
||||
|
||||
def init_registry(config, w3):
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
CICRegistry.init(w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
||||
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
||||
|
||||
chain_registry = ChainRegistry(chain_spec)
|
||||
CICRegistry.add_chain_registry(chain_registry, True)
|
||||
|
||||
declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', interface='Declarator')
|
||||
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
||||
if trusted_addresses_src == None:
|
||||
raise ValueError('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
||||
trusted_addresses = trusted_addresses_src.split(',')
|
||||
for address in trusted_addresses:
|
||||
logg.info('using trusted address {}'.format(address))
|
||||
|
||||
oracle = DeclaratorOracleAdapter(declarator.contract, trusted_addresses)
|
||||
chain_registry.add_oracle(oracle, 'naive_erc20_oracle')
|
||||
|
||||
return CICRegistry
|
||||
@@ -76,8 +76,9 @@ def main():
|
||||
t = api.create_account(register=register)
|
||||
|
||||
ps.get_message()
|
||||
m = ps.get_message(timeout=args.timeout)
|
||||
print(json.loads(m['data']))
|
||||
o = ps.get_message(timeout=args.timeout)
|
||||
m = json.loads(o['data'])
|
||||
print(m['result'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -21,14 +21,21 @@ import cic_eth
|
||||
from cic_eth.eth import RpcClient
|
||||
from cic_eth.db import SessionBase
|
||||
from cic_eth.db.enum import StatusEnum
|
||||
from cic_eth.db.enum import StatusBits
|
||||
from cic_eth.db.enum import LockEnum
|
||||
from cic_eth.db import dsn_from_config
|
||||
from cic_eth.queue.tx import get_upcoming_tx
|
||||
from cic_eth.queue.tx import (
|
||||
get_upcoming_tx,
|
||||
set_dequeue,
|
||||
)
|
||||
from cic_eth.admin.ctrl import lock_send
|
||||
from cic_eth.sync.error import LoopDone
|
||||
from cic_eth.eth.tx import send as task_tx_send
|
||||
from cic_eth.error import PermanentTxError
|
||||
from cic_eth.error import TemporaryTxError
|
||||
from cic_eth.error import (
|
||||
PermanentTxError,
|
||||
TemporaryTxError,
|
||||
NotLocalTxError,
|
||||
)
|
||||
from cic_eth.eth.util import unpack_signed_raw_tx_hex
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
@@ -91,6 +98,8 @@ run = True
|
||||
|
||||
class DispatchSyncer:
|
||||
|
||||
yield_delay = 0.005
|
||||
|
||||
def __init__(self, chain_spec):
|
||||
self.chain_spec = chain_spec
|
||||
self.chain_id = chain_spec.chain_id()
|
||||
@@ -107,6 +116,11 @@ class DispatchSyncer:
|
||||
for k in txs.keys():
|
||||
tx_raw = txs[k]
|
||||
tx = unpack_signed_raw_tx_hex(tx_raw, self.chain_spec.chain_id())
|
||||
|
||||
try:
|
||||
set_dequeue(tx['hash'])
|
||||
except NotLocalTxError as e:
|
||||
logg.warning('dispatcher was triggered with non-local tx {}'.format(tx['hash']))
|
||||
|
||||
s_check = celery.signature(
|
||||
'cic_eth.admin.ctrl.check_lock',
|
||||
@@ -127,18 +141,22 @@ class DispatchSyncer:
|
||||
)
|
||||
s_check.link(s_send)
|
||||
t = s_check.apply_async()
|
||||
logg.info('processed {}'.format(k))
|
||||
|
||||
|
||||
def loop(self, w3, interval):
|
||||
while run:
|
||||
txs = {}
|
||||
typ = StatusEnum.READYSEND
|
||||
typ = StatusBits.QUEUED
|
||||
utxs = get_upcoming_tx(typ, chain_id=self.chain_id)
|
||||
for k in utxs.keys():
|
||||
txs[k] = utxs[k]
|
||||
self.process(w3, txs)
|
||||
|
||||
time.sleep(interval)
|
||||
if len(utxs) > 0:
|
||||
time.sleep(self.yield_delay)
|
||||
else:
|
||||
time.sleep(interval)
|
||||
|
||||
|
||||
def main():
|
||||
|
||||
@@ -5,37 +5,46 @@ import logging
|
||||
import web3
|
||||
import celery
|
||||
from cic_registry.error import UnknownContractError
|
||||
from chainlib.status import Status as TxStatus
|
||||
from chainlib.eth.address import to_checksum
|
||||
from chainlib.eth.constant import ZERO_ADDRESS
|
||||
from hexathon import strip_0x
|
||||
|
||||
# local imports
|
||||
from .base import SyncFilter
|
||||
from cic_eth.eth.token import unpack_transfer
|
||||
from cic_eth.eth.token import unpack_transferfrom
|
||||
from cic_eth.eth.token import (
|
||||
unpack_transfer,
|
||||
unpack_transferfrom,
|
||||
)
|
||||
from cic_eth.eth.account import unpack_gift
|
||||
from cic_eth.eth.token import ExtendedTx
|
||||
from .base import SyncFilter
|
||||
|
||||
logg = logging.getLogger()
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
transfer_method_signature = '0xa9059cbb' # keccak256(transfer(address,uint256))
|
||||
transferfrom_method_signature = '0x23b872dd' # keccak256(transferFrom(address,address,uint256))
|
||||
giveto_method_signature = '0x63e4bff4' # keccak256(giveTo(address))
|
||||
transfer_method_signature = 'a9059cbb' # keccak256(transfer(address,uint256))
|
||||
transferfrom_method_signature = '23b872dd' # keccak256(transferFrom(address,address,uint256))
|
||||
giveto_method_signature = '63e4bff4' # keccak256(giveTo(address))
|
||||
|
||||
|
||||
class CallbackFilter(SyncFilter):
|
||||
|
||||
trusted_addresses = []
|
||||
|
||||
def __init__(self, method, queue):
|
||||
def __init__(self, chain_spec, method, queue):
|
||||
self.queue = queue
|
||||
self.method = method
|
||||
self.chain_spec = chain_spec
|
||||
|
||||
|
||||
def call_back(self, transfer_type, result):
|
||||
logg.debug('result {}'.format(result))
|
||||
s = celery.signature(
|
||||
self.method,
|
||||
[
|
||||
result,
|
||||
transfer_type,
|
||||
int(rcpt.status == 0),
|
||||
int(result['status_code'] == 0),
|
||||
],
|
||||
queue=self.queue,
|
||||
)
|
||||
@@ -50,58 +59,85 @@ class CallbackFilter(SyncFilter):
|
||||
# )
|
||||
# s_translate.link(s)
|
||||
# s_translate.apply_async()
|
||||
s.apply_async()
|
||||
t = s.apply_async()
|
||||
return s
|
||||
|
||||
|
||||
def parse_data(self, tx, rcpt):
|
||||
transfer_type = 'transfer'
|
||||
def parse_data(self, tx):
|
||||
transfer_type = None
|
||||
transfer_data = None
|
||||
method_signature = tx.input[:10]
|
||||
logg.debug('have payload {}'.format(tx.payload))
|
||||
method_signature = tx.payload[:8]
|
||||
|
||||
logg.debug('tx status {}'.format(tx.status))
|
||||
if method_signature == transfer_method_signature:
|
||||
transfer_data = unpack_transfer(tx.input)
|
||||
transfer_data = unpack_transfer(tx.payload)
|
||||
transfer_data['from'] = tx['from']
|
||||
transfer_data['token_address'] = tx['to']
|
||||
|
||||
elif method_signature == transferfrom_method_signature:
|
||||
transfer_type = 'transferfrom'
|
||||
transfer_data = unpack_transferfrom(tx.input)
|
||||
transfer_data = unpack_transferfrom(tx.payload)
|
||||
transfer_data['token_address'] = tx['to']
|
||||
|
||||
# TODO: do not rely on logs here
|
||||
elif method_signature == giveto_method_signature:
|
||||
transfer_type = 'tokengift'
|
||||
transfer_data = unpack_gift(tx.input)
|
||||
for l in rcpt.logs:
|
||||
if l.topics[0].hex() == '0x45c201a59ac545000ead84f30b2db67da23353aa1d58ac522c48505412143ffa':
|
||||
transfer_data['value'] = web3.Web3.toInt(hexstr=l.data)
|
||||
token_address_bytes = l.topics[2][32-20:]
|
||||
transfer_data['token_address'] = web3.Web3.toChecksumAddress(token_address_bytes.hex())
|
||||
transfer_data['from'] = rcpt.to
|
||||
transfer_data = unpack_gift(tx.payload)
|
||||
transfer_data['from'] = tx.inputs[0]
|
||||
transfer_data['value'] = 0
|
||||
transfer_data['token_address'] = ZERO_ADDRESS
|
||||
for l in tx.logs:
|
||||
topics = l['topics']
|
||||
logg.debug('topixx {}'.format(topics))
|
||||
if strip_0x(topics[0]) == '45c201a59ac545000ead84f30b2db67da23353aa1d58ac522c48505412143ffa':
|
||||
transfer_data['value'] = web3.Web3.toInt(hexstr=strip_0x(l['data']))
|
||||
#token_address_bytes = topics[2][32-20:]
|
||||
token_address = strip_0x(topics[2])[64-40:]
|
||||
transfer_data['token_address'] = to_checksum(token_address)
|
||||
|
||||
logg.debug('resolved method {}'.format(transfer_type))
|
||||
|
||||
if transfer_data != None:
|
||||
transfer_data['status'] = tx.status
|
||||
|
||||
return (transfer_type, transfer_data)
|
||||
|
||||
|
||||
def filter(self, w3, tx, rcpt, chain_spec, session=None):
|
||||
logg.debug('applying callback filter "{}:{}"'.format(self.queue, self.method))
|
||||
chain_str = str(chain_spec)
|
||||
|
||||
transfer_data = self.parse_data(tx, rcpt)
|
||||
def filter(self, conn, block, tx, db_session=None):
|
||||
chain_str = str(self.chain_spec)
|
||||
|
||||
transfer_data = None
|
||||
if len(tx.input) < 10:
|
||||
logg.debug('callbacks filter data length not sufficient for method signature in tx {}, skipping'.format(tx['hash']))
|
||||
transfer_type = None
|
||||
try:
|
||||
(transfer_type, transfer_data) = self.parse_data(tx)
|
||||
except TypeError:
|
||||
logg.debug('invalid method data length for tx {}'.format(tx.hash))
|
||||
return
|
||||
|
||||
logg.debug('checking callbacks filter input {}'.format(tx.input[:10]))
|
||||
if len(tx.payload) < 8:
|
||||
logg.debug('callbacks filter data length not sufficient for method signature in tx {}, skipping'.format(tx.hash))
|
||||
return
|
||||
|
||||
logg.debug('checking callbacks filter input {}'.format(tx.payload[:8]))
|
||||
|
||||
if transfer_data != None:
|
||||
logg.debug('wtfoo {}'.format(transfer_data))
|
||||
token_symbol = None
|
||||
result = None
|
||||
try:
|
||||
tokentx = ExtendedTx(self.chain_spec)
|
||||
tokentx = ExtendedTx(tx.hash, self.chain_spec)
|
||||
tokentx.set_actors(transfer_data['from'], transfer_data['to'], self.trusted_addresses)
|
||||
tokentx.set_tokens(transfer_data['token_address'], transfer_data['value'])
|
||||
self.call_back(tokentx.to_dict())
|
||||
if transfer_data['status'] == 0:
|
||||
tokentx.set_status(1)
|
||||
else:
|
||||
tokentx.set_status(0)
|
||||
t = self.call_back(transfer_type, tokentx.to_dict())
|
||||
logg.info('callback success task id {} tx {}'.format(t, tx.hash))
|
||||
except UnknownContractError:
|
||||
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(tc.queue, tc.method, transfer_data['to'], tx.hash.hex()))
|
||||
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(tc.queue, tc.method, transfer_data['to'], tx.hash))
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return 'cic-eth callbacks'
|
||||
|
||||
@@ -3,29 +3,30 @@ import logging
|
||||
|
||||
# third-party imports
|
||||
from cic_registry.chain import ChainSpec
|
||||
from hexathon import add_0x
|
||||
|
||||
# local imports
|
||||
from cic_eth.db.enum import StatusBits
|
||||
from cic_eth.db.models.base import SessionBase
|
||||
from cic_eth.db.models.tx import TxCache
|
||||
from cic_eth.db import Otx
|
||||
from cic_eth.db.models.otx import Otx
|
||||
from cic_eth.queue.tx import get_paused_txs
|
||||
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||
from .base import SyncFilter
|
||||
|
||||
logg = logging.getLogger()
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GasFilter(SyncFilter):
|
||||
|
||||
def __init__(self, gas_provider, queue=None):
|
||||
def __init__(self, chain_spec, queue=None):
|
||||
self.queue = queue
|
||||
self.gas_provider = gas_provider
|
||||
self.chain_spec = chain_spec
|
||||
|
||||
|
||||
def filter(self, w3, tx, rcpt, chain_str, session=None):
|
||||
logg.debug('applying gas filter')
|
||||
tx_hash_hex = tx.hash.hex()
|
||||
if tx['value'] > 0:
|
||||
def filter(self, conn, block, tx, session): #rcpt, chain_str, session=None):
|
||||
tx_hash_hex = add_0x(tx.hash)
|
||||
if tx.value > 0:
|
||||
logg.debug('gas refill tx {}'.format(tx_hash_hex))
|
||||
session = SessionBase.bind_session(session)
|
||||
q = session.query(TxCache.recipient)
|
||||
@@ -34,23 +35,26 @@ class GasFilter(SyncFilter):
|
||||
r = q.first()
|
||||
|
||||
if r == None:
|
||||
logg.warning('unsolicited gas refill tx {}'.format(tx_hash_hex))
|
||||
logg.debug('unsolicited gas refill tx {}'.format(tx_hash_hex))
|
||||
SessionBase.release_session(session)
|
||||
return
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
txs = get_paused_txs(StatusEnum.WAITFORGAS, r[0], chain_spec.chain_id(), session=session)
|
||||
txs = get_paused_txs(StatusBits.GAS_ISSUES, r[0], self.chain_spec.chain_id(), session=session)
|
||||
|
||||
SessionBase.release_session(session)
|
||||
|
||||
logg.info('resuming gas-in-waiting txs for {}'.format(r[0]))
|
||||
if len(txs) > 0:
|
||||
logg.info('resuming gas-in-waiting txs for {}: {}'.format(r[0], txs.keys()))
|
||||
s = create_check_gas_and_send_task(
|
||||
list(txs.values()),
|
||||
str(chain_str),
|
||||
str(self.chain_spec),
|
||||
r[0],
|
||||
0,
|
||||
tx_hashes_hex=list(txs.keys()),
|
||||
queue=self.queue,
|
||||
)
|
||||
s.apply_async()
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return 'eic-eth gasfilter'
|
||||
|
||||
@@ -4,32 +4,48 @@ import logging
|
||||
# third-party imports
|
||||
import celery
|
||||
from chainlib.eth.address import to_checksum
|
||||
from hexathon import (
|
||||
add_0x,
|
||||
strip_0x,
|
||||
)
|
||||
|
||||
# local imports
|
||||
from .base import SyncFilter
|
||||
|
||||
logg = logging.getLogger()
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd' # keccak256(AccountAdded(address,uint256))
|
||||
|
||||
|
||||
class RegistrationFilter(SyncFilter):
|
||||
|
||||
def filter(self, w3, tx, rcpt, chain_spec, session=None):
|
||||
logg.debug('applying registration filter')
|
||||
def __init__(self, chain_spec, queue):
|
||||
self.chain_spec = chain_spec
|
||||
self.queue = queue
|
||||
|
||||
|
||||
def filter(self, conn, block, tx, db_session=None):
|
||||
registered_address = None
|
||||
for l in rcpt['logs']:
|
||||
event_topic_hex = l['topics'][0].hex()
|
||||
logg.debug('register filter checking log {}'.format(tx.logs))
|
||||
for l in tx.logs:
|
||||
event_topic_hex = l['topics'][0]
|
||||
if event_topic_hex == account_registry_add_log_hash:
|
||||
address_bytes = l.topics[1][32-20:]
|
||||
address = to_checksum(address_bytes.hex())
|
||||
# TODO: use abi conversion method instead
|
||||
|
||||
address_hex = strip_0x(l['topics'][1])[64-40:]
|
||||
address = to_checksum(add_0x(address_hex))
|
||||
logg.debug('request token gift to {}'.format(address))
|
||||
s = celery.signature(
|
||||
'cic_eth.eth.account.gift',
|
||||
[
|
||||
address,
|
||||
str(chain_spec),
|
||||
str(self.chain_spec),
|
||||
],
|
||||
queue=queue,
|
||||
queue=self.queue,
|
||||
)
|
||||
s.apply_async()
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return 'cic-eth account registration'
|
||||
|
||||
|
||||
@@ -3,39 +3,47 @@ import logging
|
||||
|
||||
# third-party imports
|
||||
import celery
|
||||
from hexathon import (
|
||||
add_0x,
|
||||
)
|
||||
|
||||
# local imports
|
||||
from cic_eth.db.models.otx import Otx
|
||||
from cic_eth.db.models.base import SessionBase
|
||||
from chainsyncer.db.models.base import SessionBase
|
||||
from chainlib.status import Status
|
||||
from .base import SyncFilter
|
||||
|
||||
logg = logging.getLogger()
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TxFilter(SyncFilter):
|
||||
|
||||
def __init__(self, queue):
|
||||
def __init__(self, chain_spec, queue):
|
||||
self.queue = queue
|
||||
self.chain_spec = chain_spec
|
||||
|
||||
|
||||
def filter(self, w3, tx, rcpt, chain_spec, session=None):
|
||||
session = SessionBase.bind_session(session)
|
||||
logg.debug('applying tx filter')
|
||||
tx_hash_hex = tx.hash.hex()
|
||||
otx = Otx.load(tx_hash_hex, session=session)
|
||||
SessionBase.release_session(session)
|
||||
def filter(self, conn, block, tx, db_session=None):
|
||||
db_session = SessionBase.bind_session(db_session)
|
||||
tx_hash_hex = tx.hash
|
||||
otx = Otx.load(add_0x(tx_hash_hex), session=db_session)
|
||||
if otx == None:
|
||||
logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
|
||||
return None
|
||||
logg.info('otx found {}'.format(otx.tx_hash))
|
||||
logg.info('local tx match {}'.format(otx.tx_hash))
|
||||
SessionBase.release_session(db_session)
|
||||
s = celery.signature(
|
||||
'cic_eth.queue.tx.set_final_status',
|
||||
[
|
||||
tx_hash_hex,
|
||||
rcpt.blockNumber,
|
||||
rcpt.status == 0,
|
||||
add_0x(tx_hash_hex),
|
||||
tx.block.number,
|
||||
tx.status == Status.ERROR,
|
||||
],
|
||||
queue=self.queue,
|
||||
)
|
||||
t = s.apply_async()
|
||||
return t
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return 'cic-eth erc20 transfer filter'
|
||||
|
||||
@@ -1,207 +0,0 @@
|
||||
# standard imports
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import time
|
||||
import argparse
|
||||
import sys
|
||||
import re
|
||||
|
||||
# third-party imports
|
||||
import confini
|
||||
import celery
|
||||
import rlp
|
||||
import web3
|
||||
from web3 import HTTPProvider, WebsocketProvider
|
||||
from cic_registry import CICRegistry
|
||||
from cic_registry.chain import ChainSpec
|
||||
from cic_registry import zero_address
|
||||
from cic_registry.chain import ChainRegistry
|
||||
from cic_registry.error import UnknownContractError
|
||||
from cic_bancor.bancor import BancorRegistryClient
|
||||
|
||||
# local imports
|
||||
import cic_eth
|
||||
from cic_eth.eth import RpcClient
|
||||
from cic_eth.db import SessionBase
|
||||
from cic_eth.db import Otx
|
||||
from cic_eth.db import TxConvertTransfer
|
||||
from cic_eth.db.models.tx import TxCache
|
||||
from cic_eth.db.enum import StatusEnum
|
||||
from cic_eth.db import dsn_from_config
|
||||
from cic_eth.queue.tx import get_paused_txs
|
||||
from cic_eth.sync import Syncer
|
||||
from cic_eth.sync.error import LoopDone
|
||||
from cic_eth.db.error import UnknownConvertError
|
||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||
from cic_eth.sync.backend import SyncerBackend
|
||||
from cic_eth.eth.token import unpack_transfer
|
||||
from cic_eth.eth.token import unpack_transferfrom
|
||||
from cic_eth.eth.account import unpack_gift
|
||||
from cic_eth.runnable.daemons.filters import (
|
||||
CallbackFilter,
|
||||
GasFilter,
|
||||
TxFilter,
|
||||
RegistrationFilter,
|
||||
)
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
|
||||
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
|
||||
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
|
||||
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
|
||||
|
||||
|
||||
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||
|
||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||
argparser.add_argument('-v', help='be verbose', action='store_true')
|
||||
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||
argparser.add_argument('mode', type=str, help='sync mode: (head|history)', default='head')
|
||||
args = argparser.parse_args(sys.argv[1:])
|
||||
|
||||
if args.v == True:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
elif args.vv == True:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
config_dir = os.path.join(args.c)
|
||||
os.makedirs(config_dir, 0o777, True)
|
||||
config = confini.Config(config_dir, args.env_prefix)
|
||||
config.process()
|
||||
# override args
|
||||
args_override = {
|
||||
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
}
|
||||
config.dict_override(args_override, 'cli flag')
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||
|
||||
app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||
|
||||
queue = args.q
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
|
||||
|
||||
re_websocket = re.compile('^wss?://')
|
||||
re_http = re.compile('^https?://')
|
||||
blockchain_provider = config.get('ETH_PROVIDER')
|
||||
if re.match(re_websocket, blockchain_provider) != None:
|
||||
blockchain_provider = WebsocketProvider(blockchain_provider)
|
||||
elif re.match(re_http, blockchain_provider) != None:
|
||||
blockchain_provider = HTTPProvider(blockchain_provider)
|
||||
else:
|
||||
raise ValueError('unknown provider url {}'.format(blockchain_provider))
|
||||
|
||||
def web3_constructor():
|
||||
w3 = web3.Web3(blockchain_provider)
|
||||
return (blockchain_provider, w3)
|
||||
RpcClient.set_constructor(web3_constructor)
|
||||
|
||||
c = RpcClient(chain_spec)
|
||||
CICRegistry.init(c.w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
||||
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
||||
chain_registry = ChainRegistry(chain_spec)
|
||||
CICRegistry.add_chain_registry(chain_registry, True)
|
||||
|
||||
declarator = CICRegistry.get_contract(chain_spec, 'AddressDeclarator', interface='Declarator')
|
||||
|
||||
|
||||
dsn = dsn_from_config(config)
|
||||
SessionBase.connect(dsn, pool_size=1, debug=config.true('DATABASE_DEBUG'))
|
||||
|
||||
|
||||
def main():
|
||||
global chain_spec, c, queue
|
||||
|
||||
if config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER') != None:
|
||||
CICRegistry.add_role(chain_spec, config.get('ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER'), 'AccountRegistry', True)
|
||||
|
||||
syncers = []
|
||||
block_offset = c.w3.eth.blockNumber
|
||||
chain = str(chain_spec)
|
||||
|
||||
if SyncerBackend.first(chain):
|
||||
from cic_eth.sync.history import HistorySyncer
|
||||
backend = SyncerBackend.initial(chain, block_offset)
|
||||
syncer = HistorySyncer(backend)
|
||||
syncers.append(syncer)
|
||||
|
||||
if args.mode == 'head':
|
||||
from cic_eth.sync.head import HeadSyncer
|
||||
block_sync = SyncerBackend.live(chain, block_offset+1)
|
||||
syncers.append(HeadSyncer(block_sync))
|
||||
elif args.mode == 'history':
|
||||
from cic_eth.sync.history import HistorySyncer
|
||||
backends = SyncerBackend.resume(chain, block_offset+1)
|
||||
for backend in backends:
|
||||
syncers.append(HistorySyncer(backend))
|
||||
if len(syncers) == 0:
|
||||
logg.info('found no unsynced history. terminating')
|
||||
sys.exit(0)
|
||||
else:
|
||||
sys.stderr.write("unknown mode '{}'\n".format(args.mode))
|
||||
sys.exit(1)
|
||||
|
||||
# bancor_registry_contract = CICRegistry.get_contract(chain_spec, 'BancorRegistry', interface='Registry')
|
||||
# bancor_chain_registry = CICRegistry.get_chain_registry(chain_spec)
|
||||
# bancor_registry = BancorRegistryClient(c.w3, bancor_chain_registry, config.get('ETH_ABI_DIR'))
|
||||
# bancor_registry.load()
|
||||
|
||||
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
||||
if trusted_addresses_src == None:
|
||||
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
||||
sys.exit(1)
|
||||
trusted_addresses = trusted_addresses_src.split(',')
|
||||
for address in trusted_addresses:
|
||||
logg.info('using trusted address {}'.format(address))
|
||||
CallbackFilter.trusted_addresses = trusted_addresses
|
||||
|
||||
callback_filters = []
|
||||
for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','):
|
||||
task_split = cb.split(':')
|
||||
task_queue = queue
|
||||
if len(task_split) > 1:
|
||||
task_queue = task_split[0]
|
||||
callback_filter = CallbackFilter(task_split[1], task_queue)
|
||||
callback_filters.append(callback_filter)
|
||||
|
||||
tx_filter = TxFilter(queue)
|
||||
|
||||
registration_filter = RegistrationFilter()
|
||||
|
||||
gas_filter = GasFilter(c.gas_provider(), queue)
|
||||
|
||||
i = 0
|
||||
for syncer in syncers:
|
||||
logg.debug('running syncer index {}'.format(i))
|
||||
syncer.filter.append(gas_filter.filter)
|
||||
syncer.filter.append(registration_filter.filter)
|
||||
# TODO: the two following filter functions break the filter loop if return uuid. Pro: less code executed. Con: Possibly unintuitive flow break
|
||||
syncer.filter.append(tx_filter.filter)
|
||||
#syncer.filter.append(convert_filter)
|
||||
for cf in callback_filters:
|
||||
syncer.filter.append(cf.filter)
|
||||
|
||||
try:
|
||||
syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')))
|
||||
except LoopDone as e:
|
||||
sys.stderr.write("sync '{}' done at block {}\n".format(args.mode, e))
|
||||
|
||||
i += 1
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -32,6 +32,7 @@ from cic_eth.admin import ctrl
|
||||
from cic_eth.eth.rpc import RpcClient
|
||||
from cic_eth.eth.rpc import GasOracle
|
||||
from cic_eth.queue import tx
|
||||
from cic_eth.queue import balance
|
||||
from cic_eth.callbacks import Callback
|
||||
from cic_eth.callbacks import http
|
||||
from cic_eth.callbacks import tcp
|
||||
@@ -49,6 +50,7 @@ argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='web3 provider')
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
|
||||
argparser.add_argument('-r', type=str, help='CIC registry address')
|
||||
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, help='Directory containing bytecode and abi')
|
||||
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||
@@ -68,6 +70,7 @@ config.process()
|
||||
args_override = {
|
||||
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||
'ETH_PROVIDER': getattr(args, 'p'),
|
||||
'TASKS_TRACE_QUEUE_STATUS': getattr(args, 'trace_queue_status'),
|
||||
}
|
||||
@@ -228,7 +231,7 @@ def main():
|
||||
for address in trusted_addresses:
|
||||
logg.info('using trusted address {}'.format(address))
|
||||
oracle = DeclaratorOracleAdapter(declarator.contract, trusted_addresses)
|
||||
chain_registry.add_oracle('naive_erc20_oracle', oracle)
|
||||
chain_registry.add_oracle(oracle, 'naive_erc20_oracle')
|
||||
|
||||
|
||||
#chain_spec = CICRegistry.default_chain_spec
|
||||
|
||||
168
apps/cic-eth/cic_eth/runnable/daemons/tracker.py
Normal file
168
apps/cic-eth/cic_eth/runnable/daemons/tracker.py
Normal file
@@ -0,0 +1,168 @@
|
||||
# standard imports
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import time
|
||||
import argparse
|
||||
import sys
|
||||
import re
|
||||
|
||||
# third-party imports
|
||||
import confini
|
||||
import celery
|
||||
import rlp
|
||||
import web3
|
||||
from web3 import HTTPProvider, WebsocketProvider
|
||||
import cic_base.config
|
||||
import cic_base.log
|
||||
import cic_base.argparse
|
||||
import cic_base.rpc
|
||||
from cic_registry import CICRegistry
|
||||
from chainlib.chain import ChainSpec
|
||||
from cic_registry import zero_address
|
||||
from cic_registry.chain import ChainRegistry
|
||||
from cic_registry.error import UnknownContractError
|
||||
from chainlib.eth.connection import HTTPConnection
|
||||
from chainlib.eth.block import (
|
||||
block_latest,
|
||||
)
|
||||
from hexathon import (
|
||||
strip_0x,
|
||||
)
|
||||
from chainsyncer.backend import SyncerBackend
|
||||
from chainsyncer.driver import (
|
||||
HeadSyncer,
|
||||
HistorySyncer,
|
||||
)
|
||||
from chainsyncer.db.models.base import SessionBase
|
||||
|
||||
# local imports
|
||||
from cic_eth.registry import init_registry
|
||||
from cic_eth.eth import RpcClient
|
||||
from cic_eth.db import Otx
|
||||
from cic_eth.db import TxConvertTransfer
|
||||
from cic_eth.db.models.tx import TxCache
|
||||
from cic_eth.db.enum import StatusEnum
|
||||
from cic_eth.db import dsn_from_config
|
||||
from cic_eth.queue.tx import get_paused_txs
|
||||
#from cic_eth.sync import Syncer
|
||||
#from cic_eth.sync.error import LoopDone
|
||||
from cic_eth.db.error import UnknownConvertError
|
||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||
from cic_eth.eth.task import create_check_gas_and_send_task
|
||||
from cic_eth.eth.token import unpack_transfer
|
||||
from cic_eth.eth.token import unpack_transferfrom
|
||||
from cic_eth.eth.account import unpack_gift
|
||||
from cic_eth.runnable.daemons.filters import (
|
||||
CallbackFilter,
|
||||
GasFilter,
|
||||
TxFilter,
|
||||
RegistrationFilter,
|
||||
)
|
||||
|
||||
script_dir = os.path.realpath(os.path.dirname(__file__))
|
||||
|
||||
logg = cic_base.log.create()
|
||||
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
|
||||
#argparser = cic_base.argparse.add(argparser, add_traffic_args, 'traffic')
|
||||
args = cic_base.argparse.parse(argparser, logg)
|
||||
config = cic_base.config.create(args.c, args, args.env_prefix)
|
||||
|
||||
config.add(args.y, '_KEYSTORE_FILE', True)
|
||||
|
||||
config.add(args.q, '_CELERY_QUEUE', True)
|
||||
|
||||
cic_base.config.log(config)
|
||||
|
||||
|
||||
dsn = dsn_from_config(config)
|
||||
SessionBase.connect(dsn, pool_size=1, debug=config.true('DATABASE_DEBUG'))
|
||||
|
||||
|
||||
def main():
|
||||
# parse chain spec object
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
|
||||
# connect to celery
|
||||
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||
|
||||
# set up registry
|
||||
w3 = cic_base.rpc.create(config.get('ETH_PROVIDER')) # replace with HTTPConnection when registry has been so refactored
|
||||
registry = init_registry(config, w3)
|
||||
|
||||
# Connect to blockchain with chainlib
|
||||
conn = HTTPConnection(config.get('ETH_PROVIDER'))
|
||||
|
||||
o = block_latest()
|
||||
r = conn.do(o)
|
||||
block_offset = int(strip_0x(r), 16) + 1
|
||||
|
||||
logg.debug('starting at block {}'.format(block_offset))
|
||||
|
||||
syncers = []
|
||||
|
||||
#if SyncerBackend.first(chain_spec):
|
||||
# backend = SyncerBackend.initial(chain_spec, block_offset)
|
||||
syncer_backends = SyncerBackend.resume(chain_spec, block_offset)
|
||||
|
||||
if len(syncer_backends) == 0:
|
||||
logg.info('found no backends to resume')
|
||||
syncer_backends.append(SyncerBackend.initial(chain_spec, block_offset))
|
||||
else:
|
||||
for syncer_backend in syncer_backends:
|
||||
logg.info('resuming sync session {}'.format(syncer_backend))
|
||||
|
||||
syncer_backends.append(SyncerBackend.live(chain_spec, block_offset+1))
|
||||
|
||||
for syncer_backend in syncer_backends:
|
||||
try:
|
||||
syncers.append(HistorySyncer(syncer_backend))
|
||||
logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
|
||||
except AttributeError:
|
||||
logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
|
||||
syncers.append(HeadSyncer(syncer_backend))
|
||||
|
||||
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
||||
if trusted_addresses_src == None:
|
||||
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
||||
sys.exit(1)
|
||||
trusted_addresses = trusted_addresses_src.split(',')
|
||||
for address in trusted_addresses:
|
||||
logg.info('using trusted address {}'.format(address))
|
||||
CallbackFilter.trusted_addresses = trusted_addresses
|
||||
|
||||
callback_filters = []
|
||||
for cb in config.get('TASKS_TRANSFER_CALLBACKS', '').split(','):
|
||||
task_split = cb.split(':')
|
||||
task_queue = config.get('_CELERY_QUEUE')
|
||||
if len(task_split) > 1:
|
||||
task_queue = task_split[0]
|
||||
callback_filter = CallbackFilter(chain_spec, task_split[1], task_queue)
|
||||
callback_filters.append(callback_filter)
|
||||
|
||||
tx_filter = TxFilter(chain_spec, config.get('_CELERY_QUEUE'))
|
||||
|
||||
registration_filter = RegistrationFilter(chain_spec, config.get('_CELERY_QUEUE'))
|
||||
|
||||
gas_filter = GasFilter(chain_spec, config.get('_CELERY_QUEUE'))
|
||||
|
||||
i = 0
|
||||
for syncer in syncers:
|
||||
logg.debug('running syncer index {}'.format(i))
|
||||
syncer.add_filter(gas_filter)
|
||||
syncer.add_filter(registration_filter)
|
||||
# TODO: the two following filter functions break the filter loop if return uuid. Pro: less code executed. Con: Possibly unintuitive flow break
|
||||
syncer.add_filter(tx_filter)
|
||||
for cf in callback_filters:
|
||||
syncer.add_filter(cf)
|
||||
|
||||
r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), conn)
|
||||
sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
|
||||
|
||||
i += 1
|
||||
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -18,6 +18,7 @@ import web3
|
||||
from cic_registry import CICRegistry
|
||||
from cic_registry.chain import ChainSpec
|
||||
from cic_registry.chain import ChainRegistry
|
||||
from hexathon import add_0x
|
||||
|
||||
# local imports
|
||||
from cic_eth.api import AdminApi
|
||||
@@ -36,8 +37,8 @@ default_abi_dir = '/usr/share/local/cic/solidity/abi'
|
||||
default_config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
|
||||
argparser.add_argument('-r', '--registry-address', type=str, help='CIC registry address')
|
||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='Web3 provider url (http only)')
|
||||
argparser.add_argument('-r', '--registry-address', dest='r', type=str, help='CIC registry address')
|
||||
argparser.add_argument('-f', '--format', dest='f', default='terminal', type=str, help='Output format')
|
||||
argparser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||
@@ -61,12 +62,16 @@ config.process()
|
||||
args_override = {
|
||||
'ETH_PROVIDER': getattr(args, 'p'),
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||
}
|
||||
# override args
|
||||
config.dict_override(args_override, 'cli args')
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||
|
||||
config.add(add_0x(args.query), '_QUERY', True)
|
||||
|
||||
re_websocket = re.compile('^wss?://')
|
||||
re_http = re.compile('^https?://')
|
||||
blockchain_provider = config.get('ETH_PROVIDER')
|
||||
@@ -148,21 +153,20 @@ def render_lock(o, **kwargs):
|
||||
|
||||
# TODO: move each command to submodule
|
||||
def main():
|
||||
logg.debug('len {}'.format(len(args.query)))
|
||||
txs = []
|
||||
renderer = render_tx
|
||||
if len(args.query) > 66:
|
||||
txs = [admin_api.tx(chain_spec, tx_raw=args.query)]
|
||||
elif len(args.query) > 42:
|
||||
txs = [admin_api.tx(chain_spec, tx_hash=args.query)]
|
||||
elif len(args.query) == 42:
|
||||
txs = admin_api.account(chain_spec, args.query, include_recipient=False)
|
||||
if len(config.get('_QUERY')) > 66:
|
||||
txs = [admin_api.tx(chain_spec, tx_raw=config.get('_QUERY'))]
|
||||
elif len(config.get('_QUERY')) > 42:
|
||||
txs = [admin_api.tx(chain_spec, tx_hash=config.get('_QUERY'))]
|
||||
elif len(config.get('_QUERY')) == 42:
|
||||
txs = admin_api.account(chain_spec, config.get('_QUERY'), include_recipient=False)
|
||||
renderer = render_account
|
||||
elif len(args.query) >= 4 and args.query[:4] == 'lock':
|
||||
elif len(config.get('_QUERY')) >= 4 and config.get('_QUERY')[:4] == 'lock':
|
||||
txs = admin_api.get_lock()
|
||||
renderer = render_lock
|
||||
else:
|
||||
raise ValueError('cannot parse argument {}'.format(args.query))
|
||||
raise ValueError('cannot parse argument {}'.format(config.get('_QUERY')))
|
||||
|
||||
if len(txs) == 0:
|
||||
logg.info('no matches found')
|
||||
|
||||
@@ -21,7 +21,7 @@ class HistorySyncer(MinedSyncer):
|
||||
:param mx: Maximum number of blocks to return in one call
|
||||
:type mx: int
|
||||
"""
|
||||
def __init__(self, bc_cache, mx=20):
|
||||
def __init__(self, bc_cache, mx=500):
|
||||
super(HistorySyncer, self).__init__(bc_cache)
|
||||
self.max = mx
|
||||
|
||||
|
||||
@@ -23,6 +23,8 @@ class MinedSyncer(Syncer):
|
||||
:type bc_cache: Object implementing methods from cic_eth.sync.SyncerBackend
|
||||
"""
|
||||
|
||||
yield_delay = 0.005
|
||||
|
||||
def __init__(self, bc_cache):
|
||||
super(MinedSyncer, self).__init__(bc_cache)
|
||||
self.block_offset = 0
|
||||
@@ -98,7 +100,10 @@ class MinedSyncer(Syncer):
|
||||
logg.debug('got blocks {}'.format(e))
|
||||
for block in e:
|
||||
block_number = self.process(c.w3, block.hex())
|
||||
logg.info('processed block {} {}'.format(block_number, block.hex()))
|
||||
logg.debug('processed block {} {}'.format(block_number, block.hex()))
|
||||
self.bc_cache.disconnect()
|
||||
time.sleep(interval)
|
||||
if len(e) > 0:
|
||||
time.sleep(self.yield_delay)
|
||||
else:
|
||||
time.sleep(interval)
|
||||
logg.info("Syncer no longer set to run, gracefully exiting")
|
||||
|
||||
33
apps/cic-eth/cic_eth/task.py
Normal file
33
apps/cic-eth/cic_eth/task.py
Normal file
@@ -0,0 +1,33 @@
|
||||
# import
|
||||
import requests
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
import sqlalchemy
|
||||
|
||||
|
||||
class CriticalTask(celery.Task):
|
||||
retry_jitter = True
|
||||
retry_backoff = True
|
||||
retry_backoff_max = 8
|
||||
|
||||
|
||||
class CriticalSQLAlchemyTask(CriticalTask):
|
||||
autoretry_for = (
|
||||
sqlalchemy.exc.DatabaseError,
|
||||
sqlalchemy.exc.TimeoutError,
|
||||
)
|
||||
|
||||
|
||||
class CriticalWeb3Task(CriticalTask):
|
||||
autoretry_for = (
|
||||
requests.exceptions.ConnectionError,
|
||||
)
|
||||
|
||||
|
||||
class CriticalSQLAlchemyAndWeb3Task(CriticalTask):
|
||||
autoretry_for = (
|
||||
sqlalchemy.exc.DatabaseError,
|
||||
sqlalchemy.exc.TimeoutError,
|
||||
requests.exceptions.ConnectionError,
|
||||
)
|
||||
@@ -10,7 +10,7 @@ version = (
|
||||
0,
|
||||
10,
|
||||
0,
|
||||
'alpha.27',
|
||||
'alpha.36',
|
||||
)
|
||||
|
||||
version_object = semver.VersionInfo(
|
||||
|
||||
@@ -2,4 +2,4 @@
|
||||
registry_address =
|
||||
chain_spec =
|
||||
tx_retry_delay =
|
||||
trust_address =
|
||||
trust_address =
|
||||
|
||||
2
apps/cic-eth/config/docker/bancor.ini
Normal file
2
apps/cic-eth/config/docker/bancor.ini
Normal file
@@ -0,0 +1,2 @@
|
||||
[bancor]
|
||||
dir = /usr/local/share/cic/bancor
|
||||
3
apps/cic-eth/config/docker/celery.ini
Normal file
3
apps/cic-eth/config/docker/celery.ini
Normal file
@@ -0,0 +1,3 @@
|
||||
[celery]
|
||||
broker_url = redis://localhost:63379
|
||||
result_url = redis://localhost:63379
|
||||
4
apps/cic-eth/config/docker/cic.ini
Normal file
4
apps/cic-eth/config/docker/cic.ini
Normal file
@@ -0,0 +1,4 @@
|
||||
[cic]
|
||||
registry_address =
|
||||
chain_spec = evm:bloxberg:8996
|
||||
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
|
||||
2
apps/cic-eth/config/docker/custody.ini
Normal file
2
apps/cic-eth/config/docker/custody.ini
Normal file
@@ -0,0 +1,2 @@
|
||||
[custody]
|
||||
account_index_address =
|
||||
9
apps/cic-eth/config/docker/database.ini
Normal file
9
apps/cic-eth/config/docker/database.ini
Normal file
@@ -0,0 +1,9 @@
|
||||
[database]
|
||||
NAME=cic_eth
|
||||
USER=postgres
|
||||
PASSWORD=tralala
|
||||
HOST=localhost
|
||||
PORT=63432
|
||||
ENGINE=postgresql
|
||||
DRIVER=psycopg2
|
||||
DEBUG=0
|
||||
2
apps/cic-eth/config/docker/dispatcer.ini
Normal file
2
apps/cic-eth/config/docker/dispatcer.ini
Normal file
@@ -0,0 +1,2 @@
|
||||
[dispatcher]
|
||||
loop_interval = 0.9
|
||||
8
apps/cic-eth/config/docker/eth.ini
Normal file
8
apps/cic-eth/config/docker/eth.ini
Normal file
@@ -0,0 +1,8 @@
|
||||
[eth]
|
||||
#ws_provider = ws://localhost:8546
|
||||
#ttp_provider = http://localhost:8545
|
||||
provider = http://localhost:63545
|
||||
gas_provider_address =
|
||||
#chain_id =
|
||||
abi_dir = /home/lash/src/ext/cic/grassrootseconomics/cic-contracts/abis
|
||||
account_accounts_index_writer =
|
||||
4
apps/cic-eth/config/docker/redis.ini
Normal file
4
apps/cic-eth/config/docker/redis.ini
Normal file
@@ -0,0 +1,4 @@
|
||||
[redis]
|
||||
host = localhost
|
||||
port = 63379
|
||||
db = 0
|
||||
5
apps/cic-eth/config/docker/signer.ini
Normal file
5
apps/cic-eth/config/docker/signer.ini
Normal file
@@ -0,0 +1,5 @@
|
||||
[signer]
|
||||
socket_path = /tmp/crypto-dev-signer/jsonrpc.ipc
|
||||
secret = deedbeef
|
||||
database_name = signer_test
|
||||
dev_keys_path =
|
||||
6
apps/cic-eth/config/docker/ssl.ini
Normal file
6
apps/cic-eth/config/docker/ssl.ini
Normal file
@@ -0,0 +1,6 @@
|
||||
[SSL]
|
||||
enable_client = false
|
||||
cert_file =
|
||||
key_file =
|
||||
password =
|
||||
ca_file =
|
||||
2
apps/cic-eth/config/docker/syncer.ini
Normal file
2
apps/cic-eth/config/docker/syncer.ini
Normal file
@@ -0,0 +1,2 @@
|
||||
[SYNCER]
|
||||
loop_interval = 1
|
||||
3
apps/cic-eth/config/docker/tasks.ini
Normal file
3
apps/cic-eth/config/docker/tasks.ini
Normal file
@@ -0,0 +1,3 @@
|
||||
[tasks]
|
||||
transfer_callbacks = taskcall:cic_eth.callbacks.noop.noop
|
||||
trace_queue_status = 1
|
||||
@@ -16,7 +16,7 @@ ARG root_requirement_file='requirements.txt'
|
||||
#RUN apk add linux-headers
|
||||
#RUN apk add libffi-dev
|
||||
RUN apt-get update && \
|
||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps
|
||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
|
||||
|
||||
# Copy shared requirements from top of mono-repo
|
||||
RUN echo "copying root req file ${root_requirement_file}"
|
||||
@@ -42,7 +42,6 @@ COPY cic-eth/config/ /usr/local/etc/cic-eth/
|
||||
COPY cic-eth/cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
||||
COPY cic-eth/crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
||||
|
||||
RUN apt-get install -y git && \
|
||||
git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
|
||||
RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
|
||||
mkdir -p /usr/local/share/cic/solidity && \
|
||||
cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
|
||||
|
||||
@@ -1,5 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
. ./db.sh
|
||||
|
||||
/usr/local/bin/cic-eth-managerd $@
|
||||
5
apps/cic-eth/docker/start_tracker.sh
Normal file
5
apps/cic-eth/docker/start_tracker.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
. ./db.sh
|
||||
|
||||
/usr/local/bin/cic-eth-trackerd $@
|
||||
@@ -1,14 +1,14 @@
|
||||
web3==5.12.2
|
||||
celery==4.4.7
|
||||
crypto-dev-signer~=0.4.13rc2
|
||||
confini~=0.3.6b1
|
||||
cic-registry~=0.5.3a18
|
||||
crypto-dev-signer~=0.4.13rc3
|
||||
confini~=0.3.6rc3
|
||||
cic-registry~=0.5.3a22
|
||||
cic-bancor~=0.0.6
|
||||
redis==3.5.3
|
||||
alembic==1.4.2
|
||||
websockets==8.1
|
||||
requests~=2.24.0
|
||||
eth_accounts_index~=0.0.10a7
|
||||
eth_accounts_index~=0.0.10a10
|
||||
erc20-approval-escrow~=0.3.0a5
|
||||
erc20-single-shot-faucet~=0.2.0a6
|
||||
rlp==2.0.1
|
||||
@@ -18,5 +18,7 @@ eth-gas-proxy==0.0.1a4
|
||||
websocket-client==0.57.0
|
||||
moolb~=0.1.1b2
|
||||
eth-address-index~=0.1.0a8
|
||||
chainlib~=0.0.1a12
|
||||
chainlib~=0.0.1a19
|
||||
hexathon~=0.0.1a3
|
||||
chainsyncer~=0.0.1a19
|
||||
cic-base==0.1.1a10
|
||||
|
||||
@@ -45,7 +45,7 @@ scripts =
|
||||
console_scripts =
|
||||
# daemons
|
||||
cic-eth-taskerd = cic_eth.runnable.daemons.tasker:main
|
||||
cic-eth-managerd = cic_eth.runnable.daemons.manager:main
|
||||
cic-eth-trackerd = cic_eth.runnable.daemons.tracker:main
|
||||
cic-eth-dispatcherd = cic_eth.runnable.daemons.dispatcher:main
|
||||
cic-eth-retrierd = cic_eth.runnable.daemons.retry:main
|
||||
# tools
|
||||
|
||||
@@ -37,7 +37,7 @@ def test_refill_gas(
|
||||
eth_empty_accounts,
|
||||
):
|
||||
|
||||
provider_address = AccountRole.get_address('GAS_GIFTER')
|
||||
provider_address = AccountRole.get_address('GAS_GIFTER', init_database)
|
||||
receiver_address = eth_empty_accounts[0]
|
||||
|
||||
c = init_rpc
|
||||
@@ -93,7 +93,7 @@ def test_refill_deduplication(
|
||||
eth_empty_accounts,
|
||||
):
|
||||
|
||||
provider_address = AccountRole.get_address('ETH_GAS_PROVIDER_ADDRESS')
|
||||
provider_address = AccountRole.get_address('ETH_GAS_PROVIDER_ADDRESS', init_database)
|
||||
receiver_address = eth_empty_accounts[0]
|
||||
|
||||
c = init_rpc
|
||||
|
||||
@@ -27,14 +27,14 @@ def test_states_initial(
|
||||
tx = {
|
||||
'from': init_w3.eth.accounts[0],
|
||||
'to': init_w3.eth.accounts[1],
|
||||
'nonce': 42,
|
||||
'nonce': 13,
|
||||
'gas': 21000,
|
||||
'gasPrice': 1000000,
|
||||
'value': 128,
|
||||
'chainId': 666,
|
||||
'chainId': 42,
|
||||
'data': '',
|
||||
}
|
||||
(tx_hash_hex, tx_raw_signed_hex) = sign_and_register_tx(tx, 'Foo:666', None)
|
||||
(tx_hash_hex, tx_raw_signed_hex) = sign_and_register_tx(tx, 'foo:bar:42', None)
|
||||
|
||||
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
|
||||
assert otx.status == StatusEnum.PENDING.value
|
||||
@@ -43,7 +43,7 @@ def test_states_initial(
|
||||
'cic_eth.eth.tx.check_gas',
|
||||
[
|
||||
[tx_hash_hex],
|
||||
'Foo:666',
|
||||
'foo:bar:42',
|
||||
[tx_raw_signed_hex],
|
||||
init_w3.eth.accounts[0],
|
||||
8000000,
|
||||
@@ -67,7 +67,7 @@ def test_states_initial(
|
||||
'cic_eth.eth.tx.check_gas',
|
||||
[
|
||||
[tx_hash_hex],
|
||||
'Foo:666',
|
||||
'foo:bar:42',
|
||||
[tx_raw_signed_hex],
|
||||
init_w3.eth.accounts[0],
|
||||
8000000,
|
||||
@@ -94,14 +94,14 @@ def test_states_failed(
|
||||
tx = {
|
||||
'from': init_w3.eth.accounts[0],
|
||||
'to': init_w3.eth.accounts[1],
|
||||
'nonce': 42,
|
||||
'nonce': 13,
|
||||
'gas': 21000,
|
||||
'gasPrice': 1000000,
|
||||
'value': 128,
|
||||
'chainId': 666,
|
||||
'chainId': 42,
|
||||
'data': '',
|
||||
}
|
||||
(tx_hash_hex, tx_raw_signed_hex) = sign_and_register_tx(tx, 'Foo:666', None)
|
||||
(tx_hash_hex, tx_raw_signed_hex) = sign_and_register_tx(tx, 'foo:bar:42', None)
|
||||
|
||||
otx = init_database.query(Otx).filter(Otx.tx_hash==tx_hash_hex).first()
|
||||
otx.sendfail(session=init_database)
|
||||
@@ -112,7 +112,7 @@ def test_states_failed(
|
||||
'cic_eth.eth.tx.check_gas',
|
||||
[
|
||||
[tx_hash_hex],
|
||||
'Foo:666',
|
||||
'foo:bar:42',
|
||||
[tx_raw_signed_hex],
|
||||
init_w3.eth.accounts[0],
|
||||
8000000,
|
||||
|
||||
@@ -67,7 +67,7 @@ def test_callback_tcp(
|
||||
logg.debug('recived {} '.format(data))
|
||||
o = json.loads(echo)
|
||||
try:
|
||||
assert o == data
|
||||
assert o['result'] == data
|
||||
except Exception as e:
|
||||
self.exception = e
|
||||
|
||||
@@ -130,7 +130,7 @@ def test_callback_redis(
|
||||
o = json.loads(echo['data'])
|
||||
logg.debug('recived {} '.format(o))
|
||||
try:
|
||||
assert o == data
|
||||
assert o['result'] == data
|
||||
except Exception as e:
|
||||
self.exception = e
|
||||
|
||||
|
||||
@@ -9,18 +9,18 @@ def test_db_role(
|
||||
foo = AccountRole.set('foo', eth_empty_accounts[0])
|
||||
init_database.add(foo)
|
||||
init_database.commit()
|
||||
assert AccountRole.get_address('foo') == eth_empty_accounts[0]
|
||||
assert AccountRole.get_address('foo', init_database) == eth_empty_accounts[0]
|
||||
|
||||
bar = AccountRole.set('bar', eth_empty_accounts[1])
|
||||
init_database.add(bar)
|
||||
init_database.commit()
|
||||
assert AccountRole.get_address('bar') == eth_empty_accounts[1]
|
||||
assert AccountRole.get_address('bar', init_database) == eth_empty_accounts[1]
|
||||
|
||||
foo = AccountRole.set('foo', eth_empty_accounts[2])
|
||||
init_database.add(foo)
|
||||
init_database.commit()
|
||||
assert AccountRole.get_address('foo') == eth_empty_accounts[2]
|
||||
assert AccountRole.get_address('bar') == eth_empty_accounts[1]
|
||||
assert AccountRole.get_address('foo', init_database) == eth_empty_accounts[2]
|
||||
assert AccountRole.get_address('bar', init_database) == eth_empty_accounts[1]
|
||||
|
||||
tag = AccountRole.role_for(eth_empty_accounts[2])
|
||||
assert tag == 'foo'
|
||||
|
||||
@@ -26,7 +26,7 @@ def test_set(
|
||||
'data': '',
|
||||
'chainId': 1,
|
||||
}
|
||||
(tx_hash, tx_signed) = sign_tx(tx_def, 'Foo:1')
|
||||
(tx_hash, tx_signed) = sign_tx(tx_def, 'foo:bar:1')
|
||||
otx = Otx(
|
||||
tx_def['nonce'],
|
||||
tx_def['from'],
|
||||
@@ -82,7 +82,7 @@ def test_clone(
|
||||
'data': '',
|
||||
'chainId': 1,
|
||||
}
|
||||
(tx_hash, tx_signed) = sign_tx(tx_def, 'Foo:1')
|
||||
(tx_hash, tx_signed) = sign_tx(tx_def, 'foo:bar:1')
|
||||
otx = Otx(
|
||||
tx_def['nonce'],
|
||||
tx_def['from'],
|
||||
|
||||
@@ -14,11 +14,11 @@ def test_unpack(
|
||||
'gas': 21000,
|
||||
'gasPrice': 200000000,
|
||||
'data': '0x',
|
||||
'chainId': 8995,
|
||||
'chainId': 42,
|
||||
}
|
||||
|
||||
(tx_hash, tx_raw) = sign_tx(tx, 'Foo:8995')
|
||||
(tx_hash, tx_raw) = sign_tx(tx, 'foo:bar:42')
|
||||
|
||||
tx_recovered = unpack_signed_raw_tx(bytes.fromhex(tx_raw[2:]), 8995)
|
||||
tx_recovered = unpack_signed_raw_tx(bytes.fromhex(tx_raw[2:]), 42)
|
||||
|
||||
assert tx_hash == tx_recovered['hash']
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
# standard imports
|
||||
import logging
|
||||
|
||||
# local imports
|
||||
from cic_eth.sync.head import HeadSyncer
|
||||
from cic_eth.sync.backend import SyncerBackend
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
def test_head(
|
||||
init_rpc,
|
||||
init_database,
|
||||
init_eth_tester,
|
||||
mocker,
|
||||
eth_empty_accounts,
|
||||
):
|
||||
|
||||
#backend = SyncBackend(eth_empty_accounts[0], 'foo')
|
||||
block_number = init_rpc.w3.eth.blockNumber
|
||||
backend = SyncerBackend.live('foo:666', block_number)
|
||||
syncer = HeadSyncer(backend)
|
||||
|
||||
#init_eth_tester.mine_block()
|
||||
nonce = init_rpc.w3.eth.getTransactionCount(init_rpc.w3.eth.accounts[0], 'pending')
|
||||
logg.debug('nonce {}'.format(nonce))
|
||||
tx = {
|
||||
'from': init_rpc.w3.eth.accounts[0],
|
||||
'to': eth_empty_accounts[0],
|
||||
'value': 404,
|
||||
'gas': 21000,
|
||||
'gasPrice': init_rpc.w3.eth.gasPrice,
|
||||
'nonce': nonce,
|
||||
}
|
||||
tx_hash_one = init_rpc.w3.eth.sendTransaction(tx)
|
||||
|
||||
block_number = init_rpc.w3.eth.blockNumber
|
||||
backend.set(block_number, 0)
|
||||
b = syncer.get(init_rpc.w3)
|
||||
|
||||
tx = init_rpc.w3.eth.getTransactionByBlock(b[0], 0)
|
||||
|
||||
assert tx.hash.hex() == tx_hash_one.hex()
|
||||
@@ -1,194 +0,0 @@
|
||||
# standard imports
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
import pytest
|
||||
from web3.exceptions import BlockNotFound
|
||||
from cic_registry import CICRegistry
|
||||
|
||||
# local imports
|
||||
from cic_eth.sync.history import HistorySyncer
|
||||
from cic_eth.sync.head import HeadSyncer
|
||||
#from cic_eth.sync import Syncer
|
||||
from cic_eth.db.models.otx import OtxSync
|
||||
from cic_eth.db.models.base import SessionBase
|
||||
from cic_eth.sync.backend import SyncerBackend
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
class FinishedError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class DebugFilter:
|
||||
|
||||
def __init__(self, address):
|
||||
self.txs = []
|
||||
self.monitor_to_address = address
|
||||
|
||||
def filter(self, w3, tx, rcpt, chain_spec):
|
||||
logg.debug('sync filter {}'.format(tx['hash'].hex()))
|
||||
if tx['to'] == self.monitor_to_address:
|
||||
self.txs.append(tx)
|
||||
# hack workaround, latest block hash not found in eth_tester for some reason
|
||||
if len(self.txs) == 2:
|
||||
raise FinishedError('intentionally finished on tx {}'.format(tx))
|
||||
|
||||
|
||||
def test_history(
|
||||
init_rpc,
|
||||
init_database,
|
||||
init_eth_tester,
|
||||
#celery_session_worker,
|
||||
eth_empty_accounts,
|
||||
):
|
||||
|
||||
nonce = init_rpc.w3.eth.getTransactionCount(init_rpc.w3.eth.accounts[0], 'pending')
|
||||
logg.debug('nonce {}'.format(nonce))
|
||||
tx = {
|
||||
'from': init_rpc.w3.eth.accounts[0],
|
||||
'to': eth_empty_accounts[0],
|
||||
'value': 404,
|
||||
'gas': 21000,
|
||||
'gasPrice': init_rpc.w3.eth.gasPrice,
|
||||
'nonce': nonce,
|
||||
}
|
||||
tx_hash_one = init_rpc.w3.eth.sendTransaction(tx)
|
||||
|
||||
nonce = init_rpc.w3.eth.getTransactionCount(init_rpc.w3.eth.accounts[0], 'pending')
|
||||
logg.debug('nonce {}'.format(nonce))
|
||||
tx = {
|
||||
'from': init_rpc.w3.eth.accounts[1],
|
||||
'to': eth_empty_accounts[0],
|
||||
'value': 404,
|
||||
'gas': 21000,
|
||||
'gasPrice': init_rpc.w3.eth.gasPrice,
|
||||
'nonce': nonce,
|
||||
}
|
||||
tx_hash_two = init_rpc.w3.eth.sendTransaction(tx)
|
||||
init_eth_tester.mine_block()
|
||||
|
||||
block_number = init_rpc.w3.eth.blockNumber
|
||||
|
||||
live_syncer = SyncerBackend.live('foo:666', 0)
|
||||
HeadSyncer(live_syncer)
|
||||
|
||||
history_syncers = SyncerBackend.resume('foo:666', block_number)
|
||||
|
||||
for history_syncer in history_syncers:
|
||||
logg.info('history syncer start {} target {}'.format(history_syncer.start(), history_syncer.target()))
|
||||
|
||||
backend = history_syncers[0]
|
||||
|
||||
syncer = HistorySyncer(backend)
|
||||
fltr = DebugFilter(eth_empty_accounts[0])
|
||||
syncer.filter.append(fltr.filter)
|
||||
|
||||
logg.debug('have txs {} {}'.format(tx_hash_one.hex(), tx_hash_two.hex()))
|
||||
|
||||
try:
|
||||
syncer.loop(0.1)
|
||||
except FinishedError:
|
||||
pass
|
||||
except BlockNotFound as e:
|
||||
logg.error('the last block given in loop does not seem to exist :/ {}'.format(e))
|
||||
|
||||
check_hashes = []
|
||||
for h in fltr.txs:
|
||||
check_hashes.append(h['hash'].hex())
|
||||
assert tx_hash_one.hex() in check_hashes
|
||||
assert tx_hash_two.hex() in check_hashes
|
||||
|
||||
|
||||
def test_history_multiple(
|
||||
init_rpc,
|
||||
init_database,
|
||||
init_eth_tester,
|
||||
#celery_session_worker,
|
||||
eth_empty_accounts,
|
||||
):
|
||||
|
||||
block_number = init_rpc.w3.eth.blockNumber
|
||||
live_syncer = SyncerBackend.live('foo:666', block_number)
|
||||
HeadSyncer(live_syncer)
|
||||
|
||||
nonce = init_rpc.w3.eth.getTransactionCount(init_rpc.w3.eth.accounts[0], 'pending')
|
||||
logg.debug('nonce {}'.format(nonce))
|
||||
tx = {
|
||||
'from': init_rpc.w3.eth.accounts[0],
|
||||
'to': eth_empty_accounts[0],
|
||||
'value': 404,
|
||||
'gas': 21000,
|
||||
'gasPrice': init_rpc.w3.eth.gasPrice,
|
||||
'nonce': nonce,
|
||||
}
|
||||
tx_hash_one = init_rpc.w3.eth.sendTransaction(tx)
|
||||
|
||||
|
||||
init_eth_tester.mine_block()
|
||||
block_number = init_rpc.w3.eth.blockNumber
|
||||
history_syncers = SyncerBackend.resume('foo:666', block_number)
|
||||
for history_syncer in history_syncers:
|
||||
logg.info('halfway history syncer start {} target {}'.format(history_syncer.start(), history_syncer.target()))
|
||||
live_syncer = SyncerBackend.live('foo:666', block_number)
|
||||
HeadSyncer(live_syncer)
|
||||
|
||||
nonce = init_rpc.w3.eth.getTransactionCount(init_rpc.w3.eth.accounts[0], 'pending')
|
||||
logg.debug('nonce {}'.format(nonce))
|
||||
tx = {
|
||||
'from': init_rpc.w3.eth.accounts[1],
|
||||
'to': eth_empty_accounts[0],
|
||||
'value': 404,
|
||||
'gas': 21000,
|
||||
'gasPrice': init_rpc.w3.eth.gasPrice,
|
||||
'nonce': nonce,
|
||||
}
|
||||
tx_hash_two = init_rpc.w3.eth.sendTransaction(tx)
|
||||
|
||||
init_eth_tester.mine_block()
|
||||
block_number = init_rpc.w3.eth.blockNumber
|
||||
history_syncers = SyncerBackend.resume('foo:666', block_number)
|
||||
live_syncer = SyncerBackend.live('foo:666', block_number)
|
||||
HeadSyncer(live_syncer)
|
||||
|
||||
for history_syncer in history_syncers:
|
||||
logg.info('history syncer start {} target {}'.format(history_syncer.start(), history_syncer.target()))
|
||||
|
||||
assert len(history_syncers) == 2
|
||||
|
||||
backend = history_syncers[0]
|
||||
syncer = HistorySyncer(backend)
|
||||
fltr = DebugFilter(eth_empty_accounts[0])
|
||||
syncer.filter.append(fltr.filter)
|
||||
try:
|
||||
syncer.loop(0.1)
|
||||
except FinishedError:
|
||||
pass
|
||||
except BlockNotFound as e:
|
||||
logg.error('the last block given in loop does not seem to exist :/ {}'.format(e))
|
||||
|
||||
check_hashes = []
|
||||
for h in fltr.txs:
|
||||
check_hashes.append(h['hash'].hex())
|
||||
assert tx_hash_one.hex() in check_hashes
|
||||
|
||||
|
||||
backend = history_syncers[1]
|
||||
syncer = HistorySyncer(backend)
|
||||
fltr = DebugFilter(eth_empty_accounts[0])
|
||||
syncer.filter.append(fltr.filter)
|
||||
try:
|
||||
syncer.loop(0.1)
|
||||
except FinishedError:
|
||||
pass
|
||||
except BlockNotFound as e:
|
||||
logg.error('the last block given in loop does not seem to exist :/ {}'.format(e))
|
||||
|
||||
check_hashes = []
|
||||
for h in fltr.txs:
|
||||
check_hashes.append(h['hash'].hex())
|
||||
assert tx_hash_two.hex() in check_hashes
|
||||
|
||||
history_syncers = SyncerBackend.resume('foo:666', block_number)
|
||||
|
||||
assert len(history_syncers) == 0
|
||||
@@ -1,79 +0,0 @@
|
||||
# third-party imports
|
||||
import pytest
|
||||
|
||||
# local imports
|
||||
from cic_eth.db.models.sync import BlockchainSync
|
||||
from cic_eth.sync.backend import SyncerBackend
|
||||
|
||||
|
||||
def test_scratch(
|
||||
init_database,
|
||||
):
|
||||
|
||||
with pytest.raises(ValueError):
|
||||
s = SyncerBackend('Testchain:666', 13)
|
||||
|
||||
syncer = SyncerBackend.live('Testchain:666', 13)
|
||||
|
||||
s = SyncerBackend('Testchain:666', syncer.object_id)
|
||||
|
||||
|
||||
|
||||
def test_live(
|
||||
init_database,
|
||||
):
|
||||
|
||||
s = SyncerBackend.live('Testchain:666', 13)
|
||||
|
||||
s.connect()
|
||||
assert s.db_object.target() == None
|
||||
s.disconnect()
|
||||
|
||||
assert s.get() == (13, 0)
|
||||
|
||||
s.set(14, 1)
|
||||
assert s.get() == (14, 1)
|
||||
|
||||
|
||||
def test_resume(
|
||||
init_database,
|
||||
):
|
||||
|
||||
live = SyncerBackend.live('Testchain:666', 13)
|
||||
live.set(13, 2)
|
||||
|
||||
resumes = SyncerBackend.resume('Testchain:666', 26)
|
||||
|
||||
assert len(resumes) == 1
|
||||
resume = resumes[0]
|
||||
|
||||
assert resume.get() == (13, 2)
|
||||
|
||||
resume.set(13, 4)
|
||||
assert resume.get() == (13, 4)
|
||||
assert resume.start() == (13, 2)
|
||||
assert resume.target() == 26
|
||||
|
||||
|
||||
def test_unsynced(
|
||||
init_database,
|
||||
):
|
||||
|
||||
live = SyncerBackend.live('Testchain:666', 13)
|
||||
live.set(13, 2)
|
||||
|
||||
resumes = SyncerBackend.resume('Testchain:666', 26)
|
||||
live = SyncerBackend.live('Testchain:666', 26)
|
||||
resumes[0].set(18, 12)
|
||||
|
||||
resumes = SyncerBackend.resume('Testchain:666', 42)
|
||||
|
||||
assert len(resumes) == 2
|
||||
|
||||
assert resumes[0].start() == (13, 2)
|
||||
assert resumes[0].get() == (18, 12)
|
||||
assert resumes[0].target() == 26
|
||||
|
||||
assert resumes[1].start() == (26, 0)
|
||||
assert resumes[1].get() == (26, 0)
|
||||
assert resumes[1].target() == 42
|
||||
@@ -7,24 +7,22 @@ def test_unpack(
|
||||
):
|
||||
|
||||
tx = {
|
||||
'nonce': 42,
|
||||
'nonce': 13,
|
||||
'from': init_w3_conn.eth.accounts[0],
|
||||
'to': init_w3_conn.eth.accounts[1],
|
||||
'data': '0xdeadbeef',
|
||||
'value': 1024,
|
||||
'gas': 23000,
|
||||
'gasPrice': 1422521,
|
||||
'chainId': 1337,
|
||||
'chainId': 42,
|
||||
}
|
||||
|
||||
(tx_hash, tx_signed) = sign_tx(tx, 'Foo:1337')
|
||||
(tx_hash, tx_signed) = sign_tx(tx, 'foo:bar:42')
|
||||
|
||||
tx_unpacked = unpack_signed_raw_tx_hex(tx_signed, 1337)
|
||||
tx_unpacked = unpack_signed_raw_tx_hex(tx_signed, 42)
|
||||
|
||||
for k in tx.keys():
|
||||
assert tx[k] == tx_unpacked[k]
|
||||
|
||||
tx_str = tx_hex_string(tx_signed, 1337)
|
||||
assert tx_str == 'tx nonce 42 from 0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf to 0x2B5AD5c4795c026514f8317c7a215E218DcCD6cF hash 0xe5aba32b1a7255d035faccb70cd8bb92c8c4a2f6bbea3f655bc5a8b802bbaa91'
|
||||
|
||||
|
||||
tx_str = tx_hex_string(tx_signed, 42)
|
||||
assert tx_str == 'tx nonce 13 from 0x7E5F4552091A69125d5DfCb7b8C2659029395Bdf to 0x2B5AD5c4795c026514f8317c7a215E218DcCD6cF hash 0x23ba3c2b400fbddcacc77d99644bfb17ac4653a69bfa46e544801fbd841b8f1e'
|
||||
|
||||
1
apps/cic-meta/.gitignore
vendored
1
apps/cic-meta/.gitignore
vendored
@@ -1,5 +1,6 @@
|
||||
node_modules
|
||||
dist
|
||||
dist-web
|
||||
dist-server
|
||||
scratch
|
||||
tests
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
* 0.0.7-pending
|
||||
- Add immutable content
|
||||
- Add db lock on server
|
||||
- Add ArgPair and KeyStore to src exports
|
||||
* 0.0.6
|
||||
- Add server build
|
||||
* 0.0.5
|
||||
|
||||
@@ -3,11 +3,8 @@ FROM node:15.3.0-alpine3.10
|
||||
WORKDIR /tmp/src/cic-meta
|
||||
|
||||
COPY cic-meta/package.json \
|
||||
cic-meta/package-lock.json \
|
||||
./
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY cic-meta/src/ src/
|
||||
COPY cic-meta/tests/ tests/
|
||||
COPY cic-meta/scripts/ scripts/
|
||||
@@ -15,6 +12,8 @@ COPY cic-meta/scripts/ scripts/
|
||||
|
||||
RUN alias tsc=node_modules/typescript/bin/tsc
|
||||
|
||||
RUN npm install
|
||||
|
||||
COPY cic-meta/.config/ /usr/local/etc/cic-meta/
|
||||
# COPY cic-meta/scripts/server/initdb/server.postgres.sql /usr/local/share/cic-meta/sql/server.sql
|
||||
|
||||
|
||||
2
apps/cic-meta/package-lock.json
generated
2
apps/cic-meta/package-lock.json
generated
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "cic-client-meta",
|
||||
"version": "0.0.6",
|
||||
"version": "0.0.7-alpha.2",
|
||||
"lockfileVersion": 1,
|
||||
"requires": true,
|
||||
"dependencies": {
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
{
|
||||
"name": "cic-client-meta",
|
||||
"version": "0.0.6",
|
||||
"version": "0.0.7-alpha.3",
|
||||
"description": "Signed CRDT metadata graphs for the CIC network",
|
||||
"main": "dist/index.js",
|
||||
"types": "dist/index.d.ts",
|
||||
"scripts": {
|
||||
"test": "mocha -r node_modules/node-localstorage/register -r ts-node/register tests/*.ts",
|
||||
"build": "node_modules/typescript/bin/tsc -d --outDir dist",
|
||||
"build": "node_modules/typescript/bin/tsc -d --outDir dist src/index.ts",
|
||||
"build-server": "tsc -d --outDir dist-server scripts/server/*.ts",
|
||||
"pack": "node_modules/typescript/bin/tsc -d --outDir dist && webpack",
|
||||
"clean": "rm -rf dist"
|
||||
},
|
||||
"bin": {
|
||||
"cic-meta-server": "./dist-server/scripts/server/server.js"
|
||||
"clean": "rm -rf dist",
|
||||
"prepare": "npm run build && npm run build-server"
|
||||
},
|
||||
"dependencies": {
|
||||
"@ethereumjs/tx": "^3.0.0-beta.1",
|
||||
@@ -34,6 +32,12 @@
|
||||
"webpack-cli": "^4.2.0"
|
||||
},
|
||||
"author": "Louis Holbrook <dev@holbrook.no>",
|
||||
"contributors": [
|
||||
{
|
||||
"name": "Spencer Ofwiti",
|
||||
"email": "maxspencer56@gmail.com"
|
||||
}
|
||||
],
|
||||
"license": "GPL-3.0-or-later",
|
||||
"engines": {
|
||||
"node": "~15.3.0"
|
||||
|
||||
@@ -101,6 +101,18 @@ function parseDigest(url) {
|
||||
|
||||
async function processRequest(req, res) {
|
||||
let digest = undefined;
|
||||
const headers = {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Methods": "OPTIONS, POST, GET, PUT",
|
||||
"Access-Control-Max-Age": 2592000, // 30 days
|
||||
"Access-Control-Allow-Headers": 'Access-Control-Allow-Origin, Content-Type, x-cic-automerge'
|
||||
};
|
||||
|
||||
if (req.method === "OPTIONS") {
|
||||
res.writeHead(200, headers);
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
if (!['PUT', 'GET', 'POST'].includes(req.method)) {
|
||||
res.writeHead(405, {"Content-Type": "text/plain"});
|
||||
@@ -137,6 +149,7 @@ async function processRequest(req, res) {
|
||||
console.debug('mode', mod);
|
||||
let content = '';
|
||||
let contentType = 'application/json';
|
||||
console.debug('handling data', data);
|
||||
let r:any = undefined;
|
||||
try {
|
||||
switch (mod) {
|
||||
@@ -192,14 +205,17 @@ async function processRequest(req, res) {
|
||||
}
|
||||
|
||||
if (content === undefined) {
|
||||
console.error('empty onctent', data);
|
||||
res.writeHead(400, {"Content-Type": "text/plain"});
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
const responseContentLength = (new TextEncoder().encode(content)).length;
|
||||
res.writeHead(200, {
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Content-Type": contentType,
|
||||
"Content-Length": content.length,
|
||||
"Content-Length": responseContentLength,
|
||||
});
|
||||
res.write(content);
|
||||
res.end();
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
export { PGPSigner, PGPKeyStore } from './auth';
|
||||
export { Envelope, Syncable } from './sync';
|
||||
export { PGPSigner, PGPKeyStore, Signer, KeyStore } from './auth';
|
||||
export { ArgPair, Envelope, Syncable } from './sync';
|
||||
export { User } from './assets/user';
|
||||
export { Phone } from './assets/phone';
|
||||
export { Config } from './config';
|
||||
|
||||
@@ -93,7 +93,7 @@ function orderDict(src) {
|
||||
return dst;
|
||||
}
|
||||
|
||||
class Syncable implements JSONSerializable, Authoritative {
|
||||
class Syncable implements JSONSerializable, Authoritative, Signable {
|
||||
|
||||
id: string
|
||||
timestamp: number
|
||||
|
||||
@@ -46,3 +46,5 @@ RUN cd cic-ussd && \
|
||||
# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
|
||||
COPY cic-ussd/.config/ /usr/local/etc/cic-ussd/
|
||||
COPY cic-ussd/cic_ussd/db/migrations/ /usr/local/share/cic-ussd/alembic
|
||||
|
||||
WORKDIR /root
|
||||
@@ -1,58 +0,0 @@
|
||||
#!python3
|
||||
|
||||
"""Gas transfer script
|
||||
|
||||
.. moduleauthor:: Louis Holbrook <dev@holbrook.no>
|
||||
.. pgp:: 0826EDA1702D1E87C6E2875121D2E7BB88C2A746
|
||||
|
||||
"""
|
||||
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import json
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
import web3
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
logging.getLogger('web3').setLevel(logging.WARNING)
|
||||
logging.getLogger('urllib3').setLevel(logging.WARNING)
|
||||
|
||||
default_abi_dir = os.environ.get('ETH_ABI_DIR', '/usr/share/local/cic/solidity/abi')
|
||||
default_eth_provider = os.environ.get('ETH_PROVIDER', 'http://localhost:8545')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-p', '--provider', dest='p', default=default_eth_provider, type=str, help='Web3 provider url (http only)')
|
||||
argparser.add_argument('-t', '--token-address', dest='t', type=str, help='Token address. If not set, will return gas balance')
|
||||
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, default=default_abi_dir, help='Directory containing bytecode and abi (default {})'.format(default_abi_dir))
|
||||
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||
argparser.add_argument('account', type=str, help='Account address')
|
||||
args = argparser.parse_args()
|
||||
|
||||
|
||||
if args.v:
|
||||
logg.setLevel(logging.DEBUG)
|
||||
|
||||
def main():
|
||||
w3 = web3.Web3(web3.Web3.HTTPProvider(args.p))
|
||||
|
||||
balance = None
|
||||
if args.t != None:
|
||||
f = open(os.path.join(args.abi_dir, 'ERC20.json'))
|
||||
abi = json.load(f)
|
||||
f.close()
|
||||
c = w3.eth.contract(abi=abi, address=args.t)
|
||||
balance = c.functions.balanceOf(args.account).call()
|
||||
else:
|
||||
balance =w3.eth.getBalance(args.account)
|
||||
|
||||
print(balance)
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,47 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
|
||||
import celery
|
||||
from cic_eth.api import Api
|
||||
import confini
|
||||
import argparse
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger('create_account_script')
|
||||
logging.getLogger('confini').setLevel(logging.WARNING)
|
||||
logging.getLogger('gnupg').setLevel(logging.WARNING)
|
||||
|
||||
config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('--no-register', dest='no_register', action='store_true', help='Do not register new account in on-chain accounts index')
|
||||
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.vv:
|
||||
logg.setLevel(logging.DEBUG)
|
||||
if args.v:
|
||||
logg.setLevel(logging.INFO)
|
||||
|
||||
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config.process()
|
||||
|
||||
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||
|
||||
api = Api(config.get('CIC_CHAIN_SPEC'))
|
||||
|
||||
registration_account = None
|
||||
#t = api.create_account(registration_account=registration_account)
|
||||
if len(sys.argv) > 1:
|
||||
registration_account = config.get('DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER', None)
|
||||
|
||||
logg.debug('accounts index writer NOT USED {}'.format(registration_account))
|
||||
|
||||
register = not args.no_register
|
||||
logg.debug('register {}'.format(register))
|
||||
t = api.create_account(register=register)
|
||||
|
||||
print(t.get())
|
||||
@@ -1,48 +0,0 @@
|
||||
#!python3
|
||||
|
||||
"""Decode raw transaction
|
||||
|
||||
.. moduleauthor:: Louis Holbrook <dev@holbrook.no>
|
||||
.. pgp:: 0826EDA1702D1E87C6E2875121D2E7BB88C2A746
|
||||
|
||||
"""
|
||||
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import json
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
default_abi_dir = os.environ.get('ETH_ABI_DIR', '/usr/share/local/cic/solidity/abi')
|
||||
default_eth_provider = os.environ.get('ETH_PROVIDER', 'http://localhost:8545')
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||
argparser.add_argument('-i', '--chain-id', dest='i', type=int, help='Numeric network id')
|
||||
argparser.add_argument('tx', type=str, help='hex-encoded signed raw transaction')
|
||||
args = argparser.parse_args()
|
||||
|
||||
|
||||
if args.v:
|
||||
logg.setLevel(logging.DEBUG)
|
||||
|
||||
def main():
|
||||
tx_raw = args.tx
|
||||
if tx_raw[:2] == '0x':
|
||||
tx_raw = tx_raw[2:]
|
||||
tx_raw_bytes = bytes.fromhex(tx_raw)
|
||||
tx = unpack_signed_raw_tx(tx_raw_bytes, args.i)
|
||||
for k in tx.keys():
|
||||
print('{}: {}'.format(k, tx[k]))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,24 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
cic_data_dir=${CIC_DATA_DIR:-/tmp/cic}
|
||||
t=${1:-$(mktemp)}
|
||||
prefix=''
|
||||
if [ ! -z $2 ]; then
|
||||
prefix="${2}_"
|
||||
fi
|
||||
|
||||
echo "#!/bin/bash" > $t
|
||||
echo "set +a" >> $t
|
||||
cat $cic_data_dir/.env | sed -e "s/^\([A-Z]\)/export ${prefix}\1/g" >> $t
|
||||
|
||||
#if [ -f $cic_data_dir/.env ]; then
|
||||
#cat $cic_data_dir/.env | sed -e "s/^\([A-Z]\)/export ${prefix}\1/g" >> $t
|
||||
#fi
|
||||
echo "export CONFINI_DIR=$(dirname $(realpath .))/config_template" >> $t
|
||||
source $t
|
||||
echo "export CELERY_BROKER_URL=redis://localhost:${HTTP_PORT_REDIS}" >> $t
|
||||
echo "export CELERY_RESULT_URL=redis://localhost:${HTTP_PORT_REDIS}" >> $t
|
||||
echo "export ETH_PROVIDER=http://localhost:${HTTP_PORT_ETH}" >> $t
|
||||
echo "export META_PROVIDER=http://localhost:${HTTP_PORT_CIC_META}" >> $t
|
||||
echo "set -a" >> $t
|
||||
echo $t
|
||||
@@ -1,105 +0,0 @@
|
||||
#!python3
|
||||
|
||||
"""Gas transfer script
|
||||
|
||||
.. moduleauthor:: Louis Holbrook <dev@holbrook.no>
|
||||
.. pgp:: 0826EDA1702D1E87C6E2875121D2E7BB88C2A746
|
||||
|
||||
"""
|
||||
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import json
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
import web3
|
||||
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
|
||||
from crypto_dev_signer.keystore import DictKeystore
|
||||
from crypto_dev_signer.eth.helper import EthTxExecutor
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
logging.getLogger('web3').setLevel(logging.WARNING)
|
||||
logging.getLogger('urllib3').setLevel(logging.WARNING)
|
||||
|
||||
default_abi_dir = '/usr/share/local/cic/solidity/abi'
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
|
||||
argparser.add_argument('-w', action='store_true', help='Wait for the last transaction to be confirmed')
|
||||
argparser.add_argument('-ww', action='store_true', help='Wait for every transaction to be confirmed')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, default='Ethereum:1', help='Chain specification string')
|
||||
argparser.add_argument('-a', '--signer-address', dest='a', type=str, help='Signing address')
|
||||
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
|
||||
argparser.add_argument('recipient', type=str, help='Ethereum address of recipient')
|
||||
argparser.add_argument('amount', type=int, help='Amount of tokens to mint and gift')
|
||||
args = argparser.parse_args()
|
||||
|
||||
|
||||
if args.vv:
|
||||
logg.setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logg.setLevel(logging.INFO)
|
||||
|
||||
block_last = args.w
|
||||
block_all = args.ww
|
||||
|
||||
w3 = web3.Web3(web3.Web3.HTTPProvider(args.p))
|
||||
|
||||
signer_address = None
|
||||
keystore = DictKeystore()
|
||||
if args.y != None:
|
||||
logg.debug('loading keystore file {}'.format(args.y))
|
||||
signer_address = keystore.import_keystore_file(args.y)
|
||||
logg.debug('now have key for signer address {}'.format(signer_address))
|
||||
signer = EIP155Signer(keystore)
|
||||
|
||||
chain_pair = args.i.split(':')
|
||||
chain_id = int(chain_pair[1])
|
||||
|
||||
helper = EthTxExecutor(
|
||||
w3,
|
||||
signer_address,
|
||||
signer,
|
||||
chain_id,
|
||||
block=args.ww,
|
||||
)
|
||||
|
||||
|
||||
def build_gas_transaction(recipient, value):
|
||||
def builder(tx):
|
||||
tx['to'] = recipient
|
||||
tx['value'] = value
|
||||
tx['data'] = '0x'
|
||||
return tx
|
||||
return builder
|
||||
|
||||
|
||||
def main():
|
||||
recipient = args.recipient
|
||||
value = args.amount
|
||||
|
||||
logg.debug('sender {} balance before: {}'.format(signer_address, w3.eth.getBalance(signer_address)))
|
||||
logg.debug('recipient {} balance before: {}'.format(recipient, w3.eth.getBalance(recipient)))
|
||||
(tx_hash, rcpt) = helper.sign_and_send(
|
||||
[
|
||||
build_gas_transaction(recipient, value),
|
||||
],
|
||||
)
|
||||
logg.debug('sender {} balance after: {}'.format(signer_address, w3.eth.getBalance(signer_address)))
|
||||
logg.debug('recipient {} balance after: {}'.format(recipient, w3.eth.getBalance(recipient)))
|
||||
|
||||
if block_last:
|
||||
helper.wait_for()
|
||||
|
||||
print(tx_hash)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,101 +0,0 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const cic = require('cic-client-meta');
|
||||
const http = require('http');
|
||||
const confini = require('confini');
|
||||
|
||||
console.debug('sorry this script doesnt read cli flags, set all in env vars');
|
||||
|
||||
let config_data_dir = process.env.CONFINI_DIR;
|
||||
if (config_data_dir === undefined) {
|
||||
config_data_dir = '/usr/local/etc/cic';
|
||||
}
|
||||
const config = new confini.Config(config_data_dir, process.env.CONFINI_ENV_PREFIX);
|
||||
config.process();
|
||||
Object.keys(config.store).forEach((k) => {
|
||||
console.debug(k, config.get(k));
|
||||
});
|
||||
|
||||
// flatten file list from directories recursively
|
||||
// cheekily though gratefully stolen from https://coderrocketfuel.com/article/recursively-list-all-the-files-in-a-directory-using-node-js
|
||||
const getAllFiles = function(dirPath, arrayOfFiles) {
|
||||
files = fs.readdirSync(dirPath)
|
||||
|
||||
arrayOfFiles = arrayOfFiles || []
|
||||
|
||||
files.forEach(function(file) {
|
||||
if (fs.statSync(dirPath + "/" + file).isDirectory()) {
|
||||
arrayOfFiles = getAllFiles(dirPath + "/" + file, arrayOfFiles)
|
||||
} else {
|
||||
arrayOfFiles.push(path.join(dirPath, "/", file))
|
||||
}
|
||||
})
|
||||
|
||||
return arrayOfFiles
|
||||
}
|
||||
|
||||
async function sendit(uid, envelope) {
|
||||
const d = envelope.toJSON();
|
||||
|
||||
const opts = {
|
||||
method: 'PUT',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Content-Length': d.length,
|
||||
'X-CIC-AUTOMERGE': 'client',
|
||||
|
||||
},
|
||||
};
|
||||
let url = config.get('META_PROVIDER'); //['archiveUrl'];
|
||||
url = url.replace(new RegExp('^(.+://[^/]+)/*$'), '$1/');
|
||||
const req = http.request(url + uid, opts, (res) => {
|
||||
res.on('data', process.stdout.write);
|
||||
res.on('end', () => {
|
||||
console.log('result', res.statusCode, res.headers);
|
||||
});
|
||||
});
|
||||
|
||||
req.write(d);
|
||||
req.end();
|
||||
}
|
||||
|
||||
function doit(keystore) {
|
||||
dataDir = 'data';
|
||||
if (process.argv.length > 2) {
|
||||
dataDir = process.argv[2];
|
||||
}
|
||||
console.log('argv', process.argv);
|
||||
console.log('datadir', path.join(dataDir, 'person'));
|
||||
getAllFiles(path.join(dataDir, 'person')).forEach((filename) => {
|
||||
console.debug('person file', filename);
|
||||
const signer = new cic.PGPSigner(keystore);
|
||||
const parts = filename.split('.');
|
||||
const uid = path.basename(parts[0]);
|
||||
|
||||
const d = fs.readFileSync(filename, 'utf-8');
|
||||
const o = JSON.parse(d);
|
||||
|
||||
const s = new cic.Syncable(uid, o);
|
||||
console.log(s);
|
||||
s.setSigner(signer);
|
||||
s.onwrap = (env) => {
|
||||
console.log('env', env);
|
||||
//console.log('sign', s.m.signature.digest);
|
||||
sendit(uid, env);
|
||||
};
|
||||
s.sign();
|
||||
});
|
||||
}
|
||||
|
||||
pk = fs.readFileSync(path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_PRIVATEKEY_FILE')));
|
||||
pubk = fs.readFileSync(path.join(config.get('PGP_EXPORTS_DIR'), config.get('DEV_PGP_PUBLICKEYS_ACTIVE_FILE')));
|
||||
|
||||
new cic.PGPKeyStore(
|
||||
process.env['PGP_PASSPHRASE'],
|
||||
pk,
|
||||
pubk,
|
||||
undefined,
|
||||
undefined,
|
||||
doit,
|
||||
);
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
mkdir -vp .tmp
|
||||
echo -n '' > .tmp/.env_accounts
|
||||
account_labels=(
|
||||
DEV_ETH_ACCOUNT_BANCOR_DEPLOYER
|
||||
DEV_ETH_ACCOUNT_GAS_PROVIDER
|
||||
DEV_ETH_ACCOUNT_RESERVE_OWNER
|
||||
DEV_ETH_ACCOUNT_RESERVE_MINTER
|
||||
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_OWNER
|
||||
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER
|
||||
DEV_ETH_ACCOUNT_SARAFU_OWNER
|
||||
DEV_ETH_ACCOUNT_SARAFU_GIFTER
|
||||
DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER
|
||||
DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER
|
||||
)
|
||||
bip39gen -n ${#account_labels[@]} "$DEV_MNEMONIC"
|
||||
i=0
|
||||
for a in `bip39gen -n ${#account_labels[@]} "$DEV_MNEMONIC" | jq -r .address[]`; do
|
||||
exportline=${account_labels[$i]}=$a
|
||||
export $exportline
|
||||
echo $exportline >> .tmp/.env_accounts
|
||||
echo exportline $exportline
|
||||
i=$(($i+1))
|
||||
done
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
from setuptools import setup
|
||||
|
||||
setup(
|
||||
)
|
||||
@@ -1,150 +0,0 @@
|
||||
# standard imports
|
||||
import os
|
||||
import logging
|
||||
import argparse
|
||||
import re
|
||||
import json
|
||||
import signal
|
||||
import random
|
||||
import time
|
||||
|
||||
# third-party imports
|
||||
import confini
|
||||
import web3
|
||||
from cic_registry.chain import ChainSpec
|
||||
from cic_registry.chain import ChainRegistry
|
||||
from cic_registry import CICRegistry
|
||||
from eth_token_index import TokenUniqueSymbolIndex as TokenIndex
|
||||
from eth_accounts_index import AccountRegistry
|
||||
|
||||
from cic_eth.api import Api
|
||||
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
logging.getLogger('websockets.protocol').setLevel(logging.CRITICAL)
|
||||
logging.getLogger('web3.RequestManager').setLevel(logging.CRITICAL)
|
||||
logging.getLogger('web3.providers.WebsocketProvider').setLevel(logging.CRITICAL)
|
||||
logging.getLogger('web3.providers.HTTPProvider').setLevel(logging.CRITICAL)
|
||||
|
||||
default_data_dir = '/usr/local/share/cic/solidity/abi'
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default='./config', help='config file')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, default=default_data_dir, help='Directory containing bytecode and abi (default: {})'.format(default_data_dir))
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
argparser.add_argument('--wait-max', dest='wait_max', default=2.0, type=float, help='maximum time in decimal seconds to wait between transactions')
|
||||
argparser.add_argument('--account-index-address', dest='account_index', type=str, help='Contract address of accounts index')
|
||||
argparser.add_argument('--token-index-address', dest='token_index', type=str, help='Contract address of token index')
|
||||
argparser.add_argument('--approval-escrow-address', dest='approval_escrow', type=str, help='Contract address for transfer approvals')
|
||||
argparser.add_argument('--declarator-address', dest='declarator', type=str, help='Address of declarations contract to perform lookup against')
|
||||
argparser.add_argument('-a', '--accounts-index-writer', dest='a', type=str, help='Address of account with access to add to accounts index')
|
||||
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
config = confini.Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
args_override = {
|
||||
'ETH_ABI_DIR': getattr(args, 'abi_dir'),
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'DEV_ETH_ACCOUNTS_INDEX_ADDRESS': getattr(args, 'account_index'),
|
||||
'DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER': getattr(args, 'a'),
|
||||
'DEV_ETH_ERC20_APPROVAL_ESCROW_ADDRESS': getattr(args, 'approval_escrow'),
|
||||
'DEV_ETH_TOKEN_INDEX_ADDRESS': getattr(args, 'token_index'),
|
||||
}
|
||||
config.dict_override(args_override, 'cli flag')
|
||||
config.validate()
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config:\n{}'.format(config))
|
||||
|
||||
re_websocket = r'^wss?:'
|
||||
re_http = r'^https?:'
|
||||
blockchain_provider = None
|
||||
if re.match(re_websocket, config.get('ETH_PROVIDER')):
|
||||
blockchain_provider = web3.Web3.WebsocketProvider(config.get('ETH_PROVIDER'))
|
||||
elif re.match(re_http, config.get('ETH_PROVIDER')):
|
||||
blockchain_provider = web3.Web3.HTTPProvider(config.get('ETH_PROVIDER'))
|
||||
w3 = web3.Web3(blockchain_provider)
|
||||
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
CICRegistry.init(w3, config.get('CIC_REGISTRY_ADDRESS'), chain_spec)
|
||||
CICRegistry.add_path(config.get('ETH_ABI_DIR'))
|
||||
|
||||
chain_registry = ChainRegistry(chain_spec)
|
||||
CICRegistry.add_chain_registry(chain_registry, True)
|
||||
|
||||
run = True
|
||||
|
||||
def inthandler(name, frame):
|
||||
logg.warning('got {}, stopping'.format(name))
|
||||
global run
|
||||
run = False
|
||||
|
||||
signal.signal(signal.SIGTERM, inthandler)
|
||||
signal.signal(signal.SIGINT, inthandler)
|
||||
|
||||
api = Api(str(chain_spec))
|
||||
|
||||
f = open(os.path.join(config.get('ETH_ABI_DIR'), 'ERC20.json'))
|
||||
erc20_abi = json.load(f)
|
||||
f.close()
|
||||
|
||||
def get_tokens():
|
||||
tokens = []
|
||||
token_index = TokenIndex(w3, config.get('CIC_TOKEN_INDEX_ADDRESS'))
|
||||
token_count = token_index.count()
|
||||
for i in range(token_count):
|
||||
tokens.append(token_index.get_index(i))
|
||||
logg.debug('tokens {}'.format(tokens))
|
||||
return tokens
|
||||
|
||||
def get_addresses():
|
||||
address_index = AccountRegistry(w3, config.get('CIC_ACCOUNTS_INDEX_ADDRESS'))
|
||||
address_count = address_index.count()
|
||||
addresses = address_index.last(address_count-1)
|
||||
logg.debug('addresses {} {}'.format(address_count, addresses))
|
||||
return addresses
|
||||
|
||||
random.seed()
|
||||
|
||||
while run:
|
||||
n = random.randint(0, 255)
|
||||
|
||||
# some of the time do other things than transfers
|
||||
if n & 0xf8 == 0xf8:
|
||||
t = api.create_account()
|
||||
logg.info('create account {}'.format(t))
|
||||
|
||||
else:
|
||||
tokens = get_tokens()
|
||||
addresses = get_addresses()
|
||||
address_pair = random.choices(addresses, k=2)
|
||||
sender = address_pair[0]
|
||||
recipient = address_pair[1]
|
||||
token = random.choice(tokens)
|
||||
|
||||
c = w3.eth.contract(abi=erc20_abi, address=token)
|
||||
sender_balance = c.functions.balanceOf(sender).call()
|
||||
token_symbol = c.functions.symbol().call()
|
||||
amount = int(random.random() * (sender_balance / 2))
|
||||
|
||||
n = random.randint(0, 255)
|
||||
|
||||
if n & 0xc0 == 0xc0:
|
||||
t = api.transfer_request(sender, recipient, config.get('CIC_APPROVAL_ESCROW_ADDRESS'), amount, token_symbol)
|
||||
logg.info('transfer REQUEST {} {} from {} to {} => {}'.format(amount, token_symbol, sender, recipient, t))
|
||||
else:
|
||||
t = api.transfer(sender, recipient, amount, token_symbol)
|
||||
logg.info('transfer {} {} from {} to {} => {}'.format(amount, token_symbol, sender, recipient, t))
|
||||
|
||||
time.sleep(random.random() * args.wait_max)
|
||||
@@ -1,51 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
import csv
|
||||
import logging
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
|
||||
import web3
|
||||
import confini
|
||||
|
||||
from cic_registry import CICRegistry
|
||||
from cic_eth.api import Api
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logg = logging.getLogger()
|
||||
|
||||
confini_default_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
|
||||
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=confini_default_dir, help='config data dir')
|
||||
argparser.add_argument('-a', '--token-gifter-address', dest='a', type=str, help='Token gifter address')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||
argparser.add_argument('-s', '--token-symbol', dest='s', type=str, help='Token symbol')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||
args = argparser.parse_args()
|
||||
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
config = confini.Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
args_override = {
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
}
|
||||
cic_eth_api = Api(config.get('CIC_CHAIN_SPEC'))
|
||||
|
||||
token_gifter_address = args.a
|
||||
|
||||
if __name__ == '__main__':
|
||||
f = open('./data/amounts', 'r')
|
||||
cr = csv.reader(f)
|
||||
for r in cr:
|
||||
logg.info('sending {} {} from {} to {}'.format(r[1], args.s, token_gifter_address, r[0]))
|
||||
cic_eth_api.transfer(token_gifter_address, r[0], int(r[1]), args.s)
|
||||
f.close()
|
||||
@@ -1,212 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
import json
|
||||
import time
|
||||
import datetime
|
||||
import random
|
||||
import logging
|
||||
import os
|
||||
import base64
|
||||
import hashlib
|
||||
import sys
|
||||
|
||||
import vobject
|
||||
|
||||
import celery
|
||||
import web3
|
||||
from faker import Faker
|
||||
import cic_registry
|
||||
import confini
|
||||
from cic_eth.api import Api
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
logg = logging.getLogger()
|
||||
|
||||
fake = Faker(['sl', 'en_US', 'no', 'de', 'ro'])
|
||||
|
||||
#f = open('cic.conf', 'r')
|
||||
#config = json.load(f)
|
||||
#f.close()
|
||||
#
|
||||
|
||||
config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
|
||||
|
||||
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config.process()
|
||||
logg.info('loaded config\n{}'.format(config))
|
||||
|
||||
|
||||
w3s = None
|
||||
w3s = web3.Web3(web3.Web3.IPCProvider(config.get('SIGNER_SOCKET_PATH')))
|
||||
#w3s = web3.Web3(web3.Web3.IPCProvider(config['signer']['provider']))
|
||||
#w3 = web3.Web3(web3.Web3.WebsocketProvider(config['eth']['provider']))
|
||||
|
||||
dt_now = datetime.datetime.utcnow()
|
||||
dt_then = dt_now - datetime.timedelta(weeks=150)
|
||||
ts_now = int(dt_now.timestamp())
|
||||
ts_then = int(dt_then.timestamp())
|
||||
|
||||
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||
|
||||
api = Api(config.get('CIC_CHAIN_SPEC'))
|
||||
|
||||
gift_max = 10000
|
||||
gift_factor = (10**9)
|
||||
|
||||
categories = [
|
||||
"food/water",
|
||||
"fuel/energy",
|
||||
"education",
|
||||
"health",
|
||||
"shop",
|
||||
"environment",
|
||||
"transport",
|
||||
"farming/labor",
|
||||
"savingsgroup",
|
||||
]
|
||||
|
||||
phone_idx = []
|
||||
|
||||
|
||||
def genPhoneIndex(phone):
|
||||
h = hashlib.new('sha256')
|
||||
h.update(phone.encode('utf-8'))
|
||||
h.update(b'cic.msisdn')
|
||||
return h.digest().hex()
|
||||
|
||||
|
||||
def genId(addr, typ):
|
||||
h = hashlib.new('sha256')
|
||||
h.update(bytes.fromhex(addr[2:]))
|
||||
h.update(typ.encode('utf-8'))
|
||||
return h.digest().hex()
|
||||
|
||||
|
||||
def genDate():
|
||||
|
||||
logg.info(ts_then)
|
||||
ts = random.randint(ts_then, ts_now)
|
||||
return datetime.datetime.fromtimestamp(ts).timestamp()
|
||||
|
||||
|
||||
def genPhone():
|
||||
return fake.msisdn()
|
||||
|
||||
|
||||
def genPersonal(phone):
|
||||
fn = fake.first_name()
|
||||
ln = fake.last_name()
|
||||
e = fake.email()
|
||||
|
||||
v = vobject.vCard()
|
||||
first_name = fake.first_name()
|
||||
last_name = fake.last_name()
|
||||
v.add('n')
|
||||
v.n.value = vobject.vcard.Name(family=last_name, given=first_name)
|
||||
v.add('fn')
|
||||
v.fn.value = '{} {}'.format(first_name, last_name)
|
||||
v.add('tel')
|
||||
v.tel.typ_param = 'CELL'
|
||||
v.tel.value = phone
|
||||
v.add('email')
|
||||
v.email.value = fake.email()
|
||||
|
||||
vcard_serialized = v.serialize()
|
||||
vcard_base64 = base64.b64encode(vcard_serialized.encode('utf-8'))
|
||||
|
||||
return vcard_base64.decode('utf-8')
|
||||
|
||||
|
||||
def genCats():
|
||||
i = random.randint(0, 3)
|
||||
return random.choices(categories, k=i)
|
||||
|
||||
|
||||
def genAmount():
|
||||
return random.randint(0, gift_max) * gift_factor
|
||||
|
||||
|
||||
def gen():
|
||||
old_blockchain_address = '0x' + os.urandom(20).hex()
|
||||
accounts_index_account = config.get('DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER')
|
||||
if not accounts_index_account:
|
||||
accounts_index_account = None
|
||||
logg.debug('accounts indexwriter {}'.format(accounts_index_account))
|
||||
t = api.create_account()
|
||||
new_blockchain_address = t.get()
|
||||
gender = random.choice(['female', 'male', 'other'])
|
||||
phone = genPhone()
|
||||
v = genPersonal(phone)
|
||||
o = {
|
||||
'date_registered': genDate(),
|
||||
'vcard': v,
|
||||
'gender': gender,
|
||||
'key': {
|
||||
'ethereum': [
|
||||
old_blockchain_address,
|
||||
new_blockchain_address,
|
||||
],
|
||||
},
|
||||
'location': {
|
||||
'latitude': str(fake.latitude()),
|
||||
'longitude': str(fake.longitude()),
|
||||
'external': { # add osm lookup
|
||||
}
|
||||
},
|
||||
'selling': genCats(),
|
||||
}
|
||||
uid = genId(new_blockchain_address, 'cic.person')
|
||||
|
||||
#logg.info('gifting {} to {}'.format(amount, new_blockchain_address))
|
||||
|
||||
return (uid, phone, o)
|
||||
|
||||
|
||||
def prepareLocalFilePath(datadir, address):
|
||||
parts = [
|
||||
address[:2],
|
||||
address[2:4],
|
||||
]
|
||||
dirs = '{}/{}/{}'.format(
|
||||
datadir,
|
||||
parts[0],
|
||||
parts[1],
|
||||
)
|
||||
os.makedirs(dirs, exist_ok=True)
|
||||
return dirs
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
os.makedirs('data/person', exist_ok=True)
|
||||
os.makedirs('data/phone', exist_ok=True)
|
||||
|
||||
fa = open('./data/amounts', 'w')
|
||||
fb = open('./data/addresses', 'w')
|
||||
|
||||
#for i in range(10):
|
||||
for i in range(int(sys.argv[1])):
|
||||
|
||||
(uid, phone, o) = gen()
|
||||
eth = o['key']['ethereum'][1]
|
||||
|
||||
print(o)
|
||||
|
||||
d = prepareLocalFilePath('./data/person', uid)
|
||||
f = open('{}/{}'.format(d, uid), 'w')
|
||||
json.dump(o, f)
|
||||
f.close()
|
||||
|
||||
pidx = genPhoneIndex(phone)
|
||||
d = prepareLocalFilePath('./data/phone', uid)
|
||||
f = open('{}/{}'.format(d, pidx), 'w')
|
||||
f.write(eth)
|
||||
f.close()
|
||||
|
||||
amount = genAmount()
|
||||
fa.write('{},{}\n'.format(eth,amount))
|
||||
fb.write('{}\n'.format(eth))
|
||||
logg.debug('pidx {}, uid {}, eth {}, amount {}'.format(pidx, uid, eth, amount))
|
||||
|
||||
fb.close()
|
||||
fa.close()
|
||||
@@ -1,40 +0,0 @@
|
||||
[metadata]
|
||||
name = cic-dev-fake
|
||||
version = 0.0.1
|
||||
description = Fake data generator tools
|
||||
author = Louis Holbrook
|
||||
author_email = dev@holbrook.no
|
||||
url = https://gitlab.com/nolash/simple-multisig
|
||||
keywords =
|
||||
ethereum
|
||||
classifiers =
|
||||
Programming Language :: Python :: 3
|
||||
Operating System :: OS Independent
|
||||
Development Status :: 3 - Alpha
|
||||
Environment :: No Input/Output (Daemon)
|
||||
Intended Audience :: Developers
|
||||
License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
|
||||
Topic :: Internet
|
||||
#Topic :: Blockchain :: EVM
|
||||
license = GPL3
|
||||
licence_files =
|
||||
LICENSE
|
||||
|
||||
[options]
|
||||
python_requires = >= 3.6
|
||||
install_requires =
|
||||
web3==5.12.2
|
||||
vobject==0.9.6.1
|
||||
faker==4.17.1
|
||||
tests_require =
|
||||
eth-tester==0.5.0b2
|
||||
py-evm==0.3.0a20
|
||||
scripts =
|
||||
scripts/users.py
|
||||
scripts/tx_generator.py
|
||||
scripts/tx_seed.py
|
||||
|
||||
[options.extras_require]
|
||||
testing =
|
||||
eth-tester==0.5.0b2
|
||||
py-evm==0.3.0a20
|
||||
@@ -1,4 +0,0 @@
|
||||
from setuptools import setup
|
||||
|
||||
setup(
|
||||
)
|
||||
@@ -1,10 +0,0 @@
|
||||
cryptocurrency-cli-tools~=0.0.4
|
||||
giftable-erc20-token~=0.0.7b1
|
||||
eth-accounts-index~=0.0.10a2
|
||||
erc20-single-shot-faucet~=0.2.0a1
|
||||
erc20-approval-escrow~=0.3.0a1
|
||||
cic-eth==0.10.0a5
|
||||
vobject==0.9.6.1
|
||||
faker==4.17.1
|
||||
eth-address-index~=0.1.0a1
|
||||
crypto-dev-signer~=0.4.13b9
|
||||
File diff suppressed because one or more lines are too long
Binary file not shown.
|
Before Width: | Height: | Size: 7.0 KiB |
@@ -1,138 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Debug flag
|
||||
#debug='-v'
|
||||
keystore_file=../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c
|
||||
|
||||
debug=''
|
||||
abi_dir=${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
|
||||
|
||||
# Determine token amount
|
||||
token_amount=${DEV_ETH_RESERVE_AMOUNT:0:-1}
|
||||
|
||||
# Determine gas amount
|
||||
#gas_amount=20000000000000000000
|
||||
gas_amount=2000000000000000000
|
||||
|
||||
export DATABASE_NAME=$DATABASE_NAME_CIC_ETH
|
||||
export DATABASE_PORT=$HTTP_PORT_POSTGRES
|
||||
export DATABASE_HOST=localhost
|
||||
|
||||
set -e
|
||||
set -a
|
||||
|
||||
old_gas_provider=$DEV_ETH_ACCOUNT_GAS_PROVIDER
|
||||
DEV_ETH_ACCOUNT_GAS_GIFTER=`python ./create.py --no-register`
|
||||
echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_GAS_GIFTER
|
||||
export DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_GAS_GIFTER
|
||||
cic-eth-tag GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER
|
||||
|
||||
DEV_ETH_ACCOUNT_SARAFU_GIFTER=`python ./create.py --no-register`
|
||||
echo DEV_ETH_ACCOUNT_SARAFU_GIFTER=$DEV_ETH_ACCOUNT_SARAFU_GIFTER
|
||||
export DEV_ETH_ACCOUNT_SARAFU_GIFTER=$DEV_ETH_ACCOUNT_SARAFU_GIFTER
|
||||
cic-eth-tag SARAFU_GIFTER $DEV_ETH_ACCOUNT_SARAFU_GIFTER
|
||||
|
||||
|
||||
DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER=`python ./create.py --no-register`
|
||||
echo DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER=$DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER
|
||||
export DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER=$DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER
|
||||
cic-eth-tag TRANSFER_APPROVAL_OWNER $DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER
|
||||
|
||||
|
||||
DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER=`python ./create.py --no-register`
|
||||
echo DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER=$DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER
|
||||
export DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER=$DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER
|
||||
cic-eth-tag FAUCET_OWNER $DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER
|
||||
|
||||
|
||||
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=`python ./create.py --no-register`
|
||||
echo DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=$DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER
|
||||
export DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=$DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER
|
||||
cic-eth-tag ACCOUNTS_INDEX_WRITER $DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER
|
||||
|
||||
|
||||
# Transfer gas to custodial gas provider adddress
|
||||
#echo $old_gas_provider
|
||||
>&2 echo gift gas to gas gifter
|
||||
>&2 echo "python gas.py -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_GAS_GIFTER $gas_amount"
|
||||
>&2 python gas.py -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_GAS_GIFTER $gas_amount
|
||||
|
||||
>&2 echo gift gas to sarafu token owner
|
||||
>&2 echo "python gas.py -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_SARAFU_OWNER $gas_amount"
|
||||
>&2 python gas.py -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_SARAFU_OWNER $gas_amount
|
||||
|
||||
>&2 echo gift gas to account index owner
|
||||
>&2 echo "python gas.py -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_OWNER $gas_amount"
|
||||
>&2 python gas.py -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_OWNER $gas_amount
|
||||
|
||||
|
||||
# Send reserve to token creator
|
||||
>&2 giftable-token-gift -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -a $DEV_ETH_RESERVE_ADDRESS --recipient $DEV_ETH_ACCOUNT_SARAFU_OWNER -w $debug $token_amount
|
||||
|
||||
|
||||
# Create token
|
||||
#DEV_ETH_SARAFU_TOKEN_ADDRESS=`cic-bancor-token -p $ETH_PROVIDER -r $CIC_REGISTRY_ADDRESS -z $DEV_ETH_RESERVE_ADDRESS -o $DEV_ETH_ACCOUNT_SARAFU_OWNER -n $DEV_ETH_SARAFU_TOKEN_NAME -s $DEV_ETH_SARAFU_TOKEN_SYMBOL -d $DEV_ETH_SARAFU_TOKEN_DECIMALS -i $CIC_CHAIN_SPEC $debug $token_amount`
|
||||
DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS
|
||||
echo DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_SARAFU_TOKEN_ADDRESS
|
||||
export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_SARAFU_TOKEN_ADDRESS
|
||||
|
||||
|
||||
# Transfer tokens to gifter address
|
||||
>&2 python transfer.py -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_ETH_SARAFU_TOKEN_ADDRESS --abi-dir $abi_dir -w $debug $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${token_amount:0:-1}
|
||||
|
||||
# Deploy transfer approval contract
|
||||
CIC_APPROVAL_ESCROW_ADDRESS=`erc20-approval-escrow-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --approver $DEV_ETH_ACCOUNT_APPROVAL_ESCROW_OWNER -w $debug`
|
||||
echo CIC_APPROVAL_ESCROW_ADDRESS=$CIC_APPROVAL_ESCROW_ADDRESS
|
||||
export CIC_APPROVAL_ESCROW_ADDRESS=$CIC_APPROVAL_ESCROW_ADDRESS
|
||||
|
||||
# Register transfer approval contract
|
||||
>&2 cic-registry-set -y $keystore_file -r $CIC_REGISTRY_ADDRESS -k TransferApproval -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $CIC_APPROVAL_ESCROW_ADDRESS
|
||||
|
||||
# Deploy one-time token faucet for newly created token
|
||||
DEV_ETH_SARAFU_FAUCET_ADDRESS=`erc20-single-shot-faucet-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_ETH_SARAFU_TOKEN_ADDRESS --editor $DEV_ETH_ACCOUNT_SINGLE_SHOT_FAUCET_OWNER --set-amount 1048576 -w $debug`
|
||||
echo DEV_ETH_SARAFU_FAUCET_ADDRESS=$DEV_ETH_SARAFU_FAUCET_ADDRESS
|
||||
export DEV_ETH_SARAFU_FAUCET_ADDRESS=$DEV_ETH_SARAFU_FAUCET_ADDRESS
|
||||
|
||||
# Transfer tokens to faucet contract
|
||||
>&2 python transfer.py -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_ETH_SARAFU_TOKEN_ADDRESS --abi-dir $abi_dir -w $debug $DEV_ETH_SARAFU_FAUCET_ADDRESS ${token_amount:0:-1}
|
||||
|
||||
# Registry faucet entry
|
||||
>&2 cic-registry-set -y $keystore_file -r $CIC_REGISTRY_ADDRESS -k Faucet -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $DEV_ETH_SARAFU_FAUCET_ADDRESS
|
||||
|
||||
# Deploy token endorser registry
|
||||
#DEV_ETH_TOKEN_ENDORSER_ADDRESS=`eth-token-endorser-deploy -p $ETH_PROVIDER -o $DEV_ETH_ACCOUNT_SARAFU_OWNER $debug`
|
||||
#echo DEV_ETH_TOKEN_ENDORSER_ADDRESS=$DEV_ETH_TOKEN_ENDORSER_ADDRESS
|
||||
#export DEV_ETH_TOKEN_ENDORSER_ADDRESS=$DEV_ETH_TOKEN_ENDORSER_ADDRESS
|
||||
#ENDORSEMENT_MSG=`echo -n 'very cool token' | sha256sum | awk '{print $1;}'`
|
||||
#>&2 eth-token-endorser-add -p $ETH_PROVIDER -a $DEV_ETH_TOKEN_ENDORSER_ADDRESS -t $DEV_ETH_SARAFU_TOKEN_ADDRESS -o $DEV_ETH_ACCOUNT_SARAFU_OWNER $debug $ENDORSEMENT_MSG
|
||||
CIC_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug`
|
||||
echo CIC_TOKEN_INDEX_ADDRESS=$CIC_TOKEN_INDEX_ADDRESS
|
||||
export CIC_TOKEN_INDEX_ADDRESS=$CIC_TOKEN_INDEX_ADDRESS
|
||||
>&2 eth-token-index-add -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_TOKEN_INDEX_ADDRESS -w $debug $DEV_ETH_SARAFU_TOKEN_ADDRESS
|
||||
|
||||
# Register token registry
|
||||
>&2 cic-registry-set -y $keystore_file -r $CIC_REGISTRY_ADDRESS -k TokenRegistry -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER $CIC_TOKEN_INDEX_ADDRESS
|
||||
|
||||
# Deploy address declarator registry
|
||||
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
|
||||
CIC_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w $debug $declarator_description`
|
||||
echo CIC_DECLARATOR_ADDRESS=$CIC_DECLARATOR_ADDRESS
|
||||
export CIC_DECLARATOR_ADDRESS=$CIC_DECLARATOR_ADDRESS
|
||||
token_description_one=`sha256sum sarafu_declaration.json | awk '{ print $1; }'`
|
||||
token_description_two=0x54686973206973207468652053617261667520746f6b656e0000000000000000
|
||||
>&2 eth-address-declarator-add -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_DECLARATOR_ADDRESS -w $debug $DEV_ETH_SARAFU_TOKEN_ADDRESS $token_description_one
|
||||
>&2 eth-address-declarator-add -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_DECLARATOR_ADDRESS -w $debug $DEV_ETH_SARAFU_TOKEN_ADDRESS $token_description_two
|
||||
|
||||
|
||||
# Register address declarator
|
||||
>&2 cic-registry-set -y $keystore_file -r $CIC_REGISTRY_ADDRESS -k AddressDeclarator -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER $CIC_DECLARATOR_ADDRESS
|
||||
|
||||
# We're done with the registry at this point, seal it off
|
||||
>&2 cic-registry-seal -y $keystore_file -i $CIC_CHAIN_SPEC -r $CIC_REGISTRY_ADDRESS -p $ETH_PROVIDER
|
||||
|
||||
|
||||
# Add accounts index writer with key from keystore
|
||||
>&2 eth-accounts-index-add -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -r $CIC_ACCOUNTS_INDEX_ADDRESS --writer $DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER -w $debug
|
||||
|
||||
set +a
|
||||
set +e
|
||||
@@ -1,31 +0,0 @@
|
||||
#!python3
|
||||
|
||||
# Author: Louis Holbrook <dev@holbrook.no> 0826EDA1702D1E87C6E2875121D2E7BB88C2A746
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# File-version: 1
|
||||
# Description: Smoke test for cic-eth create account api
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
import celery
|
||||
import confini
|
||||
|
||||
# platform imports
|
||||
from cic_eth import Api
|
||||
|
||||
script_dir = os.path.dirname(__file__)
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
logg = logging.getLogger()
|
||||
|
||||
config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic/')
|
||||
|
||||
config = confini.Config(config_dir)
|
||||
config.process()
|
||||
|
||||
a = Api()
|
||||
t = a.create_account()
|
||||
logg.debug('create account task uuid {}'.format(t))
|
||||
@@ -1,105 +0,0 @@
|
||||
#!python3
|
||||
|
||||
"""Token transfer script
|
||||
|
||||
.. moduleauthor:: Louis Holbrook <dev@holbrook.no>
|
||||
.. pgp:: 0826EDA1702D1E87C6E2875121D2E7BB88C2A746
|
||||
|
||||
"""
|
||||
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
# standard imports
|
||||
import os
|
||||
import json
|
||||
import argparse
|
||||
import logging
|
||||
|
||||
# third-party imports
|
||||
import web3
|
||||
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
|
||||
from crypto_dev_signer.keystore import DictKeystore
|
||||
from crypto_dev_signer.eth.helper import EthTxExecutor
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
logging.getLogger('web3').setLevel(logging.WARNING)
|
||||
logging.getLogger('urllib3').setLevel(logging.WARNING)
|
||||
|
||||
default_abi_dir = '/usr/share/local/cic/solidity/abi'
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8545', type=str, help='Web3 provider url (http only)')
|
||||
argparser.add_argument('-w', action='store_true', help='Wait for the last transaction to be confirmed')
|
||||
argparser.add_argument('-ww', action='store_true', help='Wait for every transaction to be confirmed')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, default='Ethereum:1', help='Chain specification string')
|
||||
argparser.add_argument('--token-address', required='True', dest='t', type=str, help='Token address')
|
||||
argparser.add_argument('-a', '--sender-address', dest='s', type=str, help='Sender account address')
|
||||
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||
argparser.add_argument('--abi-dir', dest='abi_dir', type=str, default=default_abi_dir, help='Directory containing bytecode and abi (default {})'.format(default_abi_dir))
|
||||
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
|
||||
argparser.add_argument('recipient', type=str, help='Recipient account address')
|
||||
argparser.add_argument('amount', type=int, help='Amount of tokens to mint and gift')
|
||||
args = argparser.parse_args()
|
||||
|
||||
|
||||
if args.vv:
|
||||
logg.setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logg.setLevel(logging.INFO)
|
||||
|
||||
block_last = args.w
|
||||
block_all = args.ww
|
||||
|
||||
w3 = web3.Web3(web3.Web3.HTTPProvider(args.p))
|
||||
|
||||
signer_address = None
|
||||
keystore = DictKeystore()
|
||||
if args.y != None:
|
||||
logg.debug('loading keystore file {}'.format(args.y))
|
||||
signer_address = keystore.import_keystore_file(args.y)
|
||||
logg.debug('now have key for signer address {}'.format(signer_address))
|
||||
signer = EIP155Signer(keystore)
|
||||
|
||||
chain_pair = args.i.split(':')
|
||||
chain_id = int(chain_pair[1])
|
||||
|
||||
helper = EthTxExecutor(
|
||||
w3,
|
||||
signer_address,
|
||||
signer,
|
||||
chain_id,
|
||||
block=args.ww,
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
# TODO: Add pure ERC20 abi to collection
|
||||
f = open(os.path.join(args.abi_dir, 'ERC20.json'), 'r')
|
||||
abi = json.load(f)
|
||||
f.close()
|
||||
|
||||
c = w3.eth.contract(abi=abi, address=args.t)
|
||||
|
||||
recipient = args.recipient
|
||||
value = args.amount
|
||||
|
||||
logg.debug('sender {} balance before: {}'.format(signer_address, c.functions.balanceOf(signer_address).call()))
|
||||
logg.debug('recipient {} balance before: {}'.format(recipient, c.functions.balanceOf(recipient).call()))
|
||||
(tx_hash, rcpt) = helper.sign_and_send(
|
||||
[
|
||||
c.functions.transfer(recipient, value).buildTransaction,
|
||||
],
|
||||
)
|
||||
logg.debug('sender {} balance after: {}'.format(signer_address, c.functions.balanceOf(signer_address).call()))
|
||||
logg.debug('recipient {} balance after: {}'.format(recipient, c.functions.balanceOf(recipient).call()))
|
||||
|
||||
if block_last:
|
||||
helper.wait_for()
|
||||
|
||||
print(tx_hash)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -53,7 +53,7 @@ RUN apt-get update && \
|
||||
|
||||
RUN echo installing nodejs tooling
|
||||
|
||||
COPY contract-migration/dev/nvm.sh /root/
|
||||
COPY contract-migration/nvm.sh /root/
|
||||
|
||||
# Install nvm with node and npm
|
||||
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
|
||||
@@ -105,53 +105,13 @@ RUN cd root && \
|
||||
RUN cd cic-bancor/python && \
|
||||
pip install --extra-index-url $pip_extra_index_url .
|
||||
|
||||
RUN echo installing common python tooling
|
||||
ARG cic_python_commit=beecee783ceac2ea0fa711f888ce4c82f1a81490
|
||||
ARG cic_python_url=https://gitlab.com/grassrootseconomics/cic-python.git/
|
||||
RUN echo Install sum of python dependencies across all components && \
|
||||
git clone --depth 1 $cic_python_url cic-python && \
|
||||
cd cic-python && \
|
||||
git fetch --depth 1 origin $cic_python_commit && \
|
||||
git checkout $cic_python_commit && \
|
||||
pip install --extra-index-url $pip_extra_index_url -r requirements.txt
|
||||
|
||||
RUN echo Install dev-only provisions
ARG cryptocurrency_cli_tools_version=0.0.4
RUN pip install --extra-index-url $pip_extra_index_url cryptocurrency-cli-tools==$cryptocurrency_cli_tools_version

RUN apt-get install -y cargo
ARG cic_base_version=0.1.1a10
RUN pip install --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version

RUN echo Install smart contract interface implementations, least frequently changed first
ARG giftable_erc20_token_version=0.0.7b10
RUN pip install --extra-index-url $pip_extra_index_url giftable-erc20-token==$giftable_erc20_token_version

ARG eth_accounts_index_version=0.0.10a6
RUN pip install --extra-index-url $pip_extra_index_url eth-accounts-index==$eth_accounts_index_version

ARG erc20_approval_escrow_version=0.3.0a4
RUN pip install --extra-index-url $pip_extra_index_url erc20-approval-escrow==$erc20_approval_escrow_version

#ARG erc20_single_shot_faucet_version=0.2.0a5
#RUN pip install --extra-index-url $pip_extra_index_url erc20-single-shot-faucet==$erc20_single_shot_faucet_version

# NOTE: several ARG defaults below originally used "==" (ARG name==value),
# which Docker parses as a default value beginning with "=" and turns the pip
# requirement into an accidental arbitrary-equality pin ("pkg===value").
# Single "=" is the correct ARG default syntax.
ARG sarafu_faucet_version=0.0.1a10
RUN pip install --extra-index-url $pip_extra_index_url sarafu-faucet==$sarafu_faucet_version

ARG eth_address_index_version=0.1.0a8
RUN pip install --extra-index-url $pip_extra_index_url eth-address-index==$eth_address_index_version

RUN echo Install cic specific python packages
ARG cic_registry_version=0.5.3a18
RUN pip install --extra-index-url $pip_extra_index_url cic-registry==$cic_registry_version

RUN echo Install misc helpers

ARG crypto_dev_signer_version=0.4.13rc2
RUN pip install --extra-index-url $pip_extra_index_url crypto-dev-signer==$crypto_dev_signer_version

ARG eth_gas_proxy_version=0.0.1a4
RUN pip install --extra-index-url $pip_extra_index_url eth-gas-proxy==$eth_gas_proxy_version

ARG cic_contracts_version=0.0.2a2
RUN pip install --extra-index-url $pip_extra_index_url cic-contracts==$cic_contracts_version
# Re-pin cic-registry to the newer prerelease; this later ARG/RUN pair
# supersedes the 0.5.3a18 install above.
ARG cic_registry_version=0.5.3a22
RUN pip install --extra-index-url $pip_extra_index_url cic-registry==$cic_registry_version
|
||||
|
||||
WORKDIR /root
|
||||
|
||||
@@ -168,6 +128,4 @@ COPY contract-migration/seed_cic_eth.sh seed_cic_eth.sh
|
||||
COPY contract-migration/sarafu_declaration.json sarafu_declaration.json
|
||||
COPY contract-migration/keystore keystore
|
||||
|
||||
LABEL version="4"
|
||||
|
||||
ENTRYPOINT [ "/bin/bash" ]
|
||||
|
||||
0
apps/contract-migration/reset.sh
Normal file → Executable file
0
apps/contract-migration/reset.sh
Normal file → Executable file
72
apps/contract-migration/scripts/README.md
Normal file
72
apps/contract-migration/scripts/README.md
Normal file
@@ -0,0 +1,72 @@
|
||||
# DATA GENERATION TOOLS
|
||||
|
||||
This folder contains tools to generate and import test data.
|
||||
|
||||
## DATA CREATION
|
||||
|
||||
Does not need the cluster to run.
|
||||
|
||||
Vanilla:
|
||||
|
||||
`python create_import_users.py [--dir <datadir>] <number_of_users>`
|
||||
|
||||
If you want to use the `import_balance.py` script to add to the user's balance from an external address, add:
|
||||
|
||||
`python create_import_users.py --gift-threshold <max_units_to_send> [--dir <datadir>] <number_of_users>`
|
||||
|
||||
|
||||
## IMPORT
|
||||
|
||||
Make sure the following is running in the cluster:
|
||||
* eth
|
||||
* postgres
|
||||
* redis
|
||||
* cic-eth-tasker
|
||||
* cic-eth-dispatcher
|
||||
* cic-eth-manager-head
|
||||
|
||||
|
||||
You will want to run these in sequence:
|
||||
|
||||
|
||||
## 1. Metadata
|
||||
|
||||
`node import_meta.js <datadir> <number_of_users>`
|
||||
|
||||
Monitors a folder for output from the `import_users.py` script, adding the metadata found to the `cic-meta` service.
|
||||
|
||||
|
||||
## 2. Balances
|
||||
|
||||
(Only if you used the `--gift-threshold` option above)
|
||||
|
||||
`python import_balance.py -c config -i <newchain:id> -r <cic_registry_address> -p <eth_provider> --head -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
||||
|
||||
This will monitor new mined blocks and send balances to the newly created accounts.
|
||||
|
||||
|
||||
## 3. Users
|
||||
|
||||
Without any modifications to the cluster and config files:
|
||||
|
||||
`python import_users.py -c config --redis-host-callback redis <datadir>`
|
||||
|
||||
**A note on the callback**: The script uses a redis callback to retrieve the newly generated custodial address. This is the redis server _from the perspective of the cic-eth component_.
|
||||
|
||||
|
||||
## VERIFY
|
||||
|
||||
`python verify.py -c config -i <newchain:id> -r <cic_registry_address> -p <eth_provider> <datadir>`
|
||||
|
||||
Checks
|
||||
* Private key is in cic-eth keystore
|
||||
* Address is in accounts index
|
||||
* Address has balance matching the gift threshold
|
||||
* Metadata can be retrieved and has exact match
|
||||
|
||||
Should exit with code 0 if all input data is found in the respective services.
|
||||
|
||||
|
||||
## KNOWN ISSUES
|
||||
|
||||
If the faucet disbursement is set to a non-zero amount, the balances will be off. The verify script needs to be improved to check the faucet amount.
|
||||
0
apps/contract-migration/scripts/cmd/__init__.py
Normal file
0
apps/contract-migration/scripts/cmd/__init__.py
Normal file
417
apps/contract-migration/scripts/cmd/traffic.py
Normal file
417
apps/contract-migration/scripts/cmd/traffic.py
Normal file
@@ -0,0 +1,417 @@
|
||||
# standard imports
|
||||
import logging
|
||||
import json
|
||||
import uuid
|
||||
import importlib
|
||||
import random
|
||||
import copy
|
||||
from argparse import RawTextHelpFormatter
|
||||
|
||||
# external imports
|
||||
import redis
|
||||
from cic_eth.api.api_task import Api
|
||||
|
||||
logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def add_args(argparser):
    """Parse script specific command line arguments

    :param argparser: Top-level argument parser
    :type argparser: argparse.ArgumentParser
    :returns: The same parser, with the traffic-script arguments added
    :rtype: argparse.ArgumentParser
    """
    # Plain attribute assignment; the original chained target
    # ("argparser.formatter_class = formatter_class=RawTextHelpFormatter")
    # additionally bound a useless local name as a side effect.
    argparser.formatter_class = RawTextHelpFormatter
    argparser.add_argument('--redis-host-callback', dest='redis_host_callback', default='localhost', type=str, help='redis host to use for callback')
    argparser.add_argument('--redis-port-callback', dest='redis_port_callback', default=6379, type=int, help='redis port to use for callback')
    argparser.add_argument('--batch-size', dest='batch_size', default=10, type=int, help='number of events to process simultaneously')
    argparser.description = """Generates traffic on the cic network using dynamically loaded modules as event sources

"""
    return argparser
|
||||
|
||||
|
||||
class TrafficItem:
    """A single unit of traffic work, to be executed by a traffic generation method.

    The module given as argument must expose a callable "do" attribute with an
    interface conforming to local.noop_traffic.do.

    :param item: Traffic generation module.
    :type item: function
    """
    def __init__(self, item):
        # Bind the module's entry point and tag this item with a unique id so
        # asynchronous results can later be routed back to it.
        self.method = item.do
        self.uuid = uuid.uuid4()
        # Lifecycle metadata, filled in as the item is executed.
        for field in ('ext', 'result', 'sender', 'recipient', 'source_token', 'destination_token'):
            setattr(self, field, None)
        self.source_value = 0


    def __str__(self):
        return 'traffic item method {} uuid {}'.format(self.method, self.uuid)
|
||||
|
||||
|
||||
class TrafficRouter:
    """Holds and selects from the collection of traffic generator modules that will be used for the execution.

    :param batch_size: Amount of simultaneous traffic items that can simultaneously be in flight.
    :type batch_size: number
    :raises ValueError: If batch size is zero or negative
    """
    def __init__(self, batch_size=1):
        if batch_size < 1:
            raise ValueError('batch size cannot be 0')
        self.items = []
        # weights[i] is the cumulative weight of all items before item i,
        # i.e. the start offset of item i's selection interval.
        self.weights = []
        self.total_weights = 0
        self.batch_size = batch_size
        self.reserved = {}
        self.reserved_count = 0
        self.traffic = {}


    def add(self, item, weight):
        """Add a traffic generator module to the list of modules to choose between for traffic item execution.

        The probability that a module will be chosen for any single item is the ratio between the weight parameter and the accumulated weights for all items.

        See local.noop for which criteria the generator module must fulfill.

        :param item: Qualified class path to traffic generator module. Will be dynamically loaded.
        :type item: str
        :param weight: Selection probability weight
        :type weight: number
        :raises ModuleNotFoundError: Invalid item argument
        """
        self.weights.append(self.total_weights)
        self.total_weights += weight
        m = importlib.import_module(item)
        self.items.append(m)


    def reserve(self):
        """Selects the module to be used to execute the next traffic item, using the provided weights.

        If the current number of calls to "reserve" without corresponding calls to "release" equals the set batch size limit, None will be returned. The calling code should allow a short grace period before trying the call again.

        :raises ValueError: No items have been added
        :returns: A traffic item with the selected module method as the method property.
        :rtype: TrafficItem|None
        """
        if len(self.items) == 0:
            raise ValueError('Add at least one item first')

        if len(self.reserved) == self.batch_size:
            return None

        # Draw n uniformly from [0, total_weights) and select the item whose
        # cumulative-weight interval contains n. The original drew from the
        # inclusive range [0, total_weights] and matched the first start
        # offset >= n, which skewed selection away from the configured weights.
        if self.total_weights > 0:
            n = random.randint(0, self.total_weights - 1)
        else:
            n = 0
        item = self.items[0]
        for i in reversed(range(len(self.weights))):
            if self.weights[i] <= n:
                item = self.items[i]
                break

        ti = TrafficItem(item)
        self.reserved[ti.uuid] = ti
        return ti


    def release(self, traffic_item):
        """Releases the traffic item from the list of simultaneous traffic items in flight.

        :param traffic_item: Traffic item
        :type traffic_item: TrafficItem
        """
        del self.reserved[traffic_item.uuid]


    def apply_import_dict(self, keys, dct):
        """Convenience method to add traffic generator modules from a dictionary.

        :param keys: Keys in dictionary to add
        :type keys: list of str
        :param dct: Dictionary to choose module strings from
        :type dct: dict
        :raises ModuleNotFoundError: If one of the module strings refer to an invalid module.
        """
        # Keys of the form TRAFFIC_<MODULE> map to module <module> with the
        # dictionary value as the integer selection weight.
        for k in keys:
            if len(k) > 8 and k[:8] == 'TRAFFIC_':
                v = int(dct.get(k))
                self.add(k[8:].lower(), v)
                logg.debug('found traffic item {} weight {}'.format(k, v))
|
||||
|
||||
|
||||
# TODO: This will not work well with big networks. The provisioner should use lazy loading and LRU instead.
|
||||
class TrafficProvisioner:
    """Loads metadata necessary for traffic item execution.

    Instantiation will by default trigger retrieval of accounts and tokens on the network.

    It will also populate the aux property of the instance with the values from the static aux parameter template.
    """

    # Class-level lookup backends; must be assigned before instantiation,
    # otherwise __init__ raises AttributeError on the None placeholders.
    oracles = {
        'account': None,
        'token': None,
        }
    """Data oracles to be used for traffic item generation"""
    default_aux = {
        }
    """Aux parameter template to be passed to the traffic generator module"""


    def __init__(self):
        # Snapshot tokens and accounts from the configured oracles.
        self.tokens = self.oracles['token'].get_tokens()
        self.accounts = self.oracles['account'].get_accounts()
        self.aux = copy.copy(self.default_aux)
        # Per-account cache: __balances[address][token_symbol] = balance value.
        self.__balances = {}
        for a in self.accounts:
            self.__balances[a] = {}


    # Caches a single address' balance of a single token
    def __cache_balance(self, holder_address, token, value):
        if self.__balances.get(holder_address) == None:
            self.__balances[holder_address] = {}
        self.__balances[holder_address][token] = value
        logg.debug('setting cached balance of {} token {} to {}'.format(holder_address, token, value))


    def add_aux(self, k, v):
        """Add a key-value pair to the aux parameter list.

        Does not protect existing entries from being overwritten.

        :param k: Key
        :type k: str
        :param v: Value
        :type v: any
        """
        logg.debug('added {} = {} to traffictasker'.format(k, v))
        self.aux[k] = v


    # TODO: Balance list type should perhaps be a class (provided by cic-eth package) due to its complexity.
    def balances(self, refresh_accounts=None):
        """Retrieves all token balances for the given account list.

        If refresh_accounts is not None, the balance values for the given accounts will be retrieved from upstream. If the argument is an empty list, the balances will be updated for all tokens of all accounts. If there are many accounts and/or tokens, this may be a VERY EXPENSIVE OPERATION. The "balance" method can be used instead to update individual account/token pair balances.

        :param refresh_accounts: List of accounts to refresh balances for.
        :type refresh_accounts: list of str, 0x-hex
        :returns: Dict of dict of dicts; v[accounts][token] = {balance_types}
        :rtype: dict
        """
        if refresh_accounts != None:
            accounts = refresh_accounts
            if len(accounts) == 0:
                accounts = self.accounts
            for account in accounts:
                for token in self.tokens:
                    value = self.balance(account, token)
                    self.__cache_balance(account, token.symbol(), value)
                    logg.debug('balance sender {} token {} = {}'.format(account, token, value))
        else:
            logg.debug('returning cached balances')

        return self.__balances


    # TODO: use proper redis callback
    def balance(self, account, token):
        """Update balance for a single token of a single account from upstream.

        The balance will be the spendable balance at the time of the call. This value may be less than the balance reported by the consensus network, if a previous outgoing transaction is still pending in the network or the custodial system queue.

        :param account: Account to update
        :type account: str, 0x-hex
        :param token: Token to update balance for
        :type token: cic_registry.token.Token
        :returns: Updated balance
        :rtype: complex balance dict
        """
        api = Api(
            str(self.aux['chain_spec']),
            queue=self.aux['api_queue'],
            #callback_param='{}:{}:{}:{}'.format(aux['redis_host_callback'], aux['redis_port_callback'], aux['redis_db'], aux['redis_channel']),
            #callback_task='cic_eth.callbacks.redis.redis',
            #callback_queue=queue,
            )
        t = api.balance(account, token.symbol())
        # Blocks until the celery task completes; collect() drains the result
        # chain, keeping the last collected value.
        r = t.get()
        for c in t.collect():
            r = c[1]
        assert t.successful()
        #return r[0]['balance_network'] - r[0]['balance_outgoing']
        return r[0]


    def update_balance(self, account, token, value):
        """Manually set a token balance for an account.

        :param account: Account to update
        :type account: str, 0x-hex
        :param token: Token to update balance for
        :type token: cic_registry.token.Token
        :param value: Balance value to set
        :type value: number
        :returns: Balance value (unchanged)
        :rtype: complex balance dict
        """
        self.__cache_balance(account, token.symbol(), value)
        return value
|
||||
|
||||
|
||||
# TODO: Abstract redis with a generic pubsub adapter
|
||||
class TrafficSyncHandler:
|
||||
"""Encapsulates callback methods required by the chain syncer.
|
||||
|
||||
This implementation uses a redis subscription as backend to retrieve results from asynchronously executed tasks.
|
||||
|
||||
:param config: Configuration of current top-level execution
|
||||
:type config: object with dict get interface
|
||||
:param traffic_router: Traffic router instance to use for the syncer session.
|
||||
:type traffic_router: TrafficRouter
|
||||
:raises Exception: Any Exception redis may raise on connection attempt.
|
||||
"""
|
||||
    def __init__(self, config, traffic_router):
        self.traffic_router = traffic_router
        # Unique channel name per session so concurrent runs do not consume
        # each other's callback messages.
        self.redis_channel = str(uuid.uuid4())
        self.pubsub = self.__connect_redis(self.redis_channel, config)
        # In-flight items keyed by their external task id (see refresh()).
        self.traffic_items = {}
        self.config = config
        # Becomes True after the first refresh() has primed the balance cache.
        self.init = False
|
||||
|
||||
|
||||
    # Connects to redis and subscribes to the session's callback channel.
    # Returns the subscribed PubSub object; raises whatever redis raises on
    # connection failure.
    def __connect_redis(self, redis_channel, config):
        r = redis.Redis(config.get('REDIS_HOST'), config.get('REDIS_PORT'), config.get('REDIS_DB'))
        redis_pubsub = r.pubsub()
        redis_pubsub.subscribe(redis_channel)
        logg.debug('redis connected on channel {}'.format(redis_channel))
        return redis_pubsub
|
||||
|
||||
|
||||
# TODO: This method is too long, split up
|
||||
# TODO: This method will not yet cache balances for newly created accounts
|
||||
def refresh(self, block_number, tx_index):
|
||||
"""Traffic method and item execution driver to be called on every loop execution of the chain syncer.
|
||||
|
||||
Implements the signature required by callbacks called from chainsyncer.driver.loop.
|
||||
|
||||
:param block_number: Syncer block height at time of call.
|
||||
:type block_number: number
|
||||
:param tx_index: Syncer block transaction index at time of call.
|
||||
:type tx_index: number
|
||||
"""
|
||||
traffic_provisioner = TrafficProvisioner()
|
||||
traffic_provisioner.add_aux('redis_channel', self.redis_channel)
|
||||
|
||||
refresh_accounts = None
|
||||
# Note! This call may be very expensive if there are a lot of accounts and/or tokens on the network
|
||||
if not self.init:
|
||||
refresh_accounts = traffic_provisioner.accounts
|
||||
balances = traffic_provisioner.balances(refresh_accounts=refresh_accounts)
|
||||
self.init = True
|
||||
|
||||
if len(traffic_provisioner.tokens) == 0:
|
||||
logg.error('patiently waiting for at least one registered token...')
|
||||
return
|
||||
|
||||
logg.debug('executing handler refresh with accounts {}'.format(traffic_provisioner.accounts))
|
||||
logg.debug('executing handler refresh with tokens {}'.format(traffic_provisioner.tokens))
|
||||
|
||||
sender_indices = [*range(0, len(traffic_provisioner.accounts))]
|
||||
# TODO: only get balances for the selection that we will be generating for
|
||||
|
||||
while True:
|
||||
traffic_item = self.traffic_router.reserve()
|
||||
if traffic_item == None:
|
||||
logg.debug('no traffic_items left to reserve {}'.format(traffic_item))
|
||||
break
|
||||
|
||||
# TODO: temporary selection
|
||||
token_pair = (
|
||||
traffic_provisioner.tokens[0],
|
||||
traffic_provisioner.tokens[0],
|
||||
)
|
||||
sender_index_index = random.randint(0, len(sender_indices)-1)
|
||||
sender_index = sender_indices[sender_index_index]
|
||||
sender = traffic_provisioner.accounts[sender_index]
|
||||
#balance_full = balances[sender][token_pair[0].symbol()]
|
||||
if len(sender_indices) == 1:
|
||||
sender_indices[m] = sender_sender_indices[len(senders)-1]
|
||||
sender_indices = sender_indices[:len(sender_indices)-1]
|
||||
|
||||
balance_full = traffic_provisioner.balance(sender, token_pair[0])
|
||||
|
||||
recipient_index = random.randint(0, len(traffic_provisioner.accounts)-1)
|
||||
recipient = traffic_provisioner.accounts[recipient_index]
|
||||
|
||||
logg.debug('trigger item {} tokens {} sender {} recipient {} balance {}')
|
||||
(e, t, balance_result,) = traffic_item.method(
|
||||
token_pair,
|
||||
sender,
|
||||
recipient,
|
||||
balance_full,
|
||||
traffic_provisioner.aux,
|
||||
block_number,
|
||||
tx_index,
|
||||
)
|
||||
traffic_provisioner.update_balance(sender, token_pair[0], balance_result)
|
||||
sender_indices.append(recipient_index)
|
||||
|
||||
if e != None:
|
||||
logg.info('failed {}: {}'.format(str(traffic_item), e))
|
||||
self.traffic_router.release(traffic_item)
|
||||
continue
|
||||
|
||||
if t == None:
|
||||
logg.info('traffic method {} completed immediately')
|
||||
self.traffic_router.release(traffic_item)
|
||||
traffic_item.ext = t
|
||||
self.traffic_items[traffic_item.ext] = traffic_item
|
||||
|
||||
|
||||
while True:
|
||||
m = self.pubsub.get_message(timeout=0.1)
|
||||
if m == None:
|
||||
break
|
||||
logg.debug('redis message {}'.format(m))
|
||||
if m['type'] == 'message':
|
||||
message_data = json.loads(m['data'])
|
||||
uu = message_data['root_id']
|
||||
match_item = self.traffic_items[uu]
|
||||
self.traffic_router.release(match_item)
|
||||
logg.debug('>>>>>>>>>>>>>>>>>>> match item {} {} {}'.format(match_item, match_item.result, dir(match_item)))
|
||||
if message_data['status'] != 0:
|
||||
logg.error('task item {} failed with error code {}'.format(match_item, message_data['status']))
|
||||
else:
|
||||
match_item.result = message_data['result']
|
||||
logg.debug('got callback result: {}'.format(match_item))
|
||||
|
||||
|
||||
def name(self):
|
||||
"""Returns the common name for the syncer callback implementation. Required by the chain syncer.
|
||||
"""
|
||||
return 'traffic_item_handler'
|
||||
|
||||
|
||||
    def filter(self, conn, block, tx, db_session):
        """Callback for every transaction found in a block. Required by the chain syncer.

        Currently performs no operation.

        :param conn: A HTTPConnection object to the chain rpc provider.
        :type conn: chainlib.eth.rpc.HTTPConnection
        :param block: The block object of current transaction
        :type block: chainlib.eth.block.Block
        :param tx: The block transaction object
        :type tx: chainlib.eth.tx.Tx
        :param db_session: Syncer backend database session
        :type db_session: SQLAlchemy.Session
        """
        # Observer only: logs the transaction and takes no further action.
        logg.debug('handler get {}'.format(tx))
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user