Merge remote-tracking branch 'origin/master' into lash/descriptive-documentation

This commit is contained in:
nolash 2021-05-15 06:21:55 +02:00
commit b3915d51ec
34 changed files with 292 additions and 86 deletions

View File

@ -1 +1,2 @@
from .erc20 import * from .erc20 import *
from .faucet import *

View File

@ -0,0 +1,73 @@
# standard imports
import logging
# external imports
from erc20_faucet import Faucet
from chainlib.eth.address import to_checksum_address
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.status import Status
from hexathon import strip_0x
# local imports
import cic_cache.db as cic_cache_db
from .base import TagSyncFilter
#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
class FaucetFilter(TagSyncFilter):
    """Sync filter that detects faucet "give_to" transactions and records
    them in the cic-cache transaction store under the faucet tag domain.
    """

    def __init__(self, chain_spec, sender_address=ZERO_ADDRESS):
        """
        :param chain_spec: Chain spec of the chain the filter operates on
        :param sender_address: Sender address to use for read-only contract calls
        """
        super(FaucetFilter, self).__init__('give_to', domain='faucet')
        self.chain_spec = chain_spec
        self.sender_address = sender_address

    def filter(self, conn, block, tx, db_session=None):
        """Inspect a transaction and, if it is a faucet give_to call, store
        and tag it in the cache database.

        :param conn: RPC connection used for the follow-up contract queries
        :param block: Block object containing the transaction
        :param tx: Transaction object to inspect
        :param db_session: Database session the transaction record is written to
        :returns: True if the transaction was recorded, False otherwise
        :rtype: bool
        """
        try:
            data = strip_0x(tx.payload)
        except ValueError:
            # empty or malformed payload; cannot be a faucet call
            return False
        logg.debug('data {}'.format(data))
        # first 4 bytes (8 hex digits) of the payload is the method signature
        if Faucet.method_for(data[:8]) is None:
            return False

        token_sender = tx.inputs[0]
        # recipient address is the low-order 20 bytes (40 hex digits) of the
        # first 32-byte argument word following the 8-digit method signature
        token_recipient = data[64+8-40:]
        logg.debug('token recipient {}'.format(token_recipient))

        # resolve which token the faucet disburses
        f = Faucet(self.chain_spec)
        o = f.token(token_sender, sender_address=self.sender_address)
        r = conn.do(o)
        token = f.parse_token(r)

        # resolve the fixed disbursement amount
        f = Faucet(self.chain_spec)
        o = f.token_amount(token_sender, sender_address=self.sender_address)
        r = conn.do(o)
        token_value = f.parse_token_amount(r)

        # source and destination token/value are identical for a faucet give
        cic_cache_db.add_transaction(
            db_session,
            tx.hash,
            block.number,
            tx.index,
            to_checksum_address(token_sender),
            to_checksum_address(token_recipient),
            token,
            token,
            token_value,
            token_value,
            tx.status == Status.SUCCESS,
            block.timestamp,
            )
        db_session.flush()
        cic_cache_db.tag_transaction(
            db_session,
            tx.hash,
            self.tag_name,
            domain=self.tag_domain,
            )
        db_session.commit()

        return True

View File

@ -41,16 +41,26 @@ from cic_cache.db import (
) )
from cic_cache.runnable.daemons.filters import ( from cic_cache.runnable.daemons.filters import (
ERC20TransferFilter, ERC20TransferFilter,
FaucetFilter,
) )
script_dir = os.path.realpath(os.path.dirname(__file__)) script_dir = os.path.realpath(os.path.dirname(__file__))
def add_block_args(argparser):
argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
return argparser
logg = cic_base.log.create() logg = cic_base.log.create()
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template) argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
#argparser = cic_base.argparse.add(argparser, add_traffic_args, 'traffic') argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
args = cic_base.argparse.parse(argparser, logg) args = cic_base.argparse.parse(argparser, logg)
config = cic_base.config.create(args.c, args, args.env_prefix) config = cic_base.config.create(args.c, args, args.env_prefix)
config.add(args.history_start, 'SYNCER_HISTORY_START', True)
config.add(args.no_history, '_NO_HISTORY', True)
cic_base.config.log(config) cic_base.config.log(config)
dsn = dsn_from_config(config) dsn = dsn_from_config(config)
@ -59,7 +69,6 @@ SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
#RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER')) cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
@ -71,6 +80,7 @@ def register_filter_tags(filters, session):
session.commit() session.commit()
logg.info('added tag name "{}" domain "{}"'.format(tag[0], tag[1])) logg.info('added tag name "{}" domain "{}"'.format(tag[0], tag[1]))
except sqlalchemy.exc.IntegrityError: except sqlalchemy.exc.IntegrityError:
session.rollback()
logg.debug('already have tag name "{}" domain "{}"'.format(tag[0], tag[1])) logg.debug('already have tag name "{}" domain "{}"'.format(tag[0], tag[1]))
@ -82,7 +92,7 @@ def main():
r = rpc.do(o) r = rpc.do(o)
block_offset = int(strip_0x(r), 16) + 1 block_offset = int(strip_0x(r), 16) + 1
logg.debug('starting at block {}'.format(block_offset)) logg.debug('current block height {}'.format(block_offset))
syncers = [] syncers = []
@ -91,8 +101,13 @@ def main():
syncer_backends = SQLBackend.resume(chain_spec, block_offset) syncer_backends = SQLBackend.resume(chain_spec, block_offset)
if len(syncer_backends) == 0: if len(syncer_backends) == 0:
logg.info('found no backends to resume') initial_block_start = config.get('SYNCER_HISTORY_START')
syncer_backends.append(SQLBackend.initial(chain_spec, block_offset)) initial_block_offset = block_offset
if config.get('_NO_HISTORY'):
initial_block_start = block_offset
initial_block_offset += 1
syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
logg.info('found no backends to resume, adding initial sync from history start {} end {}'.format(initial_block_start, initial_block_offset))
else: else:
for syncer_backend in syncer_backends: for syncer_backend in syncer_backends:
logg.info('resuming sync session {}'.format(syncer_backend)) logg.info('resuming sync session {}'.format(syncer_backend))
@ -112,9 +127,11 @@ def main():
logg.info('using trusted address {}'.format(address)) logg.info('using trusted address {}'.format(address))
erc20_transfer_filter = ERC20TransferFilter(chain_spec) erc20_transfer_filter = ERC20TransferFilter(chain_spec)
faucet_filter = FaucetFilter(chain_spec)
filters = [ filters = [
erc20_transfer_filter, erc20_transfer_filter,
faucet_filter,
] ]
session = SessionBase.create_session() session = SessionBase.create_session()

View File

@ -1,2 +1,2 @@
[eth] [eth]
provider = ws://localhost:63546 provider = http://localhost:63545

View File

@ -1,2 +1,3 @@
[syncer] [syncer]
loop_interval = 1 loop_interval = 1
history_start = 0

View File

@ -1,2 +1,3 @@
[syncer] [syncer]
loop_interval = 5 loop_interval = 5
history_start = 0

View File

@ -17,7 +17,7 @@ RUN apt-get update && \
# Copy shared requirements from top of mono-repo # Copy shared requirements from top of mono-repo
RUN echo "copying root req file ${root_requirement_file}" RUN echo "copying root req file ${root_requirement_file}"
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a76 RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
COPY cic-cache/requirements.txt ./ COPY cic-cache/requirements.txt ./
COPY cic-cache/setup.cfg \ COPY cic-cache/setup.cfg \

View File

@ -1,4 +1,4 @@
cic-base~=0.1.2b8 cic-base~=0.1.2b10
alembic==1.4.2 alembic==1.4.2
confini~=0.3.6rc3 confini~=0.3.6rc3
uwsgi==2.0.19.1 uwsgi==2.0.19.1

View File

@ -22,7 +22,7 @@ from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
logg = logging.getLogger() logg = logging.getLogger()
def test_cache( def test_erc20_filter(
eth_rpc, eth_rpc,
foo_token, foo_token,
init_database, init_database,

View File

@ -0,0 +1,71 @@
# standard imports
import logging
# external imports
from chainlib.chain import ChainSpec
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.block import (
block_by_hash,
Block,
)
from chainlib.eth.tx import (
receipt,
unpack,
transaction,
Tx,
)
from hexathon import strip_0x
from erc20_faucet.faucet import SingleShotFaucet
from sqlalchemy import text
# local imports
from cic_cache.db import add_tag
from cic_cache.runnable.daemons.filters.faucet import FaucetFilter
logg = logging.getLogger()
def test_filter_faucet(
        eth_rpc,
        eth_signer,
        foo_token,
        faucet_noregistry,
        init_database,
        list_defaults,
        contract_roles,
        agent_roles,
        tags,
        ):
    """End-to-end check of FaucetFilter: execute a faucet give_to on chain,
    run the filter on the resulting transaction, and verify the transaction
    was stored and tagged in the cache database.

    Fixtures (presumably from conftest — confirm): RPC/signer handles, a
    deployed registry-less faucet, a database session, and role addresses.
    """
    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
    fltr = FaucetFilter(chain_spec, contract_roles['CONTRACT_DEPLOYER'])
    # register the filter's tag so tag_transaction can link against it
    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)

    # send a give_to transaction from ALICE to herself
    nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc)
    c = SingleShotFaucet(chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
    (tx_hash_hex, o) = c.give_to(faucet_noregistry, agent_roles['ALICE'], agent_roles['ALICE'])
    r = eth_rpc.do(o)
    # recover the raw signed tx from the request params for later unpacking
    tx_src = unpack(bytes.fromhex(strip_0x(o['params'][0])), chain_spec)

    # fetch the receipt and make sure the transaction succeeded
    o = receipt(r)
    r = eth_rpc.do(o)
    rcpt = Tx.src_normalize(r)
    assert r['status'] == 1

    # retrieve the containing block to build the Tx object the filter expects
    o = block_by_hash(r['block_hash'])
    r = eth_rpc.do(o)
    block_object = Block(r)

    tx = Tx(tx_src, block_object)
    tx.apply_receipt(rcpt)

    # filter should recognize the give_to call and record it
    r = fltr.filter(eth_rpc, block_object, tx, init_database)
    assert r

    # verify the tx was stored and linked to the faucet tag
    s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
    r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
    assert r[0] == tx.hash

View File

@ -20,7 +20,8 @@ from chainlib.eth.tx import (
) )
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.error import JSONRPCException from chainlib.error import JSONRPCException
from eth_accounts_index.registry import AccountRegistry # TODO, use interface module instead (needs gas limit method) from eth_accounts_index.registry import AccountRegistry
from eth_accounts_index import AccountsIndex
from sarafu_faucet import MinterFaucet from sarafu_faucet import MinterFaucet
from chainqueue.db.models.tx import TxCache from chainqueue.db.models.tx import TxCache
@ -127,12 +128,12 @@ def register(self, account_address, chain_spec_dict, writer_address=None):
if writer_address == ZERO_ADDRESS: if writer_address == ZERO_ADDRESS:
session.close() session.close()
raise RoleMissingError('call address for resgistering {}'.format(account_address)) raise RoleMissingError('call address for resgistering {}'.format(account_address))
account_registry_address = registry.by_name('AccountsIndex', sender_address=call_address) account_registry_address = registry.by_name('AccountRegistry', sender_address=call_address)
# Generate and sign transaction # Generate and sign transaction
rpc_signer = RPCConnection.connect(chain_spec, 'signer') rpc_signer = RPCConnection.connect(chain_spec, 'signer')
nonce_oracle = CustodialTaskNonceOracle(writer_address, self.request.root_id, session=session) #, default_nonce) nonce_oracle = CustodialTaskNonceOracle(writer_address, self.request.root_id, session=session) #, default_nonce)
gas_oracle = self.create_gas_oracle(rpc, AccountsIndex.gas) gas_oracle = self.create_gas_oracle(rpc, AccountRegistry.gas)
account_registry = AccountsIndex(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) account_registry = AccountsIndex(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
(tx_hash_hex, tx_signed_raw_hex) = account_registry.add(account_registry_address, writer_address, account_address, tx_format=TxFormat.RLP_SIGNED) (tx_hash_hex, tx_signed_raw_hex) = account_registry.add(account_registry_address, writer_address, account_address, tx_format=TxFormat.RLP_SIGNED)
rpc_signer.disconnect() rpc_signer.disconnect()

View File

@ -3,16 +3,19 @@ import logging
# external imports # external imports
import celery import celery
from cic_eth_registry.error import UnknownContractError from cic_eth_registry.error import (
UnknownContractError,
NotAContractError,
)
from chainlib.status import Status as TxStatus from chainlib.status import Status as TxStatus
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.error import RequestMismatchException from chainlib.eth.error import RequestMismatchException
from chainlib.eth.constant import ZERO_ADDRESS from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.eth.erc20 import ERC20
from hexathon import ( from hexathon import (
strip_0x, strip_0x,
add_0x, add_0x,
) )
from eth_erc20 import ERC20
from erc20_faucet import Faucet from erc20_faucet import Faucet
# local imports # local imports
@ -124,8 +127,7 @@ class CallbackFilter(SyncFilter):
(transfer_type, transfer_data) = parser(tx, conn) (transfer_type, transfer_data) = parser(tx, conn)
if transfer_type == None: if transfer_type == None:
continue continue
else: break
pass
except RequestMismatchException: except RequestMismatchException:
continue continue
@ -168,7 +170,9 @@ class CallbackFilter(SyncFilter):
t = self.call_back(transfer_type, result) t = self.call_back(transfer_type, result)
logg.info('callback success task id {} tx {} queue {}'.format(t, tx.hash, t.queue)) logg.info('callback success task id {} tx {} queue {}'.format(t, tx.hash, t.queue))
except UnknownContractError: except UnknownContractError:
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(tx.queue, tx.method, transfer_data['to'], tx.hash)) logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(self.queue, self.method, transfer_data['to'], tx.hash))
except NotAContractError:
logg.debug('callback filter {}:{} skipping "transfer" on non-contract address {} tx {}'.format(self.queue, self.method, transfer_data['to'], tx.hash))
def __str__(self): def __str__(self):

View File

@ -14,7 +14,7 @@ from .base import SyncFilter
logg = logging.getLogger().getChild(__name__) logg = logging.getLogger().getChild(__name__)
account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd' account_registry_add_log_hash = '0x9cc987676e7d63379f176ea50df0ae8d2d9d1141d1231d4ce15b5965f73c9430'
class RegistrationFilter(SyncFilter): class RegistrationFilter(SyncFilter):

View File

@ -30,7 +30,7 @@ class TxFilter(SyncFilter):
if otx == None: if otx == None:
logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex)) logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
return None return None
logg.info('tx filter match on {}'.format(otx.tx_hash)) logg.debug('otx filter match on {}'.format(otx.tx_hash))
db_session.flush() db_session.flush()
SessionBase.release_session(db_session) SessionBase.release_session(db_session)
s_final_state = celery.signature( s_final_state = celery.signature(

View File

@ -51,15 +51,23 @@ from cic_eth.registry import (
script_dir = os.path.realpath(os.path.dirname(__file__)) script_dir = os.path.realpath(os.path.dirname(__file__))
def add_block_args(argparser):
argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
return argparser
logg = cic_base.log.create() logg = cic_base.log.create()
argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template) argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
#argparser = cic_base.argparse.add(argparser, add_traffic_args, 'traffic') argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
args = cic_base.argparse.parse(argparser, logg) args = cic_base.argparse.parse(argparser, logg)
config = cic_base.config.create(args.c, args, args.env_prefix) config = cic_base.config.create(args.c, args, args.env_prefix)
config.add(args.y, '_KEYSTORE_FILE', True) config.add(args.y, '_KEYSTORE_FILE', True)
config.add(args.q, '_CELERY_QUEUE', True) config.add(args.q, '_CELERY_QUEUE', True)
config.add(args.history_start, 'SYNCER_HISTORY_START', True)
config.add(args.no_history, '_NO_HISTORY', True)
cic_base.config.log(config) cic_base.config.log(config)
@ -69,9 +77,9 @@ SessionBase.connect(dsn, pool_size=16, debug=config.true('DATABASE_DEBUG'))
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC')) chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
#RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER')) cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
def main(): def main():
# connect to celery # connect to celery
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
@ -89,7 +97,7 @@ def main():
stat = init_chain_stat(rpc, block_start=block_current) stat = init_chain_stat(rpc, block_start=block_current)
loop_interval = stat.block_average() loop_interval = stat.block_average()
logg.debug('starting at block {}'.format(block_offset)) logg.debug('current block height {}'.format(block_offset))
syncers = [] syncers = []
@ -98,8 +106,13 @@ def main():
syncer_backends = SQLBackend.resume(chain_spec, block_offset) syncer_backends = SQLBackend.resume(chain_spec, block_offset)
if len(syncer_backends) == 0: if len(syncer_backends) == 0:
logg.info('found no backends to resume') initial_block_start = config.get('SYNCER_HISTORY_START')
syncer_backends.append(SQLBackend.initial(chain_spec, block_offset)) initial_block_offset = block_offset
if config.get('_NO_HISTORY'):
initial_block_start = block_offset
initial_block_offset += 1
syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
logg.info('found no backends to resume, adding initial sync from history start {} end {}'.format(initial_block_start, initial_block_offset))
else: else:
for syncer_backend in syncer_backends: for syncer_backend in syncer_backends:
logg.info('resuming sync session {}'.format(syncer_backend)) logg.info('resuming sync session {}'.format(syncer_backend))
@ -155,7 +168,6 @@ def main():
for cf in callback_filters: for cf in callback_filters:
syncer.add_filter(cf) syncer.add_filter(cf)
#r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
r = syncer.loop(int(loop_interval), rpc) r = syncer.loop(int(loop_interval), rpc)
sys.stderr.write("sync {} done at block {}\n".format(syncer, r)) sys.stderr.write("sync {} done at block {}\n".format(syncer, r))

View File

@ -10,7 +10,7 @@ version = (
0, 0,
11, 11,
0, 0,
'beta.12', 'beta.13',
) )
version_object = semver.VersionInfo( version_object = semver.VersionInfo(

View File

@ -1,2 +1,3 @@
[SYNCER] [SYNCER]
loop_interval = loop_interval =
history_start = 0

View File

@ -1,2 +1,3 @@
[SYNCER] [SYNCER]
loop_interval = loop_interval =
history_start = 0

View File

@ -19,7 +19,7 @@ RUN apt-get update && \
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
# Copy shared requirements from top of mono-repo # Copy shared requirements from top of mono-repo
RUN echo "copying root req file ${root_requirement_file}" RUN echo "copying root req file: ${root_requirement_file}"
#COPY $root_requirement_file . #COPY $root_requirement_file .
#RUN pip install -r $root_requirement_file $pip_extra_index_url_flag #RUN pip install -r $root_requirement_file $pip_extra_index_url_flag
RUN /usr/local/bin/python -m pip install --upgrade pip RUN /usr/local/bin/python -m pip install --upgrade pip
@ -29,7 +29,7 @@ RUN /usr/local/bin/python -m pip install --upgrade pip
# python merge_requirements.py | tee merged_requirements.txt # python merge_requirements.py | tee merged_requirements.txt
#RUN cd cic-base && \ #RUN cd cic-base && \
# pip install $pip_extra_index_url_flag -r ./merged_requirements.txt # pip install $pip_extra_index_url_flag -r ./merged_requirements.txt
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b8 RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
COPY cic-eth/scripts/ scripts/ COPY cic-eth/scripts/ scripts/
COPY cic-eth/setup.cfg cic-eth/setup.py ./ COPY cic-eth/setup.cfg cic-eth/setup.py ./

View File

@ -1,4 +1,4 @@
cic-base==0.1.2b8 cic-base~=0.1.2b11
celery==4.4.7 celery==4.4.7
crypto-dev-signer~=0.4.14b3 crypto-dev-signer~=0.4.14b3
confini~=0.3.6rc3 confini~=0.3.6rc3
@ -7,18 +7,18 @@ redis==3.5.3
alembic==1.4.2 alembic==1.4.2
websockets==8.1 websockets==8.1
requests~=2.24.0 requests~=2.24.0
eth_accounts_index~=0.0.11a11 eth_accounts_index~=0.0.11a12
erc20-transfer-authorization~=0.3.1a6 erc20-transfer-authorization~=0.3.1a6
uWSGI==2.0.19.1 uWSGI==2.0.19.1
semver==2.13.0 semver==2.13.0
websocket-client==0.57.0 websocket-client==0.57.0
moolb~=0.1.1b2 moolb~=0.1.1b2
eth-address-index~=0.1.1a11 eth-address-index~=0.1.1a11
chainlib~=0.0.3a1 chainlib~=0.0.3a2
hexathon~=0.0.1a7 hexathon~=0.0.1a7
chainsyncer[sql]~=0.0.2a4 chainsyncer[sql]~=0.0.2a4
chainqueue~=0.0.2a2 chainqueue~=0.0.2a2
sarafu-faucet==0.0.3a1 sarafu-faucet==0.0.3a3
erc20-faucet==0.2.1a4
coincurve==15.0.0 coincurve==15.0.0
sarafu-faucet==0.0.3a2 potaahto~=0.0.1a2
potaahto~=0.0.1a1

View File

@ -4,7 +4,7 @@ LOCALE_FALLBACK=en
LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/ LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/
MAX_BODY_LENGTH=1024 MAX_BODY_LENGTH=1024
PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I= PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I=
SERVICE_CODE=*483*46# SERVICE_CODE=*483*46#,*483*061#,*384*96#
[phone_number] [phone_number]
REGION=KE REGION=KE

View File

@ -24,7 +24,7 @@ def from_wei(value: int) -> float:
"""This function converts values in Wei to a token in the cic network. """This function converts values in Wei to a token in the cic network.
:param value: Value in Wei :param value: Value in Wei
:type value: int :type value: int
:return: SRF equivalent of value in Wei :return: platform's default token equivalent of value in Wei
:rtype: float :rtype: float
""" """
value = float(value) / 1e+6 value = float(value) / 1e+6
@ -33,9 +33,9 @@ def from_wei(value: int) -> float:
def to_wei(value: int) -> int: def to_wei(value: int) -> int:
"""This functions converts values from a token in the cic network to Wei. """This functions converts values from a token in the cic network to Wei.
:param value: Value in SRF :param value: Value in platform's default token
:type value: int :type value: int
:return: Wei equivalent of value in SRF :return: Wei equivalent of value in platform's default token
:rtype: int :rtype: int
""" """
return int(value * 1e+6) return int(value * 1e+6)

View File

@ -13,7 +13,7 @@ import argparse
import logging import logging
import urllib import urllib
from xdg.BaseDirectory import xdg_config_home from xdg.BaseDirectory import xdg_config_home
from urllib import request from urllib import parse, request
# third-party imports # third-party imports
from confini import Config from confini import Config
@ -92,9 +92,9 @@ def main():
data['text'] = user_input data['text'] = user_input
req = urllib.request.Request(url) req = urllib.request.Request(url)
data_str = json.dumps(data) urlencoded_data = parse.urlencode(data)
data_bytes = data_str.encode('utf-8') data_bytes = urlencoded_data.encode('utf-8')
req.add_header('Content-Type', 'application/json') req.add_header('Content-Type', 'application/x-www-form-urlencoded')
req.data = data_bytes req.data = data_bytes
response = urllib.request.urlopen(req) response = urllib.request.urlopen(req)
response_data = response.read().decode('utf-8') response_data = response.read().decode('utf-8')

View File

@ -4,6 +4,7 @@
# standard imports # standard imports
import json import json
import logging import logging
from urllib.parse import parse_qs
# third-party imports # third-party imports
import celery import celery
@ -33,8 +34,7 @@ from cic_ussd.requests import (get_request_endpoint,
from cic_ussd.runnable.server_base import exportable_parser, logg from cic_ussd.runnable.server_base import exportable_parser, logg
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
from cic_ussd.state_machine import UssdStateMachine from cic_ussd.state_machine import UssdStateMachine
from cic_ussd.validator import check_ip, check_request_content_length, check_service_code, validate_phone_number, \ from cic_ussd.validator import check_ip, check_request_content_length, validate_phone_number, validate_presence
validate_presence
args = exportable_parser.parse_args() args = exportable_parser.parse_args()
@ -124,6 +124,9 @@ else:
raise InitializationError(f'Default token data for: {chain_str} not found.') raise InitializationError(f'Default token data for: {chain_str} not found.')
valid_service_codes = config.get('APP_SERVICE_CODE').split(",")
def application(env, start_response): def application(env, start_response):
"""Loads python code for application to be accessible over web server """Loads python code for application to be accessible over web server
:param env: Object containing server and request information :param env: Object containing server and request information
@ -139,13 +142,27 @@ def application(env, start_response):
if get_request_method(env=env) == 'POST' and get_request_endpoint(env=env) == '/': if get_request_method(env=env) == 'POST' and get_request_endpoint(env=env) == '/':
# get post data if env.get('CONTENT_TYPE') != 'application/x-www-form-urlencoded':
post_data = json.load(env.get('wsgi.input')) start_response('405 Play by the rules', errors_headers)
return []
service_code = post_data.get('serviceCode') post_data = env.get('wsgi.input').read()
phone_number = post_data.get('phoneNumber') post_data = post_data.decode('utf-8')
external_session_id = post_data.get('sessionId')
user_input = post_data.get('text') try:
post_data = parse_qs(post_data)
except TypeError:
start_response('400 Size matters', errors_headers)
return []
service_code = post_data.get('serviceCode')[0]
phone_number = post_data.get('phoneNumber')[0]
external_session_id = post_data.get('sessionId')[0]
try:
user_input = post_data.get('text')[0]
except TypeError:
user_input = ""
# add validation for phone number # add validation for phone number
if phone_number: if phone_number:
@ -162,14 +179,14 @@ def application(env, start_response):
return [] return []
# validate service code # validate service code
if not check_service_code(code=service_code, config=config): if service_code not in valid_service_codes:
response = define_multilingual_responses( response = define_multilingual_responses(
key='ussd.kenya.invalid_service_code', key='ussd.kenya.invalid_service_code',
locales=['en', 'sw'], locales=['en', 'sw'],
prefix='END', prefix='END',
valid_service_code=config.get('APP_SERVICE_CODE')) valid_service_code=valid_service_codes[0])
response_bytes, headers = define_response_with_content(headers=errors_headers, response=response) response_bytes, headers = define_response_with_content(headers=headers, response=response)
start_response('400 Invalid service code', headers) start_response('200 OK', headers)
return [response_bytes] return [response_bytes]
# validate phone number # validate phone number
@ -192,3 +209,8 @@ def application(env, start_response):
start_response('200 OK,', headers) start_response('200 OK,', headers)
SessionBase.session.close() SessionBase.session.close()
return [response_bytes] return [response_bytes]
else:
start_response('405 Play by the rules', errors_headers)
return []

View File

@ -12,6 +12,7 @@ from cic_ussd.chain import Chain
from cic_ussd.db.models.account import AccountStatus, Account from cic_ussd.db.models.account import AccountStatus, Account
from cic_ussd.operations import save_to_in_memory_ussd_session_data from cic_ussd.operations import save_to_in_memory_ussd_session_data
from cic_ussd.phone_number import get_user_by_phone_number from cic_ussd.phone_number import get_user_by_phone_number
from cic_ussd.processor import retrieve_token_symbol
from cic_ussd.redis import create_cached_data_key, get_cached_data from cic_ussd.redis import create_cached_data_key, get_cached_data
from cic_ussd.transactions import OutgoingTransactionProcessor from cic_ussd.transactions import OutgoingTransactionProcessor
@ -124,14 +125,18 @@ def process_transaction_request(state_machine_data: Tuple[str, dict, Account]):
""" """
user_input, ussd_session, user = state_machine_data user_input, ussd_session, user = state_machine_data
# retrieve token symbol
chain_str = Chain.spec.__str__()
# get user from phone number # get user from phone number
recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number') recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
recipient = get_user_by_phone_number(phone_number=recipient_phone_number) recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
to_address = recipient.blockchain_address to_address = recipient.blockchain_address
from_address = user.blockchain_address from_address = user.blockchain_address
amount = int(ussd_session.get('session_data').get('transaction_amount')) amount = int(ussd_session.get('session_data').get('transaction_amount'))
chain_str = Chain.spec.__str__() token_symbol = retrieve_token_symbol(chain_str=chain_str)
outgoing_tx_processor = OutgoingTransactionProcessor(chain_str=chain_str, outgoing_tx_processor = OutgoingTransactionProcessor(chain_str=chain_str,
from_address=from_address, from_address=from_address,
to_address=to_address) to_address=to_address)
outgoing_tx_processor.process_outgoing_transfer_transaction(amount=amount) outgoing_tx_processor.process_outgoing_transfer_transaction(amount=amount, token_symbol=token_symbol)

View File

@ -120,7 +120,7 @@ class OutgoingTransactionProcessor:
self.from_address = from_address self.from_address = from_address
self.to_address = to_address self.to_address = to_address
def process_outgoing_transfer_transaction(self, amount: int, token_symbol='SRF'): def process_outgoing_transfer_transaction(self, amount: int, token_symbol: str):
"""This function initiates standard transfers between one account to another """This function initiates standard transfers between one account to another
:param amount: The amount of tokens to be sent :param amount: The amount of tokens to be sent
:type amount: int :type amount: int

View File

@ -45,19 +45,6 @@ def check_request_content_length(config: Config, env: dict):
config.get('APP_MAX_BODY_LENGTH')) config.get('APP_MAX_BODY_LENGTH'))
def check_service_code(code: str, config: Config):
"""Checks whether provided code matches expected service code
:param config: A dictionary object containing configuration values
:type config: Config
:param code: Service code passed over request
:type code: str
:return: Service code validity
:rtype: boolean
"""
return code == config.get('APP_SERVICE_CODE')
def check_known_user(phone: str): def check_known_user(phone: str):
""" """
This method attempts to ascertain whether the user already exists and is known to the system. This method attempts to ascertain whether the user already exists and is known to the system.

View File

@ -158,6 +158,8 @@ en:
Your Sarafu-Network balances is: %{token_balance} Your Sarafu-Network balances is: %{token_balance}
00. Back 00. Back
99. Exit 99. Exit
invalid_service_code: |-
Please dial %{valid_service_code} to access Sarafu Network
help: |- help: |-
CON For assistance call %{support_phone} CON For assistance call %{support_phone}
00. Back 00. Back

View File

@ -158,6 +158,8 @@ sw:
Akaunti yako ya Sarafu-Network ina salio ifuatayo: %{token_balance} Akaunti yako ya Sarafu-Network ina salio ifuatayo: %{token_balance}
00. Nyuma 00. Nyuma
99. Ondoka 99. Ondoka
invalid_service_code: |-
Bonyeza %{valid_service_code} kutumia mtandao wa Sarafu
help: |- help: |-
CON Kwa usaidizi piga simu %{support_phone} CON Kwa usaidizi piga simu %{support_phone}
0. Nyuma 0. Nyuma

View File

@ -54,11 +54,11 @@ ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH
ARG pip_extra_args="" ARG pip_extra_args=""
ARG pip_index_url=https://pypi.org/simple ARG pip_index_url=https://pypi.org/simple
ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433 ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433
ARG cic_base_version=0.1.2b8 ARG cic_base_version=0.1.2b11
ARG cic_eth_version=0.11.0b12 ARG cic_eth_version=0.11.0b14
ARG sarafu_token_version=0.0.1a8 ARG sarafu_token_version=0.0.1a8
ARG sarafu_faucet_version=0.0.3a2 ARG sarafu_faucet_version=0.0.3a3
RUN pip install --user --index-url https://pypi.org/simple --extra-index-url $pip_extra_index_url \ RUN pip install --index-url https://pypi.org/simple --extra-index-url $pip_extra_index_url \
cic-base[full_graph]==$cic_base_version \ cic-base[full_graph]==$cic_base_version \
cic-eth==$cic_eth_version \ cic-eth==$cic_eth_version \
sarafu-faucet==$sarafu_faucet_version \ sarafu-faucet==$sarafu_faucet_version \

View File

@ -27,16 +27,17 @@ from chainlib.eth.block import (
) )
from chainlib.hash import keccak256_string_to_hex from chainlib.hash import keccak256_string_to_hex
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.erc20 import ERC20
from chainlib.eth.gas import OverrideGasOracle from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import TxFactory from chainlib.eth.tx import TxFactory
from chainlib.jsonrpc import jsonrpc_template from chainlib.jsonrpc import jsonrpc_template
from chainlib.eth.error import EthException from chainlib.eth.error import EthException
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore from crypto_dev_signer.keystore.dict import DictKeystore
from cic_types.models.person import Person from cic_types.models.person import Person
from eth_erc20 import ERC20
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
@ -51,7 +52,7 @@ argparser.add_argument('-c', type=str, default=config_dir, help='config root to
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec') argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec') argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address') argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
argparser.add_argument('--token-symbol', default='SRF', type=str, dest='token_symbol', help='Token symbol to use for trnsactions') argparser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol', help='Token symbol to use for trnsactions')
argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)') argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration') argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to') argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
@ -252,6 +253,10 @@ def main():
except ValueError as e: except ValueError as e:
logg.critical('lookup failed for token {}: {}'.format(token_symbol, e)) logg.critical('lookup failed for token {}: {}'.format(token_symbol, e))
sys.exit(1) sys.exit(1)
if sarafu_token_address == ZERO_ADDRESS:
raise KeyError('token address for symbol {} is zero'.format(token_symbol))
logg.info('found token address {}'.format(sarafu_token_address)) logg.info('found token address {}'.format(sarafu_token_address))
syncer_backend = MemBackend(chain_str, 0) syncer_backend = MemBackend(chain_str, 0)

View File

@ -27,7 +27,6 @@ from chainlib.eth.block import (
) )
from chainlib.hash import keccak256_string_to_hex from chainlib.hash import keccak256_string_to_hex
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.erc20 import ERC20
from chainlib.eth.gas import OverrideGasOracle from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import TxFactory from chainlib.eth.tx import TxFactory
@ -37,6 +36,7 @@ from chainlib.chain import ChainSpec
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore from crypto_dev_signer.keystore.dict import DictKeystore
from cic_types.models.person import Person from cic_types.models.person import Person
from eth_erc20 import ERC20
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)

View File

@ -1,5 +1,5 @@
cic-base[full_graph]==0.1.2b8 cic-base[full_graph]==0.1.2b9
sarafu-faucet==0.0.3a2 sarafu-faucet==0.0.3a3
cic-eth==0.11.0b12 cic-eth==0.11.0b13
cic-types==0.1.0a10 cic-types==0.1.0a11
crypto-dev-signer==0.4.14b3 crypto-dev-signer==0.4.14b3

View File

@ -34,7 +34,6 @@ from chainlib.eth.block import (
) )
from chainlib.hash import keccak256_string_to_hex from chainlib.hash import keccak256_string_to_hex
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.erc20 import ERC20
from chainlib.eth.gas import ( from chainlib.eth.gas import (
OverrideGasOracle, OverrideGasOracle,
balance, balance,
@ -46,7 +45,8 @@ from cic_types.models.person import (
Person, Person,
generate_metadata_pointer, generate_metadata_pointer,
) )
from erc20_single_shot_faucet import SingleShotFaucet from erc20_faucet import Faucet
from eth_erc20 import ERC20
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
@ -224,7 +224,7 @@ class Verifier:
self.api = cic_eth_api self.api = cic_eth_api
self.data_dir = data_dir self.data_dir = data_dir
self.exit_on_error = exit_on_error self.exit_on_error = exit_on_error
self.faucet_tx_factory = SingleShotFaucet(chain_spec, gas_oracle=gas_oracle) self.faucet_tx_factory = Faucet(chain_spec, gas_oracle=gas_oracle)
verifymethods = [] verifymethods = []
for k in dir(self): for k in dir(self):