Compare commits
38 Commits
lash/contr ... lash/custo

| SHA1 |
|---|
| d605036b58 |
| f66f913307 |
| 8bf1364864 |
| 6be3961260 |
| 0d6d7179eb |
| e7f48f3ce0 |
| b252fab018 |
| 4667916d80 |
| 1f668384cc |
| 123dc55687 |
| 0b4d8d5937 |
| ed6bef4052 |
| 6a8a356f09 |
| 5ec0b67496 |
| 7d935bcbc3 |
| fd69a3c6bb |
| 298bcf89e5 |
| 5d3d773f41 |
| e71b2411d0 |
| b4bfb76634 |
| aab5c8bf85 |
| e1564574f7 |
| 13253a2dcc |
| 9020fe1000 |
| a2e7d2973c |
| 82f650e81d |
| e77940d0de |
| 1df62717ef |
| c4919d56b1 |
| 6d44863a49 |
| b02cdee1bd |
| 75bf8f15be |
| 8db76dc0a8 |
| a3261f2f0e |
| 850dd15451 |
| 0c56e84704 |
| 63cd8a4aab |
| 2c326f62ae |
@@ -6,3 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=postgresql
 DRIVER=psycopg2
+DEBUG=
@@ -6,3 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=sqlite
 DRIVER=pysqlite
+DEBUG=
@@ -2,9 +2,14 @@
 import logging

 # local imports
-from .list import list_transactions_mined
-from .list import list_transactions_account_mined
-from .list import add_transaction
+from .list import (
+        list_transactions_mined,
+        list_transactions_account_mined,
+        add_transaction,
+        tag_transaction,
+        add_tag,
+        )


 logg = logging.getLogger()
@@ -2,8 +2,9 @@
 import logging
 import datetime

-# third-party imports
+# external imports
 from cic_cache.db.models.base import SessionBase
+from sqlalchemy import text

 logg = logging.getLogger()
@@ -50,7 +51,8 @@ def list_transactions_account_mined(


 def add_transaction(
-        session, tx_hash,
+        session,
+        tx_hash,
         block_number,
         tx_index,
         sender,
@@ -62,6 +64,33 @@ def add_transaction(
         success,
         timestamp,
     ):
+    """Adds a single transaction to the cache persistent storage. Sensible interpretation of all fields is the responsibility of the caller.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param tx_hash: Transaction hash
+    :type tx_hash: str, 0x-hex
+    :param block_number: Block number
+    :type block_number: int
+    :param tx_index: Transaction index in block
+    :type tx_index: int
+    :param sender: Ethereum address of effective sender
+    :type sender: str, 0x-hex
+    :param receiver: Ethereum address of effective recipient
+    :type receiver: str, 0x-hex
+    :param source_token: Ethereum address of token used by sender
+    :type source_token: str, 0x-hex
+    :param destination_token: Ethereum address of token received by recipient
+    :type destination_token: str, 0x-hex
+    :param from_value: Source token value spent in transaction
+    :type from_value: int
+    :param to_value: Destination token value received in transaction
+    :type to_value: int
+    :param success: True if code execution on network was successful
+    :type success: bool
+    :param date_block: Block timestamp
+    :type date_block: datetime
+    """
     date_block = datetime.datetime.fromtimestamp(timestamp)
     s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, from_value, to_value, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, {}, {}, '{}')".format(
             tx_hash,
@@ -77,3 +106,74 @@ def add_transaction(
             date_block,
         )
     session.execute(s)
+
+
+def tag_transaction(
+        session,
+        tx_hash,
+        name,
+        domain=None,
+    ):
+    """Tag a single transaction with a single tag.
+
+    Tag must already exist in storage.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param tx_hash: Transaction hash
+    :type tx_hash: str, 0x-hex
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    :raises ValueError: Unknown tag or transaction hash
+    """
+
+    s = text("SELECT id from tx where tx_hash = :a")
+    r = session.execute(s, {'a': tx_hash}).fetchall()
+    tx_id = r[0].values()[0]
+
+    if tx_id == None:
+        raise ValueError('unknown tx hash {}'.format(tx_hash))
+
+    #s = text("SELECT id from tag where value = :a and domain = :b")
+    if domain == None:
+        s = text("SELECT id from tag where value = :a")
+    else:
+        s = text("SELECT id from tag where value = :a and domain = :b")
+    r = session.execute(s, {'a': name, 'b': domain}).fetchall()
+    tag_id = r[0].values()[0]

+    logg.debug('type {} {}'.format(type(tag_id), type(tx_id)))
+
+    if tag_id == None:
+        raise ValueError('unknown tag name {} domain {}'.format(name, domain))
+
+    s = text("INSERT INTO tag_tx_link (tag_id, tx_id) VALUES (:a, :b)")
+    r = session.execute(s, {'a': int(tag_id), 'b': int(tx_id)})
+
+
+def add_tag(
+        session,
+        name,
+        domain=None,
+    ):
+    """Add a single tag to storage.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    :raises sqlalchemy.exc.IntegrityError: Tag already exists
+    """
+
+    s = None
+    if domain == None:
+        s = text("INSERT INTO tag (value) VALUES (:b)")
+    else:
+        s = text("INSERT INTO tag (domain, value) VALUES (:a, :b)")
+    session.execute(s, {'a': domain, 'b': name})
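The two new helpers form a small tagging API: add_tag registers a tag (raising sqlalchemy.exc.IntegrityError on duplicates), and tag_transaction links an already-registered tag to a cached transaction. A minimal usage sketch, assuming a SQLAlchemy session bound to a database that already has the tx, tag and tag_tx_link tables; tag_tx_once is an illustrative helper, not part of this changeset:

```python
# Illustrative helper (not part of this changeset): idempotently register
# a tag, then link it to a cached transaction.
import sqlalchemy

from cic_cache.db import add_tag, tag_transaction


def tag_tx_once(session, tx_hash, name, domain=None):
    try:
        # add_tag raises IntegrityError when the (domain, value) pair exists
        add_tag(session, name, domain=domain)
        session.commit()
    except sqlalchemy.exc.IntegrityError:
        session.rollback()
    # tag_transaction requires the tag to exist already, and raises
    # ValueError on an unknown tag or transaction hash
    tag_transaction(session, tx_hash, name, domain=domain)
    session.commit()
```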
@@ -0,0 +1,38 @@
+"""Transaction tags
+
+Revision ID: aaf2bdce7d6e
+Revises: 6604de4203e2
+Create Date: 2021-05-01 09:20:20.775082
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'aaf2bdce7d6e'
+down_revision = '6604de4203e2'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.create_table(
+        'tag',
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('domain', sa.String(), nullable=True),
+        sa.Column('value', sa.String(), nullable=False),
+    )
+    op.create_index('idx_tag_domain_value', 'tag', ['domain', 'value'], unique=True)
+
+    op.create_table(
+        'tag_tx_link',
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('tag_id', sa.Integer, sa.ForeignKey('tag.id'), nullable=False),
+        sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=False),
+    )
+
+
+def downgrade():
+    op.drop_table('tag_tx_link')
+    op.drop_index('idx_tag_domain_value')
+    op.drop_table('tag')
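The migration can be applied the same way the reworked init_database fixture later in this changeset does, through the Alembic command API. A sketch only; the migrations directory and database URL below are assumptions, not values from the diff:

```python
# Sketch: apply the tag migration programmatically; paths and DSN are
# assumed for illustration, not taken from this changeset.
import alembic.command
from alembic.config import Config as AlembicConfig

migrationsdir = 'apps/cic-cache/cic_cache/db/migrations/default'  # assumed path
ac = AlembicConfig(migrationsdir + '/alembic.ini')
ac.set_main_option('sqlalchemy.url', 'postgresql+psycopg2://localhost:5432/cic_cache')  # assumed DSN
ac.set_main_option('script_location', migrationsdir)
alembic.command.upgrade(ac, 'aaf2bdce7d6e')  # the revision added above
```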
@@ -1 +1,2 @@
 from .erc20 import *
+from .faucet import *
@@ -1,2 +1,27 @@
-class SyncFilter:
-    pass
+class TagSyncFilter:
+    """Holds tag name and domain for an implementing filter.
+
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    """
+
+    def __init__(self, name, domain=None):
+        self.tag_name = name
+        self.tag_domain = domain
+
+
+    def tag(self):
+        """Return tag value/domain.
+
+        :rtype: Tuple
+        :returns: tag value/domain.
+        """
+        return (self.tag_name, self.tag_domain)
+
+
+    def __str__(self):
+        if self.tag_domain == None:
+            return self.tag_name
+        return '{}.{}'.format(self.tag_domain, self.tag_name)
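TagSyncFilter replaces the empty SyncFilter stub and gives every filter a tag identity the tracker can register at startup. A sketch of the subclass pattern the ERC20 and faucet filters below follow; MyEventFilter and its tag values are illustrative, not part of the diff:

```python
# Illustrative subclass (not part of this changeset), following the same
# pattern as ERC20TransferFilter and FaucetFilter.
from .base import TagSyncFilter


class MyEventFilter(TagSyncFilter):

    def __init__(self, chain_spec):
        # register under tag value 'my_event' in domain 'my_domain'
        super(MyEventFilter, self).__init__('my_event', domain='my_domain')
        self.chain_spec = chain_spec

    def filter(self, conn, block, tx, db_session=None):
        # return True when the tx matched and was cached and tagged
        return False
```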
@@ -2,7 +2,6 @@
 import logging

 # external imports
-from chainlib.eth.erc20 import ERC20
 from chainlib.eth.address import (
         to_checksum_address,
         )
@@ -13,17 +12,19 @@ from cic_eth_registry.error import (
         NotAContractError,
         ContractMismatchError,
         )
+from eth_erc20 import ERC20

 # local imports
-from .base import SyncFilter
+from .base import TagSyncFilter
 from cic_cache import db as cic_cache_db

 logg = logging.getLogger().getChild(__name__)


-class ERC20TransferFilter(SyncFilter):
+class ERC20TransferFilter(TagSyncFilter):

     def __init__(self, chain_spec):
+        super(ERC20TransferFilter, self).__init__('transfer', domain='erc20')
         self.chain_spec = chain_spec

@@ -46,6 +47,9 @@ class ERC20TransferFilter(SyncFilter):
         except RequestMismatchException:
             logg.debug('erc20 match but not a transfer, skipping')
             return False
+        except ValueError:
+            logg.debug('erc20 match but bogus data, skipping')
+            return False

         token_sender = tx.outputs[0]
         token_recipient = transfer_data[0]
@@ -67,7 +71,13 @@ class ERC20TransferFilter(SyncFilter):
                 tx.status == Status.SUCCESS,
                 block.timestamp,
                 )
-        #db_session.flush()
+        db_session.flush()
+        cic_cache_db.tag_transaction(
+                db_session,
+                tx.hash,
+                self.tag_name,
+                domain=self.tag_domain,
+                )
         db_session.commit()

         return True
apps/cic-cache/cic_cache/runnable/daemons/filters/faucet.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+# standard imports
+import logging
+
+# external imports
+from erc20_faucet import Faucet
+from chainlib.eth.address import to_checksum_address
+from chainlib.eth.constant import ZERO_ADDRESS
+from chainlib.status import Status
+from hexathon import strip_0x
+
+# local imports
+import cic_cache.db as cic_cache_db
+from .base import TagSyncFilter
+
+#logg = logging.getLogger().getChild(__name__)
+logg = logging.getLogger()
+
+
+class FaucetFilter(TagSyncFilter):
+
+    def __init__(self, chain_spec, sender_address=ZERO_ADDRESS):
+        super(FaucetFilter, self).__init__('give_to', domain='faucet')
+        self.chain_spec = chain_spec
+        self.sender_address = sender_address
+
+
+    def filter(self, conn, block, tx, db_session=None):
+        try:
+            data = strip_0x(tx.payload)
+        except ValueError:
+            return False
+        logg.debug('data {}'.format(data))
+        if Faucet.method_for(data[:8]) == None:
+            return False
+
+        token_sender = tx.inputs[0]
+        token_recipient = data[64+8-40:]
+        logg.debug('token recipient {}'.format(token_recipient))
+
+        f = Faucet(self.chain_spec)
+        o = f.token(token_sender, sender_address=self.sender_address)
+        r = conn.do(o)
+        token = f.parse_token(r)
+
+        f = Faucet(self.chain_spec)
+        o = f.token_amount(token_sender, sender_address=self.sender_address)
+        r = conn.do(o)
+        token_value = f.parse_token_amount(r)
+
+        cic_cache_db.add_transaction(
+                db_session,
+                tx.hash,
+                block.number,
+                tx.index,
+                to_checksum_address(token_sender),
+                to_checksum_address(token_recipient),
+                token,
+                token,
+                token_value,
+                token_value,
+                tx.status == Status.SUCCESS,
+                block.timestamp,
+                )
+        db_session.flush()
+        cic_cache_db.tag_transaction(
+                db_session,
+                tx.hash,
+                self.tag_name,
+                domain=self.tag_domain,
+                )
+        db_session.commit()
+
+        return True
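The data[64+8-40:] slice in filter above picks the recipient address out of give_to calldata: 8 hex characters of method id followed by one 32-byte (64 hex character) left-padded address argument, of which the address proper is the last 40 characters. A worked example with an assumed method id and address:

```python
# Worked example of the calldata slice; the method id and address are
# illustrative, not taken from the faucet contract.
method_id = 'aabbccdd'                      # 4 bytes -> 8 hex chars (assumed)
address = 'ee' * 20                         # 20-byte address -> 40 hex chars
data = method_id + address.rjust(64, '0')   # argument left-padded to 32 bytes
recipient = data[64 + 8 - 40:]              # == data[32:], the last 40 hex chars
assert recipient == address
```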
@@ -7,9 +7,10 @@ import argparse
 import sys
 import re

-# third-party imports
+# external imports
 import confini
 import celery
+import sqlalchemy
 import rlp
 import cic_base.config
 import cic_base.log
@@ -34,19 +35,32 @@ from chainsyncer.driver import (
 from chainsyncer.db.models.base import SessionBase

 # local imports
-from cic_cache.db import dsn_from_config
+from cic_cache.db import (
+        dsn_from_config,
+        add_tag,
+        )
 from cic_cache.runnable.daemons.filters import (
         ERC20TransferFilter,
+        FaucetFilter,
         )

 script_dir = os.path.realpath(os.path.dirname(__file__))

+def add_block_args(argparser):
+    argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
+    argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
+    return argparser
+
+
 logg = cic_base.log.create()
 argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
-#argparser = cic_base.argparse.add(argparser, add_traffic_args, 'traffic')
+argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
 args = cic_base.argparse.parse(argparser, logg)
 config = cic_base.config.create(args.c, args, args.env_prefix)

+config.add(args.history_start, 'SYNCER_HISTORY_START', True)
+config.add(args.no_history, '_NO_HISTORY', True)
+
 cic_base.config.log(config)

 dsn = dsn_from_config(config)
@@ -55,10 +69,21 @@ SessionBase.connect(dsn, debug=config.true('DATABASE_DEBUG'))

 chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))

-#RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
 cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))


+def register_filter_tags(filters, session):
+    for f in filters:
+        tag = f.tag()
+        try:
+            add_tag(session, tag[0], domain=tag[1])
+            session.commit()
+            logg.info('added tag name "{}" domain "{}"'.format(tag[0], tag[1]))
+        except sqlalchemy.exc.IntegrityError:
+            session.rollback()
+            logg.debug('already have tag name "{}" domain "{}"'.format(tag[0], tag[1]))
+
+
 def main():
     # Connect to blockchain with chainlib
     rpc = RPCConnection.connect(chain_spec, 'default')
@@ -67,7 +92,7 @@ def main():
     r = rpc.do(o)
     block_offset = int(strip_0x(r), 16) + 1

-    logg.debug('starting at block {}'.format(block_offset))
+    logg.debug('current block height {}'.format(block_offset))

     syncers = []
@@ -76,8 +101,13 @@ def main():
     syncer_backends = SQLBackend.resume(chain_spec, block_offset)

     if len(syncer_backends) == 0:
-        logg.info('found no backends to resume')
-        syncer_backends.append(SQLBackend.initial(chain_spec, block_offset))
+        initial_block_start = config.get('SYNCER_HISTORY_START')
+        initial_block_offset = block_offset
+        if config.get('_NO_HISTORY'):
+            initial_block_start = block_offset
+            initial_block_offset += 1
+        syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
+        logg.info('found no backends to resume, adding initial sync from history start {} end {}'.format(initial_block_start, initial_block_offset))
     else:
         for syncer_backend in syncer_backends:
             logg.info('resuming sync session {}'.format(syncer_backend))
@@ -97,11 +127,22 @@ def main():
         logg.info('using trusted address {}'.format(address))

     erc20_transfer_filter = ERC20TransferFilter(chain_spec)
+    faucet_filter = FaucetFilter(chain_spec)
+
+    filters = [
+        erc20_transfer_filter,
+        faucet_filter,
+        ]
+
+    session = SessionBase.create_session()
+    register_filter_tags(filters, session)
+    session.close()

     i = 0
     for syncer in syncers:
         logg.debug('running syncer index {}'.format(i))
-        syncer.add_filter(erc20_transfer_filter)
+        for f in filters:
+            syncer.add_filter(f)
         r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
         sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
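The new history-start logic replaces the unconditional SQLBackend.initial(chain_spec, block_offset) call: by default the initial sync backfills from SYNCER_HISTORY_START up to the current head, while --no-history collapses the range so the syncer starts at the head itself. A worked example of the branch, with assumed values:

```python
# Worked example of the initial-sync range above; values are assumed.
block_offset = 1001        # current chain head + 1, as computed in main()
history_start = 42         # config 'SYNCER_HISTORY_START'
no_history = False         # True when --no-history is given

initial_block_start = history_start
initial_block_offset = block_offset
if no_history:
    initial_block_start = block_offset
    initial_block_offset += 1

# default run:       start=42,   end=1001  (backfill history, then follow)
# with --no-history: start=1001, end=1002  (skip straight to the head)
print(initial_block_start, initial_block_offset)
```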
@@ -6,4 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=postgresql
 DRIVER=psycopg2
-DEBUG=
+DEBUG=0
@@ -1,2 +1,2 @@
 [eth]
-provider = ws://localhost:63546
+provider = http://localhost:63545
@@ -1,2 +1,3 @@
 [syncer]
 loop_interval = 1
+history_start = 0
@@ -1,2 +1,3 @@
 [syncer]
 loop_interval = 5
+history_start = 0
@@ -1,2 +1,4 @@
 [cic]
 registry_address =
+chain_spec =
+trust_address =
@@ -6,4 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=sqlite
 DRIVER=pysqlite
-DEBUG=
+DEBUG=1
@@ -17,7 +17,7 @@ RUN apt-get update && \

 # Copy shared requirements from top of mono-repo
 RUN echo "copying root req file ${root_requirement_file}"
-RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a76
+RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9

 COPY cic-cache/requirements.txt ./
 COPY cic-cache/setup.cfg \
@@ -43,10 +43,6 @@ COPY cic-cache/config/ /usr/local/etc/cic-cache/
 RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
 COPY cic-cache/cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/

-RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
-    mkdir -p /usr/local/share/cic/solidity && \
-    cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
-
 COPY cic-cache/docker/start_tracker.sh ./start_tracker.sh
 COPY cic-cache/docker/db.sh ./db.sh
 RUN chmod 755 ./*.sh
@@ -1,12 +1,12 @@
-cic-base~=0.1.2a77
+cic-base~=0.1.2b10
 alembic==1.4.2
 confini~=0.3.6rc3
 uwsgi==2.0.19.1
 moolb~=0.1.0
-cic-eth-registry~=0.5.4a16
+cic-eth-registry~=0.5.5a4
 SQLAlchemy==1.3.20
 semver==2.13.0
 psycopg2==2.8.6
 celery==4.4.7
 redis==3.5.3
-chainsyncer[sql]~=0.0.2a2
+chainsyncer[sql]~=0.0.2a4
@@ -4,3 +4,8 @@ pytest-mock==3.3.1
 pysqlite3==0.4.3
 sqlparse==0.4.1
 pytest-celery==0.0.0a1
+eth_tester==0.5.0b3
+py-evm==0.3.0a20
+web3==5.12.2
+cic-eth-registry~=0.5.5a3
+cic-base[full]==0.1.2b8
@@ -3,7 +3,7 @@ import os
 import sys
 import datetime

-# third-party imports
+# external imports
 import pytest

 # local imports
@@ -84,3 +84,7 @@ def txs(

     session.commit()

+    return [
+        tx_hash_first,
+        tx_hash_second,
+        ]
apps/cic-cache/tests/filters/conftest.py (new file, 3 lines)
@@ -0,0 +1,3 @@
+from chainlib.eth.pytest import *
+from cic_eth_registry.pytest.fixtures_tokens import *
+
apps/cic-cache/tests/filters/test_erc20.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+# standard imports
+import os
+import datetime
+import logging
+import json
+
+# external imports
+import pytest
+from sqlalchemy import text
+from chainlib.eth.tx import Tx
+from chainlib.eth.block import Block
+from chainlib.chain import ChainSpec
+from hexathon import (
+        strip_0x,
+        add_0x,
+        )
+
+# local imports
+from cic_cache.db import add_tag
+from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
+
+logg = logging.getLogger()
+
+
+def test_erc20_filter(
+        eth_rpc,
+        foo_token,
+        init_database,
+        list_defaults,
+        list_actors,
+        tags,
+        ):
+
+    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
+
+    fltr = ERC20TransferFilter(chain_spec)
+
+    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
+
+    data = 'a9059cbb'
+    data += strip_0x(list_actors['alice'])
+    data += '1000'.ljust(64, '0')
+
+    block = Block({
+        'hash': os.urandom(32).hex(),
+        'number': 42,
+        'timestamp': datetime.datetime.utcnow().timestamp(),
+        'transactions': [],
+        })
+
+    tx = Tx({
+        'to': foo_token,
+        'from': list_actors['bob'],
+        'data': data,
+        'value': 0,
+        'hash': os.urandom(32).hex(),
+        'nonce': 13,
+        'gasPrice': 10000000,
+        'gas': 123456,
+        })
+    block.txs.append(tx)
+    tx.block = block
+
+    r = fltr.filter(eth_rpc, block, tx, db_session=init_database)
+    assert r
+
+    s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
+    r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
+    assert r[0] == tx.hash
apps/cic-cache/tests/filters/test_faucet.py (new file, 71 lines)
@@ -0,0 +1,71 @@
+# standard imports
+import logging
+
+# external imports
+from chainlib.chain import ChainSpec
+from chainlib.eth.nonce import RPCNonceOracle
+from chainlib.eth.block import (
+        block_by_hash,
+        Block,
+        )
+from chainlib.eth.tx import (
+        receipt,
+        unpack,
+        transaction,
+        Tx,
+        )
+from hexathon import strip_0x
+from erc20_faucet.faucet import SingleShotFaucet
+from sqlalchemy import text
+
+# local imports
+from cic_cache.db import add_tag
+from cic_cache.runnable.daemons.filters.faucet import FaucetFilter
+
+logg = logging.getLogger()
+
+
+def test_filter_faucet(
+        eth_rpc,
+        eth_signer,
+        foo_token,
+        faucet_noregistry,
+        init_database,
+        list_defaults,
+        contract_roles,
+        agent_roles,
+        tags,
+        ):
+
+    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
+
+    fltr = FaucetFilter(chain_spec, contract_roles['CONTRACT_DEPLOYER'])
+
+    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
+
+    nonce_oracle = RPCNonceOracle(agent_roles['ALICE'], eth_rpc)
+    c = SingleShotFaucet(chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
+    (tx_hash_hex, o) = c.give_to(faucet_noregistry, agent_roles['ALICE'], agent_roles['ALICE'])
+    r = eth_rpc.do(o)
+
+    tx_src = unpack(bytes.fromhex(strip_0x(o['params'][0])), chain_spec)
+
+    o = receipt(r)
+    r = eth_rpc.do(o)
+    rcpt = Tx.src_normalize(r)
+
+    assert r['status'] == 1
+
+    o = block_by_hash(r['block_hash'])
+    r = eth_rpc.do(o)
+    block_object = Block(r)
+
+    tx = Tx(tx_src, block_object)
+    tx.apply_receipt(rcpt)
+
+    r = fltr.filter(eth_rpc, block_object, tx, init_database)
+    assert r
+
+    s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
+    r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
+    assert r[0] == tx.hash
@@ -2,7 +2,7 @@
 import os
 import logging

-# third-party imports
+# external imports
 import pytest
 import confini

@@ -13,7 +13,7 @@ logg = logging.getLogger(__file__)

 @pytest.fixture(scope='session')
 def load_config():
-    config_dir = os.path.join(root_dir, '.config/test')
+    config_dir = os.path.join(root_dir, 'config/test')
     conf = confini.Config(config_dir, 'CICTEST')
     conf.process()
     logg.debug('config {}'.format(conf))
@@ -3,13 +3,16 @@ import os
 import logging
 import re

-# third-party imports
+# external imports
 import pytest
 import sqlparse
+import alembic
+from alembic.config import Config as AlembicConfig

 # local imports
 from cic_cache.db.models.base import SessionBase
 from cic_cache.db import dsn_from_config
+from cic_cache.db import add_tag

 logg = logging.getLogger(__file__)
@@ -26,11 +29,10 @@ def database_engine(
     except FileNotFoundError:
         pass
     dsn = dsn_from_config(load_config)
-    SessionBase.connect(dsn)
+    SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
     return dsn


-# TODO: use alembic instead to migrate db, here we have to keep separate schema than migration script in script/migrate.py
 @pytest.fixture(scope='function')
 def init_database(
         load_config,
@@ -38,52 +40,23 @@ def init_database(
     ):

     rootdir = os.path.dirname(os.path.dirname(__file__))
-    schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))
-    if load_config.get('DATABASE_ENGINE') == 'sqlite':
-        rconn = SessionBase.engine.raw_connection()
-        f = open(os.path.join(schemadir, 'db.sql'))
-        s = f.read()
-        f.close()
-        rconn.executescript(s)
-
-    else:
-        rconn = SessionBase.engine.raw_connection()
-        rcursor = rconn.cursor()
-
-        #rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
-        #rcursor.execute('DROP FUNCTION IF EXISTS public.balances')
-
-        f = open(os.path.join(schemadir, 'db.sql'))
-        s = f.read()
-        f.close()
-        r = re.compile(r'^[A-Z]', re.MULTILINE)
-        for l in sqlparse.parse(s):
-            strl = str(l)
-            # we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
-            if not re.search(r, strl):
-                logg.warning('skipping parsed query line {}'.format(strl))
-                continue
-            rcursor.execute(strl)
-        rconn.commit()
-
-        rcursor.execute('SET search_path TO public')
-
-        # this doesn't work when run separately, no idea why
-        # functions have been manually added to original schema from cic-eth
-        # f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
-        # s = f.read()
-        # f.close()
-        # rcursor.execute(s)
-        #
-        # f = open(os.path.join(schemadir, 'proc_balances.sql'))
-        # s = f.read()
-        # f.close()
-        # rcursor.execute(s)
-
-        rcursor.close()
-
+    dbdir = os.path.join(rootdir, 'cic_cache', 'db')
+    migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
+    if not os.path.isdir(migrationsdir):
+        migrationsdir = os.path.join(dbdir, 'migrations', 'default')
+    logg.info('using migrations directory {}'.format(migrationsdir))
+
     session = SessionBase.create_session()
+
+    ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
+    ac.set_main_option('sqlalchemy.url', database_engine)
+    ac.set_main_option('script_location', migrationsdir)
+
+    alembic.command.downgrade(ac, 'base')
+    alembic.command.upgrade(ac, 'head')
+
+    session.commit()
+
     yield session
     session.commit()
     session.close()
@@ -116,3 +89,14 @@ def list_defaults(
     return {
         'block': 420000,
         }
+
+
+@pytest.fixture(scope='function')
+def tags(
+        init_database,
+        ):
+
+    add_tag(init_database, 'foo')
+    add_tag(init_database, 'baz', domain='bar')
+    add_tag(init_database, 'xyzzy', domain='bar')
+    init_database.commit()
@@ -4,7 +4,7 @@ import datetime
 import logging
 import json

-# third-party imports
+# external imports
 import pytest

 # local imports
apps/cic-cache/tests/test_tag.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+import os
+import datetime
+import logging
+import json
+
+# external imports
+import pytest
+
+# local imports
+from cic_cache.db import tag_transaction
+
+logg = logging.getLogger()
+
+
+def test_cache(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        txs,
+        tags,
+        ):
+
+    tag_transaction(init_database, txs[0], 'foo')
+    tag_transaction(init_database, txs[0], 'baz', domain='bar')
+    tag_transaction(init_database, txs[1], 'xyzzy', domain='bar')
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.value = 'foo'").fetchall()
+    assert r[0][0] == txs[0]
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'baz'").fetchall()
+    assert r[0][0] == txs[0]
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'xyzzy'").fetchall()
+    assert r[0][0] == txs[1]
@@ -3,7 +3,7 @@ import logging

 # external imports
 import celery
-from erc20_single_shot_faucet import SingleShotFaucet as Faucet
+from erc20_faucet import Faucet
 from hexathon import (
         strip_0x,
         )
@@ -20,8 +20,9 @@ from chainlib.eth.tx import (
         )
 from chainlib.chain import ChainSpec
 from chainlib.error import JSONRPCException
-from eth_accounts_index import AccountRegistry
-from sarafu_faucet import MinterFaucet as Faucet
+from eth_accounts_index.registry import AccountRegistry
+from eth_accounts_index import AccountsIndex
+from sarafu_faucet import MinterFaucet
 from chainqueue.db.models.tx import TxCache

 # local import
@@ -133,7 +134,7 @@ def register(self, account_address, chain_spec_dict, writer_address=None):
     rpc_signer = RPCConnection.connect(chain_spec, 'signer')
     nonce_oracle = CustodialTaskNonceOracle(writer_address, self.request.root_id, session=session) #, default_nonce)
     gas_oracle = self.create_gas_oracle(rpc, AccountRegistry.gas)
-    account_registry = AccountRegistry(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
+    account_registry = AccountsIndex(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
     (tx_hash_hex, tx_signed_raw_hex) = account_registry.add(account_registry_address, writer_address, account_address, tx_format=TxFormat.RLP_SIGNED)
     rpc_signer.disconnect()
@@ -185,7 +186,7 @@ def gift(self, account_address, chain_spec_dict):
     # Generate and sign transaction
     rpc_signer = RPCConnection.connect(chain_spec, 'signer')
     nonce_oracle = CustodialTaskNonceOracle(account_address, self.request.root_id, session=session) #, default_nonce)
-    gas_oracle = self.create_gas_oracle(rpc, Faucet.gas)
+    gas_oracle = self.create_gas_oracle(rpc, MinterFaucet.gas)
     faucet = Faucet(chain_spec, signer=rpc_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
     (tx_hash_hex, tx_signed_raw_hex) = faucet.give_to(faucet_address, account_address, account_address, tx_format=TxFormat.RLP_SIGNED)
     rpc_signer.disconnect()
@@ -338,7 +339,7 @@ def cache_account_data(
     chain_spec = ChainSpec.from_dict(chain_spec_dict)
     tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
     tx = unpack(tx_signed_raw_bytes, chain_spec)
-    tx_data = AccountRegistry.parse_add_request(tx['data'])
+    tx_data = AccountsIndex.parse_add_request(tx['data'])

     session = SessionBase.create_session()
     tx_cache = TxCache(
@@ -6,7 +6,6 @@ import celery
 from chainlib.eth.constant import ZERO_ADDRESS
 from chainlib.chain import ChainSpec
 from chainlib.connection import RPCConnection
-from chainlib.eth.erc20 import ERC20
 from chainlib.eth.tx import (
         TxFormat,
         unpack,
@@ -16,6 +15,7 @@ from cic_eth_registry.erc20 import ERC20Token
 from hexathon import strip_0x
 from chainqueue.db.models.tx import TxCache
 from chainqueue.error import NotLocalTxError
+from eth_erc20 import ERC20

 # local imports
 from cic_eth.db.models.base import SessionBase
@@ -7,7 +7,7 @@ from chainlib.chain import ChainSpec
 from chainlib.connection import RPCConnection
 from chainlib.eth.constant import ZERO_ADDRESS
 from cic_eth_registry import CICRegistry
-from eth_address_declarator import AddressDeclarator
+from eth_address_declarator import Declarator

 # local imports
 from cic_eth.task import BaseTask
@@ -23,12 +23,12 @@ def translate_address(address, trusted_addresses, chain_spec, sender_address=ZER
     registry = CICRegistry(chain_spec, rpc)

     declarator_address = registry.by_name('AddressDeclarator', sender_address=sender_address)
-    c = AddressDeclarator(chain_spec)
+    c = Declarator(chain_spec)

     for trusted_address in trusted_addresses:
         o = c.declaration(declarator_address, trusted_address, address, sender_address=sender_address)
         r = rpc.do(o)
-        declaration_hex = AddressDeclarator.parse_declaration(r)
+        declaration_hex = Declarator.parse_declaration(r)
         declaration_hex = declaration_hex[0].rstrip('0')
         declaration_bytes = bytes.fromhex(declaration_hex)
         declaration = None
|||||||
@@ -14,13 +14,13 @@ from chainlib.eth.tx import (
|
|||||||
)
|
)
|
||||||
from chainlib.eth.block import block_by_number
|
from chainlib.eth.block import block_by_number
|
||||||
from chainlib.eth.contract import abi_decode_single
|
from chainlib.eth.contract import abi_decode_single
|
||||||
from chainlib.eth.erc20 import ERC20
|
|
||||||
from hexathon import strip_0x
|
from hexathon import strip_0x
|
||||||
from cic_eth_registry import CICRegistry
|
from cic_eth_registry import CICRegistry
|
||||||
from cic_eth_registry.erc20 import ERC20Token
|
from cic_eth_registry.erc20 import ERC20Token
|
||||||
from chainqueue.db.models.otx import Otx
|
from chainqueue.db.models.otx import Otx
|
||||||
from chainqueue.db.enum import StatusEnum
|
from chainqueue.db.enum import StatusEnum
|
||||||
from chainqueue.query import get_tx_cache
|
from chainqueue.query import get_tx_cache
|
||||||
|
from eth_erc20 import ERC20
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.queue.time import tx_times
|
from cic_eth.queue.time import tx_times
|
||||||
|
|||||||
@@ -3,19 +3,20 @@ import logging
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
from cic_eth_registry.error import UnknownContractError
|
from cic_eth_registry.error import (
|
||||||
|
UnknownContractError,
|
||||||
|
NotAContractError,
|
||||||
|
)
|
||||||
from chainlib.status import Status as TxStatus
|
from chainlib.status import Status as TxStatus
|
||||||
from chainlib.eth.address import to_checksum_address
|
from chainlib.eth.address import to_checksum_address
|
||||||
from chainlib.eth.error import RequestMismatchException
|
from chainlib.eth.error import RequestMismatchException
|
||||||
from chainlib.eth.constant import ZERO_ADDRESS
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
from chainlib.eth.erc20 import ERC20
|
|
||||||
from hexathon import (
|
from hexathon import (
|
||||||
strip_0x,
|
strip_0x,
|
||||||
add_0x,
|
add_0x,
|
||||||
)
|
)
|
||||||
# TODO: use sarafu_Faucet for both when inheritance has been implemented
|
from eth_erc20 import ERC20
|
||||||
from erc20_single_shot_faucet import SingleShotFaucet
|
from erc20_faucet import Faucet
|
||||||
from sarafu_faucet import MinterFaucet as Faucet
|
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from .base import SyncFilter
|
from .base import SyncFilter
|
||||||
@@ -71,14 +72,13 @@ class CallbackFilter(SyncFilter):
|
|||||||
#transfer_data['token_address'] = tx.inputs[0]
|
#transfer_data['token_address'] = tx.inputs[0]
|
||||||
faucet_contract = tx.inputs[0]
|
faucet_contract = tx.inputs[0]
|
||||||
|
|
||||||
c = SingleShotFaucet(self.chain_spec)
|
o = Faucet.token(faucet_contract, sender_address=self.caller_address)
|
||||||
o = c.token(faucet_contract, sender_address=self.caller_address)
|
|
||||||
r = conn.do(o)
|
r = conn.do(o)
|
||||||
transfer_data['token_address'] = add_0x(c.parse_token(r))
|
transfer_data['token_address'] = add_0x(c.parse_token(r))
|
||||||
|
|
||||||
o = c.amount(faucet_contract, sender_address=self.caller_address)
|
o = c.token_amount(faucet_contract, sender_address=self.caller_address)
|
||||||
r = conn.do(o)
|
r = conn.do(o)
|
||||||
transfer_data['value'] = c.parse_amount(r)
|
transfer_data['value'] = c.parse_token_amount(r)
|
||||||
|
|
||||||
return ('tokengift', transfer_data)
|
return ('tokengift', transfer_data)
|
||||||
|
|
||||||
@@ -127,8 +127,7 @@ class CallbackFilter(SyncFilter):
|
|||||||
(transfer_type, transfer_data) = parser(tx, conn)
|
(transfer_type, transfer_data) = parser(tx, conn)
|
||||||
if transfer_type == None:
|
if transfer_type == None:
|
||||||
continue
|
continue
|
||||||
else:
|
break
|
||||||
pass
|
|
||||||
except RequestMismatchException:
|
except RequestMismatchException:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -171,7 +170,9 @@ class CallbackFilter(SyncFilter):
|
|||||||
t = self.call_back(transfer_type, result)
|
t = self.call_back(transfer_type, result)
|
||||||
logg.info('callback success task id {} tx {} queue {}'.format(t, tx.hash, t.queue))
|
logg.info('callback success task id {} tx {} queue {}'.format(t, tx.hash, t.queue))
|
||||||
except UnknownContractError:
|
except UnknownContractError:
|
||||||
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(tx.queue, tx.method, transfer_data['to'], tx.hash))
|
logg.debug('callback filter {}:{} skipping "transfer" method on unknown contract {} tx {}'.format(self.queue, self.method, transfer_data['to'], tx.hash))
|
||||||
|
except NotAContractError:
|
||||||
|
logg.debug('callback filter {}:{} skipping "transfer" on non-contract address {} tx {}'.format(self.queue, self.method, transfer_data['to'], tx.hash))
|
||||||
|
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ from .base import SyncFilter

 logg = logging.getLogger().getChild(__name__)

-account_registry_add_log_hash = '0x5ed3bdd47b9af629827a8d129aa39c870b10c03f0153fe9ddb8e84b665061acd'
+account_registry_add_log_hash = '0x9cc987676e7d63379f176ea50df0ae8d2d9d1141d1231d4ce15b5965f73c9430'


 class RegistrationFilter(SyncFilter):
@@ -30,7 +30,7 @@ class TxFilter(SyncFilter):
         if otx == None:
             logg.debug('tx {} not found locally, skipping'.format(tx_hash_hex))
             return None
-        logg.info('tx filter match on {}'.format(otx.tx_hash))
+        logg.debug('otx filter match on {}'.format(otx.tx_hash))
         db_session.flush()
         SessionBase.release_session(db_session)
         s_final_state = celery.signature(
@@ -36,6 +36,7 @@ from cic_eth.eth import (
 from cic_eth.admin import (
         debug,
         ctrl,
+        token
         )
 from cic_eth.queue import (
         query,
@@ -51,15 +51,23 @@ from cic_eth.registry import (

 script_dir = os.path.realpath(os.path.dirname(__file__))

+def add_block_args(argparser):
+    argparser.add_argument('--history-start', type=int, default=0, dest='history_start', help='Start block height for initial history sync')
+    argparser.add_argument('--no-history', action='store_true', dest='no_history', help='Skip initial history sync')
+    return argparser
+
+
 logg = cic_base.log.create()
 argparser = cic_base.argparse.create(script_dir, cic_base.argparse.full_template)
-#argparser = cic_base.argparse.add(argparser, add_traffic_args, 'traffic')
+argparser = cic_base.argparse.add(argparser, add_block_args, 'block')
 args = cic_base.argparse.parse(argparser, logg)

 config = cic_base.config.create(args.c, args, args.env_prefix)

 config.add(args.y, '_KEYSTORE_FILE', True)

 config.add(args.q, '_CELERY_QUEUE', True)
+config.add(args.history_start, 'SYNCER_HISTORY_START', True)
+config.add(args.no_history, '_NO_HISTORY', True)

 cic_base.config.log(config)
@@ -69,9 +77,9 @@ SessionBase.connect(dsn, pool_size=16, debug=config.true('DATABASE_DEBUG'))

 chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))

-#RPCConnection.register_location(config.get('ETH_PROVIDER'), chain_spec, 'default')
 cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))


 def main():
     # connect to celery
     celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
@@ -89,7 +97,7 @@ def main():
         stat = init_chain_stat(rpc, block_start=block_current)
         loop_interval = stat.block_average()

-    logg.debug('starting at block {}'.format(block_offset))
+    logg.debug('current block height {}'.format(block_offset))

     syncers = []
@@ -98,8 +106,13 @@ def main():
     syncer_backends = SQLBackend.resume(chain_spec, block_offset)

     if len(syncer_backends) == 0:
-        logg.info('found no backends to resume')
-        syncer_backends.append(SQLBackend.initial(chain_spec, block_offset))
+        initial_block_start = config.get('SYNCER_HISTORY_START')
+        initial_block_offset = block_offset
+        if config.get('_NO_HISTORY'):
+            initial_block_start = block_offset
+            initial_block_offset += 1
+        syncer_backends.append(SQLBackend.initial(chain_spec, initial_block_offset, start_block_height=initial_block_start))
+        logg.info('found no backends to resume, adding initial sync from history start {} end {}'.format(initial_block_start, initial_block_offset))
     else:
         for syncer_backend in syncer_backends:
             logg.info('resuming sync session {}'.format(syncer_backend))
@@ -155,7 +168,6 @@ def main():
         for cf in callback_filters:
             syncer.add_filter(cf)

-        #r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
         r = syncer.loop(int(loop_interval), rpc)
         sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
@@ -12,7 +12,10 @@ import confini
 import celery

 # local imports
-from cic_eth.api import Api
+from cic_eth.api import (
+        Api,
+        AdminApi,
+        )

 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()
@@ -53,8 +56,13 @@ celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=confi
 queue = args.q

 api = Api(config.get('CIC_CHAIN_SPEC'), queue=queue)
+admin_api = AdminApi(None)

 def main():
+    t = admin_api.registry()
+    registry = t.get()
+    print('Registry address: {}'.format(registry))
+
     t = api.default_token()
     token_info = t.get()
     print('Default token symbol: {}'.format(token_info['symbol']))
|||||||
@@ -10,7 +10,7 @@ version = (
|
|||||||
0,
|
0,
|
||||||
11,
|
11,
|
||||||
0,
|
0,
|
||||||
'beta.11',
|
'beta.13',
|
||||||
)
|
)
|
||||||
|
|
||||||
version_object = semver.VersionInfo(
|
version_object = semver.VersionInfo(
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
|
|||||||
[SYNCER]
|
[SYNCER]
|
||||||
loop_interval =
|
loop_interval =
|
||||||
|
history_start = 0
|
||||||
|
|||||||
@@ -1,2 +1,3 @@
 [SYNCER]
 loop_interval =
+history_start = 0

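For reference, the new history_start key is what the syncer's main() reads above as SYNCER_HISTORY_START: confini flattens an ini [SECTION] key pair into a SECTION_KEY configuration name (the same way [SYNCER] loop_interval surfaces as SYNCER_LOOP_INTERVAL). A minimal sketch; the config root path is illustrative:

    # minimal sketch, assuming a config root containing the [SYNCER] ini file above
    from confini import Config

    config = Config('/usr/local/etc/cic-syncer')  # hypothetical directory
    config.process()
    # [SYNCER] history_start surfaces as SYNCER_HISTORY_START
    history_start = int(config.get('SYNCER_HISTORY_START'))
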
@@ -19,7 +19,7 @@ RUN apt-get update && \
     apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git

 # Copy shared requirements from top of mono-repo
-RUN echo "copying root req file ${root_requirement_file}"
+RUN echo "copying root req file: ${root_requirement_file}"
 #COPY $root_requirement_file .
 #RUN pip install -r $root_requirement_file $pip_extra_index_url_flag
 RUN /usr/local/bin/python -m pip install --upgrade pip

@@ -29,7 +29,7 @@ RUN /usr/local/bin/python -m pip install --upgrade pip
 # python merge_requirements.py | tee merged_requirements.txt
 #RUN cd cic-base && \
 #    pip install $pip_extra_index_url_flag -r ./merged_requirements.txt
-RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a77
+RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9

 COPY cic-eth/scripts/ scripts/
 COPY cic-eth/setup.cfg cic-eth/setup.py ./

@@ -50,8 +50,4 @@ COPY cic-eth/config/ /usr/local/etc/cic-eth/
 COPY cic-eth/cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
 COPY cic-eth/crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/

-RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
-    mkdir -p /usr/local/share/cic/solidity && \
-    cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
-
 COPY util/liveness/health.sh /usr/local/bin/health.sh

@@ -1,25 +1,24 @@
-cic-base==0.1.2b5
+cic-base~=0.1.2b11
 celery==4.4.7
 crypto-dev-signer~=0.4.14b3
 confini~=0.3.6rc3
-cic-eth-registry~=0.5.4a16
-#cic-bancor~=0.0.6
+cic-eth-registry~=0.5.5a4
 redis==3.5.3
 alembic==1.4.2
 websockets==8.1
 requests~=2.24.0
-eth_accounts_index~=0.0.11a9
-erc20-transfer-authorization~=0.3.1a5
+eth_accounts_index~=0.0.11a12
+erc20-transfer-authorization~=0.3.1a6
 uWSGI==2.0.19.1
 semver==2.13.0
 websocket-client==0.57.0
 moolb~=0.1.1b2
-eth-address-index~=0.1.1a9
-chainlib~=0.0.2a20
+eth-address-index~=0.1.1a11
+chainlib~=0.0.3a2
 hexathon~=0.0.1a7
-chainsyncer[sql]~=0.0.2a2
+chainsyncer[sql]~=0.0.2a4
 chainqueue~=0.0.2a2
-pysha3==1.0.2
+sarafu-faucet==0.0.3a3
+erc20-faucet==0.2.1a4
 coincurve==15.0.0
-sarafu-faucet==0.0.2a28
-potaahto~=0.0.1a1
+potaahto~=0.0.1a2

@@ -2,7 +2,7 @@
 import os
 import logging

-# third-party imports
+# external imports
 import pytest
 import alembic
 from alembic.config import Config as AlembicConfig

@@ -1,5 +1,5 @@
 [pgp]
-exports_dir = pgp
+exports_dir = /root/pgp
 privatekey_file = privatekeys.asc
 passphrase = merman
 publickey_trusted_file = publickeys.asc

@@ -2,26 +2,31 @@ FROM node:15.3.0-alpine3.10

 WORKDIR /tmp/src/cic-meta

+RUN apk add --no-cache postgresql bash
+
 COPY cic-meta/package.json \
     ./

 COPY cic-meta/src/ src/
 COPY cic-meta/tests/ tests/
 COPY cic-meta/scripts/ scripts/
-#COPY docker/*.sh /root/
-
-RUN alias tsc=node_modules/typescript/bin/tsc

 RUN npm install

+# see exports_dir gpg.ini
+COPY cic-meta/tests/*.asc /root/pgp/
+RUN alias tsc=node_modules/typescript/bin/tsc


 COPY cic-meta/.config/ /usr/local/etc/cic-meta/
 # COPY cic-meta/scripts/server/initdb/server.postgres.sql /usr/local/share/cic-meta/sql/server.sql

 COPY cic-meta/docker/db.sh ./db.sh
 RUN chmod 755 ./db.sh

-RUN alias ts-node=/tmp/src/cic-meta/node_modules/ts-node/dist/bin.js
-ENTRYPOINT [ "./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts" ]
+#RUN alias ts-node=/tmp/src/cic-meta/node_modules/ts-node/dist/bin.js
+#ENTRYPOINT [ "./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts" ]

-# COPY cic-meta/docker/start_server.sh ./start_server.sh
-# RUN chmod 755 ./start_server.sh
+COPY cic-meta/docker/start_server.sh ./start_server.sh
+RUN chmod 755 ./start_server.sh
+ENTRYPOINT ["sh", "./start_server.sh"]

@@ -1,3 +1,6 @@
 #!/bin/bash
+set -e
+
+PGPASSWORD=$DATABASE_PASSWORD psql -v ON_ERROR_STOP=1 -U $DATABASE_USER -h $DATABASE_HOST -p $DATABASE_PORT -d $DATABASE_NAME -f $SCHEMA_SQL_PATH

-PGPASSWORD=$DATABASE_PASSWORD psql -U $DATABASE_USER -h $DATABASE_HOST -p $DATABASE_PORT -d $DATABASE_NAME /usr/local/share/cic-meta/sql/server.sql

@@ -1,3 +1,9 @@
+#!/bin/bash
+set -euo pipefail
+
+# db migration
 sh ./db.sh

-/usr/local/bin/node /usr/local/bin/cic-meta-server $@
+# /usr/local/bin/node /usr/local/bin/cic-meta-server $@
+# ./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts $@
+npm run start "$@"

@@ -10,7 +10,8 @@
     "build-server": "tsc -d --outDir dist-server scripts/server/*.ts",
     "pack": "node_modules/typescript/bin/tsc -d --outDir dist && webpack",
     "clean": "rm -rf dist",
-    "prepare": "npm run build && npm run build-server"
+    "prepare": "npm run build && npm run build-server",
+    "start": "./node_modules/ts-node/dist/bin.js ./scripts/server/server.ts"
   },
   "dependencies": {
     "@ethereumjs/tx": "^3.0.0-beta.1",

@@ -1,15 +0,0 @@
-#!/bin/bash
-set -e
-
-psql -v ON_ERROR_STOP=1 --username grassroots --dbname cic_meta <<-EOSQL
-    create table if not exists store (
-        id serial primary key not null,
-        owner_fingerprint text not null,
-        hash char(64) not null unique,
-        content text not null
-    );
-
-    create index if not exists idx_fp on store ((lower(owner_fingerprint)));
-EOSQL
-
-

@@ -1,4 +1,4 @@
-create table if not exists cic_meta.store (
+create table if not exists store (
     id serial primary key not null,
     owner_fingerprint text not null,
     hash char(64) not null unique,

@@ -4,7 +4,7 @@ LOCALE_FALLBACK=en
 LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/
 MAX_BODY_LENGTH=1024
 PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I=
-SERVICE_CODE=*483*46#
+SERVICE_CODE=*483*46#,*483*061#,*384*96#

 [phone_number]
 REGION=KE

@@ -20,7 +20,7 @@ def define_account_tx_metadata(user: Account):
     )
     key = generate_metadata_pointer(
         identifier=identifier,
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     account_metadata = get_cached_data(key=key)

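The same 'cic.person' to ':cic.person' change recurs throughout the metadata handlers, state machine logic, and tests below, because the cache key is derived from the identifier together with this type (or salt) string: every writer and reader must agree on it or lookups silently miss. A rough illustration of the effect, with hypothetical hashing that is not the actual cic_types implementation:

    import hashlib

    def hypothetical_pointer(identifier: bytes, cic_type: str) -> str:
        # any change to the type string yields a different cache key
        return hashlib.sha256(identifier + cic_type.encode('utf-8')).hexdigest()

    assert hypothetical_pointer(b'\x00' * 20, 'cic.person') != hypothetical_pointer(b'\x00' * 20, ':cic.person')
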
@@ -80,7 +80,7 @@ def get_cached_operational_balance(blockchain_address: str):
     """
     key = create_cached_data_key(
         identifier=bytes.fromhex(blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
     )
     cached_balance = get_cached_data(key=key)
     if cached_balance:

@@ -24,7 +24,7 @@ def from_wei(value: int) -> float:
     """This function converts values in Wei to a token in the cic network.
     :param value: Value in Wei
     :type value: int
-    :return: SRF equivalent of value in Wei
+    :return: platform's default token equivalent of value in Wei
     :rtype: float
     """
     value = float(value) / 1e+6

@@ -33,9 +33,9 @@ def from_wei(value: int) -> float:

 def to_wei(value: int) -> int:
     """This functions converts values from a token in the cic network to Wei.
-    :param value: Value in SRF
+    :param value: Value in platform's default token
     :type value: int
-    :return: Wei equivalent of value in SRF
+    :return: Wei equivalent of value in platform's default token
     :rtype: int
     """
     return int(value * 1e+6)

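Note the scaling in both functions is a fixed 1e+6 (six decimals), not Ethereum's customary 1e+18. A quick worked example of the round trip using the arithmetic above:

    # worked example with the 1e+6 scaling used above
    assert float(1500000) / 1e+6 == 1.5      # from_wei(1500000) -> 1.5 tokens
    assert int(1.5 * 1e+6) == 1500000        # to_wei(1.5) -> 1500000 wei
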
@@ -38,3 +38,13 @@ class MetadataStoreError(Exception):
     pass


+class SeppukuError(Exception):
+    """Exception base class for all errors that should cause system shutdown"""
+    pass
+
+
+class InitializationError(Exception):
+    """Exception raised when initialization state is insufficient to run component"""
+    pass

@@ -118,7 +118,7 @@ class MetadataRequestsHandler(Metadata):
         metadata_http_error_handler(result=result)
         response_data = result.content
         data = json.loads(response_data.decode('utf-8'))
-        if result.status_code == 200 and self.cic_type == 'cic.person':
+        if result.status_code == 200 and self.cic_type == ':cic.person':
             person = Person()
             deserialized_person = person.deserialize(person_data=json.loads(data))
             data = json.dumps(deserialized_person.serialize())

@@ -9,4 +9,4 @@ from .base import MetadataRequestsHandler
 class PersonMetadata(MetadataRequestsHandler):

     def __init__(self, identifier: bytes):
-        super().__init__(cic_type='cic.person', identifier=identifier)
+        super().__init__(cic_type=':cic.person', identifier=identifier)

@@ -10,4 +10,4 @@ from .base import MetadataRequestsHandler
 class PhonePointerMetadata(MetadataRequestsHandler):

     def __init__(self, identifier: bytes):
-        super().__init__(cic_type='cic.msisdn', identifier=identifier)
+        super().__init__(cic_type=':cic.phone', identifier=identifier)

@@ -48,10 +48,9 @@ def define_response_with_content(headers: list, response: str) -> tuple:
     content_length_header = ('Content-Length', str(content_length))
     # check for content length defaulted to zero in error headers
     for position, header in enumerate(headers):
-        if header[0] == 'Content-Length':
-            headers[position] = content_length_header
-        else:
-            headers.append(content_length_header)
+        if 'Content-Length' in header:
+            headers.pop(position)
+    headers.append(content_length_header)
     return response_bytes, headers

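The old branch appended the Content-Length header once for every non-matching header (the else ran on each loop iteration), which could duplicate it; the rewrite pops any stale Content-Length and appends exactly one after the loop. A minimal standalone check of the corrected behavior:

    headers = [('Content-Type', 'text/plain'), ('Content-Length', '0')]
    content_length_header = ('Content-Length', '42')
    for position, header in enumerate(headers):
        if 'Content-Length' in header:
            headers.pop(position)
    headers.append(content_length_header)
    assert headers == [('Content-Type', 'text/plain'), ('Content-Length', '42')]
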
@@ -7,6 +7,7 @@ from typing import Optional
 # third party imports
 import celery
 from sqlalchemy import desc
+from cic_eth.api import Api
 from tinydb.table import Document

 # local imports
@@ -15,7 +16,7 @@ from cic_ussd.balance import BalanceManager, compute_operational_balance, get_ca
 from cic_ussd.chain import Chain
 from cic_ussd.db.models.account import AccountStatus, Account
 from cic_ussd.db.models.ussd_session import UssdSession
-from cic_ussd.error import MetadataNotFoundError
+from cic_ussd.error import MetadataNotFoundError, SeppukuError
 from cic_ussd.menu.ussd_menu import UssdMenu
 from cic_ussd.metadata import blockchain_address_to_metadata_pointer
 from cic_ussd.phone_number import get_user_by_phone_number
@@ -28,6 +29,38 @@ from cic_types.models.person import generate_metadata_pointer, get_contact_data_
 logg = logging.getLogger(__name__)


+def get_default_token_data():
+    chain_str = Chain.spec.__str__()
+    cic_eth_api = Api(chain_str=chain_str)
+    default_token_request_task = cic_eth_api.default_token()
+    default_token_data = default_token_request_task.get()
+    return default_token_data
+
+
+def retrieve_token_symbol(chain_str: str = Chain.spec.__str__()):
+    """
+    :param chain_str:
+    :type chain_str:
+    :return:
+    :rtype:
+    """
+    cache_key = create_cached_data_key(
+        identifier=chain_str.encode('utf-8'),
+        salt=':cic.default_token_data'
+    )
+    cached_data = get_cached_data(key=cache_key)
+    if cached_data:
+        default_token_data = json.loads(cached_data)
+        return default_token_data.get('symbol')
+    else:
+        logg.warning('Cached default token data not found. Attempting retrieval from default token API')
+        default_token_data = get_default_token_data()
+        if default_token_data:
+            return default_token_data.get('symbol')
+        else:
+            raise SeppukuError(f'Could not retrieve default token for: {chain_str}')
+
+
 def process_pin_authorization(display_key: str, user: Account, **kwargs) -> str:
     """
     This method provides translation for all ussd menu entries that follow the pin authorization pattern.

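retrieve_token_symbol is the read path for the default token data that the ussd server caches at start-up (see the cic_user_ussd_server changes further down): a cache hit returns the symbol directly, a miss falls back to the cic-eth API, and total failure raises SeppukuError. A brief usage sketch; 'SRF' is simply the symbol this changeset stops hardcoding:

    # usage sketch, assuming Chain.spec was set during server initialization
    symbol = retrieve_token_symbol()   # e.g. 'SRF', read from the redis cache
    # raises SeppukuError if neither the cache nor the default token API has data
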
@@ -73,7 +106,9 @@ def process_exit_insufficient_balance(display_key: str, user: Account, ussd_sess
     # compile response data
     user_input = ussd_session.get('user_input').split('*')[-1]
     transaction_amount = to_wei(value=int(user_input))
-    token_symbol = 'SRF'
+
+    # get default data
+    token_symbol = retrieve_token_symbol()

     recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
     recipient = get_user_by_phone_number(phone_number=recipient_phone_number)

@@ -102,7 +137,7 @@ def process_exit_successful_transaction(display_key: str, user: Account, ussd_se
     :rtype: str
     """
     transaction_amount = to_wei(int(ussd_session.get('session_data').get('transaction_amount')))
-    token_symbol = 'SRF'
+    token_symbol = retrieve_token_symbol()
     recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
     recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
     tx_recipient_information = define_account_tx_metadata(user=recipient)

@@ -137,7 +172,7 @@ def process_transaction_pin_authorization(user: Account, display_key: str, ussd_
     tx_recipient_information = define_account_tx_metadata(user=recipient)
     tx_sender_information = define_account_tx_metadata(user=user)

-    token_symbol = 'SRF'
+    token_symbol = retrieve_token_symbol()
     user_input = ussd_session.get('session_data').get('transaction_amount')
     transaction_amount = to_wei(value=int(user_input))
     logg.debug('Requires integration to determine user tokens.')

@@ -168,18 +203,18 @@ def process_account_balances(user: Account, display_key: str, ussd_session: dict
     logg.debug('Requires call to retrieve tax and bonus amounts')
     tax = ''
     bonus = ''
+    token_symbol = retrieve_token_symbol()
     return translation_for(
         key=display_key,
         preferred_language=user.preferred_language,
         operational_balance=operational_balance,
         tax=tax,
         bonus=bonus,
-        token_symbol='SRF'
+        token_symbol=token_symbol
     )


-def format_transactions(transactions: list, preferred_language: str):
+def format_transactions(transactions: list, preferred_language: str, token_symbol: str):

     formatted_transactions = ''
     if len(transactions) > 0:

@@ -190,7 +225,7 @@ def format_transactions(transactions: list, preferred_language: str):
         timestamp = transaction.get('timestamp')
         action_tag = transaction.get('action_tag')
         direction = transaction.get('direction')
-        token_symbol = 'SRF'
+        token_symbol = token_symbol

         if action_tag == 'SENT' or action_tag == 'ULITUMA':
             formatted_transactions += f'{action_tag} {value} {token_symbol} {direction} {recipient_phone_number} {timestamp}.\n'

@@ -214,7 +249,7 @@ def process_display_user_metadata(user: Account, display_key: str):
     """
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     user_metadata = get_cached_data(key)
     if user_metadata:

@@ -251,9 +286,11 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
     """
     # retrieve cached statement
     identifier = blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address)
-    key = create_cached_data_key(identifier=identifier, salt='cic.statement')
+    key = create_cached_data_key(identifier=identifier, salt=':cic.statement')
    transactions = get_cached_data(key=key)

+    token_symbol = retrieve_token_symbol()
+
     first_transaction_set = []
     middle_transaction_set = []
     last_transaction_set = []

@@ -277,7 +314,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
             preferred_language=user.preferred_language,
             first_transaction_set=format_transactions(
                 transactions=first_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
             )
         )
     elif display_key == 'ussd.kenya.middle_transaction_set':

@@ -286,7 +324,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
             preferred_language=user.preferred_language,
             middle_transaction_set=format_transactions(
                 transactions=middle_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
             )
         )

@@ -296,7 +335,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
             preferred_language=user.preferred_language,
             last_transaction_set=format_transactions(
                 transactions=last_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
             )
         )

@@ -312,18 +352,19 @@ def process_start_menu(display_key: str, user: Account):
     :return: Corresponding translation text response
     :rtype: str
     """
+    token_symbol = retrieve_token_symbol()
     chain_str = Chain.spec.__str__()
     blockchain_address = user.blockchain_address
     balance_manager = BalanceManager(address=blockchain_address,
                                      chain_str=chain_str,
-                                     token_symbol='SRF')
+                                     token_symbol=token_symbol)

     # get balances synchronously for display on start menu
     balances_data = balance_manager.get_balances()

     key = create_cached_data_key(
         identifier=bytes.fromhex(blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
     )
     cache_data(key=key, data=json.dumps(balances_data))

@@ -340,9 +381,6 @@ def process_start_menu(display_key: str, user: Account):
     # retrieve and cache account's statement
     retrieve_account_statement(blockchain_address=blockchain_address)

-    # TODO [Philip]: figure out how to get token symbol from a metadata layer of sorts.
-    token_symbol = 'SRF'
-
     return translation_for(
         key=display_key,
         preferred_language=user.preferred_language,

@@ -375,6 +413,13 @@ def process_request(user_input: str, user: Account, ussd_session: Optional[dict]
     :return: A ussd menu's corresponding text value.
     :rtype: Document
     """
+    # retrieve metadata before any transition
+    key = generate_metadata_pointer(
+        identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
+        cic_type=':cic.person'
+    )
+    person_metadata = get_cached_data(key=key)
+
     if ussd_session:
         if user_input == "0":
             return UssdMenu.parent_menu(menu_name=ussd_session.get('state'))

@@ -385,12 +430,6 @@ def process_request(user_input: str, user: Account, ussd_session: Optional[dict]
         if user.has_valid_pin():
             last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number)

-            key = generate_metadata_pointer(
-                identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-                cic_type='cic.person'
-            )
-            person_metadata = get_cached_data(key=key)
-
             if last_ussd_session:
                 # get last state
                 last_state = last_ussd_session.state

@@ -13,7 +13,7 @@ import argparse
 import logging
 import urllib
 from xdg.BaseDirectory import xdg_config_home
-from urllib import request
+from urllib import parse, request

 # third-party imports
 from confini import Config

@@ -92,9 +92,9 @@ def main():
     data['text'] = user_input

     req = urllib.request.Request(url)
-    data_str = json.dumps(data)
-    data_bytes = data_str.encode('utf-8')
-    req.add_header('Content-Type', 'application/json')
+    urlencoded_data = parse.urlencode(data)
+    data_bytes = urlencoded_data.encode('utf-8')
+    req.add_header('Content-Type', 'application/x-www-form-urlencoded')
     req.data = data_bytes
     response = urllib.request.urlopen(req)
     response_data = response.read().decode('utf-8')

apps/cic-ussd/cic_ussd/runnable/daemons/cic_user_server.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+"""
+This module handles requests originating from CICADA or any other management client for custodial wallets, processing
+requests offering control of user account states to a staff behind the client.
+"""
+
+# standard imports
+import logging
+from urllib.parse import quote_plus
+
+# third-party imports
+from confini import Config
+
+# local imports
+from cic_ussd.db import dsn_from_config
+from cic_ussd.db.models.base import SessionBase
+from cic_ussd.operations import define_response_with_content
+from cic_ussd.requests import (get_request_endpoint,
+                               get_query_parameters,
+                               process_pin_reset_requests,
+                               process_locked_accounts_requests)
+from cic_ussd.runnable.server_base import exportable_parser, logg
+args = exportable_parser.parse_args()
+
+# define log levels
+if args.vv:
+    logging.getLogger().setLevel(logging.DEBUG)
+elif args.v:
+    logging.getLogger().setLevel(logging.INFO)
+
+# parse config
+config = Config(config_dir=args.c, env_prefix=args.env_prefix)
+config.process()
+config.censor('PASSWORD', 'DATABASE')
+logg.debug('config loaded from {}:\n{}'.format(args.c, config))
+
+# set up db
+data_source_name = dsn_from_config(config)
+SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
+# create session for the life time of http request
+SessionBase.session = SessionBase.create_session()
+
+
+# handle requests from CICADA
+def application(env, start_response):
+    """Loads python code for application to be accessible over web server
+    :param env: Object containing server and request information
+    :type env: dict
+    :param start_response: Callable to define responses.
+    :type start_response: any
+    :return: a list containing a bytes representation of the response object
+    :rtype: list
+    """
+
+    # define headers
+    errors_headers = [('Content-Type', 'text/plain'), ('Content-Length', '0')]
+    headers = [('Content-Type', 'text/plain')]
+
+    if get_request_endpoint(env) == '/pin':
+        phone_number = get_query_parameters(env=env, query_name='phoneNumber')
+        phone_number = quote_plus(phone_number)
+        response, message = process_pin_reset_requests(env=env, phone_number=phone_number)
+        response_bytes, headers = define_response_with_content(headers=errors_headers, response=response)
+        SessionBase.session.close()
+        start_response(message, headers)
+        return [response_bytes]
+
+    # handle requests for locked accounts
+    response, message = process_locked_accounts_requests(env=env)
+    response_bytes, headers = define_response_with_content(headers=headers, response=response)
+    start_response(message, headers)
+    SessionBase.session.close()
+    return [response_bytes]

@@ -1,25 +1,24 @@
-"""Functions defining WSGI interaction with external http requests
-Defines an application function essential for the uWSGI python loader to run th python application code.
+"""This module handles requests originating from the ussd service provider.
 """

 # standard imports
-import argparse
-import celery
-import i18n
 import json
 import logging
-import os
-import redis
+from urllib.parse import parse_qs

 # third-party imports
-from confini import Config
+import celery
+import i18n
+import redis
 from chainlib.chain import ChainSpec
-from urllib.parse import quote_plus
+from confini import Config

 # local imports
 from cic_ussd.chain import Chain
 from cic_ussd.db import dsn_from_config
 from cic_ussd.db.models.base import SessionBase
 from cic_ussd.encoder import PasswordEncoder
+from cic_ussd.error import InitializationError
 from cic_ussd.files.local_files import create_local_file_data_stores, json_file_parser
 from cic_ussd.menu.ussd_menu import UssdMenu
 from cic_ussd.metadata.signer import Signer
@@ -28,34 +27,16 @@ from cic_ussd.operations import (define_response_with_content,
                                  process_menu_interaction_requests,
                                  define_multilingual_responses)
 from cic_ussd.phone_number import process_phone_number
-from cic_ussd.redis import InMemoryStore
+from cic_ussd.processor import get_default_token_data
+from cic_ussd.redis import cache_data, create_cached_data_key, InMemoryStore
 from cic_ussd.requests import (get_request_endpoint,
-                               get_request_method,
-                               get_query_parameters,
-                               process_locked_accounts_requests,
-                               process_pin_reset_requests)
+                               get_request_method)
+from cic_ussd.runnable.server_base import exportable_parser, logg
 from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
 from cic_ussd.state_machine import UssdStateMachine
-from cic_ussd.validator import check_ip, check_request_content_length, check_service_code, validate_phone_number, \
-    validate_presence
+from cic_ussd.validator import check_ip, check_request_content_length, validate_phone_number, validate_presence

-logging.basicConfig(level=logging.WARNING)
-logg = logging.getLogger()
-
-config_directory = '/usr/local/etc/cic-ussd/'
-
-# define arguments
-arg_parser = argparse.ArgumentParser()
-arg_parser.add_argument('-c', type=str, default=config_directory, help='config directory.')
-arg_parser.add_argument('-q', type=str, default='cic-ussd', help='queue name for worker tasks')
-arg_parser.add_argument('-v', action='store_true', help='be verbose')
-arg_parser.add_argument('-vv', action='store_true', help='be more verbose')
-arg_parser.add_argument('--env-prefix',
-                        default=os.environ.get('CONFINI_ENV_PREFIX'),
-                        dest='env_prefix',
-                        type=str,
-                        help='environment prefix for variables to overwrite configuration')
-args = arg_parser.parse_args()
+args = exportable_parser.parse_args()

 # define log levels
 if args.vv:

@@ -69,7 +50,14 @@ config.process()
 config.censor('PASSWORD', 'DATABASE')
 logg.debug('config loaded from {}:\n{}'.format(args.c, config))

-# initialize elements
+# set up db
+data_source_name = dsn_from_config(config)
+SessionBase.connect(data_source_name,
+                    pool_size=int(config.get('DATABASE_POOL_SIZE')),
+                    debug=config.true('DATABASE_DEBUG'))
+# create session for the life time of http request
+SessionBase.session = SessionBase.create_session()
+
 # set up translations
 i18n.load_path.append(config.get('APP_LOCALE_PATH'))
 i18n.set('fallback', config.get('APP_LOCALE_FALLBACK'))

@@ -82,12 +70,6 @@ ussd_menu_db = create_local_file_data_stores(file_location=config.get('USSD_MENU
                                              table_name='ussd_menu')
 UssdMenu.ussd_menu_db = ussd_menu_db

-# set up db
-data_source_name = dsn_from_config(config)
-SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
-# create session for the life time of http request
-SessionBase.session = SessionBase.create_session()
-
 # define universal redis cache access
 InMemoryStore.cache = redis.StrictRedis(host=config.get('REDIS_HOSTNAME'),
                                         port=config.get('REDIS_PORT'),

@@ -127,6 +109,23 @@ Chain.spec = chain_spec
 UssdStateMachine.states = states
 UssdStateMachine.transitions = transitions

+# retrieve default token data
+default_token_data = get_default_token_data()
+chain_str = Chain.spec.__str__()
+
+# cache default token for re-usability
+if default_token_data:
+    cache_key = create_cached_data_key(
+        identifier=chain_str.encode('utf-8'),
+        salt=':cic.default_token_data'
+    )
+    cache_data(key=cache_key, data=json.dumps(default_token_data))
+else:
+    raise InitializationError(f'Default token data for: {chain_str} not found.')
+
+
+valid_service_codes = config.get('APP_SERVICE_CODE').split(",")
+

 def application(env, start_response):
     """Loads python code for application to be accessible over web server

@@ -134,6 +133,8 @@ def application(env, start_response):
     :type env: dict
     :param start_response: Callable to define responses.
     :type start_response: any
+    :return: a list containing a bytes representation of the response object
+    :rtype: list
     """
     # define headers
     errors_headers = [('Content-Type', 'text/plain'), ('Content-Length', '0')]

@@ -141,13 +142,27 @@ def application(env, start_response):

     if get_request_method(env=env) == 'POST' and get_request_endpoint(env=env) == '/':

-        # get post data
-        post_data = json.load(env.get('wsgi.input'))
+        if env.get('CONTENT_TYPE') != 'application/x-www-form-urlencoded':
+            start_response('405 Play by the rules', errors_headers)
+            return []

-        service_code = post_data.get('serviceCode')
-        phone_number = post_data.get('phoneNumber')
-        external_session_id = post_data.get('sessionId')
-        user_input = post_data.get('text')
+        post_data = env.get('wsgi.input').read()
+        post_data = post_data.decode('utf-8')
+
+        try:
+            post_data = parse_qs(post_data)
+        except TypeError:
+            start_response('400 Size matters', errors_headers)
+            return []
+
+        service_code = post_data.get('serviceCode')[0]
+        phone_number = post_data.get('phoneNumber')[0]
+        external_session_id = post_data.get('sessionId')[0]
+
+        try:
+            user_input = post_data.get('text')[0]
+        except TypeError:
+            user_input = ""

         # add validation for phone number
         if phone_number:

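For context on the unpacking above: parse_qs maps every form field to a list of values, hence the [0] indexing, and a missing 'text' field makes .get() return None, so subscripting raises TypeError (not KeyError), which is exactly what the try/except catches. A short illustration with made-up field values:

    from urllib.parse import parse_qs

    post_data = parse_qs('serviceCode=*483*46%23&phoneNumber=%2B254700000000&sessionId=abc123')
    # {'serviceCode': ['*483*46#'], 'phoneNumber': ['+254700000000'], 'sessionId': ['abc123']}
    service_code = post_data.get('serviceCode')[0]   # '*483*46#'
    missing = post_data.get('text')                  # None -> subscripting raises TypeError
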
@@ -164,14 +179,14 @@ def application(env, start_response):
         return []

     # validate service code
-    if not check_service_code(code=service_code, config=config):
+    if service_code not in valid_service_codes:
         response = define_multilingual_responses(
             key='ussd.kenya.invalid_service_code',
             locales=['en', 'sw'],
             prefix='END',
-            valid_service_code=config.get('APP_SERVICE_CODE'))
-        response_bytes, headers = define_response_with_content(headers=errors_headers, response=response)
-        start_response('400 Invalid service code', headers)
+            valid_service_code=valid_service_codes[0])
+        response_bytes, headers = define_response_with_content(headers=headers, response=response)
+        start_response('200 OK', headers)
         return [response_bytes]

     # validate phone number

@@ -195,19 +210,7 @@ def application(env, start_response):
         SessionBase.session.close()
         return [response_bytes]

-    # handle pin requests
-    if get_request_endpoint(env) == '/pin':
-        phone_number = get_query_parameters(env=env, query_name='phoneNumber')
-        phone_number = quote_plus(phone_number)
-        response, message = process_pin_reset_requests(env=env, phone_number=phone_number)
-        response_bytes, headers = define_response_with_content(headers=errors_headers, response=response)
-        SessionBase.session.close()
-        start_response(message, headers)
-        return [response_bytes]
-
-    # handle requests for locked accounts
-    response, message = process_locked_accounts_requests(env=env)
-    response_bytes, headers = define_response_with_content(headers=headers, response=response)
-    start_response(message, headers)
-    SessionBase.session.close()
-    return [response_bytes]
+    else:
+        start_response('405 Play by the rules', errors_headers)
+        return []

apps/cic-ussd/cic_ussd/runnable/server_base.py (new file, 38 lines)
@@ -0,0 +1,38 @@
+"""This module handles generic wsgi server configurations that can then be subsumed by different server flavors for the
+cic-ussd component.
+"""
+
+# standard imports
+import logging
+import os
+from argparse import ArgumentParser
+
+# third-party imports
+
+# local imports
+
+# define a logging system
+logging.basicConfig(level=logging.WARNING)
+logg = logging.getLogger()
+
+# define default config directory as would be defined in docker
+default_config_dir = '/usr/local/etc/cic-ussd/'
+
+# define args parser
+arg_parser = ArgumentParser(description='CLI for handling cic-ussd server applications.')
+arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
+arg_parser.add_argument('-v', help='be verbose', action='store_true')
+arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
+arg_parser.add_argument('-q', type=str, default='cic-ussd', help='queue name for worker tasks')
+arg_parser.add_argument('--env-prefix',
+                        default=os.environ.get('CONFINI_ENV_PREFIX'),
+                        dest='env_prefix',
+                        type=str,
+                        help='environment prefix for variables to overwrite configuration')
+exportable_parser = arg_parser

@@ -12,6 +12,7 @@ from cic_ussd.chain import Chain
 from cic_ussd.db.models.account import AccountStatus, Account
 from cic_ussd.operations import save_to_in_memory_ussd_session_data
 from cic_ussd.phone_number import get_user_by_phone_number
+from cic_ussd.processor import retrieve_token_symbol
 from cic_ussd.redis import create_cached_data_key, get_cached_data
 from cic_ussd.transactions import OutgoingTransactionProcessor

@@ -64,7 +65,7 @@ def has_sufficient_balance(state_machine_data: Tuple[str, dict, Account]) -> boo
     # get cached balance
     key = create_cached_data_key(
         identifier=bytes.fromhex(user.blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
     )
     cached_balance = get_cached_data(key=key)
     operational_balance = compute_operational_balance(balances=json.loads(cached_balance))

@@ -124,14 +125,18 @@ def process_transaction_request(state_machine_data: Tuple[str, dict, Account]):
     """
     user_input, ussd_session, user = state_machine_data

+    # retrieve token symbol
+    chain_str = Chain.spec.__str__()
+
     # get user from phone number
     recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
     recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
     to_address = recipient.blockchain_address
     from_address = user.blockchain_address
     amount = int(ussd_session.get('session_data').get('transaction_amount'))
-    chain_str = Chain.spec.__str__()
+    token_symbol = retrieve_token_symbol(chain_str=chain_str)

     outgoing_tx_processor = OutgoingTransactionProcessor(chain_str=chain_str,
                                                          from_address=from_address,
                                                          to_address=to_address)
-    outgoing_tx_processor.process_outgoing_transfer_transaction(amount=amount)
+    outgoing_tx_processor.process_outgoing_transfer_transaction(amount=amount, token_symbol=token_symbol)

@@ -176,7 +176,7 @@ def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, Account]):
     blockchain_address = user.blockchain_address
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     user_metadata = get_cached_data(key=key)

@@ -23,7 +23,7 @@ def has_cached_user_metadata(state_machine_data: Tuple[str, dict, Account]):
     # check for user metadata in cache
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     user_metadata = get_cached_data(key=key)
     return user_metadata is not None

@@ -136,7 +136,7 @@ def process_balances_callback(result: list, param: str, status_code: int):
         blockchain_address = balances_data.get('address')
         key = create_cached_data_key(
             identifier=bytes.fromhex(blockchain_address[2:]),
-            salt='cic.balances_data'
+            salt=':cic.balances_data'
         )
         cache_data(key=key, data=json.dumps(balances_data))
     else:

@@ -226,7 +226,7 @@ def process_statement_callback(result, param: str, status_code: int):

         # cache account statement
         identifier = bytes.fromhex(param[2:])
-        key = create_cached_data_key(identifier=identifier, salt='cic.statement')
+        key = create_cached_data_key(identifier=identifier, salt=':cic.statement')
         data = json.dumps(processed_transactions)

         # cache statement data

@@ -120,7 +120,7 @@ class OutgoingTransactionProcessor:
         self.from_address = from_address
         self.to_address = to_address

-    def process_outgoing_transfer_transaction(self, amount: int, token_symbol='SRF'):
+    def process_outgoing_transfer_transaction(self, amount: int, token_symbol: str):
         """This function initiates standard transfers between one account to another
         :param amount: The amount of tokens to be sent
         :type amount: int

@@ -45,19 +45,6 @@ def check_request_content_length(config: Config, env: dict):
                                  config.get('APP_MAX_BODY_LENGTH'))


-def check_service_code(code: str, config: Config):
-    """Checks whether provided code matches expected service code
-    :param config: A dictionary object containing configuration values
-    :type config: Config
-    :param code: Service code passed over request
-    :type code: str
-
-    :return: Service code validity
-    :rtype: boolean
-    """
-    return code == config.get('APP_SERVICE_CODE')
-
-
 def check_known_user(phone: str):
     """
     This method attempts to ascertain whether the user already exists and is known to the system.

@@ -1,7 +1,7 @@
 # standard imports
 import semver

-version = (0, 3, 0, 'alpha.9')
+version = (0, 3, 0, 'alpha.10')

 version_object = semver.VersionInfo(
     major=version[0],

@@ -38,8 +38,9 @@ COPY cic-ussd/transitions/ cic-ussd/transitions/
 COPY cic-ussd/var/ cic-ussd/var/

 COPY cic-ussd/docker/db.sh \
-     cic-ussd/docker/start_tasker.sh \
-     cic-ussd/docker/start_uwsgi.sh \
+     cic-ussd/docker/start_cic_user_tasker.sh \
+     cic-ussd/docker/start_cic_user_ussd_server.sh\
+     cic-ussd/docker/start_cic_user_server.sh\
      /root/

 RUN chmod +x /root/*.sh

apps/cic-ussd/docker/start_cic_user_server.sh (new file, 7 lines)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+. /root/db.sh
+
+user_server_port=${SERVER_PORT:-9500}
+
+/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/daemons/cic_user_server.py --http :"$user_server_port" --pyargv "$@"

apps/cic-ussd/docker/start_cic_user_tasker.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+. /root/db.sh
+
+/usr/local/bin/cic-user-tasker "$@"

apps/cic-ussd/docker/start_cic_user_ussd_server.sh (new file)
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+. /root/db.sh
+
+user_ussd_server_port=${SERVER_PORT:-9000}
+
+/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/daemons/cic_user_ussd_server.py --http :"$user_ussd_server_port" --pyargv "$@"
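Both uwsgi wrappers above pick their port via shell default expansion, so a SERVER_PORT variable in the container environment overrides the baked-in default. A hedged Python analogue of that lookup, for readers less familiar with the ${VAR:-default} idiom:

    import os

    # mirrors ${SERVER_PORT:-9000}: use the env var if set, else the default
    port = int(os.environ.get('SERVER_PORT', '9000'))
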
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-. /root/db.sh
-
-/usr/local/bin/cic-ussd-tasker $@
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-. /root/db.sh
-
-server_port=${SERVER_PORT:-9000}
-
-/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/server.py --http :$server_port --pyargv "$@"
@@ -35,6 +35,7 @@ packages =
     cic_ussd.menu
     cic_ussd.metadata
     cic_ussd.runnable
+    cic_ussd.runnable.daemons
     cic_ussd.session
     cic_ussd.state_machine
     cic_ussd.state_machine.logic
@@ -44,5 +45,5 @@ scripts =

 [options.entry_points]
 console_scripts =
-    cic-ussd-tasker = cic_ussd.runnable.tasker:main
+    cic-user-tasker = cic_ussd.runnable.daemons.cic_user_tasker:main
     cic-ussd-client = cic_ussd.runnable.client:main
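For context on the entry point rename above: setuptools turns each console_scripts line into an executable that imports the named module and calls the named function. A minimal sketch of the module shape the new cic-user-tasker entry expects; the body is assumed, not taken from the diff:

    # cic_ussd/runnable/daemons/cic_user_tasker.py (sketch)
    def main():
        # start the worker loop that consumes the user/ussd task queue
        pass

    if __name__ == '__main__':
        main()
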
@@ -105,7 +105,7 @@ def test_get_user_metadata(caplog,
     assert 'Get latest data status: 200' in caplog.text
     key = generate_metadata_pointer(
         identifier=identifier,
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     cached_user_metadata = get_cached_data(key=key)
     assert cached_user_metadata
@@ -36,7 +36,7 @@ def test_has_cached_user_metadata(create_in_db_ussd_session,
     user = create_activated_user
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     cache_data(key=key, data=json.dumps(person_metadata))
     result = has_cached_user_metadata(state_machine_data=state_machine_data)
apps/cic-ussd/tests/fixtures/user.py (vendored)
@@ -115,6 +115,6 @@ def cached_user_metadata(create_activated_user, init_redis_cache, person_metadata
     user_metadata = json.dumps(person_metadata)
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     cache_data(key=key, data=user_metadata)
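All three ':cic.person' changes above feed the same helper. A hedged sketch of the key construction the tests and fixture exercise; the identifier bytes are hypothetical, and the import path is taken from the verify script later in this changeset:

    from cic_types.models.person import generate_metadata_pointer

    identifier = bytes.fromhex('ee' * 20)  # hypothetical 20-byte address identifier
    key = generate_metadata_pointer(identifier=identifier, cic_type=':cic.person')
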
@@ -158,6 +158,8 @@ en:
     Your Sarafu-Network balances is: %{token_balance}
     00. Back
     99. Exit
+  invalid_service_code: |-
+    Please dial %{valid_service_code} to access Sarafu Network
   help: |-
     CON For assistance call %{support_phone}
     00. Back
@@ -158,6 +158,8 @@ sw:
     Akaunti yako ya Sarafu-Network ina salio ifuatayo: %{token_balance}
     00. Nyuma
     99. Ondoka
+  invalid_service_code: |-
+    Bonyeza %{valid_service_code} kutumia mtandao wa Sarafu
   help: |-
     CON Kwa usaidizi piga simu %{support_phone}
     0. Nyuma
@@ -31,15 +31,6 @@ RUN echo Install confini schema files && \
     git checkout $cic_config_commit && \
     cp -v *.ini $CONFINI_DIR

-ARG cic_contracts_commit=698ef3a30fde8d7f2c498f1208fb0ff45d665501
-ARG cic_contracts_url=https://gitlab.com/grassrootseconomics/cic-contracts.git/
-RUN echo Install ABI collection for solidity interfaces used across all components && \
-    git clone --depth 1 $cic_contracts_url cic-contracts && \
-    cd cic-contracts && \
-    git fetch --depth 1 origin $cic_contracts_commit && \
-    git checkout $cic_contracts_commit && \
-    make install

 # Install nvm with node and npm
 # https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
 ENV NVM_DIR /root/.nvm
@@ -56,54 +47,61 @@ RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh |
 ENV NODE_PATH $NVM_DIR/versions/node//v$NODE_VERSION/lib/node_modules
 ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH

-RUN useradd --create-home grassroots
-WORKDIR /home/grassroots
-USER grassroots
+#RUN useradd --create-home grassroots
+# WORKDIR /home/grassroots
+# USER grassroots

+ARG pip_extra_args=""
+ARG pip_index_url=https://pypi.org/simple
 ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433
-ARG cic_base_version=0.1.2a79
-ARG cic_eth_version=0.11.0b8+build.c2286e5c
-ARG sarafu_faucet_version=0.0.2a28
-ARG sarafu_token_version==0.0.1a6
-ARG cic_contracts_version=0.0.2a2
-RUN pip install --user --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version \
+ARG cic_base_version=0.1.2b11
+ARG cic_eth_version=0.11.0b14
+ARG sarafu_token_version=0.0.1a8
+ARG sarafu_faucet_version=0.0.3a3
+RUN pip install --index-url https://pypi.org/simple --extra-index-url $pip_extra_index_url \
+    cic-base[full_graph]==$cic_base_version \
     cic-eth==$cic_eth_version \
-    cic-contracts==$cic_contracts_version \
     sarafu-faucet==$sarafu_faucet_version \
-    sarafu-token==$sarafu_token_version
+    sarafu-token==$sarafu_token_version \
+    cic-eth==$cic_eth_version

+# -------------- begin runtime container ----------------
 FROM python:3.8.6-slim-buster as runtime-image

 RUN apt-get update
 RUN apt-get install -y --no-install-recommends gnupg libpq-dev
-RUN apt-get install -y --no-install-recommends jq
+RUN apt-get install -y jq bash iputils-ping socat

 COPY --from=compile-image /usr/local/bin/ /usr/local/bin/
 COPY --from=compile-image /usr/local/etc/cic/ /usr/local/etc/cic/
+COPY --from=compile-image /usr/local/lib/python3.8/site-packages/ \
+    /usr/local/lib/python3.8/site-packages/

-RUN useradd --create-home grassroots
-WORKDIR /home/grassroots
-# COPY python dependencies to user dir
-COPY --from=compile-image /home/grassroots/.local .local
-ENV PATH=/home/grassroots/.local/bin:$PATH
+ENV EXTRA_INDEX_URL https://pip.grassrootseconomics.net:8433
+# RUN useradd -u 1001 --create-home grassroots
+# RUN adduser grassroots sudo && \
+#     echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
+# WORKDIR /home/grassroots

 COPY contract-migration/testdata/pgp testdata/pgp
 COPY contract-migration/sarafu_declaration.json sarafu_declaration.json
 COPY contract-migration/keystore keystore
 COPY contract-migration/envlist .
+COPY contract-migration/scripts scripts/

-# RUN chown grassroots:grassroots .local/

-RUN mkdir -p /tmp/cic/config
-RUN chown grassroots:grassroots /tmp/cic/config
 # A shared output dir for environment configs
+RUN mkdir -p /tmp/cic/config
+# RUN chown grassroots:grassroots /tmp/cic/config
 RUN chmod a+rwx /tmp/cic/config

 COPY contract-migration/*.sh ./
-RUN chown grassroots:grassroots -R .
+# RUN chown grassroots:grassroots -R .
 RUN chmod gu+x *.sh

+# we copied these from the root build container.
+# this is dumb though...I guess the compile image should have the same user
+# RUN chown grassroots:grassroots -R /usr/local/lib/python3.8/site-packages/

-USER grassroots
+# USER grassroots

 ENTRYPOINT [ ]
@@ -27,16 +27,17 @@ from chainlib.eth.block import (
 )
 from chainlib.hash import keccak256_string_to_hex
 from chainlib.eth.address import to_checksum_address
-from chainlib.eth.erc20 import ERC20
 from chainlib.eth.gas import OverrideGasOracle
 from chainlib.eth.nonce import RPCNonceOracle
 from chainlib.eth.tx import TxFactory
 from chainlib.jsonrpc import jsonrpc_template
 from chainlib.eth.error import EthException
 from chainlib.chain import ChainSpec
+from chainlib.eth.constant import ZERO_ADDRESS
 from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
 from crypto_dev_signer.keystore.dict import DictKeystore
 from cic_types.models.person import Person
+from eth_erc20 import ERC20


 logging.basicConfig(level=logging.WARNING)
@@ -51,7 +52,7 @@ argparser.add_argument('-c', type=str, default=config_dir, help='config root to
 argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
 argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
 argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
-argparser.add_argument('--token-symbol', default='SRF', type=str, dest='token_symbol', help='Token symbol to use for trnsactions')
+argparser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol', help='Token symbol to use for transactions')
 argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)')
 argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
 argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
@@ -252,6 +253,10 @@ def main():
     except ValueError as e:
         logg.critical('lookup failed for token {}: {}'.format(token_symbol, e))
         sys.exit(1)

+    if sarafu_token_address == ZERO_ADDRESS:
+        raise KeyError('token address for symbol {} is zero'.format(token_symbol))
+
     logg.info('found token address {}'.format(sarafu_token_address))

     syncer_backend = MemBackend(chain_str, 0)
@@ -27,7 +27,6 @@ from chainlib.eth.block import (
 )
 from chainlib.hash import keccak256_string_to_hex
 from chainlib.eth.address import to_checksum_address
-from chainlib.eth.erc20 import ERC20
 from chainlib.eth.gas import OverrideGasOracle
 from chainlib.eth.nonce import RPCNonceOracle
 from chainlib.eth.tx import TxFactory
@@ -37,6 +36,7 @@ from chainlib.chain import ChainSpec
 from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
 from crypto_dev_signer.keystore.dict import DictKeystore
 from cic_types.models.person import Person
+from eth_erc20 import ERC20


 logging.basicConfig(level=logging.WARNING)
@@ -1,5 +1,5 @@
-cic-base[full_graph]==0.1.2b2
-sarafu-faucet==0.0.2a28
-cic-eth==0.11.0b10
-cic-types==0.1.0a10
+cic-base[full_graph]==0.1.2b9
+sarafu-faucet==0.0.3a3
+cic-eth==0.11.0b13
+cic-types==0.1.0a11
 crypto-dev-signer==0.4.14b3
@@ -34,7 +34,6 @@ from chainlib.eth.block import (
 )
 from chainlib.hash import keccak256_string_to_hex
 from chainlib.eth.address import to_checksum_address
-from chainlib.eth.erc20 import ERC20
 from chainlib.eth.gas import (
     OverrideGasOracle,
     balance,
@@ -46,7 +45,8 @@ from cic_types.models.person import (
     Person,
     generate_metadata_pointer,
 )
-from erc20_single_shot_faucet import SingleShotFaucet
+from erc20_faucet import Faucet
+from eth_erc20 import ERC20

 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()
@@ -224,7 +224,7 @@ class Verifier:
         self.api = cic_eth_api
         self.data_dir = data_dir
         self.exit_on_error = exit_on_error
-        self.faucet_tx_factory = SingleShotFaucet(chain_spec, gas_oracle=gas_oracle)
+        self.faucet_tx_factory = Faucet(chain_spec, gas_oracle=gas_oracle)

         verifymethods = []
         for k in dir(self):
@@ -99,7 +99,7 @@ export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS

 # Transfer tokens to gifter address
 >&2 echo "transfer sarafu tokens to token gifter address"
->&2 eth-transfer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_RESERVE_ADDRESS -w $debug $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${token_amount:0:-1}
+>&2 erc20-transfer -y $keystore_file -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER --token-address $DEV_RESERVE_ADDRESS -w $debug $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${token_amount:0:-1}

 #echo -n 0 > $init_level_file
@@ -3,7 +3,7 @@ image:
   entrypoint: [""]

 variables:
-  KANIKO_CACHE_ARGS: "--cache=true --cache-copy-layers=true --cache-ttl=24h"
+  KANIKO_CACHE_ARGS: "--cache=false --cache-copy-layers=true --cache-ttl=24h"
   CONTEXT: $CI_PROJECT_DIR/apps/

 .py_build_merge_request:
@@ -53,8 +53,6 @@ services:
     command: [ "-c", "max_connections=200" ]
     volumes:
       - ./scripts/initdb/create_db.sql:/docker-entrypoint-initdb.d/1-create_all_db.sql
-      - ./apps/cic-meta/scripts/initdb/postgresql.sh:/docker-entrypoint-initdb.d/2-init-cic-meta.sh
-      - ./apps/cic-cache/db/psycopg2/db.sql:/docker-entrypoint-initdb.d/3-init-cic-meta.sql
       - postgres-db:/var/lib/postgresql/data

   redis:
@@ -78,6 +76,9 @@ services:

   contract-migration:
     build:
+      args:
+        pip_index_url: ${PIP_DEFAULT_INDEX_URL:-https://pypi.org/simple}
+        pip_extra_args: $PIP_EXTRA_ARGS
       context: apps/
       dockerfile: contract-migration/docker/Dockerfile
     # image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/contract-migration:latest
@@ -477,6 +478,7 @@ services:
       PGP_PUBLICKEY_TRUSTED_FILE: publickeys.asc
       PGP_PUBLICKEY_ACTIVE_FILE: publickeys.asc
       PGP_PUBLICKEY_ENCRYPT_FILE: publickeys.asc
+      SCHEMA_SQL_PATH: scripts/initdb/server.postgres.sql
     ports:
       - ${HTTP_PORT_CIC_META:-63380}:8000
     depends_on:
@@ -488,8 +490,7 @@ services:
       - ${LOCAL_VOLUME_DIR:-/tmp/cic}/pgp:/tmp/cic/pgp
     # command: "/root/start_server.sh -vv"

-  cic-ussd-server:
-    # image: grassrootseconomics:cic-ussd
+  cic-user-ussd-server:
     build:
       context: apps/
       dockerfile: cic-ussd/docker/Dockerfile
@@ -507,7 +508,7 @@ services:
       SERVER_PORT: 9000
       CIC_META_URL: ${CIC_META_URL:-http://meta:8000}
     ports:
-      - ${HTTP_PORT_CIC_USSD:-63315}:9000
+      - ${HTTP_PORT_CIC_USER_USSD_SERVER:-63315}:9000
     depends_on:
       - postgres
       - redis
@@ -516,10 +517,31 @@ services:
     deploy:
       restart_policy:
         condition: on-failure
-    command: "/root/start_uwsgi.sh -vv"
+    command: "/root/start_cic_user_ussd_server.sh -vv"

-  cic-ussd-tasker:
-    # image: grassrootseconomics:cic-ussd
+  cic-user-server:
+    build:
+      context: apps
+      dockerfile: cic-ussd/docker/Dockerfile
+    environment:
+      DATABASE_USER: grassroots
+      DATABASE_HOST: postgres
+      DATABASE_PORT: 5432
+      DATABASE_PASSWORD: tralala
+      DATABASE_NAME: cic_ussd
+      DATABASE_ENGINE: postgresql
+      DATABASE_DRIVER: psycopg2
+      DATABASE_POOL_SIZE: 0
+    ports:
+      - ${HTTP_PORT_CIC_USER_SERVER:-63415}:9500
+    depends_on:
+      - postgres
+    deploy:
+      restart_policy:
+        condition: on-failure
+    command: "/root/start_cic_user_server.sh -vv"
+
+  cic-user-tasker:
     build:
       context: apps
       dockerfile: cic-ussd/docker/Dockerfile
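A quick way to confirm the new cic-user-server mapping is reachable once the stack is up; the root path and its response are assumptions, only the default host port 63415 comes from the compose file above:

    import urllib.request

    # hits the host port published for cic-user-server (63415 -> container 9500)
    with urllib.request.urlopen('http://localhost:63415/') as resp:
        print(resp.status)
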
@@ -544,4 +566,4 @@ services:
     deploy:
       restart_policy:
         condition: on-failure
-    command: "/root/start_tasker.sh -q cic-ussd -vv"
+    command: "/root/start_cic_user_tasker.sh -q cic-ussd -vv"