Merge remote-tracking branch 'origin/master' into lash/contract-interfaces

nolash 2021-05-02 18:58:42 +02:00
commit 3a3317fd29
42 changed files with 513 additions and 136 deletions

View File

@@ -6,3 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=postgresql
 DRIVER=psycopg2
+DEBUG=

View File

@@ -6,3 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=sqlite
 DRIVER=pysqlite
+DEBUG=

View File

@@ -2,9 +2,14 @@
 import logging

 # local imports
-from .list import list_transactions_mined
-from .list import list_transactions_account_mined
-from .list import add_transaction
+from .list import (
+    list_transactions_mined,
+    list_transactions_account_mined,
+    add_transaction,
+    tag_transaction,
+    add_tag,
+)

 logg = logging.getLogger()

View File

@@ -2,8 +2,9 @@
 import logging
 import datetime

-# third-party imports
+# external imports
 from cic_cache.db.models.base import SessionBase
+from sqlalchemy import text

 logg = logging.getLogger()
@@ -50,7 +51,8 @@ def list_transactions_account_mined(

 def add_transaction(
-        session, tx_hash,
+        session,
+        tx_hash,
         block_number,
         tx_index,
         sender,
@@ -62,6 +64,33 @@ def add_transaction(
         success,
         timestamp,
     ):
+    """Adds a single transaction to the cache persistent storage. Sensible interpretation of all fields is the responsibility of the caller.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param tx_hash: Transaction hash
+    :type tx_hash: str, 0x-hex
+    :param block_number: Block number
+    :type block_number: int
+    :param tx_index: Transaction index in block
+    :type tx_index: int
+    :param sender: Ethereum address of effective sender
+    :type sender: str, 0x-hex
+    :param receiver: Ethereum address of effective recipient
+    :type receiver: str, 0x-hex
+    :param source_token: Ethereum address of token used by sender
+    :type source_token: str, 0x-hex
+    :param destination_token: Ethereum address of token received by recipient
+    :type destination_token: str, 0x-hex
+    :param from_value: Source token value spent in transaction
+    :type from_value: int
+    :param to_value: Destination token value received in transaction
+    :type to_value: int
+    :param success: True if code execution on network was successful
+    :type success: bool
+    :param timestamp: Block timestamp
+    :type timestamp: int, unix epoch
+    """
     date_block = datetime.datetime.fromtimestamp(timestamp)
     s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, from_value, to_value, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, {}, {}, '{}')".format(
         tx_hash,
@@ -77,3 +106,74 @@ def add_transaction(
         date_block,
     )
     session.execute(s)
+
+
+def tag_transaction(
+        session,
+        tx_hash,
+        name,
+        domain=None,
+    ):
+    """Tag a single transaction with a single tag.
+
+    Tag must already exist in storage.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param tx_hash: Transaction hash
+    :type tx_hash: str, 0x-hex
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    :raises ValueError: Unknown tag or transaction hash
+    """
+    s = text("SELECT id from tx where tx_hash = :a")
+    r = session.execute(s, {'a': tx_hash}).fetchall()
+    tx_id = r[0].values()[0]
+
+    if tx_id == None:
+        raise ValueError('unknown tx hash {}'.format(tx_hash))
+
+    #s = text("SELECT id from tag where value = :a and domain = :b")
+    if domain == None:
+        s = text("SELECT id from tag where value = :a")
+    else:
+        s = text("SELECT id from tag where value = :a and domain = :b")
+    r = session.execute(s, {'a': name, 'b': domain}).fetchall()
+    tag_id = r[0].values()[0]
+
+    logg.debug('type {} {}'.format(type(tag_id), type(tx_id)))
+
+    if tag_id == None:
+        raise ValueError('unknown tag name {} domain {}'.format(name, domain))
+
+    s = text("INSERT INTO tag_tx_link (tag_id, tx_id) VALUES (:a, :b)")
+    r = session.execute(s, {'a': int(tag_id), 'b': int(tx_id)})
+
+
+def add_tag(
+        session,
+        name,
+        domain=None,
+    ):
+    """Add a single tag to storage.
+
+    :param session: Persistent storage session object
+    :type session: SQLAlchemy session
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    :raises sqlalchemy.exc.IntegrityError: Tag already exists
+    """
+    s = None
+    if domain == None:
+        s = text("INSERT INTO tag (value) VALUES (:b)")
+    else:
+        s = text("INSERT INTO tag (domain, value) VALUES (:a, :b)")
+    session.execute(s, {'a': domain, 'b': name})
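
For orientation, a minimal usage sketch of the tagging API added above, assuming a session from the SessionBase factory shown in this module; the transaction hash and field values are hypothetical:

    import datetime
    from cic_cache.db.models.base import SessionBase
    from cic_cache.db import add_transaction, add_tag, tag_transaction

    session = SessionBase.create_session()

    # register the tag once; add_tag raises sqlalchemy.exc.IntegrityError if it already exists
    add_tag(session, 'transfer', domain='erc20')
    session.commit()

    # cache a transaction, then link it to the tag
    tx_hash = '0x' + '00' * 32  # hypothetical
    add_transaction(
        session,
        tx_hash,
        42,  # block_number
        0,  # tx_index
        '0x' + '11' * 20,  # sender
        '0x' + '22' * 20,  # receiver
        '0x' + '33' * 20,  # source_token
        '0x' + '33' * 20,  # destination_token
        1024,  # from_value
        1024,  # to_value
        True,  # success
        int(datetime.datetime.utcnow().timestamp()),  # timestamp
    )
    session.commit()

    tag_transaction(session, tx_hash, 'transfer', domain='erc20')
    session.commit()
    session.close()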

View File

@@ -0,0 +1,38 @@
+"""Transaction tags
+
+Revision ID: aaf2bdce7d6e
+Revises: 6604de4203e2
+Create Date: 2021-05-01 09:20:20.775082
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = 'aaf2bdce7d6e'
+down_revision = '6604de4203e2'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.create_table(
+        'tag',
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('domain', sa.String(), nullable=True),
+        sa.Column('value', sa.String(), nullable=False),
+    )
+    op.create_index('idx_tag_domain_value', 'tag', ['domain', 'value'], unique=True)
+
+    op.create_table(
+        'tag_tx_link',
+        sa.Column('id', sa.Integer, primary_key=True),
+        sa.Column('tag_id', sa.Integer, sa.ForeignKey('tag.id'), nullable=False),
+        sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=False),
+    )
+
+
+def downgrade():
+    op.drop_table('tag_tx_link')
+    op.drop_index('idx_tag_domain_value')
+    op.drop_table('tag')
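
Tagged transactions can then be looked up by joining through the link table created here; a sketch in the same textual SQL style used by the db module (session as above):

    from sqlalchemy import text

    # fetch all transaction hashes carrying a given tag
    s = text(
        "SELECT x.tx_hash FROM tag a "
        "INNER JOIN tag_tx_link l ON l.tag_id = a.id "
        "INNER JOIN tx x ON x.id = l.tx_id "
        "WHERE a.domain = :domain AND a.value = :value"
    )
    rows = session.execute(s, {'domain': 'erc20', 'value': 'transfer'}).fetchall()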

View File

@@ -1,2 +1,27 @@
-class SyncFilter:
-    pass
+class TagSyncFilter:
+    """Holds tag name and domain for an implementing filter.
+
+    :param name: Tag value
+    :type name: str
+    :param domain: Tag domain
+    :type domain: str
+    """
+
+    def __init__(self, name, domain=None):
+        self.tag_name = name
+        self.tag_domain = domain
+
+
+    def tag(self):
+        """Return tag value/domain.
+
+        :rtype: Tuple
+        :returns: tag value/domain.
+        """
+        return (self.tag_name, self.tag_domain)
+
+
+    def __str__(self):
+        if self.tag_domain == None:
+            return self.tag_name
+        return '{}.{}'.format(self.tag_domain, self.tag_name)
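
A sketch of how an implementing filter is expected to use TagSyncFilter, mirroring the ERC20TransferFilter change in the next file; the filter class and its tag values here are hypothetical:

    class FaucetGiftFilter(TagSyncFilter):

        def __init__(self, chain_spec):
            # declare the tag this filter attaches to matching transactions
            super(FaucetGiftFilter, self).__init__('gift', domain='faucet')
            self.chain_spec = chain_spec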

View File

@@ -15,15 +15,16 @@ from cic_eth_registry.error import (
 )

 # local imports
-from .base import SyncFilter
+from .base import TagSyncFilter
 from cic_cache import db as cic_cache_db

 logg = logging.getLogger().getChild(__name__)


-class ERC20TransferFilter(SyncFilter):
+class ERC20TransferFilter(TagSyncFilter):

     def __init__(self, chain_spec):
+        super(ERC20TransferFilter, self).__init__('transfer', domain='erc20')
         self.chain_spec = chain_spec

@@ -46,6 +47,9 @@ class ERC20TransferFilter(SyncFilter):
         except RequestMismatchException:
             logg.debug('erc20 match but not a transfer, skipping')
             return False
+        except ValueError:
+            logg.debug('erc20 match but bogus data, skipping')
+            return False

         token_sender = tx.outputs[0]
         token_recipient = transfer_data[0]

@@ -67,7 +71,13 @@ class ERC20TransferFilter(SyncFilter):
                 tx.status == Status.SUCCESS,
                 block.timestamp,
             )
-        #db_session.flush()
+        db_session.flush()
+        cic_cache_db.tag_transaction(
+            db_session,
+            tx.hash,
+            self.tag_name,
+            domain=self.tag_domain,
+        )
         db_session.commit()

         return True

View File

@@ -7,9 +7,10 @@ import argparse
 import sys
 import re

-# third-party imports
+# external imports
 import confini
 import celery
+import sqlalchemy
 import rlp
 import cic_base.config
 import cic_base.log

@@ -34,7 +35,10 @@ from chainsyncer.driver import (
 from chainsyncer.db.models.base import SessionBase

 # local imports
-from cic_cache.db import dsn_from_config
+from cic_cache.db import (
+    dsn_from_config,
+    add_tag,
+)
 from cic_cache.runnable.daemons.filters import (
     ERC20TransferFilter,
 )
@@ -59,6 +63,17 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
 cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))


+def register_filter_tags(filters, session):
+    for f in filters:
+        tag = f.tag()
+        try:
+            add_tag(session, tag[0], domain=tag[1])
+            session.commit()
+            logg.info('added tag name "{}" domain "{}"'.format(tag[0], tag[1]))
+        except sqlalchemy.exc.IntegrityError:
+            logg.debug('already have tag name "{}" domain "{}"'.format(tag[0], tag[1]))
+
+
 def main():
     # Connect to blockchain with chainlib
     rpc = RPCConnection.connect(chain_spec, 'default')

@@ -98,10 +113,19 @@ def main():
     erc20_transfer_filter = ERC20TransferFilter(chain_spec)

+    filters = [
+        erc20_transfer_filter,
+    ]
+
+    session = SessionBase.create_session()
+    register_filter_tags(filters, session)
+    session.close()
+
     i = 0
     for syncer in syncers:
         logg.debug('running syncer index {}'.format(i))
-        syncer.add_filter(erc20_transfer_filter)
+        for f in filters:
+            syncer.add_filter(f)
         r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
         sys.stderr.write("sync {} done at block {}\n".format(syncer, r))
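
With the filters list in place, additional tag filters can be registered in one pass; a sketch using the hypothetical FaucetGiftFilter from earlier:

    filters = [
        ERC20TransferFilter(chain_spec),
        FaucetGiftFilter(chain_spec),  # hypothetical second filter
    ]

    # ensure each filter's tag exists in storage before syncing begins
    session = SessionBase.create_session()
    register_filter_tags(filters, session)
    session.close()

    for syncer in syncers:
        for f in filters:
            syncer.add_filter(f)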

View File

@@ -6,4 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=postgresql
 DRIVER=psycopg2
-DEBUG=
+DEBUG=0

View File

@@ -1,2 +1,4 @@
 [cic]
 registry_address =
+chain_spec =
+trust_address =

View File

@@ -6,4 +6,4 @@ HOST=localhost
 PORT=5432
 ENGINE=sqlite
 DRIVER=pysqlite
-DEBUG=
+DEBUG=1

View File

@ -1,9 +1,9 @@
cic-base~=0.1.2a77 cic-base~=0.1.2b6
alembic==1.4.2 alembic==1.4.2
confini~=0.3.6rc3 confini~=0.3.6rc3
uwsgi==2.0.19.1 uwsgi==2.0.19.1
moolb~=0.1.0 moolb~=0.1.0
cic-eth-registry~=0.5.4a16 cic-eth-registry~=0.5.5a1
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
semver==2.13.0 semver==2.13.0
psycopg2==2.8.6 psycopg2==2.8.6

View File

@@ -4,3 +4,10 @@ pytest-mock==3.3.1
 pysqlite3==0.4.3
 sqlparse==0.4.1
 pytest-celery==0.0.0a1
+eth_tester==0.5.0b3
+py-evm==0.3.0a20
+web3==5.12.2
+cic-eth-registry~=0.5.5a3
+giftable-erc20-token~=0.0.8a10
+eth-address-index~=0.1.1a10
+sarafu-faucet~=0.0.3a1

View File

@@ -3,7 +3,7 @@ import os
 import sys
 import datetime

-# third-party imports
+# external imports
 import pytest

 # local imports

@@ -84,3 +84,7 @@ def txs(
     session.commit()

+    return [
+        tx_hash_first,
+        tx_hash_second,
+    ]

View File

@@ -0,0 +1,3 @@
+from chainlib.eth.pytest import *
+from cic_eth_registry.pytest.fixtures_tokens import *

View File

@@ -0,0 +1,69 @@
+# standard imports
+import os
+import datetime
+import logging
+import json
+
+# external imports
+import pytest
+from sqlalchemy import text
+from chainlib.eth.tx import Tx
+from chainlib.eth.block import Block
+from chainlib.chain import ChainSpec
+from hexathon import (
+    strip_0x,
+    add_0x,
+)
+
+# local imports
+from cic_cache.db import add_tag
+from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
+
+logg = logging.getLogger()
+
+
+def test_cache(
+        eth_rpc,
+        foo_token,
+        init_database,
+        list_defaults,
+        list_actors,
+        tags,
+    ):
+
+    chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
+
+    fltr = ERC20TransferFilter(chain_spec)
+
+    add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
+
+    data = 'a9059cbb'
+    data += strip_0x(list_actors['alice'])
+    data += '1000'.ljust(64, '0')
+
+    block = Block({
+        'hash': os.urandom(32).hex(),
+        'number': 42,
+        'timestamp': datetime.datetime.utcnow().timestamp(),
+        'transactions': [],
+    })
+
+    tx = Tx({
+        'to': foo_token,
+        'from': list_actors['bob'],
+        'data': data,
+        'value': 0,
+        'hash': os.urandom(32).hex(),
+        'nonce': 13,
+        'gasPrice': 10000000,
+        'gas': 123456,
+    })
+    block.txs.append(tx)
+    tx.block = block
+
+    r = fltr.filter(eth_rpc, block, tx, db_session=init_database)
+    assert r
+
+    s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
+    r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
+    assert r[0] == tx.hash

View File

@@ -2,7 +2,7 @@
 import os
 import logging

-# third-party imports
+# external imports
 import pytest
 import confini

@@ -13,7 +13,7 @@ logg = logging.getLogger(__file__)
 @pytest.fixture(scope='session')
 def load_config():
-    config_dir = os.path.join(root_dir, '.config/test')
+    config_dir = os.path.join(root_dir, 'config/test')
     conf = confini.Config(config_dir, 'CICTEST')
     conf.process()
     logg.debug('config {}'.format(conf))

View File

@@ -3,13 +3,16 @@ import os
 import logging
 import re

-# third-party imports
+# external imports
 import pytest
 import sqlparse
+import alembic
+from alembic.config import Config as AlembicConfig

 # local imports
 from cic_cache.db.models.base import SessionBase
 from cic_cache.db import dsn_from_config
+from cic_cache.db import add_tag

 logg = logging.getLogger(__file__)
@@ -26,11 +29,10 @@ def database_engine(
     except FileNotFoundError:
         pass
     dsn = dsn_from_config(load_config)
-    SessionBase.connect(dsn)
+    SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
     return dsn


-# TODO: use alembic instead to migrate db, here we have to keep separate schema than migration script in script/migrate.py
 @pytest.fixture(scope='function')
 def init_database(
         load_config,
@@ -38,52 +40,23 @@
     ):

     rootdir = os.path.dirname(os.path.dirname(__file__))
-    schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))
-
-    if load_config.get('DATABASE_ENGINE') == 'sqlite':
-        rconn = SessionBase.engine.raw_connection()
-        f = open(os.path.join(schemadir, 'db.sql'))
-        s = f.read()
-        f.close()
-        rconn.executescript(s)
-    else:
-        rconn = SessionBase.engine.raw_connection()
-        rcursor = rconn.cursor()
-        #rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
-        #rcursor.execute('DROP FUNCTION IF EXISTS public.balances')
-        f = open(os.path.join(schemadir, 'db.sql'))
-        s = f.read()
-        f.close()
-        r = re.compile(r'^[A-Z]', re.MULTILINE)
-        for l in sqlparse.parse(s):
-            strl = str(l)
-            # we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
-            if not re.search(r, strl):
-                logg.warning('skipping parsed query line {}'.format(strl))
-                continue
-            rcursor.execute(strl)
-        rconn.commit()
-        rcursor.execute('SET search_path TO public')
-        # this doesn't work when run separately, no idea why
-        # functions have been manually added to original schema from cic-eth
-        # f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
-        # s = f.read()
-        # f.close()
-        # rcursor.execute(s)
-        #
-        # f = open(os.path.join(schemadir, 'proc_balances.sql'))
-        # s = f.read()
-        # f.close()
-        # rcursor.execute(s)
-        rcursor.close()
+    dbdir = os.path.join(rootdir, 'cic_cache', 'db')
+    migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
+    if not os.path.isdir(migrationsdir):
+        migrationsdir = os.path.join(dbdir, 'migrations', 'default')
+    logg.info('using migrations directory {}'.format(migrationsdir))

     session = SessionBase.create_session()
+
+    ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
+    ac.set_main_option('sqlalchemy.url', database_engine)
+    ac.set_main_option('script_location', migrationsdir)
+
+    alembic.command.downgrade(ac, 'base')
+    alembic.command.upgrade(ac, 'head')
+
+    session.commit()

     yield session
     session.commit()
     session.close()
@@ -116,3 +89,14 @@ def list_defaults(
     return {
         'block': 420000,
     }
+
+
+@pytest.fixture(scope='function')
+def tags(
+        init_database,
+    ):
+
+    add_tag(init_database, 'foo')
+    add_tag(init_database, 'baz', domain='bar')
+    add_tag(init_database, 'xyzzy', domain='bar')
+    init_database.commit()

View File

@@ -4,7 +4,7 @@ import datetime
 import logging
 import json

-# third-party imports
+# external imports
 import pytest

 # local imports

View File

@@ -0,0 +1,37 @@
+import os
+import datetime
+import logging
+import json
+
+# external imports
+import pytest
+
+# local imports
+from cic_cache.db import tag_transaction
+
+logg = logging.getLogger()
+
+
+def test_cache(
+        init_database,
+        list_defaults,
+        list_actors,
+        list_tokens,
+        txs,
+        tags,
+    ):
+
+    tag_transaction(init_database, txs[0], 'foo')
+    tag_transaction(init_database, txs[0], 'baz', domain='bar')
+    tag_transaction(init_database, txs[1], 'xyzzy', domain='bar')
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.value = 'foo'").fetchall()
+    assert r[0][0] == txs[0]
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'baz'").fetchall()
+    assert r[0][0] == txs[0]
+
+    r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'xyzzy'").fetchall()
+    assert r[0][0] == txs[1]

View File

@@ -36,6 +36,7 @@ from cic_eth.eth import (
 from cic_eth.admin import (
     debug,
     ctrl,
+    token
 )
 from cic_eth.queue import (
     query,

View File

@@ -2,7 +2,7 @@
 import os
 import logging

-# third-party imports
+# external imports
 import pytest
 import alembic
 from alembic.config import Config as AlembicConfig

View File

@@ -2,7 +2,7 @@ FROM node:15.3.0-alpine3.10
 WORKDIR /tmp/src/cic-meta

-RUN apk add --no-cache postgresql
+RUN apk add --no-cache postgresql bash

 COPY cic-meta/package.json \
     ./

View File

@@ -1,3 +1,6 @@
 #!/bin/bash

+set -e
+
-PGPASSWORD=$DATABASE_PASSWORD psql -U $DATABASE_USER -h $DATABASE_HOST -p $DATABASE_PORT -d $DATABASE_NAME -f $SCHEMA_SQL_PATH
+PGPASSWORD=$DATABASE_PASSWORD psql -v ON_ERROR_STOP=1 -U $DATABASE_USER -h $DATABASE_HOST -p $DATABASE_PORT -d $DATABASE_NAME -f $SCHEMA_SQL_PATH

View File

@@ -1,3 +1,7 @@
 #!/bin/bash

+set -euo pipefail
+
+# db migration
 sh ./db.sh

 # /usr/local/bin/node /usr/local/bin/cic-meta-server $@

View File

@@ -1,4 +1,4 @@
-create table if not exists cic_meta.store (
+create table if not exists store (
     id serial primary key not null,
     owner_fingerprint text not null,
     hash char(64) not null unique,

View File

@@ -20,7 +20,7 @@ def define_account_tx_metadata(user: Account):
     )
     key = generate_metadata_pointer(
         identifier=identifier,
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     account_metadata = get_cached_data(key=key)

View File

@@ -80,7 +80,7 @@ def get_cached_operational_balance(blockchain_address: str):
     """
     key = create_cached_data_key(
         identifier=bytes.fromhex(blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
     )
     cached_balance = get_cached_data(key=key)
     if cached_balance:

View File

@@ -38,3 +38,13 @@ class MetadataStoreError(Exception):
     pass
+
+
+class SeppukuError(Exception):
+    """Exception base class for all errors that should cause system shutdown"""
+    pass
+
+
+class InitializationError(Exception):
+    """Exception raised when initialization state is insufficient to run component"""
+    pass

View File

@@ -118,7 +118,7 @@ class MetadataRequestsHandler(Metadata):
         metadata_http_error_handler(result=result)
         response_data = result.content
         data = json.loads(response_data.decode('utf-8'))
-        if result.status_code == 200 and self.cic_type == 'cic.person':
+        if result.status_code == 200 and self.cic_type == ':cic.person':
             person = Person()
             deserialized_person = person.deserialize(person_data=json.loads(data))
             data = json.dumps(deserialized_person.serialize())

View File

@@ -9,4 +9,4 @@ from .base import MetadataRequestsHandler
 class PersonMetadata(MetadataRequestsHandler):

     def __init__(self, identifier: bytes):
-        super().__init__(cic_type='cic.person', identifier=identifier)
+        super().__init__(cic_type=':cic.person', identifier=identifier)

View File

@@ -10,4 +10,4 @@ from .base import MetadataRequestsHandler
 class PhonePointerMetadata(MetadataRequestsHandler):

     def __init__(self, identifier: bytes):
-        super().__init__(cic_type='cic.msisdn', identifier=identifier)
+        super().__init__(cic_type=':cic.phone', identifier=identifier)
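
The cic_type values above follow the new ':cic.' prefix convention for metadata pointers; a minimal sketch of how such a pointer key is derived with cic-types' generate_metadata_pointer, using a hypothetical identifier:

    from cic_types.models.person import generate_metadata_pointer

    identifier = bytes.fromhex('11' * 20)  # hypothetical account address bytes
    key = generate_metadata_pointer(
        identifier=identifier,
        cic_type=':cic.person',
    )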

View File

@@ -7,6 +7,7 @@ from typing import Optional
 # third party imports
 import celery
 from sqlalchemy import desc
+from cic_eth.api import Api
 from tinydb.table import Document

 # local imports

@@ -15,7 +16,7 @@ from cic_ussd.balance import BalanceManager, compute_operational_balance, get_ca
 from cic_ussd.chain import Chain
 from cic_ussd.db.models.account import AccountStatus, Account
 from cic_ussd.db.models.ussd_session import UssdSession
-from cic_ussd.error import MetadataNotFoundError
+from cic_ussd.error import MetadataNotFoundError, SeppukuError
 from cic_ussd.menu.ussd_menu import UssdMenu
 from cic_ussd.metadata import blockchain_address_to_metadata_pointer
 from cic_ussd.phone_number import get_user_by_phone_number
@@ -28,6 +29,38 @@ from cic_types.models.person import generate_metadata_pointer, get_contact_data_
 logg = logging.getLogger(__name__)


+def get_default_token_data():
+    chain_str = Chain.spec.__str__()
+    cic_eth_api = Api(chain_str=chain_str)
+    default_token_request_task = cic_eth_api.default_token()
+    default_token_data = default_token_request_task.get()
+    return default_token_data
+
+
+def retrieve_token_symbol(chain_str: str = Chain.spec.__str__()):
+    """Retrieves the default token symbol from the cache, falling back to the default token API.
+
+    :param chain_str: Chain name and network id.
+    :type chain_str: str
+    :return: The default token symbol.
+    :rtype: str
+    """
+    cache_key = create_cached_data_key(
+        identifier=chain_str.encode('utf-8'),
+        salt=':cic.default_token_data'
+    )
+    cached_data = get_cached_data(key=cache_key)
+    if cached_data:
+        default_token_data = json.loads(cached_data)
+        return default_token_data.get('symbol')
+    else:
+        logg.warning('Cached default token data not found. Attempting retrieval from default token API')
+        default_token_data = get_default_token_data()
+        if default_token_data:
+            return default_token_data.get('symbol')
+        else:
+            raise SeppukuError(f'Could not retrieve default token for: {chain_str}')
+
+
 def process_pin_authorization(display_key: str, user: Account, **kwargs) -> str:
     """
     This method provides translation for all ussd menu entries that follow the pin authorization pattern.
@@ -73,7 +106,9 @@ def process_exit_insufficient_balance(display_key: str, user: Account, ussd_sess
     # compile response data
     user_input = ussd_session.get('user_input').split('*')[-1]
     transaction_amount = to_wei(value=int(user_input))
-    token_symbol = 'GFT'
+
+    # get default data
+    token_symbol = retrieve_token_symbol()

     recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
     recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
@@ -102,7 +137,7 @@ def process_exit_successful_transaction(display_key: str, user: Account, ussd_se
     :rtype: str
     """
     transaction_amount = to_wei(int(ussd_session.get('session_data').get('transaction_amount')))
-    token_symbol = 'GFT'
+    token_symbol = retrieve_token_symbol()
     recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
     recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
     tx_recipient_information = define_account_tx_metadata(user=recipient)
@@ -137,7 +172,7 @@ def process_transaction_pin_authorization(user: Account, display_key: str, ussd_
     tx_recipient_information = define_account_tx_metadata(user=recipient)
     tx_sender_information = define_account_tx_metadata(user=user)
-    token_symbol = 'GFT'
+    token_symbol = retrieve_token_symbol()
     user_input = ussd_session.get('session_data').get('transaction_amount')
     transaction_amount = to_wei(value=int(user_input))
     logg.debug('Requires integration to determine user tokens.')
@@ -168,18 +203,18 @@ def process_account_balances(user: Account, display_key: str, ussd_session: dict
     logg.debug('Requires call to retrieve tax and bonus amounts')
     tax = ''
     bonus = ''
+    token_symbol = retrieve_token_symbol()

     return translation_for(
         key=display_key,
         preferred_language=user.preferred_language,
         operational_balance=operational_balance,
         tax=tax,
         bonus=bonus,
-        token_symbol='GFT'
+        token_symbol=token_symbol
     )


-def format_transactions(transactions: list, preferred_language: str):
+def format_transactions(transactions: list, preferred_language: str, token_symbol: str):

     formatted_transactions = ''
     if len(transactions) > 0:

@@ -190,7 +225,7 @@ def format_transactions(transactions: list, preferred_language: str):
             timestamp = transaction.get('timestamp')
             action_tag = transaction.get('action_tag')
             direction = transaction.get('direction')
-            token_symbol = 'GFT'
+            token_symbol = token_symbol

             if action_tag == 'SENT' or action_tag == 'ULITUMA':
                 formatted_transactions += f'{action_tag} {value} {token_symbol} {direction} {recipient_phone_number} {timestamp}.\n'
@@ -214,7 +249,7 @@ def process_display_user_metadata(user: Account, display_key: str):
     """
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     user_metadata = get_cached_data(key)
     if user_metadata:
@@ -251,9 +286,11 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
     """
     # retrieve cached statement
     identifier = blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address)
-    key = create_cached_data_key(identifier=identifier, salt='cic.statement')
+    key = create_cached_data_key(identifier=identifier, salt=':cic.statement')
     transactions = get_cached_data(key=key)

+    token_symbol = retrieve_token_symbol()
+
     first_transaction_set = []
     middle_transaction_set = []
     last_transaction_set = []

@@ -277,7 +314,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
             preferred_language=user.preferred_language,
             first_transaction_set=format_transactions(
                 transactions=first_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
             )
         )
     elif display_key == 'ussd.kenya.middle_transaction_set':

@@ -286,7 +324,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
             preferred_language=user.preferred_language,
             middle_transaction_set=format_transactions(
                 transactions=middle_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
             )
         )

@@ -296,7 +335,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
             preferred_language=user.preferred_language,
             last_transaction_set=format_transactions(
                 transactions=last_transaction_set,
-                preferred_language=user.preferred_language
+                preferred_language=user.preferred_language,
+                token_symbol=token_symbol
             )
         )
@@ -312,18 +352,19 @@ def process_start_menu(display_key: str, user: Account):
     :return: Corresponding translation text response
     :rtype: str
     """
+    token_symbol = retrieve_token_symbol()
     chain_str = Chain.spec.__str__()
     blockchain_address = user.blockchain_address
     balance_manager = BalanceManager(address=blockchain_address,
                                      chain_str=chain_str,
-                                     token_symbol='GFT')
+                                     token_symbol=token_symbol)

     # get balances synchronously for display on start menu
     balances_data = balance_manager.get_balances()

     key = create_cached_data_key(
         identifier=bytes.fromhex(blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
     )
     cache_data(key=key, data=json.dumps(balances_data))

@@ -340,9 +381,6 @@ def process_start_menu(display_key: str, user: Account):
     # retrieve and cache account's statement
     retrieve_account_statement(blockchain_address=blockchain_address)

-    # TODO [Philip]: figure out how to get token symbol from a metadata layer of sorts.
-    token_symbol = 'GFT'
-
     return translation_for(
         key=display_key,
         preferred_language=user.preferred_language,
@@ -375,6 +413,13 @@ def process_request(user_input: str, user: Account, ussd_session: Optional[dict]
     :return: A ussd menu's corresponding text value.
     :rtype: Document
     """
+    # retrieve metadata before any transition
+    key = generate_metadata_pointer(
+        identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
+        cic_type=':cic.person'
+    )
+    person_metadata = get_cached_data(key=key)
+
     if ussd_session:
         if user_input == "0":
             return UssdMenu.parent_menu(menu_name=ussd_session.get('state'))

@@ -382,29 +427,12 @@ def process_request(user_input: str, user: Account, ussd_session: Optional[dict]
             successive_state = next_state(ussd_session=ussd_session, user=user, user_input=user_input)
             return UssdMenu.find_by_name(name=successive_state)
     else:
-        key = generate_metadata_pointer(
-            identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-            cic_type='cic.person'
-        )
-        # retrieve and cache account's metadata
-        s_query_person_metadata = celery.signature(
-            'cic_ussd.tasks.metadata.query_person_metadata',
-            [user.blockchain_address]
-        )
-        s_query_person_metadata.apply_async(queue='cic-ussd')
-
         if user.has_valid_pin():
             last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number)
             if last_ussd_session:
-                # get metadata
-                person_metadata = get_cached_data(key=key)
-
                 # get last state
                 last_state = last_ussd_session.state
                 # if last state is account_creation_prompt and metadata exists, show start menu
                 if last_state in [
                     'account_creation_prompt',
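
The default token lookup introduced in this file checks the cache first and only falls back to the cic-eth API; a condensed sketch of the flow, assuming the cache helpers imported by this module and a hypothetical chain string:

    import json

    from cic_ussd.redis import create_cached_data_key, get_cached_data

    chain_str = 'evm:foo:1'  # hypothetical
    cache_key = create_cached_data_key(
        identifier=chain_str.encode('utf-8'),
        salt=':cic.default_token_data'
    )
    cached_data = get_cached_data(key=cache_key)
    if cached_data:
        symbol = json.loads(cached_data).get('symbol')
    else:
        # celery round trip to cic-eth's default_token API
        symbol = get_default_token_data().get('symbol')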

View File

@@ -17,6 +17,7 @@ from cic_ussd.chain import Chain
 from cic_ussd.db import dsn_from_config
 from cic_ussd.db.models.base import SessionBase
 from cic_ussd.encoder import PasswordEncoder
+from cic_ussd.error import InitializationError
 from cic_ussd.files.local_files import create_local_file_data_stores, json_file_parser
 from cic_ussd.menu.ussd_menu import UssdMenu
 from cic_ussd.metadata.signer import Signer

@@ -25,7 +26,8 @@ from cic_ussd.operations import (define_response_with_content,
                                  process_menu_interaction_requests,
                                  define_multilingual_responses)
 from cic_ussd.phone_number import process_phone_number
-from cic_ussd.redis import InMemoryStore
+from cic_ussd.processor import get_default_token_data
+from cic_ussd.redis import cache_data, create_cached_data_key, InMemoryStore
 from cic_ussd.requests import (get_request_endpoint,
                                get_request_method)
 from cic_ussd.runnable.server_base import exportable_parser, logg
@@ -107,6 +109,20 @@ Chain.spec = chain_spec
 UssdStateMachine.states = states
 UssdStateMachine.transitions = transitions

+# retrieve default token data
+default_token_data = get_default_token_data()
+chain_str = Chain.spec.__str__()
+
+# cache default token for re-usability
+if default_token_data:
+    cache_key = create_cached_data_key(
+        identifier=chain_str.encode('utf-8'),
+        salt=':cic.default_token_data'
+    )
+    cache_data(key=cache_key, data=json.dumps(default_token_data))
+else:
+    raise InitializationError(f'Default token data for: {chain_str} not found.')
+

 def application(env, start_response):
     """Loads python code for application to be accessible over web server
View File

@@ -64,7 +64,7 @@ def has_sufficient_balance(state_machine_data: Tuple[str, dict, Account]) -> boo
     # get cached balance
     key = create_cached_data_key(
         identifier=bytes.fromhex(user.blockchain_address[2:]),
-        salt='cic.balances_data'
+        salt=':cic.balances_data'
     )
     cached_balance = get_cached_data(key=key)
     operational_balance = compute_operational_balance(balances=json.loads(cached_balance))

View File

@@ -176,7 +176,7 @@ def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, Account]):
     blockchain_address = user.blockchain_address
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     user_metadata = get_cached_data(key=key)

View File

@@ -23,7 +23,7 @@ def has_cached_user_metadata(state_machine_data: Tuple[str, dict, Account]):
     # check for user metadata in cache
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     user_metadata = get_cached_data(key=key)
     return user_metadata is not None

View File

@@ -136,7 +136,7 @@ def process_balances_callback(result: list, param: str, status_code: int):
         blockchain_address = balances_data.get('address')
         key = create_cached_data_key(
             identifier=bytes.fromhex(blockchain_address[2:]),
-            salt='cic.balances_data'
+            salt=':cic.balances_data'
         )
         cache_data(key=key, data=json.dumps(balances_data))
     else:

@@ -226,7 +226,7 @@ def process_statement_callback(result, param: str, status_code: int):
     # cache account statement
     identifier = bytes.fromhex(param[2:])
-    key = create_cached_data_key(identifier=identifier, salt='cic.statement')
+    key = create_cached_data_key(identifier=identifier, salt=':cic.statement')
     data = json.dumps(processed_transactions)

     # cache statement data
View File

@@ -105,7 +105,7 @@ def test_get_user_metadata(caplog,
     assert 'Get latest data status: 200' in caplog.text
     key = generate_metadata_pointer(
         identifier=identifier,
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     cached_user_metadata = get_cached_data(key=key)
     assert cached_user_metadata

View File

@@ -36,7 +36,7 @@ def test_has_cached_user_metadata(create_in_db_ussd_session,
     user = create_activated_user
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     cache_data(key=key, data=json.dumps(person_metadata))
     result = has_cached_user_metadata(state_machine_data=state_machine_data)

View File

@@ -115,6 +115,6 @@ def cached_user_metadata(create_activated_user, init_redis_cache, person_metadat
     user_metadata = json.dumps(person_metadata)
     key = generate_metadata_pointer(
         identifier=blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address),
-        cic_type='cic.person'
+        cic_type=':cic.person'
     )
     cache_data(key=key, data=user_metadata)

View File

@@ -47,9 +47,9 @@ RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh |
 ENV NODE_PATH $NVM_DIR/versions/node//v$NODE_VERSION/lib/node_modules
 ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH

-RUN useradd --create-home grassroots
-WORKDIR /home/grassroots
-USER grassroots
+#RUN useradd --create-home grassroots
+# WORKDIR /home/grassroots
+# USER grassroots

 ARG pip_extra_args=""
 ARG pip_index_url=https://pypi.org/simple
@@ -63,38 +63,43 @@ RUN pip install --user $pip_extra_args --index-url $pip_index_url --extra-index-
     sarafu-token==$sarafu_token_version \
     cic-eth==$cic_eth_version

+# -------------- begin runtime container ----------------
 FROM python:3.8.6-slim-buster as runtime-image

 RUN apt-get update
 RUN apt-get install -y --no-install-recommends gnupg libpq-dev
-RUN apt-get install -y --no-install-recommends jq
+RUN apt-get install -y jq bash iputils-ping socat

 COPY --from=compile-image /usr/local/bin/ /usr/local/bin/
 COPY --from=compile-image /usr/local/etc/cic/ /usr/local/etc/cic/
+COPY --from=compile-image /usr/local/lib/python3.8/site-packages/ \
+    /usr/local/lib/python3.8/site-packages/

-RUN useradd --create-home grassroots
-WORKDIR /home/grassroots
-# COPY python dependencies to user dir
-COPY --from=compile-image /home/grassroots/.local .local
-ENV PATH=/home/grassroots/.local/bin:$PATH
+ENV EXTRA_INDEX_URL https://pip.grassrootseconomics.net:8433
+# RUN useradd -u 1001 --create-home grassroots
+# RUN adduser grassroots sudo && \
+#     echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
+# WORKDIR /home/grassroots

 COPY contract-migration/testdata/pgp testdata/pgp
 COPY contract-migration/sarafu_declaration.json sarafu_declaration.json
 COPY contract-migration/keystore keystore
 COPY contract-migration/envlist .
+COPY contract-migration/scripts scripts/
+# RUN chown grassroots:grassroots .local/

-RUN mkdir -p /tmp/cic/config
-RUN chown grassroots:grassroots /tmp/cic/config
 # A shared output dir for environment configs
+RUN mkdir -p /tmp/cic/config
+# RUN chown grassroots:grassroots /tmp/cic/config
 RUN chmod a+rwx /tmp/cic/config

 COPY contract-migration/*.sh ./
-RUN chown grassroots:grassroots -R .
+# RUN chown grassroots:grassroots -R .
 RUN chmod gu+x *.sh

+# we copied these from the root build container.
+# this is dumb though...I guess the compile image should have the same user
+# RUN chown grassroots:grassroots -R /usr/local/lib/python3.8/site-packages/

-USER grassroots
+# USER grassroots

 ENTRYPOINT [ ]