Merge branch 'master' of gitlab.com:grassrootseconomics/cic-internal-integration into philip/import-pins-script

PhilipWafula 2021-05-02 08:13:26 +03:00
commit a9a04d3caa
Signed by untrusted user: mango-habanero
GPG Key ID: B00CE9034DA19FB7
60 changed files with 698 additions and 236 deletions

View File

@@ -6,3 +6,4 @@ HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=

View File

@@ -6,3 +6,4 @@ HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite
DEBUG=

View File

@@ -2,9 +2,14 @@
import logging
# local imports
from .list import list_transactions_mined
from .list import list_transactions_account_mined
from .list import add_transaction
from .list import (
list_transactions_mined,
list_transactions_account_mined,
add_transaction,
tag_transaction,
add_tag,
)
logg = logging.getLogger()

View File

@@ -2,8 +2,9 @@
import logging
import datetime
# third-party imports
# external imports
from cic_cache.db.models.base import SessionBase
from sqlalchemy import text
logg = logging.getLogger()
@@ -50,7 +51,8 @@ def list_transactions_account_mined(
def add_transaction(
session, tx_hash,
session,
tx_hash,
block_number,
tx_index,
sender,
@@ -62,6 +64,33 @@ def add_transaction(
success,
timestamp,
):
"""Adds a single transaction to the cache persistent storage. Sensible interpretation of all fields is the responsibility of the caller.
:param session: Persistent storage session object
:type session: SQLAlchemy session
:param tx_hash: Transaction hash
:type tx_hash: str, 0x-hex
:param block_number: Block number
:type block_number: int
:param tx_index: Transaction index in block
:type tx_index: int
:param sender: Ethereum address of effective sender
:type sender: str, 0x-hex
:param receiver: Ethereum address of effective recipient
:type receiver: str, 0x-hex
:param source_token: Ethereum address of token used by sender
:type source_token: str, 0x-hex
:param destination_token: Ethereum address of token received by recipient
:type destination_token: str, 0x-hex
:param from_value: Source token value spent in transaction
:type from_value: int
:param to_value: Destination token value received in transaction
:type to_value: int
:param success: True if code execution on network was successful
:type success: bool
:param timestamp: Block timestamp
:type timestamp: int
"""
date_block = datetime.datetime.fromtimestamp(timestamp)
s = "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, source_token, destination_token, from_value, to_value, success, date_block) VALUES ('{}', {}, {}, '{}', '{}', '{}', '{}', {}, {}, {}, '{}')".format(
tx_hash,
@@ -77,3 +106,74 @@ def add_transaction(
date_block,
)
session.execute(s)
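
Note that add_transaction interpolates its values with str.format, while tag_transaction and add_tag below bind parameters through text(). A minimal sketch of the same insert in the bound-parameter style (an editorial illustration, not part of this commit):

s = text(
    "INSERT INTO tx (tx_hash, block_number, tx_index, sender, recipient, "
    "source_token, destination_token, from_value, to_value, success, date_block) "
    "VALUES (:tx_hash, :block_number, :tx_index, :sender, :recipient, "
    ":source_token, :destination_token, :from_value, :to_value, :success, :date_block)"
)
session.execute(s, {
    'tx_hash': tx_hash,
    'block_number': block_number,
    'tx_index': tx_index,
    'sender': sender,
    'recipient': receiver,
    'source_token': source_token,
    'destination_token': destination_token,
    'from_value': from_value,
    'to_value': to_value,
    'success': success,
    'date_block': date_block,
})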
def tag_transaction(
session,
tx_hash,
name,
domain=None,
):
"""Tag a single transaction with a single tag.
Tag must already exist in storage.
:param session: Persistent storage session object
:type session: SQLAlchemy session
:param tx_hash: Transaction hash
:type tx_hash: str, 0x-hex
:param name: Tag value
:type name: str
:param domain: Tag domain
:type domain: str
:raises ValueError: Unknown tag or transaction hash
"""
s = text("SELECT id from tx where tx_hash = :a")
r = session.execute(s, {'a': tx_hash}).fetchall()
tx_id = r[0].values()[0]
if tx_id == None:
raise ValueError('unknown tx hash {}'.format(tx_hash))
#s = text("SELECT id from tag where value = :a and domain = :b")
if domain == None:
s = text("SELECT id from tag where value = :a")
else:
s = text("SELECT id from tag where value = :a and domain = :b")
r = session.execute(s, {'a': name, 'b': domain}).fetchall()
tag_id = r[0].values()[0]
logg.debug('type {} {}'.format(type(tag_id), type(tx_id)))
if tag_id == None:
raise ValueError('unknown tag name {} domain {}'.format(name, domain))
s = text("INSERT INTO tag_tx_link (tag_id, tx_id) VALUES (:a, :b)")
r = session.execute(s, {'a': int(tag_id), 'b': int(tx_id)})
def add_tag(
session,
name,
domain=None,
):
"""Add a single tag to storage.
:param session: Persistent storage session object
:type session: SQLAlchemy session
:param name: Tag value
:type name: str
:param domain: Tag domain
:type domain: str
:raises sqlalchemy.exc.IntegrityError: Tag already exists
"""
s = None
if domain == None:
s = text("INSERT INTO tag (value) VALUES (:b)")
else:
s = text("INSERT INTO tag (domain, value) VALUES (:a, :b)")
session.execute(s, {'a': domain, 'b': name})
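
Taken together, the tag API is two-step: register a tag once with add_tag, then link cached transactions to it with tag_transaction. A minimal usage sketch (assuming sqlalchemy and SessionBase are imported, and the transaction was already stored with add_transaction):

session = SessionBase.create_session()
try:
    add_tag(session, 'transfer', domain='erc20')
    session.commit()
except sqlalchemy.exc.IntegrityError:
    session.rollback()  # tag already registered
tag_transaction(session, tx_hash, 'transfer', domain='erc20')
session.commit()
session.close()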

View File

@@ -0,0 +1,38 @@
"""Transaction tags
Revision ID: aaf2bdce7d6e
Revises: 6604de4203e2
Create Date: 2021-05-01 09:20:20.775082
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'aaf2bdce7d6e'
down_revision = '6604de4203e2'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'tag',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('domain', sa.String(), nullable=True),
sa.Column('value', sa.String(), nullable=False),
)
op.create_index('idx_tag_domain_value', 'tag', ['domain', 'value'], unique=True)
op.create_table(
'tag_tx_link',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('tag_id', sa.Integer, sa.ForeignKey('tag.id'), nullable=False),
sa.Column('tx_id', sa.Integer, sa.ForeignKey('tx.id'), nullable=False),
)
def downgrade():
op.drop_table('tag_tx_link')
op.drop_index('idx_tag_domain_value')
op.drop_table('tag')
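
The revision is applied like any other Alembic migration; the reworked test fixture later in this commit drives the same thing programmatically. A minimal sketch, assuming migrationsdir points at a directory containing alembic.ini and dsn is the database URL:

import os
import alembic.command
from alembic.config import Config as AlembicConfig

ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrationsdir)
alembic.command.upgrade(ac, 'head')  # includes revision aaf2bdce7d6e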

View File

@@ -1,2 +1,27 @@
class SyncFilter:
pass
class TagSyncFilter:
"""Holds tag name and domain for an implementing filter.
:param name: Tag value
:type name: str
:param domain: Tag domain
:type domain: str
"""
def __init__(self, name, domain=None):
self.tag_name = name
self.tag_domain = domain
def tag(self):
"""Return tag value/domain.
:rtype: Tuple
:returns: tag value/domain.
"""
return (self.tag_name, self.tag_domain)
def __str__(self):
if self.tag_domain == None:
return self.tag_name
return '{}.{}'.format(self.tag_domain, self.tag_name)
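
TagSyncFilter is the base for concrete filters such as the ERC20TransferFilter below: a subclass passes its tag name and domain to the constructor and supplies the match logic itself. A hypothetical sketch, assuming the same imports as the erc20 filter (FooFilter and its is_foo predicate are illustrative only, not in the codebase):

class FooFilter(TagSyncFilter):

    def __init__(self, chain_spec):
        super(FooFilter, self).__init__('foo', domain='example')
        self.chain_spec = chain_spec

    def filter(self, conn, block, tx, db_session=None):
        if not is_foo(tx):  # hypothetical match predicate
            return False
        # ... store the tx with cic_cache_db.add_transaction(...), then flush and tag:
        db_session.flush()
        cic_cache_db.tag_transaction(db_session, tx.hash, self.tag_name, domain=self.tag_domain)
        db_session.commit()
        return True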

View File

@@ -15,15 +15,16 @@ from cic_eth_registry.error import (
)
# local imports
from .base import SyncFilter
from .base import TagSyncFilter
from cic_cache import db as cic_cache_db
logg = logging.getLogger().getChild(__name__)
class ERC20TransferFilter(SyncFilter):
class ERC20TransferFilter(TagSyncFilter):
def __init__(self, chain_spec):
super(ERC20TransferFilter, self).__init__('transfer', domain='erc20')
self.chain_spec = chain_spec
@@ -46,6 +47,9 @@ class ERC20TransferFilter(SyncFilter):
except RequestMismatchException:
logg.debug('erc20 match but not a transfer, skipping')
return False
except ValueError:
logg.debug('erc20 match but bogus data, skipping')
return False
token_sender = tx.outputs[0]
token_recipient = transfer_data[0]
@@ -67,7 +71,13 @@ class ERC20TransferFilter(SyncFilter):
tx.status == Status.SUCCESS,
block.timestamp,
)
#db_session.flush()
db_session.flush()
cic_cache_db.tag_transaction(
db_session,
tx.hash,
self.tag_name,
domain=self.tag_domain,
)
db_session.commit()
return True
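
The previously commented-out flush is reinstated deliberately: tag_transaction looks up the freshly inserted tx row by hash with a raw SELECT, so the pending INSERT from add_transaction must reach the database before the tag link row can reference it; the final commit then persists both together.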

View File

@@ -7,9 +7,10 @@ import argparse
import sys
import re
# third-party imports
# external imports
import confini
import celery
import sqlalchemy
import rlp
import cic_base.config
import cic_base.log
@@ -34,7 +35,10 @@ from chainsyncer.driver import (
from chainsyncer.db.models.base import SessionBase
# local imports
from cic_cache.db import dsn_from_config
from cic_cache.db import (
dsn_from_config,
add_tag,
)
from cic_cache.runnable.daemons.filters import (
ERC20TransferFilter,
)
@@ -59,6 +63,17 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
def register_filter_tags(filters, session):
for f in filters:
tag = f.tag()
try:
add_tag(session, tag[0], domain=tag[1])
session.commit()
logg.info('added tag name "{}" domain "{}"'.format(tag[0], tag[1]))
except sqlalchemy.exc.IntegrityError:
logg.debug('already have tag name "{}" domain "{}"'.format(tag[0], tag[1]))
def main():
# Connect to blockchain with chainlib
rpc = RPCConnection.connect(chain_spec, 'default')
@@ -98,10 +113,19 @@ def main():
erc20_transfer_filter = ERC20TransferFilter(chain_spec)
filters = [
erc20_transfer_filter,
]
session = SessionBase.create_session()
register_filter_tags(filters, session)
session.close()
i = 0
for syncer in syncers:
logg.debug('running syncer index {}'.format(i))
syncer.add_filter(erc20_transfer_filter)
for f in filters:
syncer.add_filter(f)
r = syncer.loop(int(config.get('SYNCER_LOOP_INTERVAL')), rpc)
sys.stderr.write("sync {} done at block {}\n".format(syncer, r))

View File

@@ -6,4 +6,4 @@ HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=
DEBUG=0

View File

@@ -1,2 +1,4 @@
[cic]
registry_address =
chain_spec =
trust_address =

View File

@@ -6,4 +6,4 @@ HOST=localhost
PORT=5432
ENGINE=sqlite
DRIVER=pysqlite
DEBUG=
DEBUG=1

View File

@@ -43,10 +43,6 @@ COPY cic-cache/config/ /usr/local/etc/cic-cache/
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic-cache/cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
mkdir -p /usr/local/share/cic/solidity && \
cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
COPY cic-cache/docker/start_tracker.sh ./start_tracker.sh
COPY cic-cache/docker/db.sh ./db.sh
RUN chmod 755 ./*.sh

View File

@@ -1,9 +1,9 @@
cic-base~=0.1.2a77
cic-base~=0.1.2b6
alembic==1.4.2
confini~=0.3.6rc3
uwsgi==2.0.19.1
moolb~=0.1.0
cic-eth-registry~=0.5.4a16
cic-eth-registry~=0.5.5a1
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6

View File

@@ -4,3 +4,10 @@ pytest-mock==3.3.1
pysqlite3==0.4.3
sqlparse==0.4.1
pytest-celery==0.0.0a1
eth_tester==0.5.0b3
py-evm==0.3.0a20
web3==5.12.2
cic-eth-registry~=0.5.5a3
giftable-erc20-token~=0.0.8a10
eth-address-index~=0.1.1a10
sarafu-faucet~=0.0.3a1

View File

@@ -3,7 +3,7 @@ import os
import sys
import datetime
# third-party imports
# external imports
import pytest
# local imports
@@ -84,3 +84,7 @@ def txs(
session.commit()
return [
tx_hash_first,
tx_hash_second,
]

View File

@@ -0,0 +1,3 @@
from chainlib.eth.pytest import *
from cic_eth_registry.pytest.fixtures_tokens import *

View File

@@ -0,0 +1,69 @@
# standard imports
import os
import datetime
import logging
import json
# external imports
import pytest
from sqlalchemy import text
from chainlib.eth.tx import Tx
from chainlib.eth.block import Block
from chainlib.chain import ChainSpec
from hexathon import (
strip_0x,
add_0x,
)
# local imports
from cic_cache.db import add_tag
from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
logg = logging.getLogger()
def test_cache(
eth_rpc,
foo_token,
init_database,
list_defaults,
list_actors,
tags,
):
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
fltr = ERC20TransferFilter(chain_spec)
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
data = 'a9059cbb'
data += strip_0x(list_actors['alice'])
data += '1000'.ljust(64, '0')
block = Block({
'hash': os.urandom(32).hex(),
'number': 42,
'timestamp': datetime.datetime.utcnow().timestamp(),
'transactions': [],
})
tx = Tx({
'to': foo_token,
'from': list_actors['bob'],
'data': data,
'value': 0,
'hash': os.urandom(32).hex(),
'nonce': 13,
'gasPrice': 10000000,
'gas': 123456,
})
block.txs.append(tx)
tx.block = block
r = fltr.filter(eth_rpc, block, tx, db_session=init_database)
assert r
s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
assert r[0] == tx.hash
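
The test composes ERC20 transfer calldata by hand: the 4-byte transfer(address,uint256) selector a9059cbb followed by the recipient and the amount. For reference, the canonical ABI encoding pads each argument to a full 32-byte word; a sketch of that layout (an illustrative helper, not the code under test):

def erc20_transfer_calldata(recipient_hex, value):
    # keccak256('transfer(address,uint256)')[:4] == a9059cbb
    data = 'a9059cbb'
    data += recipient_hex.rjust(64, '0')   # address left-padded to 32 bytes
    data += '{:064x}'.format(value)        # uint256 amount, big-endian
    return data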

View File

@@ -2,7 +2,7 @@
import os
import logging
# third-party imports
# external imports
import pytest
import confini
@@ -13,7 +13,7 @@ logg = logging.getLogger(__file__)
@pytest.fixture(scope='session')
def load_config():
config_dir = os.path.join(root_dir, '.config/test')
config_dir = os.path.join(root_dir, 'config/test')
conf = confini.Config(config_dir, 'CICTEST')
conf.process()
logg.debug('config {}'.format(conf))

View File

@@ -3,13 +3,16 @@ import os
import logging
import re
# third-party imports
# external imports
import pytest
import sqlparse
import alembic
from alembic.config import Config as AlembicConfig
# local imports
from cic_cache.db.models.base import SessionBase
from cic_cache.db import dsn_from_config
from cic_cache.db import add_tag
logg = logging.getLogger(__file__)
@@ -26,11 +29,10 @@ def database_engine(
except FileNotFoundError:
pass
dsn = dsn_from_config(load_config)
SessionBase.connect(dsn)
SessionBase.connect(dsn, debug=load_config.true('DATABASE_DEBUG'))
return dsn
# TODO: use alembic instead to migrate db, here we have to keep separate schema than migration script in script/migrate.py
@pytest.fixture(scope='function')
def init_database(
load_config,
@@ -38,52 +40,23 @@ def init_database(
):
rootdir = os.path.dirname(os.path.dirname(__file__))
schemadir = os.path.join(rootdir, 'db', load_config.get('DATABASE_DRIVER'))
if load_config.get('DATABASE_ENGINE') == 'sqlite':
rconn = SessionBase.engine.raw_connection()
f = open(os.path.join(schemadir, 'db.sql'))
s = f.read()
f.close()
rconn.executescript(s)
else:
rconn = SessionBase.engine.raw_connection()
rcursor = rconn.cursor()
#rcursor.execute('DROP FUNCTION IF EXISTS public.transaction_list')
#rcursor.execute('DROP FUNCTION IF EXISTS public.balances')
f = open(os.path.join(schemadir, 'db.sql'))
s = f.read()
f.close()
r = re.compile(r'^[A-Z]', re.MULTILINE)
for l in sqlparse.parse(s):
strl = str(l)
# we need to check for empty query lines, as sqlparse doesn't do that on its own (and psycopg complains when it gets them)
if not re.search(r, strl):
logg.warning('skipping parsed query line {}'.format(strl))
continue
rcursor.execute(strl)
rconn.commit()
rcursor.execute('SET search_path TO public')
# this doesn't work when run separately, no idea why
# functions have been manually added to original schema from cic-eth
# f = open(os.path.join(schemadir, 'proc_transaction_list.sql'))
# s = f.read()
# f.close()
# rcursor.execute(s)
#
# f = open(os.path.join(schemadir, 'proc_balances.sql'))
# s = f.read()
# f.close()
# rcursor.execute(s)
rcursor.close()
dbdir = os.path.join(rootdir, 'cic_cache', 'db')
migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
if not os.path.isdir(migrationsdir):
migrationsdir = os.path.join(dbdir, 'migrations', 'default')
logg.info('using migrations directory {}'.format(migrationsdir))
session = SessionBase.create_session()
ac = AlembicConfig(os.path.join(migrationsdir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', database_engine)
ac.set_main_option('script_location', migrationsdir)
alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head')
session.commit()
yield session
session.commit()
session.close()
@@ -116,3 +89,14 @@ def list_defaults(
return {
'block': 420000,
}
@pytest.fixture(scope='function')
def tags(
init_database,
):
add_tag(init_database, 'foo')
add_tag(init_database, 'baz', domain='bar')
add_tag(init_database, 'xyzzy', domain='bar')
init_database.commit()

View File

@@ -4,7 +4,7 @@ import datetime
import logging
import json
# third-party imports
# external imports
import pytest
# local imports

View File

@@ -0,0 +1,37 @@
import os
import datetime
import logging
import json
# external imports
import pytest
# local imports
from cic_cache.db import tag_transaction
logg = logging.getLogger()
def test_cache(
init_database,
list_defaults,
list_actors,
list_tokens,
txs,
tags,
):
tag_transaction(init_database, txs[0], 'foo')
tag_transaction(init_database, txs[0], 'baz', domain='bar')
tag_transaction(init_database, txs[1], 'xyzzy', domain='bar')
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.value = 'foo'").fetchall()
assert r[0][0] == txs[0]
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'baz'").fetchall()
assert r[0][0] == txs[0]
r = init_database.execute("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = 'bar' AND a.value = 'xyzzy'").fetchall()
assert r[0][0] == txs[1]

View File

@@ -36,6 +36,7 @@ from cic_eth.eth import (
from cic_eth.admin import (
debug,
ctrl,
token
)
from cic_eth.queue import (
query,

View File

@@ -50,8 +50,4 @@ COPY cic-eth/config/ /usr/local/etc/cic-eth/
COPY cic-eth/cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY cic-eth/crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
RUN git clone https://gitlab.com/grassrootseconomics/cic-contracts.git && \
mkdir -p /usr/local/share/cic/solidity && \
cp -R cic-contracts/abis /usr/local/share/cic/solidity/abi
COPY util/liveness/health.sh /usr/local/bin/health.sh

View File

@@ -2,7 +2,7 @@
import os
import logging
# third-party imports
# external imports
import pytest
import alembic
from alembic.config import Config as AlembicConfig

View File

@@ -1,5 +1,5 @@
[pgp]
exports_dir = pgp
exports_dir = /root/pgp
privatekey_file = privatekeys.asc
passphrase = merman
publickey_trusted_file = publickeys.asc

View File

@@ -2,26 +2,31 @@ FROM node:15.3.0-alpine3.10
WORKDIR /tmp/src/cic-meta
RUN apk add --no-cache postgresql bash
COPY cic-meta/package.json \
./
COPY cic-meta/src/ src/
COPY cic-meta/tests/ tests/
COPY cic-meta/scripts/ scripts/
#COPY docker/*.sh /root/
RUN alias tsc=node_modules/typescript/bin/tsc
RUN npm install
# see exports_dir gpg.ini
COPY cic-meta/tests/*.asc /root/pgp/
RUN alias tsc=node_modules/typescript/bin/tsc
COPY cic-meta/.config/ /usr/local/etc/cic-meta/
# COPY cic-meta/scripts/server/initdb/server.postgres.sql /usr/local/share/cic-meta/sql/server.sql
COPY cic-meta/docker/db.sh ./db.sh
RUN chmod 755 ./db.sh
RUN alias ts-node=/tmp/src/cic-meta/node_modules/ts-node/dist/bin.js
ENTRYPOINT [ "./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts" ]
#RUN alias ts-node=/tmp/src/cic-meta/node_modules/ts-node/dist/bin.js
#ENTRYPOINT [ "./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts" ]
# COPY cic-meta/docker/start_server.sh ./start_server.sh
# RUN chmod 755 ./start_server.sh
COPY cic-meta/docker/start_server.sh ./start_server.sh
RUN chmod 755 ./start_server.sh
ENTRYPOINT ["sh", "./start_server.sh"]

View File

@@ -1,3 +1,6 @@
#!/bin/bash
set -e
PGPASSWORD=$DATABASE_PASSWORD psql -v ON_ERROR_STOP=1 -U $DATABASE_USER -h $DATABASE_HOST -p $DATABASE_PORT -d $DATABASE_NAME -f $SCHEMA_SQL_PATH
PGPASSWORD=$DATABASE_PASSWORD psql -U $DATABASE_USER -h $DATABASE_HOST -p $DATABASE_PORT -d $DATABASE_NAME /usr/local/share/cic-meta/sql/server.sql

View File

@@ -1,3 +1,9 @@
#!/bin/bash
set -euo pipefail
# db migration
sh ./db.sh
/usr/local/bin/node /usr/local/bin/cic-meta-server $@
# /usr/local/bin/node /usr/local/bin/cic-meta-server $@
# ./node_modules/ts-node/dist/bin.js", "./scripts/server/server.ts $@
npm run start "$@"

View File

@@ -10,7 +10,8 @@
"build-server": "tsc -d --outDir dist-server scripts/server/*.ts",
"pack": "node_modules/typescript/bin/tsc -d --outDir dist && webpack",
"clean": "rm -rf dist",
"prepare": "npm run build && npm run build-server"
"prepare": "npm run build && npm run build-server",
"start": "./node_modules/ts-node/dist/bin.js ./scripts/server/server.ts"
},
"dependencies": {
"@ethereumjs/tx": "^3.0.0-beta.1",

View File

@@ -1,15 +0,0 @@
#!/bin/bash
set -e
psql -v ON_ERROR_STOP=1 --username grassroots --dbname cic_meta <<-EOSQL
create table if not exists store (
id serial primary key not null,
owner_fingerprint text not null,
hash char(64) not null unique,
content text not null
);
create index if not exists idx_fp on store ((lower(owner_fingerprint)));
EOSQL

View File

@@ -1,4 +1,4 @@
create table if not exists cic_meta.store (
create table if not exists store (
id serial primary key not null,
owner_fingerprint text not null,
hash char(64) not null unique,

View File

@@ -20,7 +20,7 @@ def define_account_tx_metadata(user: Account):
)
key = generate_metadata_pointer(
identifier=identifier,
cic_type='cic.person'
cic_type=':cic.person'
)
account_metadata = get_cached_data(key=key)

View File

@@ -80,7 +80,7 @@ def get_cached_operational_balance(blockchain_address: str):
"""
key = create_cached_data_key(
identifier=bytes.fromhex(blockchain_address[2:]),
salt='cic.balances_data'
salt=':cic.balances_data'
)
cached_balance = get_cached_data(key=key)
if cached_balance:

View File

@@ -38,3 +38,13 @@ class MetadataStoreError(Exception):
pass
class SeppukuError(Exception):
"""Exception base class for all errors that should cause system shutdown"""
pass
class InitializationError(Exception):
"""Exception raised when initialization state is insufficient to run component"""
pass

View File

@@ -118,7 +118,7 @@ class MetadataRequestsHandler(Metadata):
metadata_http_error_handler(result=result)
response_data = result.content
data = json.loads(response_data.decode('utf-8'))
if result.status_code == 200 and self.cic_type == 'cic.person':
if result.status_code == 200 and self.cic_type == ':cic.person':
person = Person()
deserialized_person = person.deserialize(person_data=json.loads(data))
data = json.dumps(deserialized_person.serialize())

View File

@@ -9,4 +9,4 @@ from .base import MetadataRequestsHandler
class PersonMetadata(MetadataRequestsHandler):
def __init__(self, identifier: bytes):
super().__init__(cic_type='cic.person', identifier=identifier)
super().__init__(cic_type=':cic.person', identifier=identifier)

View File

@@ -10,4 +10,4 @@ from .base import MetadataRequestsHandler
class PhonePointerMetadata(MetadataRequestsHandler):
def __init__(self, identifier: bytes):
super().__init__(cic_type='cic.msisdn', identifier=identifier)
super().__init__(cic_type=':cic.phone', identifier=identifier)

View File

@@ -48,10 +48,9 @@ def define_response_with_content(headers: list, response: str) -> tuple:
content_length_header = ('Content-Length', str(content_length))
# check for content length defaulted to zero in error headers
for position, header in enumerate(headers):
if header[0] == 'Content-Length':
headers[position] = content_length_header
else:
headers.append(content_length_header)
if 'Content-Length' in header:
headers.pop(position)
headers.append(content_length_header)
return response_bytes, headers
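
The rewrite replaces the old per-element if/else, which appended a duplicate Content-Length entry for every non-matching header, with a pop-and-append that leaves exactly one recalculated entry. A usage sketch, assuming the plain-text error headers the servers in this commit define:

headers = [('Content-Type', 'text/plain'), ('Content-Length', '0')]
response_bytes, headers = define_response_with_content(headers=headers, response='OK')
# headers now carries a single recalculated ('Content-Length', '2') entry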

View File

@@ -7,6 +7,7 @@ from typing import Optional
# third party imports
import celery
from sqlalchemy import desc
from cic_eth.api import Api
from tinydb.table import Document
# local imports
@@ -15,7 +16,7 @@ from cic_ussd.balance import BalanceManager, compute_operational_balance, get_ca
from cic_ussd.chain import Chain
from cic_ussd.db.models.account import AccountStatus, Account
from cic_ussd.db.models.ussd_session import UssdSession
from cic_ussd.error import MetadataNotFoundError
from cic_ussd.error import MetadataNotFoundError, SeppukuError
from cic_ussd.menu.ussd_menu import UssdMenu
from cic_ussd.metadata import blockchain_address_to_metadata_pointer
from cic_ussd.phone_number import get_user_by_phone_number
@@ -28,6 +29,38 @@ from cic_types.models.person import generate_metadata_pointer, get_contact_data_
logg = logging.getLogger(__name__)
def get_default_token_data():
chain_str = Chain.spec.__str__()
cic_eth_api = Api(chain_str=chain_str)
default_token_request_task = cic_eth_api.default_token()
default_token_data = default_token_request_task.get()
return default_token_data
def retrieve_token_symbol(chain_str: str = Chain.spec.__str__()):
"""
:param chain_str: chain spec string representation
:type chain_str: str
:return: default token symbol
:rtype: str
"""
cache_key = create_cached_data_key(
identifier=chain_str.encode('utf-8'),
salt=':cic.default_token_data'
)
cached_data = get_cached_data(key=cache_key)
if cached_data:
default_token_data = json.loads(cached_data)
return default_token_data.get('symbol')
else:
logg.warning('Cached default token data not found. Attempting retrieval from default token API')
default_token_data = get_default_token_data()
if default_token_data:
return default_token_data.get('symbol')
else:
raise SeppukuError(f'Could not retrieve default token for: {chain_str}')
def process_pin_authorization(display_key: str, user: Account, **kwargs) -> str:
"""
This method provides translation for all ussd menu entries that follow the pin authorization pattern.
@@ -73,7 +106,9 @@ def process_exit_insufficient_balance(display_key: str, user: Account, ussd_sess
# compile response data
user_input = ussd_session.get('user_input').split('*')[-1]
transaction_amount = to_wei(value=int(user_input))
token_symbol = 'SRF'
# get default data
token_symbol = retrieve_token_symbol()
recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
@@ -102,7 +137,7 @@ def process_exit_successful_transaction(display_key: str, user: Account, ussd_se
:rtype: str
"""
transaction_amount = to_wei(int(ussd_session.get('session_data').get('transaction_amount')))
token_symbol = 'SRF'
token_symbol = retrieve_token_symbol()
recipient_phone_number = ussd_session.get('session_data').get('recipient_phone_number')
recipient = get_user_by_phone_number(phone_number=recipient_phone_number)
tx_recipient_information = define_account_tx_metadata(user=recipient)
@@ -137,7 +172,7 @@ def process_transaction_pin_authorization(user: Account, display_key: str, ussd_
tx_recipient_information = define_account_tx_metadata(user=recipient)
tx_sender_information = define_account_tx_metadata(user=user)
token_symbol = 'SRF'
token_symbol = retrieve_token_symbol()
user_input = ussd_session.get('session_data').get('transaction_amount')
transaction_amount = to_wei(value=int(user_input))
logg.debug('Requires integration to determine user tokens.')
@@ -168,18 +203,18 @@ def process_account_balances(user: Account, display_key: str, ussd_session: dict
logg.debug('Requires call to retrieve tax and bonus amounts')
tax = ''
bonus = ''
token_symbol = retrieve_token_symbol()
return translation_for(
key=display_key,
preferred_language=user.preferred_language,
operational_balance=operational_balance,
tax=tax,
bonus=bonus,
token_symbol='SRF'
token_symbol=token_symbol
)
def format_transactions(transactions: list, preferred_language: str):
def format_transactions(transactions: list, preferred_language: str, token_symbol: str):
formatted_transactions = ''
if len(transactions) > 0:
@@ -190,7 +225,7 @@ def format_transactions(transactions: list, preferred_language: str):
timestamp = transaction.get('timestamp')
action_tag = transaction.get('action_tag')
direction = transaction.get('direction')
token_symbol = 'SRF'
token_symbol = token_symbol
if action_tag == 'SENT' or action_tag == 'ULITUMA':
formatted_transactions += f'{action_tag} {value} {token_symbol} {direction} {recipient_phone_number} {timestamp}.\n'
@@ -214,7 +249,7 @@ def process_display_user_metadata(user: Account, display_key: str):
"""
key = generate_metadata_pointer(
identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
cic_type='cic.person'
cic_type=':cic.person'
)
user_metadata = get_cached_data(key)
if user_metadata:
@@ -251,9 +286,11 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
"""
# retrieve cached statement
identifier = blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address)
key = create_cached_data_key(identifier=identifier, salt='cic.statement')
key = create_cached_data_key(identifier=identifier, salt=':cic.statement')
transactions = get_cached_data(key=key)
token_symbol = retrieve_token_symbol()
first_transaction_set = []
middle_transaction_set = []
last_transaction_set = []
@@ -277,7 +314,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
preferred_language=user.preferred_language,
first_transaction_set=format_transactions(
transactions=first_transaction_set,
preferred_language=user.preferred_language
preferred_language=user.preferred_language,
token_symbol=token_symbol
)
)
elif display_key == 'ussd.kenya.middle_transaction_set':
@@ -286,7 +324,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
preferred_language=user.preferred_language,
middle_transaction_set=format_transactions(
transactions=middle_transaction_set,
preferred_language=user.preferred_language
preferred_language=user.preferred_language,
token_symbol=token_symbol
)
)
@@ -296,7 +335,8 @@ def process_account_statement(user: Account, display_key: str, ussd_session: dic
preferred_language=user.preferred_language,
last_transaction_set=format_transactions(
transactions=last_transaction_set,
preferred_language=user.preferred_language
preferred_language=user.preferred_language,
token_symbol=token_symbol
)
)
@@ -312,18 +352,19 @@ def process_start_menu(display_key: str, user: Account):
:return: Corresponding translation text response
:rtype: str
"""
token_symbol = retrieve_token_symbol()
chain_str = Chain.spec.__str__()
blockchain_address = user.blockchain_address
balance_manager = BalanceManager(address=blockchain_address,
chain_str=chain_str,
token_symbol='SRF')
token_symbol=token_symbol)
# get balances synchronously for display on start menu
balances_data = balance_manager.get_balances()
key = create_cached_data_key(
identifier=bytes.fromhex(blockchain_address[2:]),
salt='cic.balances_data'
salt=':cic.balances_data'
)
cache_data(key=key, data=json.dumps(balances_data))
@@ -340,9 +381,6 @@ def process_start_menu(display_key: str, user: Account):
# retrieve and cache account's statement
retrieve_account_statement(blockchain_address=blockchain_address)
# TODO [Philip]: figure out how to get token symbol from a metadata layer of sorts.
token_symbol = 'SRF'
return translation_for(
key=display_key,
preferred_language=user.preferred_language,
@@ -375,6 +413,13 @@ def process_request(user_input: str, user: Account, ussd_session: Optional[dict]
:return: A ussd menu's corresponding text value.
:rtype: Document
"""
# retrieve metadata before any transition
key = generate_metadata_pointer(
identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
cic_type=':cic.person'
)
person_metadata = get_cached_data(key=key)
if ussd_session:
if user_input == "0":
return UssdMenu.parent_menu(menu_name=ussd_session.get('state'))
@@ -385,12 +430,6 @@ def process_request(user_input: str, user: Account, ussd_session: Optional[dict]
if user.has_valid_pin():
last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number)
key = generate_metadata_pointer(
identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
cic_type='cic.person'
)
person_metadata = get_cached_data(key=key)
if last_ussd_session:
# get last state
last_state = last_ussd_session.state

View File

@@ -0,0 +1,73 @@
"""
This module handles requests originating from CICADA or any other management client for custodial wallets, processing
requests offering control of user account states to staff behind the client.
"""
# standard imports
import logging
from urllib.parse import quote_plus
# third-party imports
from confini import Config
# local imports
from cic_ussd.db import dsn_from_config
from cic_ussd.db.models.base import SessionBase
from cic_ussd.operations import define_response_with_content
from cic_ussd.requests import (get_request_endpoint,
get_query_parameters,
process_pin_reset_requests,
process_locked_accounts_requests)
from cic_ussd.runnable.server_base import exportable_parser, logg
args = exportable_parser.parse_args()
# define log levels
if args.vv:
logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
logging.getLogger().setLevel(logging.INFO)
# parse config
config = Config(config_dir=args.c, env_prefix=args.env_prefix)
config.process()
config.censor('PASSWORD', 'DATABASE')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
# set up db
data_source_name = dsn_from_config(config)
SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
# create session for the life time of http request
SessionBase.session = SessionBase.create_session()
# handle requests from CICADA
def application(env, start_response):
"""Loads python code for application to be accessible over web server
:param env: Object containing server and request information
:type env: dict
:param start_response: Callable to define responses.
:type start_response: any
:return: a list containing a bytes representation of the response object
:rtype: list
"""
# define headers
errors_headers = [('Content-Type', 'text/plain'), ('Content-Length', '0')]
headers = [('Content-Type', 'text/plain')]
if get_request_endpoint(env) == '/pin':
phone_number = get_query_parameters(env=env, query_name='phoneNumber')
phone_number = quote_plus(phone_number)
response, message = process_pin_reset_requests(env=env, phone_number=phone_number)
response_bytes, headers = define_response_with_content(headers=errors_headers, response=response)
SessionBase.session.close()
start_response(message, headers)
return [response_bytes]
# handle requests for locked accounts
response, message = process_locked_accounts_requests(env=env)
response_bytes, headers = define_response_with_content(headers=headers, response=response)
start_response(message, headers)
SessionBase.session.close()
return [response_bytes]
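
With the server running under uwsgi (the start script added later in this commit binds it to port 9500, mapped to 63415 in docker-compose), a management client exercises the two branches directly. A hypothetical smoke check (the phone number is a placeholder; any path other than /pin is routed to the locked-accounts handler, which may still validate it):

import requests

base = 'http://localhost:63415'
r = requests.get(base + '/pin', params={'phoneNumber': '+254700000000'})
print(r.status_code, r.text)
r = requests.get(base + '/accounts/locked')  # non-/pin path: locked-accounts branch
print(r.status_code, r.text)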

View File

@@ -1,25 +1,23 @@
"""Functions defining WSGI interaction with external http requests
Defines an application function essential for the uWSGI python loader to run the python application code.
"""This module handles requests originating from the ussd service provider.
"""
# standard imports
import argparse
import celery
import i18n
import json
import logging
import os
import redis
# third-party imports
from confini import Config
import celery
import i18n
import redis
from chainlib.chain import ChainSpec
from urllib.parse import quote_plus
from confini import Config
# local imports
from cic_ussd.chain import Chain
from cic_ussd.db import dsn_from_config
from cic_ussd.db.models.base import SessionBase
from cic_ussd.encoder import PasswordEncoder
from cic_ussd.error import InitializationError
from cic_ussd.files.local_files import create_local_file_data_stores, json_file_parser
from cic_ussd.menu.ussd_menu import UssdMenu
from cic_ussd.metadata.signer import Signer
@@ -28,34 +26,17 @@ from cic_ussd.operations import (define_response_with_content,
process_menu_interaction_requests,
define_multilingual_responses)
from cic_ussd.phone_number import process_phone_number
from cic_ussd.redis import InMemoryStore
from cic_ussd.processor import get_default_token_data
from cic_ussd.redis import cache_data, create_cached_data_key, InMemoryStore
from cic_ussd.requests import (get_request_endpoint,
get_request_method,
get_query_parameters,
process_locked_accounts_requests,
process_pin_reset_requests)
get_request_method)
from cic_ussd.runnable.server_base import exportable_parser, logg
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
from cic_ussd.state_machine import UssdStateMachine
from cic_ussd.validator import check_ip, check_request_content_length, check_service_code, validate_phone_number, \
validate_presence
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
config_directory = '/usr/local/etc/cic-ussd/'
# define arguments
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-c', type=str, default=config_directory, help='config directory.')
arg_parser.add_argument('-q', type=str, default='cic-ussd', help='queue name for worker tasks')
arg_parser.add_argument('-v', action='store_true', help='be verbose')
arg_parser.add_argument('-vv', action='store_true', help='be more verbose')
arg_parser.add_argument('--env-prefix',
default=os.environ.get('CONFINI_ENV_PREFIX'),
dest='env_prefix',
type=str,
help='environment prefix for variables to overwrite configuration')
args = arg_parser.parse_args()
args = exportable_parser.parse_args()
# define log levels
if args.vv:
@@ -69,7 +50,14 @@ config.process()
config.censor('PASSWORD', 'DATABASE')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
# initialize elements
# set up db
data_source_name = dsn_from_config(config)
SessionBase.connect(data_source_name,
pool_size=int(config.get('DATABASE_POOL_SIZE')),
debug=config.true('DATABASE_DEBUG'))
# create session for the life time of http request
SessionBase.session = SessionBase.create_session()
# set up translations
i18n.load_path.append(config.get('APP_LOCALE_PATH'))
i18n.set('fallback', config.get('APP_LOCALE_FALLBACK'))
@@ -82,12 +70,6 @@ ussd_menu_db = create_local_file_data_stores(file_location=config.get('USSD_MENU
table_name='ussd_menu')
UssdMenu.ussd_menu_db = ussd_menu_db
# set up db
data_source_name = dsn_from_config(config)
SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
# create session for the life time of http request
SessionBase.session = SessionBase.create_session()
# define universal redis cache access
InMemoryStore.cache = redis.StrictRedis(host=config.get('REDIS_HOSTNAME'),
port=config.get('REDIS_PORT'),
@@ -127,6 +109,20 @@ Chain.spec = chain_spec
UssdStateMachine.states = states
UssdStateMachine.transitions = transitions
# retrieve default token data
default_token_data = get_default_token_data()
chain_str = Chain.spec.__str__()
# cache default token for re-usability
if default_token_data:
cache_key = create_cached_data_key(
identifier=chain_str.encode('utf-8'),
salt=':cic.default_token_data'
)
cache_data(key=cache_key, data=json.dumps(default_token_data))
else:
raise InitializationError(f'Default token data for: {chain_str} not found.')
def application(env, start_response):
"""Loads python code for application to be accessible over web server
@@ -134,6 +130,8 @@ def application(env, start_response):
:type env: dict
:param start_response: Callable to define responses.
:type start_response: any
:return: a list containing a bytes representation of the response object
:rtype: list
"""
# define headers
errors_headers = [('Content-Type', 'text/plain'), ('Content-Length', '0')]
@@ -194,20 +192,3 @@ def application(env, start_response):
start_response('200 OK,', headers)
SessionBase.session.close()
return [response_bytes]
# handle pin requests
if get_request_endpoint(env) == '/pin':
phone_number = get_query_parameters(env=env, query_name='phoneNumber')
phone_number = quote_plus(phone_number)
response, message = process_pin_reset_requests(env=env, phone_number=phone_number)
response_bytes, headers = define_response_with_content(headers=errors_headers, response=response)
SessionBase.session.close()
start_response(message, headers)
return [response_bytes]
# handle requests for locked accounts
response, message = process_locked_accounts_requests(env=env)
response_bytes, headers = define_response_with_content(headers=headers, response=response)
start_response(message, headers)
SessionBase.session.close()
return [response_bytes]

View File

@@ -0,0 +1,38 @@
"""This module handles generic wsgi server configurations that can then be subsumed by different server flavors for the
cic-ussd component.
"""
# standard imports
import logging
import os
from argparse import ArgumentParser
# third-party imports
# local imports
# define a logging system
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()
# define default config directory as would be defined in docker
default_config_dir = '/usr/local/etc/cic-ussd/'
# define args parser
arg_parser = ArgumentParser(description='CLI for handling cic-ussd server applications.')
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
arg_parser.add_argument('-v', help='be verbose', action='store_true')
arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
arg_parser.add_argument('-q', type=str, default='cic-ussd', help='queue name for worker tasks')
arg_parser.add_argument('--env-prefix',
default=os.environ.get('CONFINI_ENV_PREFIX'),
dest='env_prefix',
type=str,
help='environment prefix for variables to overwrite configuration')
exportable_parser = arg_parser

View File

@@ -64,7 +64,7 @@ def has_sufficient_balance(state_machine_data: Tuple[str, dict, Account]) -> boo
# get cached balance
key = create_cached_data_key(
identifier=bytes.fromhex(user.blockchain_address[2:]),
salt='cic.balances_data'
salt=':cic.balances_data'
)
cached_balance = get_cached_data(key=key)
operational_balance = compute_operational_balance(balances=json.loads(cached_balance))

View File

@@ -176,7 +176,7 @@ def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, Account]):
blockchain_address = user.blockchain_address
key = generate_metadata_pointer(
identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
cic_type='cic.person'
cic_type=':cic.person'
)
user_metadata = get_cached_data(key=key)

View File

@@ -23,7 +23,7 @@ def has_cached_user_metadata(state_machine_data: Tuple[str, dict, Account]):
# check for user metadata in cache
key = generate_metadata_pointer(
identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
cic_type='cic.person'
cic_type=':cic.person'
)
user_metadata = get_cached_data(key=key)
return user_metadata is not None

View File

@@ -136,7 +136,7 @@ def process_balances_callback(result: list, param: str, status_code: int):
blockchain_address = balances_data.get('address')
key = create_cached_data_key(
identifier=bytes.fromhex(blockchain_address[2:]),
salt='cic.balances_data'
salt=':cic.balances_data'
)
cache_data(key=key, data=json.dumps(balances_data))
else:
@@ -226,7 +226,7 @@ def process_statement_callback(result, param: str, status_code: int):
# cache account statement
identifier = bytes.fromhex(param[2:])
key = create_cached_data_key(identifier=identifier, salt='cic.statement')
key = create_cached_data_key(identifier=identifier, salt=':cic.statement')
data = json.dumps(processed_transactions)
# cache statement data

View File

@@ -1,7 +1,7 @@
# standard imports
import semver
version = (0, 3, 0, 'alpha.9')
version = (0, 3, 0, 'alpha.10')
version_object = semver.VersionInfo(
major=version[0],

View File

@@ -38,8 +38,9 @@ COPY cic-ussd/transitions/ cic-ussd/transitions/
COPY cic-ussd/var/ cic-ussd/var/
COPY cic-ussd/docker/db.sh \
cic-ussd/docker/start_tasker.sh \
cic-ussd/docker/start_uwsgi.sh \
cic-ussd/docker/start_cic_user_tasker.sh \
cic-ussd/docker/start_cic_user_ussd_server.sh\
cic-ussd/docker/start_cic_user_server.sh\
/root/
RUN chmod +x /root/*.sh

View File

@@ -0,0 +1,7 @@
#!/bin/bash
. /root/db.sh
user_server_port=${SERVER_PORT:-9500}
/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/daemons/cic_user_server.py --http :"$user_server_port" --pyargv "$@"

View File

@@ -0,0 +1,5 @@
#!/bin/bash
. /root/db.sh
/usr/local/bin/cic-user-tasker "$@"

View File

@@ -0,0 +1,7 @@
#!/bin/bash
. /root/db.sh
user_ussd_server_port=${SERVER_PORT:-9000}
/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/daemons/cic_user_ussd_server.py --http :"$user_ussd_server_port" --pyargv "$@"

View File

@@ -1,5 +0,0 @@
#!/bin/bash
. /root/db.sh
/usr/local/bin/cic-ussd-tasker $@

View File

@@ -1,7 +0,0 @@
#!/bin/bash
. /root/db.sh
server_port=${SERVER_PORT:-9000}
/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/server.py --http :$server_port --pyargv "$@"

View File

@@ -35,6 +35,7 @@ packages =
cic_ussd.menu
cic_ussd.metadata
cic_ussd.runnable
cic_ussd.runnable.daemons
cic_ussd.session
cic_ussd.state_machine
cic_ussd.state_machine.logic
@@ -44,5 +45,5 @@ scripts =
[options.entry_points]
console_scripts =
cic-ussd-tasker = cic_ussd.runnable.tasker:main
cic-user-tasker = cic_ussd.runnable.daemons.cic_user_tasker:main
cic-ussd-client = cic_ussd.runnable.client:main

View File

@@ -105,7 +105,7 @@ def test_get_user_metadata(caplog,
assert 'Get latest data status: 200' in caplog.text
key = generate_metadata_pointer(
identifier=identifier,
cic_type='cic.person'
cic_type=':cic.person'
)
cached_user_metadata = get_cached_data(key=key)
assert cached_user_metadata

View File

@@ -36,7 +36,7 @@ def test_has_cached_user_metadata(create_in_db_ussd_session,
user = create_activated_user
key = generate_metadata_pointer(
identifier=blockchain_address_to_metadata_pointer(blockchain_address=user.blockchain_address),
cic_type='cic.person'
cic_type=':cic.person'
)
cache_data(key=key, data=json.dumps(person_metadata))
result = has_cached_user_metadata(state_machine_data=state_machine_data)

View File

@@ -115,6 +115,6 @@ def cached_user_metadata(create_activated_user, init_redis_cache, person_metadat
user_metadata = json.dumps(person_metadata)
key = generate_metadata_pointer(
identifier=blockchain_address_to_metadata_pointer(blockchain_address=create_activated_user.blockchain_address),
cic_type='cic.person'
cic_type=':cic.person'
)
cache_data(key=key, data=user_metadata)

View File

@@ -31,15 +31,6 @@ RUN echo Install confini schema files && \
git checkout $cic_config_commit && \
cp -v *.ini $CONFINI_DIR
ARG cic_contracts_commit=698ef3a30fde8d7f2c498f1208fb0ff45d665501
ARG cic_contracts_url=https://gitlab.com/grassrootseconomics/cic-contracts.git/
RUN echo Install ABI collection for solidity interfaces used across all components && \
git clone --depth 1 $cic_contracts_url cic-contracts && \
cd cic-contracts && \
git fetch --depth 1 origin $cic_contracts_commit && \
git checkout $cic_contracts_commit && \
make install
# Install nvm with node and npm
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
ENV NVM_DIR /root/.nvm
@@ -64,9 +55,9 @@ ARG pip_extra_index_url=https://pip.grassrootseconomics.net:8433
ARG cic_base_version=0.1.2a79
ARG cic_eth_version=0.11.0b8+build.c2286e5c
ARG sarafu_faucet_version=0.0.2a28
ARG sarafu_token_version==0.0.1a6
ARG sarafu_token_version=0.0.1a6
ARG cic_contracts_version=0.0.2a2
RUN pip install --user --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version \
RUN pip install --user --index-url https://pypi.org/simple --extra-index-url $pip_extra_index_url cic-base[full_graph]==$cic_base_version \
cic-eth==$cic_eth_version \
cic-contracts==$cic_contracts_version \
sarafu-faucet==$sarafu_faucet_version \

View File

@@ -53,8 +53,6 @@ services:
command: [ "-c", "max_connections=200" ]
volumes:
- ./scripts/initdb/create_db.sql:/docker-entrypoint-initdb.d/1-create_all_db.sql
- ./apps/cic-meta/scripts/initdb/postgresql.sh:/docker-entrypoint-initdb.d/2-init-cic-meta.sh
- ./apps/cic-cache/db/psycopg2/db.sql:/docker-entrypoint-initdb.d/3-init-cic-meta.sql
- postgres-db:/var/lib/postgresql/data
redis:
@@ -477,6 +475,7 @@ services:
PGP_PUBLICKEY_TRUSTED_FILE: publickeys.asc
PGP_PUBLICKEY_ACTIVE_FILE: publickeys.asc
PGP_PUBLICKEY_ENCRYPT_FILE: publickeys.asc
SCHEMA_SQL_PATH: scripts/initdb/server.postgres.sql
ports:
- ${HTTP_PORT_CIC_META:-63380}:8000
depends_on:
@@ -488,8 +487,7 @@ services:
- ${LOCAL_VOLUME_DIR:-/tmp/cic}/pgp:/tmp/cic/pgp
# command: "/root/start_server.sh -vv"
cic-ussd-server:
# image: grassrootseconomics:cic-ussd
cic-user-ussd-server:
build:
context: apps/
dockerfile: cic-ussd/docker/Dockerfile
@@ -507,7 +505,7 @@ services:
SERVER_PORT: 9000
CIC_META_URL: ${CIC_META_URL:-http://meta:8000}
ports:
- ${HTTP_PORT_CIC_USSD:-63315}:9000
- ${HTTP_PORT_CIC_USER_USSD_SERVER:-63315}:9000
depends_on:
- postgres
- redis
@@ -516,10 +514,31 @@ services:
deploy:
restart_policy:
condition: on-failure
command: "/root/start_uwsgi.sh -vv"
command: "/root/start_cic_user_ussd_server.sh -vv"
cic-ussd-tasker:
# image: grassrootseconomics:cic-ussd
cic-user-server:
build:
context: apps
dockerfile: cic-ussd/docker/Dockerfile
environment:
DATABASE_USER: grassroots
DATABASE_HOST: postgres
DATABASE_PORT: 5432
DATABASE_PASSWORD: tralala
DATABASE_NAME: cic_ussd
DATABASE_ENGINE: postgresql
DATABASE_DRIVER: psycopg2
DATABASE_POOL_SIZE: 0
ports:
- ${HTTP_PORT_CIC_USER_SERVER:-63415}:9500
depends_on:
- postgres
deploy:
restart_policy:
condition: on-failure
command: "/root/start_cic_user_server.sh -vv"
cic-user-tasker:
build:
context: apps
dockerfile: cic-ussd/docker/Dockerfile
@@ -544,4 +563,4 @@ services:
deploy:
restart_policy:
condition: on-failure
command: "/root/start_tasker.sh -q cic-ussd -vv"
command: "/root/start_cic_user_tasker.sh -q cic-ussd -vv"