Implement migration script with ussd and notify
This commit is contained in:
parent
5a4e0b8eba
commit
7b16a36a62
@ -10,6 +10,7 @@ from sqlalchemy.pool import (
|
|||||||
StaticPool,
|
StaticPool,
|
||||||
QueuePool,
|
QueuePool,
|
||||||
AssertionPool,
|
AssertionPool,
|
||||||
|
NullPool,
|
||||||
)
|
)
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
@ -64,6 +65,7 @@ class SessionBase(Model):
|
|||||||
if SessionBase.poolable:
|
if SessionBase.poolable:
|
||||||
poolclass = QueuePool
|
poolclass = QueuePool
|
||||||
if pool_size > 1:
|
if pool_size > 1:
|
||||||
|
logg.info('db using queue pool')
|
||||||
e = create_engine(
|
e = create_engine(
|
||||||
dsn,
|
dsn,
|
||||||
max_overflow=pool_size*3,
|
max_overflow=pool_size*3,
|
||||||
@ -74,17 +76,22 @@ class SessionBase(Model):
|
|||||||
echo=debug,
|
echo=debug,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
if debug:
|
if pool_size == 0:
|
||||||
|
logg.info('db using nullpool')
|
||||||
|
poolclass = NullPool
|
||||||
|
elif debug:
|
||||||
|
logg.info('db using assertion pool')
|
||||||
poolclass = AssertionPool
|
poolclass = AssertionPool
|
||||||
else:
|
else:
|
||||||
|
logg.info('db using static pool')
|
||||||
poolclass = StaticPool
|
poolclass = StaticPool
|
||||||
|
|
||||||
e = create_engine(
|
e = create_engine(
|
||||||
dsn,
|
dsn,
|
||||||
poolclass=poolclass,
|
poolclass=poolclass,
|
||||||
echo=debug,
|
echo=debug,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
|
logg.info('db not poolable')
|
||||||
e = create_engine(
|
e = create_engine(
|
||||||
dsn,
|
dsn,
|
||||||
echo=debug,
|
echo=debug,
|
||||||
|
@ -91,7 +91,7 @@ logg.debug('config loaded from {}:\n{}'.format(args.c, config))
|
|||||||
|
|
||||||
# connect to database
|
# connect to database
|
||||||
dsn = dsn_from_config(config)
|
dsn = dsn_from_config(config)
|
||||||
SessionBase.connect(dsn, pool_size=50, debug=config.true('DATABASE_DEBUG'))
|
SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
|
||||||
|
|
||||||
# verify database connection with minimal sanity query
|
# verify database connection with minimal sanity query
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
|
@ -6,4 +6,5 @@ HOST=localhost
|
|||||||
PORT=5432
|
PORT=5432
|
||||||
ENGINE=postgresql
|
ENGINE=postgresql
|
||||||
DRIVER=psycopg2
|
DRIVER=psycopg2
|
||||||
|
POOL_SIZE=50
|
||||||
DEBUG=0
|
DEBUG=0
|
||||||
|
@ -6,4 +6,5 @@ HOST=localhost
|
|||||||
PORT=63432
|
PORT=63432
|
||||||
ENGINE=postgresql
|
ENGINE=postgresql
|
||||||
DRIVER=psycopg2
|
DRIVER=psycopg2
|
||||||
|
POOL_SIZE=50
|
||||||
DEBUG=0
|
DEBUG=0
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "cic-client-meta",
|
"name": "cic-client-meta",
|
||||||
"version": "0.0.7-alpha.3",
|
"version": "0.0.7-alpha.6",
|
||||||
"description": "Signed CRDT metadata graphs for the CIC network",
|
"description": "Signed CRDT metadata graphs for the CIC network",
|
||||||
"main": "dist/index.js",
|
"main": "dist/index.js",
|
||||||
"types": "dist/index.d.ts",
|
"types": "dist/index.d.ts",
|
||||||
@ -40,6 +40,6 @@
|
|||||||
],
|
],
|
||||||
"license": "GPL-3.0-or-later",
|
"license": "GPL-3.0-or-later",
|
||||||
"engines": {
|
"engines": {
|
||||||
"node": "~15.3.0"
|
"node": "~14.16.1"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -114,6 +114,7 @@ async function processRequest(req, res) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
if (!['PUT', 'GET', 'POST'].includes(req.method)) {
|
if (!['PUT', 'GET', 'POST'].includes(req.method)) {
|
||||||
res.writeHead(405, {"Content-Type": "text/plain"});
|
res.writeHead(405, {"Content-Type": "text/plain"});
|
||||||
res.end();
|
res.end();
|
||||||
@ -123,6 +124,7 @@ async function processRequest(req, res) {
|
|||||||
try {
|
try {
|
||||||
digest = parseDigest(req.url);
|
digest = parseDigest(req.url);
|
||||||
} catch(e) {
|
} catch(e) {
|
||||||
|
console.error('digest error: ' + e)
|
||||||
res.writeHead(400, {"Content-Type": "text/plain"});
|
res.writeHead(400, {"Content-Type": "text/plain"});
|
||||||
res.end();
|
res.end();
|
||||||
return;
|
return;
|
||||||
|
@ -1,12 +1,12 @@
|
|||||||
import { ArgPair, Syncable } from '../sync';
|
import { ArgPair, Syncable } from '../sync';
|
||||||
import { Addressable, addressToBytes, bytesToHex, toKey } from '../digest';
|
import { Addressable, mergeKey } from '../digest';
|
||||||
|
|
||||||
class Phone extends Syncable implements Addressable {
|
class Phone extends Syncable implements Addressable {
|
||||||
|
|
||||||
address: string
|
address: string
|
||||||
value: number
|
value: number
|
||||||
|
|
||||||
constructor(address:string, v:number) {
|
constructor(address:string, v:string) {
|
||||||
const o = {
|
const o = {
|
||||||
msisdn: v,
|
msisdn: v,
|
||||||
}
|
}
|
||||||
@ -17,8 +17,8 @@ class Phone extends Syncable implements Addressable {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
public static async toKey(msisdn:number) {
|
public static async toKey(msisdn:string) {
|
||||||
return await toKey(msisdn.toString(), ':cic.msisdn');
|
return await mergeKey(Buffer.from(msisdn), Buffer.from(':cic.phone'));
|
||||||
}
|
}
|
||||||
|
|
||||||
public key(): string {
|
public key(): string {
|
||||||
|
@ -61,6 +61,7 @@ function addressToBytes(s:string) {
|
|||||||
export {
|
export {
|
||||||
toKey,
|
toKey,
|
||||||
toAddressKey,
|
toAddressKey,
|
||||||
|
mergeKey,
|
||||||
bytesToHex,
|
bytesToHex,
|
||||||
addressToBytes,
|
addressToBytes,
|
||||||
Addressable,
|
Addressable,
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
FROM python:3.8.6
|
FROM python:3.8.6-slim-buster
|
||||||
|
|
||||||
RUN apt-get update && \
|
RUN apt-get update && \
|
||||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps
|
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps
|
||||||
|
@ -1,11 +1,14 @@
|
|||||||
[app]
|
[app]
|
||||||
ALLOWED_IP=127.0.0.1
|
ALLOWED_IP=0.0.0.0/0
|
||||||
LOCALE_FALLBACK=en
|
LOCALE_FALLBACK=en
|
||||||
LOCALE_PATH=var/lib/locale/
|
LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/
|
||||||
MAX_BODY_LENGTH=1024
|
MAX_BODY_LENGTH=1024
|
||||||
PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I=
|
PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I=
|
||||||
SERVICE_CODE=*483*46#
|
SERVICE_CODE=*483*46#
|
||||||
|
|
||||||
|
[phone_number]
|
||||||
|
REGION=KE
|
||||||
|
|
||||||
[ussd]
|
[ussd]
|
||||||
MENU_FILE=/usr/src/data/ussd_menu.json
|
MENU_FILE=/usr/src/data/ussd_menu.json
|
||||||
|
|
||||||
|
@ -6,3 +6,5 @@ HOST=localhost
|
|||||||
PORT=5432
|
PORT=5432
|
||||||
ENGINE=postgresql
|
ENGINE=postgresql
|
||||||
DRIVER=psycopg2
|
DRIVER=psycopg2
|
||||||
|
DEBUG=0
|
||||||
|
POOL_SIZE=1
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
[celery]
|
[celery]
|
||||||
BROKER_URL=redis://
|
BROKER_URL=redis://redis:6379
|
||||||
RESULT_URL=redis://
|
RESULT_URL=redis://redis:6379
|
||||||
|
|
||||||
[redis]
|
[redis]
|
||||||
HOSTNAME=localhost
|
HOSTNAME=redis
|
||||||
PASSWORD=
|
PASSWORD=
|
||||||
PORT=6379
|
PORT=6379
|
||||||
DATABASE=0
|
DATABASE=0
|
||||||
|
@ -1,47 +1,129 @@
|
|||||||
# standard imports
|
# stanard imports
|
||||||
|
import logging
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
from sqlalchemy import Column, Integer, DateTime
|
from sqlalchemy import Column, Integer, DateTime
|
||||||
from sqlalchemy.ext.declarative import declarative_base
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
from sqlalchemy import create_engine
|
from sqlalchemy import create_engine
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
from sqlalchemy.pool import (
|
||||||
|
StaticPool,
|
||||||
|
QueuePool,
|
||||||
|
AssertionPool,
|
||||||
|
NullPool,
|
||||||
|
)
|
||||||
|
|
||||||
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
Model = declarative_base(name='Model')
|
Model = declarative_base(name='Model')
|
||||||
|
|
||||||
|
|
||||||
class SessionBase(Model):
|
class SessionBase(Model):
|
||||||
|
"""The base object for all SQLAlchemy enabled models. All other models must extend this.
|
||||||
|
"""
|
||||||
__abstract__ = True
|
__abstract__ = True
|
||||||
|
|
||||||
id = Column(Integer, primary_key=True)
|
|
||||||
created = Column(DateTime, default=datetime.datetime.utcnow)
|
created = Column(DateTime, default=datetime.datetime.utcnow)
|
||||||
updated = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
|
updated = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
|
||||||
|
|
||||||
|
id = Column(Integer, primary_key=True)
|
||||||
|
|
||||||
engine = None
|
engine = None
|
||||||
session = None
|
"""Database connection engine of the running aplication"""
|
||||||
query = None
|
sessionmaker = None
|
||||||
|
"""Factory object responsible for creating sessions from the connection pool"""
|
||||||
|
transactional = True
|
||||||
|
"""Whether the database backend supports query transactions. Should be explicitly set by initialization code"""
|
||||||
|
poolable = True
|
||||||
|
"""Whether the database backend supports connection pools. Should be explicitly set by initialization code"""
|
||||||
|
procedural = True
|
||||||
|
"""Whether the database backend supports stored procedures"""
|
||||||
|
localsessions = {}
|
||||||
|
"""Contains dictionary of sessions initiated by db model components"""
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def create_session():
|
def create_session():
|
||||||
session = sessionmaker(bind=SessionBase.engine)
|
"""Creates a new database session.
|
||||||
return session()
|
"""
|
||||||
|
return SessionBase.sessionmaker()
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def _set_engine(engine):
|
def _set_engine(engine):
|
||||||
|
"""Sets the database engine static property
|
||||||
|
"""
|
||||||
SessionBase.engine = engine
|
SessionBase.engine = engine
|
||||||
|
SessionBase.sessionmaker = sessionmaker(bind=SessionBase.engine)
|
||||||
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def build():
|
def connect(dsn, pool_size=16, debug=False):
|
||||||
Model.metadata.create_all(bind=SessionBase.engine)
|
"""Create new database connection engine and connect to database backend.
|
||||||
|
|
||||||
|
:param dsn: DSN string defining connection.
|
||||||
|
:type dsn: str
|
||||||
|
"""
|
||||||
|
e = None
|
||||||
|
if SessionBase.poolable:
|
||||||
|
poolclass = QueuePool
|
||||||
|
if pool_size > 1:
|
||||||
|
logg.info('db using queue pool')
|
||||||
|
e = create_engine(
|
||||||
|
dsn,
|
||||||
|
max_overflow=pool_size*3,
|
||||||
|
pool_pre_ping=True,
|
||||||
|
pool_size=pool_size,
|
||||||
|
pool_recycle=60,
|
||||||
|
poolclass=poolclass,
|
||||||
|
echo=debug,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
if pool_size == 0:
|
||||||
|
poolclass = NullPool
|
||||||
|
elif debug:
|
||||||
|
poolclass = AssertionPool
|
||||||
|
else:
|
||||||
|
poolclass = StaticPool
|
||||||
|
e = create_engine(
|
||||||
|
dsn,
|
||||||
|
poolclass=poolclass,
|
||||||
|
echo=debug,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logg.info('db connection not poolable')
|
||||||
|
e = create_engine(
|
||||||
|
dsn,
|
||||||
|
echo=debug,
|
||||||
|
)
|
||||||
|
|
||||||
|
SessionBase._set_engine(e)
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
# https://docs.sqlalchemy.org/en/13/core/pooling.html#pool-disconnects
|
|
||||||
def connect(data_source_name):
|
|
||||||
engine = create_engine(data_source_name, pool_pre_ping=True)
|
|
||||||
SessionBase._set_engine(engine)
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def disconnect():
|
def disconnect():
|
||||||
|
"""Disconnect from database and free resources.
|
||||||
|
"""
|
||||||
SessionBase.engine.dispose()
|
SessionBase.engine.dispose()
|
||||||
SessionBase.engine = None
|
SessionBase.engine = None
|
||||||
|
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def bind_session(session=None):
|
||||||
|
localsession = session
|
||||||
|
if localsession == None:
|
||||||
|
localsession = SessionBase.create_session()
|
||||||
|
localsession_key = str(id(localsession))
|
||||||
|
logg.debug('creating new session {}'.format(localsession_key))
|
||||||
|
SessionBase.localsessions[localsession_key] = localsession
|
||||||
|
return localsession
|
||||||
|
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def release_session(session=None):
|
||||||
|
session_key = str(id(session))
|
||||||
|
if SessionBase.localsessions.get(session_key) != None:
|
||||||
|
logg.debug('commit and destroy session {}'.format(session_key))
|
||||||
|
session.commit()
|
||||||
|
session.close()
|
||||||
|
@ -18,7 +18,7 @@ class ActionDataNotFoundError(OSError):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
class UserMetadataNotFoundError(OSError):
|
class MetadataNotFoundError(OSError):
|
||||||
"""Raised when metadata is expected but not available in cache."""
|
"""Raised when metadata is expected but not available in cache."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@ -31,3 +31,10 @@ class UnsupportedMethodError(OSError):
|
|||||||
class CachedDataNotFoundError(OSError):
|
class CachedDataNotFoundError(OSError):
|
||||||
"""Raised when the method passed to the make request function is unsupported."""
|
"""Raised when the method passed to the make request function is unsupported."""
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class MetadataStoreError(Exception):
|
||||||
|
"""Raised when metadata storage fails"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
@ -3,7 +3,10 @@
|
|||||||
# third-party imports
|
# third-party imports
|
||||||
import requests
|
import requests
|
||||||
from chainlib.eth.address import to_checksum
|
from chainlib.eth.address import to_checksum
|
||||||
from hexathon import add_0x
|
from hexathon import (
|
||||||
|
add_0x,
|
||||||
|
strip_0x,
|
||||||
|
)
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.error import UnsupportedMethodError
|
from cic_ussd.error import UnsupportedMethodError
|
||||||
@ -40,4 +43,4 @@ def blockchain_address_to_metadata_pointer(blockchain_address: str):
|
|||||||
:return:
|
:return:
|
||||||
:rtype:
|
:rtype:
|
||||||
"""
|
"""
|
||||||
return bytes.fromhex(blockchain_address[2:])
|
return bytes.fromhex(strip_0x(blockchain_address))
|
||||||
|
7
apps/cic-ussd/cic_ussd/metadata/base.py
Normal file
7
apps/cic-ussd/cic_ussd/metadata/base.py
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
class Metadata:
|
||||||
|
"""
|
||||||
|
:cvar base_url:
|
||||||
|
:type base_url:
|
||||||
|
"""
|
||||||
|
|
||||||
|
base_url = None
|
85
apps/cic-ussd/cic_ussd/metadata/phone.py
Normal file
85
apps/cic-ussd/cic_ussd/metadata/phone.py
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
# standard imports
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import requests
|
||||||
|
from cic_types.models.person import generate_metadata_pointer
|
||||||
|
from cic_ussd.metadata import make_request
|
||||||
|
from cic_ussd.metadata.signer import Signer
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_ussd.error import MetadataStoreError
|
||||||
|
from .base import Metadata
|
||||||
|
|
||||||
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class PhonePointerMetadata(Metadata):
|
||||||
|
|
||||||
|
def __init__(self, identifier: bytes, engine: str):
|
||||||
|
"""
|
||||||
|
:param identifier:
|
||||||
|
:type identifier:
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.headers = {
|
||||||
|
'X-CIC-AUTOMERGE': 'server',
|
||||||
|
'Content-Type': 'application/json'
|
||||||
|
}
|
||||||
|
self.identifier = identifier
|
||||||
|
self.metadata_pointer = generate_metadata_pointer(
|
||||||
|
identifier=self.identifier,
|
||||||
|
cic_type=':cic.phone'
|
||||||
|
)
|
||||||
|
if self.base_url:
|
||||||
|
self.url = os.path.join(self.base_url, self.metadata_pointer)
|
||||||
|
|
||||||
|
self.engine = engine
|
||||||
|
|
||||||
|
|
||||||
|
def create(self, data: str):
|
||||||
|
try:
|
||||||
|
data = json.dumps(data).encode('utf-8')
|
||||||
|
result = make_request(method='POST', url=self.url, data=data, headers=self.headers)
|
||||||
|
metadata = result.content
|
||||||
|
logg.debug('data {} meta {} resp {} stats {}'.format(data, metadata, result.reason, result.status_code))
|
||||||
|
self.edit(data=metadata, engine=self.engine)
|
||||||
|
result.raise_for_status()
|
||||||
|
except requests.exceptions.HTTPError as error:
|
||||||
|
raise MetadataStoreError(error)
|
||||||
|
|
||||||
|
|
||||||
|
def edit(self, data: bytes, engine: str):
|
||||||
|
"""
|
||||||
|
:param data:
|
||||||
|
:type data:
|
||||||
|
:param engine:
|
||||||
|
:type engine:
|
||||||
|
:return:
|
||||||
|
:rtype:
|
||||||
|
"""
|
||||||
|
cic_meta_signer = Signer()
|
||||||
|
signature = cic_meta_signer.sign_digest(data=data)
|
||||||
|
algorithm = cic_meta_signer.get_operational_key().get('algo')
|
||||||
|
decoded_data = data.decode('utf-8')
|
||||||
|
formatted_data = {
|
||||||
|
'm': decoded_data,
|
||||||
|
's': {
|
||||||
|
'engine': engine,
|
||||||
|
'algo': algorithm,
|
||||||
|
'data': signature,
|
||||||
|
'digest': json.loads(data).get('digest'),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
formatted_data = json.dumps(formatted_data).encode('utf-8')
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = make_request(method='PUT', url=self.url, data=formatted_data, headers=self.headers)
|
||||||
|
logg.debug(f'signed phone pointer metadata submission status: {result.status_code}.')
|
||||||
|
result.raise_for_status()
|
||||||
|
logg.info('phone {} metadata pointer {} set to {}'.format(self.identifier.decode('utf-8'), self.metadata_pointer, decoded_data))
|
||||||
|
except requests.exceptions.HTTPError as error:
|
||||||
|
raise MetadataStoreError(error)
|
||||||
|
|
@ -44,7 +44,7 @@ class Signer:
|
|||||||
gpg_keys = self.gpg.list_keys()
|
gpg_keys = self.gpg.list_keys()
|
||||||
key_algorithm = gpg_keys[0].get('algo')
|
key_algorithm = gpg_keys[0].get('algo')
|
||||||
key_id = gpg_keys[0].get("keyid")
|
key_id = gpg_keys[0].get("keyid")
|
||||||
logg.info(f'using signing key: {key_id}, algorithm: {key_algorithm}')
|
logg.debug(f'using signing key: {key_id}, algorithm: {key_algorithm}')
|
||||||
return gpg_keys[0]
|
return gpg_keys[0]
|
||||||
|
|
||||||
def sign_digest(self, data: bytes):
|
def sign_digest(self, data: bytes):
|
||||||
|
@ -12,30 +12,28 @@ from cic_ussd.chain import Chain
|
|||||||
from cic_ussd.metadata import make_request
|
from cic_ussd.metadata import make_request
|
||||||
from cic_ussd.metadata.signer import Signer
|
from cic_ussd.metadata.signer import Signer
|
||||||
from cic_ussd.redis import cache_data
|
from cic_ussd.redis import cache_data
|
||||||
|
from cic_ussd.error import MetadataStoreError
|
||||||
|
from .base import Metadata
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
class UserMetadata:
|
class UserMetadata(Metadata):
|
||||||
"""
|
|
||||||
:cvar base_url:
|
|
||||||
:type base_url:
|
|
||||||
"""
|
|
||||||
base_url = None
|
|
||||||
|
|
||||||
def __init__(self, identifier: bytes):
|
def __init__(self, identifier: bytes):
|
||||||
"""
|
"""
|
||||||
:param identifier:
|
:param identifier:
|
||||||
:type identifier:
|
:type identifier:
|
||||||
"""
|
"""
|
||||||
self. headers = {
|
self.headers = {
|
||||||
'X-CIC-AUTOMERGE': 'server',
|
'X-CIC-AUTOMERGE': 'server',
|
||||||
'Content-Type': 'application/json'
|
'Content-Type': 'application/json'
|
||||||
}
|
}
|
||||||
self.identifier = identifier
|
self.identifier = identifier
|
||||||
self.metadata_pointer = generate_metadata_pointer(
|
self.metadata_pointer = generate_metadata_pointer(
|
||||||
identifier=self.identifier,
|
identifier=self.identifier,
|
||||||
cic_type='cic.person'
|
cic_type=':cic.person'
|
||||||
)
|
)
|
||||||
if self.base_url:
|
if self.base_url:
|
||||||
self.url = os.path.join(self.base_url, self.metadata_pointer)
|
self.url = os.path.join(self.base_url, self.metadata_pointer)
|
||||||
@ -49,7 +47,7 @@ class UserMetadata:
|
|||||||
logg.info(f'Get sign material response status: {result.status_code}')
|
logg.info(f'Get sign material response status: {result.status_code}')
|
||||||
result.raise_for_status()
|
result.raise_for_status()
|
||||||
except requests.exceptions.HTTPError as error:
|
except requests.exceptions.HTTPError as error:
|
||||||
raise RuntimeError(error)
|
raise MetadataStoreError(error)
|
||||||
|
|
||||||
def edit(self, data: bytes, engine: str):
|
def edit(self, data: bytes, engine: str):
|
||||||
"""
|
"""
|
||||||
@ -76,10 +74,10 @@ class UserMetadata:
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
result = make_request(method='PUT', url=self.url, data=formatted_data, headers=self.headers)
|
result = make_request(method='PUT', url=self.url, data=formatted_data, headers=self.headers)
|
||||||
logg.info(f'Signed content submission status: {result.status_code}.')
|
logg.debug(f'signed user metadata submission status: {result.status_code}.')
|
||||||
result.raise_for_status()
|
result.raise_for_status()
|
||||||
except requests.exceptions.HTTPError as error:
|
except requests.exceptions.HTTPError as error:
|
||||||
raise RuntimeError(error)
|
raise MetadataStoreError(error)
|
||||||
|
|
||||||
def query(self):
|
def query(self):
|
||||||
result = make_request(method='GET', url=self.url)
|
result = make_request(method='GET', url=self.url)
|
||||||
@ -99,4 +97,4 @@ class UserMetadata:
|
|||||||
logg.info('The data is not available and might need to be added.')
|
logg.info('The data is not available and might need to be added.')
|
||||||
result.raise_for_status()
|
result.raise_for_status()
|
||||||
except requests.exceptions.HTTPError as error:
|
except requests.exceptions.HTTPError as error:
|
||||||
raise RuntimeError(error)
|
raise MetadataNotFoundError(error)
|
||||||
|
@ -15,7 +15,7 @@ from cic_ussd.balance import BalanceManager, compute_operational_balance, get_ca
|
|||||||
from cic_ussd.chain import Chain
|
from cic_ussd.chain import Chain
|
||||||
from cic_ussd.db.models.user import AccountStatus, User
|
from cic_ussd.db.models.user import AccountStatus, User
|
||||||
from cic_ussd.db.models.ussd_session import UssdSession
|
from cic_ussd.db.models.ussd_session import UssdSession
|
||||||
from cic_ussd.error import UserMetadataNotFoundError
|
from cic_ussd.error import MetadataNotFoundError
|
||||||
from cic_ussd.menu.ussd_menu import UssdMenu
|
from cic_ussd.menu.ussd_menu import UssdMenu
|
||||||
from cic_ussd.metadata import blockchain_address_to_metadata_pointer
|
from cic_ussd.metadata import blockchain_address_to_metadata_pointer
|
||||||
from cic_ussd.phone_number import get_user_by_phone_number
|
from cic_ussd.phone_number import get_user_by_phone_number
|
||||||
@ -235,7 +235,7 @@ def process_display_user_metadata(user: User, display_key: str):
|
|||||||
products=products
|
products=products
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
raise UserMetadataNotFoundError(f'Expected user metadata but found none in cache for key: {user.blockchain_address}')
|
raise MetadataNotFoundError(f'Expected user metadata but found none in cache for key: {user.blockchain_address}')
|
||||||
|
|
||||||
|
|
||||||
def process_account_statement(user: User, display_key: str, ussd_session: dict):
|
def process_account_statement(user: User, display_key: str, ussd_session: dict):
|
||||||
|
@ -23,10 +23,11 @@ from cic_ussd.encoder import PasswordEncoder
|
|||||||
from cic_ussd.files.local_files import create_local_file_data_stores, json_file_parser
|
from cic_ussd.files.local_files import create_local_file_data_stores, json_file_parser
|
||||||
from cic_ussd.menu.ussd_menu import UssdMenu
|
from cic_ussd.menu.ussd_menu import UssdMenu
|
||||||
from cic_ussd.metadata.signer import Signer
|
from cic_ussd.metadata.signer import Signer
|
||||||
from cic_ussd.metadata.user import UserMetadata
|
from cic_ussd.metadata.base import Metadata
|
||||||
from cic_ussd.operations import (define_response_with_content,
|
from cic_ussd.operations import (define_response_with_content,
|
||||||
process_menu_interaction_requests,
|
process_menu_interaction_requests,
|
||||||
define_multilingual_responses)
|
define_multilingual_responses)
|
||||||
|
from cic_ussd.phone_number import process_phone_number
|
||||||
from cic_ussd.redis import InMemoryStore
|
from cic_ussd.redis import InMemoryStore
|
||||||
from cic_ussd.requests import (get_request_endpoint,
|
from cic_ussd.requests import (get_request_endpoint,
|
||||||
get_request_method,
|
get_request_method,
|
||||||
@ -64,7 +65,6 @@ config.censor('PASSWORD', 'DATABASE')
|
|||||||
# define log levels
|
# define log levels
|
||||||
if args.vv:
|
if args.vv:
|
||||||
logging.getLogger().setLevel(logging.DEBUG)
|
logging.getLogger().setLevel(logging.DEBUG)
|
||||||
logging.getLogger('sqlalchemy.engine').setLevel(logging.DEBUG)
|
|
||||||
elif args.v:
|
elif args.v:
|
||||||
logging.getLogger().setLevel(logging.INFO)
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
|
||||||
@ -86,7 +86,7 @@ UssdMenu.ussd_menu_db = ussd_menu_db
|
|||||||
|
|
||||||
# set up db
|
# set up db
|
||||||
data_source_name = dsn_from_config(config)
|
data_source_name = dsn_from_config(config)
|
||||||
SessionBase.connect(data_source_name=data_source_name)
|
SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
|
||||||
# create session for the life time of http request
|
# create session for the life time of http request
|
||||||
SessionBase.session = SessionBase.create_session()
|
SessionBase.session = SessionBase.create_session()
|
||||||
|
|
||||||
@ -99,7 +99,7 @@ InMemoryStore.cache = redis.StrictRedis(host=config.get('REDIS_HOSTNAME'),
|
|||||||
InMemoryUssdSession.redis_cache = InMemoryStore.cache
|
InMemoryUssdSession.redis_cache = InMemoryStore.cache
|
||||||
|
|
||||||
# define metadata URL
|
# define metadata URL
|
||||||
UserMetadata.base_url = config.get('CIC_META_URL')
|
Metadata.base_url = config.get('CIC_META_URL')
|
||||||
|
|
||||||
# define signer values
|
# define signer values
|
||||||
export_dir = config.get('PGP_EXPORT_DIR')
|
export_dir = config.get('PGP_EXPORT_DIR')
|
||||||
@ -151,6 +151,10 @@ def application(env, start_response):
|
|||||||
external_session_id = post_data.get('sessionId')
|
external_session_id = post_data.get('sessionId')
|
||||||
user_input = post_data.get('text')
|
user_input = post_data.get('text')
|
||||||
|
|
||||||
|
# add validation for phone number
|
||||||
|
if phone_number:
|
||||||
|
phone_number = process_phone_number(phone_number=phone_number, region=config.get('PHONE_NUMBER_REGION'))
|
||||||
|
|
||||||
# validate ip address
|
# validate ip address
|
||||||
if not check_ip(config=config, env=env):
|
if not check_ip(config=config, env=env):
|
||||||
start_response('403 Sneaky, sneaky', errors_headers)
|
start_response('403 Sneaky, sneaky', errors_headers)
|
||||||
@ -174,8 +178,10 @@ def application(env, start_response):
|
|||||||
|
|
||||||
# validate phone number
|
# validate phone number
|
||||||
if not validate_phone_number(phone_number):
|
if not validate_phone_number(phone_number):
|
||||||
|
logg.error('invalid phone number {}'.format(phone_number))
|
||||||
start_response('400 Invalid phone number format', errors_headers)
|
start_response('400 Invalid phone number format', errors_headers)
|
||||||
return []
|
return []
|
||||||
|
logg.debug('session {} started for {}'.format(external_session_id, phone_number))
|
||||||
|
|
||||||
# handle menu interaction requests
|
# handle menu interaction requests
|
||||||
chain_str = chain_spec.__str__()
|
chain_str = chain_spec.__str__()
|
||||||
|
@ -13,13 +13,14 @@ from confini import Config
|
|||||||
from cic_ussd.db import dsn_from_config
|
from cic_ussd.db import dsn_from_config
|
||||||
from cic_ussd.db.models.base import SessionBase
|
from cic_ussd.db.models.base import SessionBase
|
||||||
from cic_ussd.metadata.signer import Signer
|
from cic_ussd.metadata.signer import Signer
|
||||||
from cic_ussd.metadata.user import UserMetadata
|
from cic_ussd.metadata.base import Metadata
|
||||||
from cic_ussd.redis import InMemoryStore
|
from cic_ussd.redis import InMemoryStore
|
||||||
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
|
from cic_ussd.session.ussd_session import UssdSession as InMemoryUssdSession
|
||||||
from cic_ussd.validator import validate_presence
|
from cic_ussd.validator import validate_presence
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
logging.getLogger('gnupg').setLevel(logging.WARNING)
|
||||||
|
|
||||||
config_directory = '/usr/local/etc/cic-ussd/'
|
config_directory = '/usr/local/etc/cic-ussd/'
|
||||||
|
|
||||||
@ -47,7 +48,7 @@ logg.debug(config)
|
|||||||
|
|
||||||
# connect to database
|
# connect to database
|
||||||
data_source_name = dsn_from_config(config)
|
data_source_name = dsn_from_config(config)
|
||||||
SessionBase.connect(data_source_name=data_source_name)
|
SessionBase.connect(data_source_name, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
|
||||||
|
|
||||||
# verify database connection with minimal sanity query
|
# verify database connection with minimal sanity query
|
||||||
session = SessionBase.create_session()
|
session = SessionBase.create_session()
|
||||||
@ -63,7 +64,7 @@ InMemoryStore.cache = redis.StrictRedis(host=config.get('REDIS_HOSTNAME'),
|
|||||||
InMemoryUssdSession.redis_cache = InMemoryStore.cache
|
InMemoryUssdSession.redis_cache = InMemoryStore.cache
|
||||||
|
|
||||||
# define metadata URL
|
# define metadata URL
|
||||||
UserMetadata.base_url = config.get('CIC_META_URL')
|
Metadata.base_url = config.get('CIC_META_URL')
|
||||||
|
|
||||||
# define signer values
|
# define signer values
|
||||||
export_dir = config.get('PGP_EXPORT_DIR')
|
export_dir = config.get('PGP_EXPORT_DIR')
|
||||||
|
@ -11,7 +11,7 @@ from cic_types.models.person import generate_vcard_from_contact_data, manage_ide
|
|||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.chain import Chain
|
from cic_ussd.chain import Chain
|
||||||
from cic_ussd.db.models.user import User
|
from cic_ussd.db.models.user import User
|
||||||
from cic_ussd.error import UserMetadataNotFoundError
|
from cic_ussd.error import MetadataNotFoundError
|
||||||
from cic_ussd.metadata import blockchain_address_to_metadata_pointer
|
from cic_ussd.metadata import blockchain_address_to_metadata_pointer
|
||||||
from cic_ussd.operations import save_to_in_memory_ussd_session_data
|
from cic_ussd.operations import save_to_in_memory_ussd_session_data
|
||||||
from cic_ussd.redis import get_cached_data
|
from cic_ussd.redis import get_cached_data
|
||||||
@ -181,7 +181,7 @@ def edit_user_metadata_attribute(state_machine_data: Tuple[str, dict, User]):
|
|||||||
user_metadata = get_cached_data(key=key)
|
user_metadata = get_cached_data(key=key)
|
||||||
|
|
||||||
if not user_metadata:
|
if not user_metadata:
|
||||||
raise UserMetadataNotFoundError(f'Expected user metadata but found none in cache for key: {blockchain_address}')
|
raise MetadataNotFoundError(f'Expected user metadata but found none in cache for key: {blockchain_address}')
|
||||||
|
|
||||||
given_name = ussd_session.get('session_data').get('given_name')
|
given_name = ussd_session.get('session_data').get('given_name')
|
||||||
family_name = ussd_session.get('session_data').get('family_name')
|
family_name = ussd_session.get('session_data').get('family_name')
|
||||||
|
@ -5,9 +5,24 @@ import celery
|
|||||||
import sqlalchemy
|
import sqlalchemy
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
|
from cic_ussd.error import MetadataStoreError
|
||||||
|
from cic_ussd.db.models.base import SessionBase
|
||||||
|
|
||||||
|
|
||||||
class CriticalTask(celery.Task):
|
class BaseTask(celery.Task):
|
||||||
|
|
||||||
|
session_func = SessionBase.create_session
|
||||||
|
|
||||||
|
def create_session(self):
|
||||||
|
return BaseTask.session_func()
|
||||||
|
|
||||||
|
|
||||||
|
def log_banner(self):
|
||||||
|
logg.debug('task {} root uuid {}'.format(self.__class__.__name__, self.request.root_id))
|
||||||
|
return
|
||||||
|
|
||||||
|
|
||||||
|
class CriticalTask(BaseTask):
|
||||||
retry_jitter = True
|
retry_jitter = True
|
||||||
retry_backoff = True
|
retry_backoff = True
|
||||||
retry_backoff_max = 8
|
retry_backoff_max = 8
|
||||||
@ -17,4 +32,11 @@ class CriticalSQLAlchemyTask(CriticalTask):
|
|||||||
autoretry_for = (
|
autoretry_for = (
|
||||||
sqlalchemy.exc.DatabaseError,
|
sqlalchemy.exc.DatabaseError,
|
||||||
sqlalchemy.exc.TimeoutError,
|
sqlalchemy.exc.TimeoutError,
|
||||||
|
sqlalchemy.exc.ResourceClosedError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class CriticalMetadataTask(CriticalTask):
|
||||||
|
autoretry_for = (
|
||||||
|
MetadataStoreError,
|
||||||
)
|
)
|
||||||
|
@ -54,6 +54,18 @@ def process_account_creation_callback(self, result: str, url: str, status_code:
|
|||||||
session.commit()
|
session.commit()
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
|
queue = self.request.delivery_info.get('routing_key')
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_ussd.tasks.metadata.add_phone_pointer',
|
||||||
|
[
|
||||||
|
result,
|
||||||
|
phone_number,
|
||||||
|
'pgp',
|
||||||
|
],
|
||||||
|
queue=queue,
|
||||||
|
)
|
||||||
|
s.apply_async()
|
||||||
|
|
||||||
# expire cache
|
# expire cache
|
||||||
cache.expire(task_id, timedelta(seconds=180))
|
cache.expire(task_id, timedelta(seconds=180))
|
||||||
|
|
||||||
@ -65,6 +77,8 @@ def process_account_creation_callback(self, result: str, url: str, status_code:
|
|||||||
session.close()
|
session.close()
|
||||||
raise ActionDataNotFoundError(f'Account creation task: {task_id}, returned unexpected response: {status_code}')
|
raise ActionDataNotFoundError(f'Account creation task: {task_id}, returned unexpected response: {status_code}')
|
||||||
|
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task
|
@celery_app.task
|
||||||
def process_incoming_transfer_callback(result: dict, param: str, status_code: int):
|
def process_incoming_transfer_callback(result: dict, param: str, status_code: int):
|
||||||
@ -118,6 +132,7 @@ def process_incoming_transfer_callback(result: dict, param: str, status_code: in
|
|||||||
session.close()
|
session.close()
|
||||||
raise ValueError(f'Unexpected status code: {status_code}.')
|
raise ValueError(f'Unexpected status code: {status_code}.')
|
||||||
|
|
||||||
|
session.close()
|
||||||
|
|
||||||
@celery_app.task
|
@celery_app.task
|
||||||
def process_balances_callback(result: list, param: str, status_code: int):
|
def process_balances_callback(result: list, param: str, status_code: int):
|
||||||
@ -161,7 +176,6 @@ def define_transaction_action_tag(
|
|||||||
def process_statement_callback(result, param: str, status_code: int):
|
def process_statement_callback(result, param: str, status_code: int):
|
||||||
if status_code == 0:
|
if status_code == 0:
|
||||||
# create session
|
# create session
|
||||||
session = SessionBase.create_session()
|
|
||||||
processed_transactions = []
|
processed_transactions = []
|
||||||
|
|
||||||
# process transaction data to cache
|
# process transaction data to cache
|
||||||
@ -174,6 +188,7 @@ def process_statement_callback(result, param: str, status_code: int):
|
|||||||
if '0x0000000000000000000000000000000000000000' in source_token:
|
if '0x0000000000000000000000000000000000000000' in source_token:
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
|
session = SessionBase.create_session()
|
||||||
# describe a processed transaction
|
# describe a processed transaction
|
||||||
processed_transaction = {}
|
processed_transaction = {}
|
||||||
|
|
||||||
@ -202,6 +217,8 @@ def process_statement_callback(result, param: str, status_code: int):
|
|||||||
else:
|
else:
|
||||||
logg.warning(f'Tx with recipient not found in cic-ussd')
|
logg.warning(f'Tx with recipient not found in cic-ussd')
|
||||||
|
|
||||||
|
session.close()
|
||||||
|
|
||||||
# add transaction values
|
# add transaction values
|
||||||
processed_transaction['to_value'] = from_wei(value=transaction.get('to_value')).__str__()
|
processed_transaction['to_value'] = from_wei(value=transaction.get('to_value')).__str__()
|
||||||
processed_transaction['from_value'] = from_wei(value=transaction.get('from_value')).__str__()
|
processed_transaction['from_value'] = from_wei(value=transaction.get('from_value')).__str__()
|
||||||
|
@ -2,15 +2,18 @@
|
|||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
|
from hexathon import strip_0x
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.metadata import blockchain_address_to_metadata_pointer
|
from cic_ussd.metadata import blockchain_address_to_metadata_pointer
|
||||||
from cic_ussd.metadata.user import UserMetadata
|
from cic_ussd.metadata.user import UserMetadata
|
||||||
|
from cic_ussd.metadata.phone import PhonePointerMetadata
|
||||||
|
from cic_ussd.tasks.base import CriticalMetadataTask
|
||||||
|
|
||||||
celery_app = celery.current_app
|
celery_app = celery.current_app
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task
|
@celery_app.task
|
||||||
@ -46,3 +49,10 @@ def edit_user_metadata(blockchain_address: str, data: bytes, engine: str):
|
|||||||
identifier = blockchain_address_to_metadata_pointer(blockchain_address=blockchain_address)
|
identifier = blockchain_address_to_metadata_pointer(blockchain_address=blockchain_address)
|
||||||
user_metadata_client = UserMetadata(identifier=identifier)
|
user_metadata_client = UserMetadata(identifier=identifier)
|
||||||
user_metadata_client.edit(data=data, engine=engine)
|
user_metadata_client.edit(data=data, engine=engine)
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=CriticalMetadataTask)
|
||||||
|
def add_phone_pointer(self, blockchain_address: str, phone: str, engine: str):
|
||||||
|
stripped_address = strip_0x(blockchain_address)
|
||||||
|
phone_metadata_client = PhonePointerMetadata(identifier=phone.encode('utf-8'), engine=engine)
|
||||||
|
phone_metadata_client.create(data=stripped_address)
|
||||||
|
@ -70,3 +70,4 @@ def persist_session_to_db(external_session_id: str):
|
|||||||
session.close()
|
session.close()
|
||||||
raise SessionNotFoundError('Session does not exist!')
|
raise SessionNotFoundError('Session does not exist!')
|
||||||
|
|
||||||
|
session.close()
|
||||||
|
@ -2,6 +2,7 @@
|
|||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import ipaddress
|
||||||
|
|
||||||
# third-party imports
|
# third-party imports
|
||||||
from confini import Config
|
from confini import Config
|
||||||
@ -21,7 +22,14 @@ def check_ip(config: Config, env: dict):
|
|||||||
:return: Request IP validity
|
:return: Request IP validity
|
||||||
:rtype: boolean
|
:rtype: boolean
|
||||||
"""
|
"""
|
||||||
return env.get('REMOTE_ADDR') == config.get('APP_ALLOWED_IP')
|
# TODO: do once at boot time
|
||||||
|
actual_ip = ipaddress.ip_network(env.get('REMOTE_ADDR') + '/32')
|
||||||
|
for allowed_net_src in config.get('APP_ALLOWED_IP').split(','):
|
||||||
|
allowed_net = ipaddress.ip_network(allowed_net_src)
|
||||||
|
if actual_ip.subnet_of(allowed_net):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
def check_request_content_length(config: Config, env: dict):
|
def check_request_content_length(config: Config, env: dict):
|
||||||
|
@ -2,4 +2,4 @@
|
|||||||
|
|
||||||
. /root/db.sh
|
. /root/db.sh
|
||||||
|
|
||||||
/usr/local/bin/cic-ussd-tasker -vv "$@"
|
/usr/local/bin/cic-ussd-tasker $@
|
||||||
|
@ -2,4 +2,6 @@
|
|||||||
|
|
||||||
. /root/db.sh
|
. /root/db.sh
|
||||||
|
|
||||||
/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/server.py --http :9000 --pyargv "-vv"
|
server_port=${SERVER_PORT:-9000}
|
||||||
|
|
||||||
|
/usr/local/bin/uwsgi --wsgi-file /usr/local/lib/python3.8/site-packages/cic_ussd/runnable/server.py --http :$server_port --pyargv "$@"
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
cic_base[full_graph]~=0.1.2a58
|
cic_base[full_graph]~=0.1.2a61
|
||||||
cic-eth~=0.11.0a4
|
cic-eth~=0.11.0b1
|
||||||
cic-notify~=0.4.0a3
|
cic-notify~=0.4.0a3
|
||||||
cic-types~=0.1.0a10
|
cic-types~=0.1.0a10
|
||||||
|
@ -2,88 +2,234 @@
|
|||||||
|
|
||||||
This folder contains tools to generate and import test data.
|
This folder contains tools to generate and import test data.
|
||||||
|
|
||||||
## DATA CREATION
|
## OVERVIEW
|
||||||
|
|
||||||
Does not need the cluster to run.
|
Three sets of tools are available, sorted by respective subdirectories.
|
||||||
|
|
||||||
Vanilla:
|
* **eth**: Import using sovereign wallets.
|
||||||
|
* **cic_eth**: Import using the `cic_eth` custodial engine.
|
||||||
|
* **cic_ussd**: Import using the `cic_ussd` interface (backed by `cic_eth`)
|
||||||
|
|
||||||
|
Each of the modules include two main scripts:
|
||||||
|
|
||||||
|
* **import_users.py**: Registers all created accounts in the network
|
||||||
|
* **import_balance.py**: Transfer an opening balance using an external keystore wallet
|
||||||
|
|
||||||
|
The balance script will sync with the blockchain, processing transactions and triggering actions when it finds. In its current version it does not keep track of any other state, so it will run indefinitly and needs You the Human to decide when it has done what it needs to do.
|
||||||
|
|
||||||
|
|
||||||
|
In addition the following common tools are available:
|
||||||
|
|
||||||
|
* **create_import_users.py**: User creation script
|
||||||
|
* **verify.py**: Import verification script
|
||||||
|
* **cic_meta**: Metadata imports
|
||||||
|
|
||||||
|
|
||||||
|
## REQUIREMENTS
|
||||||
|
|
||||||
|
A virtual environment for the python scripts is recommended. We know it works with `python 3.8.x`. Let us know if you run it successfully with other minor versions.
|
||||||
|
|
||||||
|
```
|
||||||
|
python3 -m venv .venv
|
||||||
|
source .venv/bin/activate
|
||||||
|
```
|
||||||
|
|
||||||
|
Install all requirements from the `requirements.txt` file:
|
||||||
|
|
||||||
|
`pip install --extra-index-url https://pip.grassrootseconomics.net:8433 -r requirements.txt`
|
||||||
|
|
||||||
|
|
||||||
|
If you are importing metadata, also do ye olde:
|
||||||
|
|
||||||
|
`npm install`
|
||||||
|
|
||||||
|
|
||||||
|
## HOW TO USE
|
||||||
|
|
||||||
|
### Step 1 - Data creation
|
||||||
|
|
||||||
|
Before running any of the imports, the user data to import has to be generated and saved to disk.
|
||||||
|
|
||||||
|
The script does not need any services to run.
|
||||||
|
|
||||||
|
Vanilla version:
|
||||||
|
|
||||||
`python create_import_users.py [--dir <datadir>] <number_of_users>`
|
`python create_import_users.py [--dir <datadir>] <number_of_users>`
|
||||||
|
|
||||||
If you want to use the `import_balance.py` script to add to the user's balance from an external address, add:
|
If you want to use a `import_balance.py` script to add to the user's balance from an external address, use:
|
||||||
|
|
||||||
`python create_import_users.py --gift-threshold <max_units_to_send> [--dir <datadir>] <number_of_users>`
|
`python create_import_users.py --gift-threshold <max_units_to_send> [--dir <datadir>] <number_of_users>`
|
||||||
|
|
||||||
|
|
||||||
## IMPORT
|
### Step 2 - Services
|
||||||
|
|
||||||
Make sure the following is running in the cluster:
|
Unless you know what you are doing, start with a clean slate, and execute (in the repository root):
|
||||||
* eth
|
|
||||||
* postgres
|
`docker-compose down -v`
|
||||||
* redis
|
|
||||||
* cic-meta-server
|
Then go through, in sequence:
|
||||||
|
|
||||||
|
#### Base requirements
|
||||||
|
|
||||||
|
If you are importing using `eth` and _not_ importing metadata, then the only service you need running in the cluster is:
|
||||||
|
* eth
|
||||||
|
|
||||||
|
In all other cases you will _also_ need:
|
||||||
|
* postgres
|
||||||
|
* redis
|
||||||
|
|
||||||
|
|
||||||
If using the _custodial_ alternative for user imports, also run:
|
#### EVM provisions
|
||||||
* cic-eth-tasker
|
|
||||||
* cic-eth-dispatcher
|
This step is needed in *all* cases.
|
||||||
* cic-eth-tracker
|
|
||||||
|
`RUN_MASK=1 docker-compose up contract-migration`
|
||||||
|
|
||||||
|
After this step is run, you can find top-level ethereum addresses (like the cic registry address, which you will need below) in `<repository_root>/service-configs/.env`
|
||||||
|
|
||||||
|
|
||||||
You will want to run these in sequence:
|
#### Custodial provisions
|
||||||
|
|
||||||
|
This step is _only_ needed if you are importing using `cic_eth` or `cic_ussd`
|
||||||
|
|
||||||
|
`RUN_MASK=2 docker-compose up contract-migration`
|
||||||
|
|
||||||
|
|
||||||
## 1. Metadata
|
#### Custodial services
|
||||||
|
|
||||||
`node import_meta.js <datadir> <number_of_users>`
|
If importing using `cic_eth` or `cic_ussd` also run:
|
||||||
|
* cic-eth-tasker
|
||||||
|
* cic-eth-dispatcher
|
||||||
|
* cic-eth-tracker
|
||||||
|
* cic-eth-retrier
|
||||||
|
|
||||||
|
If importing using `cic_ussd` also run:
|
||||||
|
* cic-ussd-tasker
|
||||||
|
* cic-ussd-server
|
||||||
|
* cic-notify-tasker
|
||||||
|
|
||||||
|
If metadata is to be imported, also run:
|
||||||
|
* cic-meta-server
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
### Step 3 - User imports
|
||||||
|
|
||||||
|
If you did not change the docker-compose setup, your `eth_provider` the you need for the commands below will be `http://localhost:63545`.
|
||||||
|
|
||||||
|
Only run _one_ of the alternatives.
|
||||||
|
|
||||||
|
The keystore file used for transferring external opening balances tracker is relative to the directory you found this README in. Of course you can use a different wallet, but then you will have to provide it with tokens yourself (hint: `../reset.sh`)
|
||||||
|
|
||||||
|
All external balance transactions are saved in raw wire format in `<datadir>/txs`, with transaction hash as file name.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### Alternative 1 - Sovereign wallet import - `eth`
|
||||||
|
|
||||||
|
|
||||||
|
First, make a note of the **block height** before running anything.
|
||||||
|
|
||||||
|
To import, run to _completion_:
|
||||||
|
|
||||||
|
`python eth/import_users.py -v -c config -p <eth_provider> -r <cic_registry_address> -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
||||||
|
|
||||||
|
After the script completes, keystore files for all generated accouts will be found in `<datadir>/keystore`, all with `foo` as password (would set it empty, but believe it or not some interfaces out there won't work unless you have one).
|
||||||
|
|
||||||
|
Then run:
|
||||||
|
|
||||||
|
`python eth/import_balance.py -v -c config -r <cic_registry_address> -p <eth_provider> --offset <block_height_at_start> -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#### Alternative 2 - Custodial engine import - `cic_eth`
|
||||||
|
|
||||||
|
Run in sequence, in first terminal:
|
||||||
|
|
||||||
|
`python cic_eth/import_balance.py -v -c config -p <eth_provider> -r <cic_registry_address> -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c --head out`
|
||||||
|
|
||||||
|
In another terminal:
|
||||||
|
|
||||||
|
`python cic_eth/import_users.py -v -c config --redis-host-callback <redis_hostname_in_docker> out`
|
||||||
|
|
||||||
|
The `redis_hostname_in_docker` value is the hostname required to reach the redis server from within the docker cluster, and should be `redis` if you left the docker-compose unchanged. The `import_users` script will receive the address of each newly created custodial account on a redis subscription fed by a callback task in the `cic_eth` account creation task chain.
|
||||||
|
|
||||||
|
|
||||||
|
#### Alternative 3 - USSD import - `cic_ussd`
|
||||||
|
|
||||||
|
If you have previously run the `cic_ussd` import incompletely, it could be a good idea to purge the queue. If you have left docker-compose unchanged, `redis_url` should be `redis://localhost:63379`.
|
||||||
|
|
||||||
|
`celery -A cic_ussd.import_task purge -Q cic-import-ussd --broker <redis_url>`
|
||||||
|
|
||||||
|
Then, in sequence, run in first terminal:
|
||||||
|
|
||||||
|
`python cic_eth/import_balance.py -v -c config -p <eth_provider> -r <cic_registry_address> -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c out`
|
||||||
|
|
||||||
|
In second terminal:
|
||||||
|
|
||||||
|
`python cic_ussd/import_users.py -v -c config out`
|
||||||
|
|
||||||
|
The balance script is a celery task worker, and will not exit by itself in its current version. However, after it's done doing its job, you will find "reached nonce ... exiting" among the last lines of the log.
|
||||||
|
|
||||||
|
The connection parameters for the `cic-ussd-server` is currently _hardcoded_ in the `import_users.py` script file.
|
||||||
|
|
||||||
|
|
||||||
|
### Step 4 - Metadata import (optional)
|
||||||
|
|
||||||
|
The metadata import scripts can be run at any time after step 1 has been completed.
|
||||||
|
|
||||||
|
|
||||||
|
#### Importing user metadata
|
||||||
|
|
||||||
|
To import the main user metadata structs, run:
|
||||||
|
|
||||||
|
`node cic_meta/import_meta.js <datadir> <number_of_users>`
|
||||||
|
|
||||||
Monitors a folder for output from the `import_users.py` script, adding the metadata found to the `cic-meta` service.
|
Monitors a folder for output from the `import_users.py` script, adding the metadata found to the `cic-meta` service.
|
||||||
|
|
||||||
|
If _number of users_ is omitted the script will run until manually interrupted.
|
||||||
## 2. Balances
|
|
||||||
|
|
||||||
(Only if you used the `--gift-threshold` option above)
|
|
||||||
|
|
||||||
`python -c config -i <newchain:id> -r <cic_registry_address> -p <eth_provider> --head -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
|
||||||
|
|
||||||
This will monitor new mined blocks and send balances to the newly created accounts.
|
|
||||||
|
|
||||||
|
|
||||||
### 3. Users
|
#### Importing phone pointer
|
||||||
|
|
||||||
Only use **one** of the following
|
`node cic_meta/import_meta_phone.js <datadir> <number_of_users>`
|
||||||
|
|
||||||
#### Custodial
|
If you imported using `cic_ussd`, the phone pointer is _already added_ and this script will do nothing.
|
||||||
|
|
||||||
This alternative generates accounts using the `cic-eth` custodial engine
|
|
||||||
|
|
||||||
Without any modifications to the cluster and config files:
|
### Step 5 - Verify
|
||||||
|
|
||||||
`python import_users.py -c config --redis-host-callback redis <datadir>`
|
`python verify.py -v -c config -r <cic_registry_address> -p <eth_provider> <datadir>`
|
||||||
|
|
||||||
** A note on the The callback**: The script uses a redis callback to retrieve the newly generated custodial address. This is the redis server _from the perspective of the cic-eth component_.
|
Included checks:
|
||||||
|
* Private key is in cic-eth keystore
|
||||||
|
* Address is in accounts index
|
||||||
|
* Address has gas balance
|
||||||
|
* Address has triggered the token faucet
|
||||||
|
* Address has token balance matching the gift threshold
|
||||||
|
* Personal metadata can be retrieved and has exact match
|
||||||
|
* Phone pointer metadata can be retrieved and matches address
|
||||||
|
* USSD menu response is initial state after registration
|
||||||
|
|
||||||
#### Sovereign
|
Checks can be selectively included and excluded. See `--help` for details.
|
||||||
|
|
||||||
This alternative generates keystore files, while registering corresponding addresses in the accounts registry directly
|
Will output one line for each check, with name of check and number of errors found per check.
|
||||||
|
|
||||||
`python import_sovereign_users.py -c config -i <newchain:id> -r <cic_registry_address> -p <eth_provider> -y ../keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c <datadir>`
|
|
||||||
|
|
||||||
A `keystore` sub-directory in the data path is created, with ethereum keystore files for all generated private keys. Passphrase is set to empty string for all of them.
|
|
||||||
|
|
||||||
## VERIFY
|
|
||||||
|
|
||||||
`python verify.py -c config -i <newchain:id> -r <cic_registry_address> -p <eth_provider> <datadir>`
|
|
||||||
|
|
||||||
Checks
|
|
||||||
* Private key is in cic-eth keystore
|
|
||||||
* Address is in accounts index
|
|
||||||
* Address has balance matching the gift threshold
|
|
||||||
* Metadata can be retrieved and has exact match
|
|
||||||
|
|
||||||
Should exit with code 0 if all input data is found in the respective services.
|
Should exit with code 0 if all input data is found in the respective services.
|
||||||
|
|
||||||
|
|
||||||
## KNOWN ISSUES
|
## KNOWN ISSUES
|
||||||
|
|
||||||
If the faucet disbursement is set to a non-zero amount, the balances will be off. The verify script needs to be improved to check the faucet amount.
|
- If the faucet disbursement is set to a non-zero amount, the balances will be off. The verify script needs to be improved to check the faucet amount.
|
||||||
|
|
||||||
|
- When the account callback in `cic_eth` fails, the `cic_eth/import_users.py` script will exit with a cryptic complaint concerning a `None` value.
|
||||||
|
|
||||||
|
- Sovereign import scripts use the same keystore, and running them simultaneously will mess up the transaction nonce sequence. Better would be to use two different keystore wallets so balance and users scripts can be run simultaneously.
|
||||||
|
|
||||||
|
- `pycrypto` and `pycryptodome` _have to be installed in that order_. If you get errors concerning `Crypto.KDF` then uninstall both and re-install in that order. Make sure you use the versions listed in `requirements.txt`. `pycryptodome` is a legacy dependency and will be removed as soon as possible.
|
||||||
|
|
||||||
|
- Sovereign import script is very slow because it's scrypt'ing keystore files for the accounts that it creates. An improvement would be optional and/or asynchronous keyfile generation.
|
||||||
|
|
||||||
|
- Running the balance script should be _optional_ in all cases, but is currently required in the case of `cic_ussd` because it is needed to generate the metadata. An improvement would be moving the task to `import_users.py`, for a different queue than the balance tx handler.
|
||||||
|
|
||||||
|
- `cic_ussd` imports is poorly implemented, and consumes a lot of resources. Therefore it takes a long time to complete. Reducing the amount of polls for the phone pointer would go a long way to improve it.
|
||||||
|
@ -10,7 +10,7 @@ import hashlib
|
|||||||
import csv
|
import csv
|
||||||
import json
|
import json
|
||||||
|
|
||||||
# third-party impotts
|
# external imports
|
||||||
import eth_abi
|
import eth_abi
|
||||||
import confini
|
import confini
|
||||||
from hexathon import (
|
from hexathon import (
|
||||||
@ -42,7 +42,7 @@ from cic_types.models.person import Person
|
|||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
config_dir = '/usr/local/etc/cic-syncer'
|
config_dir = './config'
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||||
@ -162,6 +162,15 @@ class Handler:
|
|||||||
(tx_hash_hex, o) = self.tx_factory.transfer(self.token_address, signer_address, recipient, balance_full)
|
(tx_hash_hex, o) = self.tx_factory.transfer(self.token_address, signer_address, recipient, balance_full)
|
||||||
logg.info('submitting erc20 transfer tx {} for recipient {}'.format(tx_hash_hex, recipient))
|
logg.info('submitting erc20 transfer tx {} for recipient {}'.format(tx_hash_hex, recipient))
|
||||||
r = conn.do(o)
|
r = conn.do(o)
|
||||||
|
|
||||||
|
tx_path = os.path.join(
|
||||||
|
user_dir,
|
||||||
|
'txs',
|
||||||
|
strip_0x(tx_hash_hex),
|
||||||
|
)
|
||||||
|
f = open(tx_path, 'w')
|
||||||
|
f.write(strip_0x(o['params'][0]))
|
||||||
|
f.close()
|
||||||
# except TypeError as e:
|
# except TypeError as e:
|
||||||
# logg.warning('typerror {}'.format(e))
|
# logg.warning('typerror {}'.format(e))
|
||||||
# pass
|
# pass
|
@ -7,6 +7,7 @@ import argparse
|
|||||||
import uuid
|
import uuid
|
||||||
import datetime
|
import datetime
|
||||||
import time
|
import time
|
||||||
|
import phonenumbers
|
||||||
from glob import glob
|
from glob import glob
|
||||||
|
|
||||||
# third-party imports
|
# third-party imports
|
||||||
@ -17,7 +18,7 @@ from hexathon import (
|
|||||||
add_0x,
|
add_0x,
|
||||||
strip_0x,
|
strip_0x,
|
||||||
)
|
)
|
||||||
from chainlib.eth.address import to_checksum
|
from chainlib.eth.address import to_checksum_address
|
||||||
from cic_types.models.person import Person
|
from cic_types.models.person import Person
|
||||||
from cic_eth.api.api_task import Api
|
from cic_eth.api.api_task import Api
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
@ -75,9 +76,15 @@ os.makedirs(user_new_dir)
|
|||||||
meta_dir = os.path.join(args.user_dir, 'meta')
|
meta_dir = os.path.join(args.user_dir, 'meta')
|
||||||
os.makedirs(meta_dir)
|
os.makedirs(meta_dir)
|
||||||
|
|
||||||
|
phone_dir = os.path.join(args.user_dir, 'phone')
|
||||||
|
os.makedirs(os.path.join(phone_dir, 'meta'))
|
||||||
|
|
||||||
user_old_dir = os.path.join(args.user_dir, 'old')
|
user_old_dir = os.path.join(args.user_dir, 'old')
|
||||||
os.stat(user_old_dir)
|
os.stat(user_old_dir)
|
||||||
|
|
||||||
|
txs_dir = os.path.join(args.user_dir, 'txs')
|
||||||
|
os.makedirs(txs_dir)
|
||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
chain_str = str(chain_spec)
|
chain_str = str(chain_spec)
|
||||||
|
|
||||||
@ -165,12 +172,32 @@ if __name__ == '__main__':
|
|||||||
f.write(json.dumps(o))
|
f.write(json.dumps(o))
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
#old_address = to_checksum(add_0x(y[:len(y)-5]))
|
|
||||||
#fi.write('{},{}\n'.format(new_address, old_address))
|
#fi.write('{},{}\n'.format(new_address, old_address))
|
||||||
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), 'cic.person')
|
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), 'cic.person')
|
||||||
meta_filepath = os.path.join(meta_dir, '{}.json'.format(new_address_clean.upper()))
|
meta_filepath = os.path.join(meta_dir, '{}.json'.format(new_address_clean.upper()))
|
||||||
os.symlink(os.path.realpath(filepath), meta_filepath)
|
os.symlink(os.path.realpath(filepath), meta_filepath)
|
||||||
|
|
||||||
|
phone_object = phonenumbers.parse(u.tel)
|
||||||
|
phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
|
||||||
|
meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
|
||||||
|
meta_phone_filepath = os.path.join(phone_dir, 'meta', meta_phone_key)
|
||||||
|
|
||||||
|
filepath = os.path.join(
|
||||||
|
phone_dir,
|
||||||
|
'new',
|
||||||
|
meta_phone_key[:2].upper(),
|
||||||
|
meta_phone_key[2:4].upper(),
|
||||||
|
meta_phone_key.upper(),
|
||||||
|
)
|
||||||
|
os.makedirs(os.path.dirname(filepath), exist_ok=True)
|
||||||
|
|
||||||
|
f = open(filepath, 'w')
|
||||||
|
f.write(to_checksum_address(new_address_clean))
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
os.symlink(os.path.realpath(filepath), meta_phone_filepath)
|
||||||
|
|
||||||
|
|
||||||
i += 1
|
i += 1
|
||||||
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")
|
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")
|
||||||
|
|
@ -101,6 +101,7 @@ function importMeta(keystore) {
|
|||||||
const file = files[i];
|
const file = files[i];
|
||||||
if (file.substr(-5) != '.json') {
|
if (file.substr(-5) != '.json') {
|
||||||
console.debug('skipping file', file);
|
console.debug('skipping file', file);
|
||||||
|
continue;
|
||||||
}
|
}
|
||||||
const filePath = path.join(workDir, file);
|
const filePath = path.join(workDir, file);
|
||||||
doOne(keystore, filePath);
|
doOne(keystore, filePath);
|
134
apps/contract-migration/scripts/cic_meta/import_meta_phone.js
Normal file
134
apps/contract-migration/scripts/cic_meta/import_meta_phone.js
Normal file
@ -0,0 +1,134 @@
|
|||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
const http = require('http');
|
||||||
|
|
||||||
|
const cic = require('cic-client-meta');
|
||||||
|
const vcfp = require('vcard-parser');
|
||||||
|
|
||||||
|
//const conf = JSON.parse(fs.readFileSync('./cic.conf'));
|
||||||
|
|
||||||
|
const config = new cic.Config('./config');
|
||||||
|
config.process();
|
||||||
|
console.log(config);
|
||||||
|
|
||||||
|
|
||||||
|
function sendit(uid, envelope) {
|
||||||
|
const d = envelope.toJSON();
|
||||||
|
|
||||||
|
const contentLength = (new TextEncoder().encode(d)).length;
|
||||||
|
const opts = {
|
||||||
|
method: 'PUT',
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Content-Length': contentLength,
|
||||||
|
'X-CIC-AUTOMERGE': 'client',
|
||||||
|
|
||||||
|
},
|
||||||
|
};
|
||||||
|
let url = config.get('META_URL');
|
||||||
|
url = url.replace(new RegExp('^(.+://[^/]+)/*$'), '$1/');
|
||||||
|
console.log('posting to url: ' + url + uid);
|
||||||
|
const req = http.request(url + uid, opts, (res) => {
|
||||||
|
res.on('data', process.stdout.write);
|
||||||
|
res.on('end', () => {
|
||||||
|
console.log('result', res.statusCode, res.headers);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
if (!req.write(d)) {
|
||||||
|
console.error('foo', d);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
req.end();
|
||||||
|
}
|
||||||
|
|
||||||
|
function doOne(keystore, filePath, address) {
|
||||||
|
const signer = new cic.PGPSigner(keystore);
|
||||||
|
|
||||||
|
const j = JSON.parse(fs.readFileSync(filePath).toString());
|
||||||
|
const b = Buffer.from(j['vcard'], 'base64');
|
||||||
|
const s = b.toString();
|
||||||
|
const o = vcfp.parse(s);
|
||||||
|
const phone = o.tel[0].value;
|
||||||
|
|
||||||
|
cic.Phone.toKey(phone).then((uid) => {
|
||||||
|
const o = fs.readFileSync(filePath, 'utf-8');
|
||||||
|
|
||||||
|
const s = new cic.Syncable(uid, o);
|
||||||
|
s.setSigner(signer);
|
||||||
|
s.onwrap = (env) => {
|
||||||
|
sendit(uid, env);
|
||||||
|
};
|
||||||
|
s.sign();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
const privateKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_PRIVATE_KEY_FILE'));
// BUG FIX: the public key path previously reused PGP_PRIVATE_KEY_FILE, so the
// keystore was built with the private key file in place of the public one.
// NOTE(review): assumes the config defines PGP_PUBLIC_KEY_FILE — confirm.
const publicKeyPath = path.join(config.get('PGP_EXPORTS_DIR'), config.get('PGP_PUBLIC_KEY_FILE'));
// BUG FIX: pk/pubk were implicit globals; declare them explicitly.
const pk = fs.readFileSync(privateKeyPath);
const pubk = fs.readFileSync(publicKeyPath);

// importMetaPhone is passed as the keystore's ready-callback and kicks off the import
new cic.PGPKeyStore(
	config.get('PGP_PASSPHRASE'),
	pk,
	pubk,
	undefined,
	undefined,
	importMetaPhone,
);
|
||||||
|
|
||||||
|
const batchSize = 16;
|
||||||
|
const batchDelay = 1000;
|
||||||
|
const total = parseInt(process.argv[3]);
|
||||||
|
const dataDir = process.argv[2];
|
||||||
|
const workDir = path.join(dataDir, 'phone/meta');
|
||||||
|
const userDir = path.join(dataDir, 'new');
|
||||||
|
let count = 0;
|
||||||
|
let batchCount = 0;
|
||||||
|
|
||||||
|
|
||||||
|
// Drain the phone/meta work directory in batches of batchSize entries,
// pushing each referenced person record to the meta service. Reschedules
// itself until `count` reaches `total`.
function importMetaPhone(keystore) {
	let files;

	try {
		files = fs.readdirSync(workDir);
	} catch {
		console.error('source directory not yet ready', workDir);
		setTimeout(importMetaPhone, batchDelay, keystore);
		return;
	}
	let limit = batchSize;
	if (files.length < limit) {
		limit = files.length;
	}
	for (let i = 0; i < limit; i++) {
		const file = files[i];
		// BUG FIX: substr(0) returned the whole filename, so hidden files were
		// never detected; the loop also fell through and processed them anyway.
		if (file.substr(0, 1) == '.') {
			console.debug('skipping file', file);
			continue;
		}
		const filePath = path.join(workDir, file);

		// index file holds a 0x-prefixed address; strip the prefix, normalize case
		const address = fs.readFileSync(filePath).toString().substring(2).toUpperCase();
		const metaFilePath = path.join(
			userDir,
			address.substring(0, 2),
			address.substring(2, 4),
			address + '.json',
		);

		doOne(keystore, metaFilePath, address);
		fs.unlinkSync(filePath);
		count++;
		batchCount++;
		if (batchCount == batchSize) {
			console.debug('reached batch size, breathing');
			batchCount = 0;
			// BUG FIX: previously rescheduled the undefined name "importMeta"
			setTimeout(importMetaPhone, batchDelay, keystore);
			return;
		}
	}
	if (count == total) {
		return;
	}
	setTimeout(importMetaPhone, 100, keystore);
}
|
157
apps/contract-migration/scripts/cic_ussd/import_balance.py
Normal file
157
apps/contract-migration/scripts/cic_ussd/import_balance.py
Normal file
@ -0,0 +1,157 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import argparse
|
||||||
|
import hashlib
|
||||||
|
import redis
|
||||||
|
import celery
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import confini
|
||||||
|
from chainlib.eth.connection import EthHTTPConnection
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from hexathon import (
|
||||||
|
strip_0x,
|
||||||
|
add_0x,
|
||||||
|
)
|
||||||
|
from chainlib.eth.address import to_checksum_address
|
||||||
|
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
|
||||||
|
from crypto_dev_signer.keystore.dict import DictKeystore
|
||||||
|
from cic_types.models.person import Person
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from import_util import BalanceProcessor
|
||||||
|
from import_task import *
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
config_dir = './config'
|
||||||
|
|
||||||
|
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||||
|
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||||
|
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||||
|
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||||
|
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||||
|
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||||
|
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
|
||||||
|
argparser.add_argument('--meta-host', dest='meta_host', type=str, help='metadata server host')
|
||||||
|
argparser.add_argument('--meta-port', dest='meta_port', type=int, help='metadata server host')
|
||||||
|
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
|
||||||
|
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
|
||||||
|
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
|
||||||
|
argparser.add_argument('--token-symbol', default='SRF', type=str, dest='token_symbol', help='Token symbol to use for trnsactions')
|
||||||
|
argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)')
|
||||||
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
|
argparser.add_argument('--offset', type=int, default=0, help='block offset to start syncer from')
|
||||||
|
argparser.add_argument('-v', help='be verbose', action='store_true')
|
||||||
|
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||||
|
argparser.add_argument('user_dir', default='out', type=str, help='user export directory')
|
||||||
|
args = argparser.parse_args(sys.argv[1:])
|
||||||
|
|
||||||
|
if args.v == True:
|
||||||
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
elif args.vv == True:
|
||||||
|
logging.getLogger().setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
config_dir = os.path.join(args.c)
|
||||||
|
os.makedirs(config_dir, 0o777, True)
|
||||||
|
config = confini.Config(config_dir, args.env_prefix)
|
||||||
|
config.process()
|
||||||
|
# override args
|
||||||
|
args_override = {
|
||||||
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
|
'ETH_PROVIDER': getattr(args, 'p'),
|
||||||
|
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||||
|
'REDIS_HOST': getattr(args, 'redis_host'),
|
||||||
|
'REDIS_PORT': getattr(args, 'redis_port'),
|
||||||
|
'REDIS_DB': getattr(args, 'redis_db'),
|
||||||
|
'META_HOST': getattr(args, 'meta_host'),
|
||||||
|
'META_PORT': getattr(args, 'meta_port'),
|
||||||
|
}
|
||||||
|
config.dict_override(args_override, 'cli flag')
|
||||||
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
|
config.censor('PASSWORD', 'SSL')
|
||||||
|
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
|
|
||||||
|
redis_host = config.get('REDIS_HOST')
|
||||||
|
redis_port = config.get('REDIS_PORT')
|
||||||
|
redis_db = config.get('REDIS_DB')
|
||||||
|
r = redis.Redis(redis_host, redis_port, redis_db)
|
||||||
|
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||||
|
|
||||||
|
signer_address = None
|
||||||
|
keystore = DictKeystore()
|
||||||
|
if args.y != None:
|
||||||
|
logg.debug('loading keystore file {}'.format(args.y))
|
||||||
|
signer_address = keystore.import_keystore_file(args.y)
|
||||||
|
logg.debug('now have key for signer address {}'.format(signer_address))
|
||||||
|
signer = EIP155Signer(keystore)
|
||||||
|
|
||||||
|
queue = args.q
|
||||||
|
chain_str = config.get('CIC_CHAIN_SPEC')
|
||||||
|
block_offset = 0
|
||||||
|
if args.head:
|
||||||
|
block_offset = -1
|
||||||
|
else:
|
||||||
|
block_offset = args.offset
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||||
|
old_chain_spec_str = args.old_chain_spec
|
||||||
|
old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str)
|
||||||
|
|
||||||
|
user_dir = args.user_dir # user_out_dir from import_users.py
|
||||||
|
|
||||||
|
token_symbol = args.token_symbol
|
||||||
|
|
||||||
|
MetadataTask.meta_host = config.get('META_HOST')
|
||||||
|
MetadataTask.meta_port = config.get('META_PORT')
|
||||||
|
ImportTask.chain_spec = chain_spec
|
||||||
|
|
||||||
|
def main():
    """Load opening balances from csv, seed the import task classes, queue the
    first send_txs task and start the celery import worker (blocks)."""
    conn = EthHTTPConnection(config.get('ETH_PROVIDER'))

    ImportTask.balance_processor = BalanceProcessor(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'), signer_address, signer)
    ImportTask.balance_processor.init()

    # TODO get decimals from token
    balances = {}
    # BUG FIX: the file mode was previously passed to str.format instead of open()
    f = open('{}/balances.csv'.format(user_dir), 'r')
    remove_zeros = 10**6
    i = 0
    while True:
        l = f.readline()
        # BUG FIX: readline() returns '' at EOF, never None — the old
        # `l == None` check could never terminate the loop.
        if not l:
            break
        r = l.split(',')
        try:
            address = to_checksum_address(r[0])
            sys.stdout.write('loading balance {} {} {}'.format(i, address, r[1]).ljust(200) + "\r")
        except ValueError:
            # first malformed row ends the import (e.g. a header or blank line)
            break
        balance = int(int(r[1].rstrip()) / remove_zeros)
        balances[address] = balance
        i += 1

    f.close()

    ImportTask.balances = balances
    ImportTask.count = i

    # kick off the self-chaining send_txs task at the signer's current nonce
    s = celery.signature(
        'import_task.send_txs',
        [
            MetadataTask.balance_processor.nonce_offset,
        ],
        queue='cic-import-ussd',
    )
    s.apply_async()

    argv = ['worker', '-Q', 'cic-import-ussd', '--loglevel=DEBUG']
    celery_app.worker_main(argv)


if __name__ == '__main__':
    main()
|
218
apps/contract-migration/scripts/cic_ussd/import_task.py
Normal file
218
apps/contract-migration/scripts/cic_ussd/import_task.py
Normal file
@ -0,0 +1,218 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import urllib.parse
|
||||||
|
import urllib.error
|
||||||
|
import urllib.request
|
||||||
|
import json
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from hexathon import (
|
||||||
|
strip_0x,
|
||||||
|
add_0x,
|
||||||
|
)
|
||||||
|
from chainlib.eth.address import to_checksum_address
|
||||||
|
from chainlib.eth.tx import (
|
||||||
|
unpack,
|
||||||
|
raw,
|
||||||
|
)
|
||||||
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
from cic_types.models.person import Person
|
||||||
|
|
||||||
|
#logg = logging.getLogger().getChild(__name__)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
|
||||||
|
class ImportTask(celery.Task):
    """Base class for user-import celery tasks.

    The class attributes below are shared state, populated once by the
    driver script (import_balance.py main()) before the worker starts.
    """

    # mapping of old-chain checksum address -> opening balance (int)
    balances = None
    # root of the user export directory tree
    import_dir = 'out'
    # number of balance rows loaded; used by send_txs as the stop condition
    count = 0
    # chainlib ChainSpec of the target (new) chain
    chain_spec = None
    # BalanceProcessor instance used to build and submit transfer txs
    balance_processor = None
    # None means celery retries indefinitely
    max_retries = None
||||||
|
class MetadataTask(ImportTask):
    """Import task variant that talks to the metadata (meta) HTTP service.

    Connection settings are class attributes, populated by the driver script.
    Transient HTTP/OS errors are auto-retried with jittered backoff.
    """

    meta_host = None
    meta_port = None
    meta_path = ''
    meta_ssl = False
    autoretry_for = (
        urllib.error.HTTPError,
        OSError,
    )
    retry_jitter = True
    retry_backoff = True
    retry_backoff_max = 60

    @classmethod
    def meta_url(self):
        """Return the base url of the metadata service built from class settings."""
        scheme = 'http'
        if self.meta_ssl:
            # BUG FIX: previously `scheme += s` — a NameError on the undefined
            # name `s` whenever meta_ssl was enabled.
            scheme += 's'
        url = urllib.parse.urlparse('{}://{}:{}/{}'.format(scheme, self.meta_host, self.meta_port, self.meta_path))
        return urllib.parse.urlunparse(url)
|
||||||
|
|
||||||
|
|
||||||
|
def old_address_from_phone(base_path, phone):
    """Return the old-chain address recorded for *phone*.

    The phone index file lives at
    ``<base_path>/phone/<p[:2]>/<p[2:4]>/<p>`` where ``p`` is the
    metadata pointer derived from the phone number.
    """
    pointer = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
    index_path = os.path.join(base_path, 'phone', pointer[:2], pointer[2:4], pointer)
    with open(index_path, 'r') as f:
        old_address = f.read()

    return old_address
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=MetadataTask)
def resolve_phone(self, phone):
    """Fetch the wallet address registered for *phone* from the meta service."""
    identifier = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
    url = urllib.parse.urljoin(self.meta_url(), identifier)
    logg.debug('attempt getting phone pointer at {} for phone {}'.format(url, phone))
    response = urllib.request.urlopen(url)
    # the service returns a JSON-quoted address string; strip the quotes
    address = json.load(response).replace('"', '')
    logg.debug('address {} for phone {}'.format(address, phone))

    return address
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=MetadataTask)
def generate_metadata(self, address, phone):
    """Merge the new chain address into the user's old metadata record.

    Reads the old-chain person record, adds the new evm identity, writes the
    updated record under the "new" directory tree and symlinks it under "meta".

    :param address: new chain address (hex), typically chained from resolve_phone
    :param phone: E164 phone number identifying the user
    :returns: address, for chaining into the next task
    """
    old_address = old_address_from_phone(self.import_dir, phone)

    # NOTE: removed a leftover '>>>> ... foo' debug separator log line here
    logg.debug('address {}'.format(address))
    old_address_upper = strip_0x(old_address).upper()
    metadata_path = '{}/old/{}/{}/{}.json'.format(
        self.import_dir,
        old_address_upper[:2],
        old_address_upper[2:4],
        old_address_upper,
    )

    f = open(metadata_path, 'r')
    o = json.load(f)
    f.close()

    u = Person.deserialize(o)

    if u.identities.get('evm') is None:
        u.identities['evm'] = {}
    sub_chain_str = '{}:{}'.format(self.chain_spec.common_name(), self.chain_spec.network_id())
    u.identities['evm'][sub_chain_str] = [add_0x(address)]

    new_address_clean = strip_0x(address)
    filepath = os.path.join(
        self.import_dir,
        'new',
        new_address_clean[:2].upper(),
        new_address_clean[2:4].upper(),
        new_address_clean.upper() + '.json',
    )
    os.makedirs(os.path.dirname(filepath), exist_ok=True)

    o = u.serialize()
    f = open(filepath, 'w')
    f.write(json.dumps(o))
    f.close()

    meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.person')
    meta_filepath = os.path.join(
        self.import_dir,
        'meta',
        '{}.json'.format(new_address_clean.upper()),
    )
    # NOTE(review): raises FileExistsError if the task is re-run for the same
    # address — confirm whether reruns are expected before making idempotent
    os.symlink(os.path.realpath(filepath), meta_filepath)

    logg.debug('found metadata {} for phone {}'.format(o, phone))

    return address
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=MetadataTask)
def opening_balance_tx(self, address, phone, serial):
    """Build the signed opening-balance transfer for a user and stage it on disk.

    The raw signed tx is written under ``<import_dir>/txs/<hash>`` and a
    hidden nonce-indexed symlink is added so send_txs can replay in order.

    :param address: new chain recipient address
    :param phone: E164 phone number used to look up the old balance
    :param serial: per-user sequence number, used as nonce offset
    :returns: hash of the staged transaction
    """
    old_address = old_address_from_phone(self.import_dir, phone)

    k = to_checksum_address(strip_0x(old_address))
    balance = self.balances[k]
    logg.debug('found balance {} for address {} phone {}'.format(balance, old_address, phone))

    decimal_balance = self.balance_processor.get_decimal_amount(int(balance))

    (tx_hash_hex, o) = self.balance_processor.get_rpc_tx(address, decimal_balance, serial)

    tx = unpack(bytes.fromhex(strip_0x(o)), self.chain_spec)
    logg.debug('generated tx token value {} to {} tx hash {}'.format(decimal_balance, address, tx_hash_hex))

    tx_path = os.path.join(
        self.import_dir,
        'txs',
        strip_0x(tx_hash_hex),
    )
    with open(tx_path, 'w') as f:
        f.write(strip_0x(o))

    tx_nonce_path = os.path.join(
        self.import_dir,
        'txs',
        '.' + str(tx['nonce']),
    )
    os.symlink(os.path.realpath(tx_path), tx_nonce_path)

    return tx['hash']
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=ImportTask, autoretry_for=(FileNotFoundError,), max_retries=None, default_retry_delay=0.1)
def send_txs(self, nonce):
    """Submit the staged tx for *nonce*, then chain the task for the next nonce.

    Retries on FileNotFoundError until opening_balance_tx has staged the
    nonce-indexed symlink. Stops once all staged txs have been sent.
    """
    if nonce == self.count + self.balance_processor.nonce_offset:
        logg.info('reached nonce {} (offset {} + count {}) exiting'.format(nonce, self.balance_processor.nonce_offset, self.count))
        return

    logg.debug('attempt to open symlink for nonce {}'.format(nonce))
    tx_nonce_path = os.path.join(
        self.import_dir,
        'txs',
        '.' + str(nonce),
    )
    with open(tx_nonce_path, 'r') as f:
        tx_signed_raw_hex = f.read()

    os.unlink(tx_nonce_path)

    rpc_request = raw(add_0x(tx_signed_raw_hex))
    tx_hash_hex = self.balance_processor.conn.do(rpc_request)

    logg.info('sent nonce {} tx hash {}'.format(nonce, tx_hash_hex)) #tx_signed_raw_hex))

    nonce += 1

    # requeue on the same queue this task arrived on
    queue = self.request.delivery_info.get('routing_key')
    next_send = celery.signature(
        'import_task.send_txs',
        [
            nonce,
        ],
        queue=queue,
    )
    next_send.apply_async()

    return nonce
|
191
apps/contract-migration/scripts/cic_ussd/import_users.py
Normal file
191
apps/contract-migration/scripts/cic_ussd/import_users.py
Normal file
@ -0,0 +1,191 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
import argparse
|
||||||
|
import uuid
|
||||||
|
import datetime
|
||||||
|
import time
|
||||||
|
import urllib.request
|
||||||
|
from glob import glob
|
||||||
|
|
||||||
|
# third-party imports
|
||||||
|
import redis
|
||||||
|
import confini
|
||||||
|
import celery
|
||||||
|
from hexathon import (
|
||||||
|
add_0x,
|
||||||
|
strip_0x,
|
||||||
|
)
|
||||||
|
from chainlib.eth.address import to_checksum
|
||||||
|
from cic_types.models.person import Person
|
||||||
|
from cic_eth.api.api_task import Api
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from cic_types.processor import generate_metadata_pointer
|
||||||
|
import phonenumbers
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
default_config_dir = '/usr/local/etc/cic'
|
||||||
|
|
||||||
|
argparser = argparse.ArgumentParser()
|
||||||
|
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file')
|
||||||
|
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string')
|
||||||
|
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
|
||||||
|
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
|
||||||
|
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
|
||||||
|
argparser.add_argument('--batch-size', dest='batch_size', default=100, type=int, help='burst size of sending transactions to node') # batch size should be slightly below cumulative gas limit worth, eg 80000 gas txs with 8000000 limit is a bit less than 100 batch size
|
||||||
|
argparser.add_argument('--batch-delay', dest='batch_delay', default=3, type=int, help='seconds delay between batches')
|
||||||
|
argparser.add_argument('--timeout', default=60.0, type=float, help='Callback timeout')
|
||||||
|
argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue')
|
||||||
|
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||||
|
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
|
||||||
|
argparser.add_argument('user_dir', type=str, help='path to users export dir tree')
|
||||||
|
args = argparser.parse_args()
|
||||||
|
|
||||||
|
if args.v:
|
||||||
|
logg.setLevel(logging.INFO)
|
||||||
|
elif args.vv:
|
||||||
|
logg.setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
config_dir = args.c
|
||||||
|
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||||
|
config.process()
|
||||||
|
args_override = {
|
||||||
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
|
'REDIS_HOST': getattr(args, 'redis_host'),
|
||||||
|
'REDIS_PORT': getattr(args, 'redis_port'),
|
||||||
|
'REDIS_DB': getattr(args, 'redis_db'),
|
||||||
|
}
|
||||||
|
config.dict_override(args_override, 'cli')
|
||||||
|
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||||
|
|
||||||
|
redis_host = config.get('REDIS_HOST')
|
||||||
|
redis_port = config.get('REDIS_PORT')
|
||||||
|
redis_db = config.get('REDIS_DB')
|
||||||
|
r = redis.Redis(redis_host, redis_port, redis_db)
|
||||||
|
|
||||||
|
ps = r.pubsub()
|
||||||
|
|
||||||
|
user_new_dir = os.path.join(args.user_dir, 'new')
|
||||||
|
os.makedirs(user_new_dir)
|
||||||
|
|
||||||
|
meta_dir = os.path.join(args.user_dir, 'meta')
|
||||||
|
os.makedirs(meta_dir)
|
||||||
|
|
||||||
|
user_old_dir = os.path.join(args.user_dir, 'old')
|
||||||
|
os.stat(user_old_dir)
|
||||||
|
|
||||||
|
txs_dir = os.path.join(args.user_dir, 'txs')
|
||||||
|
os.makedirs(txs_dir)
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
|
chain_str = str(chain_spec)
|
||||||
|
|
||||||
|
batch_size = args.batch_size
|
||||||
|
batch_delay = args.batch_delay
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def build_ussd_request(phone, host, port, service_code, username, password, ssl=False):
    """Build the HTTP request that starts a ussd session for *phone*.

    Credentials are passed as query parameters; the json body mimics the
    payload of an incoming ussd gateway callback.
    """
    scheme = 'https' if ssl else 'http'
    url = '{}://{}:{}'.format(scheme, host, port)
    url += '/?username={}&password={}'.format(username, password) #config.get('USSD_USER'), config.get('USSD_PASS'))

    logg.info('ussd service url {}'.format(url))
    logg.info('ussd phone {}'.format(phone))

    payload = {
        'sessionId': uuid.uuid4().hex,
        'serviceCode': service_code,
        'phoneNumber': phone,
        'text': service_code,
    }
    req = urllib.request.Request(url)
    req.add_header('Content-Type', 'application/json')
    req.data = json.dumps(payload).encode('utf-8')

    return req
|
||||||
|
|
||||||
|
|
||||||
|
def register_ussd(i, u):
    """Trigger ussd account registration for person record *u* (index *i*)."""
    parsed = phonenumbers.parse(u.tel)
    phone = phonenumbers.format_number(parsed, phonenumbers.PhoneNumberFormat.E164)
    logg.debug('tel {} {}'.format(u.tel, phone))
    req = build_ussd_request(phone, 'localhost', 63315, '*483*46#', '', '')
    response_data = urllib.request.urlopen(req).read().decode('utf-8')
    # response shape: 3-char state code, separator, then the body
    state = response_data[:3]
    body = response_data[4:]
    logg.debug('ussd reponse: {}'.format(body))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
|
||||||
|
i = 0
|
||||||
|
j = 0
|
||||||
|
for x in os.walk(user_old_dir):
|
||||||
|
for y in x[2]:
|
||||||
|
if y[len(y)-5:] != '.json':
|
||||||
|
continue
|
||||||
|
filepath = os.path.join(x[0], y)
|
||||||
|
f = open(filepath, 'r')
|
||||||
|
try:
|
||||||
|
o = json.load(f)
|
||||||
|
except json.decoder.JSONDecodeError as e:
|
||||||
|
f.close()
|
||||||
|
logg.error('load error for {}: {}'.format(y, e))
|
||||||
|
continue
|
||||||
|
f.close()
|
||||||
|
u = Person.deserialize(o)
|
||||||
|
|
||||||
|
new_address = register_ussd(i, u)
|
||||||
|
|
||||||
|
phone_object = phonenumbers.parse(u.tel)
|
||||||
|
phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
|
||||||
|
|
||||||
|
s_phone = celery.signature(
|
||||||
|
'import_task.resolve_phone',
|
||||||
|
[
|
||||||
|
phone,
|
||||||
|
],
|
||||||
|
queue='cic-import-ussd',
|
||||||
|
)
|
||||||
|
|
||||||
|
s_meta = celery.signature(
|
||||||
|
'import_task.generate_metadata',
|
||||||
|
[
|
||||||
|
phone,
|
||||||
|
],
|
||||||
|
queue='cic-import-ussd',
|
||||||
|
)
|
||||||
|
|
||||||
|
s_balance = celery.signature(
|
||||||
|
'import_task.opening_balance_tx',
|
||||||
|
[
|
||||||
|
phone,
|
||||||
|
i,
|
||||||
|
],
|
||||||
|
queue='cic-import-ussd',
|
||||||
|
)
|
||||||
|
|
||||||
|
s_meta.link(s_balance)
|
||||||
|
s_phone.link(s_meta)
|
||||||
|
s_phone.apply_async(countdown=7) # block time plus a bit of time for ussd processing
|
||||||
|
|
||||||
|
i += 1
|
||||||
|
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")
|
||||||
|
|
||||||
|
j += 1
|
||||||
|
if j == batch_size:
|
||||||
|
time.sleep(batch_delay)
|
||||||
|
j = 0
|
||||||
|
|
||||||
|
#fi.close()
|
72
apps/contract-migration/scripts/cic_ussd/import_util.py
Normal file
72
apps/contract-migration/scripts/cic_ussd/import_util.py
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from eth_contract_registry import Registry
|
||||||
|
from eth_token_index import TokenUniqueSymbolIndex
|
||||||
|
from chainlib.eth.gas import OverrideGasOracle
|
||||||
|
from chainlib.eth.nonce import OverrideNonceOracle
|
||||||
|
from chainlib.eth.erc20 import ERC20
|
||||||
|
from chainlib.eth.tx import (
|
||||||
|
count,
|
||||||
|
TxFormat,
|
||||||
|
)
|
||||||
|
|
||||||
|
logg = logging.getLogger().getChild(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class BalanceProcessor:
    """Builds signed ERC20 opening-balance transfers against the token registry.

    Resolves the token index and SRF token address from the contract registry,
    and hands out signed transfer txs with explicit nonce offsets.
    """

    def __init__(self, conn, chain_spec, registry_address, signer_address, signer):
        """
        :param conn: chainlib rpc connection
        :param chain_spec: chainlib ChainSpec of the target chain
        :param registry_address: CIC contract registry address
        :param signer_address: address funds are sent from
        :param signer: signer implementation used for tx signing
        """
        self.chain_spec = chain_spec
        self.conn = conn
        #self.signer_address = signer_address
        self.registry_address = registry_address

        self.token_index_address = None
        self.token_address = None
        self.signer_address = signer_address
        self.signer = signer

        # base nonce = signer's current tx count at construction time
        o = count(signer_address)
        c = self.conn.do(o)
        self.nonce_offset = int(c, 16)
        self.gas_oracle = OverrideGasOracle(conn=conn, limit=8000000)

        # scaled up from token decimals in init()
        self.value_multiplier = 1


    def init(self):
        """Resolve token addresses and decimals via rpc; must run before use."""
        # Get Token registry address
        registry = Registry(self.chain_spec)
        o = registry.address_of(self.registry_address, 'TokenRegistry')
        r = self.conn.do(o)
        self.token_index_address = registry.parse_address_of(r)
        logg.info('found token index address {}'.format(self.token_index_address))

        # resolve the SRF token through the token symbol index
        token_registry = TokenUniqueSymbolIndex(self.chain_spec)
        o = token_registry.address_of(self.token_index_address, 'SRF')
        r = self.conn.do(o)
        self.token_address = token_registry.parse_address_of(r)
        logg.info('found SRF token address {}'.format(self.token_address))

        # fetch decimals to convert whole-token amounts to base units
        tx_factory = ERC20(self.chain_spec)
        o = tx_factory.decimals(self.token_address)
        r = self.conn.do(o)
        n = tx_factory.parse_decimals(r)
        self.value_multiplier = 10 ** n


    def get_rpc_tx(self, recipient, value, i):
        """Return (tx_hash_hex, signed_raw_tx) for a transfer at nonce offset *i*.

        :param recipient: address to receive the tokens
        :param value: amount in token base units
        :param i: serial number added to the base nonce_offset
        """
        logg.debug('initiating nonce offset {} for recipient {}'.format(self.nonce_offset + i, recipient))
        nonce_oracle = OverrideNonceOracle(self.signer_address, self.nonce_offset + i)
        tx_factory = ERC20(self.chain_spec, signer=self.signer, nonce_oracle=nonce_oracle, gas_oracle=self.gas_oracle)
        return tx_factory.transfer(self.token_address, self.signer_address, recipient, value, tx_format=TxFormat.RLP_SIGNED)
        #(tx_hash_hex, o) = tx_factory.transfer(self.token_address, self.signer_address, recipient, value)
        #self.conn.do(o)
        #return tx_hash_hex


    def get_decimal_amount(self, value):
        """Convert a whole-token amount to base units using the token decimals."""
        return value * self.value_multiplier
|
@ -1,2 +1,5 @@
|
|||||||
[meta]
|
[meta]
|
||||||
url = http://localhost:63380
|
url = http://localhost:63380
|
||||||
|
host = localhost
|
||||||
|
port = 63380
|
||||||
|
ssl = 0
|
||||||
|
@ -24,6 +24,7 @@ from cic_types.models.person import (
|
|||||||
get_contact_data_from_vcard,
|
get_contact_data_from_vcard,
|
||||||
)
|
)
|
||||||
from chainlib.eth.address import to_checksum_address
|
from chainlib.eth.address import to_checksum_address
|
||||||
|
import phonenumbers
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
@ -80,10 +81,12 @@ phone_idx = []
|
|||||||
user_dir = args.dir
|
user_dir = args.dir
|
||||||
user_count = args.user_count
|
user_count = args.user_count
|
||||||
|
|
||||||
|
random.seed()
|
||||||
|
|
||||||
def genPhoneIndex(phone):
|
def genPhoneIndex(phone):
|
||||||
h = hashlib.new('sha256')
|
h = hashlib.new('sha256')
|
||||||
h.update(phone.encode('utf-8'))
|
h.update(phone.encode('utf-8'))
|
||||||
h.update(b'cic.msisdn')
|
h.update(b':cic.phone')
|
||||||
return h.digest().hex()
|
return h.digest().hex()
|
||||||
|
|
||||||
|
|
||||||
@ -101,7 +104,9 @@ def genDate():
|
|||||||
|
|
||||||
|
|
||||||
def genPhone():
|
def genPhone():
|
||||||
return fake.msisdn()
|
phone_str = '+254' + str(random.randint(100000000, 999999999))
|
||||||
|
phone_object = phonenumbers.parse(phone_str)
|
||||||
|
return phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
|
||||||
|
|
||||||
|
|
||||||
def genPersonal(phone):
|
def genPersonal(phone):
|
||||||
@ -205,14 +210,14 @@ if __name__ == '__main__':
|
|||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
pidx = genPhoneIndex(phone)
|
pidx = genPhoneIndex(phone)
|
||||||
d = prepareLocalFilePath(os.path.join(user_dir, 'phone'), uid)
|
d = prepareLocalFilePath(os.path.join(user_dir, 'phone'), pidx)
|
||||||
f = open('{}/{}'.format(d, pidx), 'w')
|
f = open('{}/{}'.format(d, pidx), 'w')
|
||||||
f.write(eth)
|
f.write(eth)
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
amount = genAmount()
|
amount = genAmount()
|
||||||
fa.write('{},{}\n'.format(eth,amount))
|
fa.write('{},{}\n'.format(eth,amount))
|
||||||
logg.debug('pidx {}, uid {}, eth {}, amount {}'.format(pidx, uid, eth, amount))
|
logg.debug('pidx {}, uid {}, eth {}, amount {}, phone {}'.format(pidx, uid, eth, amount, phone))
|
||||||
|
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
|
309
apps/contract-migration/scripts/eth/import_balance.py
Normal file
309
apps/contract-migration/scripts/eth/import_balance.py
Normal file
@ -0,0 +1,309 @@
|
|||||||
|
# standard imports
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
import argparse
|
||||||
|
import sys
|
||||||
|
import re
|
||||||
|
import hashlib
|
||||||
|
import csv
|
||||||
|
import json
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import eth_abi
|
||||||
|
import confini
|
||||||
|
from hexathon import (
|
||||||
|
strip_0x,
|
||||||
|
add_0x,
|
||||||
|
)
|
||||||
|
from chainsyncer.backend import MemBackend
|
||||||
|
from chainsyncer.driver import HeadSyncer
|
||||||
|
from chainlib.eth.connection import EthHTTPConnection
|
||||||
|
from chainlib.eth.block import (
|
||||||
|
block_latest,
|
||||||
|
block_by_number,
|
||||||
|
Block,
|
||||||
|
)
|
||||||
|
from chainlib.eth.hash import keccak256_string_to_hex
|
||||||
|
from chainlib.eth.address import to_checksum_address
|
||||||
|
from chainlib.eth.erc20 import ERC20
|
||||||
|
from chainlib.eth.gas import OverrideGasOracle
|
||||||
|
from chainlib.eth.nonce import RPCNonceOracle
|
||||||
|
from chainlib.eth.tx import TxFactory
|
||||||
|
from chainlib.eth.rpc import jsonrpc_template
|
||||||
|
from chainlib.eth.error import EthException
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
|
||||||
|
from crypto_dev_signer.keystore.dict import DictKeystore
|
||||||
|
from cic_types.models.person import Person
|
||||||
|
|
||||||
|
|
||||||
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
config_dir = './config'
|
||||||
|
|
||||||
|
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||||
|
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||||
|
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||||
|
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||||
|
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||||
|
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||||
|
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
|
||||||
|
argparser.add_argument('--token-symbol', default='SRF', type=str, dest='token_symbol', help='Token symbol to use for trnsactions')
|
||||||
|
argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)')
|
||||||
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
|
argparser.add_argument('-q', type=str, default='cic-eth', help='celery queue to submit transaction tasks to')
|
||||||
|
argparser.add_argument('--offset', type=int, default=0, help='block offset to start syncer from')
|
||||||
|
argparser.add_argument('-v', help='be verbose', action='store_true')
|
||||||
|
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||||
|
argparser.add_argument('user_dir', type=str, help='user export directory')
|
||||||
|
args = argparser.parse_args(sys.argv[1:])
|
||||||
|
|
||||||
|
if args.v == True:
|
||||||
|
logging.getLogger().setLevel(logging.INFO)
|
||||||
|
elif args.vv == True:
|
||||||
|
logging.getLogger().setLevel(logging.DEBUG)
|
||||||
|
|
||||||
|
config_dir = os.path.join(args.c)
|
||||||
|
os.makedirs(config_dir, 0o777, True)
|
||||||
|
config = confini.Config(config_dir, args.env_prefix)
|
||||||
|
config.process()
|
||||||
|
# override args
|
||||||
|
args_override = {
|
||||||
|
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||||
|
'ETH_PROVIDER': getattr(args, 'p'),
|
||||||
|
'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
|
||||||
|
}
|
||||||
|
config.dict_override(args_override, 'cli flag')
|
||||||
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
|
config.censor('PASSWORD', 'SSL')
|
||||||
|
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
|
|
||||||
|
#app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||||
|
|
||||||
|
signer_address = None
|
||||||
|
keystore = DictKeystore()
|
||||||
|
if args.y != None:
|
||||||
|
logg.debug('loading keystore file {}'.format(args.y))
|
||||||
|
signer_address = keystore.import_keystore_file(args.y)
|
||||||
|
logg.debug('now have key for signer address {}'.format(signer_address))
|
||||||
|
signer = EIP155Signer(keystore)
|
||||||
|
|
||||||
|
queue = args.q
|
||||||
|
chain_str = config.get('CIC_CHAIN_SPEC')
|
||||||
|
block_offset = 0
|
||||||
|
if args.head:
|
||||||
|
block_offset = -1
|
||||||
|
else:
|
||||||
|
block_offset = args.offset
|
||||||
|
|
||||||
|
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||||
|
old_chain_spec_str = args.old_chain_spec
|
||||||
|
old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str)
|
||||||
|
|
||||||
|
user_dir = args.user_dir # user_out_dir from import_users.py
|
||||||
|
|
||||||
|
token_symbol = args.token_symbol
|
||||||
|
|
||||||
|
|
||||||
|
class Handler:
|
||||||
|
|
||||||
|
account_index_add_signature = keccak256_string_to_hex('add(address)')[:8]
|
||||||
|
|
||||||
|
def __init__(self, conn, chain_spec, user_dir, balances, token_address, signer, gas_oracle, nonce_oracle):
|
||||||
|
self.token_address = token_address
|
||||||
|
self.user_dir = user_dir
|
||||||
|
self.balances = balances
|
||||||
|
self.chain_spec = chain_spec
|
||||||
|
self.tx_factory = ERC20(chain_spec, signer, gas_oracle, nonce_oracle)
|
||||||
|
|
||||||
|
|
||||||
|
def name(self):
|
||||||
|
return 'balance_handler'
|
||||||
|
|
||||||
|
|
||||||
|
def filter(self, conn, block, tx, db_session):
|
||||||
|
if tx.payload == None or len(tx.payload) == 0:
|
||||||
|
logg.debug('no payload, skipping {}'.format(tx))
|
||||||
|
return
|
||||||
|
|
||||||
|
if tx.payload[:8] == self.account_index_add_signature:
|
||||||
|
recipient = eth_abi.decode_single('address', bytes.fromhex(tx.payload[-64:]))
|
||||||
|
#original_address = to_checksum_address(self.addresses[to_checksum_address(recipient)])
|
||||||
|
user_file = 'new/{}/{}/{}.json'.format(
|
||||||
|
recipient[2:4].upper(),
|
||||||
|
recipient[4:6].upper(),
|
||||||
|
recipient[2:].upper(),
|
||||||
|
)
|
||||||
|
filepath = os.path.join(self.user_dir, user_file)
|
||||||
|
o = None
|
||||||
|
try:
|
||||||
|
f = open(filepath, 'r')
|
||||||
|
o = json.load(f)
|
||||||
|
f.close()
|
||||||
|
except FileNotFoundError:
|
||||||
|
logg.error('no import record of address {}'.format(recipient))
|
||||||
|
return
|
||||||
|
u = Person.deserialize(o)
|
||||||
|
original_address = u.identities[old_chain_spec.engine()]['{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())][0]
|
||||||
|
try:
|
||||||
|
balance = self.balances[original_address]
|
||||||
|
except KeyError as e:
|
||||||
|
logg.error('balance get fail orig {} new {}'.format(original_address, recipient))
|
||||||
|
return
|
||||||
|
|
||||||
|
# TODO: store token object in handler ,get decimals from there
|
||||||
|
multiplier = 10**6
|
||||||
|
balance_full = balance * multiplier
|
||||||
|
logg.info('registered {} originally {} ({}) tx hash {} balance {}'.format(recipient, original_address, u, tx.hash, balance_full))
|
||||||
|
|
||||||
|
(tx_hash_hex, o) = self.tx_factory.transfer(self.token_address, signer_address, recipient, balance_full)
|
||||||
|
logg.info('submitting erc20 transfer tx {} for recipient {}'.format(tx_hash_hex, recipient))
|
||||||
|
r = conn.do(o)
|
||||||
|
|
||||||
|
tx_path = os.path.join(
|
||||||
|
user_dir,
|
||||||
|
'txs',
|
||||||
|
strip_0x(tx_hash_hex),
|
||||||
|
)
|
||||||
|
f = open(tx_path, 'w')
|
||||||
|
f.write(strip_0x(o['params'][0]))
|
||||||
|
f.close()
|
||||||
|
# except TypeError as e:
|
||||||
|
# logg.warning('typerror {}'.format(e))
|
||||||
|
# pass
|
||||||
|
# except IndexError as e:
|
||||||
|
# logg.warning('indexerror {}'.format(e))
|
||||||
|
# pass
|
||||||
|
# except EthException as e:
|
||||||
|
# logg.error('send error {}'.format(e).ljust(200))
|
||||||
|
#except KeyError as e:
|
||||||
|
# logg.error('key record not found in imports: {}'.format(e).ljust(200))
|
||||||
|
|
||||||
|
|
||||||
|
#class BlockGetter:
|
||||||
|
#
|
||||||
|
# def __init__(self, conn, gas_oracle, nonce_oracle, chain_spec):
|
||||||
|
# self.conn = conn
|
||||||
|
# self.tx_factory = ERC20(signer=signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle, chain_id=chain_id)
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# def get(self, n):
|
||||||
|
# o = block_by_number(n)
|
||||||
|
# r = self.conn.do(o)
|
||||||
|
# b = None
|
||||||
|
# try:
|
||||||
|
# b = Block(r)
|
||||||
|
# except TypeError as e:
|
||||||
|
# if r == None:
|
||||||
|
# logg.debug('block not found {}'.format(n))
|
||||||
|
# else:
|
||||||
|
# logg.error('block retrieve error {}'.format(e))
|
||||||
|
# return b
|
||||||
|
|
||||||
|
|
||||||
|
def progress_callback(block_number, tx_index, s):
|
||||||
|
sys.stdout.write(str(s).ljust(200) + "\n")
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
global chain_str, block_offset, user_dir
|
||||||
|
|
||||||
|
conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
|
||||||
|
gas_oracle = OverrideGasOracle(conn=conn, limit=8000000)
|
||||||
|
nonce_oracle = RPCNonceOracle(signer_address, conn)
|
||||||
|
|
||||||
|
# Get Token registry address
|
||||||
|
txf = TxFactory(chain_spec, signer=signer, gas_oracle=gas_oracle, nonce_oracle=None)
|
||||||
|
tx = txf.template(signer_address, config.get('CIC_REGISTRY_ADDRESS'))
|
||||||
|
|
||||||
|
registry_addressof_method = keccak256_string_to_hex('addressOf(bytes32)')[:8]
|
||||||
|
data = add_0x(registry_addressof_method)
|
||||||
|
data += eth_abi.encode_single('bytes32', b'TokenRegistry').hex()
|
||||||
|
txf.set_code(tx, data)
|
||||||
|
|
||||||
|
o = jsonrpc_template()
|
||||||
|
o['method'] = 'eth_call'
|
||||||
|
o['params'].append(txf.normalize(tx))
|
||||||
|
o['params'].append('latest')
|
||||||
|
r = conn.do(o)
|
||||||
|
token_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
|
||||||
|
logg.info('found token index address {}'.format(token_index_address))
|
||||||
|
|
||||||
|
|
||||||
|
# Get Sarafu token address
|
||||||
|
tx = txf.template(signer_address, token_index_address)
|
||||||
|
data = add_0x(registry_addressof_method)
|
||||||
|
h = hashlib.new('sha256')
|
||||||
|
h.update(token_symbol.encode('utf-8'))
|
||||||
|
z = h.digest()
|
||||||
|
data += eth_abi.encode_single('bytes32', z).hex()
|
||||||
|
txf.set_code(tx, data)
|
||||||
|
o = jsonrpc_template()
|
||||||
|
o['method'] = 'eth_call'
|
||||||
|
o['params'].append(txf.normalize(tx))
|
||||||
|
o['params'].append('latest')
|
||||||
|
r = conn.do(o)
|
||||||
|
try:
|
||||||
|
sarafu_token_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
|
||||||
|
except ValueError as e:
|
||||||
|
logg.critical('lookup failed for token {}: {}'.format(token_symbol, e))
|
||||||
|
sys.exit(1)
|
||||||
|
logg.info('found token address {}'.format(sarafu_token_address))
|
||||||
|
|
||||||
|
syncer_backend = MemBackend(chain_str, 0)
|
||||||
|
|
||||||
|
if block_offset == -1:
|
||||||
|
o = block_latest()
|
||||||
|
r = conn.do(o)
|
||||||
|
block_offset = int(strip_0x(r), 16) + 1
|
||||||
|
#
|
||||||
|
# addresses = {}
|
||||||
|
# f = open('{}/addresses.csv'.format(user_dir, 'r'))
|
||||||
|
# while True:
|
||||||
|
# l = f.readline()
|
||||||
|
# if l == None:
|
||||||
|
# break
|
||||||
|
# r = l.split(',')
|
||||||
|
# try:
|
||||||
|
# k = r[0]
|
||||||
|
# v = r[1].rstrip()
|
||||||
|
# addresses[k] = v
|
||||||
|
# sys.stdout.write('loading address mapping {} -> {}'.format(k, v).ljust(200) + "\r")
|
||||||
|
# except IndexError as e:
|
||||||
|
# break
|
||||||
|
# f.close()
|
||||||
|
|
||||||
|
# TODO get decimals from token
|
||||||
|
balances = {}
|
||||||
|
f = open('{}/balances.csv'.format(user_dir, 'r'))
|
||||||
|
remove_zeros = 10**6
|
||||||
|
i = 0
|
||||||
|
while True:
|
||||||
|
l = f.readline()
|
||||||
|
if l == None:
|
||||||
|
break
|
||||||
|
r = l.split(',')
|
||||||
|
try:
|
||||||
|
address = to_checksum_address(r[0])
|
||||||
|
sys.stdout.write('loading balance {} {} {}'.format(i, address, r[1]).ljust(200) + "\r")
|
||||||
|
except ValueError:
|
||||||
|
break
|
||||||
|
balance = int(int(r[1].rstrip()) / remove_zeros)
|
||||||
|
balances[address] = balance
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
syncer_backend.set(block_offset, 0)
|
||||||
|
syncer = HeadSyncer(syncer_backend, progress_callback=progress_callback)
|
||||||
|
handler = Handler(conn, chain_spec, user_dir, balances, sarafu_token_address, signer, gas_oracle, nonce_oracle)
|
||||||
|
syncer.add_filter(handler)
|
||||||
|
syncer.loop(1, conn)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
main()
|
@ -7,6 +7,7 @@ import argparse
|
|||||||
import uuid
|
import uuid
|
||||||
import datetime
|
import datetime
|
||||||
import time
|
import time
|
||||||
|
import phonenumbers
|
||||||
from glob import glob
|
from glob import glob
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
@ -67,9 +68,15 @@ os.makedirs(user_new_dir)
|
|||||||
meta_dir = os.path.join(args.user_dir, 'meta')
|
meta_dir = os.path.join(args.user_dir, 'meta')
|
||||||
os.makedirs(meta_dir)
|
os.makedirs(meta_dir)
|
||||||
|
|
||||||
|
phone_dir = os.path.join(args.user_dir, 'phone')
|
||||||
|
os.makedirs(os.path.join(phone_dir, 'meta'))
|
||||||
|
|
||||||
user_old_dir = os.path.join(args.user_dir, 'old')
|
user_old_dir = os.path.join(args.user_dir, 'old')
|
||||||
os.stat(user_old_dir)
|
os.stat(user_old_dir)
|
||||||
|
|
||||||
|
txs_dir = os.path.join(args.user_dir, 'txs')
|
||||||
|
os.makedirs(txs_dir)
|
||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||||
chain_str = str(chain_spec)
|
chain_str = str(chain_spec)
|
||||||
|
|
||||||
@ -88,7 +95,7 @@ signer = EIP155Signer(keystore)
|
|||||||
|
|
||||||
nonce_oracle = RPCNonceOracle(signer_address, rpc)
|
nonce_oracle = RPCNonceOracle(signer_address, rpc)
|
||||||
|
|
||||||
registry = Registry()
|
registry = Registry(chain_spec)
|
||||||
o = registry.address_of(config.get('CIC_REGISTRY_ADDRESS'), 'AccountRegistry')
|
o = registry.address_of(config.get('CIC_REGISTRY_ADDRESS'), 'AccountRegistry')
|
||||||
r = rpc.do(o)
|
r = rpc.do(o)
|
||||||
account_registry_address = registry.parse_address_of(r)
|
account_registry_address = registry.parse_address_of(r)
|
||||||
@ -109,7 +116,7 @@ def register_eth(i, u):
|
|||||||
rpc.do(o)
|
rpc.do(o)
|
||||||
|
|
||||||
pk = keystore.get(address)
|
pk = keystore.get(address)
|
||||||
keyfile_content = to_keyfile_dict(pk, '')
|
keyfile_content = to_keyfile_dict(pk, 'foo')
|
||||||
keyfile_path = os.path.join(keyfile_dir, '{}.json'.format(address))
|
keyfile_path = os.path.join(keyfile_dir, '{}.json'.format(address))
|
||||||
f = open(keyfile_path, 'w')
|
f = open(keyfile_path, 'w')
|
||||||
json.dump(keyfile_content, f)
|
json.dump(keyfile_content, f)
|
||||||
@ -166,10 +173,31 @@ if __name__ == '__main__':
|
|||||||
f.write(json.dumps(o))
|
f.write(json.dumps(o))
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), 'cic.person')
|
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.person')
|
||||||
meta_filepath = os.path.join(meta_dir, '{}.json'.format(new_address_clean.upper()))
|
meta_filepath = os.path.join(meta_dir, '{}.json'.format(new_address_clean.upper()))
|
||||||
os.symlink(os.path.realpath(filepath), meta_filepath)
|
os.symlink(os.path.realpath(filepath), meta_filepath)
|
||||||
|
|
||||||
|
phone_object = phonenumbers.parse(u.tel)
|
||||||
|
phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
|
||||||
|
logg.debug('>>>>> Using phone {}'.format(phone))
|
||||||
|
meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
|
||||||
|
meta_phone_filepath = os.path.join(phone_dir, 'meta', meta_phone_key)
|
||||||
|
|
||||||
|
filepath = os.path.join(
|
||||||
|
phone_dir,
|
||||||
|
'new',
|
||||||
|
meta_phone_key[:2].upper(),
|
||||||
|
meta_phone_key[2:4].upper(),
|
||||||
|
meta_phone_key.upper(),
|
||||||
|
)
|
||||||
|
os.makedirs(os.path.dirname(filepath), exist_ok=True)
|
||||||
|
|
||||||
|
f = open(filepath, 'w')
|
||||||
|
f.write(to_checksum_address(new_address_clean))
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
os.symlink(os.path.realpath(filepath), meta_phone_filepath)
|
||||||
|
|
||||||
i += 1
|
i += 1
|
||||||
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")
|
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")
|
||||||
|
|
File diff suppressed because it is too large
Load Diff
@ -1 +0,0 @@
|
|||||||
{"metricId":"ea11447f-da1c-49e6-b0a2-8a988a99e3ce","metrics":{"from":"2021-02-12T18:59:21.666Z","to":"2021-02-12T18:59:21.666Z","successfulInstalls":0,"failedInstalls":1}}
|
|
@ -1 +0,0 @@
|
|||||||
python import_balance.py -c config -i evm:bloxberg:8996 -y /home/lash/tmp/d/keystore/UTC--2021-02-07T09-58-35.341813355Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c -v $@
|
|
@ -1 +0,0 @@
|
|||||||
python import_users.py -c config --redis-host-callback redis -vv $@
|
|
2111
apps/contract-migration/scripts/package-lock.json
generated
2111
apps/contract-migration/scripts/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@ -11,6 +11,8 @@ import csv
|
|||||||
import json
|
import json
|
||||||
import urllib
|
import urllib
|
||||||
import copy
|
import copy
|
||||||
|
import uuid
|
||||||
|
import urllib.request
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
@ -57,18 +59,29 @@ custodial_tests = [
|
|||||||
'faucet',
|
'faucet',
|
||||||
]
|
]
|
||||||
|
|
||||||
all_tests = custodial_tests + [
|
metadata_tests = [
|
||||||
|
'metadata',
|
||||||
|
'metadata_phone',
|
||||||
|
]
|
||||||
|
|
||||||
|
eth_tests = [
|
||||||
'accounts_index',
|
'accounts_index',
|
||||||
'balance',
|
'balance',
|
||||||
'metadata',
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
phone_tests = [
|
||||||
|
'ussd',
|
||||||
|
]
|
||||||
|
|
||||||
|
all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||||
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||||
argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url')
|
argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url')
|
||||||
|
argparser.add_argument('--ussd-provider', type=str, dest='ussd_provider', default='http://localhost:63315', help='cic-ussd url')
|
||||||
argparser.add_argument('--skip-custodial', dest='skip_custodial', action='store_true', help='skip all custodial verifications')
|
argparser.add_argument('--skip-custodial', dest='skip_custodial', action='store_true', help='skip all custodial verifications')
|
||||||
argparser.add_argument('--exclude', action='append', type=str, default=[], help='skip specified verification')
|
argparser.add_argument('--exclude', action='append', type=str, default=[], help='skip specified verification')
|
||||||
argparser.add_argument('--include', action='append', type=str, help='include specified verification')
|
argparser.add_argument('--include', action='append', type=str, help='include specified verification')
|
||||||
@ -98,6 +111,9 @@ args_override = {
|
|||||||
config.dict_override(args_override, 'cli flag')
|
config.dict_override(args_override, 'cli flag')
|
||||||
config.censor('PASSWORD', 'DATABASE')
|
config.censor('PASSWORD', 'DATABASE')
|
||||||
config.censor('PASSWORD', 'SSL')
|
config.censor('PASSWORD', 'SSL')
|
||||||
|
config.add(args.meta_provider, '_META_PROVIDER', True)
|
||||||
|
config.add(args.ussd_provider, '_USSD_PROVIDER', True)
|
||||||
|
|
||||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||||
|
|
||||||
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||||
@ -107,7 +123,6 @@ chain_str = str(chain_spec)
|
|||||||
old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
|
old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
|
||||||
old_chain_str = str(old_chain_spec)
|
old_chain_str = str(old_chain_spec)
|
||||||
user_dir = args.user_dir # user_out_dir from import_users.py
|
user_dir = args.user_dir # user_out_dir from import_users.py
|
||||||
meta_url = args.meta_provider
|
|
||||||
exit_on_error = args.x
|
exit_on_error = args.x
|
||||||
|
|
||||||
active_tests = []
|
active_tests = []
|
||||||
@ -271,7 +286,7 @@ class Verifier:
|
|||||||
|
|
||||||
def verify_metadata(self, address, balance=None):
|
def verify_metadata(self, address, balance=None):
|
||||||
k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), ':cic.person')
|
k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), ':cic.person')
|
||||||
url = os.path.join(meta_url, k)
|
url = os.path.join(config.get('_META_PROVIDER'), k)
|
||||||
logg.debug('verify metadata url {}'.format(url))
|
logg.debug('verify metadata url {}'.format(url))
|
||||||
try:
|
try:
|
||||||
res = urllib.request.urlopen(url)
|
res = urllib.request.urlopen(url)
|
||||||
@ -299,6 +314,77 @@ class Verifier:
|
|||||||
raise VerifierError(o_retrieved, 'metadata (person)')
|
raise VerifierError(o_retrieved, 'metadata (person)')
|
||||||
|
|
||||||
|
|
||||||
|
def verify_metadata_phone(self, address, balance=None):
|
||||||
|
upper_address = strip_0x(address).upper()
|
||||||
|
f = open(os.path.join(
|
||||||
|
self.data_dir,
|
||||||
|
'new',
|
||||||
|
upper_address[:2],
|
||||||
|
upper_address[2:4],
|
||||||
|
upper_address + '.json',
|
||||||
|
), 'r'
|
||||||
|
)
|
||||||
|
o = json.load(f)
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
p = Person.deserialize(o)
|
||||||
|
|
||||||
|
k = generate_metadata_pointer(p.tel.encode('utf-8'), ':cic.phone')
|
||||||
|
url = os.path.join(config.get('_META_PROVIDER'), k)
|
||||||
|
logg.debug('verify metadata phone url {}'.format(url))
|
||||||
|
try:
|
||||||
|
res = urllib.request.urlopen(url)
|
||||||
|
except urllib.error.HTTPError as e:
|
||||||
|
raise VerifierError(
|
||||||
|
'({}) {}'.format(url, e),
|
||||||
|
'metadata (phone)',
|
||||||
|
)
|
||||||
|
b = res.read()
|
||||||
|
address_recovered = json.loads(b.decode('utf-8'))
|
||||||
|
address_recovered = address_recovered.replace('"', '')
|
||||||
|
|
||||||
|
if strip_0x(address) != strip_0x(address_recovered):
|
||||||
|
raise VerifierError(address_recovered, 'metadata (phone)')
|
||||||
|
|
||||||
|
|
||||||
|
def verify_ussd(self, address, balance=None):
|
||||||
|
upper_address = strip_0x(address).upper()
|
||||||
|
f = open(os.path.join(
|
||||||
|
self.data_dir,
|
||||||
|
'new',
|
||||||
|
upper_address[:2],
|
||||||
|
upper_address[2:4],
|
||||||
|
upper_address + '.json',
|
||||||
|
), 'r'
|
||||||
|
)
|
||||||
|
o = json.load(f)
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
p = Person.deserialize(o)
|
||||||
|
phone = p.tel
|
||||||
|
|
||||||
|
session = uuid.uuid4().hex
|
||||||
|
data = {
|
||||||
|
'sessionId': session,
|
||||||
|
'serviceCode': config.get('APP_SERVICE_CODE'),
|
||||||
|
'phoneNumber': phone,
|
||||||
|
'text': config.get('APP_SERVICE_CODE'),
|
||||||
|
}
|
||||||
|
|
||||||
|
req = urllib.request.Request(config.get('_USSD_PROVIDER'))
|
||||||
|
data_str = json.dumps(data)
|
||||||
|
data_bytes = data_str.encode('utf-8')
|
||||||
|
req.add_header('Content-Type', 'application/json')
|
||||||
|
req.data = data_bytes
|
||||||
|
response = urllib.request.urlopen(req)
|
||||||
|
response_data = response.read().decode('utf-8')
|
||||||
|
state = response_data[:3]
|
||||||
|
out = response_data[4:]
|
||||||
|
m = '{} {}'.format(state, out[:7])
|
||||||
|
if m != 'CON Welcome':
|
||||||
|
raise VerifierError(response_data, 'ussd')
|
||||||
|
|
||||||
|
|
||||||
def verify(self, address, balance, debug_stem=None):
|
def verify(self, address, balance, debug_stem=None):
|
||||||
|
|
||||||
for k in active_tests:
|
for k in active_tests:
|
||||||
|
@ -164,6 +164,7 @@ services:
|
|||||||
DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
|
DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
|
||||||
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
|
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
|
||||||
DATABASE_DEBUG: 1
|
DATABASE_DEBUG: 1
|
||||||
|
DATABASE_POOL_SIZE: 0
|
||||||
ETH_ABI_DIR: ${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
|
ETH_ABI_DIR: ${ETH_ABI_DIR:-/usr/local/share/cic/solidity/abi}
|
||||||
CIC_TRUST_ADDRESS: ${DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER:-0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C}
|
CIC_TRUST_ADDRESS: ${DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER:-0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C}
|
||||||
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
|
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
|
||||||
@ -233,6 +234,7 @@ services:
|
|||||||
DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
|
DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
|
||||||
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
|
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
|
||||||
DATABASE_DEBUG: ${DATABASE_DEBUG:-0}
|
DATABASE_DEBUG: ${DATABASE_DEBUG:-0}
|
||||||
|
DATABASE_POOL_SIZE: 0
|
||||||
PGPASSWORD: ${DATABASE_PASSWORD:-tralala}
|
PGPASSWORD: ${DATABASE_PASSWORD:-tralala}
|
||||||
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
|
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
|
||||||
BANCOR_DIR: ${BANCOR_DIR:-/usr/local/share/cic/bancor}
|
BANCOR_DIR: ${BANCOR_DIR:-/usr/local/share/cic/bancor}
|
||||||
@ -430,6 +432,7 @@ services:
|
|||||||
DATABASE_NAME: ${DATABASE_NAME_CIC_NOTIFY:-cic_notify}
|
DATABASE_NAME: ${DATABASE_NAME_CIC_NOTIFY:-cic_notify}
|
||||||
DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
|
DATABASE_ENGINE: ${DATABASE_ENGINE:-postgres}
|
||||||
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
|
DATABASE_DRIVER: ${DATABASE_DRIVER:-psycopg2}
|
||||||
|
DATABASE_POOL_SIZE: 0
|
||||||
PGPASSWORD: ${DATABASE_PASSWORD:-tralala}
|
PGPASSWORD: ${DATABASE_PASSWORD:-tralala}
|
||||||
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis}
|
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis}
|
||||||
CELERY_RESULT_URL: ${CELERY_BROKER_URL:-redis://redis}
|
CELERY_RESULT_URL: ${CELERY_BROKER_URL:-redis://redis}
|
||||||
@ -446,6 +449,7 @@ services:
|
|||||||
|
|
||||||
|
|
||||||
cic-meta-server:
|
cic-meta-server:
|
||||||
|
hostname: meta
|
||||||
build:
|
build:
|
||||||
context: apps/
|
context: apps/
|
||||||
dockerfile: cic-meta/docker/Dockerfile
|
dockerfile: cic-meta/docker/Dockerfile
|
||||||
@ -490,9 +494,10 @@ services:
|
|||||||
DATABASE_ENGINE: postgresql
|
DATABASE_ENGINE: postgresql
|
||||||
DATABASE_DRIVER: psycopg2
|
DATABASE_DRIVER: psycopg2
|
||||||
PGP_PASSPHRASE: merman
|
PGP_PASSPHRASE: merman
|
||||||
SERVER_PORT: 8000
|
SERVER_PORT: 9000
|
||||||
|
CIC_META_URL: ${CIC_META_URL:-http://meta:8000}
|
||||||
ports:
|
ports:
|
||||||
- ${HTTP_PORT_CIC_USSD:-63315}:8000
|
- ${HTTP_PORT_CIC_USSD:-63315}:9000
|
||||||
depends_on:
|
depends_on:
|
||||||
- postgres
|
- postgres
|
||||||
- redis
|
- redis
|
||||||
@ -501,7 +506,7 @@ services:
|
|||||||
deploy:
|
deploy:
|
||||||
restart_policy:
|
restart_policy:
|
||||||
condition: on-failure
|
condition: on-failure
|
||||||
command: "/root/start_uwsgi.sh"
|
command: "/root/start_uwsgi.sh -vv"
|
||||||
|
|
||||||
cic-ussd-tasker:
|
cic-ussd-tasker:
|
||||||
# image: grassrootseconomics:cic-ussd
|
# image: grassrootseconomics:cic-ussd
|
||||||
@ -516,9 +521,11 @@ services:
|
|||||||
DATABASE_NAME: cic_ussd
|
DATABASE_NAME: cic_ussd
|
||||||
DATABASE_ENGINE: postgresql
|
DATABASE_ENGINE: postgresql
|
||||||
DATABASE_DRIVER: psycopg2
|
DATABASE_DRIVER: psycopg2
|
||||||
|
DATABASE_POOL_SIZE: 0
|
||||||
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis}
|
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis}
|
||||||
CELERY_RESULT_URL: ${CELERY_BROKER_URL:-redis://redis}
|
CELERY_RESULT_URL: ${CELERY_BROKER_URL:-redis://redis}
|
||||||
PGP_PASSPHRASE: merman
|
PGP_PASSPHRASE: merman
|
||||||
|
CIC_META_URL: ${CIC_META_URL:-http://meta:8000}
|
||||||
depends_on:
|
depends_on:
|
||||||
- postgres
|
- postgres
|
||||||
- redis
|
- redis
|
||||||
@ -527,4 +534,4 @@ services:
|
|||||||
deploy:
|
deploy:
|
||||||
restart_policy:
|
restart_policy:
|
||||||
condition: on-failure
|
condition: on-failure
|
||||||
command: "/root/start_tasker.sh -q cic-ussd"
|
command: "/root/start_tasker.sh -q cic-ussd -vv"
|
||||||
|
Loading…
Reference in New Issue
Block a user