Merge remote-tracking branch 'origin/master' into lash/local-dev-improve
commit a6de7e9fe0
@@ -1,5 +1,5 @@
 chainqueue>=0.0.6a1,<0.1.0
-chainsyncer[sql]>=0.0.6a3,<0.1.0
+chainsyncer[sql]>=0.0.7a3,<0.1.0
 alembic==1.4.2
 confini>=0.3.6rc4,<0.5.0
 redis==3.5.3
@@ -1,5 +1,4 @@
 FROM node:15.3.0-alpine3.10
-#FROM node:lts-alpine3.14
 
 WORKDIR /root
 
@@ -9,7 +8,7 @@ RUN apk add --no-cache postgresql bash
 COPY package.json package-lock.json ./
 RUN --mount=type=cache,mode=0755,target=/root/.npm \
     npm set cache /root/.npm && \
    npm cache verify && \
    npm ci --verbose
 
 COPY webpack.config.js .
@@ -3,6 +3,8 @@ ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
 FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
 
 #RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
+RUN apt-get install libffi-dev -y
+
 
 ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
 ARG EXTRA_PIP_ARGS=""
@@ -19,8 +21,8 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
 COPY . .
 RUN python setup.py install
 
-COPY docker/*.sh .
-RUN chmod +x *.sh
+COPY docker/*.sh ./
+RUN chmod +x /root/*.sh
 
 # ini files in config directory defines the configurable parameters for the application
 # they can all be overridden by environment variables
@@ -7,6 +7,7 @@ from typing import Optional
 # third-party imports
 from cic_eth.api import Api
 from cic_eth_aux.erc20_demurrage_token.api import Api as DemurrageApi
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.transaction import from_wei
@@ -102,7 +103,7 @@ def get_cached_available_balance(blockchain_address: str) -> float:
     :rtype: float
     """
     identifier = bytes.fromhex(blockchain_address)
-    key = cache_data_key(identifier, salt=':cic.balances')
+    key = cache_data_key(identifier, salt=MetadataPointer.BALANCES)
     cached_balances = get_cached_data(key=key)
     if cached_balances:
         return calculate_available_balance(json.loads(cached_balances))
@@ -117,5 +118,5 @@ def get_cached_adjusted_balance(identifier: bytes):
     :return:
     :rtype:
     """
-    key = cache_data_key(identifier, ':cic.adjusted_balance')
+    key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
     return get_cached_data(key)
@@ -7,6 +7,7 @@ from typing import Optional
 import celery
 from chainlib.hash import strip_0x
 from cic_eth.api import Api
+from cic_types.condiments import MetadataPointer
 
 # local import
 from cic_ussd.account.chain import Chain
@@ -53,7 +54,7 @@ def get_cached_statement(blockchain_address: str) -> bytes:
     :rtype: str
     """
     identifier = bytes.fromhex(strip_0x(blockchain_address))
-    key = cache_data_key(identifier=identifier, salt=':cic.statement')
+    key = cache_data_key(identifier=identifier, salt=MetadataPointer.STATEMENT)
     return get_cached_data(key=key)
 
 
@@ -5,6 +5,7 @@ from typing import Dict, Optional
 
 # external imports
 from cic_eth.api import Api
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.chain import Chain
@@ -23,7 +24,7 @@ def get_cached_default_token(chain_str: str) -> Optional[str]:
     :rtype:
     """
     logg.debug(f'Retrieving default token from cache for chain: {chain_str}')
-    key = cache_data_key(identifier=chain_str.encode('utf-8'), salt=':cic.default_token_data')
+    key = cache_data_key(identifier=chain_str.encode('utf-8'), salt=MetadataPointer.TOKEN_DEFAULT)
     return get_cached_data(key=key)
 
 
@@ -2,7 +2,8 @@
 import hashlib
 import logging
 
-# third-party imports
+# external imports
+from cic_types.condiments import MetadataPointer
 from redis import Redis
 
 logg = logging.getLogger()
@@ -38,7 +39,7 @@ def get_cached_data(key: str):
     return cache.get(name=key)
 
 
-def cache_data_key(identifier: bytes, salt: str):
+def cache_data_key(identifier: bytes, salt: MetadataPointer):
     """
     :param identifier:
     :type identifier:
@@ -49,5 +50,5 @@ def cache_data_key(identifier: bytes, salt: str):
     """
     hash_object = hashlib.new("sha256")
     hash_object.update(identifier)
-    hash_object.update(salt.encode(encoding="utf-8"))
+    hash_object.update(salt.value.encode(encoding="utf-8"))
     return hash_object.digest().hex()
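As an aside, a minimal sketch of the key derivation this file now standardises on: the cache key is still sha256(identifier || salt), only the salt is taken from a MetadataPointer member's value instead of a bare string. The stand-in enum below is hypothetical; the real members live in cic_types.condiments, and the values are inferred from the string salts being replaced (the updated test_cache_data_key further down confirms ':cic.person' for PERSON).

# Sketch only: stand-in for cic_types.condiments.MetadataPointer, reduced to
# two members; values are assumed from the replaced string salts.
import hashlib
from enum import Enum


class MetadataPointer(Enum):
    PERSON = ':cic.person'
    BALANCES = ':cic.balances'


def cache_data_key(identifier: bytes, salt: MetadataPointer) -> str:
    # mirrors the new cic_ussd.cache.cache_data_key: hash the identifier,
    # then the enum member's string value
    hash_object = hashlib.new('sha256')
    hash_object.update(identifier)
    hash_object.update(salt.value.encode(encoding='utf-8'))
    return hash_object.digest().hex()


# same digest as hashing the old literal salt directly (assuming the enum
# value matches the old string), so derived keys stay stable
key = cache_data_key(b'some_key', MetadataPointer.PERSON)
assert key == hashlib.sha256(b'some_key' + b':cic.person').hexdigest()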
@@ -3,6 +3,7 @@ import json
 
 # external imports
 from cic_eth.api import Api
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.metadata import get_cached_preferred_language, parse_account_metadata
@@ -109,7 +110,7 @@ class Account(SessionBase):
         :rtype: str
         """
         identifier = bytes.fromhex(self.blockchain_address)
-        key = cache_data_key(identifier, ':cic.person')
+        key = cache_data_key(identifier, MetadataPointer.PERSON)
         account_metadata = get_cached_data(key)
         if not account_metadata:
             return self.phone_number
@@ -5,6 +5,7 @@ from datetime import datetime, timedelta
 
 # external imports
 import i18n.config
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.balance import (calculate_available_balance,
@@ -163,7 +164,7 @@ class MenuProcessor:
         token_symbol = get_default_token_symbol()
         blockchain_address = self.account.blockchain_address
         balances = get_balances(blockchain_address, chain_str, token_symbol, False)[0]
-        key = cache_data_key(self.identifier, ':cic.balances')
+        key = cache_data_key(self.identifier, MetadataPointer.BALANCES)
         cache_data(key, json.dumps(balances))
         available_balance = calculate_available_balance(balances)
         now = datetime.now()
@@ -173,7 +174,7 @@ class MenuProcessor:
         else:
             timestamp = int((now - timedelta(30)).timestamp())
         adjusted_balance = get_adjusted_balance(to_wei(int(available_balance)), chain_str, timestamp, token_symbol)
-        key = cache_data_key(self.identifier, ':cic.adjusted_balance')
+        key = cache_data_key(self.identifier, MetadataPointer.BALANCES_ADJUSTED)
         cache_data(key, json.dumps(adjusted_balance))
 
         query_statement(blockchain_address)
@@ -12,6 +12,7 @@ import i18n
 import redis
 from chainlib.chain import ChainSpec
 from confini import Config
+from cic_types.condiments import MetadataPointer
 from cic_types.ext.metadata import Metadata
 from cic_types.ext.metadata.signer import Signer
 
@@ -109,7 +110,7 @@ default_token_data = query_default_token(chain_str)
 
 # cache default token for re-usability
 if default_token_data:
-    cache_key = cache_data_key(chain_str.encode('utf-8'), ':cic.default_token_data')
+    cache_key = cache_data_key(chain_str.encode('utf-8'), MetadataPointer.TOKEN_DEFAULT)
     cache_data(key=cache_key, data=json.dumps(default_token_data))
 else:
     raise InitializationError(f'Default token data for: {chain_str} not found.')
@@ -3,8 +3,10 @@ import json
 import logging
 from datetime import timedelta
 
-# third-party imports
+# external imports
 import celery
+from cic_types.condiments import MetadataPointer
+
 
 # local imports
 from cic_ussd.account.balance import get_balances, calculate_available_balance
@@ -87,7 +89,7 @@ def balances_callback(result: list, param: str, status_code: int):
 
     balances = result[0]
     identifier = bytes.fromhex(param)
-    key = cache_data_key(identifier, ':cic.balances')
+    key = cache_data_key(identifier, MetadataPointer.BALANCES)
     cache_data(key, json.dumps(balances))
 
 
@@ -2,9 +2,10 @@
 import json
 import logging
 
-# third-party imports
+# external imports
 import celery
 import i18n
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.metadata import get_cached_preferred_language
@@ -49,7 +50,7 @@ def cache_statement(parsed_transaction: dict, querying_party: str):
     statement_transactions.append(parsed_transaction)
     data = json.dumps(statement_transactions)
     identifier = bytes.fromhex(querying_party)
-    key = cache_data_key(identifier, ':cic.statement')
+    key = cache_data_key(identifier, MetadataPointer.STATEMENT)
     cache_data(key, data)
 
 
apps/cic-ussd/config/test/chain.ini | 2 (new file)
@@ -0,0 +1,2 @@
+[chain]
+spec = 'evm:foo:1:bar'
@@ -1,5 +1,2 @@
 [cic]
-engine = evm
-common_name = bloxberg
-network_id = 8996
 meta_url = http://test-meta.io
@@ -6,7 +6,7 @@ celery==4.4.7
 cffi==1.14.6
 cic-eth~=0.12.5a1
 cic-notify~=0.4.0a11
-cic-types~=0.2.0a5
+cic-types~=0.2.0a6
 confini>=0.3.6rc4,<0.5.0
 phonenumbers==8.12.12
 psycopg2==2.8.6
@@ -1,3 +1,4 @@
+cic-eth[services]~=0.12.4a13
 Faker==8.1.2
 faker-e164==0.1.0
 pytest==6.2.4
@@ -4,8 +4,7 @@ import time
 
 # external imports
 import pytest
-import requests_mock
-from chainlib.hash import strip_0x
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.statement import (filter_statement_transactions,
@@ -48,7 +47,7 @@ def test_generate(activated_account,
     generate(querying_party, None, sender_transaction)
     time.sleep(2)
     identifier = bytes.fromhex(activated_account.blockchain_address)
-    key = cache_data_key(identifier, ':cic.statement')
+    key = cache_data_key(identifier, MetadataPointer.STATEMENT)
     statement = get_cached_data(key)
     statement = json.loads(statement)
     assert len(statement) == 1
@@ -3,7 +3,7 @@ import json
 import datetime
 
 # external imports
-from chainlib.hash import strip_0x
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.balance import get_cached_available_balance
@@ -58,7 +58,7 @@ def test_menu_processor(activated_account,
        token_symbol=token_symbol)
 
     identifier = bytes.fromhex(activated_account.blockchain_address)
-    key = cache_data_key(identifier, ':cic.adjusted_balance')
+    key = cache_data_key(identifier, MetadataPointer.BALANCES_ADJUSTED)
     adjusted_balance = 45931650.64654012
     cache_data(key, json.dumps(adjusted_balance))
     resp = response(activated_account, 'ussd.kenya.account_balances', name, init_database, generic_ussd_session)
@@ -7,6 +7,7 @@ import time
 import i18n
 import requests_mock
 from chainlib.hash import strip_0x
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.chain import Chain
@@ -45,7 +46,7 @@ def test_handle_menu(activated_account,
     ussd_menu = UssdMenu.find_by_name('initial_language_selection')
     assert menu_resp.get('name') == ussd_menu.get('name')
     identifier = bytes.fromhex(strip_0x(pending_account.blockchain_address))
-    key = cache_data_key(identifier, ':cic.preferences')
+    key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
     cache_data(key, json.dumps(preferences))
     time.sleep(2)
     menu_resp = handle_menu(pending_account, init_database)
@@ -1,20 +1,18 @@
 # standard imports
 import json
-from decimal import Decimal
 
 # external imports
 import celery
 import pytest
-import requests_mock
 from chainlib.hash import strip_0x
+from cic_types.condiments import MetadataPointer
 
 # local imports
-from cic_ussd.account.statement import generate, filter_statement_transactions
+from cic_ussd.account.statement import filter_statement_transactions
 from cic_ussd.account.transaction import transaction_actors
 from cic_ussd.cache import cache_data_key, get_cached_data
 from cic_ussd.db.models.account import Account
 from cic_ussd.error import AccountCreationDataNotFound
-from cic_ussd.metadata import PreferencesMetadata
 
 
 # test imports
@@ -89,7 +87,7 @@ def test_balances_callback(activated_account, balances, celery_session_worker):
        [balances, activated_account.blockchain_address, status_code])
     s_balances_callback.apply_async().get()
     identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
-    key = cache_data_key(identifier, ':cic.balances')
+    key = cache_data_key(identifier, MetadataPointer.BALANCES)
     cached_balances = get_cached_data(key)
     cached_balances = json.loads(cached_balances)
     assert cached_balances == balances[0]
@@ -1,11 +1,11 @@
 # standard imports
 import json
-import os
 
 # external imports
 import celery
 import requests_mock
 from chainlib.hash import strip_0x
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.cache import cache_data_key, get_cached_data
@@ -27,7 +27,7 @@ def test_query_person_metadata(activated_account,
     s_query_person_metadata = celery.signature(
         'cic_ussd.tasks.metadata.query_person_metadata', [activated_account.blockchain_address])
     s_query_person_metadata.apply().get()
-    key = cache_data_key(identifier, ':cic.person')
+    key = cache_data_key(identifier, MetadataPointer.PERSON)
     cached_person_metadata = get_cached_data(key)
     cached_person_metadata = json.loads(cached_person_metadata)
     assert cached_person_metadata == person_metadata
@@ -46,7 +46,7 @@ def test_query_preferences_metadata(activated_account,
     query_preferences_metadata = celery.signature(
         'cic_ussd.tasks.metadata.query_preferences_metadata', [activated_account.blockchain_address])
     query_preferences_metadata.apply().get()
-    key = cache_data_key(identifier, ':cic.preferences')
+    key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
     cached_preferences_metadata = get_cached_data(key)
     cached_preferences_metadata = json.loads(cached_preferences_metadata)
     assert cached_preferences_metadata == preferences
@@ -4,6 +4,7 @@ import json
 # external imports
 import celery
 from chainlib.hash import strip_0x
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.transaction import transaction_actors
@@ -38,7 +39,7 @@ def test_cache_statement(activated_account,
        transaction_result):
     recipient_transaction, sender_transaction = transaction_actors(transaction_result)
     identifier = bytes.fromhex(strip_0x(activated_account.blockchain_address))
-    key = cache_data_key(identifier, ':cic.statement')
+    key = cache_data_key(identifier, MetadataPointer.STATEMENT)
     cached_statement = get_cached_data(key)
     assert cached_statement is None
     s_parse_transaction = celery.signature(
@@ -3,6 +3,7 @@ import hashlib
 import json
 
 # external imports
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
@@ -12,7 +13,7 @@ from cic_ussd.cache import cache_data, cache_data_key, get_cached_data
 
 def test_cache_data(init_cache):
     identifier = 'some_key'.encode()
-    key = cache_data_key(identifier, ':testing')
+    key = cache_data_key(identifier, MetadataPointer.PERSON)
     assert get_cached_data(key) is None
     cache_data(key, json.dumps('some_value'))
     assert get_cached_data(key) is not None
@@ -20,10 +21,10 @@ def test_cache_data(init_cache):
 
 def test_cache_data_key():
     identifier = 'some_key'.encode()
-    key = cache_data_key(identifier, ':testing')
+    key = cache_data_key(identifier, MetadataPointer.PERSON)
     hash_object = hashlib.new("sha256")
     hash_object.update(identifier)
-    hash_object.update(':testing'.encode(encoding="utf-8"))
+    hash_object.update(':cic.person'.encode(encoding="utf-8"))
     assert hash_object.digest().hex() == key
 
 
apps/cic-ussd/tests/fixtures/account.py | 12 (vendored)
@@ -4,7 +4,7 @@ import random
 
 # external accounts
 import pytest
-from chainlib.hash import strip_0x
+from cic_types.condiments import MetadataPointer
 
 # local imports
 from cic_ussd.account.chain import Chain
@@ -56,7 +56,7 @@ def cache_account_creation_data(init_cache, account_creation_data):
 def cache_balances(activated_account, balances, init_cache):
     identifier = bytes.fromhex(activated_account.blockchain_address)
     balances = json.dumps(balances[0])
-    key = cache_data_key(identifier, ':cic.balances')
+    key = cache_data_key(identifier, MetadataPointer.BALANCES)
     cache_data(key, balances)
 
 
@@ -64,7 +64,7 @@ def cache_balances(activated_account, balances, init_cache):
 def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
     chain_str = Chain.spec.__str__()
     data = json.dumps(default_token_data)
-    key = cache_data_key(chain_str.encode('utf-8'), ':cic.default_token_data')
+    key = cache_data_key(chain_str.encode('utf-8'), MetadataPointer.TOKEN_DEFAULT)
     cache_data(key, data)
 
 
@@ -72,7 +72,7 @@ def cache_default_token_data(default_token_data, init_cache, load_chain_spec):
 def cache_person_metadata(activated_account, init_cache, person_metadata):
     identifier = bytes.fromhex(activated_account.blockchain_address)
     person = json.dumps(person_metadata)
-    key = cache_data_key(identifier, ':cic.person')
+    key = cache_data_key(identifier, MetadataPointer.PERSON)
     cache_data(key, person)
 
 
@@ -80,7 +80,7 @@ def cache_person_metadata(activated_account, init_cache, person_metadata):
 def cache_preferences(activated_account, init_cache, preferences):
     identifier = bytes.fromhex(activated_account.blockchain_address)
     preferences = json.dumps(preferences)
-    key = cache_data_key(identifier, ':cic.preferences')
+    key = cache_data_key(identifier, MetadataPointer.PREFERENCES)
     cache_data(key, preferences)
 
 
@@ -88,7 +88,7 @@ def cache_preferences(activated_account, init_cache, preferences):
 def cache_statement(activated_account, init_cache, statement):
     identifier = bytes.fromhex(activated_account.blockchain_address)
     statement = json.dumps(statement)
-    key = cache_data_key(identifier, ':cic.statement')
+    key = cache_data_key(identifier, MetadataPointer.STATEMENT)
     cache_data(key, statement)
 
 
apps/cic-ussd/tests/fixtures/config.py | 6 (vendored)
@@ -41,11 +41,7 @@ def init_state_machine(load_config):
 
 @pytest.fixture(scope='function')
 def load_chain_spec(load_config):
-    chain_spec = ChainSpec(
-        common_name=load_config.get('CIC_COMMON_NAME'),
-        engine=load_config.get('CIC_ENGINE'),
-        network_id=load_config.get('CIC_NETWORK_ID')
-    )
+    chain_spec = ChainSpec.from_chain_str(load_config.get('CHAIN_SPEC'))
     Chain.spec = chain_spec
 
 
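The fixture now derives the test chain spec from a single CHAIN_SPEC string (the new test config above sets it to 'evm:foo:1:bar') instead of separate CIC_ENGINE, CIC_COMMON_NAME and CIC_NETWORK_ID values. A rough sketch of how the four colon-separated fields appear to map, judging by the accessors used elsewhere in this diff (asdict().get('arch'), asdict().get('fork'), network_id(), common_name()); the field names and order are an inference, not taken from chainlib documentation.

# Sketch only: apparent layout of the four-part chain spec string used by the
# new test config. Field order is an assumption based on how this diff
# consumes ChainSpec, not on chainlib documentation.
spec_str = 'evm:foo:1:bar'
arch, fork, network_id, common_name = spec_str.split(':')

assert arch == 'evm'         # execution architecture
assert fork == 'foo'         # fork label, the new nesting key seen in identities
assert network_id == '1'     # network id
assert common_name == 'bar'  # human-readable chain name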
@@ -14,6 +14,7 @@ if [ ! -f $WALLET_KEY_FILE ]; then
 >&2 echo "wallet path '$WALLET_KEY_FILE' does not point to a file"
 exit 1
 fi
 
 export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-keyfile -z -d $WALLET_KEY_FILE`
 noncefile=${DEV_DATA_DIR}/nonce_${DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
 
@@ -2,7 +2,6 @@ ARG DOCKER_REGISTRY="registry.gitlab.com/grassrootseconomics"
 
 FROM $DOCKER_REGISTRY/cic-base-images:python-3.8.6-dev-e8eb2ee2
 
-
 WORKDIR /root
 
 RUN touch /etc/apt/sources.list.d/ethereum.list
@@ -11,4 +11,4 @@ sarafu-faucet>=0.0.7a2,<0.1.0
 confini>=0.4.2rc3,<1.0.0
 crypto-dev-signer>=0.4.15rc2,<=0.4.15
 eth-token-index>=0.2.4a1,<=0.3.0
-okota>=0.2.4a13,<0.3.0
+okota>=0.2.4a15,<0.3.0
@@ -57,8 +57,8 @@ elif args.v:
 config = Config(args.c, args.env_prefix)
 config.process()
 args_override = {
-    'CIC_CHAIN_SPEC': getattr(args, 'i'),
-    'ETH_PROVIDER': getattr(args, 'p'),
+    'CHAIN_SPEC': getattr(args, 'i'),
+    'RPC_PROVIDER': getattr(args, 'p'),
     'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
     'REDIS_HOST': getattr(args, 'redis_host'),
     'REDIS_PORT': getattr(args, 'redis_port'),
@@ -90,7 +90,7 @@ signer = EIP155Signer(keystore)
 
 block_offset = -1 if args.head else args.offset
 
-chain_str = config.get('CIC_CHAIN_SPEC')
+chain_str = config.get('CHAIN_SPEC')
 chain_spec = ChainSpec.from_chain_str(chain_str)
 ImportTask.chain_spec = chain_spec
 old_chain_spec_str = args.old_chain_spec
@@ -99,16 +99,12 @@ old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str)
 MetadataTask.meta_host = config.get('META_HOST')
 MetadataTask.meta_port = config.get('META_PORT')
 
-txs_dir = os.path.join(args.import_dir, 'txs')
-os.makedirs(txs_dir, exist_ok=True)
-sys.stdout.write(f'created txs dir: {txs_dir}')
-
 celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
 get_celery_worker_status(celery_app)
 
 
 def main():
-    conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
+    conn = EthHTTPConnection(config.get('RPC_PROVIDER'))
     ImportTask.balance_processor = BalanceProcessor(conn,
         chain_spec,
         config.get('CIC_REGISTRY_ADDRESS'),
@@ -14,7 +14,9 @@ from celery import Task
 from chainlib.chain import ChainSpec
 from chainlib.eth.address import to_checksum_address
 from chainlib.eth.tx import raw, unpack
-from cic_types.models.person import Person, generate_metadata_pointer
+from cic_types.models.person import Person, identity_tag
+from cic_types.processor import generate_metadata_pointer
+from cic_types.condiments import MetadataPointer
 from hexathon import add_0x, strip_0x
 
 # local imports
@@ -55,7 +57,7 @@ class MetadataTask(ImportTask):
 
 
 def old_address_from_phone(base_path: str, phone_number: str):
-    pid_x = generate_metadata_pointer(phone_number.encode('utf-8'), ':cic.phone')
+    pid_x = generate_metadata_pointer(phone_number.encode('utf-8'), MetadataPointer.PHONE)
     phone_idx_path = os.path.join(f'{base_path}/phone/{pid_x[:2]}/{pid_x[2:4]}/{pid_x}')
     with open(phone_idx_path, 'r') as f:
         old_address = f.read()
@@ -73,9 +75,13 @@ def generate_person_metadata(self, blockchain_address: str, phone_number: str):
     person = Person.deserialize(person_metadata)
     if not person.identities.get('evm'):
         person.identities['evm'] = {}
-    sub_chain_str = f'{self.chain_spec.common_name()}:{self.chain_spec.network_id()}'
-    person.identities['evm'][sub_chain_str] = [add_0x(blockchain_address)]
-    blockchain_address = strip_0x(blockchain_address)
+    chain_spec = self.chain_spec.asdict()
+    arch = chain_spec.get('arch')
+    fork = chain_spec.get('fork')
+    tag = identity_tag(chain_spec)
+    person.identities[arch][fork] = {
+        tag: [blockchain_address]
+    }
     file_path = os.path.join(
         self.import_dir,
         'new',
@@ -102,7 +108,7 @@ def generate_preferences_data(self, data: tuple):
     blockchain_address: str = data[0]
     preferences = data[1]
     preferences_dir = os.path.join(self.import_dir, 'preferences')
-    preferences_key = generate_metadata_pointer(bytes.fromhex(strip_0x(blockchain_address)), ':cic.preferences')
+    preferences_key = generate_metadata_pointer(bytes.fromhex(strip_0x(blockchain_address)), MetadataPointer.PREFERENCES)
     preferences_filepath = os.path.join(preferences_dir, 'meta', preferences_key)
     filepath = os.path.join(
         preferences_dir,
@@ -137,7 +143,7 @@ def generate_ussd_data(self, blockchain_address: str, phone_number: str):
     preferred_language = random.sample(["en", "sw"], 1)[0]
     preferences = {'preferred_language': preferred_language}
     with open(ussd_data_file, file_op) as ussd_data_file:
-        ussd_data_file.write(f'{phone_number}, { 1}, {preferred_language}, {False}\n')
+        ussd_data_file.write(f'{phone_number}, 1, {preferred_language}, False\n')
     logg.debug(f'written ussd data for address: {blockchain_address}')
     return blockchain_address, preferences
 
@@ -163,7 +169,7 @@ def opening_balance_tx(self, blockchain_address: str, phone_number: str, serial:
 
 @celery_app.task(bind=True, base=MetadataTask)
 def resolve_phone(self, phone_number: str):
-    identifier = generate_metadata_pointer(phone_number.encode('utf-8'), ':cic.phone')
+    identifier = generate_metadata_pointer(phone_number.encode('utf-8'), MetadataPointer.PHONE)
     url = parse.urljoin(self.meta_url(), identifier)
     logg.debug(f'attempt getting phone pointer at: {url} for phone: {phone_number}')
     r = request.urlopen(url)
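A hedged before/after sketch of the person metadata identities block that generate_person_metadata above now writes: the flat 'common_name:network_id' key under 'evm' becomes an arch to fork to tag nesting. The '1:oldchain' tag string follows the create_import_users.py and import_users.py hunks further down; the exact value returned by identity_tag() is not shown in this diff.

# Sketch only; placeholder address, and the tag string assumed to be
# 'network_id:common_name' as in the data-seeding hunks below.
address = '0000000000000000000000000000000000000000'

old_identities = {
    'evm': {
        'oldchain:1': [address],
    },
}

new_identities = {
    'evm': {                            # arch, from chain_spec.asdict().get('arch')
        'foo': {                        # fork, from chain_spec.asdict().get('fork')
            '1:oldchain': [address],    # tag: [addresses]
        },
    },
}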
@@ -17,6 +17,7 @@ from cic_types.models.person import Person
 from confini import Config
 
 # local imports
+from common.dirs import initialize_dirs
 from import_util import get_celery_worker_status
 
 default_config_dir = './config'
@@ -37,6 +38,7 @@ arg_parser.add_argument('--env-prefix',
                         dest='env_prefix',
                         type=str,
                         help='environment prefix for variables to overwrite configuration.')
+arg_parser.add_argument('-f', action='store_true', help='force clear previous state')
 arg_parser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
 arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit data seeding tasks to.')
 arg_parser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
@@ -70,21 +72,7 @@ config.censor('PASSWORD', 'DATABASE')
 config.censor('PASSWORD', 'SSL')
 logg.debug(f'config loaded from {args.c}:\n{config}')
 
-old_account_dir = os.path.join(args.import_dir, 'old')
-os.stat(old_account_dir)
-logg.debug(f'created old system data dir: {old_account_dir}')
-
-new_account_dir = os.path.join(args.import_dir, 'new')
-os.makedirs(new_account_dir, exist_ok=True)
-logg.debug(f'created new system data dir: {new_account_dir}')
-
-person_metadata_dir = os.path.join(args.import_dir, 'meta')
-os.makedirs(person_metadata_dir, exist_ok=True)
-logg.debug(f'created person metadata dir: {person_metadata_dir}')
-
-preferences_dir = os.path.join(args.import_dir, 'preferences')
-os.makedirs(os.path.join(preferences_dir, 'meta'), exist_ok=True)
-logg.debug(f'created preferences metadata dir: {preferences_dir}')
+dirs = initialize_dirs(args.import_dir, force_reset=args.f)
 
 valid_service_codes = config.get('USSD_SERVICE_CODE').split(",")
 
@@ -157,7 +145,7 @@ def register_account(person: Person):
 if __name__ == '__main__':
     i = 0
     j = 0
-    for x in os.walk(old_account_dir):
+    for x in os.walk(dirs['old']):
         for y in x[2]:
             if y[len(y) - 5:] != '.json':
                 continue
@@ -13,11 +13,14 @@ def initialize_dirs(user_dir, force_reset=False):
     dirs['meta'] = os.path.join(user_dir, 'meta')
     dirs['custom'] = os.path.join(user_dir, 'custom')
     dirs['phone'] = os.path.join(user_dir, 'phone')
+    dirs['preferences'] = os.path.join(user_dir, 'preferences')
     dirs['txs'] = os.path.join(user_dir, 'txs')
     dirs['keyfile'] = os.path.join(user_dir, 'keystore')
     dirs['custom_new'] = os.path.join(dirs['custom'], 'new')
     dirs['custom_meta'] = os.path.join(dirs['custom'], 'meta')
     dirs['phone_meta'] = os.path.join(dirs['phone'], 'meta')
+    dirs['preferences_meta'] = os.path.join(dirs['preferences'], 'meta')
+    dirs['preferences_new'] = os.path.join(dirs['preferences'], 'new')
 
     try:
         os.stat(dirs['old'])
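Since import_users.py above now delegates directory setup to common.dirs.initialize_dirs, here is a sketch of the map the helper builds under an import directory, limited to the keys visible in this hunk plus 'old' (which the os.stat at the end of the hunk checks); any other keys the helper sets are omitted, and the layout is inferred from the diff rather than the full source.

# Sketch only: keys taken from the hunk above; 'old' inferred from the
# os.stat(dirs['old']) context line.
import os


def sketch_initialize_dirs(user_dir: str) -> dict:
    dirs = {
        'old': os.path.join(user_dir, 'old'),
        'meta': os.path.join(user_dir, 'meta'),
        'custom': os.path.join(user_dir, 'custom'),
        'phone': os.path.join(user_dir, 'phone'),
        'preferences': os.path.join(user_dir, 'preferences'),  # new in this change
        'txs': os.path.join(user_dir, 'txs'),
        'keyfile': os.path.join(user_dir, 'keystore'),
    }
    dirs['custom_new'] = os.path.join(dirs['custom'], 'new')
    dirs['custom_meta'] = os.path.join(dirs['custom'], 'meta')
    dirs['phone_meta'] = os.path.join(dirs['phone'], 'meta')
    dirs['preferences_meta'] = os.path.join(dirs['preferences'], 'meta')  # new
    dirs['preferences_new'] = os.path.join(dirs['preferences'], 'new')    # new
    return dirs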
apps/data-seeding/config/chain.ini | 2 (new file)
@@ -0,0 +1,2 @@
+[chain]
+spec =
@@ -1,10 +1,2 @@
 [cic]
 registry_address =
-token_index_address =
-accounts_index_address =
-declarator_address =
-approval_escrow_address =
-chain_spec =
-tx_retry_delay =
-trust_address =
-user_ussd_svc_service_port =
@@ -20,7 +20,7 @@ from cic_types.models.person import (
     generate_vcard_from_contact_data,
     get_contact_data_from_vcard,
 )
-from chainlib.eth.address import to_checksum_address
+from chainlib.eth.address import to_checksum_address, strip_0x
 import phonenumbers
 
 logging.basicConfig(level=logging.WARNING)
@@ -30,7 +30,6 @@ fake = Faker(['sl', 'en_US', 'no', 'de', 'ro'])
 
 default_config_dir = './config'
 
-
 argparser = argparse.ArgumentParser()
 argparser.add_argument('-c', type=str, default=default_config_dir, help='Config dir')
 argparser.add_argument('--tag', type=str, action='append',
@@ -54,7 +53,6 @@ config = confini.Config(args.c, os.environ.get('CONFINI_ENV_PREFIX'))
 config.process()
 logg.debug('loaded config\n{}'.format(config))
 
-
 dt_now = datetime.datetime.utcnow()
 dt_then = dt_now - datetime.timedelta(weeks=150)
 ts_now = int(dt_now.timestamp())
@@ -64,7 +62,7 @@ celery_app = celery.Celery(broker=config.get(
     'CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
 
 gift_max = args.gift_threshold or 0
-gift_factor = (10**6)
+gift_factor = (10 ** 6)
 
 categories = [
     "food/water",
@@ -105,7 +103,6 @@ def genId(addr, typ):
 
 
 def genDate():
-
     ts = random.randint(ts_then, ts_now)
     return int(datetime.datetime.fromtimestamp(ts).timestamp())
 
@@ -148,9 +145,7 @@ def genDob():
 
 
 def gen():
-    old_blockchain_address = '0x' + os.urandom(20).hex()
-    old_blockchain_checksum_address = to_checksum_address(
-        old_blockchain_address)
+    old_blockchain_address = os.urandom(20).hex()
     gender = random.choice(['female', 'male', 'other'])
     phone = genPhone()
     v = genPersonal(phone)
@@ -164,9 +159,9 @@ def gen():
     p.gender = gender
     p.identities = {
         'evm': {
-            'oldchain:1': [
-                old_blockchain_checksum_address,
-            ],
+            'foo': {
+                '1:oldchain': [old_blockchain_address],
+            },
         },
     }
     p.products = [fake.random_element(elements=OrderedDict(
@@ -207,7 +202,7 @@ def gen():
     # fake.local_latitude()
     p.location['longitude'] = (random.random() * 360) - 180
 
-    return (old_blockchain_checksum_address, phone, p)
+    return old_blockchain_address, phone, p
 
 
 def prepareLocalFilePath(datadir, address):
@@ -242,7 +237,7 @@ if __name__ == '__main__':
         except Exception as e:
             logg.warning('generate failed, trying anew: {}'.format(e))
             continue
-        uid = eth[2:].upper()
+        uid = strip_0x(eth).upper()
 
         print(o)
 
@@ -7,20 +7,23 @@ RUN mkdir -vp /usr/local/etc/cic
 
 COPY package.json \
     package-lock.json \
-    .
+    ./
 
 
 RUN npm ci --production
 #RUN --mount=type=cache,mode=0755,target=/root/node_modules npm install
 
+COPY common/ cic_ussd/common/
 COPY requirements.txt .
-COPY config/ /usr/local/etc/data-seeding
+COPY config/ config
 
-ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
-ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
-RUN --mount=type=cache,mode=0755,target=/root/.cache/pip pip install \
-    --extra-index-url $GITLAB_PYTHON_REGISTRY \
-    --extra-index-url $EXTRA_INDEX_URL -r requirements.txt
+ARG EXTRA_PIP_INDEX_URL=https://pip.grassrootseconomics.net:8433
+ARG EXTRA_PIP_ARGS=""
+ARG PIP_INDEX_URL=https://pypi.org/simple
+
+RUN pip install --index-url $PIP_INDEX_URL \
+    --extra-index-url $EXTRA_PIP_INDEX_URL $EXTRA_PIP_ARGS \
+    -r requirements.txt
 
 COPY . .
 
@@ -47,7 +47,7 @@ argparser.add_argument('-p', '--provider', dest='p', default='http://localhost:8
 argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
 argparser.add_argument('-c', type=str, help='config override directory')
 argparser.add_argument('-f', action='store_true', help='force clear previous state')
-argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
+argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:foo:1:oldchain', help='chain spec')
 argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string')
 argparser.add_argument('-r', '--registry', dest='r', type=str, help='Contract registry address')
 argparser.add_argument('--batch-size', dest='batch_size', default=50, type=int, help='burst size of sending transactions to node')
@@ -70,7 +70,7 @@ else:
 config.process()
 args_override = {
     'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
-    'CIC_CHAIN_SPEC': getattr(args, 'i'),
+    'CHAIN_SPEC': getattr(args, 'i'),
     'KEYSTORE_FILE_PATH': getattr(args, 'y')
 }
 config.dict_override(args_override, 'cli')
@@ -78,7 +78,7 @@ config.add(args.user_dir, '_USERDIR', True)
 
 #user_dir = args.user_dir
 
-chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
+chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
 chain_str = str(chain_spec)
 
 old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
@@ -106,7 +106,7 @@ account_registry_address = registry.parse_address_of(r)
 logg.info('using account registry {}'.format(account_registry_address))
 
 dirs = initialize_dirs(config.get('_USERDIR'), force_reset=args.f)
+dirs['phone'] = os.path.join(config.get('_USERDIR'))
 
 def register_eth(i, u):
 
@@ -165,8 +165,8 @@ if __name__ == '__main__':
         new_address = register_eth(i, u)
         if u.identities.get('evm') == None:
             u.identities['evm'] = {}
-        sub_chain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id())
-        u.identities['evm'][sub_chain_str] = [new_address]
+        sub_chain_str = '{}:{}'.format(chain_spec.network_id(), chain_spec.common_name())
+        u.identities['evm']['foo'][sub_chain_str] = [new_address]
 
         new_address_clean = strip_0x(new_address)
         filepath = os.path.join(
@@ -188,7 +188,6 @@ if __name__ == '__main__':
 
         phone_object = phonenumbers.parse(u.tel)
         phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
-        logg.debug('>>>>> Using phone {}'.format(phone))
         meta_phone_key = generate_metadata_pointer(phone.encode('utf-8'), MetadataPointer.PHONE)
         meta_phone_filepath = os.path.join(dirs['phone'], 'meta', meta_phone_key)
 
@@ -221,9 +220,9 @@ if __name__ == '__main__':
         )
         os.makedirs(os.path.dirname(filepath), exist_ok=True)
 
-        sub_old_chain_str = '{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())
+        sub_old_chain_str = '{}:{}'.format(old_chain_spec.network_id(), old_chain_spec.common_name())
         f = open(filepath, 'w')
-        k = u.identities['evm'][sub_old_chain_str][0]
+        k = u.identities['evm']['foo'][sub_old_chain_str][0]
         tag_data = {'tags': user_tags[strip_0x(k)]}
         f.write(json.dumps(tag_data))
         f.close()
50
apps/data-seeding/import_ussd.sh
Normal file → Executable file
50
apps/data-seeding/import_ussd.sh
Normal file → Executable file
@ -2,11 +2,11 @@
|
|||||||
|
|
||||||
if [[ -d "$OUT_DIR" ]]
|
if [[ -d "$OUT_DIR" ]]
|
||||||
then
|
then
|
||||||
echo "found existing IMPORT DIR cleaning up..."
|
echo -e "\033[;96mfound existing IMPORT DIR cleaning up...\033[;96m"
|
||||||
rm -rf "$OUT_DIR"
|
rm -rf "$OUT_DIR"
|
||||||
mkdir -p "$OUT_DIR"
|
mkdir -p "$OUT_DIR"
|
||||||
else
|
else
|
||||||
echo "IMPORT DIR does not exist creating it."
|
echo -e "\033[;96mIMPORT DIR does not exist creating it.\033[;96m"
|
||||||
mkdir -p "$OUT_DIR"
|
mkdir -p "$OUT_DIR"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -14,81 +14,81 @@ fi
 timeout 5 celery inspect ping -b "$CELERY_BROKER_URL"
 if [[ $? -eq 124 ]]
 then
->&2 echo "Celery workers not available. Is the CELERY_BROKER_URL ($CELERY_BROKER_URL) correct?"
+>&2 echo -e "\033[;96mCelery workers not available. Is the CELERY_BROKER_URL ($CELERY_BROKER_URL) correct?\033[;96m"
 exit 1
 fi

-echo "Creating seed data..."
+echo -e "\033[;96mCreating seed data...\033[;96m"
 python create_import_users.py -vv -c "$CONFIG" --dir "$OUT_DIR" "$NUMBER_OF_USERS"
 wait $!

-echo "Check for running celery workers ..."
+echo -e "\033[;96mCheck for running celery workers ...\033[;96m"
 if [ -f ./cic-ussd-import.pid ];
 then
-echo "Found a running worker. Killing ..."
+echo -e "\033[;96mFound a running worker. Killing ...\033[;96m"
 kill -9 $(<cic-ussd-import.pid)
 fi

-echo "Purge tasks from celery worker"
+echo -e "\033[;96mPurge tasks from celery worker\033[;96m"
 celery -A cic_ussd.import_task purge -Q "$CELERY_QUEUE" --broker redis://"$REDIS_HOST":"$REDIS_PORT" -f

-echo "Start celery work and import balance job"
+echo -e "\033[;96mStart celery work and import balance job\033[;96m"
 if [ "$INCLUDE_BALANCES" != "y" ]
 then
-echo "Running worker without opening balance transactions"
+echo -e "\033[;96mRunning worker without opening balance transactions\033[;96m"
 TARGET_TX_COUNT=$NUMBER_OF_USERS
 nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$OUT_DIR" > nohup.out 2> nohup.err < /dev/null &
 else
-echo "Running worker with opening balance transactions"
+echo -e "\033[;96mRunning worker with opening balance transactions\033[;96m"
 TARGET_TX_COUNT=$((NUMBER_OF_USERS*2))
 nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --include-balances --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$OUT_DIR" &
 fi

-echo "Target count set to ${TARGET_TX_COUNT}"
+echo -e "\033[;96mTarget count set to ${TARGET_TX_COUNT}"
 until [ -f ./cic-import-ussd.pid ]
 do
-echo "Polling for celery worker pid file..."
+echo -e "\033[;96mPolling for celery worker pid file...\033[;96m"
 sleep 1
 done
 IMPORT_BALANCE_JOB=$(<cic-import-ussd.pid)

-echo "Start import users job"
+echo -e "\033[;96mStart import users job\033[;96m"
 if [ "$USSD_SSL" == "y" ]
 then
-echo "Targeting secure ussd-user server"
+echo -e "\033[;96mTargeting secure ussd-user server\033[;96m"
-python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" "$OUT_DIR"
+python cic_ussd/import_users.py -vv -f -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" "$OUT_DIR"
 else
-python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" --ussd-no-ssl "$OUT_DIR"
+python cic_ussd/import_users.py -vv -f -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" --ussd-no-ssl "$OUT_DIR"
 fi

-echo "Waiting for import balance job to complete ..."
+echo -e "\033[;96mWaiting for import balance job to complete ...\033[;96m"
 tail --pid="$IMPORT_BALANCE_JOB" -f /dev/null
 set -e

-echo "Importing pins"
+echo -e "\033[;96mImporting pins\033[;96m"
 python cic_ussd/import_pins.py -c "$CONFIG" -vv "$OUT_DIR"
 set +e
 wait $!
 set -e

-echo "Importing ussd data"
+echo -e "\033[;96mImporting ussd data\033[;96m"
 python cic_ussd/import_ussd_data.py -c "$CONFIG" -vv "$OUT_DIR"
 set +e
 wait $!

-echo "Importing person metadata"
+echo -e "\033[;96mImporting person metadata\033[;96m"
 node cic_meta/import_meta.js "$OUT_DIR" "$NUMBER_OF_USERS"

-echo "Import preferences metadata"
+echo -e "\033[;96mImport preferences metadata\033[;96m"
 node cic_meta/import_meta_preferences.js "$OUT_DIR" "$NUMBER_OF_USERS"

 CIC_NOTIFY_DATABASE=postgres://$DATABASE_USER:$DATABASE_PASSWORD@$DATABASE_HOST:$DATABASE_PORT/$NOTIFY_DATABASE_NAME
 NOTIFICATION_COUNT=$(psql -qtA "$CIC_NOTIFY_DATABASE" -c 'SELECT COUNT(message) FROM notification WHERE message IS NOT NULL')
-while [[ "$NOTIFICATION_COUNT" < "$TARGET_TX_COUNT" ]]
+while (("$NOTIFICATION_COUNT" < "$TARGET_TX_COUNT" ))
 do
 NOTIFICATION_COUNT=$(psql -qtA "$CIC_NOTIFY_DATABASE" -c 'SELECT COUNT(message) FROM notification WHERE message IS NOT NULL')
 sleep 5
-echo "Notification count is: ${NOTIFICATION_COUNT} of ${TARGET_TX_COUNT}. Checking after 5 ..."
+echo -e "\033[;96mNotification count is: ${NOTIFICATION_COUNT} of ${TARGET_TX_COUNT}. Checking after 5 ...\033[;96m"
 done
-echo "Running verify script."
+echo -e "\033[;96mRunning verify script.\033[;96m"
-python verify.py -c "$CONFIG" -v -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --exclude "$EXCLUSIONS" --meta-provider "$META_URL" --token-symbol "$TOKEN_SYMBOL" --ussd-provider "$USSD_PROVIDER" "$OUT_DIR"
+python verify.py -v -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --exclude "$EXCLUSIONS" --meta-provider "$META_URL" --token-symbol "$TOKEN_SYMBOL" --ussd-provider "$USSD_PROVIDER" "$OUT_DIR"
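
Editor's note on the loop condition changed near the end of this script: inside [[ ... ]], the < operator compares strings lexicographically (so "9" sorts after "10"), while the arithmetic context (( ... )) compares the counts as numbers, which is what the polling loop needs. The same pitfall, sketched in Python for illustration:

    # Illustrative only: string comparison vs numeric comparison of two counts.
    notification_count = '9'
    target_tx_count = '10'

    print(notification_count < target_tx_count)              # False: lexicographic, '9' > '1'
    print(int(notification_count) < int(target_tx_count))    # True: 9 < 10
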
@@ -1,10 +1,10 @@
 sarafu-faucet~=0.0.7a2
-cic-eth[tools]~=0.12.4a12
+cic-eth[tools]~=0.12.4a13
-cic-types~=0.2.0a1
+cic-types~=0.2.0a6
 funga>=0.5.1a1,<=0.5.15
 faker==4.17.1
-chainsyncer~=0.0.6a3
+chainsyncer~=0.0.7a3
-chainlib-eth~=0.0.9rc4
+chainlib-eth~=0.0.10a10
 eth-address-index~=0.2.4a1
 eth-contract-registry~=0.6.3a3
 eth-accounts-index~=0.1.2a3

@@ -25,10 +25,9 @@ from chainlib.eth.gas import (
 from chainlib.eth.tx import TxFactory
 from chainlib.hash import keccak256_string_to_hex
 from chainlib.jsonrpc import JSONRPCRequest
-from cic_types.models.person import (
-Person,
-generate_metadata_pointer,
-)
+from cic_types.models.person import Person, identity_tag
+from cic_types.condiments import MetadataPointer
+from cic_types.processor import generate_metadata_pointer
 from erc20_faucet import Faucet
 from eth_erc20 import ERC20
 from hexathon.parse import strip_0x, add_0x
@@ -74,7 +73,7 @@ all_tests = eth_tests + custodial_tests + metadata_tests + phone_tests
 argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
 argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
 argparser.add_argument('-c', type=str, help='config override dir')
-argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
+argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:foo:1:oldchain', help='chain spec')
 argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
 argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url')
 argparser.add_argument('--ussd-provider', type=str, dest='ussd_provider', default='http://localhost:63315', help='cic-ussd url')
@@ -108,7 +107,7 @@ config.process()
 # override args
 args_override = {
 'CHAIN_SPEC': getattr(args, 'i'),
-'ETH_PROVIDER': getattr(args, 'p'),
+'RPC_PROVIDER': getattr(args, 'p'),
 'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
 }
 config.dict_override(args_override, 'cli flag')
@@ -307,7 +306,7 @@ class Verifier:


 def verify_gas(self, address, balance_token=None):
-o = balance(address)
+o = balance(add_0x(address))
 r = self.conn.do(o)
 logg.debug('wtf {}'.format(r))
 actual_balance = int(strip_0x(r), 16)
@@ -323,7 +322,7 @@ class Verifier:


 def verify_metadata(self, address, balance=None):
-k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), ':cic.person')
+k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), MetadataPointer.PERSON)
 url = os.path.join(config.get('_META_PROVIDER'), k)
 logg.debug('verify metadata url {}'.format(url))
 try:
@@ -367,7 +366,7 @@ class Verifier:

 p = Person.deserialize(o)

-k = generate_metadata_pointer(p.tel.encode('utf-8'), ':cic.phone')
+k = generate_metadata_pointer(p.tel.encode('utf-8'), MetadataPointer.PHONE)
 url = os.path.join(config.get('_META_PROVIDER'), k)
 logg.debug('verify metadata phone url {}'.format(url))
 try:
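
Editor's note on the metadata changes above: verify.py stops passing raw salt strings such as ':cic.person' to generate_metadata_pointer and passes members of the MetadataPointer enum from cic_types.condiments instead. The sketch below only illustrates the shape of such an enum-salted pointer scheme; the enum values are taken from the old string salts in the diff, but the hashing and function name are assumptions, not the cic-types implementation.

    # Rough sketch under stated assumptions; NOT the cic-types implementation.
    import enum
    import hashlib

    class MetadataPointer(enum.Enum):
        PERSON = ':cic.person'
        PHONE = ':cic.phone'

    def metadata_pointer_sketch(identifier: bytes, pointer: MetadataPointer) -> str:
        # Derive a deterministic lookup key from the identifier and the pointer's salt.
        h = hashlib.sha256()
        h.update(identifier)
        h.update(pointer.value.encode('utf-8'))
        return h.hexdigest()

    print(metadata_pointer_sketch(bytes.fromhex('00' * 20), MetadataPointer.PERSON))

Using an enum instead of bare strings means a typo in the salt fails at attribute lookup rather than silently producing a different key.
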
@@ -427,7 +426,7 @@ class Verifier:
 def main():
 global chain_str, block_offset, user_dir

-conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
+conn = EthHTTPConnection(config.get('RPC_PROVIDER'))
 gas_oracle = OverrideGasOracle(conn=conn, limit=8000000)

 # Get Token registry address
@@ -505,10 +504,17 @@ def main():
 u = Person.deserialize(o)
 #logg.debug('data {}'.format(u.identities['evm']))

-subchain_str = '{}:{}'.format(chain_spec.common_name(), chain_spec.network_id())
-new_address = u.identities['evm'][subchain_str][0]
-subchain_str = '{}:{}'.format(old_chain_spec.common_name(), old_chain_spec.network_id())
-old_address = u.identities['evm'][subchain_str][0]
+new_chain_spec = chain_spec.asdict()
+arch = new_chain_spec.get('arch')
+fork = new_chain_spec.get('fork')
+tag = identity_tag(new_chain_spec)
+new_address = u.identities[arch][fork][tag][0]
+
+old_chainspec = old_chain_spec.asdict()
+arch = old_chainspec.get('arch')
+fork = old_chainspec.get('fork')
+tag = identity_tag(old_chainspec)
+old_address = u.identities[arch][fork][tag][0]
 balance = 0
 try:
 balance = balances[old_address]
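
Editor's note on the last hunk: the address lookup is now driven by the chain spec dictionary, using its arch and fork components for the nesting levels and identity_tag() for the final key, which matches the four-part default spec 'evm:foo:1:oldchain' introduced above. A self-contained sketch under the assumption that the spec layout is '<arch>:<fork>:<network_id>:<common_name>' and that the tag is '<network_id>:<common_name>' (as the import script above writes it); the parser and tag helper here are stand-ins, not the real chainlib/cic_types implementations.

    # Self-contained sketch (assumed semantics); the parser and tag helper stand in
    # for chainlib/cic_types and are not the real implementations.
    def parse_chain_spec(spec):
        # Assumed layout: '<arch>:<fork>:<network_id>:<common_name>'
        arch, fork, network_id, common_name = spec.split(':')
        return {'arch': arch, 'fork': fork, 'network_id': int(network_id), 'common_name': common_name}

    def identity_tag_sketch(spec):
        return '{}:{}'.format(spec['network_id'], spec['common_name'])

    identities = {'evm': {'foo': {'1:oldchain': ['0x' + 'aa' * 20]}}}

    old_spec = parse_chain_spec('evm:foo:1:oldchain')
    old_address = identities[old_spec['arch']][old_spec['fork']][identity_tag_sketch(old_spec)][0]
    print(old_address)   # the address stored under evm/foo/1:oldchain
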
File diff suppressed because it is too large