Compare commits

11 Commits  master ... lash/move-

| SHA1 |
|---|
| e91d0a9f14 |
| 5e11804e7f |
| 2d1316f1b8 |
| 45c018d884 |
| d83843a761 |
| 3e5dd44e0b |
| e339cbbf70 |
| 1236d34d2c |
| 747ad4dcd1 |
| e0394ebeba |
| 740a809506 |
@@ -16,6 +16,7 @@ import cic_base.config
import cic_base.log
import cic_base.argparse
import cic_base.rpc
+from cic_base.eth.syncer import chain_interface
from cic_eth_registry import CICRegistry
from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec

@@ -28,10 +29,8 @@ from hexathon import (
    strip_0x,
)
from chainsyncer.backend.sql import SQLBackend
-from chainsyncer.driver import (
-    HeadSyncer,
-    HistorySyncer,
-)
+from chainsyncer.driver.head import HeadSyncer
+from chainsyncer.driver.history import HistorySyncer
from chainsyncer.db.models.base import SessionBase

# local imports

@@ -113,10 +112,10 @@ def main():
        logg.info('resuming sync session {}'.format(syncer_backend))

    for syncer_backend in syncer_backends:
-        syncers.append(HistorySyncer(syncer_backend))
+        syncers.append(HistorySyncer(syncer_backend, chain_interface))

    syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
-    syncers.append(HeadSyncer(syncer_backend))
+    syncers.append(HeadSyncer(syncer_backend, chain_interface))

    trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
    if trusted_addresses_src == None:
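The hunks above show the recurring pattern in this compare: the syncer drivers move from the aggregate chainsyncer.driver module to chainsyncer.driver.head and chainsyncer.driver.history, and both constructors now take a chain interface as their second argument. A minimal sketch of the before/after, reusing the chain_interface object that cic_base.eth.syncer exports in the diff (the helper function name is hypothetical):

# Before: drivers imported from the aggregate module, built from the backend alone.
#   from chainsyncer.driver import HeadSyncer, HistorySyncer
#   syncer = HistorySyncer(syncer_backend)

# After: per-driver modules, with the chain interface passed explicitly.
from cic_base.eth.syncer import chain_interface
from chainsyncer.driver.head import HeadSyncer
from chainsyncer.driver.history import HistorySyncer

def build_syncers(resumed_backends, live_backend):
    # Hypothetical helper mirroring the loop in main() above: history syncers for
    # resumed sessions, one head syncer for the live backend.
    syncers = [HistorySyncer(backend, chain_interface) for backend in resumed_backends]
    syncers.append(HeadSyncer(live_backend, chain_interface))
    return syncers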
@@ -1,12 +1,13 @@
-cic-base~=0.1.2b10
+cic-base==0.1.3a3+build.4aa03607
alembic==1.4.2
confini~=0.3.6rc3
uwsgi==2.0.19.1
moolb~=0.1.0
-cic-eth-registry~=0.5.5a4
+cic-eth-registry~=0.5.6a1
SQLAlchemy==1.3.20
semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
-chainsyncer[sql]~=0.0.2a4
+chainsyncer[sql]~=0.0.3a3
+erc20-faucet~=0.2.2a1
@@ -2,6 +2,7 @@
import os
import argparse
import logging
+import re

import alembic
from alembic.config import Config as AlembicConfig

@@ -23,6 +24,8 @@ argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
+argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
+argparser.add_argument('-f', action='store_true', help='force action')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

@@ -53,4 +56,10 @@ ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrations_dir)

+if args.reset:
+    if not args.f:
+        if not re.match(r'[yY][eE]?[sS]?', input('EEK! this will DELETE the existing db. are you sure??')):
+            logg.error('user chickened out on requested reset, bailing')
+            sys.exit(1)
+    alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head')
@@ -6,6 +6,5 @@ sqlparse==0.4.1
pytest-celery==0.0.0a1
eth_tester==0.5.0b3
py-evm==0.3.0a20
web3==5.12.2
cic-eth-registry~=0.5.5a3
-cic-base[full]==0.1.2b8
+cic_base[full]==0.1.3a3+build.4aa03607
sarafu-faucet~=0.0.4a1
apps/cic-eth/MANIFEST.in (new file)

@@ -0,0 +1,2 @@
+include *requirements.txt
@@ -562,13 +562,13 @@ class AdminApi:
            tx['source_token_symbol'] = source_token.symbol
            o = erc20_c.balance_of(tx['source_token'], tx['sender'], sender_address=self.call_address)
            r = self.rpc.do(o)
-            tx['sender_token_balance'] = erc20_c.parse_balance_of(r)
+            tx['sender_token_balance'] = erc20_c.parse_balance(r)

        if destination_token != None:
            tx['destination_token_symbol'] = destination_token.symbol
            o = erc20_c.balance_of(tx['destination_token'], tx['recipient'], sender_address=self.call_address)
            r = self.rpc.do(o)
-            tx['recipient_token_balance'] = erc20_c.parse_balance_of(r)
+            tx['recipient_token_balance'] = erc20_c.parse_balance(r)
            #tx['recipient_token_balance'] = destination_token.function('balanceOf')(tx['recipient']).call()

        # TODO: this can mean either not subitted or culled, need to check other txs with same nonce to determine which
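The change above is an API rename in the ERC20 client that AdminApi uses: the response parser parse_balance_of() becomes parse_balance(), while the query builder balance_of() keeps its name. A minimal sketch of the balance lookup as it reads after the change, with the token, holder and connection variables as placeholders taken from the hunk:

# Build the eth_call for balanceOf(holder), execute it over the RPC connection,
# and decode the returned integer with the renamed parser.
o = erc20_c.balance_of(token_address, holder_address, sender_address=call_address)
r = rpc.do(o)
balance = erc20_c.parse_balance(r)  # previously: erc20_c.parse_balance_of(r)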
@@ -1,136 +0,0 @@ (entire file removed)
# standard imports
import os
import re
import logging
import argparse
import json

# third-party imports
import web3
import confini
import celery
from json.decoder import JSONDecodeError
from cic_registry.chain import ChainSpec

# local imports
from cic_eth.db import dsn_from_config
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.util import unpack_signed_raw_tx

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
migrationsdir = os.path.join(dbdir, 'migrations')

config_dir = os.path.join('/usr/local/etc/cic-eth')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config = confini.Config(args.c, args.env_prefix)
config.process()
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
}
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug('config:\n{}'.format(config))

dsn = dsn_from_config(config)
SessionBase.connect(dsn)

celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
queue = args.q

re_something = r'^/something/?'

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))


def process_something(session, env):
    r = re.match(re_something, env.get('PATH_INFO'))
    if not r:
        return None

    #if env.get('CONTENT_TYPE') != 'application/json':
    #    raise AttributeError('content type')

    #if env.get('REQUEST_METHOD') != 'POST':
    #    raise AttributeError('method')

    #post_data = json.load(env.get('wsgi.input'))

    #return ('text/plain', 'foo'.encode('utf-8'),)


# uwsgi application
def application(env, start_response):

    for k in env.keys():
        logg.debug('env {} {}'.format(k, env[k]))

    headers = []
    content = b''
    err = None

    session = SessionBase.create_session()
    for handler in [
            process_something,
            ]:
        try:
            r = handler(session, env)
        except AttributeError as e:
            logg.error('handler fail attribute {}'.format(e))
            err = '400 Impertinent request'
            break
        except JSONDecodeError as e:
            logg.error('handler fail json {}'.format(e))
            err = '400 Invalid data format'
            break
        except KeyError as e:
            logg.error('handler fail key {}'.format(e))
            err = '400 Invalid JSON'
            break
        except ValueError as e:
            logg.error('handler fail value {}'.format(e))
            err = '400 Invalid data'
            break
        except RuntimeError as e:
            logg.error('task fail value {}'.format(e))
            err = '500 Task failed, sorry I cannot tell you more'
            break
        if r != None:
            (mime_type, content) = r
            break
    session.close()

    if err != None:
        headers.append(('Content-Type', 'text/plain, charset=UTF-8',))
        start_response(err, headers)
        session.close()
        return [content]

    headers.append(('Content-Length', str(len(content))),)
    headers.append(('Access-Control-Allow-Origin', '*',));

    if len(content) == 0:
        headers.append(('Content-Type', 'text/plain, charset=UTF-8',))
        start_response('404 Looked everywhere, sorry', headers)
    else:
        headers.append(('Content-Type', mime_type,))
        start_response('200 OK', headers)

    return [content]
@@ -15,6 +15,7 @@ import cic_base.config
import cic_base.log
import cic_base.argparse
import cic_base.rpc
+from cic_base.eth.syncer import chain_interface
from cic_eth_registry.error import UnknownContractError
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS

@@ -26,10 +27,8 @@ from hexathon import (
    strip_0x,
)
from chainsyncer.backend.sql import SQLBackend
-from chainsyncer.driver import (
-    HeadSyncer,
-    HistorySyncer,
-)
+from chainsyncer.driver.head import HeadSyncer
+from chainsyncer.driver.history import HistorySyncer
from chainsyncer.db.models.base import SessionBase

# local imports

@@ -80,6 +79,7 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))



def main():
    # connect to celery
    celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))

@@ -121,11 +121,11 @@ def main():

    for syncer_backend in syncer_backends:
        try:
-            syncers.append(HistorySyncer(syncer_backend))
+            syncers.append(HistorySyncer(syncer_backend, chain_interface))
            logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
        except AttributeError:
            logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
-            syncers.append(HeadSyncer(syncer_backend))
+            syncers.append(HeadSyncer(syncer_backend, chain_interface))

    connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
@@ -9,8 +9,8 @@ import semver
version = (
    0,
    11,
-    0,
-    'beta.16',
+    1,
+    'alpha.2',
    )

version_object = semver.VersionInfo(
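The tuple above bumps cic-eth from 0.11.0-beta.16 to 0.11.1-alpha.2. As a sketch of how such a tuple typically feeds the semver.VersionInfo constructor that the hunk truncates (the keyword arguments are assumed from the major=version[0] line shown in a later hunk):

import semver

version = (0, 11, 1, 'alpha.2')

version_object = semver.VersionInfo(
    major=version[0],
    minor=version[1],
    patch=version[2],
    prerelease=version[3],
)

print(str(version_object))  # expected output: 0.11.1-alpha.2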
@@ -1,25 +1,25 @@
-cic-base~=0.1.2b15
+cic-base==0.1.3a3+build.4aa03607
celery==4.4.7
-crypto-dev-signer~=0.4.14b3
+crypto-dev-signer~=0.4.14b6
confini~=0.3.6rc3
-cic-eth-registry~=0.5.5a7
+cic-eth-registry~=0.5.6a1
redis==3.5.3
alembic==1.4.2
websockets==8.1
requests~=2.24.0
-eth_accounts_index~=0.0.11a12
-erc20-transfer-authorization~=0.3.1a7
+eth_accounts_index~=0.0.12a1
+erc20-transfer-authorization~=0.3.2a1
uWSGI==2.0.19.1
semver==2.13.0
websocket-client==0.57.0
moolb~=0.1.1b2
-eth-address-index~=0.1.1a11
-chainlib~=0.0.3rc2
+eth-address-index~=0.1.2a1
+chainlib-eth~=0.0.5a1
hexathon~=0.0.1a7
-chainsyncer[sql]==0.0.2a5
-chainqueue~=0.0.2b3
-sarafu-faucet~=0.0.3a3
-erc20-faucet~=0.2.1a5
+chainsyncer[sql]~=0.0.3a3
+chainqueue~=0.0.2b5
+sarafu-faucet~=0.0.4a1
+erc20-faucet~=0.2.2a1
coincurve==15.0.0
potaahto~=0.0.1a2
pycryptodome==3.10.1
@@ -2,6 +2,8 @@
import os
import argparse
import logging
+import re
+import sys

import alembic
from alembic.config import Config as AlembicConfig

@@ -23,6 +25,8 @@ argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
+argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
+argparser.add_argument('-f', action='store_true', help='force action')
argparser.add_argument('-v', action='store_true', help='be verbose')
argparser.add_argument('-vv', action='store_true', help='be more verbose')
args = argparser.parse_args()

@@ -53,4 +57,10 @@ ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
ac.set_main_option('sqlalchemy.url', dsn)
ac.set_main_option('script_location', migrations_dir)

+if args.reset:
+    if not args.f:
+        if not re.match(r'[yY][eE]?[sS]?', input('EEK! this will DELETE the existing db. are you sure??')):
+            logg.error('user chickened out on requested reset, bailing')
+            sys.exit(1)
+    alembic.command.downgrade(ac, 'base')
alembic.command.upgrade(ac, 'head')
@@ -9,7 +9,7 @@ import semver

logg = logging.getLogger()

-version = (0, 4, 0, 'alpha.5')
+version = (0, 4, 0, 'alpha.6')

version_object = semver.VersionInfo(
    major=version[0],
@@ -1 +1 @@
-cic_base[full_graph]~=0.1.2a61
+cic_base[full_graph]==0.1.3a3+build.4aa03607
@@ -2,4 +2,3 @@ pytest~=6.0.1
pytest-celery~=0.0.0a1
pytest-mock~=3.3.1
-pysqlite3~=0.4.3
@@ -1,4 +1,4 @@
-cic_base[full_graph]~=0.1.2b21
-cic-eth~=0.11.0b16
-cic-notify~=0.4.0a5
+cic_base[full_graph]==0.1.3a3+build.4aa03607
+cic-eth~=0.11.1a2
+cic-notify~=0.4.0a6
cic-types~=0.1.0a11
@@ -8,4 +8,4 @@ pytest-mock==3.3.1
pytest-ordering==0.6
pytest-redis==2.0.0
requests-mock==1.8.0
-tavern==1.14.2
+tavern==1.14.2
@@ -171,7 +171,11 @@ Then, in sequence, run in first terminal:

In second terminal:

-`python cic_ussd/import_users.py -v -c config out`
+`python cic_ussd/import_users.py -v --ussd-host <user_ussd_server_host> --ussd-port <user_ussd_server_port> -c config out`

+In the event that you are running the command in a local environment you may want to consider passing the `--ussd-no-ssl` flag i.e:

+`python cic_ussd/import_users.py -v --ussd-host <user_ussd_server_host> --ussd-port <user_ussd_server_port> --ussd-no-ssl -c config out`


@@ -199,6 +203,13 @@ If _number of users_ is omitted the script will run until manually interrupted.

If you imported using `cic_ussd`, the phone pointer is _already added_ and this script will do nothing.


+### Importing preferences metadata

+`node cic_meta/import_meta_preferences.js <datadir> <number_of_users>`

+If you used the `cic_ussd/import_user.py` script to import your users, preferences metadata is generated and will be imported.


##### Importing pins and ussd data (optional)

Once the user imports are complete the next step should be importing the user's pins and auxiliary ussd data. This can be done in 3 steps:
@@ -18,19 +18,17 @@ from hexathon import (
    add_0x,
)
from chainsyncer.backend.memory import MemBackend
-from chainsyncer.driver import HeadSyncer
+from chainsyncer.driver.head import HeadSyncer
from chainlib.eth.connection import EthHTTPConnection
from chainlib.eth.block import (
    block_latest,
    block_by_number,
    Block,
)
from chainlib.hash import keccak256_string_to_hex
from chainlib.eth.address import to_checksum_address
from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import TxFactory
-from chainlib.jsonrpc import jsonrpc_template
+from chainlib.jsonrpc import JSONRPCRequest
from chainlib.eth.error import EthException
from chainlib.chain import ChainSpec
from chainlib.eth.constant import ZERO_ADDRESS

@@ -38,6 +36,7 @@ from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore
from cic_types.models.person import Person
from eth_erc20 import ERC20
+from cic_base.eth.syncer import chain_interface


logging.basicConfig(level=logging.WARNING)

@@ -70,13 +69,14 @@ elif args.vv == True:
config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
-config.process()
# override args
+config.process()
+logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
    'ETH_PROVIDER': getattr(args, 'p'),
    'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
-    'KEYSTORE_FILE_PATH': getattr(args, 'key-file')
+    'KEYSTORE_FILE_PATH': getattr(args, 'y'),
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')

@@ -185,27 +185,6 @@ class Handler:
# logg.error('key record not found in imports: {}'.format(e).ljust(200))


-#class BlockGetter:
-#
-#    def __init__(self, conn, gas_oracle, nonce_oracle, chain_spec):
-#        self.conn = conn
-#        self.tx_factory = ERC20(signer=signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle, chain_id=chain_id)
-#
-#
-#    def get(self, n):
-#        o = block_by_number(n)
-#        r = self.conn.do(o)
-#        b = None
-#        try:
-#            b = Block(r)
-#        except TypeError as e:
-#            if r == None:
-#                logg.debug('block not found {}'.format(n))
-#            else:
-#                logg.error('block retrieve error {}'.format(e))
-#        return b


def progress_callback(block_number, tx_index):
    sys.stdout.write(str(block_number).ljust(200) + "\n")

@@ -226,11 +205,13 @@ def main():
    data = add_0x(registry_addressof_method)
    data += eth_abi.encode_single('bytes32', b'TokenRegistry').hex()
    txf.set_code(tx, data)

-    o = jsonrpc_template()
+    j = JSONRPCRequest()
+    o = j.template()
    o['method'] = 'eth_call'
    o['params'].append(txf.normalize(tx))
    o['params'].append('latest')
+    o = j.finalize(o)
    r = conn.do(o)
    token_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
    logg.info('found token index address {}'.format(token_index_address))

@@ -244,10 +225,11 @@ def main():
    z = h.digest()
    data += eth_abi.encode_single('bytes32', z).hex()
    txf.set_code(tx, data)
-    o = jsonrpc_template()
+    o = j.template()
    o['method'] = 'eth_call'
    o['params'].append(txf.normalize(tx))
    o['params'].append('latest')
+    o = j.finalize(o)
    r = conn.do(o)
    try:
        sarafu_token_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))

@@ -305,7 +287,7 @@ def main():
    f.close()

    syncer_backend.set(block_offset, 0)
-    syncer = HeadSyncer(syncer_backend, block_callback=progress_callback)
+    syncer = HeadSyncer(syncer_backend, chain_interface, block_callback=progress_callback)
    handler = Handler(conn, chain_spec, user_dir, balances, sarafu_token_address, signer, gas_oracle, nonce_oracle)
    syncer.add_filter(handler)
    syncer.loop(1, conn)
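The jsonrpc_template() calls in the hunks above are replaced throughout these scripts by the chainlib JSONRPCRequest helper, which also finalizes the envelope before dispatch. A minimal sketch of the new call pattern, wrapped in a hypothetical helper so it is self-contained (conn is a chainlib.eth.connection.EthHTTPConnection, tx a transaction dict normalized by TxFactory):

from chainlib.jsonrpc import JSONRPCRequest

def eth_call_latest(conn, tx):
    # Build, finalize and execute an eth_call against the latest block,
    # following the pattern introduced above.
    j = JSONRPCRequest()
    o = j.template()              # fresh JSON-RPC request envelope
    o['method'] = 'eth_call'
    o['params'].append(tx)
    o['params'].append('latest')
    o = j.finalize(o)             # completes the request before it is sent
    return conn.do(o)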
@@ -7,4 +7,5 @@ approval_escrow_address =
chain_spec = evm:bloxberg:8996
tx_retry_delay =
+trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
-user_ussd_svc_service_port=
+user_ussd_svc_service_port =
@@ -1,8 +1,2 @@
[eth]
#ws_provider = ws://localhost:8546
#ttp_provider = http://localhost:8545
provider = http://localhost:63545
gas_provider_address =
#chain_id =
abi_dir = /usr/local/share/cic/solidity/abi
account_accounts_index_writer =
@@ -204,9 +204,9 @@ def gen():
        ]))
    if random.randint(0, 1):
        # fake.local_latitude()
-        p.location['latitude'] = (random.random() + 180) - 90
+        p.location['latitude'] = (random.random() * 180) - 90
        # fake.local_latitude()
-        p.location['longitude'] = (random.random() + 360) - 180
+        p.location['longitude'] = (random.random() * 360) - 179

    return (old_blockchain_checksum_address, phone, p)
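The change above fixes a range bug in the fake location generator: random.random() returns a float in [0.0, 1.0), so adding it to the span instead of multiplying by it left every latitude stuck just above 90. A minimal sketch of the difference for the latitude case:

import random

r = random.random()            # uniform float in [0.0, 1.0)

old_latitude = (r + 180) - 90  # always lands in [90.0, 91.0)
new_latitude = (r * 180) - 90  # spreads over the full range [-90.0, 90.0)

print(old_latitude, new_latitude)

The longitude expression gets the same multiplication treatment, scaled over 360 degrees.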
@@ -18,25 +18,24 @@ from hexathon import (
    add_0x,
)
from chainsyncer.backend.memory import MemBackend
-from chainsyncer.driver import HeadSyncer
+from chainsyncer.driver.head import HeadSyncer
from chainlib.eth.connection import EthHTTPConnection
from chainlib.eth.block import (
    block_latest,
    block_by_number,
    Block,
)
from chainlib.hash import keccak256_string_to_hex
from chainlib.eth.address import to_checksum_address
from chainlib.eth.gas import OverrideGasOracle
from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.tx import TxFactory
-from chainlib.jsonrpc import jsonrpc_template
+from chainlib.jsonrpc import JSONRPCRequest
from chainlib.eth.error import EthException
from chainlib.chain import ChainSpec
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore
from cic_types.models.person import Person
from eth_erc20 import ERC20
+from cic_base.eth.syncer import chain_interface


logging.basicConfig(level=logging.WARNING)

@@ -75,7 +74,7 @@ args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
    'ETH_PROVIDER': getattr(args, 'p'),
    'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
-    'KEYSTORE_FILE_PATH': getattr(args, 'key-file')
+    'KEYSTORE_FILE_PATH': getattr(args, 'y')
}
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')

@@ -184,27 +183,6 @@ class Handler:
# logg.error('key record not found in imports: {}'.format(e).ljust(200))


-#class BlockGetter:
-#
-#    def __init__(self, conn, gas_oracle, nonce_oracle, chain_spec):
-#        self.conn = conn
-#        self.tx_factory = ERC20(signer=signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle, chain_id=chain_id)
-#
-#
-#    def get(self, n):
-#        o = block_by_number(n)
-#        r = self.conn.do(o)
-#        b = None
-#        try:
-#            b = Block(r)
-#        except TypeError as e:
-#            if r == None:
-#                logg.debug('block not found {}'.format(n))
-#            else:
-#                logg.error('block retrieve error {}'.format(e))
-#        return b


def progress_callback(block_number, tx_index):
    sys.stdout.write(str(block_number).ljust(200) + "\n")

@@ -225,11 +203,13 @@ def main():
    data = add_0x(registry_addressof_method)
    data += eth_abi.encode_single('bytes32', b'TokenRegistry').hex()
    txf.set_code(tx, data)

-    o = jsonrpc_template()
+    j = JSONRPCRequest()
+    o = j.template()
    o['method'] = 'eth_call'
    o['params'].append(txf.normalize(tx))
    o['params'].append('latest')
+    o = j.finalize(o)
    r = conn.do(o)
    token_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
    logg.info('found token index address {}'.format(token_index_address))

@@ -243,10 +223,11 @@ def main():
    z = h.digest()
    data += eth_abi.encode_single('bytes32', z).hex()
    txf.set_code(tx, data)
-    o = jsonrpc_template()
+    o = j.template()
    o['method'] = 'eth_call'
    o['params'].append(txf.normalize(tx))
    o['params'].append('latest')
+    o = j.finalize(o)
    r = conn.do(o)
    try:
        sarafu_token_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))

@@ -300,7 +281,7 @@ def main():
    f.close()

    syncer_backend.set(block_offset, 0)
-    syncer = HeadSyncer(syncer_backend, block_callback=progress_callback)
+    syncer = HeadSyncer(syncer_backend, chain_interface, block_callback=progress_callback)
    handler = Handler(conn, chain_spec, user_dir, balances, sarafu_token_address, signer, gas_oracle, nonce_oracle)
    syncer.add_filter(handler)
    syncer.loop(1, conn)
@@ -59,7 +59,7 @@ config.process()
args_override = {
    'CIC_REGISTRY_ADDRESS': getattr(args, 'r'),
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
-    'KEYSTORE_FILE_PATH': getattr(args, 'key-file')
+    'KEYSTORE_FILE_PATH': getattr(args, 'y')
}
config.dict_override(args_override, 'cli')
config.add(args.user_dir, '_USERDIR', True)
@@ -1,5 +1,5 @@
-cic-base[full_graph]==0.1.2b15
-sarafu-faucet==0.0.3a3
-cic-eth==0.11.0b16
-cic-types==0.1.0a11
-crypto-dev-signer==0.4.14b3
+cic_base[full_graph]==0.1.3a3+build.4aa03607
+sarafu-faucet==0.0.4a1
+cic-eth==0.11.1a1
+cic-types==0.1.0a13
+crypto-dev-signer==0.4.14b6
@@ -25,7 +25,7 @@ from chainlib.eth.gas import (
)
from chainlib.eth.tx import TxFactory
from chainlib.hash import keccak256_string_to_hex
-from chainlib.jsonrpc import jsonrpc_template
+from chainlib.jsonrpc import JSONRPCRequest
from cic_types.models.person import (
    Person,
    generate_metadata_pointer,

@@ -264,9 +264,11 @@ class Verifier:
        data += eth_abi.encode_single('address', address).hex()
        tx = self.tx_factory.set_code(tx, data)
        tx = self.tx_factory.normalize(tx)
-        o = jsonrpc_template()
+        j = JSONRPCRequest()
+        o = j.template()
        o['method'] = 'eth_call'
        o['params'].append(tx)
+        o = j.finalize(o)
        r = self.conn.do(o)
        logg.debug('index check for {}: {}'.format(address, r))
        n = eth_abi.decode_single('uint256', bytes.fromhex(strip_0x(r)))

@@ -429,10 +431,12 @@ def main():
    data += eth_abi.encode_single('bytes32', b'TokenRegistry').hex()
    txf.set_code(tx, data)

-    o = jsonrpc_template()
+    j = JSONRPCRequest()
+    o = j.template()
    o['method'] = 'eth_call'
    o['params'].append(txf.normalize(tx))
    o['params'].append('latest')
+    o = j.finalize(o)
    r = conn.do(o)
    token_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
    logg.info('found token index address {}'.format(token_index_address))

@@ -441,10 +445,11 @@ def main():
    data += eth_abi.encode_single('bytes32', b'AccountRegistry').hex()
    txf.set_code(tx, data)

-    o = jsonrpc_template()
+    o = j.template()
    o['method'] = 'eth_call'
    o['params'].append(txf.normalize(tx))
    o['params'].append('latest')
+    o = j.finalize(o)
    r = conn.do(o)
    account_index_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
    logg.info('found account index address {}'.format(account_index_address))

@@ -453,10 +458,11 @@ def main():
    data += eth_abi.encode_single('bytes32', b'Faucet').hex()
    txf.set_code(tx, data)

-    o = jsonrpc_template()
+    o = j.template()
    o['method'] = 'eth_call'
    o['params'].append(txf.normalize(tx))
    o['params'].append('latest')
+    o = j.finalize(o)
    r = conn.do(o)
    faucet_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
    logg.info('found faucet {}'.format(faucet_address))

@@ -471,10 +477,11 @@ def main():
    z = h.digest()
    data += eth_abi.encode_single('bytes32', z).hex()
    txf.set_code(tx, data)
-    o = jsonrpc_template()
+    o = j.template()
    o['method'] = 'eth_call'
    o['params'].append(txf.normalize(tx))
    o['params'].append('latest')
+    o = j.finalize(o)
    r = conn.do(o)
    sarafu_token_address = to_checksum_address(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
    logg.info('found token address {}'.format(sarafu_token_address))
apps/util/requirements/base_requirement.txt (new file)

@@ -0,0 +1 @@
+cic-base==0.1.3a3+build.4aa03607

apps/util/requirements/requirements.txt (new file)

@@ -0,0 +1 @@
+requirements-magic~=0.0.2

apps/util/requirements/update_base.sh (new file)
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+which pyreq-merge &> /dev/null
+if [ $? -gt 0 ]; then
+  >&2 echo pyreq-merge missing, please install requirements
+  exit 1
+fi
+
+t=$(mktemp)
+>&2 echo using tmp $t
+
+repos=(../../cic-cache ../../cic-eth ../../cic-ussd ../../data-seeding ../../cic-notify)
+
+for r in ${repos[@]}; do
+  f="$r/requirements.txt"
+  >&2 echo updating $f
+  f="$r/test_requirements.txt"
+  >&2 echo updating $f
+  pyreq-update $f base_requirement.txt > $t
+  cp $t $f
+done
@@ -387,44 +387,6 @@ services:
#        command: "/root/start_retry.sh -q cic-eth -vv"


-#  cic-eth-server:
-#    build:
-#      context: apps/
-#      dockerfile: cic-eth/docker/Dockerfile
-#    environment:
-#      CIC_CHAIN_SPEC: $CIC_CHAIN_SPEC
-#      CELERY_BROKER_URL: $CELERY_BROKER_URL
-#      CELERY_RESULT_URL: $CELERY_RESULT_URL
-#      SERVER_PORT: 8000
-#    depends_on:
-#      - eth
-#      - postgres
-#      - redis
-#    ports:
-#      - ${HTTP_PORT_CIC_ETH:-63314}:8000
-#    deploy:
-#      restart_policy:
-#        condition: on-failure
-#    volumes:
-#      - contract-config:/tmp/cic/config/:ro
-#    command:
-#      - /bin/bash
-#      - -c
-#      - |
-#        if [[ -f /tmp/cic/config/.env ]]; then source /tmp/cic/config/.env; fi
-#        "/usr/local/bin/uwsgi" \
-#        --wsgi-file /usr/src/cic-eth/cic_eth/runnable/server_agent.py \
-#        --http :80 \
-#        --pyargv -vv
-##    entrypoint:
-##      - "/usr/local/bin/uwsgi"
-##      - "--wsgi-file"
-##      - "/usr/src/cic-eth/cic_eth/runnable/server_agent.py"
-##      - "--http"
-##      - ":80"
-#    # command: "--pyargv -vv"


  cic-notify-tasker:
    build: