Update README

parent c415a6b180
commit 3431991565
@@ -199,9 +199,10 @@ async function processRequest(req, res) {
     return;
   }
 
+  const responseContentLength = (new TextEncoder().encode(content)).length;
   res.writeHead(200, {
     "Content-Type": contentType,
-    "Content-Length": content.length,
+    "Content-Length": responseContentLength,
   });
   res.write(content);
   res.end();
72 apps/contract-migration/scripts/README Normal file
@@ -0,0 +1,72 @@
# DATA GENERATION TOOLS

This folder contains tools to generate and import test data.

## DATA CREATION

This step does not need the cluster to be running.

Vanilla:

`python create_import_users.py [--dir <datadir>] <number_of_users>`

If you want to use the `import_balance.py` script to add to the users' balances from an external address, add:

`python create_import_users.py --gift-threshold <max_units_to_send> [--dir <datadir>] <number_of_users>`

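For orientation, the generated `<datadir>` ends up containing an `old/` tree of person JSON files (sharded by the first characters of the old address), a `phone/` index tree, and a `balances.csv`, as written by `create_import_users.py` below. A minimal sketch for inspecting that output, assuming the default `out` directory:

```python
# Illustrative only: count what create_import_users.py wrote to <datadir>.
import os
import sys

datadir = sys.argv[1] if len(sys.argv) > 1 else 'out'  # matches the --dir default

users = 0
for root, dirs, files in os.walk(os.path.join(datadir, 'old')):
    users += len([fn for fn in files if fn.endswith('.json')])

with open(os.path.join(datadir, 'balances.csv')) as f:
    balance_rows = sum(1 for _ in f)

print('{} user records, {} balance rows'.format(users, balance_rows))
```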
## IMPORT

Make sure the following is running in the cluster:

* eth
* postgres
* redis
* cic-eth-tasker
* cic-eth-dispatcher
* cic-eth-manager-head

You will want to run these in sequence:

## 1. Metadata

`node import_meta.js <datadir> <number_of_users>`

Monitors a folder for output from the `import_users.py` script, adding the metadata found to the `cic-meta` service.

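Roughly, the importer watches the output tree written by `import_users.py` and pushes each person JSON file to `cic-meta` under its metadata pointer. The sketch below restates that loop in Python for illustration only; the pointer derivation and the default cic-meta URL are taken from the verify script later in this commit, while the watched path, HTTP method and headers are assumptions (the real tool is `import_meta.js`).

```python
# Rough sketch of the metadata import loop; not the actual import_meta.js.
import json
import os
import time
import urllib.request

from cic_types.models.person import generate_metadata_pointer

meta_url = 'http://localhost:63380/'  # default cic-meta url used elsewhere in this commit
watch_dir = 'out/new'                 # assumed location of import_users.py output

seen = set()
while True:
    for root, dirs, files in os.walk(watch_dir):
        for fn in files:
            if not fn.endswith('.json') or fn in seen:
                continue
            seen.add(fn)
            address = fn[:-len('.json')]
            k = generate_metadata_pointer(bytes.fromhex(address), ':cic.person')
            data = open(os.path.join(root, fn), 'rb').read()
            req = urllib.request.Request(meta_url + k, data=data, method='PUT')  # method is an assumption
            req.add_header('Content-Type', 'application/json')
            urllib.request.urlopen(req)
    time.sleep(1)
```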
## 2. Balances

(Only if you used the `--gift-threshold` option above)

`python import_balance.py -c config -i <newchain:id> -r <cic_registry_address> -p <eth_provider> --head <datadir>`

This will monitor newly mined blocks and send balances to the newly created accounts.

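The amounts sent come from the `balances.csv` written during data creation (one `old_address,amount` row per user). A minimal sketch of reading it, mirroring how the verify script loads the same file; the `out` path assumes `--dir` was left at its default:

```python
# Minimal sketch: load balances.csv (written by create_import_users.py)
# into a dict keyed by the old chain address.
balances = {}
with open('out/balances.csv') as f:
    for line in f:
        address, value = line.rstrip().split(',')
        balances[address] = int(value)
print('loaded {} balances'.format(len(balances)))
```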
## 3. Users

Without any modifications to the cluster and config files:

`python import_users.py -c config --redis-host-callback redis <datadir>`

**A note on the callback**: the script uses a redis callback to retrieve the newly generated custodial address. This is the redis server _from the perspective of the cic-eth component_.

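In other words, the import script tells cic-eth where to publish the result, then blocks on a redis channel until the new custodial address comes back. A rough sketch of the receiving side, assuming redis-py; the channel name and payload shape are illustrative, not the actual protocol used by `import_users.py`:

```python
# Illustrative receiver for the redis callback; channel name and payload
# shape are assumptions, not the real import_users.py wiring.
import json
import redis

r = redis.Redis(host='redis')        # must be reachable from cic-eth, hence --redis-host-callback
ps = r.pubsub()
ps.subscribe('cic-import-callback')  # hypothetical channel name

for message in ps.listen():
    if message['type'] != 'message':
        continue
    result = json.loads(message['data'])
    print('new custodial address:', result)
    break
```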
## VERIFY

`python verify.py -c config -i <newchain:id> -r <cic_registry_address> -p <eth_provider> <datadir>`

Checks:

* Private key is in cic-eth keystore
* Address is in accounts index
* Address has balance matching the gift threshold
* Metadata can be retrieved and has exact match

Should exit with code 0 if all input data is found in the respective services.

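Since the pass/fail signal is the exit code, the verifier is easy to wrap in automation; a small sketch, where all argument values are placeholders:

```python
# Run verify.py and branch on its exit code; argument values are placeholders.
import subprocess
import sys

result = subprocess.run([
    'python', 'verify.py',
    '-c', 'config',
    '-i', 'newchain:1',             # placeholder chain spec
    '-r', '0x0000000000000000000000000000000000000000',  # placeholder registry address
    '-p', 'http://localhost:8545',  # placeholder eth provider
    'out',
])
if result.returncode != 0:
    sys.exit('verification failed with code {}'.format(result.returncode))
print('all records verified')
```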
## KNOWN ISSUES

If the faucet disbursement is set to a non-zero amount, the balances will be off. The verify script needs to be improved to check the faucet amount.
214 apps/contract-migration/scripts/create_import_users.py Normal file
@@ -0,0 +1,214 @@
#!/usr/bin/python

# standard imports
import json
import time
import datetime
import random
import logging
import os
import base64
import hashlib
import sys
import argparse

# external imports
import vobject
import celery
import web3
from faker import Faker
import cic_registry
import confini
from cic_eth.api import Api
from cic_types.models.person import (
        Person,
        generate_vcard_from_contact_data,
        get_contact_data_from_vcard,
        )
from chainlib.eth.address import to_checksum

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

fake = Faker(['sl', 'en_US', 'no', 'de', 'ro'])

config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='Config dir')
argparser.add_argument('--gift-threshold', type=int, help='If set, users will be funded with additional random balance (in token integer units)')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
argparser.add_argument('--dir', default='out', type=str, help='path to users export dir tree')
argparser.add_argument('user_count', type=int, help='amount of users to generate')
args = argparser.parse_args()

if args.v:
    logg.setLevel(logging.INFO)
elif args.vv:
    logg.setLevel(logging.DEBUG)

config = confini.Config(args.c, os.environ.get('CONFINI_ENV_PREFIX'))
config.process()
logg.info('loaded config\n{}'.format(config))


# time window for randomly generated registration dates: the last 150 weeks
dt_now = datetime.datetime.utcnow()
dt_then = dt_now - datetime.timedelta(weeks=150)
ts_now = int(dt_now.timestamp())
ts_then = int(dt_then.timestamp())

celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))

api = Api(config.get('CIC_CHAIN_SPEC'))

gift_max = args.gift_threshold or 0
gift_factor = (10**6)

categories = [
        "food/water",
        "fuel/energy",
        "education",
        "health",
        "shop",
        "environment",
        "transport",
        "farming/labor",
        "savingsgroup",
        ]

phone_idx = []

user_dir = args.dir
user_count = args.user_count


def genPhoneIndex(phone):
    # deterministic lookup key for a phone number: sha256 over the msisdn and a constant salt
    h = hashlib.new('sha256')
    h.update(phone.encode('utf-8'))
    h.update(b'cic.msisdn')
    return h.digest().hex()


def genId(addr, typ):
    h = hashlib.new('sha256')
    h.update(bytes.fromhex(addr[2:]))
    h.update(typ.encode('utf-8'))
    return h.digest().hex()


def genDate():
    # random registration date within the time window defined above
    logg.info(ts_then)
    ts = random.randint(ts_then, ts_now)
    return datetime.datetime.fromtimestamp(ts).timestamp()


def genPhone():
    return fake.msisdn()


def genPersonal(phone):
    fn = fake.first_name()
    ln = fake.last_name()
    e = fake.email()

    return generate_vcard_from_contact_data(ln, fn, phone, e)


def genCats():
    i = random.randint(0, 3)
    return random.choices(categories, k=i)


def genAmount():
    return random.randint(0, gift_max) * gift_factor


def genDob():
    # partial date of birth: sometimes only the year, sometimes the full date
    dob_src = fake.date_of_birth(minimum_age=15)
    dob = {}

    if random.random() < 0.5:
        dob['year'] = dob_src.year

        if random.random() > 0.5:
            dob['month'] = dob_src.month
            dob['day'] = dob_src.day

    return dob


def gen():
    old_blockchain_address = '0x' + os.urandom(20).hex()
    old_blockchain_checksum_address = to_checksum(old_blockchain_address)
    gender = random.choice(['female', 'male', 'other'])
    phone = genPhone()
    city = fake.city_name()
    v = genPersonal(phone)

    contact_data = get_contact_data_from_vcard(v)
    p = Person()
    p.load_vcard(contact_data)

    p.date_registered = genDate()
    p.date_of_birth = genDob()
    p.gender = gender
    p.identities = {
            'evm': {
                'oldchain:1': [
                    old_blockchain_checksum_address,
                    ],
                },
            }
    p.location['area_name'] = city
    if random.randint(0, 1):
        # random coordinates within valid latitude/longitude ranges
        p.identities['latitude'] = (random.random() * 180) - 90 #fake.local_latitude()
        p.identities['longitude'] = (random.random() * 360) - 180 #fake.local_latitude()

    return (old_blockchain_checksum_address, phone, p)


def prepareLocalFilePath(datadir, address):
    # shard output into subdirectories keyed by the first four hex characters of the address
    parts = [
            address[:2],
            address[2:4],
            ]
    dirs = '{}/{}/{}'.format(
            datadir,
            parts[0],
            parts[1],
            )
    os.makedirs(dirs, exist_ok=True)
    return dirs


if __name__ == '__main__':

    base_dir = os.path.join(user_dir, 'old')
    os.makedirs(base_dir, exist_ok=True)

    fa = open(os.path.join(user_dir, 'balances.csv'), 'w')

    for i in range(user_count):

        (eth, phone, o) = gen()
        uid = eth[2:].upper()

        print(o)

        d = prepareLocalFilePath(base_dir, uid)
        f = open('{}/{}'.format(d, uid + '.json'), 'w')
        json.dump(o.serialize(), f)
        f.close()

        pidx = genPhoneIndex(phone)
        d = prepareLocalFilePath(os.path.join(user_dir, 'phone'), uid)
        f = open('{}/{}'.format(d, pidx), 'w')
        f.write(eth)
        f.close()

        amount = genAmount()
        fa.write('{},{}\n'.format(eth, amount))
        logg.debug('pidx {}, uid {}, eth {}, amount {}'.format(pidx, uid, eth, amount))

    fa.close()
@@ -26,7 +26,7 @@ function sendit(uid, envelope) {
   };
   let url = config.get('META_URL');
   url = url.replace(new RegExp('^(.+://[^/]+)/*$'), '$1/');
-  //console.debug('url: ' + url + uid);
+  console.log('posting to url: ' + url + uid);
   const req = http.request(url + uid, opts, (res) => {
     res.on('data', process.stdout.write);
     res.on('end', () => {
@@ -9,8 +9,9 @@ import re
 import hashlib
 import csv
 import json
+from urllib.request import urlopen
 
-# third-party impotts
+# external imports
 import celery
 import eth_abi
 import confini
@@ -39,7 +40,10 @@ from chainlib.eth.error import EthException
 from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
 from crypto_dev_signer.keystore import DictKeystore
 from cic_eth.api.api_admin import AdminApi
-from cic_types.models.person import Person
+from cic_types.models.person import (
+        Person,
+        generate_metadata_pointer,
+        )
 
 logging.basicConfig(level=logging.WARNING)
 logg = logging.getLogger()
@@ -49,7 +53,9 @@ config_dir = '/usr/local/etc/cic-syncer'
 argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
 argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
 argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
+argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='oldchain:1', help='chain spec')
 argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
+argparser.add_argument('--meta-provider', type=str, dest='meta_provider', default='http://localhost:63380', help='cic-meta url')
 argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
 argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
 argparser.add_argument('-v', help='be verbose', action='store_true')
@@ -79,14 +85,17 @@ logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
 
 celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
 
-chain_str = config.get('CIC_CHAIN_SPEC')
-chain_spec = ChainSpec.from_chain_str(chain_str)
+chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
+chain_str = str(chain_spec)
+old_chain_spec = ChainSpec.from_chain_str(args.old_chain_spec)
+old_chain_str = str(old_chain_spec)
 user_dir = args.user_dir # user_out_dir from import_users.py
+meta_url = args.meta_provider
 
 
 class Verifier:
 
-    def __init__(self, conn, cic_eth_api, gas_oracle, chain_spec, index_address, token_address):
+    def __init__(self, conn, cic_eth_api, gas_oracle, chain_spec, index_address, token_address, data_dir):
         self.conn = conn
         self.gas_oracle = gas_oracle
         self.chain_spec = chain_spec
@@ -95,6 +104,7 @@ class Verifier:
         self.erc20_tx_factory = ERC20TxFactory(chain_id=chain_spec.chain_id(), gas_oracle=gas_oracle)
         self.tx_factory = TxFactory(chain_id=chain_spec.chain_id(), gas_oracle=gas_oracle)
         self.api = cic_eth_api
+        self.data_dir = data_dir
 
 
     def verify_accounts_index(self, address):
@@ -129,12 +139,37 @@ class Verifier:
             raise ValueError(address, r)
 
 
+    def verify_metadata(self, address):
+        k = generate_metadata_pointer(bytes.fromhex(strip_0x(address)), ':cic.person')
+        url = os.path.join(meta_url, k)
+        logg.debug('verify metadata url {}'.format(url))
+        res = urlopen(url)
+        b = res.read()
+        o_retrieved = json.loads(b.decode('utf-8'))
+
+        upper_address = strip_0x(address).upper()
+        f = open(os.path.join(
+            self.data_dir,
+            'new',
+            upper_address[:2],
+            upper_address[2:4],
+            upper_address + '.json',
+            ), 'r'
+            )
+        o_original = json.load(f)
+        f.close()
+
+        if o_original != o_retrieved:
+            raise ValueError(o_retrieved)
+
+
     def verify(self, address, balance):
         logg.debug('verify {} {}'.format(address, balance))
 
         self.verify_local_key(address)
         self.verify_accounts_index(address)
         self.verify_balance(address, balance)
+        self.verify_metadata(address)
 
 
 class MockClient:
@@ -195,22 +230,6 @@ def main():
     sarafu_token_address = to_checksum(eth_abi.decode_single('address', bytes.fromhex(strip_0x(r))))
     logg.info('found token address {}'.format(sarafu_token_address))
 
-#    addresses = {}
-#    f = open('{}/addresses.csv'.format(user_dir, 'r'))
-#    while True:
-#        l = f.readline()
-#        if l == None:
-#            break
-#        r = l.split(',')
-#        try:
-#            k = r[0]
-#            v = r[1].rstrip()
-#            addresses[k] = v
-#            sys.stdout.write('loading address mapping {} -> {}'.format(k, v).ljust(200) + "\r")
-#        except IndexError as e:
-#            break
-#    f.close()
-
     balances = {}
     f = open('{}/balances.csv'.format(user_dir, 'r'))
     remove_zeros = 10**12
@@ -233,7 +252,7 @@ def main():
 
     api = AdminApi(MockClient())
 
-    verifier = Verifier(conn, api, gas_oracle, chain_spec, account_index_address, sarafu_token_address)
+    verifier = Verifier(conn, api, gas_oracle, chain_spec, account_index_address, sarafu_token_address, user_dir)
 
     user_new_dir = os.path.join(user_dir, 'new')
     for x in os.walk(user_new_dir):
@@ -252,7 +271,7 @@ def main():
             u = Person(o)
 
             new_address = u.identities['evm'][chain_str][0]
-            old_address = u.identities['evm']['xdai:1'][0]
+            old_address = u.identities['evm'][old_chain_str][0]
             balance = balances[old_address]
             logg.debug('checking {} -> {} = {}'.format(old_address, new_address, balance))
 