WIP user generation revamp
This commit is contained in:
@@ -1,5 +1,5 @@
|
||||
[pgp]
|
||||
exports_dir = ../testdata/pgp
|
||||
private_key_file = ge.priv.asc
|
||||
public_key_file = ge.pub.asc
|
||||
passphrase = ge
|
||||
private_key_file = privatekeys_meta.asc
|
||||
public_key_file = publickeys_meta.asc
|
||||
passphrase = merman
|
||||
|
||||
233
apps/contract-migration/scripts/create_users.py
Normal file
233
apps/contract-migration/scripts/create_users.py
Normal file
@@ -0,0 +1,233 @@
|
||||
#!/usr/bin/python
"""Generate fake user records (vCards + balances) for import testing."""

# standard imports
import json
import time
import datetime
import random
import logging
import os
import base64
import hashlib
import sys
import argparse

# external imports
import vobject
import celery
import web3
from faker import Faker
import cic_registry
import confini
from cic_eth.api import Api
from cic_types.models.person import (
        Person,
        generate_vcard_from_contact_data,
        get_contact_data_from_vcard,
        )

logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

# Locales Faker draws names/emails/phone numbers from.
fake = Faker(['sl', 'en_US', 'no', 'de', 'ro'])

config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')

argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='Config dir')
# FIX: was action='store_true', which made the value a boolean so the gift
# range below (random.randint(0, gift_max)) collapsed to 0..1. The help text
# and usage expect an integer upper bound in token integer units.
argparser.add_argument('--gift-threshold', type=int, default=0, help='If set, users will be funded with additional random balance (in token integer units)')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
argparser.add_argument('--skip-identities', action='store_true', help='do not include generated ethereum address in user data')
argparser.add_argument('--dir', default='out', type=str, help='path to users export dir tree')
argparser.add_argument('user_count', type=int, help='amount of users to generate')
args = argparser.parse_args()

if args.v:
    logg.setLevel(logging.INFO)
elif args.vv:
    logg.setLevel(logging.DEBUG)

config = confini.Config(args.c, os.environ.get('CONFINI_ENV_PREFIX'))
config.process()
logg.info('loaded config\n{}'.format(config))

# Registration dates are drawn uniformly from the last 150 weeks.
dt_now = datetime.datetime.utcnow()
dt_then = dt_now - datetime.timedelta(weeks=150)
ts_now = int(dt_now.timestamp())
ts_then = int(dt_then.timestamp())

celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))

api = Api(config.get('CIC_CHAIN_SPEC'))

# Upper bound (whole tokens) for random gift balances; 0 disables gifting.
gift_max = args.gift_threshold or 0
# Token integer-unit multiplier applied to the whole-token gift amount.
gift_factor = (10**6)

# Seller categories a generated user may be tagged with.
categories = [
        "food/water",
        "fuel/energy",
        "education",
        "health",
        "shop",
        "environment",
        "transport",
        "farming/labor",
        "savingsgroup",
        ]

phone_idx = []

user_dir = args.dir
user_count = args.user_count
|
||||
|
||||
def genPhoneIndex(phone):
    """Return the deterministic sha256-based lookup key for a phone number.

    The phone string is hashed together with the fixed salt b'cic.msisdn'
    and the digest is returned as a lowercase hex string.
    """
    digest = hashlib.sha256()
    digest.update(phone.encode('utf-8'))
    digest.update(b'cic.msisdn')
    return digest.hexdigest()
|
||||
|
||||
|
||||
def genId(addr, typ):
    """Derive a hex identifier from an '0x'-prefixed address and a type tag.

    The raw address bytes (0x prefix stripped) and the UTF-8 type string are
    hashed with sha256; the digest is returned as a hex string.
    """
    digest = hashlib.sha256()
    digest.update(bytes.fromhex(addr[2:]))
    digest.update(typ.encode('utf-8'))
    return digest.hexdigest()
|
||||
|
||||
|
||||
def genDate():
    """Return a random registration time as float epoch seconds.

    Picks an integer timestamp uniformly between the module-level ts_then
    (150 weeks ago) and ts_now.
    """
    logg.info(ts_then)
    ts = random.randint(ts_then, ts_now)
    # FIX: the previous datetime.datetime.fromtimestamp(ts).timestamp()
    # round-trip goes through naive local time; during a DST fold the naive
    # value is ambiguous and the result can shift by the UTC-offset delta.
    # The round-trip is otherwise an identity, so return the value directly.
    return float(ts)
|
||||
|
||||
|
||||
def genPhone():
    """Return a fake MSISDN (fully numeric phone number) from Faker."""
    return fake.msisdn()
|
||||
|
||||
|
||||
def genPersonal(phone):
    """Build a vCard payload for a fake person carrying the given phone.

    Name and email are generated with Faker; the record is assembled by the
    cic_types helper (argument order: last name, first name, phone, email).
    """
    # keep the Faker call order (first, last, email) stable so seeded runs
    # reproduce the same people
    first_name = fake.first_name()
    last_name = fake.last_name()
    email = fake.email()

    return generate_vcard_from_contact_data(last_name, first_name, phone, email)
|
||||
|
||||
# v = vobject.vCard()
|
||||
# first_name = fake.first_name()
|
||||
# last_name = fake.last_name()
|
||||
# v.add('n')
|
||||
# v.n.value = vobject.vcard.Name(family=last_name, given=first_name)
|
||||
# v.add('fn')
|
||||
# v.fn.value = '{} {}'.format(first_name, last_name)
|
||||
# v.add('tel')
|
||||
# v.tel.typ_param = 'CELL'
|
||||
# v.tel.value = phone
|
||||
# v.add('email')
|
||||
# v.email.value = fake.email()
|
||||
#
|
||||
# vcard_serialized = v.serialize()
|
||||
# vcard_base64 = base64.b64encode(vcard_serialized.encode('utf-8'))
|
||||
|
||||
# return vcard_base64.decode('utf-8')
|
||||
|
||||
|
||||
def genCats():
    """Pick 0-3 seller categories (with replacement) from the category list."""
    how_many = random.randint(0, 3)
    return random.choices(categories, k=how_many)
|
||||
|
||||
|
||||
def genAmount():
    """Return a random gift balance in token integer units.

    A whole-token amount in [0, gift_max] is scaled by gift_factor.
    """
    whole_tokens = random.randint(0, gift_max)
    return whole_tokens * gift_factor
|
||||
|
||||
|
||||
def gen():
    """Generate a single fake user record.

    Returns a tuple (old_blockchain_address, phone, person) where the
    address is a random 20-byte '0x'-hex string (not a registered account),
    phone is a fake MSISDN and person is a cic_types Person loaded from a
    generated vCard.
    """
    old_blockchain_address = '0x' + os.urandom(20).hex()
    # drawn but currently unused; the random.choice call is kept so the
    # random stream matches earlier runs
    gender = random.choice(['female', 'male', 'other'])
    phone = genPhone()
    logg.debug('phone {}'.format(phone))

    vcard = genPersonal(phone)
    contact_data = get_contact_data_from_vcard(vcard)

    p = Person()
    p.load_vcard(contact_data)
    # no birth year is generated; '0000' acts as a sentinel
    p.year = '0000'

    return (old_blockchain_address, phone, p)
|
||||
|
||||
|
||||
def prepareLocalFilePath(datadir, address):
    """Create (if missing) and return the fan-out directory for an address.

    Layout is <datadir>/old/<address[0:2]>/<address[2:4]>, i.e. a two-level
    directory tree keyed on the first four characters of the address.
    """
    prefix = address[:2]
    subprefix = address[2:4]
    dirs = '{}/old/{}/{}'.format(datadir, prefix, subprefix)
    os.makedirs(dirs, exist_ok=True)
    return dirs
|
||||
|
||||
|
||||
if __name__ == '__main__':

    # NOTE(review): prepareLocalFilePath prefixes 'old' itself, so person
    # records land under <dir>/old/old/xx/yy — confirm the double nesting
    # is intended.
    base_dir = os.path.join(user_dir, 'old')
    os.makedirs(base_dir, exist_ok=True)

    # FIX: manage all file handles with context managers so they are
    # flushed and closed even when serialization fails mid-loop (the
    # balances handle previously leaked on any exception).
    with open(os.path.join(base_dir, 'balances.csv'), 'w') as fa:

        for i in range(user_count):

            (eth, phone, o) = gen()
            uid = eth[2:].upper()

            print(o)

            # write the serialized person record into the uid fan-out tree
            # NOTE(review): '.json' is appended only for the fan-out key;
            # the file itself is written without a suffix — verify against
            # the importer, which filters on '.json'.
            d = prepareLocalFilePath(base_dir, uid + '.json')
            with open('{}/{}'.format(d, uid), 'w') as f:
                json.dump(o.serialize(), f)

            # phone-number index: file named by the hashed phone, containing
            # the (old) ethereum address
            pidx = genPhoneIndex(phone)
            d = prepareLocalFilePath(os.path.join(user_dir, 'phone'), uid)
            with open('{}/{}'.format(d, pidx), 'w') as f:
                f.write(eth)

            # one "<address>,<amount>" row per user for the gift step
            amount = genAmount()
            fa.write('{},{}\n'.format(eth, amount))
            logg.debug('pidx {}, uid {}, eth {}, amount {}'.format(pidx, uid, eth, amount))
|
||||
@@ -132,9 +132,14 @@ class Handler:
|
||||
recipient[2:].upper(),
|
||||
)
|
||||
filepath = os.path.join(self.user_dir, user_file)
|
||||
f = open(filepath, 'r')
|
||||
o = json.load(f)
|
||||
f.close()
|
||||
o = None
|
||||
try:
|
||||
f = open(filepath, 'r')
|
||||
o = json.load(f)
|
||||
f.close()
|
||||
except FileNotFoundError:
|
||||
logg.error('no import record of address {}'.format(recipient))
|
||||
return
|
||||
u = Person(o)
|
||||
original_address = u.identities['evm']['xdai:1'][0]
|
||||
balance = self.balances[original_address]
|
||||
@@ -152,7 +157,7 @@ class Handler:
|
||||
# except EthException as e:
|
||||
# logg.error('send error {}'.format(e).ljust(200))
|
||||
#except KeyError as e:
|
||||
# logg.error('key error {}'.format(e).ljust(200))
|
||||
# logg.error('key record not found in imports: {}'.format(e).ljust(200))
|
||||
|
||||
|
||||
class BlockGetter:
|
||||
|
||||
@@ -1,33 +1,17 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const cic = require('cic-client-meta');
|
||||
const http = require('http');
|
||||
|
||||
const cic = require('cic-client-meta');
|
||||
|
||||
//const conf = JSON.parse(fs.readFileSync('./cic.conf'));
|
||||
|
||||
const config = new cic.Config('./config');
|
||||
config.process();
|
||||
console.log(config);
|
||||
|
||||
// Stolen from https://coderrocketfuel.com/article/recursively-list-all-the-files-in-a-directory-using-node-js
|
||||
// Thanks!
|
||||
const getAllFiles = function(dirPath, arrayOfFiles) {
|
||||
files = fs.readdirSync(dirPath)
|
||||
|
||||
arrayOfFiles = arrayOfFiles || []
|
||||
|
||||
files.forEach(function(file) {
|
||||
if (fs.statSync(dirPath + "/" + file).isDirectory()) {
|
||||
arrayOfFiles = getAllFiles(dirPath + "/" + file, arrayOfFiles)
|
||||
} else if (file.substr(-5) == '.json') {
|
||||
arrayOfFiles.push(path.join(dirPath, "/", file))
|
||||
}
|
||||
})
|
||||
|
||||
return arrayOfFiles
|
||||
}
|
||||
|
||||
async function sendit(uid, envelope) {
|
||||
function sendit(uid, envelope) {
|
||||
const d = envelope.toJSON();
|
||||
|
||||
const opts = {
|
||||
@@ -41,27 +25,27 @@ async function sendit(uid, envelope) {
|
||||
};
|
||||
let url = config.get('META_URL');
|
||||
url = url.replace(new RegExp('^(.+://[^/]+)/*$'), '$1/');
|
||||
console.debug('url: ' + url);
|
||||
console.debug('url: ' + url + uid);
|
||||
const req = http.request(url + uid, opts, (res) => {
|
||||
res.on('data', process.stdout.write);
|
||||
res.on('end', () => {
|
||||
console.log('result', res.statusCode, res.headers);
|
||||
});
|
||||
});
|
||||
|
||||
req.write(d);
|
||||
req.end();
|
||||
}
|
||||
|
||||
function doit(keystore) {
|
||||
getAllFiles(process.argv[2]).forEach((filename) => {
|
||||
const signer = new cic.PGPSigner(keystore);
|
||||
const parts = filename.split('.');
|
||||
const uid = path.basename(parts[0]);
|
||||
|
||||
const d = fs.readFileSync(filename, 'utf-8');
|
||||
function doOne(keystore, filePath) {
|
||||
const signer = new cic.PGPSigner(keystore);
|
||||
const parts = path.basename(filePath).split('.');
|
||||
const ethereum_address = path.basename(parts[0]);
|
||||
|
||||
cic.User.toKey('0x' + ethereum_address).then((uid) => {
|
||||
const d = fs.readFileSync(filePath, 'utf-8');
|
||||
const o = JSON.parse(d);
|
||||
console.log(o);
|
||||
fs.unlinkSync(filePath);
|
||||
|
||||
const s = new cic.Syncable(uid, o);
|
||||
s.setSigner(signer);
|
||||
@@ -83,6 +67,51 @@ new cic.PGPKeyStore(
|
||||
pubk,
|
||||
undefined,
|
||||
undefined,
|
||||
doit,
|
||||
importMeta,
|
||||
);
|
||||
|
||||
const batchSize = 4;
|
||||
const batchDelay = 1000;
|
||||
const total = parseInt(process.argv[3]);
|
||||
const workDir = path.join(process.argv[2], 'meta');
|
||||
let count = 0;
|
||||
let batchCount = 0;
|
||||
|
||||
|
||||
function importMeta(keystore) {
|
||||
let err;
|
||||
let files;
|
||||
|
||||
try {
|
||||
err, files = fs.readdirSync(workDir);
|
||||
} catch {
|
||||
console.error('source directory not yet ready', workDir);
|
||||
setTimeout(importMeta, batchDelay, keystore);
|
||||
return;
|
||||
}
|
||||
let limit = batchSize;
|
||||
if (files.length < limit) {
|
||||
limit = files.length;
|
||||
}
|
||||
for (let i = 0; i < limit; i++) {
|
||||
const file = files[i];
|
||||
if (file.substr(-5) != '.json') {
|
||||
console.debug('skipping file', file);
|
||||
}
|
||||
const filePath = path.join(workDir, file);
|
||||
console.log('file', count, filePath);
|
||||
doOne(keystore, filePath);
|
||||
count++;
|
||||
batchCount++;
|
||||
if (batchCount == batchSize) {
|
||||
console.debug('reached batch size, breathing');
|
||||
batchCount=0;
|
||||
setTimeout(importMeta, batchDelay, keystore);
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (count == total) {
|
||||
return;
|
||||
}
|
||||
setTimeout(importMeta, 100, keystore);
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ from chainlib.eth.address import to_checksum
|
||||
from cic_types.models.person import Person
|
||||
from cic_eth.api.api_task import Api
|
||||
from cic_registry.chain import ChainSpec
|
||||
from cic_types.processor import generate_metadata_pointer
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
@@ -68,8 +69,14 @@ r = redis.Redis(redis_host, redis_port, redis_db)
|
||||
|
||||
ps = r.pubsub()
|
||||
|
||||
user_dir = args.user_dir
|
||||
os.makedirs(os.path.join(user_dir, 'new'))
|
||||
user_new_dir = os.path.join(args.user_dir, 'new')
|
||||
os.makedirs(user_new_dir)
|
||||
|
||||
meta_dir = os.path.join(args.user_dir, 'meta')
|
||||
os.makedirs(meta_dir)
|
||||
|
||||
user_old_dir = os.path.join(args.user_dir, 'old')
|
||||
os.stat(user_old_dir)
|
||||
|
||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
||||
chain_str = str(chain_spec)
|
||||
@@ -109,8 +116,6 @@ if __name__ == '__main__':
|
||||
|
||||
i = 0
|
||||
j = 0
|
||||
user_new_dir = os.path.join(user_dir, 'new')
|
||||
user_old_dir = os.path.join(user_dir, 'old')
|
||||
for x in os.walk(user_old_dir):
|
||||
for y in x[2]:
|
||||
if y[len(y)-5:] != '.json':
|
||||
@@ -147,6 +152,9 @@ if __name__ == '__main__':
|
||||
|
||||
#old_address = to_checksum(add_0x(y[:len(y)-5]))
|
||||
#fi.write('{},{}\n'.format(new_address, old_address))
|
||||
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), 'cic.person')
|
||||
meta_filepath = os.path.join(meta_dir, '{}.json'.format(new_address_clean.upper()))
|
||||
os.symlink(filepath, meta_filepath)
|
||||
|
||||
i += 1
|
||||
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
psycopg2==2.8.6
|
||||
cic-types==0.1.0a4
|
||||
chainlib~=0.0.1a14
|
||||
cic-types==0.1.0a7
|
||||
chainlib~=0.0.1a15
|
||||
chainsyncer==0.0.1a7
|
||||
cic-eth==0.10.0a27
|
||||
confini==0.3.6b2
|
||||
celery==4.4.7
|
||||
redis==3.5.3
|
||||
hexathon==0.0.1a3
|
||||
faker==4.17.1
|
||||
|
||||
Reference in New Issue
Block a user