Dataseeding container

Philip Wafula 2021-09-21 06:04:44 +00:00
parent 2f005195e5
commit 7b57f1b4c2
11 changed files with 123 additions and 51 deletions

.gitignore

@@ -14,3 +14,4 @@ build/
**/.venv
.idea
**/.vim
**/*secret.yaml


@@ -12,13 +12,13 @@ include:
stages:
- build
- test
- deploy
- deploy
image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/docker-with-compose:latest
image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/docker-with-compose:latest
variables:
DOCKER_BUILDKIT: "1"
COMPOSE_DOCKER_CLI_BUILD: "1"
COMPOSE_DOCKER_CLI_BUILD: "1"
CI_DEBUG_TRACE: "true"
before_script:


@@ -16,7 +16,7 @@ from crypto_dev_signer.keystore.dict import DictKeystore
from import_util import BalanceProcessor, get_celery_worker_status
from import_task import ImportTask, MetadataTask
default_config_dir = './config'
default_config_dir = '/usr/local/etc/data-seeding/'
logg = logging.getLogger()
arg_parser = argparse.ArgumentParser(description='Daemon worker that handles data seeding tasks.')


@@ -1,11 +1,10 @@
[cic]
registry_address = 0x32E860c2A0645d1B7B005273696905F5D6DC5D05
registry_address =
token_index_address =
accounts_index_address =
declarator_address =
approval_escrow_address =
chain_spec = evm:bloxberg:8996
chain_spec =
tx_retry_delay =
trust_address = 0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
trust_address =
user_ussd_svc_service_port =
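The addresses and chain spec blanked out in this hunk are no longer hardcoded in the ini file; assuming confini's usual SECTION_KEY environment mapping, they are instead overlaid from environment variables, and the data-seeding compose service further down exports exactly those names (CIC_REGISTRY_ADDRESS, CIC_CHAIN_SPEC, and so on). A minimal sketch of supplying them by hand for a local run, with illustrative values copied from the compose defaults:

# hypothetical local run; with the ini values blank, confini resolves them from
# SECTION_KEY environment variables (the same names the compose service sets)
export CIC_REGISTRY_ADDRESS=0xea6225212005e86a4490018ded4bf37f3e772161
export CIC_CHAIN_SPEC=evm:bloxberg:8996
python create_import_users.py -vv -c config --dir out 10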


@@ -28,12 +28,11 @@ logg = logging.getLogger()
fake = Faker(['sl', 'en_US', 'no', 'de', 'ro'])
script_dir = os.path.realpath(os.path.dirname(__file__))
# config_dir = os.environ.get('CONFINI_DIR', '/usr/local/etc/cic')
config_dir = os.environ.get('CONFINI_DIR', os.path.join(script_dir, 'config'))
default_config_dir = './config'
argparser = argparse.ArgumentParser()
argparser.add_argument('-c', type=str, default=config_dir, help='Config dir')
argparser.add_argument('-c', type=str, default=default_config_dir, help='Config dir')
argparser.add_argument('--tag', type=str, action='append',
help='Tags to add to record')
argparser.add_argument('--gift-threshold', type=int,
@@ -53,7 +52,7 @@ elif args.vv:
config = confini.Config(args.c, os.environ.get('CONFINI_ENV_PREFIX'))
config.process()
logg.info('loaded config\n{}'.format(config))
logg.debug('loaded config\n{}'.format(config))
dt_now = datetime.datetime.utcnow()


@@ -13,7 +13,8 @@ COPY package.json \
RUN npm ci --production
#RUN --mount=type=cache,mode=0755,target=/root/node_modules npm install
COPY requirements.txt .
COPY requirements.txt .
COPY config/ /usr/local/etc/data-seeding
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
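Alongside the extra package indexes, the image now carries its config at /usr/local/etc/data-seeding, matching the new default_config_dir above. A rough standalone equivalent of the build that the data-seeding compose service below performs, assuming the context (apps/data-seeding) and dockerfile path it declares; the image tag is illustrative:

# hypothetical standalone build from the repository root
docker build \
  -f apps/data-seeding/docker/Dockerfile \
  --build-arg EXTRA_INDEX_URL=https://pip.grassrootseconomics.net:8433 \
  -t data-seeding:local \
  apps/data-seeding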


@@ -1,60 +1,94 @@
#!/usr/bin/env bash
#!/bin/bash
set -e
if [[ -d "$OUT_DIR" ]]
then
echo "found existing IMPORT DIR cleaning up..."
rm -rf "$OUT_DIR"
mkdir -p "$OUT_DIR"
else
echo "IMPORT DIR does not exist creating it."
mkdir -p "$OUT_DIR"
fi
# using timeout because the timeout flag for celery inspect does not work
timeout 5 celery inspect ping -b "$CELERY_BROKER_URL" || ping_status=$?
if [[ "${ping_status:-0}" -eq 124 ]]
then
>&2 echo "Celery workers not available. Is the CELERY_BROKER_URL ($CELERY_BROKER_URL) correct?"
exit 1
fi
echo "Creating seed data..."
python create_import_users.py -vv --dir "$IMPORT_DIR" "$ACCOUNT_COUNT"
python create_import_users.py -vv -c "$CONFIG" --dir "$OUT_DIR" "$NUMBER_OF_USERS"
wait $!
echo "Check for running celery workers ..."
if [ -f ./cic-ussd-import.pid ];
then
echo "Found a running worker. Killing ..."
kill -9 $(<cic-ussd-import.pid)
fi
echo "Purge tasks from celery worker"
celery -A cic_ussd.import_task purge -Q "$CELERY_QUEUE" --broker redis://"$REDIS_HOST":"$REDIS_PORT" -f
echo "Start celery work and import balance job"
if [ "$INCLUDE_BALANCES" != "y" ]
then
echo "Running worker without opening balance transactions"
TARGET_TX_COUNT=$ACCOUNT_COUNT
python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$IMPORT_DIR" &
TARGET_TX_COUNT=$NUMBER_OF_USERS
nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$OUT_DIR" > nohup.out 2> nohup.err < /dev/null &
else
echo "Running worker with opening balance transactions"
TARGET_TX_COUNT=$((ACCOUNT_COUNT*2))
python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --include-balances --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$IMPORT_DIR" &
TARGET_TX_COUNT=$((NUMBER_OF_USERS*2))
nohup python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --include-balances --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$OUT_DIR" &
fi
echo "Target count set to ${TARGET_TX_COUNT}"
until [ -f ./cic-import-ussd.pid ]
do
echo "Polling for celery worker pid file..."
sleep 1
done
IMPORT_BALANCE_JOB=$(<cic-import-ussd.pid)
echo "Start import users job"
if [ "$USSD_SSL" == "y" ]
then
echo "Targeting secure ussd-user server"
python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" "$IMPORT_DIR"
python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" "$OUT_DIR"
else
python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" --ussd-no-ssl "$IMPORT_DIR"
python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" --ussd-no-ssl "$OUT_DIR"
fi
echo "Waiting for import balance job to complete ..."
tail --pid="$IMPORT_BALANCE_JOB" -f /dev/null
set -e
echo "Importing pins"
python cic_ussd/import_pins.py -c "$CONFIG" -vv "$IMPORT_DIR"
python cic_ussd/import_pins.py -c "$CONFIG" -vv "$OUT_DIR"
set +e
wait $!
set -e
echo "Importing ussd data"
python cic_ussd/import_ussd_data.py -c "$CONFIG" -vv "$IMPORT_DIR"
python cic_ussd/import_ussd_data.py -c "$CONFIG" -vv "$OUT_DIR"
set +e
wait $!
echo "Importing person metadata"
node cic_meta/import_meta.js "$IMPORT_DIR" "$ACCOUNT_COUNT"
node cic_meta/import_meta.js "$OUT_DIR" "$NUMBER_OF_USERS"
echo "Import preferences metadata"
node cic_meta/import_meta_preferences.js "$IMPORT_DIR" "$ACCOUNT_COUNT"
node cic_meta/import_meta_preferences.js "$OUT_DIR" "$NUMBER_OF_USERS"
CIC_NOTIFY_DATABASE=postgres://$DATABASE_USER:$DATABASE_PASSWORD@$DATABASE_HOST:$DATABASE_PORT/$NOTIFY_DATABASE_NAME
NOTIFICATION_COUNT=$(psql -qtA "$CIC_NOTIFY_DATABASE" -c 'SELECT COUNT(message) FROM notification WHERE message IS NOT NULL')
while [[ "$NOTIFICATION_COUNT" -lt "$TARGET_TX_COUNT" ]]
do
NOTIFICATION_COUNT=$(psql -qtA "$CIC_NOTIFY_DATABASE" -c 'SELECT COUNT(message) FROM notification WHERE message IS NOT NULL')
sleep 5
echo "Notification count is: ${NOTIFICATION_COUNT}. Checking after 5 ..."
echo "Notification count is: ${NOTIFICATION_COUNT} of ${TARGET_TX_COUNT}. Checking after 5 ..."
done
python verify.py -c "$CONFIG" -v -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --exclude "$EXCLUSIONS" --token-symbol "$TOKEN_SYMBOL" "$IMPORT_DIR"
echo "Running verify script."
python verify.py -c "$CONFIG" -v -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --exclude "$EXCLUSIONS" --meta-provider "$META_URL" --token-symbol "$TOKEN_SYMBOL" --ussd-provider "$USSD_PROVIDER" "$OUT_DIR"


@@ -1,13 +1,14 @@
sarafu-faucet~=0.0.7a1
sarafu-faucet~=0.0.7a2
cic-eth[tools]~=0.12.4a8
cic-types~=0.1.0a15
crypto-dev-signer>=0.4.15a4,<=0.4.15
crypto-dev-signer~=0.4.15a7
faker==4.17.1
chainsyncer~=0.0.6a3
chainlib-eth~=0.0.9a13
chainlib-eth~=0.0.9a14
eth-address-index~=0.2.3a4
eth-contract-registry~=0.6.3a3
eth-accounts-index~=0.1.2a3
eth-erc20~=0.1.2a2
eth-erc20~=0.1.2a3
erc20-faucet~=0.3.2a2
psycopg2==2.8.6
liveness~=0.0.1a7
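At least some of these pins (sarafu-faucet, cic-eth and the other alpha releases) resolve from the Grassroots Economics package index rather than PyPI, which is why the Dockerfile takes EXTRA_INDEX_URL as a build arg. A hedged local install outside the container, assuming that index serves the versions pinned above:

# hypothetical local install; the extra index mirrors the Dockerfile build arg
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 -r requirements.txt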


@@ -164,20 +164,8 @@ for t in custodial_tests:
logg.info('activating custodial module {}'.format(t))
break
cols = os.get_terminal_size().columns
def to_terminalwidth(s):
ss = s.ljust(int(cols)-1)
ss += "\r"
return ss
def default_outfunc(s):
ss = to_terminalwidth(s)
sys.stdout.write(ss)
outfunc = default_outfunc
if logg.isEnabledFor(logging.DEBUG):
outfunc = logg.debug
outfunc = logg.debug
def send_ussd_request(address, data_dir):


@@ -1,12 +1,12 @@
image:
name: gcr.io/kaniko-project/executor:debug
entrypoint: [""]
variables:
KANIKO_CACHE_ARGS: "--cache=true --cache-copy-layers=true --cache-ttl=24h"
MR_IMAGE_TAG: $CI_REGISTRY_IMAGE/mergerequest/$APP_NAME:$CI_COMMIT_SHORT_SHA
.py_build_merge_request:
image:
name: gcr.io/kaniko-project/executor:debug
entrypoint: [""]
stage: build
script:
- mkdir -p /kaniko/.docker
@@ -16,6 +16,9 @@ variables:
--cache-repo $CI_REGISTRY_IMAGE --destination $MR_IMAGE_TAG
.py_build_target_dev:
image:
name: gcr.io/kaniko-project/executor:debug
entrypoint: [""]
stage: build
variables:
IMAGE_TAG_BASE: $CI_REGISTRY_IMAGE/$APP_NAME:mr-unittest-$CI_COMMIT_SHORT_SHA
@@ -28,6 +31,9 @@ variables:
--destination $MR_IMAGE_TAG
.py_build_push:
image:
name: gcr.io/kaniko-project/executor:debug
entrypoint: [""]
stage: build
variables:
IMAGE_TAG_BASE: $CI_REGISTRY_IMAGE/$APP_NAME:$CI_COMMIT_BRANCH-$CI_COMMIT_SHORT_SHA


@@ -94,8 +94,8 @@ services:
RUN_MASK: ${RUN_MASK:-0} # bit flags; 1: contract migrations 2: seed data
DEV_FAUCET_AMOUNT: ${DEV_FAUCET_AMOUNT:-0}
DEV_ETH_GAS_PRICE: $DEV_ETH_GAS_PRICE
TOKEN_NAME: ${TOKEN_NAME:-Demurrage Token}
TOKEN_SYMBOL: ${TOKEN_SYMBOL:-DET}
TOKEN_NAME: ${TOKEN_NAME:-Giftable Token}
TOKEN_SYMBOL: ${TOKEN_SYMBOL:-GFT}
TOKEN_DECIMALS: $TOKEN_DECIMALS
TOKEN_REDISTRIBUTION_PERIOD: $TOKEN_DEMURRAGE_REDISTRIBUTION_PERIOD
TASKS_TRANSFER_CALLBACKS: ${TASKS_TRANSFER_CALLBACKS:-"cic-eth:cic_eth.callbacks.noop.noop,cic-ussd:cic_ussd.tasks.callback_handler.transaction_callback"}
@@ -115,6 +115,49 @@ services:
volumes:
- contract-config:/tmp/cic/config
data-seeding:
image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/data-seeding:${TAG:-latest}
build:
context: apps/data-seeding
dockerfile: docker/Dockerfile
args:
pip_index_url: ${PIP_DEFAULT_INDEX_URL:-https://pypi.org/simple}
pip_extra_args: $PIP_EXTRA_ARGS
environment:
CIC_REGISTRY_ADDRESS: ${CIC_REGISTRY_ADDRESS:-0xea6225212005e86a4490018ded4bf37f3e772161}
OUT_DIR: out
NUMBER_OF_USERS: 10
CONFIG: /usr/local/etc/data-seeding
CIC_CHAIN_SPEC: ${CIC_CHAIN_SPEC:-evm:bloxberg:8996}
ETH_PROVIDER: ${CIC_HTTP_PROVIDER:-http://eth:8545}
TOKEN_SYMBOL: GFT
KEYSTORE_PATH: keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c
USSD_HOST: cic-user-ussd-server
USSD_PORT: 9000
INCLUDE_BALANCES: y
USSD_SSL: n
DATABASE_NAME: ${DATABASE_NAME:-cic_ussd}
DATABASE_USER: ${DATABASE_USER:-grassroots}
DATABASE_PASSWORD: ${DATABASE_PASSWORD:-tralala} # this is set at initdb see: postgres/initdb/create_db.sql
DATABASE_HOST: ${DATABASE_HOST:-postgres}
DATABASE_PORT: ${DATABASE_PORT:-5432}
CELERY_BROKER_URL: ${CELERY_BROKER_URL:-redis://redis:6379}
CELERY_RESULT_URL: ${CELERY_RESULT_URL:-redis://redis:6379}
NOTIFY_DATABASE_NAME: cic_notify
REDIS_HOST: redis
REDIS_PORT: 6379
REDIS_DB: 0
META_HOST: meta
META_PORT: 8000
META_URL: http://meta:8000
USSD_PROVIDER: http://cic-user-ussd-server:9000
CELERY_QUEUE: cic-import-ussd
EXCLUSIONS: ussd
command: bash import_ussd.sh
volumes:
- contract-config:/tmp/cic/config/:ro
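With this service defined, the whole seeding run can be started as a one-shot container once its dependencies are up; a hedged invocation, assuming the dependency service names implied by the host values above (eth, postgres, redis, meta, cic-user-ussd-server):

# hypothetical one-shot run of the seeding job (command defaults to bash import_ussd.sh)
docker-compose up -d eth postgres redis meta cic-user-ussd-server
docker-compose run --rm data-seeding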
cic-cache-tracker:
image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-cache:${TAG:-latest}
profiles: