Compare commits

..

34 Commits

Author SHA1 Message Date
nolash
5b8ea95a8e Bump chainlib to take dynamic arg vector for argparse 2021-09-17 08:12:51 +02:00
nolash
32b15d6d04 Update signer to fill in missing sign to wire symbol 2021-09-15 21:19:05 +02:00
1e0c475f39 Merge branch 'lash/contract-migration-dump' into 'master'
fix(contract-migration): Replace missing environment

See merge request grassrootseconomics/cic-internal-integration!271
2021-09-15 13:36:10 +00:00
Louis Holbrook
3e6cf594e3 fix(contract-migration): Replace missing environment 2021-09-15 13:36:09 +00:00
b8f79a2dd1 Merge branch 'bvander/extend-timoeout-contract-migration' into 'master'
chore: extend timeout for contract migration

See merge request grassrootseconomics/cic-internal-integration!272
2021-09-15 11:08:14 +00:00
540c2fd950 chore: extend timeout for contract migration 2021-09-15 14:04:18 +03:00
b9b06eced8 Merge branch 'staging' into 'master'
feat: add multi-project build to trigger deploy-k8s-dev in the devops repo

See merge request grassrootseconomics/cic-internal-integration!268
2021-09-09 21:28:49 +00:00
949bb29379 feat: add multi-project build to trigger deploy-k8s-dev in the devops repo 2021-09-09 21:28:49 +00:00
Louis Holbrook
0468906601 Merge branch 'lash/contract-migration-dump' into 'master'
fix(contract-migration): Make http signer work with local deployment

See merge request grassrootseconomics/cic-internal-integration!270
2021-09-09 13:38:10 +00:00
nolash
471243488e Add latest crypto-dev-signer install explicitly in dockerfile for cic-eth 2021-09-09 14:27:47 +02:00
3c4acd82ff test eth and meta only mrs 2021-09-07 11:56:17 -07:00
e07f992c5a Add new file 2021-09-07 18:38:20 +00:00
17e95cb19c Merge branch 'bvander/signer-as-service' into 'master'
run signer as a service over http

See merge request grassrootseconomics/cic-internal-integration!265
2021-09-06 19:10:08 +00:00
3c3a97ce15 run signer as a service over http 2021-09-06 12:07:57 -07:00
a492be4927 Merge branch 'lash/contract-migration-config' into 'master'
Sanitize contract migration configs

See merge request grassrootseconomics/cic-internal-integration!262
2021-09-06 10:06:58 +00:00
Louis Holbrook
1f555748b0 Sanitize contract migration configs 2021-09-06 10:06:58 +00:00
8aa4d20eea Update .gitlab-ci.yml 2021-09-01 20:00:44 +00:00
Louis Holbrook
90cf24dcee Merge branch 'lash/lockfix' into 'master'
Rehabilitate imports

See merge request grassrootseconomics/cic-internal-integration!261
2021-09-01 16:54:10 +00:00
Louis Holbrook
75b711dbd5 Rehabilitate imports 2021-09-01 16:54:10 +00:00
c21c1eb2ef fix data seeding node installs 2021-08-31 11:43:01 -07:00
eb5e612105 minor update to import_ussd script 2021-08-30 11:09:47 -07:00
e017d11770 update readme 2021-08-30 10:14:22 -07:00
e327af68e1 Merge branch 'philip/refactor-import-scripts' into 'master'
Consolidated ussd dataseeding script

See merge request grassrootseconomics/cic-internal-integration!252
2021-08-29 09:55:47 +00:00
92cc6a3f27 Consolidated ussd dataseeding script 2021-08-29 09:55:47 +00:00
f42bf7754a Merge branch 'lash/lockfix' into 'master'
Normalize initial INIT lock address

See merge request grassrootseconomics/cic-internal-integration!260
2021-08-29 09:07:46 +00:00
nolash
7342927e91 Normalize initial INIT lock address 2021-08-29 10:43:12 +02:00
17333af88f Merge branch 'bvander/docker-vm-builds' into 'master'
docker vm builds

See merge request grassrootseconomics/cic-internal-integration!259
2021-08-28 16:26:16 +00:00
6a68d2ed32 docker vm builds 2021-08-28 16:26:16 +00:00
Louis Holbrook
ef77f4c99a Merge branch 'lash/normalize-backend-tx' into 'master'
Normalize tx data for backend

Closes cic-eth#133

See merge request grassrootseconomics/cic-internal-integration!258
2021-08-28 11:10:18 +00:00
Louis Holbrook
56dbe8a502 Normalize tx data for backend 2021-08-28 11:10:18 +00:00
Louis Holbrook
2dc8ac6a12 Merge branch 'lash/upgrade-outer-tools' into 'master'
Upgrade outer tools

See merge request grassrootseconomics/cic-internal-integration!257
2021-08-27 13:13:00 +00:00
Louis Holbrook
0ced68e224 Upgrade outer tools 2021-08-27 13:13:00 +00:00
2afb20e715 Merge branch 'philip/ussd-post-test-bug-fixes' into 'master'
USSD post-test bug fixes

See merge request grassrootseconomics/cic-internal-integration!244
2021-08-25 10:33:35 +00:00
3b0113d0e4 USSD post-test bug fixes 2021-08-25 10:33:35 +00:00
112 changed files with 3644 additions and 1728 deletions

View File

@@ -1,14 +1,43 @@
include: include:
- local: 'ci_templates/.cic-template.yml' #- local: 'ci_templates/.cic-template.yml' #kaniko build templates
- local: 'apps/contract-migration/.gitlab-ci.yml' # these includes are app specific unit tests
- local: 'apps/cic-eth/.gitlab-ci.yml' - local: 'apps/cic-eth/.gitlab-ci.yml'
- local: 'apps/cic-ussd/.gitlab-ci.yml' - local: 'apps/cic-ussd/.gitlab-ci.yml'
- local: 'apps/cic-notify/.gitlab-ci.yml' - local: 'apps/cic-notify/.gitlab-ci.yml'
- local: 'apps/cic-meta/.gitlab-ci.yml' - local: 'apps/cic-meta/.gitlab-ci.yml'
- local: 'apps/cic-cache/.gitlab-ci.yml' - local: 'apps/cic-cache/.gitlab-ci.yml'
- local: 'apps/data-seeding/.gitlab-ci.yml' #- local: 'apps/contract-migration/.gitlab-ci.yml'
#- local: 'apps/data-seeding/.gitlab-ci.yml'
stages: stages:
- build - build
- test - test
- release - deploy
image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/docker-with-compose:latest
variables:
DOCKER_BUILDKIT: "1"
COMPOSE_DOCKER_CLI_BUILD: "1"
CI_DEBUG_TRACE: "true"
before_script:
- docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY
# runs on protected branches and pushes to repo
build-push:
stage: build
tags:
- integration
#script:
# - TAG=$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA sh ./scripts/build-push.sh
script:
- TAG=latest sh ./scripts/build-push.sh
rules:
- if: $CI_COMMIT_REF_PROTECTED == "true"
when: always
deploy-dev:
stage: deploy
trigger: grassrootseconomics/devops
when: manual

View File

@@ -2,25 +2,21 @@
## Getting started ## Getting started
## Make some keys This repo uses docker-compose and docker buildkit. Set the following environment variables to get started:
``` ```
docker build -t bloxie . && docker run -v "$(pwd)/keys:/root/keys" --rm -it -t bloxie account new --chain /root/bloxberg.json --keys-path /root/keys export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1
``` ```
start services, database, redis and local ethereum node
### Prepare the repo
This is stuff we need to put in makefile but for now...
File mounts and permissions need to be set
``` ```
chmod -R 755 scripts/initdb apps/cic-meta/scripts/initdb docker-compose up -d
````
start cluster
``` ```
docker-compose up
Run app/contract-migration to deploy contracts
```
RUN_MASK=3 docker-compose up contract-migration
``` ```
stop cluster stop cluster
@@ -28,7 +24,7 @@ stop cluster
docker-compose down docker-compose down
``` ```
delete data stop cluster and delete data
``` ```
docker-compose down -v docker-compose down -v
``` ```
@@ -38,5 +34,4 @@ rebuild an images
docker-compose up --build <service_name> docker-compose up --build <service_name>
``` ```
Deployment variables are written to service-configs/.env after everything is up.

View File

@@ -1,34 +0,0 @@
# The solc image messes up the alpine environment, so we have to go all over again
FROM python:3.8.6-slim-buster
LABEL authors="Louis Holbrook <dev@holbrook.no> 0826EDA1702D1E87C6E2875121D2E7BB88C2A746"
LABEL spdx-license-identifier="GPL-3.0-or-later"
LABEL description="Base layer for buiding development images for the cic component suite"
RUN apt-get update && \
apt-get install -y git gcc g++ libpq-dev && \
apt-get install -y vim gawk jq telnet openssl iputils-ping curl wget gnupg socat bash procps make python2 postgresql-client
RUN echo installing nodejs tooling
COPY ./dev/nvm.sh /root/
# Install nvm with node and npm
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
ENV NVM_DIR /root/.nvm
ENV NODE_VERSION 15.3.0
ENV BANCOR_NODE_VERSION 10.16.0
RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash \
&& . $NVM_DIR/nvm.sh \
&& nvm install $NODE_VERSION \
&& nvm alias default $NODE_VERSION \
&& nvm use $NODE_VERSION \
# So many ridiculously stupid issues with node in docker that take oceans of absolutely wasted time to resolve
# owner of these files is "1001" by default - wtf
&& chown -R root:root "$NVM_DIR/versions/node/v$NODE_VERSION"
ENV NODE_PATH $NVM_DIR/versions/node//v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH

View File

@@ -1 +0,0 @@
## this is an example base image if we wanted one for all the other apps. Its just OS level things

View File

@@ -1,52 +1,17 @@
.cic_cache_variables: build-test-cic-cache:
variables: stage: test
APP_NAME: cic-cache tags:
DOCKERFILE_PATH: docker/Dockerfile_ci - integration
CONTEXT: apps/$APP_NAME variables:
APP_NAME: cic-cache
build-mr-cic-cache: MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
extends: script:
- .py_build_merge_request - cd apps/cic-cache
- .cic_cache_variables - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
rules: - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
- if: $CI_PIPELINE_SOURCE == "merge_request_event" allow_failure: true
changes: rules:
- apps/cic-cache/**/* - if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: always changes:
- apps/$APP_NAME/**/*
test-mr-cic-cache: when: always
stage: test
extends:
- .cic_cache_variables
cache:
key:
files:
- test_requirements.txt
paths:
- /root/.cache/pip
image: $MR_IMAGE_TAG
script:
- cd apps/$APP_NAME/
- >
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
-r test_requirements.txt
- export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
needs: ["build-mr-cic-cache"]
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- apps/$APP_NAME/**/*
when: always
build-push-cic-cache:
extends:
- .py_build_push
- .cic_cache_variables
rules:
- if: $CI_COMMIT_BRANCH == "master"
changes:
- apps/cic-cache/**/*
when: always

View File

@@ -0,0 +1 @@
# CIC-CACHE

View File

@@ -5,7 +5,7 @@ version = (
0, 0,
2, 2,
1, 1,
'alpha.1', 'alpha.2',
) )
version_object = semver.VersionInfo( version_object = semver.VersionInfo(

View File

@@ -1,38 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
COPY requirements.txt .
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
#RUN pip install $pip_extra_index_url_flag .
#RUN pip install .[server]
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG EXTRA_PIP_ARGS=""
RUN pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt
COPY . .
RUN python setup.py install
# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-cache/
# for db migrations
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
COPY /docker/start_tracker.sh ./start_tracker.sh
COPY /docker/db.sh ./db.sh
RUN chmod 755 ./*.sh
# Tracker
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
# Server
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
ENTRYPOINT []

View File

@@ -0,0 +1,10 @@
#! /bin/bash
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
-r test_requirements.txt
export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests

View File

@@ -8,8 +8,8 @@ semver==2.13.0
psycopg2==2.8.6 psycopg2==2.8.6
celery==4.4.7 celery==4.4.7
redis==3.5.3 redis==3.5.3
chainsyncer[sql]>=0.0.6a1,<0.1.0 chainsyncer[sql]>=0.0.6a3,<0.1.0
erc20-faucet>=0.3.2a1, <0.4.0 erc20-faucet>=0.3.2a1, <0.4.0
chainlib-eth>=0.0.9a3,<0.1.0 chainlib-eth>=0.0.9a7,<0.1.0
chainlib>=0.0.9a2,<0.1.0 chainlib>=0.0.9a3,<0.1.0
eth-address-index>=0.2.3a1,<0.3.0 eth-address-index>=0.2.3a1,<0.3.0

View File

@@ -1,52 +1,16 @@
.cic_eth_variables: build-test-cic-eth:
variables: stage: test
APP_NAME: cic-eth tags:
DOCKERFILE_PATH: docker/Dockerfile_ci - integration
CONTEXT: apps/$APP_NAME variables:
APP_NAME: cic-eth
build-mr-cic-eth: MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
extends: script:
- .cic_eth_variables - cd apps/cic-eth
- .py_build_target_dev - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
rules: - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event" - if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes: changes:
- apps/cic-eth/**/* - apps/$APP_NAME/**/*
when: always
test-mr-cic-eth:
stage: test
extends:
- .cic_eth_variables
cache:
key:
files:
- test_requirements.txt
paths:
- /root/.cache/pip
image: $MR_IMAGE_TAG
script:
- cd apps/$APP_NAME/
- >
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
-r admin_requirements.txt
-r services_requirements.txt
-r test_requirements.txt
- export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
needs: ["build-mr-cic-eth"]
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- apps/cic-eth/**/*
when: always
build-push-cic-eth:
extends:
- .py_build_push
- .cic_eth_variables
rules:
- if: $CI_COMMIT_BRANCH == "master"
changes:
- apps/cic-eth/**/*
when: always when: always

View File

@@ -1,5 +1,5 @@
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
cic-eth-registry>=0.6.1a2,<0.7.0 cic-eth-registry>=0.6.1a2,<0.7.0
hexathon~=0.0.1a8 hexathon~=0.0.1a8
chainqueue>=0.0.3a2,<0.1.0 chainqueue>=0.0.4a6,<0.1.0
eth-erc20>=0.1.2a2,<0.2.0 eth-erc20>=0.1.2a2,<0.2.0

View File

@@ -4,7 +4,6 @@ import logging
# external imports # external imports
import celery import celery
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from hexathon import ( from hexathon import (
add_0x, add_0x,
@@ -20,18 +19,17 @@ from cic_eth.task import (
CriticalSQLAlchemyTask, CriticalSQLAlchemyTask,
) )
from cic_eth.error import LockedError from cic_eth.error import LockedError
from cic_eth.encode import (
tx_normalize,
ZERO_ADDRESS_NORMAL,
)
celery_app = celery.current_app celery_app = celery.current_app
logg = logging.getLogger() logg = logging.getLogger()
def normalize_address(a):
if a == None:
return None
return add_0x(hex_uniform(strip_0x(a)))
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL, tx_hash=None): def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, flags=LockEnum.ALL, tx_hash=None):
"""Task wrapper to set arbitrary locks """Task wrapper to set arbitrary locks
:param chain_str: Chain spec string representation :param chain_str: Chain spec string representation
@@ -43,7 +41,7 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL
:returns: New lock state for address :returns: New lock state for address
:rtype: number :rtype: number
""" """
address = normalize_address(address) address = tx_normalize.wallet_address(address)
chain_str = '::' chain_str = '::'
if chain_spec_dict != None: if chain_spec_dict != None:
chain_str = str(ChainSpec.from_dict(chain_spec_dict)) chain_str = str(ChainSpec.from_dict(chain_spec_dict))
@@ -53,7 +51,7 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL): def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, flags=LockEnum.ALL):
"""Task wrapper to reset arbitrary locks """Task wrapper to reset arbitrary locks
:param chain_str: Chain spec string representation :param chain_str: Chain spec string representation
@@ -65,7 +63,7 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.
:returns: New lock state for address :returns: New lock state for address
:rtype: number :rtype: number
""" """
address = normalize_address(address) address = tx_normalize.wallet_address(address)
chain_str = '::' chain_str = '::'
if chain_spec_dict != None: if chain_spec_dict != None:
chain_str = str(ChainSpec.from_dict(chain_spec_dict)) chain_str = str(ChainSpec.from_dict(chain_spec_dict))
@@ -75,7 +73,7 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None): def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, tx_hash=None):
"""Task wrapper to set send lock """Task wrapper to set send lock
:param chain_str: Chain spec string representation :param chain_str: Chain spec string representation
@@ -85,7 +83,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None
:returns: New lock state for address :returns: New lock state for address
:rtype: number :rtype: number
""" """
address = normalize_address(address) address = tx_normalize.wallet_address(address)
chain_str = str(ChainSpec.from_dict(chain_spec_dict)) chain_str = str(ChainSpec.from_dict(chain_spec_dict))
r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash) r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
logg.debug('Send locked for {}, flag now {}'.format(address, r)) logg.debug('Send locked for {}, flag now {}'.format(address, r))
@@ -93,7 +91,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS): def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL):
"""Task wrapper to reset send lock """Task wrapper to reset send lock
:param chain_str: Chain spec string representation :param chain_str: Chain spec string representation
@@ -103,7 +101,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
:returns: New lock state for address :returns: New lock state for address
:rtype: number :rtype: number
""" """
address = normalize_address(address) address = tx_normalize.wallet_address(address)
chain_str = str(ChainSpec.from_dict(chain_spec_dict)) chain_str = str(ChainSpec.from_dict(chain_spec_dict))
r = Lock.reset(chain_str, LockEnum.SEND, address=address) r = Lock.reset(chain_str, LockEnum.SEND, address=address)
logg.debug('Send unlocked for {}, flag now {}'.format(address, r)) logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
@@ -111,7 +109,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None): def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, tx_hash=None):
"""Task wrapper to set queue direct lock """Task wrapper to set queue direct lock
:param chain_str: Chain spec string representation :param chain_str: Chain spec string representation
@@ -121,7 +119,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non
:returns: New lock state for address :returns: New lock state for address
:rtype: number :rtype: number
""" """
address = normalize_address(address) address = tx_normalize.wallet_address(address)
chain_str = str(ChainSpec.from_dict(chain_spec_dict)) chain_str = str(ChainSpec.from_dict(chain_spec_dict))
r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash) r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
logg.debug('Queue direct locked for {}, flag now {}'.format(address, r)) logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
@@ -129,7 +127,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS): def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL):
"""Task wrapper to reset queue direct lock """Task wrapper to reset queue direct lock
:param chain_str: Chain spec string representation :param chain_str: Chain spec string representation
@@ -139,7 +137,7 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
:returns: New lock state for address :returns: New lock state for address
:rtype: number :rtype: number
""" """
address = normalize_address(address) address = tx_normalize.wallet_address(address)
chain_str = str(ChainSpec.from_dict(chain_spec_dict)) chain_str = str(ChainSpec.from_dict(chain_spec_dict))
r = Lock.reset(chain_str, LockEnum.QUEUE, address=address) r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r)) logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
@@ -148,12 +146,13 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def check_lock(chained_input, chain_spec_dict, lock_flags, address=None): def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
address = normalize_address(address) if address != None:
address = tx_normalize.wallet_address(address)
chain_str = '::' chain_str = '::'
if chain_spec_dict != None: if chain_spec_dict != None:
chain_str = str(ChainSpec.from_dict(chain_spec_dict)) chain_str = str(ChainSpec.from_dict(chain_spec_dict))
session = SessionBase.create_session() session = SessionBase.create_session()
r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS, session=session) r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS_NORMAL, session=session)
if address != None: if address != None:
r |= Lock.check(chain_str, lock_flags, address=address, session=session) r |= Lock.check(chain_str, lock_flags, address=address, session=session)
if r > 0: if r > 0:

View File

@@ -33,6 +33,7 @@ from cic_eth.admin.ctrl import (
from cic_eth.queue.tx import queue_create from cic_eth.queue.tx import queue_create
from cic_eth.eth.gas import create_check_gas_task from cic_eth.eth.gas import create_check_gas_task
from cic_eth.task import BaseTask from cic_eth.task import BaseTask
from cic_eth.encode import tx_normalize
celery_app = celery.current_app celery_app = celery.current_app
logg = logging.getLogger() logg = logging.getLogger()
@@ -73,7 +74,7 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):
set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session) set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session)
query_address = add_0x(hex_uniform(strip_0x(address))) # aaaaargh query_address = tx_normalize.wallet_address(address)
q = session.query(Otx) q = session.query(Otx)
q = q.join(TxCache) q = q.join(TxCache)
q = q.filter(TxCache.sender==query_address) q = q.filter(TxCache.sender==query_address)

View File

@@ -32,7 +32,6 @@ from chainqueue.db.enum import (
status_str, status_str,
) )
from chainqueue.error import TxStateChangeError from chainqueue.error import TxStateChangeError
from chainqueue.sql.query import get_tx
from eth_erc20 import ERC20 from eth_erc20 import ERC20
# local imports # local imports
@@ -40,6 +39,7 @@ from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.role import AccountRole from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.nonce import Nonce from cic_eth.db.models.nonce import Nonce
from cic_eth.error import InitializationError from cic_eth.error import InitializationError
from cic_eth.queue.query import get_tx_local
app = celery.current_app app = celery.current_app
@@ -284,7 +284,7 @@ class AdminApi:
tx_hash_hex = None tx_hash_hex = None
session = SessionBase.create_session() session = SessionBase.create_session()
for k in txs.keys(): for k in txs.keys():
tx_dict = get_tx(chain_spec, k, session=session) tx_dict = get_tx_local(chain_spec, k, session=session)
if tx_dict['nonce'] == nonce: if tx_dict['nonce'] == nonce:
tx_hash_hex = k tx_hash_hex = k
session.close() session.close()

View File

@@ -35,14 +35,14 @@ class RPC:
def from_config(config, use_signer=False, default_label='default', signer_label='signer'): def from_config(config, use_signer=False, default_label='default', signer_label='signer'):
chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC')) chain_spec = ChainSpec.from_chain_str(config.get('CHAIN_SPEC'))
RPCConnection.register_location(config.get('RPC_HTTP_PROVIDER'), chain_spec, default_label) RPCConnection.register_location(config.get('RPC_PROVIDER'), chain_spec, default_label)
if use_signer: if use_signer:
RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, signer_label) RPCConnection.register_constructor(ConnType.UNIX, EthUnixSignerConnection, signer_label)
RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, signer_label) RPCConnection.register_constructor(ConnType.HTTP, EthHTTPSignerConnection, signer_label)
RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, signer_label) RPCConnection.register_constructor(ConnType.HTTP_SSL, EthHTTPSignerConnection, signer_label)
RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, signer_label) RPCConnection.register_location(config.get('SIGNER_PROVIDER'), chain_spec, signer_label)
rpc = RPC(chain_spec, config.get('RPC_HTTP_PROVIDER'), signer_provider=config.get('SIGNER_PROVIDER')) rpc = RPC(chain_spec, config.get('RPC_PROVIDER'), signer_provider=config.get('SIGNER_PROVIDER'))
logg.info('set up rpc: {}'.format(rpc)) logg.info('set up rpc: {}'.format(rpc))
return rpc return rpc

View File

@@ -8,8 +8,8 @@ Create Date: 2021-04-02 18:41:20.864265
import datetime import datetime
from alembic import op from alembic import op
import sqlalchemy as sa import sqlalchemy as sa
from chainlib.eth.constant import ZERO_ADDRESS
from cic_eth.db.enum import LockEnum from cic_eth.db.enum import LockEnum
from cic_eth.encode import ZERO_ADDRESS_NORMAL
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
@@ -30,7 +30,7 @@ def upgrade():
sa.Column("otx_id", sa.Integer, sa.ForeignKey('otx.id'), nullable=True), sa.Column("otx_id", sa.Integer, sa.ForeignKey('otx.id'), nullable=True),
) )
op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True) op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True)
op.execute("INSERT INTO lock (address, date_created, blockchain, flags) VALUES('{}', '{}', '::', {})".format(ZERO_ADDRESS, datetime.datetime.utcnow(), LockEnum.INIT | LockEnum.SEND | LockEnum.QUEUE)) op.execute("INSERT INTO lock (address, date_created, blockchain, flags) VALUES('{}', '{}', '::', {})".format(ZERO_ADDRESS_NORMAL, datetime.datetime.utcnow(), LockEnum.INIT | LockEnum.SEND | LockEnum.QUEUE))
def downgrade(): def downgrade():

View File

@@ -4,12 +4,12 @@ import logging
# third-party imports # third-party imports
from sqlalchemy import Column, String, Integer, DateTime, ForeignKey from sqlalchemy import Column, String, Integer, DateTime, ForeignKey
from chainlib.eth.constant import ZERO_ADDRESS
from chainqueue.db.models.tx import TxCache from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
# local imports # local imports
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
from cic_eth.encode import ZERO_ADDRESS_NORMAL
logg = logging.getLogger() logg = logging.getLogger()
@@ -37,7 +37,7 @@ class Lock(SessionBase):
@staticmethod @staticmethod
def set(chain_str, flags, address=ZERO_ADDRESS, session=None, tx_hash=None): def set(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None, tx_hash=None):
"""Sets flags associated with the given address and chain. """Sets flags associated with the given address and chain.
If a flags entry does not exist it is created. If a flags entry does not exist it is created.
@@ -90,7 +90,7 @@ class Lock(SessionBase):
@staticmethod @staticmethod
def reset(chain_str, flags, address=ZERO_ADDRESS, session=None): def reset(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None):
"""Resets flags associated with the given address and chain. """Resets flags associated with the given address and chain.
If the resulting flags entry value is 0, the entry will be deleted. If the resulting flags entry value is 0, the entry will be deleted.
@@ -134,7 +134,7 @@ class Lock(SessionBase):
@staticmethod @staticmethod
def check(chain_str, flags, address=ZERO_ADDRESS, session=None): def check(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None):
"""Checks whether all given flags are set for given address and chain. """Checks whether all given flags are set for given address and chain.
Does not validate the address against any other tables or components. Does not validate the address against any other tables or components.

View File

@@ -0,0 +1,16 @@
# external imports
from chainlib.eth.constant import ZERO_ADDRESS
from chainqueue.encode import TxHexNormalizer
from chainlib.eth.tx import unpack
tx_normalize = TxHexNormalizer()
ZERO_ADDRESS_NORMAL = tx_normalize.wallet_address(ZERO_ADDRESS)
def unpack_normal(signed_tx_bytes, chain_spec):
tx = unpack(signed_tx_bytes, chain_spec)
tx['hash'] = tx_normalize.tx_hash(tx['hash'])
tx['from'] = tx_normalize.wallet_address(tx['from'])
tx['to'] = tx_normalize.wallet_address(tx['to'])
return tx

View File

@@ -14,10 +14,7 @@ from chainlib.eth.sign import (
sign_message, sign_message,
) )
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.tx import ( from chainlib.eth.tx import TxFormat
TxFormat,
unpack,
)
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.error import JSONRPCException from chainlib.error import JSONRPCException
from eth_accounts_index.registry import AccountRegistry from eth_accounts_index.registry import AccountRegistry
@@ -49,6 +46,10 @@ from cic_eth.eth.nonce import (
from cic_eth.queue.tx import ( from cic_eth.queue.tx import (
register_tx, register_tx,
) )
from cic_eth.encode import (
unpack_normal,
ZERO_ADDRESS_NORMAL,
)
logg = logging.getLogger() logg = logging.getLogger()
celery_app = celery.current_app celery_app = celery.current_app
@@ -295,17 +296,17 @@ def cache_gift_data(
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex)) tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
tx = unpack(tx_signed_raw_bytes, chain_spec) tx = unpack_normal(tx_signed_raw_bytes, chain_spec)
tx_data = Faucet.parse_give_to_request(tx['data']) tx_data = Faucet.parse_give_to_request(tx['data'])
session = self.create_session() session = self.create_session()
tx_dict = { tx_dict = {
'hash': tx_hash_hex, 'hash': tx['hash'],
'from': tx['from'], 'from': tx['from'],
'to': tx['to'], 'to': tx['to'],
'source_token': ZERO_ADDRESS, 'source_token': ZERO_ADDRESS_NORMAL,
'destination_token': ZERO_ADDRESS, 'destination_token': ZERO_ADDRESS_NORMAL,
'from_value': 0, 'from_value': 0,
'to_value': 0, 'to_value': 0,
} }
@@ -334,17 +335,17 @@ def cache_account_data(
:rtype: tuple :rtype: tuple
""" """
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:]) tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
tx = unpack(tx_signed_raw_bytes, chain_spec) tx = unpack_normal(tx_signed_raw_bytes, chain_spec)
tx_data = AccountsIndex.parse_add_request(tx['data']) tx_data = AccountsIndex.parse_add_request(tx['data'])
session = SessionBase.create_session() session = SessionBase.create_session()
tx_dict = { tx_dict = {
'hash': tx_hash_hex, 'hash': tx['hash'],
'from': tx['from'], 'from': tx['from'],
'to': tx['to'], 'to': tx['to'],
'source_token': ZERO_ADDRESS, 'source_token': ZERO_ADDRESS_NORMAL,
'destination_token': ZERO_ADDRESS, 'destination_token': ZERO_ADDRESS_NORMAL,
'from_value': 0, 'from_value': 0,
'to_value': 0, 'to_value': 0,
} }

View File

@@ -3,8 +3,11 @@ import logging
# external imports # external imports
import celery import celery
from hexathon import strip_0x from hexathon import (
from chainlib.eth.constant import ZERO_ADDRESS strip_0x,
add_0x,
)
#from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.address import is_checksum_address from chainlib.eth.address import is_checksum_address
from chainlib.connection import RPCConnection from chainlib.connection import RPCConnection
@@ -21,7 +24,6 @@ from chainlib.eth.error import (
from chainlib.eth.tx import ( from chainlib.eth.tx import (
TxFactory, TxFactory,
TxFormat, TxFormat,
unpack,
) )
from chainlib.eth.contract import ( from chainlib.eth.contract import (
abi_decode_single, abi_decode_single,
@@ -45,6 +47,7 @@ from cic_eth.eth.nonce import CustodialTaskNonceOracle
from cic_eth.queue.tx import ( from cic_eth.queue.tx import (
queue_create, queue_create,
register_tx, register_tx,
unpack,
) )
from cic_eth.queue.query import get_tx from cic_eth.queue.query import get_tx
from cic_eth.task import ( from cic_eth.task import (
@@ -53,6 +56,11 @@ from cic_eth.task import (
CriticalSQLAlchemyAndSignerTask, CriticalSQLAlchemyAndSignerTask,
CriticalWeb3AndSignerTask, CriticalWeb3AndSignerTask,
) )
from cic_eth.encode import (
tx_normalize,
ZERO_ADDRESS_NORMAL,
unpack_normal,
)
celery_app = celery.current_app celery_app = celery.current_app
logg = logging.getLogger() logg = logging.getLogger()
@@ -66,6 +74,7 @@ class MaxGasOracle:
return MAXIMUM_FEE_UNITS return MAXIMUM_FEE_UNITS
#def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None): def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
"""Creates a celery task signature for a check_gas task that adds the task to the outgoing queue to be processed by the dispatcher. """Creates a celery task signature for a check_gas task that adds the task to the outgoing queue to be processed by the dispatcher.
@@ -130,16 +139,16 @@ def cache_gas_data(
""" """
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex)) tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
tx = unpack(tx_signed_raw_bytes, chain_spec) tx = unpack_normal(tx_signed_raw_bytes, chain_spec)
session = SessionBase.create_session() session = SessionBase.create_session()
tx_dict = { tx_dict = {
'hash': tx_hash_hex, 'hash': tx['hash'],
'from': tx['from'], 'from': tx['from'],
'to': tx['to'], 'to': tx['to'],
'source_token': ZERO_ADDRESS, 'source_token': ZERO_ADDRESS_NORMAL,
'destination_token': ZERO_ADDRESS, 'destination_token': ZERO_ADDRESS_NORMAL,
'from_value': tx['value'], 'from_value': tx['value'],
'to_value': tx['value'], 'to_value': tx['value'],
} }
@@ -150,7 +159,7 @@ def cache_gas_data(
@celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyAndWeb3Task) @celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyAndWeb3Task)
def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=MAXIMUM_FEE_UNITS): def check_gas(self, tx_hashes_hex, chain_spec_dict, txs_hex=[], address=None, gas_required=MAXIMUM_FEE_UNITS):
"""Check the gas level of the sender address of a transaction. """Check the gas level of the sender address of a transaction.
If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raiser. If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raiser.
@@ -170,6 +179,21 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
:return: Signed raw transaction data list :return: Signed raw transaction data list
:rtype: param txs, unchanged :rtype: param txs, unchanged
""" """
if address != None:
if not is_checksum_address(address):
raise ValueError('invalid address {}'.format(address))
address = tx_normalize.wallet_address(address)
address = add_0x(address)
tx_hashes = []
txs = []
for tx_hash in tx_hashes_hex:
tx_hash = tx_normalize.tx_hash(tx_hash)
tx_hashes.append(tx_hash)
for tx in txs_hex:
tx = tx_normalize.tx_wire(tx)
txs.append(tx)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
logg.debug('txs {} tx_hashes {}'.format(txs, tx_hashes)) logg.debug('txs {} tx_hashes {}'.format(txs, tx_hashes))
@@ -187,9 +211,6 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
raise ValueError('txs passed to check gas must all have same sender; had {} got {}'.format(address, tx['from'])) raise ValueError('txs passed to check gas must all have same sender; had {} got {}'.format(address, tx['from']))
addresspass.append(address) addresspass.append(address)
if not is_checksum_address(address):
raise ValueError('invalid address {}'.format(address))
queue = self.request.delivery_info.get('routing_key') queue = self.request.delivery_info.get('routing_key')
conn = RPCConnection.connect(chain_spec) conn = RPCConnection.connect(chain_spec)
@@ -304,6 +325,7 @@ def refill_gas(self, recipient_address, chain_spec_dict):
# Determine value of gas tokens to send # Determine value of gas tokens to send
# if an uncompleted gas refill for the same recipient already exists, we still need to spend the nonce # if an uncompleted gas refill for the same recipient already exists, we still need to spend the nonce
# however, we will perform a 0-value transaction instead # however, we will perform a 0-value transaction instead
recipient_address = tx_normalize.wallet_address(recipient_address)
zero_amount = False zero_amount = False
session = SessionBase.create_session() session = SessionBase.create_session()
status_filter = StatusBits.FINAL | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR status_filter = StatusBits.FINAL | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR
@@ -378,6 +400,7 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_spec_dict, gas=None, defa
:returns: Transaction hash :returns: Transaction hash
:rtype: str, 0x-hex :rtype: str, 0x-hex
""" """
txold_hash_hex = tx_normalize.tx_hash(txold_hash_hex)
session = SessionBase.create_session() session = SessionBase.create_session()
otx = Otx.load(txold_hash_hex, session) otx = Otx.load(txold_hash_hex, session)

View File

@@ -15,6 +15,7 @@ from chainqueue.db.enum import (
# local imports # local imports
from cic_eth.db import SessionBase from cic_eth.db import SessionBase
from cic_eth.task import CriticalSQLAlchemyTask from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.encode import tx_normalize
celery_app = celery.current_app celery_app = celery.current_app
@@ -22,6 +23,9 @@ logg = logging.getLogger()
def __balance_outgoing_compatible(token_address, holder_address): def __balance_outgoing_compatible(token_address, holder_address):
token_address = tx_normalize.executable_address(token_address)
holder_address = tx_normalize.wallet_address(holder_address)
session = SessionBase.create_session() session = SessionBase.create_session()
q = session.query(TxCache.from_value) q = session.query(TxCache.from_value)
q = q.join(Otx) q = q.join(Otx)
@@ -58,6 +62,9 @@ def balance_outgoing(tokens, holder_address, chain_spec_dict):
def __balance_incoming_compatible(token_address, receiver_address): def __balance_incoming_compatible(token_address, receiver_address):
token_address = tx_normalize.executable_address(token_address)
receiver_address = tx_normalize.wallet_address(receiver_address)
session = SessionBase.create_session() session = SessionBase.create_session()
q = session.query(TxCache.to_value) q = session.query(TxCache.to_value)
q = q.join(Otx) q = q.join(Otx)
@@ -110,7 +117,7 @@ def assemble_balances(balances_collection):
logg.debug('received collection {}'.format(balances_collection)) logg.debug('received collection {}'.format(balances_collection))
for c in balances_collection: for c in balances_collection:
for b in c: for b in c:
address = b['address'] address = tx_normalize.executable_address(b['address'])
if tokens.get(address) == None: if tokens.get(address) == None:
tokens[address] = { tokens[address] = {
'address': address, 'address': address,

View File

@@ -6,6 +6,7 @@ import celery
from cic_eth.task import CriticalSQLAlchemyTask from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db import SessionBase from cic_eth.db import SessionBase
from cic_eth.db.models.lock import Lock from cic_eth.db.models.lock import Lock
from cic_eth.encode import tx_normalize
celery_app = celery.current_app celery_app = celery.current_app
@@ -21,6 +22,9 @@ def get_lock(address=None):
:returns: List of locks :returns: List of locks
:rtype: list of dicts :rtype: list of dicts
""" """
if address != None:
address = tx_normalize.wallet_address(address)
session = SessionBase.create_session() session = SessionBase.create_session()
q = session.query( q = session.query(
Lock.date_created, Lock.date_created,

View File

@@ -4,8 +4,8 @@ import datetime
# external imports # external imports
import celery import celery
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.tx import unpack
import chainqueue.sql.query import chainqueue.sql.query
from chainlib.eth.tx import unpack
from chainqueue.db.enum import ( from chainqueue.db.enum import (
StatusEnum, StatusEnum,
is_alive, is_alive,
@@ -20,6 +20,10 @@ from cic_eth.db.enum import LockEnum
from cic_eth.task import CriticalSQLAlchemyTask from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db.models.lock import Lock from cic_eth.db.models.lock import Lock
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
from cic_eth.encode import (
tx_normalize,
unpack_normal,
)
celery_app = celery.current_app celery_app = celery.current_app
@@ -27,49 +31,76 @@ celery_app = celery.current_app
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def get_tx_cache(chain_spec_dict, tx_hash): def get_tx_cache(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() return get_tx_cache_local(chain_spec, tx_hash)
def get_tx_cache_local(chain_spec, tx_hash, session=None):
tx_hash = tx_normalize.tx_hash(tx_hash)
session = SessionBase.bind_session(session)
r = chainqueue.sql.query.get_tx_cache(chain_spec, tx_hash, session=session) r = chainqueue.sql.query.get_tx_cache(chain_spec, tx_hash, session=session)
session.close() SessionBase.release_session(session)
return r return r
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def get_tx(chain_spec_dict, tx_hash): def get_tx(chain_spec_dict, tx_hash):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() return get_tx_local(chain_spec, tx_hash)
def get_tx_local(chain_spec, tx_hash, session=None):
tx_hash = tx_normalize.tx_hash(tx_hash)
session = SessionBase.bind_session(session)
r = chainqueue.sql.query.get_tx(chain_spec, tx_hash, session=session) r = chainqueue.sql.query.get_tx(chain_spec, tx_hash, session=session)
session.close() SessionBase.release_session(session)
return r return r
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True, counterpart=None): def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True, counterpart=None):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() return get_account_tx_local(chain_spec, address, as_sender=as_sender, as_recipient=as_recipient, counterpart=counterpart)
def get_account_tx_local(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=None):
address = tx_normalize.wallet_address(address)
session = SessionBase.bind_session(session)
r = chainqueue.sql.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session) r = chainqueue.sql.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session)
session.close() SessionBase.release_session(session)
return r return r
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None): def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0):
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
return get_upcoming_tx_nolock_local(chain_spec, status=status, not_status=not_status, recipient=recipient, before=before, limit=limit)
def get_upcoming_tx_nolock_local(chain_spec, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
recipient = tx_normalize.wallet_address(recipient)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack) r = chainqueue.sql.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack_normal)
session.close() session.close()
return r return r
def get_status_tx(chain_spec, status, not_status=None, before=None, exact=False, limit=0, session=None): def get_status_tx(chain_spec, status, not_status=None, before=None, exact=False, limit=0, session=None):
return chainqueue.sql.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack) return chainqueue.sql.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack_normal)
def get_paused_tx(chain_spec, status=None, sender=None, session=None, decoder=None): def get_paused_tx(chain_spec, status=None, sender=None, session=None, decoder=None):
return chainqueue.sql.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack) sender = tx_normalize.wallet_address(sender)
return chainqueue.sql.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack_normal)
def get_nonce_tx(chain_spec, nonce, sender): def get_nonce_tx(chain_spec, nonce, sender):
return get_nonce_tx_cache(chain_spec, nonce, sender, decoder=unpack) sender = tx_normalize.wallet_address(sender)
return get_nonce_tx_local(chain_spec, nonce, sender)
def get_nonce_tx_local(chain_spec, nonce, sender, session=None):
sender = tx_normalize.wallet_address(sender)
return chainqueue.sql.query.get_nonce_tx_cache(chain_spec, nonce, sender, decoder=unpack_normal, session=session)
def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None): def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
@@ -91,6 +122,8 @@ def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, re
:returns: Transactions :returns: Transactions
:rtype: dict, with transaction hash as key, signed raw transaction as value :rtype: dict, with transaction hash as key, signed raw transaction as value
""" """
if recipient != None:
recipient = tx_normalize.wallet_address(recipient)
session = SessionBase.bind_session(session) session = SessionBase.bind_session(session)
q_outer = session.query( q_outer = session.query(
TxCache.sender, TxCache.sender,

View File

@@ -6,12 +6,14 @@ import chainqueue.sql.state
import celery import celery
from cic_eth.task import CriticalSQLAlchemyTask from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
from cic_eth.encode import tx_normalize
celery_app = celery.current_app celery_app = celery.current_app
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_sent(chain_spec_dict, tx_hash, fail=False): def set_sent(chain_spec_dict, tx_hash, fail=False):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_sent(chain_spec, tx_hash, fail, session=session) r = chainqueue.sql.state.set_sent(chain_spec, tx_hash, fail, session=session)
@@ -21,6 +23,7 @@ def set_sent(chain_spec_dict, tx_hash, fail=False):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False): def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session) r = chainqueue.sql.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session)
@@ -30,6 +33,7 @@ def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_cancel(chain_spec_dict, tx_hash, manual=False): def set_cancel(chain_spec_dict, tx_hash, manual=False):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_cancel(chain_spec, tx_hash, manual, session=session) r = chainqueue.sql.state.set_cancel(chain_spec, tx_hash, manual, session=session)
@@ -39,6 +43,7 @@ def set_cancel(chain_spec_dict, tx_hash, manual=False):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_rejected(chain_spec_dict, tx_hash): def set_rejected(chain_spec_dict, tx_hash):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_rejected(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_rejected(chain_spec, tx_hash, session=session)
@@ -48,6 +53,7 @@ def set_rejected(chain_spec_dict, tx_hash):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_fubar(chain_spec_dict, tx_hash): def set_fubar(chain_spec_dict, tx_hash):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_fubar(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_fubar(chain_spec, tx_hash, session=session)
@@ -57,6 +63,7 @@ def set_fubar(chain_spec_dict, tx_hash):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_manual(chain_spec_dict, tx_hash): def set_manual(chain_spec_dict, tx_hash):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_manual(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_manual(chain_spec, tx_hash, session=session)
@@ -66,6 +73,7 @@ def set_manual(chain_spec_dict, tx_hash):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_ready(chain_spec_dict, tx_hash): def set_ready(chain_spec_dict, tx_hash):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_ready(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_ready(chain_spec, tx_hash, session=session)
@@ -75,6 +83,7 @@ def set_ready(chain_spec_dict, tx_hash):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_reserved(chain_spec_dict, tx_hash): def set_reserved(chain_spec_dict, tx_hash):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_reserved(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_reserved(chain_spec, tx_hash, session=session)
@@ -84,6 +93,7 @@ def set_reserved(chain_spec_dict, tx_hash):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def set_waitforgas(chain_spec_dict, tx_hash): def set_waitforgas(chain_spec_dict, tx_hash):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.set_waitforgas(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.set_waitforgas(chain_spec, tx_hash, session=session)
@@ -93,6 +103,7 @@ def set_waitforgas(chain_spec_dict, tx_hash):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def get_state_log(chain_spec_dict, tx_hash): def get_state_log(chain_spec_dict, tx_hash):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.get_state_log(chain_spec, tx_hash, session=session) r = chainqueue.sql.state.get_state_log(chain_spec, tx_hash, session=session)
@@ -102,6 +113,7 @@ def get_state_log(chain_spec_dict, tx_hash):
@celery_app.task(base=CriticalSQLAlchemyTask) @celery_app.task(base=CriticalSQLAlchemyTask)
def obsolete(chain_spec_dict, tx_hash, final): def obsolete(chain_spec_dict, tx_hash, final):
tx_hash = tx_normalize.tx_hash(tx_hash)
chain_spec = ChainSpec.from_dict(chain_spec_dict) chain_spec = ChainSpec.from_dict(chain_spec_dict)
session = SessionBase.create_session() session = SessionBase.create_session()
r = chainqueue.sql.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session) r = chainqueue.sql.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session)

View File

@@ -13,6 +13,7 @@ from chainqueue.error import NotLocalTxError
# local imports # local imports
from cic_eth.task import CriticalSQLAlchemyAndWeb3Task from cic_eth.task import CriticalSQLAlchemyAndWeb3Task
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
from cic_eth.encode import tx_normalize
celery_app = celery.current_app celery_app = celery.current_app
@@ -20,6 +21,7 @@ logg = logging.getLogger()
def tx_times(tx_hash, chain_spec, session=None): def tx_times(tx_hash, chain_spec, session=None):
tx_hash = tx_normalize.tx_hash(tx_hash)
session = SessionBase.bind_session(session) session = SessionBase.bind_session(session)

View File

@@ -32,12 +32,16 @@ from cic_eth.db import SessionBase
from cic_eth.db.enum import LockEnum from cic_eth.db.enum import LockEnum
from cic_eth.task import CriticalSQLAlchemyTask from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.error import LockedError from cic_eth.error import LockedError
from cic_eth.encode import tx_normalize
celery_app = celery.current_app celery_app = celery.current_app
logg = logging.getLogger() logg = logging.getLogger()
def queue_create(chain_spec, nonce, holder_address, tx_hash, signed_tx, session=None): def queue_create(chain_spec, nonce, holder_address, tx_hash, signed_tx, session=None):
tx_hash = tx_normalize.tx_hash(tx_hash)
signed_tx = tx_normalize.tx_hash(signed_tx)
holder_address = tx_normalize.wallet_address(holder_address)
session = SessionBase.bind_session(session) session = SessionBase.bind_session(session)
lock = Lock.check_aggregate(str(chain_spec), LockEnum.QUEUE, holder_address, session=session) lock = Lock.check_aggregate(str(chain_spec), LockEnum.QUEUE, holder_address, session=session)
@@ -67,6 +71,8 @@ def register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=No
:returns: Tuple; Transaction hash, signed raw transaction data :returns: Tuple; Transaction hash, signed raw transaction data
:rtype: tuple :rtype: tuple
""" """
tx_hash_hex = tx_normalize.tx_hash(tx_hash_hex)
tx_signed_raw_hex = tx_normalize.tx_hash(tx_signed_raw_hex)
logg.debug('adding queue tx {}:{} -> {}'.format(chain_spec, tx_hash_hex, tx_signed_raw_hex)) logg.debug('adding queue tx {}:{} -> {}'.format(chain_spec, tx_hash_hex, tx_signed_raw_hex))
tx_signed_raw = bytes.fromhex(strip_0x(tx_signed_raw_hex)) tx_signed_raw = bytes.fromhex(strip_0x(tx_signed_raw_hex))
tx = unpack(tx_signed_raw, chain_spec) tx = unpack(tx_signed_raw, chain_spec)

View File

@@ -10,15 +10,14 @@ from chainlib.eth.tx import unpack
from chainqueue.db.enum import StatusBits from chainqueue.db.enum import StatusBits
from chainqueue.db.models.tx import TxCache from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from chainqueue.sql.query import get_paused_tx_cache as get_paused_tx
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
# local imports # local imports
from cic_eth.db.models.base import SessionBase from cic_eth.db.models.base import SessionBase
from cic_eth.eth.gas import create_check_gas_task from cic_eth.eth.gas import create_check_gas_task
from cic_eth.queue.query import get_paused_tx
from .base import SyncFilter from .base import SyncFilter
#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger() logg = logging.getLogger()

View File

@@ -69,10 +69,15 @@ from cic_eth.registry import (
) )
from cic_eth.task import BaseTask from cic_eth.task import BaseTask
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
env = list(os.environ.keys())
env.sort()
for k in env:
logg.debug("env {} {}".format(k, os.environ[k]))
arg_flags = cic_eth.cli.argflag_std_read arg_flags = cic_eth.cli.argflag_std_read
local_arg_flags = cic_eth.cli.argflag_local_task local_arg_flags = cic_eth.cli.argflag_local_task
argparser = cic_eth.cli.ArgumentParser(arg_flags) argparser = cic_eth.cli.ArgumentParser(arg_flags)

View File

@@ -13,7 +13,6 @@ from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.gas import RPCGasOracle from chainlib.eth.gas import RPCGasOracle
from cic_eth_registry import CICRegistry from cic_eth_registry import CICRegistry
from cic_eth_registry.error import UnknownContractError from cic_eth_registry.error import UnknownContractError
import liveness.linux
# local imports # local imports
from cic_eth.error import SeppukuError from cic_eth.error import SeppukuError
@@ -48,6 +47,7 @@ class BaseTask(celery.Task):
def on_failure(self, exc, task_id, args, kwargs, einfo): def on_failure(self, exc, task_id, args, kwargs, einfo):
if isinstance(exc, SeppukuError): if isinstance(exc, SeppukuError):
import liveness.linux
liveness.linux.reset(rundir=self.run_dir) liveness.linux.reset(rundir=self.run_dir)
logg.critical(einfo) logg.critical(einfo)
msg = 'received critical exception {}, calling shutdown'.format(str(exc)) msg = 'received critical exception {}, calling shutdown'.format(str(exc))

View File

@@ -10,7 +10,7 @@ version = (
0, 0,
12, 12,
4, 4,
'alpha.4', 'alpha.8',
) )
version_object = semver.VersionInfo( version_object = semver.VersionInfo(

View File

@@ -1,8 +1,6 @@
@node cic-eth configuration @node cic-eth configuration
@section Configuration @section Configuration
(refer to @code{cic-base} for a general overview of the config pipeline)
Configuration parameters are grouped by configuration filename. Configuration parameters are grouped by configuration filename.
@@ -40,7 +38,26 @@ Boolean value. If set, the amount of available context for a task in the result
@subsection database @subsection database
See ref cic-base when ready @table @var
@item host
Database host
@item port
Database port
@item name
Database name
@item user
Database user
@item password
Database password
@item engine
The engine part of the dsn connection string (@code{postgresql} in @code{postgresql+psycopg2})
@item driver
The driver part of the dsn connection string (@code{psycopg2} in @code{postgresql+psycopg2})
@item pool_size
Connection pool size for database drivers that provide connection pooling
@item debug
Output actual sql queries to logs. Potentially very verbose
@end table
@subsection eth @subsection eth

View File

@@ -13,7 +13,16 @@ ARG EXTRA_PIP_ARGS=""
# pip install --index-url https://pypi.org/simple \ # pip install --index-url https://pypi.org/simple \
# --force-reinstall \ # --force-reinstall \
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \ # --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
# -r requirements.txt # -r requirements.txt
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY \
--extra-index-url $EXTRA_INDEX_URL \
$EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a6
COPY *requirements.txt ./ COPY *requirements.txt ./
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url https://pypi.org/simple \ pip install --index-url https://pypi.org/simple \
@@ -22,19 +31,21 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
$EXTRA_PIP_ARGS \ $EXTRA_PIP_ARGS \
-r requirements.txt \ -r requirements.txt \
-r services_requirements.txt \ -r services_requirements.txt \
-r admin_requirements.txt -r admin_requirements.txt
COPY . . # always install the latest signer
RUN python setup.py install
ENV PYTHONPATH .
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
pip install --index-url https://pypi.org/simple \ pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY \ --extra-index-url $GITLAB_PYTHON_REGISTRY \
--extra-index-url $EXTRA_INDEX_URL \ --extra-index-url $EXTRA_INDEX_URL \
$EXTRA_PIP_ARGS \ $EXTRA_PIP_ARGS \
cic-eth-aux-erc20-demurrage-token~=0.0.2a6 crypto-dev-signer
COPY . .
RUN python setup.py install
ENV PYTHONPATH .
COPY docker/entrypoints/* ./ COPY docker/entrypoints/* ./
RUN chmod 755 *.sh RUN chmod 755 *.sh

View File

@@ -1,71 +0,0 @@
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
WORKDIR /usr/src/cic-eth
# Copy just the requirements and install....this _might_ give docker a hint on caching but we
# do load these all into setup.py later
# TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
#COPY cic-eth/requirements.txt .
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG EXTRA_PIP_ARGS=""
#RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
# pip install --index-url https://pypi.org/simple \
# --force-reinstall \
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
# -r requirements.txt
COPY *requirements.txt .
RUN pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY \
--extra-index-url $EXTRA_INDEX_URL \
$EXTRA_PIP_ARGS \
-r requirements.txt \
-r services_requirements.txt \
-r admin_requirements.txt
COPY . .
RUN python setup.py install
COPY docker/entrypoints/* ./
RUN chmod 755 *.sh
# # ini files in config directory defines the configurable parameters for the application
# # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh
ENTRYPOINT []
# ------------------ PRODUCTION CONTAINER ----------------------
#FROM python:3.8.6-slim-buster as prod
#
#RUN apt-get update && \
# apt install -y gnupg libpq-dev procps
#
#WORKDIR /root
#
#COPY --from=dev /usr/local/bin/ /usr/local/bin/
#COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
# /usr/local/lib/python3.8/site-packages/
#
#COPY docker/entrypoints/* ./
#RUN chmod 755 *.sh
#
## # ini files in config directory defines the configurable parameters for the application
## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY scripts/ scripts/
#
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
##COPY util/liveness/health.sh /usr/local/bin/health.sh
#
#ENTRYPOINT []
#

View File

@@ -5,27 +5,27 @@ set -e
# set CONFINI_ENV_PREFIX to override the env prefix to override env vars # set CONFINI_ENV_PREFIX to override the env prefix to override env vars
echo "!!! starting signer" #echo "!!! starting signer"
python /usr/local/bin/crypto-dev-daemon -c /usr/local/etc/crypto-dev-signer -vv 2> /tmp/signer.log & #python /usr/local/bin/crypto-dev-daemon -c /usr/local/etc/crypto-dev-signer -vv 2> /tmp/signer.log &
echo "!!! starting taskerd" echo "!!! starting taskerd"
/usr/local/bin/cic-eth-taskerd $@ /usr/local/bin/cic-eth-taskerd $@
# thanks! https://docs.docker.com/config/containers/multi-service_container/ # thanks! https://docs.docker.com/config/containers/multi-service_container/
sleep 1; #sleep 1;
echo "!!! entering monitor loop" #echo "!!! entering monitor loop"
while true; do #while true; do
ps aux | grep crypto-dev-daemon | grep -q -v grep # ps aux | grep crypto-dev-daemon | grep -q -v grep
PROCESS_1_STATUS=$? # PROCESS_1_STATUS=$?
ps aux | grep cic-eth-tasker |grep -q -v grep # ps aux | grep cic-eth-tasker |grep -q -v grep
PROCESS_2_STATUS=$? # PROCESS_2_STATUS=$?
# If the greps above find anything, they exit with 0 status # # If the greps above find anything, they exit with 0 status
# If they are not both 0, then something is wrong # # If they are not both 0, then something is wrong
if [ $PROCESS_1_STATUS -ne 0 -o $PROCESS_2_STATUS -ne 0 ]; then # if [ $PROCESS_1_STATUS -ne 0 -o $PROCESS_2_STATUS -ne 0 ]; then
echo "One of the processes has already exited." # echo "One of the processes has already exited."
exit 1 # exit 1
fi # fi
sleep 15; # sleep 15;
done #done
#
set +e set +e

View File

@@ -0,0 +1,11 @@
#! /bin/bash
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
-r admin_requirements.txt \
-r services_requirements.txt \
-r test_requirements.txt
export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests

View File

@@ -1,3 +1,3 @@
celery==4.4.7 celery==4.4.7
chainlib-eth>=0.0.9a3,<0.1.0 chainlib-eth>=0.0.9a11,<0.1.0
semver==2.13.0 semver==2.13.0

View File

@@ -1,5 +1,5 @@
chainqueue>=0.0.3a2,<0.1.0 chainqueue>=0.0.5a1,<0.1.0
chainsyncer[sql]>=0.0.6a1,<0.1.0 chainsyncer[sql]>=0.0.6a3,<0.1.0
alembic==1.4.2 alembic==1.4.2
confini>=0.3.6rc4,<0.5.0 confini>=0.3.6rc4,<0.5.0
redis==3.5.3 redis==3.5.3

View File

@@ -1,7 +1,6 @@
# external imports # external imports
from chainlib.connection import RPCConnection from chainlib.connection import RPCConnection
from chainlib.eth.nonce import OverrideNonceOracle from chainlib.eth.nonce import OverrideNonceOracle
from chainqueue.sql.tx import create as queue_create
from chainlib.eth.tx import ( from chainlib.eth.tx import (
TxFormat, TxFormat,
unpack, unpack,
@@ -26,6 +25,8 @@ from chainqueue.db.enum import StatusBits
# local imports # local imports
from cic_eth.runnable.daemons.filters.gas import GasFilter from cic_eth.runnable.daemons.filters.gas import GasFilter
from cic_eth.eth.gas import cache_gas_data from cic_eth.eth.gas import cache_gas_data
from cic_eth.encode import tx_normalize
from cic_eth.queue.tx import queue_create
def test_filter_gas( def test_filter_gas(

View File

@@ -22,10 +22,11 @@ from hexathon import (
strip_0x, strip_0x,
add_0x, add_0x,
) )
from chainqueue.sql.query import get_account_tx
# local imports # local imports
from cic_eth.runnable.daemons.filters.register import RegistrationFilter from cic_eth.runnable.daemons.filters.register import RegistrationFilter
from cic_eth.encode import tx_normalize
from cic_eth.queue.query import get_account_tx_local
logg = logging.getLogger() logg = logging.getLogger()
@@ -79,7 +80,7 @@ def test_register_filter(
t.get_leaf() t.get_leaf()
assert t.successful() assert t.successful()
gift_txs = get_account_tx(default_chain_spec.asdict(), agent_roles['ALICE'], as_sender=True, session=init_database) gift_txs = get_account_tx_local(default_chain_spec, agent_roles['ALICE'], as_sender=True, session=init_database)
ks = list(gift_txs.keys()) ks = list(gift_txs.keys())
assert len(ks) == 1 assert len(ks) == 1

View File

@@ -0,0 +1,10 @@
#! /bin/bash
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
-r admin_requirements.txt
-r services_requirements.txt
-r test_requirements.txt
export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests

View File

@@ -34,10 +34,6 @@ from chainqueue.sql.state import (
set_ready, set_ready,
set_reserved, set_reserved,
) )
from chainqueue.sql.query import (
get_tx,
get_nonce_tx_cache,
)
# local imports # local imports
from cic_eth.api.admin import AdminApi from cic_eth.api.admin import AdminApi
@@ -46,6 +42,11 @@ from cic_eth.db.enum import LockEnum
from cic_eth.error import InitializationError from cic_eth.error import InitializationError
from cic_eth.eth.gas import cache_gas_data from cic_eth.eth.gas import cache_gas_data
from cic_eth.queue.tx import queue_create from cic_eth.queue.tx import queue_create
from cic_eth.queue.query import (
get_tx,
get_nonce_tx_local,
)
from cic_eth.encode import tx_normalize
logg = logging.getLogger() logg = logging.getLogger()
@@ -286,13 +287,15 @@ def test_fix_nonce(
assert t.successful() assert t.successful()
init_database.commit() init_database.commit()
txs = get_nonce_tx_cache(default_chain_spec, 3, agent_roles['ALICE'], session=init_database) logg.debug('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
ks = txs.keys() ks = txs.keys()
assert len(ks) == 2 assert len(ks) == 2
for k in ks: for k in ks:
hsh = add_0x(k) #hsh = add_0x(k)
hsh = tx_normalize.tx_hash(k)
otx = Otx.load(hsh, session=init_database) otx = Otx.load(hsh, session=init_database)
init_database.refresh(otx) init_database.refresh(otx)
logg.debug('checking nonce {} tx {} status {}'.format(3, otx.tx_hash, otx.status)) logg.debug('checking nonce {} tx {} status {}'.format(3, otx.tx_hash, otx.status))

View File

@@ -30,7 +30,6 @@ from chainqueue.sql.state import (
) )
from chainqueue.db.models.otx import Otx from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits from chainqueue.db.enum import StatusBits
from chainqueue.sql.query import get_nonce_tx_cache
from eth_erc20 import ERC20 from eth_erc20 import ERC20
from cic_eth_registry import CICRegistry from cic_eth_registry import CICRegistry
@@ -38,6 +37,7 @@ from cic_eth_registry import CICRegistry
from cic_eth.api.admin import AdminApi from cic_eth.api.admin import AdminApi
from cic_eth.eth.gas import cache_gas_data from cic_eth.eth.gas import cache_gas_data
from cic_eth.eth.erc20 import cache_transfer_data from cic_eth.eth.erc20 import cache_transfer_data
from cic_eth.queue.query import get_nonce_tx_local
logg = logging.getLogger() logg = logging.getLogger()
@@ -312,7 +312,7 @@ def test_resend_inplace(
otx = Otx.load(tx_hash_hex, session=init_database) otx = Otx.load(tx_hash_hex, session=init_database)
assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database) txs = get_nonce_tx_local(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
assert len(txs) == 2 assert len(txs) == 2
@@ -363,10 +363,10 @@ def test_resend_clone(
assert otx.status & StatusBits.IN_NETWORK == StatusBits.IN_NETWORK assert otx.status & StatusBits.IN_NETWORK == StatusBits.IN_NETWORK
assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database) txs = get_nonce_tx_local(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
assert len(txs) == 1 assert len(txs) == 1
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce + 1, agent_roles['ALICE'], session=init_database) txs = get_nonce_tx_local(default_chain_spec, otx.nonce + 1, agent_roles['ALICE'], session=init_database)
assert len(txs) == 1 assert len(txs) == 1
otx = Otx.load(txs[0], session=init_database) otx = Otx.load(txs[0], session=init_database)

View File

@@ -21,7 +21,6 @@ from chainlib.eth.constant import (
MINIMUM_FEE_UNITS, MINIMUM_FEE_UNITS,
MINIMUM_FEE_PRICE, MINIMUM_FEE_PRICE,
) )
from chainqueue.sql.tx import create as queue_create
from chainqueue.sql.query import get_tx from chainqueue.sql.query import get_tx
from chainqueue.db.enum import StatusBits from chainqueue.db.enum import StatusBits
from chainqueue.sql.state import ( from chainqueue.sql.state import (
@@ -35,6 +34,7 @@ from hexathon import strip_0x
# local imports # local imports
from cic_eth.eth.gas import cache_gas_data from cic_eth.eth.gas import cache_gas_data
from cic_eth.error import OutOfGasError from cic_eth.error import OutOfGasError
from cic_eth.queue.tx import queue_create
logg = logging.getLogger() logg = logging.getLogger()
@@ -75,7 +75,7 @@ def test_task_check_gas_ok(
'cic_eth.eth.gas.check_gas', 'cic_eth.eth.gas.check_gas',
[ [
[ [
tx_hash_hex, strip_0x(tx_hash_hex),
], ],
default_chain_spec.asdict(), default_chain_spec.asdict(),
[], [],
@@ -283,4 +283,3 @@ def test_task_resend_explicit(
tx_after = unpack(bytes.fromhex(strip_0x(otx.signed_tx)), default_chain_spec) tx_after = unpack(bytes.fromhex(strip_0x(otx.signed_tx)), default_chain_spec)
logg.debug('gasprices before {} after {}'.format(tx_before['gasPrice'], tx_after['gasPrice'])) logg.debug('gasprices before {} after {}'.format(tx_before['gasPrice'], tx_after['gasPrice']))
assert tx_after['gasPrice'] > tx_before['gasPrice'] assert tx_after['gasPrice'] > tx_before['gasPrice']

View File

@@ -51,6 +51,7 @@ def test_ext_tx_collate(
tx_hash_hex, tx_hash_hex,
tx_signed_raw_hex, tx_signed_raw_hex,
) )
otx.block = 666
init_database.add(otx) init_database.add(otx)
init_database.commit() init_database.commit()

View File

@@ -46,6 +46,7 @@ def test_set(
tx_hash_hex, tx_hash_hex,
tx_signed_raw_hex, tx_signed_raw_hex,
) )
otx.block = 666
init_database.add(otx) init_database.add(otx)
init_database.commit() init_database.commit()
@@ -74,7 +75,6 @@ def test_set(
assert (tx_stored.destination_token_address == ZERO_ADDRESS) assert (tx_stored.destination_token_address == ZERO_ADDRESS)
assert (tx_stored.from_value == tx['value']) assert (tx_stored.from_value == tx['value'])
assert (tx_stored.to_value == to_value) assert (tx_stored.to_value == to_value)
assert (tx_stored.block_number == 666)
assert (tx_stored.tx_index == 13) assert (tx_stored.tx_index == 13)

View File

@@ -13,6 +13,7 @@ from cic_eth.queue.balance import (
balance_incoming, balance_incoming,
assemble_balances, assemble_balances,
) )
from cic_eth.encode import tx_normalize
logg = logging.getLogger() logg = logging.getLogger()
@@ -51,8 +52,8 @@ def test_assemble():
r = assemble_balances(b) r = assemble_balances(b)
logg.debug('r {}'.format(r)) logg.debug('r {}'.format(r))
assert r[0]['address'] == token_foo assert r[0]['address'] == tx_normalize.executable_address(token_foo)
assert r[1]['address'] == token_bar assert r[1]['address'] == tx_normalize.executable_address(token_bar)
assert r[0].get('balance_foo') != None assert r[0].get('balance_foo') != None
assert r[0].get('balance_bar') != None assert r[0].get('balance_bar') != None
assert r[1].get('balance_baz') != None assert r[1].get('balance_baz') != None
@@ -74,11 +75,11 @@ def test_outgoing_balance(
token_address = '0x' + os.urandom(20).hex() token_address = '0x' + os.urandom(20).hex()
sender = '0x' + os.urandom(20).hex() sender = '0x' + os.urandom(20).hex()
txc = TxCache( txc = TxCache(
tx_hash, tx_normalize.tx_hash(tx_hash),
sender, tx_normalize.wallet_address(sender),
recipient, tx_normalize.wallet_address(recipient),
token_address, tx_normalize.executable_address(token_address),
token_address, tx_normalize.executable_address(token_address),
1000, 1000,
1000, 1000,
session=init_database, session=init_database,
@@ -125,11 +126,11 @@ def test_incoming_balance(
token_address = '0x' + os.urandom(20).hex() token_address = '0x' + os.urandom(20).hex()
sender = '0x' + os.urandom(20).hex() sender = '0x' + os.urandom(20).hex()
txc = TxCache( txc = TxCache(
tx_hash, tx_normalize.tx_hash(tx_hash),
sender, tx_normalize.wallet_address(sender),
recipient, tx_normalize.wallet_address(recipient),
token_address, tx_normalize.executable_address(token_address),
token_address, tx_normalize.executable_address(token_address),
1000, 1000,
1000, 1000,
session=init_database, session=init_database,

View File

@@ -21,6 +21,7 @@ from cic_eth.db.models.lock import Lock
from cic_eth.queue.query import get_upcoming_tx from cic_eth.queue.query import get_upcoming_tx
from cic_eth.queue.tx import register_tx from cic_eth.queue.tx import register_tx
from cic_eth.eth.gas import cache_gas_data from cic_eth.eth.gas import cache_gas_data
from cic_eth.encode import tx_normalize
# test imports # test imports
from tests.util.nonce import StaticNonceOracle from tests.util.nonce import StaticNonceOracle
@@ -39,8 +40,8 @@ def test_upcoming_with_lock(
gas_oracle = RPCGasOracle(eth_rpc) gas_oracle = RPCGasOracle(eth_rpc)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle) c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
alice_normal = add_0x(hex_uniform(strip_0x(agent_roles['ALICE']))) alice_normal = tx_normalize.wallet_address(agent_roles['ALICE'])
bob_normal = add_0x(hex_uniform(strip_0x(agent_roles['BOB']))) bob_normal = tx_normalize.wallet_address(agent_roles['BOB'])
(tx_hash_hex, tx_rpc) = c.create(alice_normal, bob_normal, 100 * (10 ** 6)) (tx_hash_hex, tx_rpc) = c.create(alice_normal, bob_normal, 100 * (10 ** 6))
tx_signed_raw_hex = tx_rpc['params'][0] tx_signed_raw_hex = tx_rpc['params'][0]

View File

@@ -9,7 +9,7 @@ from cic_eth.db.models.lock import Lock
from cic_eth.db.enum import LockEnum from cic_eth.db.enum import LockEnum
from cic_eth.error import LockedError from cic_eth.error import LockedError
from cic_eth.queue.tx import queue_create from cic_eth.queue.tx import queue_create
from cic_eth.encode import tx_normalize
def test_queue_lock( def test_queue_lock(
init_database, init_database,
@@ -21,6 +21,8 @@ def test_queue_lock(
address = '0x' + os.urandom(20).hex() address = '0x' + os.urandom(20).hex()
tx_hash = '0x' + os.urandom(32).hex() tx_hash = '0x' + os.urandom(32).hex()
tx_raw = '0x' + os.urandom(128).hex() tx_raw = '0x' + os.urandom(128).hex()
address_normal = tx_normalize.wallet_address(address)
tx_hash_normal = tx_normalize.tx_hash(tx_hash)
Lock.set(chain_str, LockEnum.QUEUE) Lock.set(chain_str, LockEnum.QUEUE)
with pytest.raises(LockedError): with pytest.raises(LockedError):
@@ -32,7 +34,7 @@ def test_queue_lock(
tx_raw, tx_raw,
) )
Lock.set(chain_str, LockEnum.QUEUE, address=address) Lock.set(chain_str, LockEnum.QUEUE, address=address_normal)
with pytest.raises(LockedError): with pytest.raises(LockedError):
queue_create( queue_create(
default_chain_spec, default_chain_spec,
@@ -52,7 +54,7 @@ def test_queue_lock(
tx_raw, tx_raw,
) )
Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash) Lock.set(chain_str, LockEnum.QUEUE, address=address_normal, tx_hash=tx_hash_normal)
with pytest.raises(LockedError): with pytest.raises(LockedError):
queue_create( queue_create(
default_chain_spec, default_chain_spec,
@@ -61,5 +63,3 @@ def test_queue_lock(
tx_hash, tx_hash,
tx_raw, tx_raw,
) )

View File

@@ -1,5 +1,5 @@
crypto-dev-signer>=0.4.15a1,<=0.4.15 crypto-dev-signer>=0.4.15a4,<=0.4.15
chainqueue>=0.0.3a1,<0.1.0 chainqueue>=0.0.5a1,<0.1.0
cic-eth-registry>=0.6.1a2,<0.7.0 cic-eth-registry>=0.6.1a2,<0.7.0
redis==3.5.3 redis==3.5.3
hexathon~=0.0.1a8 hexathon~=0.0.1a8

View File

@@ -1,43 +1,16 @@
build-test-cic-meta:
.cic_meta_variables: stage: test
variables: tags:
APP_NAME: cic-meta - integration
DOCKERFILE_PATH: docker/Dockerfile_ci variables:
CONTEXT: apps/$APP_NAME APP_NAME: cic-meta
MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
build-mr-cic-meta: script:
extends: - cd apps/cic-meta
- .py_build_merge_request - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
- .cic_meta_variables - docker run --entrypoint=sh $MR_IMAGE_TAG docker/run_tests.sh
rules: rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event" - if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes: changes:
- apps/cic-meta/**/* - apps/$APP_NAME/**/*
when: always
test-mr-cic-meta:
extends:
- .cic_meta_variables
stage: test
image: $MR_IMAGE_TAG
script:
- cd /root
- npm install --dev
- npm run test
- npm run test:coverage
needs: ["build-mr-cic-meta"]
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- apps/cic-meta/**/*
when: always
build-push-cic-meta:
extends:
- .py_build_push
- .cic_meta_variables
rules:
- if: $CI_COMMIT_BRANCH == "master"
changes:
- apps/cic-meta/**/*
when: always when: always

1
apps/cic-meta/README.md Normal file
View File

@@ -0,0 +1 @@
# CIC-Meta

View File

@@ -15,11 +15,10 @@ RUN --mount=type=cache,mode=0755,target=/root/.npm \
COPY webpack.config.js . COPY webpack.config.js .
COPY tsconfig.json . COPY tsconfig.json .
## required to build the cic-client-meta module ## required to build the cic-client-meta module
COPY src/ src/ COPY . .
COPY scripts/ scripts/
COPY tests/ tests/
COPY tests/*.asc /root/pgp/ COPY tests/*.asc /root/pgp/
## copy runtime configs ## copy runtime configs
COPY .config/ /usr/local/etc/cic-meta/ COPY .config/ /usr/local/etc/cic-meta/
# #

View File

@@ -1,32 +0,0 @@
# syntax = docker/dockerfile:1.2
#FROM node:15.3.0-alpine3.10
FROM node:lts-alpine3.14
WORKDIR /root
RUN apk add --no-cache postgresql bash
# copy the dependencies
COPY package.json package-lock.json .
RUN npm set cache /root/.npm && \
npm ci
COPY webpack.config.js .
COPY tsconfig.json .
## required to build the cic-client-meta module
COPY src/ src/
COPY scripts/ scripts/
COPY tests/ tests/
COPY tests/*.asc /root/pgp/
## copy runtime configs
COPY .config/ /usr/local/etc/cic-meta/
#
## db migrations
COPY docker/db.sh ./db.sh
RUN chmod 755 ./db.sh
#
RUN alias tsc=node_modules/typescript/bin/tsc
COPY docker/start_server.sh ./start_server.sh
RUN chmod 755 ./start_server.sh
ENTRYPOINT ["sh", "./start_server.sh"]

View File

@@ -0,0 +1,7 @@
#! /bin/bash
set -e
npm install --dev
npm run test
npm run test:coverage

View File

@@ -1,52 +1,17 @@
.cic_notify_variables: build-test-cic-notify:
variables: stage: test
APP_NAME: cic-notify tags:
DOCKERFILE_PATH: docker/Dockerfile_ci - integration
CONTEXT: apps/$APP_NAME variables:
APP_NAME: cic-notify
build-mr-cic-notify: MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
extends: script:
- .py_build_merge_request - cd apps/cic-notify
- .cic_notify_variables - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
rules: - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
- if: $CI_PIPELINE_SOURCE == "merge_request_event" allow_failure: true
changes: rules:
- apps/cic-notify/**/* - if: $CI_PIPELINE_SOURCE == "merge_request_event"
when: always changes:
- apps/$APP_NAME/**/*
test-mr-cic-notify: when: always
stage: test
extends:
- .cic_notify_variables
cache:
key:
files:
- test_requirements.txt
paths:
- /root/.cache/pip
image: $MR_IMAGE_TAG
script:
- cd apps/$APP_NAME/
- >
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
-r test_requirements.txt
- export PYTHONPATH=. && pytest -x --cov=cic_notify --cov-fail-under=90 --cov-report term-missing tests
needs: ["build-mr-cic-notify"]
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- apps/$APP_NAME/**/*
when: always
build-push-cic-notify:
extends:
- .py_build_push
- .cic_notify_variables
rules:
- if: $CI_COMMIT_BRANCH == "master"
changes:
- apps/cic-notify/**/*
when: always

View File

@@ -11,12 +11,12 @@ celery_app = celery.current_app
@celery_app.task @celery_app.task
def persist_notification(recipient, message): def persist_notification(message, recipient):
""" """
:param recipient:
:type recipient:
:param message: :param message:
:type message: :type message:
:param recipient:
:type recipient:
:return: :return:
:rtype: :rtype:
""" """

View File

@@ -11,12 +11,13 @@ local_logg = logging.getLogger(__name__)
@celery_app.task @celery_app.task
def log(recipient, message): def log(message, recipient):
""" """
:param recipient:
:type recipient:
:param message: :param message:
:type message: :type message:
:param recipient:
:type recipient:
:return: :return:
:rtype: :rtype:
""" """

View File

@@ -1,27 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
COPY requirements.txt .
RUN pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
-r requirements.txt
COPY . .
RUN python setup.py install
COPY docker/*.sh .
RUN chmod +x *.sh
# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY .config/ /usr/local/etc/cic-notify/
COPY cic_notify/db/migrations/ /usr/local/share/cic-notify/alembic/
ENTRYPOINT []

View File

@@ -0,0 +1,9 @@
#! /bin/bash
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
-r test_requirements.txt
export PYTHONPATH=. && pytest -x --cov=cic_notify --cov-fail-under=90 --cov-report term-missing tests

View File

@@ -1,52 +1,16 @@
.cic_ussd_variables: build-test-cic-ussd:
variables: stage: test
APP_NAME: cic-ussd tags:
DOCKERFILE_PATH: docker/Dockerfile_ci - integration
CONTEXT: apps/$APP_NAME variables:
APP_NAME: cic-ussd
build-mr-cic-ussd: MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
extends: script:
- .py_build_merge_request - cd apps/cic-ussd
- .cic_ussd_variables - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
rules: - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
- if: $CI_PIPELINE_SOURCE == "merge_request_event" rules:
changes: - if: $CI_PIPELINE_SOURCE == "merge_request_event"
- apps/cic-ussd/**/* changes:
when: always - apps/$APP_NAME/**/*
when: always
test-mr-cic-ussd:
stage: test
extends:
- .cic_ussd_variables
cache:
key:
files:
- test_requirements.txt
paths:
- /root/.cache/pip
image: $MR_IMAGE_TAG
script:
- cd apps/$APP_NAME/
- >
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
-r test_requirements.txt
- export PYTHONPATH=. && pytest -x --cov=cic_ussd --cov-fail-under=90 --cov-report term-missing tests/cic_ussd
needs: ["build-mr-cic-ussd"]
rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes:
- apps/$APP_NAME/**/*
when: always
build-push-cic-ussd:
extends:
- .py_build_push
- .cic_ussd_variables
rules:
- if: $CI_COMMIT_BRANCH == "master"
changes:
- apps/cic-ussd/**/*
when: always

View File

@@ -20,7 +20,7 @@ def get_balances(address: str,
asynchronous: bool = False, asynchronous: bool = False,
callback_param: any = None, callback_param: any = None,
callback_queue='cic-ussd', callback_queue='cic-ussd',
callback_task='cic_ussd.tasks.callback_handler.process_balances_callback') -> Optional[list]: callback_task='cic_ussd.tasks.callback_handler.balances_callback') -> Optional[list]:
"""This function queries cic-eth for an account's balances, It provides a means to receive the balance either """This function queries cic-eth for an account's balances, It provides a means to receive the balance either
asynchronously or synchronously.. It returns a dictionary containing the network, outgoing and incoming balances. asynchronously or synchronously.. It returns a dictionary containing the network, outgoing and incoming balances.
:param address: Ethereum address of an account. :param address: Ethereum address of an account.

View File

@@ -1,5 +1,6 @@
# standard import # standard import
import decimal import decimal
import json
import logging import logging
from typing import Dict, Tuple from typing import Dict, Tuple
@@ -8,6 +9,8 @@ from cic_eth.api import Api
from sqlalchemy.orm.session import Session from sqlalchemy.orm.session import Session
# local import # local import
from cic_ussd.account.chain import Chain
from cic_ussd.account.tokens import get_cached_default_token
from cic_ussd.db.models.account import Account from cic_ussd.db.models.account import Account
from cic_ussd.db.models.base import SessionBase from cic_ussd.db.models.base import SessionBase
from cic_ussd.error import UnknownUssdRecipient from cic_ussd.error import UnknownUssdRecipient
@@ -59,7 +62,9 @@ def from_wei(value: int) -> float:
:return: SRF equivalent of value in Wei :return: SRF equivalent of value in Wei
:rtype: float :rtype: float
""" """
value = float(value) / 1e+6 cached_token_data = json.loads(get_cached_default_token(Chain.spec.__str__()))
token_decimals: int = cached_token_data.get('decimals')
value = float(value) / (10**token_decimals)
return truncate(value=value, decimals=2) return truncate(value=value, decimals=2)
@@ -70,7 +75,9 @@ def to_wei(value: int) -> int:
:return: Wei equivalent of value in SRF :return: Wei equivalent of value in SRF
:rtype: int :rtype: int
""" """
return int(value * 1e+6) cached_token_data = json.loads(get_cached_default_token(Chain.spec.__str__()))
token_decimals: int = cached_token_data.get('decimals')
return int(value * (10**token_decimals))
def truncate(value: float, decimals: int): def truncate(value: float, decimals: int):
@@ -117,18 +124,18 @@ def transaction_actors(transaction: dict) -> Tuple[Dict, Dict]:
return recipient_transaction_data, sender_transaction_data return recipient_transaction_data, sender_transaction_data
def validate_transaction_account(session: Session, transaction: dict) -> Account: def validate_transaction_account(blockchain_address: str, role: str, session: Session) -> Account:
"""This function checks whether the blockchain address specified in a parsed transaction object resolves to an """This function checks whether the blockchain address specified in a parsed transaction object resolves to an
account object in the ussd system. account object in the ussd system.
:param session: Database session object. :param blockchain_address:
:type session: Session :type blockchain_address:
:param transaction: Parsed transaction data object. :param role:
:type transaction: dict :type role:
:param session:
:type session:
:return: :return:
:rtype: :rtype:
""" """
blockchain_address = transaction.get('blockchain_address')
role = transaction.get('role')
session = SessionBase.bind_session(session) session = SessionBase.bind_session(session)
account = session.query(Account).filter_by(blockchain_address=blockchain_address).first() account = session.query(Account).filter_by(blockchain_address=blockchain_address).first()
if not account: if not account:

View File

@@ -44,7 +44,7 @@ class MetadataRequestsHandler(Metadata):
def create(self, data: Union[Dict, str]): def create(self, data: Union[Dict, str]):
"""""" """"""
data = json.dumps(data) data = json.dumps(data).encode('utf-8')
result = make_request(method='POST', url=self.url, data=data, headers=self.headers) result = make_request(method='POST', url=self.url, data=data, headers=self.headers)
error_handler(result=result) error_handler(result=result)

View File

@@ -67,6 +67,7 @@ def resume_last_ussd_session(last_state: str) -> Document:
'exit', 'exit',
'exit_invalid_pin', 'exit_invalid_pin',
'exit_invalid_new_pin', 'exit_invalid_new_pin',
'exit_invalid_recipient',
'exit_invalid_request', 'exit_invalid_request',
'exit_pin_blocked', 'exit_pin_blocked',
'exit_pin_mismatch', 'exit_pin_mismatch',

View File

@@ -146,7 +146,7 @@ def create_ussd_session(
) )
def update_ussd_session(ussd_session: UssdSession, def update_ussd_session(ussd_session: DbUssdSession,
user_input: str, user_input: str,
state: str, state: str,
data: Optional[dict] = None) -> UssdSession: data: Optional[dict] = None) -> UssdSession:

View File

@@ -4,6 +4,7 @@ from typing import Tuple
# third party imports # third party imports
import celery import celery
from phonenumbers.phonenumberutil import NumberParseException
# local imports # local imports
from cic_ussd.account.balance import get_cached_available_balance from cic_ussd.account.balance import get_cached_available_balance
@@ -21,23 +22,21 @@ logg = logging.getLogger(__file__)
def is_valid_recipient(state_machine_data: Tuple[str, dict, Account, Session]) -> bool: def is_valid_recipient(state_machine_data: Tuple[str, dict, Account, Session]) -> bool:
"""This function checks that a user exists, is not the initiator of the transaction, has an active account status """This function checks that a phone number provided as the recipient of a transaction does not match the sending
and is authorized to perform standard transactions. party's own phone number.
:param state_machine_data: A tuple containing user input, a ussd session and user object. :param state_machine_data: A tuple containing user input, a ussd session and user object.
:type state_machine_data: tuple :type state_machine_data: tuple
:return: A user's validity :return: A recipient account's validity for a transaction
:rtype: bool :rtype: bool
""" """
user_input, ussd_session, account, session = state_machine_data user_input, ussd_session, account, session = state_machine_data
phone_number = process_phone_number(user_input, E164Format.region) try:
session = SessionBase.bind_session(session=session) phone_number = process_phone_number(user_input, E164Format.region)
recipient = Account.get_by_phone_number(phone_number=phone_number, session=session) except NumberParseException:
SessionBase.release_session(session=session) phone_number = None
is_not_initiator = phone_number != account.phone_number is_not_initiator = phone_number != account.phone_number
has_active_account_status = False is_present = Account.get_by_phone_number(phone_number, session) is not None
if recipient: return phone_number is not None and phone_number.startswith('+') and is_present and is_not_initiator
has_active_account_status = recipient.get_status(session) == AccountStatus.ACTIVE.name
return is_not_initiator and has_active_account_status and recipient is not None
def is_valid_transaction_amount(state_machine_data: Tuple[str, dict, Account, Session]) -> bool: def is_valid_transaction_amount(state_machine_data: Tuple[str, dict, Account, Session]) -> bool:

View File

@@ -138,26 +138,14 @@ def transaction_balances_callback(self, result: list, param: dict, status_code:
balances_data = result[0] balances_data = result[0]
available_balance = calculate_available_balance(balances_data) available_balance = calculate_available_balance(balances_data)
transaction = param transaction = param
blockchain_address = param.get('blockchain_address')
transaction['available_balance'] = available_balance transaction['available_balance'] = available_balance
queue = self.request.delivery_info.get('routing_key') queue = self.request.delivery_info.get('routing_key')
s_preferences_metadata = celery.signature(
'cic_ussd.tasks.metadata.query_preferences_metadata', [blockchain_address], queue=queue
)
s_process_account_metadata = celery.signature( s_process_account_metadata = celery.signature(
'cic_ussd.tasks.processor.parse_transaction', [transaction], queue=queue 'cic_ussd.tasks.processor.parse_transaction', [transaction], queue=queue
) )
s_notify_account = celery.signature('cic_ussd.tasks.notifications.transaction', queue=queue) s_notify_account = celery.signature('cic_ussd.tasks.notifications.transaction', queue=queue)
celery.chain(s_process_account_metadata, s_notify_account).apply_async()
if param.get('transaction_type') == 'transfer':
celery.chain(s_preferences_metadata, s_process_account_metadata, s_notify_account).apply_async()
if param.get('transaction_type') == 'tokengift':
s_process_account_metadata = celery.signature(
'cic_ussd.tasks.processor.parse_transaction', [{}, transaction], queue=queue
)
celery.chain(s_process_account_metadata, s_notify_account).apply_async()
@celery_app.task @celery_app.task
@@ -184,10 +172,11 @@ def transaction_callback(result: dict, param: str, status_code: int):
source_token_value = result.get('source_token_value') source_token_value = result.get('source_token_value')
recipient_metadata = { recipient_metadata = {
"token_symbol": destination_token_symbol, "alt_blockchain_address": sender_blockchain_address,
"token_value": destination_token_value,
"blockchain_address": recipient_blockchain_address, "blockchain_address": recipient_blockchain_address,
"role": "recipient", "role": "recipient",
"token_symbol": destination_token_symbol,
"token_value": destination_token_value,
"transaction_type": param "transaction_type": param
} }
@@ -201,10 +190,11 @@ def transaction_callback(result: dict, param: str, status_code: int):
if param == 'transfer': if param == 'transfer':
sender_metadata = { sender_metadata = {
"alt_blockchain_address": recipient_blockchain_address,
"blockchain_address": sender_blockchain_address, "blockchain_address": sender_blockchain_address,
"role": "sender",
"token_symbol": source_token_symbol, "token_symbol": source_token_symbol,
"token_value": source_token_value, "token_value": source_token_value,
"role": "sender",
"transaction_type": param "transaction_type": param
} }

View File

@@ -29,7 +29,8 @@ def transaction(notification_data: dict):
phone_number = notification_data.get('phone_number') phone_number = notification_data.get('phone_number')
preferred_language = notification_data.get('preferred_language') preferred_language = notification_data.get('preferred_language')
token_symbol = notification_data.get('token_symbol') token_symbol = notification_data.get('token_symbol')
transaction_account_metadata = notification_data.get('metadata_id') alt_metadata_id = notification_data.get('alt_metadata_id')
metadata_id = notification_data.get('metadata_id')
transaction_type = notification_data.get('transaction_type') transaction_type = notification_data.get('transaction_type')
timestamp = datetime.datetime.now().strftime('%d-%m-%y, %H:%M %p') timestamp = datetime.datetime.now().strftime('%d-%m-%y, %H:%M %p')
@@ -47,7 +48,8 @@ def transaction(notification_data: dict):
preferred_language=preferred_language, preferred_language=preferred_language,
amount=amount, amount=amount,
token_symbol=token_symbol, token_symbol=token_symbol,
tx_sender_information=transaction_account_metadata, tx_recipient_information=metadata_id,
tx_sender_information=alt_metadata_id,
timestamp=timestamp, timestamp=timestamp,
balance=balance) balance=balance)
if role == 'sender': if role == 'sender':
@@ -56,6 +58,7 @@ def transaction(notification_data: dict):
preferred_language=preferred_language, preferred_language=preferred_language,
amount=amount, amount=amount,
token_symbol=token_symbol, token_symbol=token_symbol,
tx_recipient_information=transaction_account_metadata, tx_recipient_information=alt_metadata_id,
tx_sender_information=metadata_id,
timestamp=timestamp, timestamp=timestamp,
balance=balance) balance=balance)

View File

@@ -8,9 +8,11 @@ import i18n
from chainlib.hash import strip_0x from chainlib.hash import strip_0x
# local imports # local imports
from cic_ussd.account.metadata import get_cached_preferred_language
from cic_ussd.account.statement import get_cached_statement from cic_ussd.account.statement import get_cached_statement
from cic_ussd.account.transaction import aux_transaction_data, validate_transaction_account from cic_ussd.account.transaction import aux_transaction_data, validate_transaction_account
from cic_ussd.cache import cache_data, cache_data_key from cic_ussd.cache import cache_data, cache_data_key
from cic_ussd.db.models.account import Account
from cic_ussd.db.models.base import SessionBase from cic_ussd.db.models.base import SessionBase
@@ -57,28 +59,32 @@ def cache_statement(parsed_transaction: dict, querying_party: str):
@celery_app.task @celery_app.task
def parse_transaction(preferences: dict, transaction: dict) -> dict: def parse_transaction(transaction: dict) -> dict:
"""This function parses transaction objects and collates all relevant data for system use i.e: """This function parses transaction objects and collates all relevant data for system use i.e:
- An account's set preferred language. - An account's set preferred language.
- Account identifier that facilitates notification. - Account identifier that facilitates notification.
- Contextual tags i.e action and direction tags. - Contextual tags i.e action and direction tags.
:param preferences: An account's set preferences.
:type preferences: dict
:param transaction: Transaction object. :param transaction: Transaction object.
:type transaction: dict :type transaction: dict
:return: Transaction object with contextual data for use in the system. :return: Transaction object with contextual data for use in the system.
:rtype: dict :rtype: dict
""" """
preferred_language = preferences.get('preferred_language') preferred_language = get_cached_preferred_language(transaction.get('blockchain_address'))
if not preferred_language: if not preferred_language:
preferred_language = i18n.config.get('fallback') preferred_language = i18n.config.get('fallback')
transaction['preferred_language'] = preferred_language
transaction = aux_transaction_data(preferred_language, transaction) transaction = aux_transaction_data(preferred_language, transaction)
session = SessionBase.create_session() session = SessionBase.create_session()
account = validate_transaction_account(session, transaction) role = transaction.get('role')
metadata_id = account.standard_metadata_id() alt_blockchain_address = transaction.get('alt_blockchain_address')
transaction['metadata_id'] = metadata_id blockchain_address = transaction.get('blockchain_address')
account = validate_transaction_account(blockchain_address, role, session)
alt_account = session.query(Account).filter_by(blockchain_address=alt_blockchain_address).first()
if alt_account:
transaction['alt_metadata_id'] = alt_account.standard_metadata_id()
else:
transaction['alt_metadata_id'] = 'GRASSROOTS ECONOMICS'
transaction['metadata_id'] = account.standard_metadata_id()
transaction['phone_number'] = account.phone_number transaction['phone_number'] = account.phone_number
session.commit()
session.close() session.close()
return transaction return transaction

View File

@@ -1,7 +1,7 @@
# standard imports # standard imports
import semver import semver
version = (0, 3, 1, 'alpha.1') version = (0, 3, 1, 'alpha.4')
version_object = semver.VersionInfo( version_object = semver.VersionInfo(
major=version[0], major=version[0],

View File

@@ -1,32 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
RUN apt-get install -y redis-server
# create secrets directory
RUN mkdir -vp pgp/keys
# create application directory
RUN mkdir -vp cic-ussd
RUN mkdir -vp data
COPY requirements.txt .
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
RUN pip install --index-url https://pypi.org/simple \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
-r requirements.txt
COPY . .
RUN python setup.py install
COPY cic_ussd/db/ussd_menu.json data/
COPY docker/*.sh .
RUN chmod +x /root/*.sh
# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
COPY config/ /usr/local/etc/cic-ussd/
COPY cic_ussd/db/migrations/ /usr/local/share/cic-ussd/alembic
ENTRYPOINT []

View File

@@ -0,0 +1,10 @@
#! /bin/bash
set -e
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
-r test_requirements.txt
export PYTHONPATH=. && pytest -x --cov=cic_ussd --cov-fail-under=90 --cov-report term-missing tests/cic_ussd

View File

@@ -1,7 +1,7 @@
alembic==1.4.2 alembic==1.4.2
bcrypt==3.2.0 bcrypt==3.2.0
celery==4.4.7 celery==4.4.7
cic-eth[services]~=0.12.4a3 cic-eth[services]~=0.12.4a7
cic-notify~=0.4.0a10 cic-notify~=0.4.0a10
cic-types~=0.1.0a14 cic-types~=0.1.0a14
confini>=0.4.1a1,<0.5.0 confini>=0.4.1a1,<0.5.0
@@ -13,11 +13,5 @@ redis==3.5.3
semver==2.13.0 semver==2.13.0
SQLAlchemy==1.3.20 SQLAlchemy==1.3.20
tinydb==4.2.0 tinydb==4.2.0
phonenumbers==8.12.12
redis==3.5.3
celery==4.4.7
python-i18n[YAML]==0.3.9
pyxdg==0.27
bcrypt==3.2.0
uWSGI==2.0.19.1
transitions==0.8.4 transitions==0.8.4
uWSGI==2.0.19.1

View File

@@ -75,17 +75,21 @@ def test_transaction_actors(activated_account, transaction_result, valid_recipie
def test_validate_transaction_account(activated_account, init_database, transactions_list): def test_validate_transaction_account(activated_account, init_database, transactions_list):
sample_transaction = transactions_list[0] sample_transaction = transactions_list[0]
recipient_transaction, sender_transaction = transaction_actors(sample_transaction) recipient_transaction, sender_transaction = transaction_actors(sample_transaction)
recipient_account = validate_transaction_account(init_database, recipient_transaction) recipient_account = validate_transaction_account(
sender_account = validate_transaction_account(init_database, sender_transaction) recipient_transaction.get('blockchain_address'), recipient_transaction.get('role'), init_database)
sender_account = validate_transaction_account(
sender_transaction.get('blockchain_address'), sender_transaction.get('role'), init_database)
assert isinstance(recipient_account, Account) assert isinstance(recipient_account, Account)
assert isinstance(sender_account, Account) assert isinstance(sender_account, Account)
sample_transaction = transactions_list[1] sample_transaction = transactions_list[1]
recipient_transaction, sender_transaction = transaction_actors(sample_transaction) recipient_transaction, sender_transaction = transaction_actors(sample_transaction)
with pytest.raises(UnknownUssdRecipient) as error: with pytest.raises(UnknownUssdRecipient) as error:
validate_transaction_account(init_database, recipient_transaction) validate_transaction_account(
recipient_transaction.get('blockchain_address'), recipient_transaction.get('role'), init_database)
assert str( assert str(
error.value) == f'Tx for recipient: {recipient_transaction.get("blockchain_address")} has no matching account in the system.' error.value) == f'Tx for recipient: {recipient_transaction.get("blockchain_address")} has no matching account in the system.'
validate_transaction_account(init_database, sender_transaction) validate_transaction_account(
sender_transaction.get('blockchain_address'), sender_transaction.get('role'), init_database)
assert f'Tx from sender: {sender_transaction.get("blockchain_address")} has no matching account in system.' assert f'Tx from sender: {sender_transaction.get("blockchain_address")} has no matching account in system.'

View File

@@ -30,8 +30,6 @@ def test_is_valid_recipient(activated_account,
valid_recipient): valid_recipient):
state_machine = ('0112365478', generic_ussd_session, valid_recipient, init_database) state_machine = ('0112365478', generic_ussd_session, valid_recipient, init_database)
assert is_valid_recipient(state_machine) is False assert is_valid_recipient(state_machine) is False
state_machine = (pending_account.phone_number, generic_ussd_session, valid_recipient, init_database)
assert is_valid_recipient(state_machine) is False
state_machine = (valid_recipient.phone_number, generic_ussd_session, activated_account, init_database) state_machine = (valid_recipient.phone_number, generic_ussd_session, activated_account, init_database)
assert is_valid_recipient(state_machine) is True assert is_valid_recipient(state_machine) is True

View File

@@ -24,7 +24,8 @@ def test_transaction(celery_session_worker,
phone_number = notification_data.get('phone_number') phone_number = notification_data.get('phone_number')
preferred_language = notification_data.get('preferred_language') preferred_language = notification_data.get('preferred_language')
token_symbol = notification_data.get('token_symbol') token_symbol = notification_data.get('token_symbol')
transaction_account_metadata = notification_data.get('metadata_id') alt_metadata_id = notification_data.get('alt_metadata_id')
metadata_id = notification_data.get('metadata_id')
timestamp = datetime.datetime.now().strftime('%d-%m-%y, %H:%M %p') timestamp = datetime.datetime.now().strftime('%d-%m-%y, %H:%M %p')
s_transaction = celery.signature( s_transaction = celery.signature(
'cic_ussd.tasks.notifications.transaction', [notification_data] 'cic_ussd.tasks.notifications.transaction', [notification_data]
@@ -36,7 +37,8 @@ def test_transaction(celery_session_worker,
preferred_language=preferred_language, preferred_language=preferred_language,
amount=amount, amount=amount,
token_symbol=token_symbol, token_symbol=token_symbol,
tx_recipient_information=transaction_account_metadata, tx_recipient_information=alt_metadata_id,
tx_sender_information=metadata_id,
timestamp=timestamp, timestamp=timestamp,
balance=balance) balance=balance)
assert mock_notifier_api.get('message') == message assert mock_notifier_api.get('message') == message
@@ -52,7 +54,8 @@ def test_transaction(celery_session_worker,
preferred_language=preferred_language, preferred_language=preferred_language,
amount=amount, amount=amount,
token_symbol=token_symbol, token_symbol=token_symbol,
tx_sender_information=transaction_account_metadata, tx_recipient_information=metadata_id,
tx_sender_information=alt_metadata_id,
timestamp=timestamp, timestamp=timestamp,
balance=balance) balance=balance)
assert mock_notifier_api.get('message') == message assert mock_notifier_api.get('message') == message

View File

@@ -5,12 +5,18 @@ import random
import pytest import pytest
# local import # local import
from cic_ussd.account.balance import get_cached_available_balance
# tests imports # tests imports
@pytest.fixture(scope='function') @pytest.fixture(scope='function')
def notification_data(activated_account, cache_person_metadata, cache_preferences, preferences): def notification_data(activated_account,
cache_person_metadata,
cache_preferences,
cache_balances,
preferences,
valid_recipient):
return { return {
'blockchain_address': activated_account.blockchain_address, 'blockchain_address': activated_account.blockchain_address,
'token_symbol': 'GFT', 'token_symbol': 'GFT',
@@ -18,6 +24,7 @@ def notification_data(activated_account, cache_person_metadata, cache_preference
'role': 'sender', 'role': 'sender',
'action_tag': 'Sent', 'action_tag': 'Sent',
'direction_tag': 'To', 'direction_tag': 'To',
'alt_metadata_id': valid_recipient.standard_metadata_id(),
'metadata_id': activated_account.standard_metadata_id(), 'metadata_id': activated_account.standard_metadata_id(),
'phone_number': activated_account.phone_number, 'phone_number': activated_account.phone_number,
'available_balance': 50.0, 'available_balance': 50.0,

View File

@@ -2,9 +2,9 @@ en:
account_successfully_created: |- account_successfully_created: |-
You have been registered on Sarafu Network! To use dial *384*96# on Safaricom and *483*96# on other networks. For help %{support_phone}. You have been registered on Sarafu Network! To use dial *384*96# on Safaricom and *483*96# on other networks. For help %{support_phone}.
received_tokens: |- received_tokens: |-
Successfully received %{amount} %{token_symbol} from %{tx_sender_information} %{timestamp}. New balance is %{balance} %{token_symbol}. Successfully received %{amount} %{token_symbol} from %{tx_sender_information} %{timestamp} to %{tx_recipient_information}. New balance is %{balance} %{token_symbol}.
sent_tokens: |- sent_tokens: |-
Successfully sent %{amount} %{token_symbol} to %{tx_recipient_information} %{timestamp}. New balance is %{balance} %{token_symbol}. Successfully sent %{amount} %{token_symbol} to %{tx_recipient_information} %{timestamp} from %{tx_sender_information}. New balance is %{balance} %{token_symbol}.
terms: |- terms: |-
By using the service, you agree to the terms and conditions at http://grassecon.org/tos By using the service, you agree to the terms and conditions at http://grassecon.org/tos
upsell_unregistered_recipient: |- upsell_unregistered_recipient: |-

View File

@@ -2,9 +2,9 @@ sw:
account_successfully_created: |- account_successfully_created: |-
Umesajiliwa kwa huduma ya Sarafu! Kutumia bonyeza *384*96# Safaricom ama *483*46# kwa utandao tofauti. Kwa Usaidizi %{support_phone}. Umesajiliwa kwa huduma ya Sarafu! Kutumia bonyeza *384*96# Safaricom ama *483*46# kwa utandao tofauti. Kwa Usaidizi %{support_phone}.
received_tokens: |- received_tokens: |-
Umepokea %{amount} %{token_symbol} kutoka kwa %{tx_sender_information} %{timestamp}. Salio lako ni %{balance} %{token_symbol}. Umepokea %{amount} %{token_symbol} kutoka kwa %{tx_sender_information} %{timestamp} ikapokewa na %{tx_recipient_information}. Salio lako ni %{balance} %{token_symbol}.
sent_tokens: |- sent_tokens: |-
Umetuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp}. Salio lako ni %{balance} %{token_symbol}. Umetuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp} kutoka kwa %{tx_sender_information}. Salio lako ni %{balance} %{token_symbol}.
terms: |- terms: |-
Kwa kutumia hii huduma, umekubali sheria na masharti yafuatayo http://grassecon.org/tos Kwa kutumia hii huduma, umekubali sheria na masharti yafuatayo http://grassecon.org/tos
upsell_unregistered_recipient: |- upsell_unregistered_recipient: |-

View File

@@ -7,10 +7,8 @@ en:
3. Help 3. Help
initial_pin_entry: |- initial_pin_entry: |-
CON Please enter a new four number PIN for your account. CON Please enter a new four number PIN for your account.
0. Back
initial_pin_confirmation: |- initial_pin_confirmation: |-
CON Enter your four number PIN again CON Enter your four number PIN again
0. Back
enter_given_name: |- enter_given_name: |-
CON Enter first name CON Enter first name
0. Back 0. Back
@@ -183,7 +181,7 @@ en:
exit_pin_mismatch: |- exit_pin_mismatch: |-
END The new PIN does not match the one you entered. Please try again. For help, call %{support_phone}. END The new PIN does not match the one you entered. Please try again. For help, call %{support_phone}.
exit_invalid_recipient: |- exit_invalid_recipient: |-
CON Recipient phone number is incorrect. CON Recipient's phone number is not registered or is invalid:
00. Retry 00. Retry
99. Exit 99. Exit
exit_successful_transaction: |- exit_successful_transaction: |-

View File

@@ -7,13 +7,10 @@ sw:
3. Help 3. Help
initial_pin_entry: |- initial_pin_entry: |-
CON Tafadhali weka pin mpya yenye nambari nne kwa akaunti yako CON Tafadhali weka pin mpya yenye nambari nne kwa akaunti yako
0. Nyuma
initial_pin_confirmation: |- initial_pin_confirmation: |-
CON Weka PIN yako tena CON Weka PIN yako tena
0. Nyuma
enter_given_name: |- enter_given_name: |-
CON Weka jina lako la kwanza CON Weka jina lako la kwanza
0. Nyuma
enter_family_name: |- enter_family_name: |-
CON Weka jina lako la mwisho CON Weka jina lako la mwisho
0. Nyuma 0. Nyuma

View File

@@ -1,25 +1,25 @@
.contract_migration_variables: #.contract_migration_variables:
variables: # variables:
APP_NAME: contract-migration # APP_NAME: contract-migration
DOCKERFILE_PATH: docker/Dockerfile_ci # DOCKERFILE_PATH: docker/Dockerfile_ci
CONTEXT: apps/$APP_NAME # CONTEXT: apps/$APP_NAME
#
build-mr-contract-migration: #build-mr-contract-migration:
extends: # extends:
- .py_build_merge_request # - .py_build_merge_request
- .contract_migration_variables # - .contract_migration_variables
rules: # rules:
- if: $CI_PIPELINE_SOURCE == "merge_request_event" # - if: $CI_PIPELINE_SOURCE == "merge_request_event"
changes: # changes:
- apps/contract-migration/**/* # - apps/contract-migration/**/*
when: always # when: always
#
build-push-contract-migration: #build-push-contract-migration:
extends: # extends:
- .py_build_push # - .py_build_push
- .contract_migration_variables # - .contract_migration_variables
rules: # rules:
- if: $CI_COMMIT_BRANCH == "master" # - if: $CI_COMMIT_BRANCH == "master"
changes: # changes:
- apps/contract-migration/**/* # - apps/contract-migration/**/*
when: always # when: always

View File

@@ -0,0 +1,43 @@
#!/bin/bash
set -a
if [ -z $DEV_DATA_DIR ]; then
export DEV_DATA_DIR=`mktemp -d`
else
mkdir -p $DEV_DATA_DIR
fi
if [ -z $DEV_CONFIG_RESET ]; then
if [ -f ${DEV_DATA_DIR}/env_reset ]; then
>&2 echo "importing existing configuration values from ${DEV_DATA_DIR}/env_reset"
. ${DEV_DATA_DIR}/env_reset
fi
fi
# Handle wallet
export WALLET_KEY_FILE=${WALLET_KEY_FILE:-`realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c`}
if [ ! -f $WALLET_KEY_FILE ]; then
>&2 echo "wallet path '$WALLET_KEY_FILE' does not point to a file"
exit 1
fi
export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $WALLET_KEY_FILE | jq -r .address)`
# Wallet dependent variable defaults
export DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
export DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
export CIC_TRUST_ADDRESS=${CIC_TRUST_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
export TOKEN_SINK_ADDRESS=${TOKEN_SINK_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
# Legacy variable defaults
# Migration variable processing
confini-dump -vv --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
cat ${DEV_DATA_DIR}/env_reset
set +a

View File

@@ -0,0 +1,23 @@
[token]
name = Giftable Token
symbol = GFT
type = giftable_erc20_token
demurrage_level = 196454828847045000000000000000000
redistribution_period =
supply_limit =
sink_address =
[dev]
eth_account_contract_deployer =
eth_account_reserve_minter =
eth_account_accounts_index_writer =
reserve_amount = 10000000000000000000000000000000000
faucet_amount = 0
gas_amount = 100000000000000000000000
token_amount = 100000000000000000000000
eth_gas_price =
data_dir =
pip_extra_index_url =
eth_provider_host =
eth_provider_port =

View File

@@ -1,4 +0,0 @@
[redis]
host =
port =
db =

View File

@@ -1,44 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e
WORKDIR /root
RUN touch /etc/apt/sources.list.d/ethereum.list
RUN echo 'deb http://ppa.launchpad.net/ethereum/ethereum/ubuntu bionic main' > /etc/apt/sources.list.d/ethereum.list
RUN echo 'deb-src http://ppa.launchpad.net/ethereum/ethereum/ubuntu bionic main' >> /etc/apt/sources.list.d/ethereum.list
RUN cat /etc/apt/sources.list.d/ethereum.list
RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys 2A518C819BE37D2C2031944D1C52189C923F6CA9
#RUN apt-get install solc
RUN mkdir -vp /usr/local/etc/cic
ENV CONFINI_DIR /usr/local/etc/cic/
COPY config_template/ /usr/local/etc/cic/
COPY requirements.txt .
COPY override_requirements.txt .
ARG pip_index_url=https://pypi.org/simple
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG EXTRA_PIP_ARGS=""
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG pip_trusted_host=pypi.org
RUN pip install --index-url https://pypi.org/simple \
pip install --index-url https://pypi.org/simple \
--pre \
--force-reinstall \
--trusted-host $pip_trusted_host \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
-r requirements.txt
RUN pip install --index-url https://pypi.org/simple \
--force-reinstall \
--pre \
--trusted-host $pip_trusted_host \
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
-r override_requirements.txt
COPY . .
RUN chmod +x *.sh

View File

@@ -56,6 +56,7 @@ ETH_PROVIDER
ETH_ABI_DIR ETH_ABI_DIR
SIGNER_SOCKET_PATH SIGNER_SOCKET_PATH
SIGNER_SECRET SIGNER_SECRET
SIGNER_PROVIDER
CELERY_BROKER_URL CELERY_BROKER_URL
CELERY_RESULT_URL CELERY_RESULT_URL
META_PROVIDER META_PROVIDER

View File

@@ -1,6 +1,6 @@
cic-eth[tools]==0.12.4a4 cic-eth[tools]==0.12.4a8
chainlib-eth>=0.0.9a3,<0.1.0 chainlib-eth>=0.0.9a14,<0.1.0
eth-erc20>=0.1.2a2,<0.2.0 eth-erc20>=0.1.2a3,<0.2.0
erc20-demurrage-token>=0.0.5a2,<0.1.0 erc20-demurrage-token>=0.0.5a2,<0.1.0
eth-accounts-index>=0.1.2a2,<0.2.0 eth-accounts-index>=0.1.2a2,<0.2.0
eth-address-index>=0.2.3a4,<0.3.0 eth-address-index>=0.2.3a4,<0.3.0
@@ -8,3 +8,5 @@ cic-eth-registry>=0.6.1a2,<0.7.0
erc20-transfer-authorization>=0.3.5a2,<0.4.0 erc20-transfer-authorization>=0.3.5a2,<0.4.0
erc20-faucet>=0.3.2a2,<0.4.0 erc20-faucet>=0.3.2a2,<0.4.0
sarafu-faucet>=0.0.7a2,<0.1.0 sarafu-faucet>=0.0.7a2,<0.1.0
confini>=0.4.2rc3,<1.0.0
crypto-dev-signer>=0.4.15a7,<=0.4.15

View File

@@ -2,179 +2,118 @@
set -a set -a
default_token=giftable_erc20_token . ${DEV_DATA_DIR}/env_reset
TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL}
TOKEN_NAME=${TOKEN_NAME}
TOKEN_TYPE=${TOKEN_TYPE:-$default_token}
cat <<EOF
external token settings:
token_type: $TOKEN_TYPE
token_symbol: $TOKEN_SYMBOL
token_name: $TOKEN_NAME
token_decimals: $TOKEN_DECIMALS
token_demurrage: $TOKEN_DEMURRAGE_LEVEL
token_redistribution_period: $TOKEN_REDISTRIBUTION_PERIOD
token_supply_limit: $TOKEN_SUPPLY_LIMIT
EOF
CHAIN_SPEC=${CHAIN_SPEC:-$CIC_CHAIN_SPEC} WAIT_FOR_TIMEOUT=${WAIT_FOR_TIMEOUT:-60}
RPC_HTTP_PROVIDER=${RPC_HTTP_PROVIDER:-$ETH_PROVIDER}
DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
DEV_RESERVE_AMOUNT=${DEV_ETH_RESERVE_AMOUNT:-""10000000000000000000000000000000000}
DEV_FAUCET_AMOUNT=${DEV_FAUCET_AMOUNT:-0}
DEV_ETH_KEYSTORE_FILE=${DEV_ETH_KEYSTORE_FILE:-`realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c`}
set -e set -e
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $DEV_ETH_KEYSTORE_FILE | jq -r .address)`
if [ ! -z $DEV_ETH_GAS_PRICE ]; then if [ ! -z $DEV_ETH_GAS_PRICE ]; then
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE" gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE" fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
>&2 echo using static gas price $DEV_ETH_GAS_PRICE
fi fi
echo "environment:"
printenv
echo \n
echo "using wallet address '$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER' from keystore file $DEV_ETH_KEYSTORE_FILE"
# This is a grassroots team convention for building the Bancor contracts using the bancor protocol repository truffle setup
# Running this in docker-internal dev container (built from Docker folder in this repo) will write a
# source-able env file to CIC_DATA_DIR. Services dependent on these contracts can mount this file OR
# define these parameters at runtime
# pushd /usr/src
if [ -z $CIC_DATA_DIR ]; then
CIC_DATA_DIR=`mktemp -d`
fi
>&2 echo using data dir $CIC_DATA_DIR
init_level_file=${CIC_DATA_DIR}/.init
if [ ! -f ${CIC_DATA_DIR}/.init ]; then
echo "Creating .init file..."
mkdir -p $CIC_DATA_DIR
touch $CIC_DATA_DIR/.init
# touch $init_level_file
fi
echo -n 1 > $init_level_file
# Abort on any error (including if wait-for-it fails).
# Wait for the backend to be up, if we know where it is. # Wait for the backend to be up, if we know where it is.
if [[ -n "${ETH_PROVIDER}" ]]; then if [ -z "${RPC_PROVIDER}" ]; then
echo "\$RPC_PROVIDER not set!"
export CONFINI_DIR=$_CONFINI_DIR
unset CONFINI_DIR
if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
echo "waiting for ${ETH_PROVIDER}..."
./wait-for-it.sh "${ETH_PROVIDER_HOST}:${ETH_PROVIDER_PORT}"
fi
if [ "$TOKEN_TYPE" == "$default_token" ]; then
if [ -z "$TOKEN_SYMBOL" ]; then
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
TOKEN_SYMBOL="GFT"
TOKEN_NAME="Giftable Token"
elif [ -z "$TOKEN_NAME" ]; then
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
TOKEN_NAME=$TOKEN_SYMBOL
fi
>&2 echo deploying default token $TOKEN_TYPE
echo giftable-token-deploy $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv
DEV_RESERVE_ADDRESS=`giftable-token-deploy $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv`
elif [ "$TOKEN_TYPE" == "erc20_demurrage_token" ]; then
if [ -z "$TOKEN_SYMBOL" ]; then
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
TOKEN_SYMBOL="SARAFU"
TOKEN_NAME="Sarafu Token"
elif [ -z "$TOKEN_NAME" ]; then
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
TOKEN_NAME=$TOKEN_SYMBOL
fi
>&2 echo deploying token $TOKEN_TYPE
if [ -z $TOKEN_SINK_ADDRESS ]; then
if [ ! -z $TOKEN_REDISTRIBUTION_PERIOD ]; then
>&2 echo -e "\033[;93mtoken sink address not set, so redistribution will be BURNED\033[;39m"
fi
fi
DEV_RESERVE_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL -vv -ww -s`
else
>&2 echo unknown token type $TOKEN_TYPE
exit 1
fi
echo "giftable-token-gift $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT"
giftable-token-gift $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -u -vv -s -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT
>&2 echo "deploy account index contract"
DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -vv -s -u -w`
>&2 echo "add deployer address as account index writer"
eth-accounts-index-writer $fee_price_arg -s -u -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_ACCOUNT_INDEX_ADDRESS -ww -vv $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
>&2 echo "deploy contract registry contract"
CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -y $DEV_ETH_KEYSTORE_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry -p $ETH_PROVIDER -vv -s -u -w`
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier ContractRegistry $CIC_REGISTRY_ADDRESS
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier AccountRegistry $DEV_ACCOUNT_INDEX_ADDRESS
# Deploy address declarator registry
>&2 echo "deploy address declarator contract"
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
DEV_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -s -u -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv $declarator_description`
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier AddressDeclarator $DEV_DECLARATOR_ADDRESS
# Deploy transfer authorization contact
>&2 echo "deploy transfer auth contract"
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv`
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS
# Deploy token index contract
>&2 echo "deploy token index contract"
DEV_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -s -u $fee_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv`
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS
>&2 echo "add reserve token to token index"
eth-token-index-add $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
# Sarafu faucet contract
>&2 echo "deploy token faucet contract"
DEV_FAUCET_ADDRESS=`sarafu-faucet-deploy $fee_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv --account-index-address $DEV_ACCOUNT_INDEX_ADDRESS $DEV_RESERVE_ADDRESS -s`
>&2 echo "set token faucet amount"
sarafu-faucet-set $fee_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_FAUCET_ADDRESS -vv -s --fee-limit 100000 $DEV_FAUCET_AMOUNT
>&2 echo "register faucet in registry"
eth-contract-registry-set -s -u $fee_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier Faucet $DEV_FAUCET_ADDRESS
>&2 echo "set faucet as token minter"
giftable-token-minter -s -u $fee_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -e $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv $DEV_FAUCET_ADDRESS
export CONFINI_DIR=$_CONFINI_DIR
else
echo "\$ETH_PROVIDER not set!"
exit 1 exit 1
fi fi
mkdir -p $CIC_DATA_DIR unset CONFINI_DIR
>&2 echo using data dir $CIC_DATA_DIR for environment variable dump
# this is consumed in downstream services to set environment variables if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
cat << EOF > $CIC_DATA_DIR/.env IFS=: read -a p <<< "$RPC_PROVIDER"
export CIC_REGISTRY_ADDRESS=$CIC_REGISTRY_ADDRESS read -i "/" rpc_provider_port <<< "${p[2]}"
export CIC_TRUST_ADDRESS=$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER rpc_provider_host=${p[1]:2}
export CIC_DECLARATOR_ADDRESS=$CIC_DECLARATOR_ADDRESS echo "waiting for provider host $rpc_provider_host port $rpc_provider_port..."
EOF ./wait-for-it.sh "$rpc_provider_host:$rpc_provider_port" -t $WAIT_FOR_TIMEOUT
fi
cat ./envlist | bash from_env.sh > $CIC_DATA_DIR/.env_all if [ "$TOKEN_TYPE" == "giftable_erc20_token" ]; then
cat ./envlist if [ -z "$TOKEN_SYMBOL" ]; then
# popd >&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
TOKEN_SYMBOL="GFT"
TOKEN_NAME="Giftable Token"
elif [ -z "$TOKEN_NAME" ]; then
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
TOKEN_NAME=$TOKEN_SYMBOL
fi
>&2 echo deploying default token $TOKEN_TYPE
echo giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv
DEV_RESERVE_ADDRESS=`giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv`
elif [ "$TOKEN_TYPE" == "erc20_demurrage_token" ]; then
if [ -z "$TOKEN_SYMBOL" ]; then
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
TOKEN_SYMBOL="DET"
TOKEN_NAME="Demurrage Token"
elif [ -z "$TOKEN_NAME" ]; then
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
TOKEN_NAME=$TOKEN_SYMBOL
fi
>&2 echo deploying token $TOKEN_TYPE
if [ -z $TOKEN_SINK_ADDRESS ]; then
if [ ! -z $TOKEN_REDISTRIBUTION_PERIOD ]; then
>&2 echo -e "\033[;93mtoken sink address not set, so redistribution will be BURNED\033[;39m"
fi
fi
DEV_RESERVE_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL -vv -ww -s`
else
>&2 echo unknown token type $TOKEN_TYPE
exit 1
fi
echo "giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT"
giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -u -vv -s -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT
>&2 echo "deploy account index contract"
DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -y $WALLET_KEY_FILE -vv -s -u -w`
>&2 echo "add deployer address as account index writer"
eth-accounts-index-writer $fee_price_arg -s -u -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -e $DEV_ACCOUNT_INDEX_ADDRESS -ww -vv $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
>&2 echo "deploy contract registry contract"
CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry -p $RPC_PROVIDER -vv -s -u -w`
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier ContractRegistry $CIC_REGISTRY_ADDRESS
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier AccountRegistry $DEV_ACCOUNT_INDEX_ADDRESS
# Deploy address declarator registry
>&2 echo "deploy address declarator contract"
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
DEV_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -s -u -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv $declarator_description`
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier AddressDeclarator $DEV_DECLARATOR_ADDRESS
# Deploy transfer authorization contact
>&2 echo "deploy transfer auth contract"
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv`
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS
# Deploy token index contract
>&2 echo "deploy token index contract"
DEV_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv`
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS
>&2 echo "add reserve token to token index"
eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
# Sarafu faucet contract
>&2 echo "deploy token faucet contract"
DEV_FAUCET_ADDRESS=`sarafu-faucet-deploy $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv --account-index-address $DEV_ACCOUNT_INDEX_ADDRESS $DEV_RESERVE_ADDRESS -s`
>&2 echo "set token faucet amount"
sarafu-faucet-set $fee_price_arg -w -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -e $DEV_FAUCET_ADDRESS -vv -s --fee-limit 100000 $DEV_FAUCET_AMOUNT
>&2 echo "register faucet in registry"
eth-contract-registry-set -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier Faucet $DEV_FAUCET_ADDRESS
>&2 echo "set faucet as token minter"
giftable-token-minter -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv $DEV_FAUCET_ADDRESS
#echo "export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL" >> ${DEV_DATA_DIR}/env_reset
export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
confini-dump -vv --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
confini-dump --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config
set +a set +a
set +e set +e
echo -n 2 > $init_level_file
exec "$@" exec "$@"

View File

@@ -1,5 +1,13 @@
#! /bin/bash #! /bin/bash
>&2 echo -e "\033[;96mRUNNING\033[;39m configurations"
./config.sh
if [ $? -ne "0" ]; then
>&2 echo -e "\033[;31mFAILED\033[;39m configurations"
exit 1;
fi
>&2 echo -e "\033[;32mSUCCEEDED\033[;39m configurations"
if [[ $((RUN_MASK & 1)) -eq 1 ]] if [[ $((RUN_MASK & 1)) -eq 1 ]]
then then
>&2 echo -e "\033[;96mRUNNING\033[;39m RUN_MASK 1 - contract deployment" >&2 echo -e "\033[;96mRUNNING\033[;39m RUN_MASK 1 - contract deployment"

View File

@@ -1,68 +1,40 @@
#!/bin/bash #!/bin/bash
# defaults # defaults
#initlevel=`cat ${CIC_DATA_DIR}/.init` source ${DEV_DATA_DIR}/env_reset
#echo $inilevel cat ${DEV_DATA_DIR}/env_reset
#if [ $initlevel -lt 2 ]; then
# >&2 echo "initlevel too low $initlevel"
# exit 1
#fi
source ${CIC_DATA_DIR}/.env
source ${CIC_DATA_DIR}/.env_all
DEV_PIP_EXTRA_INDEX_URL=${DEV_PIP_EXTRA_INDEX_URL:-https://pip.grassrootseconomics.net:8433}
DEV_DATABASE_NAME_CIC_ETH=${DEV_DATABASE_NAME_CIC_ETH:-"cic-eth"}
CIC_DATA_DIR=${CIC_DATA_DIR:-/tmp/cic}
ETH_PASSPHRASE=''
CIC_DEFAULT_TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
TOKEN_SYMBOL=$CIC_DEFAULT_TOKEN_SYMBOL
CHAIN_SPEC=${CHAIN_SPEC:-$CIC_CHAIN_SPEC}
RPC_HTTP_PROVIDER=${RPC_HTTP_PROVIDER:-$ETH_PROVIDER}
# Debug flag # Debug flag
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
keystore_file=./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c
debug='-vv' debug='-vv'
gas_amount=100000000000000000000000
token_amount=${gas_amount}
env_out_file=${CIC_DATA_DIR}/.env_seed
init_level_file=${CIC_DATA_DIR}/.init
empty_config_dir=$CONFINI_DIR/empty empty_config_dir=$CONFINI_DIR/empty
truncate $env_out_file -s 0
set -e set -e
set -a set -a
#pip install --extra-index-url $DEV_PIP_EXTRA_INDEX_URL eth-address-index==0.1.1a7
export CONFINI_DIR=$_CONFINI_DIR
unset CONFINI_DIR unset CONFINI_DIR
# get required addresses from registries # get required addresses from registries
DEV_TOKEN_INDEX_ADDRESS=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $ETH_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw TokenRegistry` token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw TokenRegistry`
DEV_ACCOUNT_INDEX_ADDRESS=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $ETH_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw AccountRegistry` account_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw AccountRegistry`
DEV_RESERVE_ADDRESS=`eth-token-index-list -i $CHAIN_SPEC -u -p $ETH_PROVIDER -e $DEV_TOKEN_INDEX_ADDRESS -vv --raw $CIC_DEFAULT_TOKEN_SYMBOL` reserve_address=`eth-token-index-list -i $CHAIN_SPEC -u -p $RPC_PROVIDER -e $token_index_address -vv --raw $CIC_DEFAULT_TOKEN_SYMBOL`
cat <<EOF
Token registry: $DEV_TOKEN_INDEX_ADDRESS >&2 echo "Token registry: $token_index_address"
Account reigstry: $DEV_ACCOUNT_INDEX_ADDRESS >&2 echo "Account registry: $account_index_address"
Reserve address: $DEV_RESERVE_ADDRESS ($CIC_DEFAULT_TOKEN_SYMBOL) >&2 echo "Reserve address: $reserve_address ($TOKEN_SYMBOL)"
EOF
>&2 echo "create account for gas gifter" >&2 echo "create account for gas gifter"
old_gas_provider=$DEV_ETH_ACCOUNT_GAS_PROVIDER old_gas_provider=$DEV_ETH_ACCOUNT_GAS_PROVIDER
DEV_ETH_ACCOUNT_GAS_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register` #DEV_ETH_ACCOUNT_GAS_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_GAS_GIFTER >> $env_out_file DEV_ETH_ACCOUNT_GAS_GIFTER=`cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
cic-eth-tag -i $CHAIN_SPEC GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER cic-eth-tag -i $CHAIN_SPEC GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER
>&2 echo "create account for sarafu gifter" >&2 echo "create account for sarafu gifter"
DEV_ETH_ACCOUNT_SARAFU_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register` DEV_ETH_ACCOUNT_SARAFU_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_SARAFU_GIFTER=$DEV_ETH_ACCOUNT_SARAFU_GIFTER >> $env_out_file
cic-eth-tag -i $CHAIN_SPEC SARAFU_GIFTER $DEV_ETH_ACCOUNT_SARAFU_GIFTER cic-eth-tag -i $CHAIN_SPEC SARAFU_GIFTER $DEV_ETH_ACCOUNT_SARAFU_GIFTER
>&2 echo "create account for approval escrow owner" >&2 echo "create account for approval escrow owner"
DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register` DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=$DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER >> $env_out_file
cic-eth-tag -i $CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER cic-eth-tag -i $CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER
#>&2 echo "create account for faucet owner" #>&2 echo "create account for faucet owner"
@@ -72,50 +44,46 @@ cic-eth-tag -i $CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFE
>&2 echo "create account for accounts index writer" >&2 echo "create account for accounts index writer"
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register` DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
echo DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=$DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER >> $env_out_file
cic-eth-tag -i $CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER cic-eth-tag -i $CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
>&2 echo "add acccounts index writer account as writer on contract" >&2 echo "add acccounts index writer account as writer on contract"
eth-accounts-index-writer -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_ACCOUNT_INDEX_ADDRESS -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER eth-accounts-index-writer -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $account_index_address -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
# Transfer gas to custodial gas provider adddress # Transfer gas to custodial gas provider adddress
_CONFINI_DIR=$CONFINI_DIR _CONFINI_DIR=$CONFINI_DIR
unset CONFINI_DIR unset CONFINI_DIR
>&2 echo gift gas to gas gifter >&2 echo gift gas to gas gifter
>&2 eth-gas -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_GAS_GIFTER $gas_amount >&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_GAS_GIFTER $DEV_GAS_AMOUNT
>&2 echo gift gas to sarafu token owner >&2 echo gift gas to sarafu token owner
>&2 eth-gas -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER $gas_amount >&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER $DEV_GAS_AMOUNT
>&2 echo gift gas to account index owner >&2 echo gift gas to account index owner
>&2 eth-gas -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER $gas_amount >&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER $DEV_GAS_AMOUNT
# Send token to token creator # Send token to token creator
>&2 echo "gift tokens to sarafu owner" >&2 echo "gift tokens to sarafu owner"
echo "giftable-token-gift -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_RESERVE_ADDRESS -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $token_amount" echo "giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $DEV_TOKEN_AMOUNT"
>&2 giftable-token-gift -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_RESERVE_ADDRESS -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $token_amount >&2 giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $DEV_TOKEN_AMOUNT
# Send token to token gifter # Send token to token gifter
>&2 echo "gift tokens to keystore address" >&2 echo "gift tokens to keystore address"
>&2 giftable-token-gift -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_RESERVE_ADDRESS -a $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER -w $debug $token_amount >&2 giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER -w $debug $DEV_TOKEN_AMOUNT
>&2 echo "set sarafu token to reserve token (temporarily while bancor contracts are not connected)" >&2 echo "set sarafu token to reserve token (temporarily while bancor contracts are not connected)"
echo DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS >> $env_out_file
export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS
# Transfer tokens to gifter address # Transfer tokens to gifter address
>&2 echo "transfer tokens to token gifter address" >&2 echo "transfer tokens to token gifter address"
>&2 erc20-transfer -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER --fee-limit 100000 -e $DEV_RESERVE_ADDRESS -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${token_amount:0:-1} >&2 erc20-transfer -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER --fee-limit 100000 -e $reserve_address -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${DEV_TOKEN_AMOUNT:0:-1}
#echo -n 0 > $init_level_file
#CONFINI_DIR=$_CONFINI_DIR
# Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration # Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration
echo "cic-eth-ctl -i $CHAIN_SPEC unlock INIT"
cic-eth-ctl -i $CHAIN_SPEC unlock INIT cic-eth-ctl -i $CHAIN_SPEC unlock INIT
cic-eth-ctl -i $CHAIN_SPEC unlock SEND cic-eth-ctl -i $CHAIN_SPEC unlock SEND
cic-eth-ctl -i $CHAIN_SPEC unlock QUEUE cic-eth-ctl -i $CHAIN_SPEC unlock QUEUE
export CONFINI_DIR=$_CONFINI_DIR confini-dump --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config
set +a set +a
set +e set +e

View File

@@ -2,7 +2,7 @@
.cache .cache
.dot .dot
**/doc **/doc
**/node_modules node_modules/
**/venv **/venv
**/.venv **/.venv

View File

@@ -1,64 +1,61 @@
# standard imports
import argparse import argparse
import logging import logging
import sys
import os import os
import sys
# external imports # external imports
import celery import celery
import confini
import redis
from chainlib.chain import ChainSpec from chainlib.chain import ChainSpec
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.connection import EthHTTPConnection from chainlib.eth.connection import EthHTTPConnection
from confini import Config
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
from crypto_dev_signer.keystore.dict import DictKeystore from crypto_dev_signer.keystore.dict import DictKeystore
# local imports # local imports
from import_task import ImportTask, MetadataTask
from import_util import BalanceProcessor, get_celery_worker_status from import_util import BalanceProcessor, get_celery_worker_status
from import_task import ImportTask, MetadataTask
logging.basicConfig(level=logging.WARNING) default_config_dir = './config'
logg = logging.getLogger() logg = logging.getLogger()
config_dir = './config' arg_parser = argparse.ArgumentParser(description='Daemon worker that handles data seeding tasks.')
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use.')
arg_parser.add_argument('--env-prefix',
default=os.environ.get('CONFINI_ENV_PREFIX'),
dest='env_prefix',
type=str,
help='environment prefix for variables to overwrite configuration.')
arg_parser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)')
arg_parser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
arg_parser.add_argument('--include-balances', dest='include_balances', help='include opening balance transactions',
action='store_true')
arg_parser.add_argument('--meta-host', dest='meta_host', type=str, help='metadata server host')
arg_parser.add_argument('--meta-port', dest='meta_port', type=int, help='metadata server host')
arg_parser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit data seeding tasks to.')
arg_parser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
arg_parser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
arg_parser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
arg_parser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
arg_parser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol',
help='Token symbol to use for transactions')
arg_parser.add_argument('-v', help='be verbose', action='store_true')
arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
arg_parser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
arg_parser.add_argument('--offset', type=int, default=0, help='block offset to start syncer from')
arg_parser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1',
help='chain spec')
arg_parser.add_argument('import_dir', default='out', type=str, help='user export directory')
args = arg_parser.parse_args()
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks') if args.vv:
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address') logging.getLogger().setLevel(logging.DEBUG)
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing') elif args.v:
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
argparser.add_argument('--meta-host', dest='meta_host', type=str, help='metadata server host')
argparser.add_argument('--meta-port', dest='meta_port', type=int, help='metadata server host')
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
argparser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol',
help='Token symbol to use for transactions')
argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)')
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str,
help='environment prefix for variables to overwrite configuration')
argparser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit transaction tasks to')
argparser.add_argument('--offset', type=int, default=0, help='block offset to start syncer from')
argparser.add_argument('-v', help='be verbose', action='store_true')
argparser.add_argument('-vv', help='be more verbose', action='store_true')
argparser.add_argument('user_dir', default='out', type=str, help='user export directory')
args = argparser.parse_args(sys.argv[1:])
if args.v:
logging.getLogger().setLevel(logging.INFO) logging.getLogger().setLevel(logging.INFO)
elif args.vv: config = Config(args.c, args.env_prefix)
logging.getLogger().setLevel(logging.DEBUG)
config_dir = os.path.join(args.c)
os.makedirs(config_dir, 0o777, True)
config = confini.Config(config_dir, args.env_prefix)
config.process() config.process()
# override args
args_override = { args_override = {
'CIC_CHAIN_SPEC': getattr(args, 'i'), 'CIC_CHAIN_SPEC': getattr(args, 'i'),
'ETH_PROVIDER': getattr(args, 'p'), 'ETH_PROVIDER': getattr(args, 'p'),
@@ -73,88 +70,76 @@ args_override = {
config.dict_override(args_override, 'cli flag') config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE') config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL') config.censor('PASSWORD', 'SSL')
logg.debug('config loaded from {}:\n{}'.format(config_dir, config)) logg.debug(f'config loaded from {args.c}:\n{config}')
redis_host = config.get('REDIS_HOST') db_config = {
redis_port = config.get('REDIS_PORT') 'database': config.get('DATABASE_NAME'),
redis_db = config.get('REDIS_DB') 'host': config.get('DATABASE_HOST'),
r = redis.Redis(redis_host, redis_port, redis_db) 'port': config.get('DATABASE_PORT'),
'user': config.get('DATABASE_USER'),
'password': config.get('DATABASE_PASSWORD')
}
ImportTask.db_config = db_config
# create celery apps
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
status = get_celery_worker_status(celery_app=celery_app)
signer_address = None
keystore = DictKeystore() keystore = DictKeystore()
if args.y is not None: os.path.isfile(args.y)
logg.debug('loading keystore file {}'.format(args.y)) logg.debug(f'loading keystore file {args.y}')
signer_address = keystore.import_keystore_file(args.y) signer_address = keystore.import_keystore_file(args.y)
logg.debug('now have key for signer address {}'.format(signer_address)) logg.debug(f'now have key for signer address {signer_address}')
# define signer
signer = EIP155Signer(keystore) signer = EIP155Signer(keystore)
queue = args.q block_offset = -1 if args.head else args.offset
chain_str = config.get('CIC_CHAIN_SPEC')
block_offset = 0
if args.head:
block_offset = -1
else:
block_offset = args.offset
chain_str = config.get('CIC_CHAIN_SPEC')
chain_spec = ChainSpec.from_chain_str(chain_str) chain_spec = ChainSpec.from_chain_str(chain_str)
ImportTask.chain_spec = chain_spec
old_chain_spec_str = args.old_chain_spec old_chain_spec_str = args.old_chain_spec
old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str) old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str)
user_dir = args.user_dir # user_out_dir from import_users.py
token_symbol = args.token_symbol
MetadataTask.meta_host = config.get('META_HOST') MetadataTask.meta_host = config.get('META_HOST')
MetadataTask.meta_port = config.get('META_PORT') MetadataTask.meta_port = config.get('META_PORT')
ImportTask.chain_spec = chain_spec
txs_dir = os.path.join(args.import_dir, 'txs')
os.makedirs(txs_dir, exist_ok=True)
sys.stdout.write(f'created txs dir: {txs_dir}')
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
get_celery_worker_status(celery_app)
def main(): def main():
conn = EthHTTPConnection(config.get('ETH_PROVIDER')) conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
ImportTask.balance_processor = BalanceProcessor(conn,
ImportTask.balance_processor = BalanceProcessor(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'), chain_spec,
signer_address, signer) config.get('CIC_REGISTRY_ADDRESS'),
ImportTask.balance_processor.init(token_symbol) signer_address,
signer)
# TODO get decimals from token ImportTask.balance_processor.init(args.token_symbol)
balances = {} balances = {}
f = open('{}/balances.csv'.format(user_dir, 'r')) accuracy = 10 ** 6
remove_zeros = 10 ** 6 count = 0
i = 0 with open(f'{args.import_dir}/balances.csv', 'r') as balances_file:
while True: while True:
l = f.readline() line = balances_file.readline()
if l is None: if line is None:
break break
r = l.split(',') balance_data = line.split(',')
try: try:
address = to_checksum_address(r[0]) blockchain_address = to_checksum_address(balance_data[0])
sys.stdout.write('loading balance {} {} {}'.format(i, address, r[1]).ljust(200) + "\r") logg.info(
except ValueError: 'loading balance: {} {} {}'.format(count, blockchain_address, balance_data[1].ljust(200) + "\r"))
break except ValueError:
balance = int(int(r[1].rstrip()) / remove_zeros) break
balances[address] = balance balance = int(int(balance_data[1].rstrip()) / accuracy)
i += 1 balances[blockchain_address] = balance
count += 1
f.close()
ImportTask.balances = balances ImportTask.balances = balances
ImportTask.count = i ImportTask.count = count
ImportTask.import_dir = user_dir ImportTask.include_balances = args.include_balances is True
ImportTask.import_dir = args.import_dir
s = celery.signature( s_send_txs = celery.signature(
'import_task.send_txs', 'import_task.send_txs', [ImportTask.balance_processor.nonce_offset], queue=args.q)
[ s_send_txs.apply_async()
MetadataTask.balance_processor.nonce_offset,
],
queue=queue,
)
s.apply_async()
argv = ['worker'] argv = ['worker']
if args.vv: if args.vv:
@@ -165,6 +150,7 @@ def main():
argv.append(args.q) argv.append(args.q)
argv.append('-n') argv.append('-n')
argv.append(args.q) argv.append(args.q)
argv.append(f'--pidfile={args.q}.pid')
celery_app.worker_main(argv) celery_app.worker_main(argv)

View File

@@ -1,71 +1,63 @@
# standard import # standard imports
import argparse import argparse
import csv import csv
import logging import logging
import os import os
import psycopg2
# third-party imports # external imports
import celery from confini import Config
import confini
# local imports # local imports
from import_util import get_celery_worker_status
default_config_dir = './config'
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
default_config_dir = './config' arg_parser = argparse.ArgumentParser(description='Pins import script.')
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use.')
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
arg_parser.add_argument('--env-prefix', arg_parser.add_argument('--env-prefix',
default=os.environ.get('CONFINI_ENV_PREFIX'), default=os.environ.get('CONFINI_ENV_PREFIX'),
dest='env_prefix', dest='env_prefix',
type=str, type=str,
help='environment prefix for variables to overwrite configuration') help='environment prefix for variables to overwrite configuration.')
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit transaction tasks to') arg_parser.add_argument('import_dir', default='out', type=str, help='user export directory')
arg_parser.add_argument('-v', help='be verbose', action='store_true') arg_parser.add_argument('-v', help='be verbose', action='store_true')
arg_parser.add_argument('-vv', help='be more verbose', action='store_true') arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
arg_parser.add_argument('pins_dir', default='out', type=str, help='user export directory')
args = arg_parser.parse_args() args = arg_parser.parse_args()
# set log levels if args.vv:
if args.v: logging.getLogger().setLevel(logging.DEBUG)
logg.setLevel(logging.INFO) elif args.v:
elif args.vv: logging.getLogger().setLevel(logging.INFO)
logg.setLevel(logging.DEBUG)
# process configs config = Config(args.c, args.env_prefix)
config_dir = args.c
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
config.process() config.process()
config.censor('PASSWORD', 'DATABASE') config.censor('PASSWORD', 'DATABASE')
logg.debug('config loaded from {}:\n{}'.format(args.c, config)) logg.debug(f'config loaded from {args.c}:\n{config}')
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
status = get_celery_worker_status(celery_app=celery_app)
db_configs = {
'database': config.get('DATABASE_NAME'),
'host': config.get('DATABASE_HOST'),
'port': config.get('DATABASE_PORT'),
'user': config.get('DATABASE_USER'),
'password': config.get('DATABASE_PASSWORD')
}
def main(): def main():
with open(f'{args.pins_dir}/pins.csv') as pins_file: with open(f'{args.import_dir}/pins.csv') as pins_file:
phone_to_pins = [tuple(row) for row in csv.reader(pins_file)] phone_to_pins = [tuple(row) for row in csv.reader(pins_file)]
s_import_pins = celery.signature( db_conn = psycopg2.connect(
'import_task.set_pins', database=config.get('DATABASE_NAME'),
(db_configs, phone_to_pins), host=config.get('DATABASE_HOST'),
queue=args.q port=config.get('DATABASE_PORT'),
user=config.get('DATABASE_USER'),
password=config.get('DATABASE_PASSWORD')
) )
result = s_import_pins.apply_async() db_cursor = db_conn.cursor()
logg.debug(f'TASK: {result.id}, STATUS: {result.status}') sql = 'UPDATE account SET password_hash = %s WHERE phone_number = %s'
for element in phone_to_pins:
db_cursor.execute(sql, (element[1], element[0]))
logg.debug(f'Updating account: {element[0]} with: {element[1]}')
db_conn.commit()
db_cursor.close()
db_conn.close()
if __name__ == '__main__': if __name__ == '__main__':

View File

@@ -1,38 +1,37 @@
# standard imports # standard imports
import csv
import json import json
import logging import logging
import os import os
import random import random
import urllib.error import uuid
import urllib.parse from urllib import error, parse, request
import urllib.request
# external imports # external imports
import celery import celery
import psycopg2 import psycopg2
from celery import Task
from chainlib.chain import ChainSpec
from chainlib.eth.address import to_checksum_address from chainlib.eth.address import to_checksum_address
from chainlib.eth.tx import ( from chainlib.eth.tx import raw, unpack
unpack, from cic_types.models.person import Person, generate_metadata_pointer
raw, from hexathon import add_0x, strip_0x
)
from cic_types.models.person import Person # local imports
from cic_types.processor import generate_metadata_pointer
from hexathon import (
strip_0x,
add_0x,
)
logg = logging.getLogger()
celery_app = celery.current_app celery_app = celery.current_app
logg = logging.getLogger()
class ImportTask(celery.Task): class ImportTask(Task):
balances = None balances = None
import_dir = 'out'
count = 0
chain_spec = None
balance_processor = None balance_processor = None
chain_spec: ChainSpec = None
count = 0
db_config: dict = None
import_dir = ''
include_balances = False
max_retries = None max_retries = None
@@ -41,121 +40,70 @@ class MetadataTask(ImportTask):
meta_port = None meta_port = None
meta_path = '' meta_path = ''
meta_ssl = False meta_ssl = False
autoretry_for = ( autoretry_for = (error.HTTPError, OSError,)
urllib.error.HTTPError,
OSError,
)
retry_jitter = True retry_jitter = True
retry_backoff = True retry_backoff = True
retry_backoff_max = 60 retry_backoff_max = 60
@classmethod @classmethod
def meta_url(self): def meta_url(cls):
scheme = 'http' scheme = 'http'
if self.meta_ssl: if cls.meta_ssl:
scheme += 's' scheme += 's'
url = urllib.parse.urlparse('{}://{}:{}/{}'.format(scheme, self.meta_host, self.meta_port, self.meta_path)) url = parse.urlparse(f'{scheme}://{cls.meta_host}:{cls.meta_port}/{cls.meta_path}')
return urllib.parse.urlunparse(url) return parse.urlunparse(url)
def old_address_from_phone(base_path, phone): def old_address_from_phone(base_path: str, phone_number: str):
pidx = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone') pid_x = generate_metadata_pointer(phone_number.encode('utf-8'), ':cic.phone')
phone_idx_path = os.path.join('{}/phone/{}/{}/{}'.format( phone_idx_path = os.path.join(f'{base_path}/phone/{pid_x[:2]}/{pid_x[2:4]}/{pid_x}')
base_path, with open(phone_idx_path, 'r') as f:
pidx[:2], old_address = f.read()
pidx[2:4],
pidx,
)
)
f = open(phone_idx_path, 'r')
old_address = f.read()
f.close()
return old_address return old_address
@celery_app.task(bind=True, base=MetadataTask) @celery_app.task(bind=True, base=MetadataTask)
def resolve_phone(self, phone): def generate_person_metadata(self, blockchain_address: str, phone_number: str):
identifier = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone') logg.debug(f'blockchain address: {blockchain_address}')
url = urllib.parse.urljoin(self.meta_url(), identifier) old_blockchain_address = old_address_from_phone(self.import_dir, phone_number)
logg.debug('attempt getting phone pointer at {} for phone {}'.format(url, phone)) old_address_upper = strip_0x(old_blockchain_address).upper()
r = urllib.request.urlopen(url) metadata_path = f'{self.import_dir}/old/{old_address_upper[:2]}/{old_address_upper[2:4]}/{old_address_upper}.json'
address = json.load(r) with open(metadata_path, 'r') as metadata_file:
address = address.replace('"', '') person_metadata = json.load(metadata_file)
logg.debug('address {} for phone {}'.format(address, phone)) person = Person.deserialize(person_metadata)
if not person.identities.get('evm'):
return address person.identities['evm'] = {}
sub_chain_str = f'{self.chain_spec.common_name()}:{self.chain_spec.network_id()}'
person.identities['evm'][sub_chain_str] = [add_0x(blockchain_address)]
@celery_app.task(bind=True, base=MetadataTask) blockchain_address = strip_0x(blockchain_address)
def generate_metadata(self, address, phone): file_path = os.path.join(
old_address = old_address_from_phone(self.import_dir, phone)
logg.debug('address {}'.format(address))
old_address_upper = strip_0x(old_address).upper()
metadata_path = '{}/old/{}/{}/{}.json'.format(
self.import_dir,
old_address_upper[:2],
old_address_upper[2:4],
old_address_upper,
)
f = open(metadata_path, 'r')
o = json.load(f)
f.close()
u = Person.deserialize(o)
if u.identities.get('evm') == None:
u.identities['evm'] = {}
sub_chain_str = '{}:{}'.format(self.chain_spec.common_name(), self.chain_spec.network_id())
u.identities['evm'][sub_chain_str] = [add_0x(address)]
new_address_clean = strip_0x(address)
filepath = os.path.join(
self.import_dir, self.import_dir,
'new', 'new',
new_address_clean[:2].upper(), blockchain_address[:2].upper(),
new_address_clean[2:4].upper(), blockchain_address[2:4].upper(),
new_address_clean.upper() + '.json', blockchain_address.upper() + '.json'
) )
os.makedirs(os.path.dirname(filepath), exist_ok=True) os.makedirs(os.path.dirname(file_path), exist_ok=True)
serialized_person_metadata = person.serialize()
o = u.serialize() with open(file_path, 'w') as metadata_file:
f = open(filepath, 'w') metadata_file.write(json.dumps(serialized_person_metadata))
f.write(json.dumps(o)) logg.debug(f'written person metadata for address: {blockchain_address}')
f.close()
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.person')
meta_filepath = os.path.join( meta_filepath = os.path.join(
self.import_dir, self.import_dir,
'meta', 'meta',
'{}.json'.format(new_address_clean.upper()), '{}.json'.format(blockchain_address.upper()),
) )
os.symlink(os.path.realpath(filepath), meta_filepath) os.symlink(os.path.realpath(file_path), meta_filepath)
return blockchain_address
# write ussd data
ussd_data = {
'phone': phone,
'is_activated': 1,
'preferred_language': random.sample(['en', 'sw'], 1)[0],
'is_disabled': False
}
ussd_data_dir = os.path.join(self.import_dir, 'ussd')
ussd_data_file_path = os.path.join(ussd_data_dir, f'{old_address}.json')
f = open(ussd_data_file_path, 'w')
f.write(json.dumps(ussd_data))
f.close()
# write preferences data @celery_app.task(bind=True, base=MetadataTask)
def generate_preferences_data(self, data: tuple):
blockchain_address: str = data[0]
preferences = data[1]
preferences_dir = os.path.join(self.import_dir, 'preferences') preferences_dir = os.path.join(self.import_dir, 'preferences')
preferences_data = { preferences_key = generate_metadata_pointer(bytes.fromhex(strip_0x(blockchain_address)), ':cic.preferences')
'preferred_language': ussd_data['preferred_language']
}
preferences_key = generate_metadata_pointer(bytes.fromhex(new_address_clean[2:]), ':cic.preferences')
preferences_filepath = os.path.join(preferences_dir, 'meta', preferences_key) preferences_filepath = os.path.join(preferences_dir, 'meta', preferences_key)
filepath = os.path.join( filepath = os.path.join(
preferences_dir, preferences_dir,
'new', 'new',
@@ -164,95 +112,95 @@ def generate_metadata(self, address, phone):
preferences_key.upper() + '.json' preferences_key.upper() + '.json'
) )
os.makedirs(os.path.dirname(filepath), exist_ok=True) os.makedirs(os.path.dirname(filepath), exist_ok=True)
with open(filepath, 'w') as preferences_file:
f = open(filepath, 'w') preferences_file.write(json.dumps(preferences))
f.write(json.dumps(preferences_data)) logg.debug(f'written preferences metadata: {preferences} for address: {blockchain_address}')
f.close()
os.symlink(os.path.realpath(filepath), preferences_filepath) os.symlink(os.path.realpath(filepath), preferences_filepath)
return blockchain_address
logg.debug('found metadata {} for phone {}'.format(o, phone))
return address
@celery_app.task(bind=True, base=MetadataTask) @celery_app.task(bind=True, base=MetadataTask)
def opening_balance_tx(self, address, phone, serial): def generate_pins_data(self, blockchain_address: str, phone_number: str):
old_address = old_address_from_phone(self.import_dir, phone) pins_file = f'{self.import_dir}/pins.csv'
file_op = 'a' if os.path.exists(pins_file) else 'w'
with open(pins_file, file_op) as pins_file:
password_hash = uuid.uuid4().hex
pins_file.write(f'{phone_number},{password_hash}\n')
logg.debug(f'written pin data for address: {blockchain_address}')
return blockchain_address
k = to_checksum_address(strip_0x(old_address))
balance = self.balances[k]
logg.debug('found balance {} for address {} phone {}'.format(balance, old_address, phone))
@celery_app.task(bind=True, base=MetadataTask)
def generate_ussd_data(self, blockchain_address: str, phone_number: str):
ussd_data_file = f'{self.import_dir}/ussd_data.csv'
file_op = 'a' if os.path.exists(ussd_data_file) else 'w'
preferred_language = random.sample(["en", "sw"], 1)[0]
preferences = {'preferred_language': preferred_language}
with open(ussd_data_file, file_op) as ussd_data_file:
ussd_data_file.write(f'{phone_number}, { 1}, {preferred_language}, {False}\n')
logg.debug(f'written ussd data for address: {blockchain_address}')
return blockchain_address, preferences
@celery_app.task(bind=True, base=MetadataTask)
def opening_balance_tx(self, blockchain_address: str, phone_number: str, serial: str):
old_blockchain_address = old_address_from_phone(self.import_dir, phone_number)
address = to_checksum_address(strip_0x(old_blockchain_address))
balance = self.balances[address]
logg.debug(f'found balance: {balance} for address: {address} phone: {phone_number}')
decimal_balance = self.balance_processor.get_decimal_amount(int(balance)) decimal_balance = self.balance_processor.get_decimal_amount(int(balance))
tx_hash_hex, o = self.balance_processor.get_rpc_tx(blockchain_address, decimal_balance, serial)
(tx_hash_hex, o) = self.balance_processor.get_rpc_tx(address, decimal_balance, serial)
tx = unpack(bytes.fromhex(strip_0x(o)), self.chain_spec) tx = unpack(bytes.fromhex(strip_0x(o)), self.chain_spec)
logg.debug('generated tx token value {} to {} tx hash {}'.format(decimal_balance, address, tx_hash_hex)) logg.debug(f'generated tx token value: {decimal_balance}: {blockchain_address} tx hash {tx_hash_hex}')
tx_path = os.path.join(self.import_dir, 'txs', strip_0x(tx_hash_hex))
tx_path = os.path.join( with open(tx_path, 'w') as tx_file:
self.import_dir, tx_file.write(strip_0x(o))
'txs', logg.debug(f'written tx with tx hash: {tx["hash"]} for address: {blockchain_address}')
strip_0x(tx_hash_hex), tx_nonce_path = os.path.join(self.import_dir, 'txs', '.' + str(tx['nonce']))
)
f = open(tx_path, 'w')
f.write(strip_0x(o))
f.close()
tx_nonce_path = os.path.join(
self.import_dir,
'txs',
'.' + str(tx['nonce']),
)
os.symlink(os.path.realpath(tx_path), tx_nonce_path) os.symlink(os.path.realpath(tx_path), tx_nonce_path)
return tx['hash'] return tx['hash']
@celery_app.task(bind=True, base=ImportTask, autoretry_for=(FileNotFoundError,), max_retries=None, @celery_app.task(bind=True, base=MetadataTask)
def resolve_phone(self, phone_number: str):
identifier = generate_metadata_pointer(phone_number.encode('utf-8'), ':cic.phone')
url = parse.urljoin(self.meta_url(), identifier)
logg.debug(f'attempt getting phone pointer at: {url} for phone: {phone_number}')
r = request.urlopen(url)
address = json.load(r)
address = address.replace('"', '')
logg.debug(f'address: {address} for phone: {phone_number}')
return address
@celery_app.task(autoretry_for=(FileNotFoundError,),
bind=True,
base=ImportTask,
max_retries=None,
default_retry_delay=0.1) default_retry_delay=0.1)
def send_txs(self, nonce): def send_txs(self, nonce):
if nonce == self.count + self.balance_processor.nonce_offset:
logg.info('reached nonce {} (offset {} + count {}) exiting'.format(nonce, self.balance_processor.nonce_offset,
self.count))
return
logg.debug('attempt to open symlink for nonce {}'.format(nonce))
tx_nonce_path = os.path.join(
self.import_dir,
'txs',
'.' + str(nonce),
)
f = open(tx_nonce_path, 'r')
tx_signed_raw_hex = f.read()
f.close()
os.unlink(tx_nonce_path)
o = raw(add_0x(tx_signed_raw_hex))
tx_hash_hex = self.balance_processor.conn.do(o)
logg.info('sent nonce {} tx hash {}'.format(nonce, tx_hash_hex)) # tx_signed_raw_hex))
nonce += 1
queue = self.request.delivery_info.get('routing_key') queue = self.request.delivery_info.get('routing_key')
s = celery.signature( if nonce == self.count + self.balance_processor.nonce_offset:
'import_task.send_txs', logg.info(f'reached nonce {nonce} (offset {self.balance_processor.nonce_offset} + count {self.count}).')
[ celery_app.control.broadcast('shutdown', destination=[f'celery@{queue}'])
nonce,
],
queue=queue,
)
s.apply_async()
logg.debug(f'attempt to open symlink for nonce {nonce}')
tx_nonce_path = os.path.join(self.import_dir, 'txs', '.' + str(nonce))
with open(tx_nonce_path, 'r') as tx_nonce_file:
tx_signed_raw_hex = tx_nonce_file.read()
os.unlink(tx_nonce_path)
o = raw(add_0x(tx_signed_raw_hex))
if self.include_balances:
tx_hash_hex = self.balance_processor.conn.do(o)
logg.info(f'sent nonce {nonce} tx hash {tx_hash_hex}')
nonce += 1
s = celery.signature('import_task.send_txs', [nonce], queue=queue)
s.apply_async()
return nonce return nonce
@celery_app.task @celery_app.task()
def set_pins(config: dict, phone_to_pins: list): def set_pin_data(config: dict, phone_to_pins: list):
# define db connection
db_conn = psycopg2.connect( db_conn = psycopg2.connect(
database=config.get('database'), database=config.get('database'),
host=config.get('host'), host=config.get('host'),
@@ -261,24 +209,17 @@ def set_pins(config: dict, phone_to_pins: list):
password=config.get('password') password=config.get('password')
) )
db_cursor = db_conn.cursor() db_cursor = db_conn.cursor()
sql = 'UPDATE account SET password_hash = %s WHERE phone_number = %s'
# update db
for element in phone_to_pins: for element in phone_to_pins:
sql = 'UPDATE account SET password_hash = %s WHERE phone_number = %s'
db_cursor.execute(sql, (element[1], element[0])) db_cursor.execute(sql, (element[1], element[0]))
logg.debug(f'Updating: {element[0]} with: {element[1]}') logg.debug(f'Updating: {element[0]} with: {element[1]}')
# commit changes
db_conn.commit() db_conn.commit()
# close connections
db_cursor.close() db_cursor.close()
db_conn.close() db_conn.close()
@celery_app.task @celery_app.task
def set_ussd_data(config: dict, ussd_data: dict): def set_ussd_data(config: dict, ussd_data: list):
# define db connection
db_conn = psycopg2.connect( db_conn = psycopg2.connect(
database=config.get('database'), database=config.get('database'),
host=config.get('host'), host=config.get('host'),
@@ -287,20 +228,12 @@ def set_ussd_data(config: dict, ussd_data: dict):
password=config.get('password') password=config.get('password')
) )
db_cursor = db_conn.cursor() db_cursor = db_conn.cursor()
# process ussd_data
account_status = 1
if ussd_data['is_activated'] == 1:
account_status = 2
preferred_language = ussd_data['preferred_language']
phone_number = ussd_data['phone']
sql = 'UPDATE account SET status = %s, preferred_language = %s WHERE phone_number = %s' sql = 'UPDATE account SET status = %s, preferred_language = %s WHERE phone_number = %s'
db_cursor.execute(sql, (account_status, preferred_language, phone_number)) for element in ussd_data:
status = 2 if int(element[1]) == 1 else 1
# commit changes preferred_language = element[2]
phone_number = element[0]
db_cursor.execute(sql, (status, preferred_language, phone_number))
db_conn.commit() db_conn.commit()
# close connections
db_cursor.close() db_cursor.close()
db_conn.close() db_conn.close()

View File

@@ -3,56 +3,61 @@ import argparse
import json import json
import logging import logging
import os import os
import redis
import sys import sys
import time import time
import urllib.request
import uuid import uuid
from urllib import request
from urllib.parse import urlencode from urllib.parse import urlencode
# external imports # external imports
import celery import celery
import confini
import phonenumbers import phonenumbers
import redis
from chainlib.chain import ChainSpec
from cic_types.models.person import Person from cic_types.models.person import Person
from confini import Config
# local imports # local imports
from import_util import get_celery_worker_status from import_util import get_celery_worker_status
default_config_dir = './config'
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
default_config_dir = '/usr/local/etc/cic' arg_parser = argparse.ArgumentParser(description='Daemon worker that handles data seeding tasks.')
# batch size should be slightly below cumulative gas limit worth, eg 80000 gas txs with 8000000 limit is a bit less than 100 batch size
arg_parser.add_argument('--batch-size',
dest='batch_size',
default=100,
type=int,
help='burst size of sending transactions to node')
arg_parser.add_argument('--batch-delay', dest='batch_delay', default=3, type=int, help='seconds delay between batches')
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use.')
arg_parser.add_argument('--env-prefix',
default=os.environ.get('CONFINI_ENV_PREFIX'),
dest='env_prefix',
type=str,
help='environment prefix for variables to overwrite configuration.')
arg_parser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit data seeding tasks to.')
arg_parser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
arg_parser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
arg_parser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
arg_parser.add_argument('--ussd-host', dest='ussd_host', type=str,
help="host to ussd app responsible for processing ussd requests.")
arg_parser.add_argument('--ussd-no-ssl', dest='ussd_no_ssl', help='do not use ssl (careful)', action='store_true')
arg_parser.add_argument('--ussd-port', dest='ussd_port', type=str,
help="port to ussd app responsible for processing ussd requests.")
arg_parser.add_argument('-v', help='be verbose', action='store_true')
arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
arg_parser.add_argument('import_dir', default='out', type=str, help='user export directory')
args = arg_parser.parse_args()
argparser = argparse.ArgumentParser() if args.vv:
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file') logging.getLogger().setLevel(logging.DEBUG)
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string') elif args.v:
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission') logging.getLogger().setLevel(logging.INFO)
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis host to use for task submission')
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
argparser.add_argument('--batch-size', dest='batch_size', default=100, type=int,
help='burst size of sending transactions to node') # batch size should be slightly below cumulative gas limit worth, eg 80000 gas txs with 8000000 limit is a bit less than 100 batch size
argparser.add_argument('--batch-delay', dest='batch_delay', default=3, type=int, help='seconds delay between batches')
argparser.add_argument('--timeout', default=60.0, type=float, help='Callback timeout')
argparser.add_argument('--ussd-host', dest='ussd_host', type=str,
help="host to ussd app responsible for processing ussd requests.")
argparser.add_argument('--ussd-port', dest='ussd_port', type=str,
help="port to ussd app responsible for processing ussd requests.")
argparser.add_argument('--ussd-no-ssl', dest='ussd_no_ssl', help='do not use ssl (careful)', action='store_true')
argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue')
argparser.add_argument('-v', action='store_true', help='Be verbose')
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
argparser.add_argument('user_dir', type=str, help='path to users export dir tree')
args = argparser.parse_args()
if args.v: config = Config(args.c, args.env_prefix)
logg.setLevel(logging.INFO)
elif args.vv:
logg.setLevel(logging.DEBUG)
config_dir = args.c
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
config.process() config.process()
args_override = { args_override = {
'CIC_CHAIN_SPEC': getattr(args, 'i'), 'CIC_CHAIN_SPEC': getattr(args, 'i'),
@@ -60,44 +65,29 @@ args_override = {
'REDIS_PORT': getattr(args, 'redis_port'), 'REDIS_PORT': getattr(args, 'redis_port'),
'REDIS_DB': getattr(args, 'redis_db'), 'REDIS_DB': getattr(args, 'redis_db'),
} }
config.dict_override(args_override, 'cli') config.dict_override(args_override, 'cli flag')
logg.debug('config loaded from {}:\n{}'.format(args.c, config)) config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug(f'config loaded from {args.c}:\n{config}')
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) old_account_dir = os.path.join(args.import_dir, 'old')
get_celery_worker_status(celery_app=celery_app) os.stat(old_account_dir)
logg.debug(f'created old system data dir: {old_account_dir}')
redis_host = config.get('REDIS_HOST') new_account_dir = os.path.join(args.import_dir, 'new')
redis_port = config.get('REDIS_PORT') os.makedirs(new_account_dir, exist_ok=True)
redis_db = config.get('REDIS_DB') logg.debug(f'created new system data dir: {new_account_dir}')
r = redis.Redis(redis_host, redis_port, redis_db)
ps = r.pubsub() person_metadata_dir = os.path.join(args.import_dir, 'meta')
os.makedirs(person_metadata_dir, exist_ok=True)
logg.debug(f'created person metadata dir: {person_metadata_dir}')
user_new_dir = os.path.join(args.user_dir, 'new') preferences_dir = os.path.join(args.import_dir, 'preferences')
os.makedirs(user_new_dir, exist_ok=True)
ussd_data_dir = os.path.join(args.user_dir, 'ussd')
os.makedirs(ussd_data_dir, exist_ok=True)
preferences_dir = os.path.join(args.user_dir, 'preferences')
os.makedirs(os.path.join(preferences_dir, 'meta'), exist_ok=True) os.makedirs(os.path.join(preferences_dir, 'meta'), exist_ok=True)
logg.debug(f'created preferences metadata dir: {preferences_dir}')
meta_dir = os.path.join(args.user_dir, 'meta') valid_service_codes = config.get('USSD_SERVICE_CODE').split(",")
os.makedirs(meta_dir, exist_ok=True)
user_old_dir = os.path.join(args.user_dir, 'old')
os.stat(user_old_dir)
txs_dir = os.path.join(args.user_dir, 'txs')
os.makedirs(txs_dir, exist_ok=True)
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
chain_str = str(chain_spec)
batch_size = args.batch_size
batch_delay = args.batch_delay
ussd_port = args.ussd_port
ussd_host = args.ussd_host
ussd_no_ssl = args.ussd_no_ssl ussd_no_ssl = args.ussd_no_ssl
if ussd_no_ssl is True: if ussd_no_ssl is True:
ussd_ssl = False ussd_ssl = False
@@ -105,7 +95,17 @@ else:
ussd_ssl = True ussd_ssl = True
def build_ussd_request(phone, host, port, service_code, username, password, ssl=False): celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
get_celery_worker_status(celery_app)
def build_ussd_request(host: str,
password: str,
phone_number: str,
port: str,
service_code: str,
username: str,
ssl: bool = False):
url = 'http' url = 'http'
if ssl: if ssl:
url += 's' url += 's'
@@ -115,16 +115,16 @@ def build_ussd_request(phone, host, port, service_code, username, password, ssl=
url += '/?username={}&password={}'.format(username, password) url += '/?username={}&password={}'.format(username, password)
logg.info('ussd service url {}'.format(url)) logg.info('ussd service url {}'.format(url))
logg.info('ussd phone {}'.format(phone)) logg.info('ussd phone {}'.format(phone_number))
session = uuid.uuid4().hex session = uuid.uuid4().hex
data = { data = {
'sessionId': session, 'sessionId': session,
'serviceCode': service_code, 'serviceCode': service_code,
'phoneNumber': phone, 'phoneNumber': phone_number,
'text': service_code, 'text': service_code,
} }
req = urllib.request.Request(url) req = request.Request(url)
req.method = 'POST' req.method = 'POST'
data_str = urlencode(data) data_str = urlencode(data)
data_bytes = data_str.encode('utf-8') data_bytes = data_str.encode('utf-8')
@@ -134,85 +134,77 @@ def build_ussd_request(phone, host, port, service_code, username, password, ssl=
return req return req
def register_ussd(i, u): def e164_phone_number(phone_number: str):
phone_object = phonenumbers.parse(u.tel) phone_object = phonenumbers.parse(phone_number)
phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164) return phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
logg.debug('tel {} {}'.format(u.tel, phone))
req = build_ussd_request(
phone, def register_account(person: Person):
ussd_host, phone_number = e164_phone_number(person.tel)
ussd_port, logg.debug(f'tel: {phone_number}')
config.get('APP_SERVICE_CODE'), req = build_ussd_request(args.ussd_host,
'', '',
'', phone_number,
ussd_ssl args.ussd_port,
) valid_service_codes[0],
response = urllib.request.urlopen(req) '',
ussd_ssl)
response = request.urlopen(req)
response_data = response.read().decode('utf-8') response_data = response.read().decode('utf-8')
state = response_data[:3] logg.debug(f'ussd response: {response_data[4:]}')
out = response_data[4:]
logg.debug('ussd reponse: {}'.format(out))
if __name__ == '__main__': if __name__ == '__main__':
i = 0 i = 0
j = 0 j = 0
for x in os.walk(user_old_dir): for x in os.walk(old_account_dir):
for y in x[2]: for y in x[2]:
if y[len(y) - 5:] != '.json': if y[len(y) - 5:] != '.json':
continue continue
# handle json containing person object
filepath = os.path.join(x[0], y)
f = open(filepath, 'r')
try:
o = json.load(f)
except json.decoder.JSONDecodeError as e:
f.close()
logg.error('load error for {}: {}'.format(y, e))
continue
f.close()
u = Person.deserialize(o)
register_ussd(i, u) file_path = os.path.join(x[0], y)
with open(file_path, 'r') as account_file:
phone_object = phonenumbers.parse(u.tel) try:
phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164) account_data = json.load(account_file)
except json.decoder.JSONDecodeError as e:
s_phone = celery.signature( logg.error('load error for {}: {}'.format(y, e))
'import_task.resolve_phone', continue
[ person = Person.deserialize(account_data)
phone, register_account(person)
], phone_number = e164_phone_number(person.tel)
queue='cic-import-ussd', s_resolve_phone = celery.signature(
'import_task.resolve_phone', [phone_number], queue=args.q
) )
s_meta = celery.signature( s_person_metadata = celery.signature(
'import_task.generate_metadata', 'import_task.generate_person_metadata', [phone_number], queue=args.q
[
phone,
],
queue='cic-import-ussd',
) )
s_balance = celery.signature( s_ussd_data = celery.signature(
'import_task.opening_balance_tx', 'import_task.generate_ussd_data', [phone_number], queue=args.q
[
phone,
i,
],
queue='cic-import-ussd',
) )
s_meta.link(s_balance) s_preferences_metadata = celery.signature(
s_phone.link(s_meta) 'import_task.generate_preferences_data', [], queue=args.q
# block time plus a bit of time for ussd processing )
s_phone.apply_async(countdown=7)
s_pins_data = celery.signature(
'import_task.generate_pins_data', [phone_number], queue=args.q
)
s_opening_balance = celery.signature(
'import_task.opening_balance_tx', [phone_number, i], queue=args.q
)
celery.chain(s_resolve_phone,
s_person_metadata,
s_ussd_data,
s_preferences_metadata,
s_pins_data,
s_opening_balance).apply_async(countdown=7)
i += 1 i += 1
sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r") sys.stdout.write('imported: {} {}'.format(i, person).ljust(200) + "\r\n")
j += 1 j += 1
if j == batch_size: if j == args.batch_size:
time.sleep(batch_delay) time.sleep(args.batch_delay)
j = 0 j = 0

View File

@@ -1,67 +1,67 @@
# standard imports # standard imports
import argparse import argparse
import json import csv
import logging import logging
import os import os
import psycopg2
# external imports # external imports
import celery
from confini import Config from confini import Config
# local imports # local imports
default_config_dir = './config'
logging.basicConfig(level=logging.WARNING) logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger() logg = logging.getLogger()
default_config_dir = '/usr/local/etc/cic' arg_parser = argparse.ArgumentParser(description='Pins import script.')
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use.')
arg_parser.add_argument('--env-prefix',
default=os.environ.get('CONFINI_ENV_PREFIX'),
dest='env_prefix',
type=str,
help='environment prefix for variables to overwrite configuration.')
arg_parser.add_argument('import_dir', default='out', type=str, help='user export directory')
arg_parser.add_argument('-v', help='be verbose', action='store_true')
arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config file')
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='Task queue')
arg_parser.add_argument('-v', action='store_true', help='Be verbose')
arg_parser.add_argument('-vv', action='store_true', help='Be more verbose')
arg_parser.add_argument('user_dir', type=str, help='path to users export dir tree')
args = arg_parser.parse_args() args = arg_parser.parse_args()
if args.v: if args.vv:
logg.setLevel(logging.INFO) logging.getLogger().setLevel(logging.DEBUG)
elif args.vv: elif args.v:
logg.setLevel(logging.DEBUG) logging.getLogger().setLevel(logging.INFO)
config_dir = args.c config = Config(args.c, args.env_prefix)
config = Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
config.process() config.process()
logg.debug('config loaded from {}:\n{}'.format(args.c, config)) config.censor('PASSWORD', 'DATABASE')
logg.debug(f'config loaded from {args.c}:\n{config}')
ussd_data_dir = os.path.join(args.user_dir, 'ussd')
db_configs = { def main():
'database': config.get('DATABASE_NAME'), with open(f'{args.import_dir}/ussd_data.csv') as ussd_data_file:
'host': config.get('DATABASE_HOST'), ussd_data = [tuple(row) for row in csv.reader(ussd_data_file)]
'port': config.get('DATABASE_PORT'),
'user': config.get('DATABASE_USER'), db_conn = psycopg2.connect(
'password': config.get('DATABASE_PASSWORD') database=config.get('DATABASE_NAME'),
} host=config.get('DATABASE_HOST'),
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL')) port=config.get('DATABASE_PORT'),
user=config.get('DATABASE_USER'),
password=config.get('DATABASE_PASSWORD')
)
db_cursor = db_conn.cursor()
sql = 'UPDATE account SET status = %s, preferred_language = %s WHERE phone_number = %s'
for element in ussd_data:
status = 2 if int(element[1]) == 1 else 1
preferred_language = element[2]
phone_number = element[0]
db_cursor.execute(sql, (status, preferred_language, phone_number))
logg.debug(f'Updating account:{phone_number} with: preferred language: {preferred_language} status: {status}.')
db_conn.commit()
db_cursor.close()
db_conn.close()
if __name__ == '__main__': if __name__ == '__main__':
for x in os.walk(ussd_data_dir): main()
for y in x[2]:
if y[len(y) - 5:] == '.json':
filepath = os.path.join(x[0], y)
f = open(filepath, 'r')
try:
ussd_data = json.load(f)
logg.debug(f'LOADING USSD DATA: {ussd_data}')
except json.decoder.JSONDecodeError as e:
f.close()
logg.error('load error for {}: {}'.format(y, e))
continue
f.close()
s_set_ussd_data = celery.signature(
'import_task.set_ussd_data',
[db_configs, ussd_data]
)
s_set_ussd_data.apply_async(queue='cic-import-ussd')

View File

@@ -1,27 +1,4 @@
[app] [app]
ALLOWED_IP=0.0.0.0/0 allowed_ip=0.0.0.0/0
LOCALE_FALLBACK=en max_body_length=1024
LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/ password_pepper=
MAX_BODY_LENGTH=1024
PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I=
SERVICE_CODE=*483*46#
[phone_number]
REGION=KE
[ussd]
MENU_FILE=/usr/src/data/ussd_menu.json
user =
pass =
[statemachine]
STATES=/usr/src/cic-ussd/states/
TRANSITIONS=/usr/src/cic-ussd/transitions/
[client]
host =
port =
ssl =
[keystore]
file_path = keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c

View File

@@ -1,10 +1,10 @@
[database] [database]
NAME=sempo name=cic_ussd
USER=postgres user=postgres
PASSWORD= password=
HOST=localhost host=localhost
PORT=5432 port=5432
ENGINE=postgresql engine=postgresql
DRIVER=psycopg2 driver=psycopg2
DEBUG=0 debug=0
POOL_SIZE=1 pool_size=1

Some files were not shown because too many files have changed in this diff Show More