Compare commits
32 Commits
lash/upgra... ... lash/contr...
| Author | SHA1 | Date |
|---|---|---|
| | cec96a4f63 | |
| | 56f18487f9 | |
| | b52a727f14 | |
| | c1b2aea2f8 | |
| | b9b06eced8 | |
| | 949bb29379 | |
| | 0468906601 | |
| | 471243488e | |
| | 3c4acd82ff | |
| | e07f992c5a | |
| | 17e95cb19c | |
| | 3c3a97ce15 | |
| | a492be4927 | |
| | 1f555748b0 | |
| | 8aa4d20eea | |
| | 90cf24dcee | |
| | 75b711dbd5 | |
| | c21c1eb2ef | |
| | eb5e612105 | |
| | e017d11770 | |
| | e327af68e1 | |
| | 92cc6a3f27 | |
| | f42bf7754a | |
| | 7342927e91 | |
| | 17333af88f | |
| | 6a68d2ed32 | |
| | ef77f4c99a | |
| | 56dbe8a502 | |
| | 2dc8ac6a12 | |
| | 0ced68e224 | |
| | 2afb20e715 | |
| | 3b0113d0e4 | |
@@ -1,14 +1,43 @@
include:
  - local: 'ci_templates/.cic-template.yml'
  - local: 'apps/contract-migration/.gitlab-ci.yml'
  #- local: 'ci_templates/.cic-template.yml' #kaniko build templates
  # these includes are app specific unit tests
  - local: 'apps/cic-eth/.gitlab-ci.yml'
  - local: 'apps/cic-ussd/.gitlab-ci.yml'
  - local: 'apps/cic-notify/.gitlab-ci.yml'
  - local: 'apps/cic-meta/.gitlab-ci.yml'
  - local: 'apps/cic-cache/.gitlab-ci.yml'
  - local: 'apps/data-seeding/.gitlab-ci.yml'
  #- local: 'apps/contract-migration/.gitlab-ci.yml'
  #- local: 'apps/data-seeding/.gitlab-ci.yml'

stages:
  - build
  - test
  - release
  - deploy

image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/docker-with-compose:latest

variables:
  DOCKER_BUILDKIT: "1"
  COMPOSE_DOCKER_CLI_BUILD: "1"
  CI_DEBUG_TRACE: "true"

before_script:
  - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY

# runs on protected branches and pushes to repo
build-push:
  stage: build
  tags:
    - integration
  #script:
  # - TAG=$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA sh ./scripts/build-push.sh
  script:
    - TAG=latest sh ./scripts/build-push.sh
  rules:
    - if: $CI_COMMIT_REF_PROTECTED == "true"
      when: always

deploy-dev:
  stage: deploy
  trigger: grassrootseconomics/devops
  when: manual

README.md
@@ -2,25 +2,21 @@

## Getting started

## Make some keys
This repo uses docker-compose and docker buildkit. Set the following environment variables to get started:

```
docker build -t bloxie . && docker run -v "$(pwd)/keys:/root/keys" --rm -it -t bloxie account new --chain /root/bloxberg.json --keys-path /root/keys
export COMPOSE_DOCKER_CLI_BUILD=1
export DOCKER_BUILDKIT=1
```


### Prepare the repo

This is stuff we need to put in a makefile, but for now...

File mounts and permissions need to be set
start services, database, redis and local ethereum node
```
chmod -R 755 scripts/initdb apps/cic-meta/scripts/initdb
```

start cluster
docker-compose up -d
```
docker-compose up

Run app/contract-migration to deploy contracts
```
RUN_MASK=3 docker-compose up contract-migration
```

stop cluster
@@ -28,7 +24,7 @@ stop cluster
docker-compose down
```

delete data
stop cluster and delete data
```
docker-compose down -v
```
@@ -38,5 +34,4 @@ rebuild an images
docker-compose up --build <service_name>
```

Deployment variables are written to service-configs/.env after everything is up.
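As a purely illustrative aside (not part of the repo), the generated file can be picked up programmatically, assuming it contains plain KEY=value lines; the sketch below makes that assumption explicit:

```
import os

# Minimal, hypothetical sketch: load KEY=value pairs from the generated env
# file into the current process environment. Assumes plain lines with no
# quoting or "export" prefixes; adjust if the actual file format differs.
def load_env(path='service-configs/.env'):
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith('#') or '=' not in line:
                continue
            key, value = line.split('=', 1)
            os.environ.setdefault(key.strip(), value.strip())

if __name__ == '__main__':
    load_env()
```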
@@ -1,34 +0,0 @@
# The solc image messes up the alpine environment, so we have to go all over again
FROM python:3.8.6-slim-buster

LABEL authors="Louis Holbrook <dev@holbrook.no> 0826EDA1702D1E87C6E2875121D2E7BB88C2A746"
LABEL spdx-license-identifier="GPL-3.0-or-later"
LABEL description="Base layer for buiding development images for the cic component suite"

RUN apt-get update && \
    apt-get install -y git gcc g++ libpq-dev && \
    apt-get install -y vim gawk jq telnet openssl iputils-ping curl wget gnupg socat bash procps make python2 postgresql-client


RUN echo installing nodejs tooling

COPY ./dev/nvm.sh /root/

# Install nvm with node and npm
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
ENV NVM_DIR /root/.nvm
ENV NODE_VERSION 15.3.0
ENV BANCOR_NODE_VERSION 10.16.0

RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash \
    && . $NVM_DIR/nvm.sh \
    && nvm install $NODE_VERSION \
    && nvm alias default $NODE_VERSION \
    && nvm use $NODE_VERSION \
# So many ridiculously stupid issues with node in docker that take oceans of absolutely wasted time to resolve
# owner of these files is "1001" by default - wtf
    && chown -R root:root "$NVM_DIR/versions/node/v$NODE_VERSION"

ENV NODE_PATH $NVM_DIR/versions/node//v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH

@@ -1 +0,0 @@
## this is an example base image if we wanted one for all the other apps. Its just OS level things
@@ -1,52 +1,17 @@
.cic_cache_variables:
  variables:
    APP_NAME: cic-cache
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-cic-cache:
  extends:
    - .py_build_merge_request
    - .cic_cache_variables
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-cache/**/*
      when: always

test-mr-cic-cache:
  stage: test
  extends:
    - .cic_cache_variables
  cache:
    key:
      files:
        - test_requirements.txt
    paths:
      - /root/.cache/pip
  image: $MR_IMAGE_TAG
  script:
    - cd apps/$APP_NAME/
    - >
      pip install --extra-index-url https://pip.grassrootseconomics.net:8433
      --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
      -r test_requirements.txt
    - export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
  needs: ["build-mr-cic-cache"]
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always

build-push-cic-cache:
  extends:
    - .py_build_push
    - .cic_cache_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/cic-cache/**/*
      when: always


build-test-cic-cache:
  stage: test
  tags:
    - integration
  variables:
    APP_NAME: cic-cache
    MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  script:
    - cd apps/cic-cache
    - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
    - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
  allow_failure: true
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always

@@ -0,0 +1 @@
# CIC-CACHE
@@ -5,7 +5,7 @@ version = (
    0,
    2,
    1,
    'alpha.1',
    'alpha.2',
    )

version_object = semver.VersionInfo(
@@ -1,38 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev

# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9

COPY requirements.txt .
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
#RUN pip install $pip_extra_index_url_flag .
#RUN pip install .[server]

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG EXTRA_PIP_ARGS=""
RUN pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
    -r requirements.txt

COPY . .

RUN python setup.py install

# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-cache/

# for db migrations
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/

COPY /docker/start_tracker.sh ./start_tracker.sh
COPY /docker/db.sh ./db.sh
RUN chmod 755 ./*.sh
# Tracker
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
# Server
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
ENTRYPOINT []
apps/cic-cache/docker/run_tests.sh (new file)
@@ -0,0 +1,10 @@
#! /bin/bash

set -e

pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
    --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
    -r test_requirements.txt

export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
@@ -8,8 +8,8 @@ semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
chainsyncer[sql]>=0.0.6a1,<0.1.0
chainsyncer[sql]>=0.0.6a3,<0.1.0
erc20-faucet>=0.3.2a1, <0.4.0
chainlib-eth>=0.0.9a3,<0.1.0
chainlib>=0.0.9a2,<0.1.0
chainlib-eth>=0.0.9a7,<0.1.0
chainlib>=0.0.9a3,<0.1.0
eth-address-index>=0.2.3a1,<0.3.0
@@ -1,52 +1,16 @@
.cic_eth_variables:
  variables:
    APP_NAME: cic-eth
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-cic-eth:
  extends:
    - .cic_eth_variables
    - .py_build_target_dev
  rules:
build-test-cic-eth:
  stage: test
  tags:
    - integration
  variables:
    APP_NAME: cic-eth
    MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  script:
    - cd apps/cic-eth
    - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
    - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-eth/**/*
      when: always

test-mr-cic-eth:
  stage: test
  extends:
    - .cic_eth_variables
  cache:
    key:
      files:
        - test_requirements.txt
    paths:
      - /root/.cache/pip
  image: $MR_IMAGE_TAG
  script:
    - cd apps/$APP_NAME/
    - >
      pip install --extra-index-url https://pip.grassrootseconomics.net:8433
      --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
      -r admin_requirements.txt
      -r services_requirements.txt
      -r test_requirements.txt
    - export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
  needs: ["build-mr-cic-eth"]
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-eth/**/*
      when: always

build-push-cic-eth:
  extends:
    - .py_build_push
    - .cic_eth_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/cic-eth/**/*
        - apps/$APP_NAME/**/*
      when: always
@@ -1,5 +1,5 @@
SQLAlchemy==1.3.20
cic-eth-registry>=0.6.1a2,<0.7.0
hexathon~=0.0.1a8
chainqueue>=0.0.3a2,<0.1.0
chainqueue>=0.0.4a6,<0.1.0
eth-erc20>=0.1.2a2,<0.2.0
@@ -4,7 +4,6 @@ import logging

# external imports
import celery
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec
from hexathon import (
    add_0x,
@@ -20,18 +19,17 @@ from cic_eth.task import (
    CriticalSQLAlchemyTask,
)
from cic_eth.error import LockedError
from cic_eth.encode import (
    tx_normalize,
    ZERO_ADDRESS_NORMAL,
)

celery_app = celery.current_app
logg = logging.getLogger()


def normalize_address(a):
    if a == None:
        return None
    return add_0x(hex_uniform(strip_0x(a)))


@celery_app.task(base=CriticalSQLAlchemyTask)
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL, tx_hash=None):
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, flags=LockEnum.ALL, tx_hash=None):
    """Task wrapper to set arbitrary locks

    :param chain_str: Chain spec string representation
@@ -43,7 +41,7 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = '::'
    if chain_spec_dict != None:
        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
@@ -53,7 +51,7 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL


@celery_app.task(base=CriticalSQLAlchemyTask)
def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL):
def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, flags=LockEnum.ALL):
    """Task wrapper to reset arbitrary locks

    :param chain_str: Chain spec string representation
@@ -65,7 +63,7 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = '::'
    if chain_spec_dict != None:
        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
@@ -75,7 +73,7 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.


@celery_app.task(base=CriticalSQLAlchemyTask)
def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None):
def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, tx_hash=None):
    """Task wrapper to set send lock

    :param chain_str: Chain spec string representation
@@ -85,7 +83,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
    logg.debug('Send locked for {}, flag now {}'.format(address, r))
@@ -93,7 +91,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None


@celery_app.task(base=CriticalSQLAlchemyTask)
def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL):
    """Task wrapper to reset send lock

    :param chain_str: Chain spec string representation
@@ -103,7 +101,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    r = Lock.reset(chain_str, LockEnum.SEND, address=address)
    logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
@@ -111,7 +109,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):


@celery_app.task(base=CriticalSQLAlchemyTask)
def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None):
def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, tx_hash=None):
    """Task wrapper to set queue direct lock

    :param chain_str: Chain spec string representation
@@ -121,7 +119,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
    logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
@@ -129,7 +127,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non


@celery_app.task(base=CriticalSQLAlchemyTask)
def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL):
    """Task wrapper to reset queue direct lock

    :param chain_str: Chain spec string representation
@@ -139,7 +137,7 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
    logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
@@ -148,12 +146,13 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):

@celery_app.task(base=CriticalSQLAlchemyTask)
def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
    address = normalize_address(address)
    if address != None:
        address = tx_normalize.wallet_address(address)
    chain_str = '::'
    if chain_spec_dict != None:
        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    session = SessionBase.create_session()
    r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS, session=session)
    r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS_NORMAL, session=session)
    if address != None:
        r |= Lock.check(chain_str, lock_flags, address=address, session=session)
    if r > 0:
@@ -33,6 +33,7 @@ from cic_eth.admin.ctrl import (
from cic_eth.queue.tx import queue_create
from cic_eth.eth.gas import create_check_gas_task
from cic_eth.task import BaseTask
from cic_eth.encode import tx_normalize

celery_app = celery.current_app
logg = logging.getLogger()
@@ -73,7 +74,7 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):

    set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session)

    query_address = add_0x(hex_uniform(strip_0x(address))) # aaaaargh
    query_address = tx_normalize.wallet_address(address)
    q = session.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.sender==query_address)
@@ -32,7 +32,6 @@ from chainqueue.db.enum import (
    status_str,
)
from chainqueue.error import TxStateChangeError
from chainqueue.sql.query import get_tx
from eth_erc20 import ERC20

# local imports
@@ -40,6 +39,7 @@ from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.nonce import Nonce
from cic_eth.error import InitializationError
from cic_eth.queue.query import get_tx_local

app = celery.current_app

@@ -284,7 +284,7 @@ class AdminApi:
        tx_hash_hex = None
        session = SessionBase.create_session()
        for k in txs.keys():
            tx_dict = get_tx(chain_spec, k, session=session)
            tx_dict = get_tx_local(chain_spec, k, session=session)
            if tx_dict['nonce'] == nonce:
                tx_hash_hex = k
        session.close()
@@ -8,8 +8,8 @@ Create Date: 2021-04-02 18:41:20.864265
import datetime
from alembic import op
import sqlalchemy as sa
from chainlib.eth.constant import ZERO_ADDRESS
from cic_eth.db.enum import LockEnum
from cic_eth.encode import ZERO_ADDRESS_NORMAL


# revision identifiers, used by Alembic.
@@ -30,7 +30,7 @@ def upgrade():
        sa.Column("otx_id", sa.Integer, sa.ForeignKey('otx.id'), nullable=True),
    )
    op.create_index('idx_chain_address', 'lock', ['blockchain', 'address'], unique=True)
    op.execute("INSERT INTO lock (address, date_created, blockchain, flags) VALUES('{}', '{}', '::', {})".format(ZERO_ADDRESS, datetime.datetime.utcnow(), LockEnum.INIT | LockEnum.SEND | LockEnum.QUEUE))
    op.execute("INSERT INTO lock (address, date_created, blockchain, flags) VALUES('{}', '{}', '::', {})".format(ZERO_ADDRESS_NORMAL, datetime.datetime.utcnow(), LockEnum.INIT | LockEnum.SEND | LockEnum.QUEUE))


def downgrade():
@@ -4,12 +4,12 @@ import logging

# third-party imports
from sqlalchemy import Column, String, Integer, DateTime, ForeignKey
from chainlib.eth.constant import ZERO_ADDRESS
from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx

# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.encode import ZERO_ADDRESS_NORMAL

logg = logging.getLogger()

@@ -37,7 +37,7 @@ class Lock(SessionBase):


    @staticmethod
    def set(chain_str, flags, address=ZERO_ADDRESS, session=None, tx_hash=None):
    def set(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None, tx_hash=None):
        """Sets flags associated with the given address and chain.

        If a flags entry does not exist it is created.
@@ -90,7 +90,7 @@ class Lock(SessionBase):


    @staticmethod
    def reset(chain_str, flags, address=ZERO_ADDRESS, session=None):
    def reset(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None):
        """Resets flags associated with the given address and chain.

        If the resulting flags entry value is 0, the entry will be deleted.
@@ -134,7 +134,7 @@ class Lock(SessionBase):


    @staticmethod
    def check(chain_str, flags, address=ZERO_ADDRESS, session=None):
    def check(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None):
        """Checks whether all given flags are set for given address and chain.

        Does not validate the address against any other tables or components.
apps/cic-eth/cic_eth/encode.py (new file)
@@ -0,0 +1,16 @@
# external imports
from chainlib.eth.constant import ZERO_ADDRESS
from chainqueue.encode import TxHexNormalizer
from chainlib.eth.tx import unpack

tx_normalize = TxHexNormalizer()

ZERO_ADDRESS_NORMAL = tx_normalize.wallet_address(ZERO_ADDRESS)


def unpack_normal(signed_tx_bytes, chain_spec):
    tx = unpack(signed_tx_bytes, chain_spec)
    tx['hash'] = tx_normalize.tx_hash(tx['hash'])
    tx['from'] = tx_normalize.wallet_address(tx['from'])
    tx['to'] = tx_normalize.wallet_address(tx['to'])
    return tx
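The new cic_eth/encode.py module above is the normalization helper that the rest of this changeset switches to. A minimal usage sketch follows; the address value is made up, and the exact normalized output of TxHexNormalizer is an assumption, not verified here:

```
# Illustrative sketch only. The example address is a placeholder; the precise
# canonical form returned by TxHexNormalizer is not shown in this changeset.
from cic_eth.encode import tx_normalize, ZERO_ADDRESS_NORMAL

address = '0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C'

# wallet_address() is what the tasks in this changeset now call before storing
# or querying addresses, so all records share one canonical representation.
normal = tx_normalize.wallet_address(address)

print(ZERO_ADDRESS_NORMAL)  # normalized form of the zero address
print(normal)               # normalized form of the example address
```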
@@ -14,10 +14,7 @@ from chainlib.eth.sign import (
    sign_message,
)
from chainlib.eth.address import to_checksum_address
from chainlib.eth.tx import (
    TxFormat,
    unpack,
)
from chainlib.eth.tx import TxFormat
from chainlib.chain import ChainSpec
from chainlib.error import JSONRPCException
from eth_accounts_index.registry import AccountRegistry
@@ -49,6 +46,10 @@ from cic_eth.eth.nonce import (
from cic_eth.queue.tx import (
    register_tx,
)
from cic_eth.encode import (
    unpack_normal,
    ZERO_ADDRESS_NORMAL,
)

logg = logging.getLogger()
celery_app = celery.current_app
@@ -295,17 +296,17 @@ def cache_gift_data(
    chain_spec = ChainSpec.from_dict(chain_spec_dict)

    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx = unpack(tx_signed_raw_bytes, chain_spec)
    tx = unpack_normal(tx_signed_raw_bytes, chain_spec)
    tx_data = Faucet.parse_give_to_request(tx['data'])

    session = self.create_session()

    tx_dict = {
        'hash': tx_hash_hex,
        'hash': tx['hash'],
        'from': tx['from'],
        'to': tx['to'],
        'source_token': ZERO_ADDRESS,
        'destination_token': ZERO_ADDRESS,
        'source_token': ZERO_ADDRESS_NORMAL,
        'destination_token': ZERO_ADDRESS_NORMAL,
        'from_value': 0,
        'to_value': 0,
    }
@@ -334,17 +335,17 @@ def cache_account_data(
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
    tx = unpack(tx_signed_raw_bytes, chain_spec)
    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx = unpack_normal(tx_signed_raw_bytes, chain_spec)
    tx_data = AccountsIndex.parse_add_request(tx['data'])

    session = SessionBase.create_session()
    tx_dict = {
        'hash': tx_hash_hex,
        'hash': tx['hash'],
        'from': tx['from'],
        'to': tx['to'],
        'source_token': ZERO_ADDRESS,
        'destination_token': ZERO_ADDRESS,
        'source_token': ZERO_ADDRESS_NORMAL,
        'destination_token': ZERO_ADDRESS_NORMAL,
        'from_value': 0,
        'to_value': 0,
    }
@@ -3,8 +3,11 @@ import logging

# external imports
import celery
from hexathon import strip_0x
from chainlib.eth.constant import ZERO_ADDRESS
from hexathon import (
    strip_0x,
    add_0x,
)
#from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec
from chainlib.eth.address import is_checksum_address
from chainlib.connection import RPCConnection
@@ -21,7 +24,6 @@ from chainlib.eth.error import (
from chainlib.eth.tx import (
    TxFactory,
    TxFormat,
    unpack,
)
from chainlib.eth.contract import (
    abi_decode_single,
@@ -45,6 +47,7 @@ from cic_eth.eth.nonce import CustodialTaskNonceOracle
from cic_eth.queue.tx import (
    queue_create,
    register_tx,
    unpack,
)
from cic_eth.queue.query import get_tx
from cic_eth.task import (
@@ -53,6 +56,11 @@ from cic_eth.task import (
    CriticalSQLAlchemyAndSignerTask,
    CriticalWeb3AndSignerTask,
)
from cic_eth.encode import (
    tx_normalize,
    ZERO_ADDRESS_NORMAL,
    unpack_normal,
)

celery_app = celery.current_app
logg = logging.getLogger()
@@ -66,6 +74,7 @@ class MaxGasOracle:
        return MAXIMUM_FEE_UNITS


#def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
    """Creates a celery task signature for a check_gas task that adds the task to the outgoing queue to be processed by the dispatcher.

@@ -130,16 +139,16 @@ def cache_gas_data(
    """
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx = unpack(tx_signed_raw_bytes, chain_spec)
    tx = unpack_normal(tx_signed_raw_bytes, chain_spec)

    session = SessionBase.create_session()

    tx_dict = {
        'hash': tx_hash_hex,
        'hash': tx['hash'],
        'from': tx['from'],
        'to': tx['to'],
        'source_token': ZERO_ADDRESS,
        'destination_token': ZERO_ADDRESS,
        'source_token': ZERO_ADDRESS_NORMAL,
        'destination_token': ZERO_ADDRESS_NORMAL,
        'from_value': tx['value'],
        'to_value': tx['value'],
    }
@@ -150,7 +159,7 @@ def cache_gas_data(


@celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyAndWeb3Task)
def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=MAXIMUM_FEE_UNITS):
def check_gas(self, tx_hashes_hex, chain_spec_dict, txs_hex=[], address=None, gas_required=MAXIMUM_FEE_UNITS):
    """Check the gas level of the sender address of a transaction.

    If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raiser.
@@ -170,6 +179,21 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
    :return: Signed raw transaction data list
    :rtype: param txs, unchanged
    """
    if address != None:
        if not is_checksum_address(address):
            raise ValueError('invalid address {}'.format(address))
        address = tx_normalize.wallet_address(address)
        address = add_0x(address)

    tx_hashes = []
    txs = []
    for tx_hash in tx_hashes_hex:
        tx_hash = tx_normalize.tx_hash(tx_hash)
        tx_hashes.append(tx_hash)
    for tx in txs_hex:
        tx = tx_normalize.tx_wire(tx)
        txs.append(tx)

    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    logg.debug('txs {} tx_hashes {}'.format(txs, tx_hashes))

@@ -187,9 +211,6 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
            raise ValueError('txs passed to check gas must all have same sender; had {} got {}'.format(address, tx['from']))
        addresspass.append(address)

    if not is_checksum_address(address):
        raise ValueError('invalid address {}'.format(address))

    queue = self.request.delivery_info.get('routing_key')

    conn = RPCConnection.connect(chain_spec)
@@ -304,6 +325,7 @@ def refill_gas(self, recipient_address, chain_spec_dict):
    # Determine value of gas tokens to send
    # if an uncompleted gas refill for the same recipient already exists, we still need to spend the nonce
    # however, we will perform a 0-value transaction instead
    recipient_address = tx_normalize.wallet_address(recipient_address)
    zero_amount = False
    session = SessionBase.create_session()
    status_filter = StatusBits.FINAL | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR
@@ -378,6 +400,7 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_spec_dict, gas=None, defa
    :returns: Transaction hash
    :rtype: str, 0x-hex
    """
    txold_hash_hex = tx_normalize.tx_hash(txold_hash_hex)
    session = SessionBase.create_session()

    otx = Otx.load(txold_hash_hex, session)
@@ -15,6 +15,7 @@ from chainqueue.db.enum import (
# local imports
from cic_eth.db import SessionBase
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.encode import tx_normalize

celery_app = celery.current_app

@@ -22,6 +23,9 @@ logg = logging.getLogger()


def __balance_outgoing_compatible(token_address, holder_address):
    token_address = tx_normalize.executable_address(token_address)
    holder_address = tx_normalize.wallet_address(holder_address)

    session = SessionBase.create_session()
    q = session.query(TxCache.from_value)
    q = q.join(Otx)
@@ -58,6 +62,9 @@ def balance_outgoing(tokens, holder_address, chain_spec_dict):


def __balance_incoming_compatible(token_address, receiver_address):
    token_address = tx_normalize.executable_address(token_address)
    receiver_address = tx_normalize.wallet_address(receiver_address)

    session = SessionBase.create_session()
    q = session.query(TxCache.to_value)
    q = q.join(Otx)
@@ -110,7 +117,7 @@ def assemble_balances(balances_collection):
    logg.debug('received collection {}'.format(balances_collection))
    for c in balances_collection:
        for b in c:
            address = b['address']
            address = tx_normalize.executable_address(b['address'])
            if tokens.get(address) == None:
                tokens[address] = {
                    'address': address,
@@ -6,6 +6,7 @@ import celery
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db import SessionBase
from cic_eth.db.models.lock import Lock
from cic_eth.encode import tx_normalize

celery_app = celery.current_app

@@ -21,6 +22,9 @@ def get_lock(address=None):
    :returns: List of locks
    :rtype: list of dicts
    """
    if address != None:
        address = tx_normalize.wallet_address(address)

    session = SessionBase.create_session()
    q = session.query(
        Lock.date_created,
@@ -4,8 +4,8 @@ import datetime
# external imports
import celery
from chainlib.chain import ChainSpec
from chainlib.eth.tx import unpack
import chainqueue.sql.query
from chainlib.eth.tx import unpack
from chainqueue.db.enum import (
    StatusEnum,
    is_alive,
@@ -20,6 +20,10 @@ from cic_eth.db.enum import LockEnum
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db.models.lock import Lock
from cic_eth.db.models.base import SessionBase
from cic_eth.encode import (
    tx_normalize,
    unpack_normal,
)

celery_app = celery.current_app

@@ -27,49 +31,76 @@ celery_app = celery.current_app
@celery_app.task(base=CriticalSQLAlchemyTask)
def get_tx_cache(chain_spec_dict, tx_hash):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    return get_tx_cache_local(chain_spec, tx_hash)


def get_tx_cache_local(chain_spec, tx_hash, session=None):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    session = SessionBase.bind_session(session)
    r = chainqueue.sql.query.get_tx_cache(chain_spec, tx_hash, session=session)
    session.close()
    SessionBase.release_session(session)
    return r


@celery_app.task(base=CriticalSQLAlchemyTask)
def get_tx(chain_spec_dict, tx_hash):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    return get_tx_local(chain_spec, tx_hash)


def get_tx_local(chain_spec, tx_hash, session=None):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    session = SessionBase.bind_session(session)
    r = chainqueue.sql.query.get_tx(chain_spec, tx_hash, session=session)
    session.close()
    SessionBase.release_session(session)
    return r


@celery_app.task(base=CriticalSQLAlchemyTask)
def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True, counterpart=None):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    return get_account_tx_local(chain_spec, address, as_sender=as_sender, as_recipient=as_recipient, counterpart=counterpart)


def get_account_tx_local(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=None):
    address = tx_normalize.wallet_address(address)
    session = SessionBase.bind_session(session)
    r = chainqueue.sql.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session)
    session.close()
    SessionBase.release_session(session)
    return r


@celery_app.task(base=CriticalSQLAlchemyTask)
def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    return get_upcoming_tx_nolock_local(chain_spec, status=status, not_status=not_status, recipient=recipient, before=before, limit=limit)


def get_upcoming_tx_nolock_local(chain_spec, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
    recipient = tx_normalize.wallet_address(recipient)
    session = SessionBase.create_session()
    r = chainqueue.sql.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack)
    r = chainqueue.sql.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack_normal)
    session.close()
    return r


def get_status_tx(chain_spec, status, not_status=None, before=None, exact=False, limit=0, session=None):
    return chainqueue.sql.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack)
    return chainqueue.sql.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack_normal)


def get_paused_tx(chain_spec, status=None, sender=None, session=None, decoder=None):
    return chainqueue.sql.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack)
    sender = tx_normalize.wallet_address(sender)
    return chainqueue.sql.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack_normal)


def get_nonce_tx(chain_spec, nonce, sender):
    return get_nonce_tx_cache(chain_spec, nonce, sender, decoder=unpack)
    sender = tx_normalize.wallet_address(sender)
    return get_nonce_tx_local(chain_spec, nonce, sender)


def get_nonce_tx_local(chain_spec, nonce, sender, session=None):
    sender = tx_normalize.wallet_address(sender)
    return chainqueue.sql.query.get_nonce_tx_cache(chain_spec, nonce, sender, decoder=unpack_normal, session=session)


def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
@@ -91,6 +122,8 @@ def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, re
    :returns: Transactions
    :rtype: dict, with transaction hash as key, signed raw transaction as value
    """
    if recipient != None:
        recipient = tx_normalize.wallet_address(recipient)
    session = SessionBase.bind_session(session)
    q_outer = session.query(
        TxCache.sender,
@@ -6,12 +6,14 @@ import chainqueue.sql.state
import celery
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db.models.base import SessionBase
from cic_eth.encode import tx_normalize

celery_app = celery.current_app


@celery_app.task(base=CriticalSQLAlchemyTask)
def set_sent(chain_spec_dict, tx_hash, fail=False):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_sent(chain_spec, tx_hash, fail, session=session)
@@ -21,6 +23,7 @@ def set_sent(chain_spec_dict, tx_hash, fail=False):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session)
@@ -30,6 +33,7 @@ def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_cancel(chain_spec_dict, tx_hash, manual=False):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_cancel(chain_spec, tx_hash, manual, session=session)
@@ -39,6 +43,7 @@ def set_cancel(chain_spec_dict, tx_hash, manual=False):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_rejected(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_rejected(chain_spec, tx_hash, session=session)
@@ -48,6 +53,7 @@ def set_rejected(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_fubar(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_fubar(chain_spec, tx_hash, session=session)
@@ -57,6 +63,7 @@ def set_fubar(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_manual(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_manual(chain_spec, tx_hash, session=session)
@@ -66,6 +73,7 @@ def set_manual(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_ready(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_ready(chain_spec, tx_hash, session=session)
@@ -75,6 +83,7 @@ def set_ready(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_reserved(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_reserved(chain_spec, tx_hash, session=session)
@@ -84,6 +93,7 @@ def set_reserved(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_waitforgas(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_waitforgas(chain_spec, tx_hash, session=session)
@@ -93,6 +103,7 @@ def set_waitforgas(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def get_state_log(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.get_state_log(chain_spec, tx_hash, session=session)
@@ -102,6 +113,7 @@ def get_state_log(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def obsolete(chain_spec_dict, tx_hash, final):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session)
@@ -13,6 +13,7 @@ from chainqueue.error import NotLocalTxError
# local imports
from cic_eth.task import CriticalSQLAlchemyAndWeb3Task
from cic_eth.db.models.base import SessionBase
from cic_eth.encode import tx_normalize

celery_app = celery.current_app

@@ -20,6 +21,7 @@ logg = logging.getLogger()


def tx_times(tx_hash, chain_spec, session=None):
    tx_hash = tx_normalize.tx_hash(tx_hash)

    session = SessionBase.bind_session(session)
@@ -32,12 +32,16 @@ from cic_eth.db import SessionBase
from cic_eth.db.enum import LockEnum
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.error import LockedError
from cic_eth.encode import tx_normalize

celery_app = celery.current_app
logg = logging.getLogger()


def queue_create(chain_spec, nonce, holder_address, tx_hash, signed_tx, session=None):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    signed_tx = tx_normalize.tx_hash(signed_tx)
    holder_address = tx_normalize.wallet_address(holder_address)
    session = SessionBase.bind_session(session)

    lock = Lock.check_aggregate(str(chain_spec), LockEnum.QUEUE, holder_address, session=session)
@@ -67,6 +71,8 @@ def register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=No
    :returns: Tuple; Transaction hash, signed raw transaction data
    :rtype: tuple
    """
    tx_hash_hex = tx_normalize.tx_hash(tx_hash_hex)
    tx_signed_raw_hex = tx_normalize.tx_hash(tx_signed_raw_hex)
    logg.debug('adding queue tx {}:{} -> {}'.format(chain_spec, tx_hash_hex, tx_signed_raw_hex))
    tx_signed_raw = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx = unpack(tx_signed_raw, chain_spec)
@@ -10,15 +10,14 @@ from chainlib.eth.tx import unpack
from chainqueue.db.enum import StatusBits
from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx
from chainqueue.sql.query import get_paused_tx_cache as get_paused_tx
from chainlib.eth.address import to_checksum_address

# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.gas import create_check_gas_task
from cic_eth.queue.query import get_paused_tx
from .base import SyncFilter

#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
@@ -13,7 +13,6 @@ from chainlib.eth.nonce import RPCNonceOracle
from chainlib.eth.gas import RPCGasOracle
from cic_eth_registry import CICRegistry
from cic_eth_registry.error import UnknownContractError
import liveness.linux

# local imports
from cic_eth.error import SeppukuError
@@ -48,6 +47,7 @@ class BaseTask(celery.Task):

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        if isinstance(exc, SeppukuError):
            import liveness.linux
            liveness.linux.reset(rundir=self.run_dir)
            logg.critical(einfo)
            msg = 'received critical exception {}, calling shutdown'.format(str(exc))
@@ -10,7 +10,7 @@ version = (
    0,
    12,
    4,
    'alpha.4',
    'alpha.8',
    )

version_object = semver.VersionInfo(
@@ -1,8 +1,6 @@
@node cic-eth configuration
@section Configuration

(refer to @code{cic-base} for a general overview of the config pipeline)

Configuration parameters are grouped by configuration filename.


@@ -40,7 +38,26 @@ Boolean value. If set, the amount of available context for a task in the result

@subsection database

See ref cic-base when ready
@table @var
@item host
Database host
@item port
Database port
@item name
Database name
@item user
Database user
@item password
Database password
@item engine
The engine part of the dsn connection string (@code{postgresql} in @code{postgresql+psycopg2})
@item driver
The driver part of the dsn connection string (@code{psycopg2} in @code{postgresql+psycopg2})
@item pool_size
Connection pool size for database drivers that provide connection pooling
@item debug
Output actual sql queries to logs. Potentially very verbose
@end table
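Taken together, the engine, driver, user, password, host, port and name parameters documented above assemble into a standard SQLAlchemy dsn. A hypothetical sketch with placeholder values (not defaults from the codebase):

```
# Illustrative only: how the documented database parameters combine into a dsn
# of the form <engine>+<driver>://<user>:<password>@<host>:<port>/<name>.
db = {
    'engine': 'postgresql',
    'driver': 'psycopg2',
    'user': 'cic',
    'password': 'secret',
    'host': 'localhost',
    'port': 5432,
    'name': 'cic_eth',
}

dsn = '{engine}+{driver}://{user}:{password}@{host}:{port}/{name}'.format(**db)
print(dsn)  # postgresql+psycopg2://cic:secret@localhost:5432/cic_eth
```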

@subsection eth
@@ -13,7 +13,16 @@ ARG EXTRA_PIP_ARGS=""
|
||||
# pip install --index-url https://pypi.org/simple \
|
||||
# --force-reinstall \
|
||||
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||
# -r requirements.txt
|
||||
# -r requirements.txt
|
||||
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url https://pypi.org/simple \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
||||
--extra-index-url $EXTRA_INDEX_URL \
|
||||
$EXTRA_PIP_ARGS \
|
||||
cic-eth-aux-erc20-demurrage-token~=0.0.2a6
|
||||
|
||||
|
||||
COPY *requirements.txt ./
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url https://pypi.org/simple \
|
||||
@@ -22,19 +31,21 @@ RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
$EXTRA_PIP_ARGS \
|
||||
-r requirements.txt \
|
||||
-r services_requirements.txt \
|
||||
-r admin_requirements.txt
|
||||
|
||||
COPY . .
|
||||
RUN python setup.py install
|
||||
|
||||
ENV PYTHONPATH .
|
||||
|
||||
-r admin_requirements.txt
|
||||
|
||||
# always install the latest signer
|
||||
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||
pip install --index-url https://pypi.org/simple \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
||||
--extra-index-url $EXTRA_INDEX_URL \
|
||||
$EXTRA_PIP_ARGS \
|
||||
cic-eth-aux-erc20-demurrage-token~=0.0.2a6
|
||||
crypto-dev-signer
|
||||
|
||||
COPY . .
|
||||
RUN python setup.py install
|
||||
|
||||
ENV PYTHONPATH .
|
||||
|
||||
|
||||
COPY docker/entrypoints/* ./
|
||||
RUN chmod 755 *.sh
|
||||
|
||||
@@ -1,71 +0,0 @@
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev

WORKDIR /usr/src/cic-eth

# Copy just the requirements and install....this _might_ give docker a hint on caching but we
# do load these all into setup.py later
# TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
#COPY cic-eth/requirements.txt .

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG EXTRA_PIP_ARGS=""
#RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
# pip install --index-url https://pypi.org/simple \
# --force-reinstall \
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
# -r requirements.txt
COPY *requirements.txt .
RUN pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY \
    --extra-index-url $EXTRA_INDEX_URL \
    $EXTRA_PIP_ARGS \
    -r requirements.txt \
    -r services_requirements.txt \
    -r admin_requirements.txt

COPY . .
RUN python setup.py install

COPY docker/entrypoints/* ./
RUN chmod 755 *.sh

# # ini files in config directory defines the configurable parameters for the application
# # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/

# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh
ENTRYPOINT []

# ------------------ PRODUCTION CONTAINER ----------------------
#FROM python:3.8.6-slim-buster as prod
#
#RUN apt-get update && \
# apt install -y gnupg libpq-dev procps
#
#WORKDIR /root
#
#COPY --from=dev /usr/local/bin/ /usr/local/bin/
#COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
# /usr/local/lib/python3.8/site-packages/
#
#COPY docker/entrypoints/* ./
#RUN chmod 755 *.sh
#
## # ini files in config directory defines the configurable parameters for the application
## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY scripts/ scripts/
#
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
##COPY util/liveness/health.sh /usr/local/bin/health.sh
#
#ENTRYPOINT []
#
@@ -5,27 +5,27 @@ set -e

# set CONFINI_ENV_PREFIX to override the env prefix to override env vars

echo "!!! starting signer"
python /usr/local/bin/crypto-dev-daemon -c /usr/local/etc/crypto-dev-signer -vv 2> /tmp/signer.log &
#echo "!!! starting signer"
#python /usr/local/bin/crypto-dev-daemon -c /usr/local/etc/crypto-dev-signer -vv 2> /tmp/signer.log &

echo "!!! starting taskerd"
/usr/local/bin/cic-eth-taskerd $@

# thanks! https://docs.docker.com/config/containers/multi-service_container/
sleep 1;
echo "!!! entering monitor loop"
while true; do
  ps aux | grep crypto-dev-daemon | grep -q -v grep
  PROCESS_1_STATUS=$?
  ps aux | grep cic-eth-tasker |grep -q -v grep
  PROCESS_2_STATUS=$?
  # If the greps above find anything, they exit with 0 status
  # If they are not both 0, then something is wrong
  if [ $PROCESS_1_STATUS -ne 0 -o $PROCESS_2_STATUS -ne 0 ]; then
    echo "One of the processes has already exited."
    exit 1
  fi
  sleep 15;
done

#sleep 1;
#echo "!!! entering monitor loop"
#while true; do
#  ps aux | grep crypto-dev-daemon | grep -q -v grep
#  PROCESS_1_STATUS=$?
#  ps aux | grep cic-eth-tasker |grep -q -v grep
#  PROCESS_2_STATUS=$?
#  # If the greps above find anything, they exit with 0 status
#  # If they are not both 0, then something is wrong
#  if [ $PROCESS_1_STATUS -ne 0 -o $PROCESS_2_STATUS -ne 0 ]; then
#    echo "One of the processes has already exited."
#    exit 1
#  fi
#  sleep 15;
#done
#
set +e
apps/cic-eth/docker/run_tests.sh (new file)
@@ -0,0 +1,11 @@
#! /bin/bash

set -e

pip install --extra-index-url https://pip.grassrootseconomics.net:8433 --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
    -r admin_requirements.txt \
    -r services_requirements.txt \
    -r test_requirements.txt

export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
@@ -1,3 +1,3 @@
|
||||
celery==4.4.7
|
||||
chainlib-eth>=0.0.9a3,<0.1.0
|
||||
chainlib-eth>=0.0.9a11,<0.1.0
|
||||
semver==2.13.0
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
chainqueue>=0.0.3a2,<0.1.0
|
||||
chainsyncer[sql]>=0.0.6a1,<0.1.0
|
||||
chainqueue>=0.0.5a1,<0.1.0
|
||||
chainsyncer[sql]>=0.0.6a3,<0.1.0
|
||||
alembic==1.4.2
|
||||
confini>=0.3.6rc4,<0.5.0
|
||||
redis==3.5.3
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# external imports
|
||||
from chainlib.connection import RPCConnection
|
||||
from chainlib.eth.nonce import OverrideNonceOracle
|
||||
from chainqueue.sql.tx import create as queue_create
|
||||
from chainlib.eth.tx import (
|
||||
TxFormat,
|
||||
unpack,
|
||||
@@ -26,6 +25,8 @@ from chainqueue.db.enum import StatusBits
|
||||
# local imports
|
||||
from cic_eth.runnable.daemons.filters.gas import GasFilter
|
||||
from cic_eth.eth.gas import cache_gas_data
|
||||
from cic_eth.encode import tx_normalize
|
||||
from cic_eth.queue.tx import queue_create
|
||||
|
||||
|
||||
def test_filter_gas(
|
||||
|
||||
@@ -22,10 +22,11 @@ from hexathon import (
|
||||
strip_0x,
|
||||
add_0x,
|
||||
)
|
||||
from chainqueue.sql.query import get_account_tx
|
||||
|
||||
# local imports
|
||||
from cic_eth.runnable.daemons.filters.register import RegistrationFilter
|
||||
from cic_eth.encode import tx_normalize
|
||||
from cic_eth.queue.query import get_account_tx_local
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
@@ -79,7 +80,7 @@ def test_register_filter(
|
||||
t.get_leaf()
|
||||
assert t.successful()
|
||||
|
||||
gift_txs = get_account_tx(default_chain_spec.asdict(), agent_roles['ALICE'], as_sender=True, session=init_database)
|
||||
gift_txs = get_account_tx_local(default_chain_spec, agent_roles['ALICE'], as_sender=True, session=init_database)
|
||||
ks = list(gift_txs.keys())
|
||||
assert len(ks) == 1
|
||||
|
||||
|
||||
10
apps/cic-eth/tests/run_tests.sh
Normal file
@@ -0,0 +1,10 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
	-r admin_requirements.txt \
	-r services_requirements.txt \
	-r test_requirements.txt
|
||||
|
||||
export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
|
||||
@@ -34,10 +34,6 @@ from chainqueue.sql.state import (
|
||||
set_ready,
|
||||
set_reserved,
|
||||
)
|
||||
from chainqueue.sql.query import (
|
||||
get_tx,
|
||||
get_nonce_tx_cache,
|
||||
)
|
||||
|
||||
# local imports
|
||||
from cic_eth.api.admin import AdminApi
|
||||
@@ -46,6 +42,11 @@ from cic_eth.db.enum import LockEnum
|
||||
from cic_eth.error import InitializationError
|
||||
from cic_eth.eth.gas import cache_gas_data
|
||||
from cic_eth.queue.tx import queue_create
|
||||
from cic_eth.queue.query import (
|
||||
get_tx,
|
||||
get_nonce_tx_local,
|
||||
)
|
||||
from cic_eth.encode import tx_normalize
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
@@ -286,13 +287,15 @@ def test_fix_nonce(
|
||||
assert t.successful()
|
||||
|
||||
init_database.commit()
|
||||
|
||||
txs = get_nonce_tx_cache(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
|
||||
|
||||
logg.debug('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
|
||||
txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
|
||||
ks = txs.keys()
|
||||
assert len(ks) == 2
|
||||
|
||||
for k in ks:
|
||||
hsh = add_0x(k)
|
||||
#hsh = add_0x(k)
|
||||
hsh = tx_normalize.tx_hash(k)
|
||||
otx = Otx.load(hsh, session=init_database)
|
||||
init_database.refresh(otx)
|
||||
logg.debug('checking nonce {} tx {} status {}'.format(3, otx.tx_hash, otx.status))
|
||||
|
||||
@@ -30,7 +30,6 @@ from chainqueue.sql.state import (
|
||||
)
|
||||
from chainqueue.db.models.otx import Otx
|
||||
from chainqueue.db.enum import StatusBits
|
||||
from chainqueue.sql.query import get_nonce_tx_cache
|
||||
from eth_erc20 import ERC20
|
||||
from cic_eth_registry import CICRegistry
|
||||
|
||||
@@ -38,6 +37,7 @@ from cic_eth_registry import CICRegistry
|
||||
from cic_eth.api.admin import AdminApi
|
||||
from cic_eth.eth.gas import cache_gas_data
|
||||
from cic_eth.eth.erc20 import cache_transfer_data
|
||||
from cic_eth.queue.query import get_nonce_tx_local
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
@@ -312,7 +312,7 @@ def test_resend_inplace(
|
||||
otx = Otx.load(tx_hash_hex, session=init_database)
|
||||
assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
|
||||
|
||||
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
|
||||
txs = get_nonce_tx_local(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
|
||||
assert len(txs) == 2
|
||||
|
||||
|
||||
@@ -363,10 +363,10 @@ def test_resend_clone(
|
||||
assert otx.status & StatusBits.IN_NETWORK == StatusBits.IN_NETWORK
|
||||
assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE
|
||||
|
||||
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
|
||||
txs = get_nonce_tx_local(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
|
||||
assert len(txs) == 1
|
||||
|
||||
txs = get_nonce_tx_cache(default_chain_spec, otx.nonce + 1, agent_roles['ALICE'], session=init_database)
|
||||
txs = get_nonce_tx_local(default_chain_spec, otx.nonce + 1, agent_roles['ALICE'], session=init_database)
|
||||
assert len(txs) == 1
|
||||
|
||||
otx = Otx.load(txs[0], session=init_database)
|
||||
|
||||
@@ -21,7 +21,6 @@ from chainlib.eth.constant import (
|
||||
MINIMUM_FEE_UNITS,
|
||||
MINIMUM_FEE_PRICE,
|
||||
)
|
||||
from chainqueue.sql.tx import create as queue_create
|
||||
from chainqueue.sql.query import get_tx
|
||||
from chainqueue.db.enum import StatusBits
|
||||
from chainqueue.sql.state import (
|
||||
@@ -35,6 +34,7 @@ from hexathon import strip_0x
|
||||
# local imports
|
||||
from cic_eth.eth.gas import cache_gas_data
|
||||
from cic_eth.error import OutOfGasError
|
||||
from cic_eth.queue.tx import queue_create
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
@@ -75,7 +75,7 @@ def test_task_check_gas_ok(
|
||||
'cic_eth.eth.gas.check_gas',
|
||||
[
|
||||
[
|
||||
tx_hash_hex,
|
||||
strip_0x(tx_hash_hex),
|
||||
],
|
||||
default_chain_spec.asdict(),
|
||||
[],
|
||||
@@ -283,4 +283,3 @@ def test_task_resend_explicit(
|
||||
tx_after = unpack(bytes.fromhex(strip_0x(otx.signed_tx)), default_chain_spec)
|
||||
logg.debug('gasprices before {} after {}'.format(tx_before['gasPrice'], tx_after['gasPrice']))
|
||||
assert tx_after['gasPrice'] > tx_before['gasPrice']
|
||||
|
||||
|
||||
@@ -51,6 +51,7 @@ def test_ext_tx_collate(
|
||||
tx_hash_hex,
|
||||
tx_signed_raw_hex,
|
||||
)
|
||||
otx.block = 666
|
||||
init_database.add(otx)
|
||||
init_database.commit()
|
||||
|
||||
|
||||
@@ -46,6 +46,7 @@ def test_set(
|
||||
tx_hash_hex,
|
||||
tx_signed_raw_hex,
|
||||
)
|
||||
otx.block = 666
|
||||
init_database.add(otx)
|
||||
init_database.commit()
|
||||
|
||||
@@ -74,7 +75,6 @@ def test_set(
|
||||
assert (tx_stored.destination_token_address == ZERO_ADDRESS)
|
||||
assert (tx_stored.from_value == tx['value'])
|
||||
assert (tx_stored.to_value == to_value)
|
||||
assert (tx_stored.block_number == 666)
|
||||
assert (tx_stored.tx_index == 13)
|
||||
|
||||
|
||||
|
||||
@@ -13,6 +13,7 @@ from cic_eth.queue.balance import (
|
||||
balance_incoming,
|
||||
assemble_balances,
|
||||
)
|
||||
from cic_eth.encode import tx_normalize
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
@@ -51,8 +52,8 @@ def test_assemble():
|
||||
r = assemble_balances(b)
|
||||
logg.debug('r {}'.format(r))
|
||||
|
||||
assert r[0]['address'] == token_foo
|
||||
assert r[1]['address'] == token_bar
|
||||
assert r[0]['address'] == tx_normalize.executable_address(token_foo)
|
||||
assert r[1]['address'] == tx_normalize.executable_address(token_bar)
|
||||
assert r[0].get('balance_foo') != None
|
||||
assert r[0].get('balance_bar') != None
|
||||
assert r[1].get('balance_baz') != None
|
||||
@@ -74,11 +75,11 @@ def test_outgoing_balance(
|
||||
token_address = '0x' + os.urandom(20).hex()
|
||||
sender = '0x' + os.urandom(20).hex()
|
||||
txc = TxCache(
|
||||
tx_hash,
|
||||
sender,
|
||||
recipient,
|
||||
token_address,
|
||||
token_address,
|
||||
tx_normalize.tx_hash(tx_hash),
|
||||
tx_normalize.wallet_address(sender),
|
||||
tx_normalize.wallet_address(recipient),
|
||||
tx_normalize.executable_address(token_address),
|
||||
tx_normalize.executable_address(token_address),
|
||||
1000,
|
||||
1000,
|
||||
session=init_database,
|
||||
@@ -125,11 +126,11 @@ def test_incoming_balance(
|
||||
token_address = '0x' + os.urandom(20).hex()
|
||||
sender = '0x' + os.urandom(20).hex()
|
||||
txc = TxCache(
|
||||
tx_hash,
|
||||
sender,
|
||||
recipient,
|
||||
token_address,
|
||||
token_address,
|
||||
tx_normalize.tx_hash(tx_hash),
|
||||
tx_normalize.wallet_address(sender),
|
||||
tx_normalize.wallet_address(recipient),
|
||||
tx_normalize.executable_address(token_address),
|
||||
tx_normalize.executable_address(token_address),
|
||||
1000,
|
||||
1000,
|
||||
session=init_database,
|
||||
|
||||
@@ -21,6 +21,7 @@ from cic_eth.db.models.lock import Lock
|
||||
from cic_eth.queue.query import get_upcoming_tx
|
||||
from cic_eth.queue.tx import register_tx
|
||||
from cic_eth.eth.gas import cache_gas_data
|
||||
from cic_eth.encode import tx_normalize
|
||||
|
||||
# test imports
|
||||
from tests.util.nonce import StaticNonceOracle
|
||||
@@ -39,8 +40,8 @@ def test_upcoming_with_lock(
|
||||
gas_oracle = RPCGasOracle(eth_rpc)
|
||||
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)
|
||||
|
||||
alice_normal = add_0x(hex_uniform(strip_0x(agent_roles['ALICE'])))
|
||||
bob_normal = add_0x(hex_uniform(strip_0x(agent_roles['BOB'])))
|
||||
alice_normal = tx_normalize.wallet_address(agent_roles['ALICE'])
|
||||
bob_normal = tx_normalize.wallet_address(agent_roles['BOB'])
|
||||
|
||||
(tx_hash_hex, tx_rpc) = c.create(alice_normal, bob_normal, 100 * (10 ** 6))
|
||||
tx_signed_raw_hex = tx_rpc['params'][0]
|
||||
|
||||
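Note: judging only from the replacement above, cic_eth.encode.tx_normalize.wallet_address appears to produce the same uniform (lowercase, 0x-prefixed) hex form that the old add_0x(hex_uniform(strip_0x(...))) chain did. A minimal sketch of that assumed behaviour, for illustration only:

# illustrative sketch only -- assumes wallet_address() lower-cases the hex
# digits and keeps the 0x prefix, mirroring add_0x(hex_uniform(strip_0x(x)))
def wallet_address_sketch(address: str) -> str:
    hex_part = address[2:] if address.startswith('0x') else address
    return '0x' + hex_part.lower()

assert wallet_address_sketch('0xDEADBEEF00000000000000000000000000000000') == \
    '0xdeadbeef00000000000000000000000000000000'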
@@ -9,7 +9,7 @@ from cic_eth.db.models.lock import Lock
|
||||
from cic_eth.db.enum import LockEnum
|
||||
from cic_eth.error import LockedError
|
||||
from cic_eth.queue.tx import queue_create
|
||||
|
||||
from cic_eth.encode import tx_normalize
|
||||
|
||||
def test_queue_lock(
|
||||
init_database,
|
||||
@@ -21,6 +21,8 @@ def test_queue_lock(
|
||||
address = '0x' + os.urandom(20).hex()
|
||||
tx_hash = '0x' + os.urandom(32).hex()
|
||||
tx_raw = '0x' + os.urandom(128).hex()
|
||||
address_normal = tx_normalize.wallet_address(address)
|
||||
tx_hash_normal = tx_normalize.tx_hash(tx_hash)
|
||||
|
||||
Lock.set(chain_str, LockEnum.QUEUE)
|
||||
with pytest.raises(LockedError):
|
||||
@@ -32,7 +34,7 @@ def test_queue_lock(
|
||||
tx_raw,
|
||||
)
|
||||
|
||||
Lock.set(chain_str, LockEnum.QUEUE, address=address)
|
||||
Lock.set(chain_str, LockEnum.QUEUE, address=address_normal)
|
||||
with pytest.raises(LockedError):
|
||||
queue_create(
|
||||
default_chain_spec,
|
||||
@@ -52,7 +54,7 @@ def test_queue_lock(
|
||||
tx_raw,
|
||||
)
|
||||
|
||||
Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
|
||||
Lock.set(chain_str, LockEnum.QUEUE, address=address_normal, tx_hash=tx_hash_normal)
|
||||
with pytest.raises(LockedError):
|
||||
queue_create(
|
||||
default_chain_spec,
|
||||
@@ -61,5 +63,3 @@ def test_queue_lock(
|
||||
tx_hash,
|
||||
tx_raw,
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
crypto-dev-signer>=0.4.15a1,<=0.4.15
|
||||
chainqueue>=0.0.3a1,<0.1.0
|
||||
crypto-dev-signer>=0.4.15a4,<=0.4.15
|
||||
chainqueue>=0.0.5a1,<0.1.0
|
||||
cic-eth-registry>=0.6.1a2,<0.7.0
|
||||
redis==3.5.3
|
||||
hexathon~=0.0.1a8
|
||||
|
||||
@@ -1,43 +1,16 @@
|
||||
|
||||
.cic_meta_variables:
|
||||
variables:
|
||||
APP_NAME: cic-meta
|
||||
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||
CONTEXT: apps/$APP_NAME
|
||||
|
||||
build-mr-cic-meta:
|
||||
extends:
|
||||
- .py_build_merge_request
|
||||
- .cic_meta_variables
|
||||
rules:
|
||||
build-test-cic-meta:
|
||||
stage: test
|
||||
tags:
|
||||
- integration
|
||||
variables:
|
||||
APP_NAME: cic-meta
|
||||
MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
|
||||
script:
|
||||
- cd apps/cic-meta
|
||||
- docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
|
||||
- docker run --entrypoint=sh $MR_IMAGE_TAG docker/run_tests.sh
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/cic-meta/**/*
|
||||
when: always
|
||||
|
||||
test-mr-cic-meta:
|
||||
extends:
|
||||
- .cic_meta_variables
|
||||
stage: test
|
||||
image: $MR_IMAGE_TAG
|
||||
script:
|
||||
- cd /root
|
||||
- npm install --dev
|
||||
- npm run test
|
||||
- npm run test:coverage
|
||||
needs: ["build-mr-cic-meta"]
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/cic-meta/**/*
|
||||
when: always
|
||||
|
||||
build-push-cic-meta:
|
||||
extends:
|
||||
- .py_build_push
|
||||
- .cic_meta_variables
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master"
|
||||
changes:
|
||||
- apps/cic-meta/**/*
|
||||
- apps/$APP_NAME/**/*
|
||||
when: always
|
||||
|
||||
1
apps/cic-meta/README.md
Normal file
@@ -0,0 +1 @@
|
||||
# CIC-Meta
|
||||
@@ -15,11 +15,10 @@ RUN --mount=type=cache,mode=0755,target=/root/.npm \
|
||||
COPY webpack.config.js .
|
||||
COPY tsconfig.json .
|
||||
## required to build the cic-client-meta module
|
||||
COPY src/ src/
|
||||
COPY scripts/ scripts/
|
||||
COPY tests/ tests/
|
||||
COPY . .
|
||||
COPY tests/*.asc /root/pgp/
|
||||
|
||||
|
||||
## copy runtime configs
|
||||
COPY .config/ /usr/local/etc/cic-meta/
|
||||
#
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
# syntax = docker/dockerfile:1.2
|
||||
#FROM node:15.3.0-alpine3.10
|
||||
FROM node:lts-alpine3.14
|
||||
|
||||
WORKDIR /root
|
||||
|
||||
RUN apk add --no-cache postgresql bash
|
||||
|
||||
# copy the dependencies
|
||||
COPY package.json package-lock.json .
|
||||
RUN npm set cache /root/.npm && \
|
||||
npm ci
|
||||
|
||||
COPY webpack.config.js .
|
||||
COPY tsconfig.json .
|
||||
## required to build the cic-client-meta module
|
||||
COPY src/ src/
|
||||
COPY scripts/ scripts/
|
||||
COPY tests/ tests/
|
||||
COPY tests/*.asc /root/pgp/
|
||||
|
||||
## copy runtime configs
|
||||
COPY .config/ /usr/local/etc/cic-meta/
|
||||
#
|
||||
## db migrations
|
||||
COPY docker/db.sh ./db.sh
|
||||
RUN chmod 755 ./db.sh
|
||||
#
|
||||
RUN alias tsc=node_modules/typescript/bin/tsc
|
||||
COPY docker/start_server.sh ./start_server.sh
|
||||
RUN chmod 755 ./start_server.sh
|
||||
ENTRYPOINT ["sh", "./start_server.sh"]
|
||||
7
apps/cic-meta/docker/run_tests.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
npm install --dev
|
||||
npm run test
|
||||
npm run test:coverage
|
||||
@@ -1,52 +1,17 @@
|
||||
.cic_notify_variables:
|
||||
variables:
|
||||
APP_NAME: cic-notify
|
||||
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||
CONTEXT: apps/$APP_NAME
|
||||
|
||||
build-mr-cic-notify:
|
||||
extends:
|
||||
- .py_build_merge_request
|
||||
- .cic_notify_variables
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/cic-notify/**/*
|
||||
when: always
|
||||
|
||||
test-mr-cic-notify:
|
||||
stage: test
|
||||
extends:
|
||||
- .cic_notify_variables
|
||||
cache:
|
||||
key:
|
||||
files:
|
||||
- test_requirements.txt
|
||||
paths:
|
||||
- /root/.cache/pip
|
||||
image: $MR_IMAGE_TAG
|
||||
script:
|
||||
- cd apps/$APP_NAME/
|
||||
- >
|
||||
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
|
||||
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
|
||||
-r test_requirements.txt
|
||||
- export PYTHONPATH=. && pytest -x --cov=cic_notify --cov-fail-under=90 --cov-report term-missing tests
|
||||
needs: ["build-mr-cic-notify"]
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/$APP_NAME/**/*
|
||||
when: always
|
||||
|
||||
build-push-cic-notify:
|
||||
extends:
|
||||
- .py_build_push
|
||||
- .cic_notify_variables
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master"
|
||||
changes:
|
||||
- apps/cic-notify/**/*
|
||||
when: always
|
||||
|
||||
|
||||
build-test-cic-notify:
|
||||
stage: test
|
||||
tags:
|
||||
- integration
|
||||
variables:
|
||||
APP_NAME: cic-notify
|
||||
MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
|
||||
script:
|
||||
- cd apps/cic-notify
|
||||
- docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
|
||||
- docker run $MR_IMAGE_TAG sh docker/run_tests.sh
|
||||
allow_failure: true
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/$APP_NAME/**/*
|
||||
when: always
|
||||
|
||||
@@ -11,12 +11,12 @@ celery_app = celery.current_app
|
||||
|
||||
|
||||
@celery_app.task
|
||||
def persist_notification(recipient, message):
|
||||
def persist_notification(message, recipient):
|
||||
"""
|
||||
:param recipient:
|
||||
:type recipient:
|
||||
:param message:
|
||||
:type message:
|
||||
:param recipient:
|
||||
:type recipient:
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
|
||||
@@ -11,12 +11,13 @@ local_logg = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@celery_app.task
|
||||
def log(recipient, message):
|
||||
def log(message, recipient):
|
||||
"""
|
||||
:param recipient:
|
||||
:type recipient:
|
||||
|
||||
:param message:
|
||||
:type message:
|
||||
:param recipient:
|
||||
:type recipient:
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
# syntax = docker/dockerfile:1.2
|
||||
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||
|
||||
#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
|
||||
|
||||
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||
COPY requirements.txt .
|
||||
|
||||
RUN pip install --index-url https://pypi.org/simple \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||
-r requirements.txt
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN python setup.py install
|
||||
|
||||
COPY docker/*.sh .
|
||||
RUN chmod +x *.sh
|
||||
|
||||
# ini files in config directory defines the configurable parameters for the application
|
||||
# they can all be overridden by environment variables
|
||||
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||
COPY .config/ /usr/local/etc/cic-notify/
|
||||
COPY cic_notify/db/migrations/ /usr/local/share/cic-notify/alembic/
|
||||
|
||||
ENTRYPOINT []
|
||||
9
apps/cic-notify/docker/run_tests.sh
Normal file
@@ -0,0 +1,9 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
|
||||
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
|
||||
-r test_requirements.txt
|
||||
|
||||
export PYTHONPATH=. && pytest -x --cov=cic_notify --cov-fail-under=90 --cov-report term-missing tests
|
||||
@@ -1,52 +1,16 @@
|
||||
.cic_ussd_variables:
|
||||
variables:
|
||||
APP_NAME: cic-ussd
|
||||
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||
CONTEXT: apps/$APP_NAME
|
||||
|
||||
build-mr-cic-ussd:
|
||||
extends:
|
||||
- .py_build_merge_request
|
||||
- .cic_ussd_variables
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/cic-ussd/**/*
|
||||
when: always
|
||||
|
||||
test-mr-cic-ussd:
|
||||
stage: test
|
||||
extends:
|
||||
- .cic_ussd_variables
|
||||
cache:
|
||||
key:
|
||||
files:
|
||||
- test_requirements.txt
|
||||
paths:
|
||||
- /root/.cache/pip
|
||||
image: $MR_IMAGE_TAG
|
||||
script:
|
||||
- cd apps/$APP_NAME/
|
||||
- >
|
||||
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
|
||||
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
|
||||
-r test_requirements.txt
|
||||
- export PYTHONPATH=. && pytest -x --cov=cic_ussd --cov-fail-under=90 --cov-report term-missing tests/cic_ussd
|
||||
needs: ["build-mr-cic-ussd"]
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/$APP_NAME/**/*
|
||||
when: always
|
||||
|
||||
build-push-cic-ussd:
|
||||
extends:
|
||||
- .py_build_push
|
||||
- .cic_ussd_variables
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master"
|
||||
changes:
|
||||
- apps/cic-ussd/**/*
|
||||
when: always
|
||||
|
||||
|
||||
build-test-cic-ussd:
|
||||
stage: test
|
||||
tags:
|
||||
- integration
|
||||
variables:
|
||||
APP_NAME: cic-ussd
|
||||
MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
|
||||
script:
|
||||
- cd apps/cic-ussd
|
||||
- docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
|
||||
- docker run $MR_IMAGE_TAG sh docker/run_tests.sh
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/$APP_NAME/**/*
|
||||
when: always
|
||||
|
||||
@@ -20,7 +20,7 @@ def get_balances(address: str,
|
||||
asynchronous: bool = False,
|
||||
callback_param: any = None,
|
||||
callback_queue='cic-ussd',
|
||||
callback_task='cic_ussd.tasks.callback_handler.process_balances_callback') -> Optional[list]:
|
||||
callback_task='cic_ussd.tasks.callback_handler.balances_callback') -> Optional[list]:
|
||||
"""This function queries cic-eth for an account's balances, It provides a means to receive the balance either
|
||||
asynchronously or synchronously.. It returns a dictionary containing the network, outgoing and incoming balances.
|
||||
:param address: Ethereum address of an account.
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
# standard import
|
||||
import decimal
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict, Tuple
|
||||
|
||||
@@ -8,6 +9,8 @@ from cic_eth.api import Api
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
# local import
|
||||
from cic_ussd.account.chain import Chain
|
||||
from cic_ussd.account.tokens import get_cached_default_token
|
||||
from cic_ussd.db.models.account import Account
|
||||
from cic_ussd.db.models.base import SessionBase
|
||||
from cic_ussd.error import UnknownUssdRecipient
|
||||
@@ -59,7 +62,9 @@ def from_wei(value: int) -> float:
|
||||
:return: SRF equivalent of value in Wei
|
||||
:rtype: float
|
||||
"""
|
||||
value = float(value) / 1e+6
|
||||
cached_token_data = json.loads(get_cached_default_token(Chain.spec.__str__()))
|
||||
token_decimals: int = cached_token_data.get('decimals')
|
||||
value = float(value) / (10**token_decimals)
|
||||
return truncate(value=value, decimals=2)
|
||||
|
||||
|
||||
@@ -70,7 +75,9 @@ def to_wei(value: int) -> int:
|
||||
:return: Wei equivalent of value in SRF
|
||||
:rtype: int
|
||||
"""
|
||||
return int(value * 1e+6)
|
||||
cached_token_data = json.loads(get_cached_default_token(Chain.spec.__str__()))
|
||||
token_decimals: int = cached_token_data.get('decimals')
|
||||
return int(value * (10**token_decimals))
|
||||
|
||||
|
||||
def truncate(value: float, decimals: int):
|
||||
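Note: the hunk above swaps the hard-coded 10**6 scaling for the cached default token's decimals field. A self-contained sketch of the same conversion, with the cache lookup stubbed out as an assumption (token_decimals is passed in directly here, not read from the cache):

# illustrative sketch only -- not the project's code
def from_wei_sketch(value: int, token_decimals: int) -> float:
    scaled = value / (10 ** token_decimals)
    return int(scaled * 100) / 100  # truncate to two decimals, as truncate() does

def to_wei_sketch(value: float, token_decimals: int) -> int:
    return int(value * (10 ** token_decimals))

assert from_wei_sketch(1_500_000, 6) == 1.5
assert to_wei_sketch(1.5, 6) == 1_500_000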
@@ -117,18 +124,18 @@ def transaction_actors(transaction: dict) -> Tuple[Dict, Dict]:
|
||||
return recipient_transaction_data, sender_transaction_data
|
||||
|
||||
|
||||
def validate_transaction_account(session: Session, transaction: dict) -> Account:
|
||||
def validate_transaction_account(blockchain_address: str, role: str, session: Session) -> Account:
|
||||
"""This function checks whether the blockchain address specified in a parsed transaction object resolves to an
|
||||
account object in the ussd system.
|
||||
:param session: Database session object.
|
||||
:type session: Session
|
||||
:param transaction: Parsed transaction data object.
|
||||
:type transaction: dict
|
||||
:param blockchain_address:
|
||||
:type blockchain_address:
|
||||
:param role:
|
||||
:type role:
|
||||
:param session:
|
||||
:type session:
|
||||
:return:
|
||||
:rtype:
|
||||
"""
|
||||
blockchain_address = transaction.get('blockchain_address')
|
||||
role = transaction.get('role')
|
||||
session = SessionBase.bind_session(session)
|
||||
account = session.query(Account).filter_by(blockchain_address=blockchain_address).first()
|
||||
if not account:
|
||||
|
||||
@@ -44,7 +44,7 @@ class MetadataRequestsHandler(Metadata):
|
||||
|
||||
def create(self, data: Union[Dict, str]):
|
||||
""""""
|
||||
data = json.dumps(data)
|
||||
data = json.dumps(data).encode('utf-8')
|
||||
result = make_request(method='POST', url=self.url, data=data, headers=self.headers)
|
||||
|
||||
error_handler(result=result)
|
||||
|
||||
@@ -67,6 +67,7 @@ def resume_last_ussd_session(last_state: str) -> Document:
|
||||
'exit',
|
||||
'exit_invalid_pin',
|
||||
'exit_invalid_new_pin',
|
||||
'exit_invalid_recipient',
|
||||
'exit_invalid_request',
|
||||
'exit_pin_blocked',
|
||||
'exit_pin_mismatch',
|
||||
|
||||
@@ -146,7 +146,7 @@ def create_ussd_session(
|
||||
)
|
||||
|
||||
|
||||
def update_ussd_session(ussd_session: UssdSession,
|
||||
def update_ussd_session(ussd_session: DbUssdSession,
|
||||
user_input: str,
|
||||
state: str,
|
||||
data: Optional[dict] = None) -> UssdSession:
|
||||
|
||||
@@ -4,6 +4,7 @@ from typing import Tuple
|
||||
|
||||
# third party imports
|
||||
import celery
|
||||
from phonenumbers.phonenumberutil import NumberParseException
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.balance import get_cached_available_balance
|
||||
@@ -21,23 +22,21 @@ logg = logging.getLogger(__file__)
|
||||
|
||||
|
||||
def is_valid_recipient(state_machine_data: Tuple[str, dict, Account, Session]) -> bool:
|
||||
"""This function checks that a user exists, is not the initiator of the transaction, has an active account status
|
||||
and is authorized to perform standard transactions.
|
||||
"""This function checks that a phone number provided as the recipient of a transaction does not match the sending
|
||||
party's own phone number.
|
||||
:param state_machine_data: A tuple containing user input, a ussd session and user object.
|
||||
:type state_machine_data: tuple
|
||||
:return: A user's validity
|
||||
:return: A recipient account's validity for a transaction
|
||||
:rtype: bool
|
||||
"""
|
||||
user_input, ussd_session, account, session = state_machine_data
|
||||
phone_number = process_phone_number(user_input, E164Format.region)
|
||||
session = SessionBase.bind_session(session=session)
|
||||
recipient = Account.get_by_phone_number(phone_number=phone_number, session=session)
|
||||
SessionBase.release_session(session=session)
|
||||
try:
|
||||
phone_number = process_phone_number(user_input, E164Format.region)
|
||||
except NumberParseException:
|
||||
phone_number = None
|
||||
is_not_initiator = phone_number != account.phone_number
|
||||
has_active_account_status = False
|
||||
if recipient:
|
||||
has_active_account_status = recipient.get_status(session) == AccountStatus.ACTIVE.name
|
||||
return is_not_initiator and has_active_account_status and recipient is not None
|
||||
is_present = Account.get_by_phone_number(phone_number, session) is not None
|
||||
return phone_number is not None and phone_number.startswith('+') and is_present and is_not_initiator
|
||||
|
||||
|
||||
def is_valid_transaction_amount(state_machine_data: Tuple[str, dict, Account, Session]) -> bool:
|
||||
|
||||
@@ -138,26 +138,14 @@ def transaction_balances_callback(self, result: list, param: dict, status_code:
|
||||
balances_data = result[0]
|
||||
available_balance = calculate_available_balance(balances_data)
|
||||
transaction = param
|
||||
blockchain_address = param.get('blockchain_address')
|
||||
transaction['available_balance'] = available_balance
|
||||
queue = self.request.delivery_info.get('routing_key')
|
||||
|
||||
s_preferences_metadata = celery.signature(
|
||||
'cic_ussd.tasks.metadata.query_preferences_metadata', [blockchain_address], queue=queue
|
||||
)
|
||||
s_process_account_metadata = celery.signature(
|
||||
'cic_ussd.tasks.processor.parse_transaction', [transaction], queue=queue
|
||||
)
|
||||
s_notify_account = celery.signature('cic_ussd.tasks.notifications.transaction', queue=queue)
|
||||
|
||||
if param.get('transaction_type') == 'transfer':
|
||||
celery.chain(s_preferences_metadata, s_process_account_metadata, s_notify_account).apply_async()
|
||||
|
||||
if param.get('transaction_type') == 'tokengift':
|
||||
s_process_account_metadata = celery.signature(
|
||||
'cic_ussd.tasks.processor.parse_transaction', [{}, transaction], queue=queue
|
||||
)
|
||||
celery.chain(s_process_account_metadata, s_notify_account).apply_async()
|
||||
celery.chain(s_process_account_metadata, s_notify_account).apply_async()
|
||||
|
||||
|
||||
@celery_app.task
|
||||
@@ -184,10 +172,11 @@ def transaction_callback(result: dict, param: str, status_code: int):
|
||||
source_token_value = result.get('source_token_value')
|
||||
|
||||
recipient_metadata = {
|
||||
"token_symbol": destination_token_symbol,
|
||||
"token_value": destination_token_value,
|
||||
"alt_blockchain_address": sender_blockchain_address,
|
||||
"blockchain_address": recipient_blockchain_address,
|
||||
"role": "recipient",
|
||||
"token_symbol": destination_token_symbol,
|
||||
"token_value": destination_token_value,
|
||||
"transaction_type": param
|
||||
}
|
||||
|
||||
@@ -201,10 +190,11 @@ def transaction_callback(result: dict, param: str, status_code: int):
|
||||
|
||||
if param == 'transfer':
|
||||
sender_metadata = {
|
||||
"alt_blockchain_address": recipient_blockchain_address,
|
||||
"blockchain_address": sender_blockchain_address,
|
||||
"role": "sender",
|
||||
"token_symbol": source_token_symbol,
|
||||
"token_value": source_token_value,
|
||||
"role": "sender",
|
||||
"transaction_type": param
|
||||
}
|
||||
|
||||
|
||||
@@ -29,7 +29,8 @@ def transaction(notification_data: dict):
|
||||
phone_number = notification_data.get('phone_number')
|
||||
preferred_language = notification_data.get('preferred_language')
|
||||
token_symbol = notification_data.get('token_symbol')
|
||||
transaction_account_metadata = notification_data.get('metadata_id')
|
||||
alt_metadata_id = notification_data.get('alt_metadata_id')
|
||||
metadata_id = notification_data.get('metadata_id')
|
||||
transaction_type = notification_data.get('transaction_type')
|
||||
timestamp = datetime.datetime.now().strftime('%d-%m-%y, %H:%M %p')
|
||||
|
||||
@@ -47,7 +48,8 @@ def transaction(notification_data: dict):
|
||||
preferred_language=preferred_language,
|
||||
amount=amount,
|
||||
token_symbol=token_symbol,
|
||||
tx_sender_information=transaction_account_metadata,
|
||||
tx_recipient_information=metadata_id,
|
||||
tx_sender_information=alt_metadata_id,
|
||||
timestamp=timestamp,
|
||||
balance=balance)
|
||||
if role == 'sender':
|
||||
@@ -56,6 +58,7 @@ def transaction(notification_data: dict):
|
||||
preferred_language=preferred_language,
|
||||
amount=amount,
|
||||
token_symbol=token_symbol,
|
||||
tx_recipient_information=transaction_account_metadata,
|
||||
tx_recipient_information=alt_metadata_id,
|
||||
tx_sender_information=metadata_id,
|
||||
timestamp=timestamp,
|
||||
balance=balance)
|
||||
|
||||
@@ -8,9 +8,11 @@ import i18n
|
||||
from chainlib.hash import strip_0x
|
||||
|
||||
# local imports
|
||||
from cic_ussd.account.metadata import get_cached_preferred_language
|
||||
from cic_ussd.account.statement import get_cached_statement
|
||||
from cic_ussd.account.transaction import aux_transaction_data, validate_transaction_account
|
||||
from cic_ussd.cache import cache_data, cache_data_key
|
||||
from cic_ussd.db.models.account import Account
|
||||
from cic_ussd.db.models.base import SessionBase
|
||||
|
||||
|
||||
@@ -57,28 +59,32 @@ def cache_statement(parsed_transaction: dict, querying_party: str):
|
||||
|
||||
|
||||
@celery_app.task
|
||||
def parse_transaction(preferences: dict, transaction: dict) -> dict:
|
||||
def parse_transaction(transaction: dict) -> dict:
|
||||
"""This function parses transaction objects and collates all relevant data for system use i.e:
|
||||
- An account's set preferred language.
|
||||
- Account identifier that facilitates notification.
|
||||
- Contextual tags i.e action and direction tags.
|
||||
:param preferences: An account's set preferences.
|
||||
:type preferences: dict
|
||||
:param transaction: Transaction object.
|
||||
:type transaction: dict
|
||||
:return: Transaction object with contextual data for use in the system.
|
||||
:rtype: dict
|
||||
"""
|
||||
preferred_language = preferences.get('preferred_language')
|
||||
preferred_language = get_cached_preferred_language(transaction.get('blockchain_address'))
|
||||
if not preferred_language:
|
||||
preferred_language = i18n.config.get('fallback')
|
||||
|
||||
transaction['preferred_language'] = preferred_language
|
||||
transaction = aux_transaction_data(preferred_language, transaction)
|
||||
session = SessionBase.create_session()
|
||||
account = validate_transaction_account(session, transaction)
|
||||
metadata_id = account.standard_metadata_id()
|
||||
transaction['metadata_id'] = metadata_id
|
||||
role = transaction.get('role')
|
||||
alt_blockchain_address = transaction.get('alt_blockchain_address')
|
||||
blockchain_address = transaction.get('blockchain_address')
|
||||
account = validate_transaction_account(blockchain_address, role, session)
|
||||
alt_account = session.query(Account).filter_by(blockchain_address=alt_blockchain_address).first()
|
||||
if alt_account:
|
||||
transaction['alt_metadata_id'] = alt_account.standard_metadata_id()
|
||||
else:
|
||||
transaction['alt_metadata_id'] = 'GRASSROOTS ECONOMICS'
|
||||
transaction['metadata_id'] = account.standard_metadata_id()
|
||||
transaction['phone_number'] = account.phone_number
|
||||
session.commit()
|
||||
session.close()
|
||||
return transaction
|
||||
|
||||
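Note: a hypothetical input/output pair for the reworked parse_transaction task, based only on the keys visible in this hunk; addresses and metadata ids are placeholders, not real values.

# hypothetical example -- illustration of the assumed dict shape only
tx_in = {
    'blockchain_address': '0x' + 'aa' * 20,      # the notified party
    'alt_blockchain_address': '0x' + 'bb' * 20,  # the counterparty
    'role': 'recipient',
}
# after parse_transaction, the same dict would additionally carry roughly:
#   'preferred_language': 'en',                  # cached preference or i18n fallback
#   'metadata_id': '<standard metadata id>',     # from the resolved Account
#   'alt_metadata_id': 'GRASSROOTS ECONOMICS',   # fallback when counterparty is unknown
#   'phone_number': '+254...',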
@@ -1,7 +1,7 @@
|
||||
# standard imports
|
||||
import semver
|
||||
|
||||
version = (0, 3, 1, 'alpha.1')
|
||||
version = (0, 3, 1, 'alpha.4')
|
||||
|
||||
version_object = semver.VersionInfo(
|
||||
major=version[0],
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
# syntax = docker/dockerfile:1.2
|
||||
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||
RUN apt-get install -y redis-server
|
||||
|
||||
# create secrets directory
|
||||
RUN mkdir -vp pgp/keys
|
||||
|
||||
# create application directory
|
||||
RUN mkdir -vp cic-ussd
|
||||
RUN mkdir -vp data
|
||||
|
||||
COPY requirements.txt .
|
||||
|
||||
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||
RUN pip install --index-url https://pypi.org/simple \
|
||||
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||
-r requirements.txt
|
||||
|
||||
COPY . .
|
||||
RUN python setup.py install
|
||||
|
||||
COPY cic_ussd/db/ussd_menu.json data/
|
||||
|
||||
COPY docker/*.sh .
|
||||
RUN chmod +x /root/*.sh
|
||||
|
||||
# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
|
||||
COPY config/ /usr/local/etc/cic-ussd/
|
||||
COPY cic_ussd/db/migrations/ /usr/local/share/cic-ussd/alembic
|
||||
|
||||
ENTRYPOINT []
|
||||
10
apps/cic-ussd/docker/run_tests.sh
Normal file
@@ -0,0 +1,10 @@
|
||||
#! /bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
|
||||
pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
|
||||
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
|
||||
-r test_requirements.txt
|
||||
|
||||
export PYTHONPATH=. && pytest -x --cov=cic_ussd --cov-fail-under=90 --cov-report term-missing tests/cic_ussd
|
||||
@@ -1,7 +1,7 @@
|
||||
alembic==1.4.2
|
||||
bcrypt==3.2.0
|
||||
celery==4.4.7
|
||||
cic-eth[services]~=0.12.4a3
|
||||
cic-eth[services]~=0.12.4a7
|
||||
cic-notify~=0.4.0a10
|
||||
cic-types~=0.1.0a14
|
||||
confini>=0.4.1a1,<0.5.0
|
||||
@@ -13,11 +13,5 @@ redis==3.5.3
|
||||
semver==2.13.0
|
||||
SQLAlchemy==1.3.20
|
||||
tinydb==4.2.0
|
||||
phonenumbers==8.12.12
|
||||
redis==3.5.3
|
||||
celery==4.4.7
|
||||
python-i18n[YAML]==0.3.9
|
||||
pyxdg==0.27
|
||||
bcrypt==3.2.0
|
||||
uWSGI==2.0.19.1
|
||||
transitions==0.8.4
|
||||
uWSGI==2.0.19.1
|
||||
|
||||
@@ -75,17 +75,21 @@ def test_transaction_actors(activated_account, transaction_result, valid_recipie
|
||||
def test_validate_transaction_account(activated_account, init_database, transactions_list):
|
||||
sample_transaction = transactions_list[0]
|
||||
recipient_transaction, sender_transaction = transaction_actors(sample_transaction)
|
||||
recipient_account = validate_transaction_account(init_database, recipient_transaction)
|
||||
sender_account = validate_transaction_account(init_database, sender_transaction)
|
||||
recipient_account = validate_transaction_account(
|
||||
recipient_transaction.get('blockchain_address'), recipient_transaction.get('role'), init_database)
|
||||
sender_account = validate_transaction_account(
|
||||
sender_transaction.get('blockchain_address'), sender_transaction.get('role'), init_database)
|
||||
assert isinstance(recipient_account, Account)
|
||||
assert isinstance(sender_account, Account)
|
||||
sample_transaction = transactions_list[1]
|
||||
recipient_transaction, sender_transaction = transaction_actors(sample_transaction)
|
||||
with pytest.raises(UnknownUssdRecipient) as error:
|
||||
validate_transaction_account(init_database, recipient_transaction)
|
||||
validate_transaction_account(
|
||||
recipient_transaction.get('blockchain_address'), recipient_transaction.get('role'), init_database)
|
||||
assert str(
|
||||
error.value) == f'Tx for recipient: {recipient_transaction.get("blockchain_address")} has no matching account in the system.'
|
||||
validate_transaction_account(init_database, sender_transaction)
|
||||
validate_transaction_account(
|
||||
sender_transaction.get('blockchain_address'), sender_transaction.get('role'), init_database)
|
||||
assert f'Tx from sender: {sender_transaction.get("blockchain_address")} has no matching account in system.'
|
||||
|
||||
|
||||
|
||||
@@ -30,8 +30,6 @@ def test_is_valid_recipient(activated_account,
|
||||
valid_recipient):
|
||||
state_machine = ('0112365478', generic_ussd_session, valid_recipient, init_database)
|
||||
assert is_valid_recipient(state_machine) is False
|
||||
state_machine = (pending_account.phone_number, generic_ussd_session, valid_recipient, init_database)
|
||||
assert is_valid_recipient(state_machine) is False
|
||||
state_machine = (valid_recipient.phone_number, generic_ussd_session, activated_account, init_database)
|
||||
assert is_valid_recipient(state_machine) is True
|
||||
|
||||
|
||||
@@ -24,7 +24,8 @@ def test_transaction(celery_session_worker,
|
||||
phone_number = notification_data.get('phone_number')
|
||||
preferred_language = notification_data.get('preferred_language')
|
||||
token_symbol = notification_data.get('token_symbol')
|
||||
transaction_account_metadata = notification_data.get('metadata_id')
|
||||
alt_metadata_id = notification_data.get('alt_metadata_id')
|
||||
metadata_id = notification_data.get('metadata_id')
|
||||
timestamp = datetime.datetime.now().strftime('%d-%m-%y, %H:%M %p')
|
||||
s_transaction = celery.signature(
|
||||
'cic_ussd.tasks.notifications.transaction', [notification_data]
|
||||
@@ -36,7 +37,8 @@ def test_transaction(celery_session_worker,
|
||||
preferred_language=preferred_language,
|
||||
amount=amount,
|
||||
token_symbol=token_symbol,
|
||||
tx_recipient_information=transaction_account_metadata,
|
||||
tx_recipient_information=alt_metadata_id,
|
||||
tx_sender_information=metadata_id,
|
||||
timestamp=timestamp,
|
||||
balance=balance)
|
||||
assert mock_notifier_api.get('message') == message
|
||||
@@ -52,7 +54,8 @@ def test_transaction(celery_session_worker,
|
||||
preferred_language=preferred_language,
|
||||
amount=amount,
|
||||
token_symbol=token_symbol,
|
||||
tx_sender_information=transaction_account_metadata,
|
||||
tx_recipient_information=metadata_id,
|
||||
tx_sender_information=alt_metadata_id,
|
||||
timestamp=timestamp,
|
||||
balance=balance)
|
||||
assert mock_notifier_api.get('message') == message
|
||||
|
||||
9
apps/cic-ussd/tests/fixtures/transaction.py
vendored
@@ -5,12 +5,18 @@ import random
|
||||
import pytest
|
||||
|
||||
# local import
|
||||
from cic_ussd.account.balance import get_cached_available_balance
|
||||
|
||||
# tests imports
|
||||
|
||||
|
||||
@pytest.fixture(scope='function')
|
||||
def notification_data(activated_account, cache_person_metadata, cache_preferences, preferences):
|
||||
def notification_data(activated_account,
|
||||
cache_person_metadata,
|
||||
cache_preferences,
|
||||
cache_balances,
|
||||
preferences,
|
||||
valid_recipient):
|
||||
return {
|
||||
'blockchain_address': activated_account.blockchain_address,
|
||||
'token_symbol': 'GFT',
|
||||
@@ -18,6 +24,7 @@ def notification_data(activated_account, cache_person_metadata, cache_preference
|
||||
'role': 'sender',
|
||||
'action_tag': 'Sent',
|
||||
'direction_tag': 'To',
|
||||
'alt_metadata_id': valid_recipient.standard_metadata_id(),
|
||||
'metadata_id': activated_account.standard_metadata_id(),
|
||||
'phone_number': activated_account.phone_number,
|
||||
'available_balance': 50.0,
|
||||
|
||||
@@ -2,9 +2,9 @@ en:
|
||||
account_successfully_created: |-
|
||||
You have been registered on Sarafu Network! To use dial *384*96# on Safaricom and *483*96# on other networks. For help %{support_phone}.
|
||||
received_tokens: |-
|
||||
Successfully received %{amount} %{token_symbol} from %{tx_sender_information} %{timestamp}. New balance is %{balance} %{token_symbol}.
|
||||
Successfully received %{amount} %{token_symbol} from %{tx_sender_information} %{timestamp} to %{tx_recipient_information}. New balance is %{balance} %{token_symbol}.
|
||||
sent_tokens: |-
|
||||
Successfully sent %{amount} %{token_symbol} to %{tx_recipient_information} %{timestamp}. New balance is %{balance} %{token_symbol}.
|
||||
Successfully sent %{amount} %{token_symbol} to %{tx_recipient_information} %{timestamp} from %{tx_sender_information}. New balance is %{balance} %{token_symbol}.
|
||||
terms: |-
|
||||
By using the service, you agree to the terms and conditions at http://grassecon.org/tos
|
||||
upsell_unregistered_recipient: |-
|
||||
|
||||
@@ -2,9 +2,9 @@ sw:
|
||||
account_successfully_created: |-
|
||||
Umesajiliwa kwa huduma ya Sarafu! Kutumia bonyeza *384*96# Safaricom ama *483*46# kwa utandao tofauti. Kwa Usaidizi %{support_phone}.
|
||||
received_tokens: |-
|
||||
Umepokea %{amount} %{token_symbol} kutoka kwa %{tx_sender_information} %{timestamp}. Salio lako ni %{balance} %{token_symbol}.
|
||||
Umepokea %{amount} %{token_symbol} kutoka kwa %{tx_sender_information} %{timestamp} ikapokewa na %{tx_recipient_information}. Salio lako ni %{balance} %{token_symbol}.
|
||||
sent_tokens: |-
|
||||
Umetuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp}. Salio lako ni %{balance} %{token_symbol}.
|
||||
Umetuma %{amount} %{token_symbol} kwa %{tx_recipient_information} %{timestamp} kutoka kwa %{tx_sender_information}. Salio lako ni %{balance} %{token_symbol}.
|
||||
terms: |-
|
||||
Kwa kutumia hii huduma, umekubali sheria na masharti yafuatayo http://grassecon.org/tos
|
||||
upsell_unregistered_recipient: |-
|
||||
|
||||
@@ -7,10 +7,8 @@ en:
|
||||
3. Help
|
||||
initial_pin_entry: |-
|
||||
CON Please enter a new four number PIN for your account.
|
||||
0. Back
|
||||
initial_pin_confirmation: |-
|
||||
CON Enter your four number PIN again
|
||||
0. Back
|
||||
enter_given_name: |-
|
||||
CON Enter first name
|
||||
0. Back
|
||||
@@ -183,7 +181,7 @@ en:
|
||||
exit_pin_mismatch: |-
|
||||
END The new PIN does not match the one you entered. Please try again. For help, call %{support_phone}.
|
||||
exit_invalid_recipient: |-
|
||||
CON Recipient phone number is incorrect.
|
||||
CON Recipient's phone number is not registered or is invalid:
|
||||
00. Retry
|
||||
99. Exit
|
||||
exit_successful_transaction: |-
|
||||
|
||||
@@ -7,13 +7,10 @@ sw:
|
||||
3. Help
|
||||
initial_pin_entry: |-
|
||||
CON Tafadhali weka pin mpya yenye nambari nne kwa akaunti yako
|
||||
0. Nyuma
|
||||
initial_pin_confirmation: |-
|
||||
CON Weka PIN yako tena
|
||||
0. Nyuma
|
||||
enter_given_name: |-
|
||||
CON Weka jina lako la kwanza
|
||||
0. Nyuma
|
||||
enter_family_name: |-
|
||||
CON Weka jina lako la mwisho
|
||||
0. Nyuma
|
||||
|
||||
@@ -1,25 +1,25 @@
|
||||
.contract_migration_variables:
|
||||
variables:
|
||||
APP_NAME: contract-migration
|
||||
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||
CONTEXT: apps/$APP_NAME
|
||||
|
||||
build-mr-contract-migration:
|
||||
extends:
|
||||
- .py_build_merge_request
|
||||
- .contract_migration_variables
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- apps/contract-migration/**/*
|
||||
when: always
|
||||
|
||||
build-push-contract-migration:
|
||||
extends:
|
||||
- .py_build_push
|
||||
- .contract_migration_variables
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "master"
|
||||
changes:
|
||||
- apps/contract-migration/**/*
|
||||
when: always
|
||||
#.contract_migration_variables:
|
||||
# variables:
|
||||
# APP_NAME: contract-migration
|
||||
# DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||
# CONTEXT: apps/$APP_NAME
|
||||
#
|
||||
#build-mr-contract-migration:
|
||||
# extends:
|
||||
# - .py_build_merge_request
|
||||
# - .contract_migration_variables
|
||||
# rules:
|
||||
# - if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
# changes:
|
||||
# - apps/contract-migration/**/*
|
||||
# when: always
|
||||
#
|
||||
#build-push-contract-migration:
|
||||
# extends:
|
||||
# - .py_build_push
|
||||
# - .contract_migration_variables
|
||||
# rules:
|
||||
# - if: $CI_COMMIT_BRANCH == "master"
|
||||
# changes:
|
||||
# - apps/contract-migration/**/*
|
||||
# when: always
|
||||
|
||||
43
apps/contract-migration/config.sh
Normal file
@@ -0,0 +1,43 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -a
|
||||
|
||||
if [ -z $DEV_DATA_DIR ]; then
|
||||
export DEV_DATA_DIR=`mktemp -d`
|
||||
else
|
||||
mkdir -p $DEV_DATA_DIR
|
||||
fi
|
||||
|
||||
if [ -z $DEV_CONFIG_RESET ]; then
|
||||
if [ -f ${DEV_DATA_DIR}/env_reset ]; then
|
||||
>&2 echo "importing existing configuration values from ${DEV_DATA_DIR}/env_reset"
|
||||
. ${DEV_DATA_DIR}/env_reset
|
||||
fi
|
||||
fi
|
||||
|
||||
# Handle wallet
|
||||
export WALLET_KEY_FILE=${WALLET_KEY_FILE:-`realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c`}
|
||||
if [ ! -f $WALLET_KEY_FILE ]; then
|
||||
>&2 echo "wallet path '$WALLET_KEY_FILE' does not point to a file"
|
||||
exit 1
|
||||
fi
|
||||
export DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $WALLET_KEY_FILE | jq -r .address)`
|
||||
|
||||
# Wallet dependent variable defaults
|
||||
export DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
||||
export DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
||||
export CIC_TRUST_ADDRESS=${CIC_TRUST_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
||||
export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
|
||||
export TOKEN_SINK_ADDRESS=${TOKEN_SINK_ADDRESS:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
||||
|
||||
|
||||
# Legacy variable defaults
|
||||
|
||||
|
||||
# Migration variable processing
|
||||
|
||||
confini-dump --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
cat ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
set +a
|
||||
23
apps/contract-migration/config/config.ini
Normal file
@@ -0,0 +1,23 @@
|
||||
[token]
|
||||
name = Giftable Token
|
||||
symbol = GFT
|
||||
type = giftable_erc20_token
|
||||
demurrage_level = 196454828847045000000000000000000
|
||||
redistribution_period =
|
||||
supply_limit =
|
||||
sink_address =
|
||||
|
||||
|
||||
[dev]
|
||||
eth_account_contract_deployer =
|
||||
eth_account_reserve_minter =
|
||||
eth_account_accounts_index_writer =
|
||||
reserve_amount = 10000000000000000000000000000000000
|
||||
faucet_amount = 0
|
||||
gas_amount = 100000000000000000000000
|
||||
token_amount = 100000000000000000000000
|
||||
eth_gas_price =
|
||||
data_dir =
|
||||
pip_extra_index_url =
|
||||
eth_provider_host =
|
||||
eth_provider_port =
|
||||
@@ -1,6 +1,6 @@
|
||||
cic-eth[tools]==0.12.4a4
|
||||
chainlib-eth>=0.0.9a3,<0.1.0
|
||||
eth-erc20>=0.1.2a2,<0.2.0
|
||||
cic-eth[tools]==0.12.4a8
|
||||
chainlib-eth>=0.0.9a9,<0.1.0
|
||||
eth-erc20>=0.1.2a3,<0.2.0
|
||||
erc20-demurrage-token>=0.0.5a2,<0.1.0
|
||||
eth-accounts-index>=0.1.2a2,<0.2.0
|
||||
eth-address-index>=0.2.3a4,<0.3.0
|
||||
@@ -8,3 +8,5 @@ cic-eth-registry>=0.6.1a2,<0.7.0
|
||||
erc20-transfer-authorization>=0.3.5a2,<0.4.0
|
||||
erc20-faucet>=0.3.2a2,<0.4.0
|
||||
sarafu-faucet>=0.0.7a2,<0.1.0
|
||||
confini>=0.4.2rc3,<1.0.0
|
||||
crypto-dev-signer>=0.4.15a4,<=0.4.15
|
||||
|
||||
@@ -2,179 +2,115 @@
|
||||
|
||||
set -a
|
||||
|
||||
default_token=giftable_erc20_token
|
||||
TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL}
|
||||
TOKEN_NAME=${TOKEN_NAME}
|
||||
TOKEN_TYPE=${TOKEN_TYPE:-$default_token}
|
||||
cat <<EOF
|
||||
external token settings:
|
||||
token_type: $TOKEN_TYPE
|
||||
token_symbol: $TOKEN_SYMBOL
|
||||
token_name: $TOKEN_NAME
|
||||
token_decimals: $TOKEN_DECIMALS
|
||||
token_demurrage: $TOKEN_DEMURRAGE_LEVEL
|
||||
token_redistribution_period: $TOKEN_REDISTRIBUTION_PERIOD
|
||||
token_supply_limit: $TOKEN_SUPPLY_LIMIT
|
||||
EOF
|
||||
|
||||
CHAIN_SPEC=${CHAIN_SPEC:-$CIC_CHAIN_SPEC}
|
||||
RPC_HTTP_PROVIDER=${RPC_HTTP_PROVIDER:-$ETH_PROVIDER}
|
||||
|
||||
DEV_ETH_ACCOUNT_RESERVE_MINTER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
||||
DEV_ETH_ACCOUNT_ACCOUNTS_INDEX_WRITER=${DEV_ETH_ACCOUNT_RESERVE_MINTER:-$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER}
|
||||
DEV_RESERVE_AMOUNT=${DEV_ETH_RESERVE_AMOUNT:-""10000000000000000000000000000000000}
|
||||
DEV_FAUCET_AMOUNT=${DEV_FAUCET_AMOUNT:-0}
|
||||
DEV_ETH_KEYSTORE_FILE=${DEV_ETH_KEYSTORE_FILE:-`realpath ./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c`}
|
||||
. ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
set -e
|
||||
|
||||
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=`eth-checksum $(cat $DEV_ETH_KEYSTORE_FILE | jq -r .address)`
|
||||
|
||||
if [ ! -z $DEV_ETH_GAS_PRICE ]; then
|
||||
gas_price_arg="--gas-price $DEV_ETH_GAS_PRICE"
|
||||
fee_price_arg="--fee-price $DEV_ETH_GAS_PRICE"
|
||||
>&2 echo using static gas price $DEV_ETH_GAS_PRICE
|
||||
fi
|
||||
|
||||
echo "environment:"
|
||||
printenv
|
||||
echo \n
|
||||
|
||||
echo "using wallet address '$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER' from keystore file $DEV_ETH_KEYSTORE_FILE"
|
||||
|
||||
# This is a grassroots team convention for building the Bancor contracts using the bancor protocol repository truffle setup
|
||||
# Running this in docker-internal dev container (built from Docker folder in this repo) will write a
|
||||
# source-able env file to CIC_DATA_DIR. Services dependent on these contracts can mount this file OR
|
||||
# define these parameters at runtime
|
||||
# pushd /usr/src
|
||||
|
||||
if [ -z $CIC_DATA_DIR ]; then
|
||||
CIC_DATA_DIR=`mktemp -d`
|
||||
fi
|
||||
>&2 echo using data dir $CIC_DATA_DIR
|
||||
|
||||
init_level_file=${CIC_DATA_DIR}/.init
|
||||
if [ ! -f ${CIC_DATA_DIR}/.init ]; then
|
||||
echo "Creating .init file..."
|
||||
mkdir -p $CIC_DATA_DIR
|
||||
touch $CIC_DATA_DIR/.init
|
||||
# touch $init_level_file
|
||||
fi
|
||||
echo -n 1 > $init_level_file
|
||||
|
||||
# Abort on any error (including if wait-for-it fails).
|
||||
|
||||
# Wait for the backend to be up, if we know where it is.
|
||||
if [[ -n "${ETH_PROVIDER}" ]]; then
|
||||
|
||||
export CONFINI_DIR=$_CONFINI_DIR
|
||||
unset CONFINI_DIR
|
||||
|
||||
if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
|
||||
echo "waiting for ${ETH_PROVIDER}..."
|
||||
./wait-for-it.sh "${ETH_PROVIDER_HOST}:${ETH_PROVIDER_PORT}"
|
||||
fi
|
||||
|
||||
if [ "$TOKEN_TYPE" == "$default_token" ]; then
|
||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
||||
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
|
||||
TOKEN_SYMBOL="GFT"
|
||||
TOKEN_NAME="Giftable Token"
|
||||
elif [ -z "$TOKEN_NAME" ]; then
|
||||
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
|
||||
TOKEN_NAME=$TOKEN_SYMBOL
|
||||
fi
|
||||
>&2 echo deploying default token $TOKEN_TYPE
|
||||
echo giftable-token-deploy $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv
|
||||
DEV_RESERVE_ADDRESS=`giftable-token-deploy $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv`
|
||||
elif [ "$TOKEN_TYPE" == "erc20_demurrage_token" ]; then
|
||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
||||
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
|
||||
TOKEN_SYMBOL="SARAFU"
|
||||
TOKEN_NAME="Sarafu Token"
|
||||
elif [ -z "$TOKEN_NAME" ]; then
|
||||
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
|
||||
TOKEN_NAME=$TOKEN_SYMBOL
|
||||
fi
|
||||
>&2 echo deploying token $TOKEN_TYPE
|
||||
if [ -z $TOKEN_SINK_ADDRESS ]; then
|
||||
if [ ! -z $TOKEN_REDISTRIBUTION_PERIOD ]; then
|
||||
>&2 echo -e "\033[;93mtoken sink address not set, so redistribution will be BURNED\033[;39m"
|
||||
fi
|
||||
fi
|
||||
DEV_RESERVE_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL -vv -ww -s`
|
||||
else
|
||||
>&2 echo unknown token type $TOKEN_TYPE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "giftable-token-gift $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -vv -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT"
|
||||
giftable-token-gift $fee_price_arg -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -u -vv -s -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT
|
||||
|
||||
>&2 echo "deploy account index contract"
|
||||
DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -y $DEV_ETH_KEYSTORE_FILE -vv -s -u -w`
|
||||
>&2 echo "add deployer address as account index writer"
|
||||
eth-accounts-index-writer $fee_price_arg -s -u -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_ACCOUNT_INDEX_ADDRESS -ww -vv $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
|
||||
|
||||
>&2 echo "deploy contract registry contract"
|
||||
CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -y $DEV_ETH_KEYSTORE_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry -p $ETH_PROVIDER -vv -s -u -w`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier ContractRegistry $CIC_REGISTRY_ADDRESS
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier AccountRegistry $DEV_ACCOUNT_INDEX_ADDRESS
|
||||
|
||||
# Deploy address declarator registry
|
||||
>&2 echo "deploy address declarator contract"
|
||||
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
|
||||
DEV_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -s -u -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv $declarator_description`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier AddressDeclarator $DEV_DECLARATOR_ADDRESS
|
||||
|
||||
# Deploy transfer authorization contract
|
||||
>&2 echo "deploy transfer auth contract"
|
||||
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS
|
||||
|
||||
# Deploy token index contract
|
||||
>&2 echo "deploy token index contract"
|
||||
DEV_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -s -u $fee_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS
|
||||
>&2 echo "add reserve token to token index"
|
||||
eth-token-index-add $fee_price_arg -s -u -w -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
|
||||
|
||||
# Sarafu faucet contract
|
||||
>&2 echo "deploy token faucet contract"
|
||||
DEV_FAUCET_ADDRESS=`sarafu-faucet-deploy $fee_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -w -vv --account-index-address $DEV_ACCOUNT_INDEX_ADDRESS $DEV_RESERVE_ADDRESS -s`
|
||||
|
||||
>&2 echo "set token faucet amount"
|
||||
sarafu-faucet-set $fee_price_arg -y $DEV_ETH_KEYSTORE_FILE -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_FAUCET_ADDRESS -vv -s --fee-limit 100000 $DEV_FAUCET_AMOUNT
|
||||
|
||||
>&2 echo "register faucet in registry"
|
||||
eth-contract-registry-set -s -u $fee_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv --identifier Faucet $DEV_FAUCET_ADDRESS
|
||||
|
||||
>&2 echo "set faucet as token minter"
|
||||
giftable-token-minter -s -u $fee_price_arg -w -y $DEV_ETH_KEYSTORE_FILE -e $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $ETH_PROVIDER -vv $DEV_FAUCET_ADDRESS
|
||||
|
||||
export CONFINI_DIR=$_CONFINI_DIR
|
||||
else
|
||||
echo "\$ETH_PROVIDER not set!"
|
||||
if [ -z "${RPC_PROVIDER}" ]; then
|
||||
echo "\$RPC_PROVIDER not set!"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
mkdir -p $CIC_DATA_DIR
|
||||
>&2 echo using data dir $CIC_DATA_DIR for environment variable dump
|
||||
unset CONFINI_DIR
|
||||
|
||||
# this is consumed in downstream services to set environment variables
|
||||
cat << EOF > $CIC_DATA_DIR/.env
|
||||
export CIC_REGISTRY_ADDRESS=$CIC_REGISTRY_ADDRESS
|
||||
export CIC_TRUST_ADDRESS=$DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
|
||||
export CIC_DECLARATOR_ADDRESS=$CIC_DECLARATOR_ADDRESS
|
||||
EOF
|
||||
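Downstream services consume this file simply by sourcing it; a minimal sketch of the consuming side (the reset script below sources the same file):

```
source $CIC_DATA_DIR/.env
echo "registry at $CIC_REGISTRY_ADDRESS, trust address $CIC_TRUST_ADDRESS"
```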
if [ ! -z "$DEV_USE_DOCKER_WAIT_SCRIPT" ]; then
|
||||
IFS=: read -a p <<< "$RPC_PROVIDER"
|
||||
read -i "/" rpc_provider_port <<< "${p[2]}"
|
||||
rpc_provider_host=${p[1]:2}
|
||||
echo "waiting for provider host $rpc_provider_host port $rpc_provider_port..."
|
||||
./wait-for-it.sh "$rpc_provider_host:$rpc_provider_port"
|
||||
fi
|
||||
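The read-based host/port split above is fragile; an alternative sketch using plain parameter expansion, assuming the provider URL has the form http://host:port:

```
rpc_no_scheme=${RPC_PROVIDER#*://}       # e.g. evm:8545
rpc_provider_host=${rpc_no_scheme%%:*}   # evm
rpc_provider_port=${rpc_no_scheme##*:}   # 8545
./wait-for-it.sh "$rpc_provider_host:$rpc_provider_port"
```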
|
||||
cat ./envlist | bash from_env.sh > $CIC_DATA_DIR/.env_all
|
||||
cat ./envlist
|
||||
# popd
|
||||
if [ "$TOKEN_TYPE" == "giftable_erc20_token" ]; then
|
||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
||||
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
|
||||
TOKEN_SYMBOL="GFT"
|
||||
TOKEN_NAME="Giftable Token"
|
||||
elif [ -z "$TOKEN_NAME" ]; then
|
||||
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
|
||||
TOKEN_NAME=$TOKEN_SYMBOL
|
||||
fi
|
||||
>&2 echo deploying default token $TOKEN_TYPE
|
||||
echo giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv
|
||||
DEV_RESERVE_ADDRESS=`giftable-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -s -ww --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL --decimals 6 -vv`
|
||||
elif [ "$TOKEN_TYPE" == "erc20_demurrage_token" ]; then
|
||||
if [ -z "$TOKEN_SYMBOL" ]; then
|
||||
>&2 echo token symbol not set, setting defaults for type $TOKEN_TYPE
|
||||
TOKEN_SYMBOL="DET"
|
||||
TOKEN_NAME="Demurrage Token"
|
||||
elif [ -z "$TOKEN_NAME" ]; then
|
||||
>&2 echo token name not set, setting same as symbol for type $TOKEN_TYPE
|
||||
TOKEN_NAME=$TOKEN_SYMBOL
|
||||
fi
|
||||
>&2 echo deploying token $TOKEN_TYPE
|
||||
if [ -z $TOKEN_SINK_ADDRESS ]; then
|
||||
if [ ! -z $TOKEN_REDISTRIBUTION_PERIOD ]; then
|
||||
>&2 echo -e "\033[;93mtoken sink address not set, so redistribution will be BURNED\033[;39m"
|
||||
fi
|
||||
fi
|
||||
DEV_RESERVE_ADDRESS=`erc20-demurrage-token-deploy $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC --name "$TOKEN_NAME" --symbol $TOKEN_SYMBOL -vv -ww -s`
|
||||
else
|
||||
>&2 echo unknown token type $TOKEN_TYPE
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -vv -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT"
|
||||
giftable-token-gift $fee_price_arg -p $RPC_PROVIDER -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -u -vv -s -w -e $DEV_RESERVE_ADDRESS $DEV_RESERVE_AMOUNT
|
||||
|
||||
>&2 echo "deploy account index contract"
|
||||
DEV_ACCOUNT_INDEX_ADDRESS=`eth-accounts-index-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -y $WALLET_KEY_FILE -vv -s -u -w`
|
||||
>&2 echo "add deployer address as account index writer"
|
||||
eth-accounts-index-writer $fee_price_arg -s -u -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -e $DEV_ACCOUNT_INDEX_ADDRESS -ww -vv $debug $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER
|
||||
|
||||
>&2 echo "deploy contract registry contract"
|
||||
CIC_REGISTRY_ADDRESS=`eth-contract-registry-deploy $fee_price_arg -i $CIC_CHAIN_SPEC -y $WALLET_KEY_FILE --identifier AccountRegistry --identifier TokenRegistry --identifier AddressDeclarator --identifier Faucet --identifier TransferAuthorization --identifier ContractRegistry -p $RPC_PROVIDER -vv -s -u -w`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier ContractRegistry $CIC_REGISTRY_ADDRESS
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier AccountRegistry $DEV_ACCOUNT_INDEX_ADDRESS
|
||||
|
||||
# Deploy address declarator registry
|
||||
>&2 echo "deploy address declarator contract"
|
||||
declarator_description=0x546869732069732074686520434943206e6574776f726b000000000000000000
|
||||
DEV_DECLARATOR_ADDRESS=`eth-address-declarator-deploy -s -u -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv $declarator_description`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier AddressDeclarator $DEV_DECLARATOR_ADDRESS
|
||||
|
||||
# Deploy transfer authorization contract
|
||||
>&2 echo "deploy transfer auth contract"
|
||||
DEV_TRANSFER_AUTHORIZATION_ADDRESS=`erc20-transfer-auth-deploy $gas_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier TransferAuthorization $DEV_TRANSFER_AUTHORIZATION_ADDRESS
|
||||
|
||||
# Deploy token index contract
|
||||
>&2 echo "deploy token index contract"
|
||||
DEV_TOKEN_INDEX_ADDRESS=`eth-token-index-deploy -s -u $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv`
|
||||
eth-contract-registry-set $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier TokenRegistry $DEV_TOKEN_INDEX_ADDRESS
|
||||
>&2 echo "add reserve token to token index"
|
||||
eth-token-index-add $fee_price_arg -s -u -w -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv -e $DEV_TOKEN_INDEX_ADDRESS $DEV_RESERVE_ADDRESS
|
||||
|
||||
# Sarafu faucet contract
|
||||
>&2 echo "deploy token faucet contract"
|
||||
DEV_FAUCET_ADDRESS=`sarafu-faucet-deploy $fee_price_arg -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -w -vv --account-index-address $DEV_ACCOUNT_INDEX_ADDRESS $DEV_RESERVE_ADDRESS -s`
|
||||
|
||||
>&2 echo "set token faucet amount"
|
||||
sarafu-faucet-set $fee_price_arg -w -y $WALLET_KEY_FILE -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -e $DEV_FAUCET_ADDRESS -vv -s --fee-limit 100000 $DEV_FAUCET_AMOUNT
|
||||
|
||||
>&2 echo "register faucet in registry"
|
||||
eth-contract-registry-set -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $CIC_REGISTRY_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv --identifier Faucet $DEV_FAUCET_ADDRESS
|
||||
|
||||
>&2 echo "set faucet as token minter"
|
||||
giftable-token-minter -s -u $fee_price_arg -w -y $WALLET_KEY_FILE -e $DEV_RESERVE_ADDRESS -i $CIC_CHAIN_SPEC -p $RPC_PROVIDER -vv $DEV_FAUCET_ADDRESS
|
||||
|
||||
|
||||
#echo "export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL" >> ${DEV_DATA_DIR}/env_reset
|
||||
export CIC_DEFAULT_TOKEN_SYMBOL=$TOKEN_SYMBOL
|
||||
confini-dump --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config --prefix export > ${DEV_DATA_DIR}/env_reset
|
||||
confini-dump --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config
|
||||
|
||||
set +a
|
||||
set +e
|
||||
|
||||
echo -n 2 > $init_level_file
|
||||
|
||||
exec "$@"
|
||||
|
||||
@@ -1,5 +1,13 @@
|
||||
#! /bin/bash
|
||||
|
||||
>&2 echo -e "\033[;96mRUNNING\033[;39m configurations"
|
||||
. ./config.sh
|
||||
if [ $? -ne "0" ]; then
|
||||
>&2 echo -e "\033[;31mFAILED\033[;39m configurations"
|
||||
exit 1;
|
||||
fi
|
||||
>&2 echo -e "\033[;32mSUCCEEDED\033[;39m configurations"
|
||||
|
||||
if [[ $((RUN_MASK & 1)) -eq 1 ]]
|
||||
then
|
||||
>&2 echo -e "\033[;96mRUNNING\033[;39m RUN_MASK 1 - contract deployment"
|
||||
|
||||
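RUN_MASK is treated as a bitmask, so each deployment stage is toggled by one bit; a quick sketch of the arithmetic behind the test above:

```
RUN_MASK=3
echo $((RUN_MASK & 1))   # 1 -> stage 1 (contract deployment) runs
echo $((RUN_MASK & 2))   # 2 -> the stage guarded by bit 2 runs as well
```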
@@ -1,68 +1,40 @@
|
||||
#!/bin/bash
|
||||
|
||||
# defaults
|
||||
#initlevel=`cat ${CIC_DATA_DIR}/.init`
|
||||
#echo $inilevel
|
||||
#if [ $initlevel -lt 2 ]; then
|
||||
# >&2 echo "initlevel too low $initlevel"
|
||||
# exit 1
|
||||
#fi
|
||||
source ${CIC_DATA_DIR}/.env
|
||||
source ${CIC_DATA_DIR}/.env_all
|
||||
DEV_PIP_EXTRA_INDEX_URL=${DEV_PIP_EXTRA_INDEX_URL:-https://pip.grassrootseconomics.net:8433}
|
||||
DEV_DATABASE_NAME_CIC_ETH=${DEV_DATABASE_NAME_CIC_ETH:-"cic-eth"}
|
||||
CIC_DATA_DIR=${CIC_DATA_DIR:-/tmp/cic}
|
||||
ETH_PASSPHRASE=''
|
||||
CIC_DEFAULT_TOKEN_SYMBOL=${CIC_DEFAULT_TOKEN_SYMBOL:-GFT}
|
||||
TOKEN_SYMBOL=$CIC_DEFAULT_TOKEN_SYMBOL
|
||||
|
||||
CHAIN_SPEC=${CHAIN_SPEC:-$CIC_CHAIN_SPEC}
|
||||
RPC_HTTP_PROVIDER=${RPC_HTTP_PROVIDER:-$ETH_PROVIDER}
|
||||
source ${DEV_DATA_DIR}/env_reset
|
||||
cat ${DEV_DATA_DIR}/env_reset
|
||||
|
||||
# Debug flag
|
||||
DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER=0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C
|
||||
keystore_file=./keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c
|
||||
debug='-vv'
|
||||
gas_amount=100000000000000000000000
|
||||
token_amount=${gas_amount}
|
||||
env_out_file=${CIC_DATA_DIR}/.env_seed
|
||||
init_level_file=${CIC_DATA_DIR}/.init
|
||||
empty_config_dir=$CONFINI_DIR/empty
|
||||
truncate $env_out_file -s 0
|
||||
|
||||
set -e
|
||||
set -a
|
||||
|
||||
#pip install --extra-index-url $DEV_PIP_EXTRA_INDEX_URL eth-address-index==0.1.1a7
|
||||
|
||||
export CONFINI_DIR=$_CONFINI_DIR
|
||||
unset CONFINI_DIR
|
||||
|
||||
# get required addresses from registries
|
||||
DEV_TOKEN_INDEX_ADDRESS=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $ETH_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw TokenRegistry`
|
||||
DEV_ACCOUNT_INDEX_ADDRESS=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $ETH_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw AccountRegistry`
|
||||
DEV_RESERVE_ADDRESS=`eth-token-index-list -i $CHAIN_SPEC -u -p $ETH_PROVIDER -e $DEV_TOKEN_INDEX_ADDRESS -vv --raw $CIC_DEFAULT_TOKEN_SYMBOL`
|
||||
cat <<EOF
|
||||
Token registry: $DEV_TOKEN_INDEX_ADDRESS
|
||||
Account registry: $DEV_ACCOUNT_INDEX_ADDRESS
|
||||
Reserve address: $DEV_RESERVE_ADDRESS ($CIC_DEFAULT_TOKEN_SYMBOL)
|
||||
EOF
|
||||
token_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw TokenRegistry`
|
||||
account_index_address=`eth-contract-registry-list -u -i $CHAIN_SPEC -p $RPC_PROVIDER -e $CIC_REGISTRY_ADDRESS -vv --raw AccountRegistry`
|
||||
reserve_address=`eth-token-index-list -i $CHAIN_SPEC -u -p $RPC_PROVIDER -e $token_index_address -vv --raw $CIC_DEFAULT_TOKEN_SYMBOL`
|
||||
|
||||
>&2 echo "Token registry: $token_index_address"
|
||||
>&2 echo "Account registry: $account_index_address"
|
||||
>&2 echo "Reserve address: $reserve_address ($TOKEN_SYMBOL)"
|
||||
|
||||
>&2 echo "create account for gas gifter"
|
||||
old_gas_provider=$DEV_ETH_ACCOUNT_GAS_PROVIDER
|
||||
DEV_ETH_ACCOUNT_GAS_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
echo DEV_ETH_ACCOUNT_GAS_GIFTER=$DEV_ETH_ACCOUNT_GAS_GIFTER >> $env_out_file
|
||||
#DEV_ETH_ACCOUNT_GAS_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
DEV_ETH_ACCOUNT_GAS_GIFTER=`cic-eth-create --redis-timeout 120 $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
cic-eth-tag -i $CHAIN_SPEC GAS_GIFTER $DEV_ETH_ACCOUNT_GAS_GIFTER
|
||||
|
||||
|
||||
>&2 echo "create account for sarafu gifter"
|
||||
DEV_ETH_ACCOUNT_SARAFU_GIFTER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
echo DEV_ETH_ACCOUNT_SARAFU_GIFTER=$DEV_ETH_ACCOUNT_SARAFU_GIFTER >> $env_out_file
|
||||
cic-eth-tag -i $CHAIN_SPEC SARAFU_GIFTER $DEV_ETH_ACCOUNT_SARAFU_GIFTER
|
||||
|
||||
>&2 echo "create account for approval escrow owner"
|
||||
DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
echo DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER=$DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER >> $env_out_file
|
||||
cic-eth-tag -i $CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFER_AUTHORIZATION_OWNER
|
||||
|
||||
#>&2 echo "create account for faucet owner"
|
||||
@@ -72,50 +44,45 @@ cic-eth-tag -i $CHAIN_SPEC TRANSFER_AUTHORIZATION_OWNER $DEV_ETH_ACCOUNT_TRANSFE
|
||||
|
||||
>&2 echo "create account for accounts index writer"
|
||||
DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=`CONFINI_DIR=$empty_config_dir cic-eth-create $debug --redis-host $REDIS_HOST --redis-host-callback=$REDIS_HOST --redis-port-callback=$REDIS_PORT --no-register`
|
||||
echo DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER=$DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER >> $env_out_file
|
||||
cic-eth-tag -i $CHAIN_SPEC ACCOUNT_REGISTRY_WRITER $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
|
||||
>&2 echo "add accounts index writer account as writer on contract"
|
||||
eth-accounts-index-writer -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_ACCOUNT_INDEX_ADDRESS -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
|
||||
eth-accounts-index-writer -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $account_index_address -ww $debug $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER
|
||||
|
||||
# Transfer gas to custodial gas provider address
|
||||
_CONFINI_DIR=$CONFINI_DIR
|
||||
unset CONFINI_DIR
|
||||
>&2 echo gift gas to gas gifter
|
||||
>&2 eth-gas -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_GAS_GIFTER $gas_amount
|
||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_GAS_GIFTER $DEV_GAS_AMOUNT
|
||||
|
||||
>&2 echo gift gas to sarafu token owner
|
||||
>&2 eth-gas -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER $gas_amount
|
||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER $DEV_GAS_AMOUNT
|
||||
|
||||
>&2 echo gift gas to account index owner
|
||||
>&2 eth-gas -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER $gas_amount
|
||||
>&2 eth-gas -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -w $debug -a $DEV_ETH_ACCOUNT_ACCOUNT_REGISTRY_WRITER $DEV_GAS_AMOUNT
|
||||
|
||||
|
||||
# Send token to token creator
|
||||
>&2 echo "gift tokens to sarafu owner"
|
||||
echo "giftable-token-gift -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_RESERVE_ADDRESS -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $token_amount"
|
||||
>&2 giftable-token-gift -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_RESERVE_ADDRESS -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $token_amount
|
||||
echo "giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $DEV_TOKEN_AMOUNT"
|
||||
>&2 giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER -w $debug $DEV_TOKEN_AMOUNT
|
||||
|
||||
# Send token to token gifter
|
||||
>&2 echo "gift tokens to keystore address"
|
||||
>&2 giftable-token-gift -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER -e $DEV_RESERVE_ADDRESS -a $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER -w $debug $token_amount
|
||||
>&2 giftable-token-gift -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER -e $reserve_address -a $DEV_ETH_ACCOUNT_CONTRACT_DEPLOYER -w $debug $DEV_TOKEN_AMOUNT
|
||||
|
||||
>&2 echo "set sarafu token to reserve token (temporarily while bancor contracts are not connected)"
|
||||
echo DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS >> $env_out_file
|
||||
export DEV_ETH_SARAFU_TOKEN_ADDRESS=$DEV_ETH_RESERVE_ADDRESS
|
||||
|
||||
# Transfer tokens to gifter address
|
||||
>&2 echo "transfer tokens to token gifter address"
|
||||
>&2 erc20-transfer -s -u -y $keystore_file -i $CHAIN_SPEC -p $ETH_PROVIDER --fee-limit 100000 -e $DEV_RESERVE_ADDRESS -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${token_amount:0:-1}
|
||||
>&2 erc20-transfer -s -u -y $WALLET_KEY_FILE -i $CHAIN_SPEC -p $RPC_PROVIDER --fee-limit 100000 -e $reserve_address -w $debug -a $DEV_ETH_ACCOUNT_SARAFU_GIFTER ${DEV_TOKEN_AMOUNT:0:-1}
|
||||
|
||||
#echo -n 0 > $init_level_file
|
||||
|
||||
#CONFINI_DIR=$_CONFINI_DIR
|
||||
# Remove the SEND (8), QUEUE (16) and INIT (2) locks (or'ed), set by default at migration
|
||||
cic-eth-ctl -i $CHAIN_SPEC unlock INIT
|
||||
cic-eth-ctl -i $CHAIN_SPEC unlock SEND
|
||||
cic-eth-ctl -i $CHAIN_SPEC unlock QUEUE
|
||||
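A worked example of the or'ed value mentioned in the comment above (INIT=2, SEND=8, QUEUE=16):

```
echo $((2 | 8 | 16))   # 26, the combined lock value set at migration
```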
|
||||
export CONFINI_DIR=$_CONFINI_DIR
|
||||
confini-dump --schema-module chainlib.eth.data.config --schema-module cic_eth.data.config --schema-dir ./config
|
||||
|
||||
set +a
|
||||
set +e
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
.cache
|
||||
.dot
|
||||
**/doc
|
||||
**/node_modules
|
||||
node_modules/
|
||||
**/venv
|
||||
**/.venv
|
||||
|
||||
|
||||
@@ -1,64 +1,61 @@
|
||||
# standard imports
|
||||
import argparse
|
||||
import logging
|
||||
import sys
|
||||
import os
|
||||
import sys
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
import confini
|
||||
import redis
|
||||
from chainlib.chain import ChainSpec
|
||||
from chainlib.eth.address import to_checksum_address
|
||||
from chainlib.eth.connection import EthHTTPConnection
|
||||
from confini import Config
|
||||
from crypto_dev_signer.eth.signer import ReferenceSigner as EIP155Signer
|
||||
from crypto_dev_signer.keystore.dict import DictKeystore
|
||||
|
||||
# local imports
|
||||
from import_task import ImportTask, MetadataTask
|
||||
from import_util import BalanceProcessor, get_celery_worker_status
|
||||
from import_task import ImportTask, MetadataTask
|
||||
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
default_config_dir = './config'
|
||||
logg = logging.getLogger()
|
||||
|
||||
config_dir = './config'
|
||||
arg_parser = argparse.ArgumentParser(description='Daemon worker that handles data seeding tasks.')
|
||||
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use.')
|
||||
arg_parser.add_argument('--env-prefix',
|
||||
default=os.environ.get('CONFINI_ENV_PREFIX'),
|
||||
dest='env_prefix',
|
||||
type=str,
|
||||
help='environment prefix for variables to overwrite configuration.')
|
||||
arg_parser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)')
|
||||
arg_parser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||
arg_parser.add_argument('--include-balances', dest='include_balances', help='include opening balance transactions',
|
||||
action='store_true')
|
||||
arg_parser.add_argument('--meta-host', dest='meta_host', type=str, help='metadata server host')
|
||||
arg_parser.add_argument('--meta-port', dest='meta_port', type=int, help='metadata server port')
|
||||
arg_parser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit data seeding tasks to.')
|
||||
arg_parser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
|
||||
arg_parser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
|
||||
arg_parser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
|
||||
arg_parser.add_argument('--redis-port', dest='redis_port', type=int, help='redis port to use for task submission')
|
||||
arg_parser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol',
|
||||
help='Token symbol to use for transactions')
|
||||
arg_parser.add_argument('-v', help='be verbose', action='store_true')
|
||||
arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||
arg_parser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||
arg_parser.add_argument('--offset', type=int, default=0, help='block offset to start syncer from')
|
||||
arg_parser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1',
|
||||
help='chain spec')
|
||||
arg_parser.add_argument('import_dir', default='out', type=str, help='user export directory')
|
||||
args = arg_parser.parse_args()
|
||||
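A hypothetical invocation of this worker, using only flags defined by the parser above (the script name, hosts and ports are assumptions, not fixed by this changeset). The import directory is expected to hold a balances.csv of comma-separated address,balance rows, which main() below reads line by line:

```
# assumed entrypoint name and hosts; flags as defined above
python import_ussd.py -v -c ./config \
    --redis-host redis --redis-port 6379 \
    --meta-host meta --meta-port 63380 \
    --token-symbol GFT --include-balances \
    out

# hypothetical balances.csv row: <old chain address>,<balance in minor units>
echo '0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C,1000000' >> out/balances.csv
```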
|
||||
argparser = argparse.ArgumentParser(description='daemon that monitors transactions in new blocks')
|
||||
argparser.add_argument('-p', '--provider', dest='p', type=str, help='chain rpc provider address')
|
||||
argparser.add_argument('-y', '--key-file', dest='y', type=str, help='Ethereum keystore file to use for signing')
|
||||
argparser.add_argument('-c', type=str, default=config_dir, help='config root to use')
|
||||
argparser.add_argument('--old-chain-spec', type=str, dest='old_chain_spec', default='evm:oldchain:1', help='chain spec')
|
||||
argparser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||
argparser.add_argument('-r', '--registry-address', type=str, dest='r', help='CIC Registry address')
|
||||
argparser.add_argument('--meta-host', dest='meta_host', type=str, help='metadata server host')
|
||||
argparser.add_argument('--meta-port', dest='meta_port', type=int, help='metadata server port')
|
||||
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
|
||||
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis port to use for task submission')
|
||||
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
|
||||
argparser.add_argument('--token-symbol', default='GFT', type=str, dest='token_symbol',
|
||||
help='Token symbol to use for transactions')
|
||||
argparser.add_argument('--head', action='store_true', help='start at current block height (overrides --offset)')
|
||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str,
|
||||
help='environment prefix for variables to overwrite configuration')
|
||||
argparser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit transaction tasks to')
|
||||
argparser.add_argument('--offset', type=int, default=0, help='block offset to start syncer from')
|
||||
argparser.add_argument('-v', help='be verbose', action='store_true')
|
||||
argparser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||
argparser.add_argument('user_dir', default='out', type=str, help='user export directory')
|
||||
args = argparser.parse_args(sys.argv[1:])
|
||||
|
||||
if args.v:
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
elif args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
|
||||
config_dir = os.path.join(args.c)
|
||||
os.makedirs(config_dir, 0o777, True)
|
||||
config = confini.Config(config_dir, args.env_prefix)
|
||||
config = Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
|
||||
# override args
|
||||
args_override = {
|
||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
||||
'ETH_PROVIDER': getattr(args, 'p'),
|
||||
@@ -73,88 +70,76 @@ args_override = {
|
||||
config.dict_override(args_override, 'cli flag')
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
config.censor('PASSWORD', 'SSL')
|
||||
logg.debug('config loaded from {}:\n{}'.format(config_dir, config))
|
||||
logg.debug(f'config loaded from {args.c}:\n{config}')
|
||||
|
||||
redis_host = config.get('REDIS_HOST')
|
||||
redis_port = config.get('REDIS_PORT')
|
||||
redis_db = config.get('REDIS_DB')
|
||||
r = redis.Redis(redis_host, redis_port, redis_db)
|
||||
db_config = {
|
||||
'database': config.get('DATABASE_NAME'),
|
||||
'host': config.get('DATABASE_HOST'),
|
||||
'port': config.get('DATABASE_PORT'),
|
||||
'user': config.get('DATABASE_USER'),
|
||||
'password': config.get('DATABASE_PASSWORD')
|
||||
}
|
||||
ImportTask.db_config = db_config
|
||||
|
||||
# create celery apps
|
||||
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
||||
status = get_celery_worker_status(celery_app=celery_app)
|
||||
|
||||
signer_address = None
|
||||
keystore = DictKeystore()
|
||||
if args.y is not None:
|
||||
logg.debug('loading keystore file {}'.format(args.y))
|
||||
signer_address = keystore.import_keystore_file(args.y)
|
||||
logg.debug('now have key for signer address {}'.format(signer_address))
|
||||
|
||||
# define signer
|
||||
os.path.isfile(args.y)
|
||||
logg.debug(f'loading keystore file {args.y}')
|
||||
signer_address = keystore.import_keystore_file(args.y)
|
||||
logg.debug(f'now have key for signer address {signer_address}')
|
||||
signer = EIP155Signer(keystore)
|
||||
|
||||
queue = args.q
|
||||
chain_str = config.get('CIC_CHAIN_SPEC')
|
||||
block_offset = 0
|
||||
if args.head:
|
||||
block_offset = -1
|
||||
else:
|
||||
block_offset = args.offset
|
||||
block_offset = -1 if args.head else args.offset
|
||||
|
||||
chain_str = config.get('CIC_CHAIN_SPEC')
|
||||
chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||
ImportTask.chain_spec = chain_spec
|
||||
old_chain_spec_str = args.old_chain_spec
|
||||
old_chain_spec = ChainSpec.from_chain_str(old_chain_spec_str)
|
||||
|
||||
user_dir = args.user_dir # user_out_dir from import_users.py
|
||||
|
||||
token_symbol = args.token_symbol
|
||||
|
||||
MetadataTask.meta_host = config.get('META_HOST')
|
||||
MetadataTask.meta_port = config.get('META_PORT')
|
||||
ImportTask.chain_spec = chain_spec
|
||||
|
||||
txs_dir = os.path.join(args.import_dir, 'txs')
|
||||
os.makedirs(txs_dir, exist_ok=True)
|
||||
sys.stdout.write(f'created txs dir: {txs_dir}')
|
||||
|
||||
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||
get_celery_worker_status(celery_app)
|
||||
|
||||
|
||||
def main():
|
||||
conn = EthHTTPConnection(config.get('ETH_PROVIDER'))
|
||||
|
||||
ImportTask.balance_processor = BalanceProcessor(conn, chain_spec, config.get('CIC_REGISTRY_ADDRESS'),
|
||||
signer_address, signer)
|
||||
ImportTask.balance_processor.init(token_symbol)
|
||||
|
||||
# TODO get decimals from token
|
||||
ImportTask.balance_processor = BalanceProcessor(conn,
|
||||
chain_spec,
|
||||
config.get('CIC_REGISTRY_ADDRESS'),
|
||||
signer_address,
|
||||
signer)
|
||||
ImportTask.balance_processor.init(args.token_symbol)
|
||||
balances = {}
|
||||
f = open('{}/balances.csv'.format(user_dir, 'r'))
|
||||
remove_zeros = 10 ** 6
|
||||
i = 0
|
||||
while True:
|
||||
l = f.readline()
|
||||
if l is None:
|
||||
break
|
||||
r = l.split(',')
|
||||
try:
|
||||
address = to_checksum_address(r[0])
|
||||
sys.stdout.write('loading balance {} {} {}'.format(i, address, r[1]).ljust(200) + "\r")
|
||||
except ValueError:
|
||||
break
|
||||
balance = int(int(r[1].rstrip()) / remove_zeros)
|
||||
balances[address] = balance
|
||||
i += 1
|
||||
|
||||
f.close()
|
||||
|
||||
accuracy = 10 ** 6
|
||||
count = 0
|
||||
with open(f'{args.import_dir}/balances.csv', 'r') as balances_file:
|
||||
while True:
|
||||
line = balances_file.readline()
|
||||
if line is None:
|
||||
break
|
||||
balance_data = line.split(',')
|
||||
try:
|
||||
blockchain_address = to_checksum_address(balance_data[0])
|
||||
logg.info(
|
||||
'loading balance: {} {} {}'.format(count, blockchain_address, balance_data[1].ljust(200) + "\r"))
|
||||
except ValueError:
|
||||
break
|
||||
balance = int(int(balance_data[1].rstrip()) / accuracy)
|
||||
balances[blockchain_address] = balance
|
||||
count += 1
|
||||
ImportTask.balances = balances
|
||||
ImportTask.count = i
|
||||
ImportTask.import_dir = user_dir
|
||||
|
||||
s = celery.signature(
|
||||
'import_task.send_txs',
|
||||
[
|
||||
MetadataTask.balance_processor.nonce_offset,
|
||||
],
|
||||
queue=queue,
|
||||
)
|
||||
s.apply_async()
|
||||
ImportTask.count = count
|
||||
ImportTask.include_balances = args.include_balances is True
|
||||
ImportTask.import_dir = args.import_dir
|
||||
s_send_txs = celery.signature(
|
||||
'import_task.send_txs', [ImportTask.balance_processor.nonce_offset], queue=args.q)
|
||||
s_send_txs.apply_async()
|
||||
|
||||
argv = ['worker']
|
||||
if args.vv:
|
||||
@@ -165,6 +150,7 @@ def main():
|
||||
argv.append(args.q)
|
||||
argv.append('-n')
|
||||
argv.append(args.q)
|
||||
argv.append(f'--pidfile={args.q}.pid')
|
||||
celery_app.worker_main(argv)
|
||||
|
||||
|
||||
|
||||
@@ -1,71 +1,63 @@
|
||||
# standard import
|
||||
# standard imports
|
||||
import argparse
|
||||
import csv
|
||||
import logging
|
||||
import os
|
||||
import psycopg2
|
||||
|
||||
# third-party imports
|
||||
import celery
|
||||
import confini
|
||||
# external imports
|
||||
from confini import Config
|
||||
|
||||
# local imports
|
||||
from import_util import get_celery_worker_status
|
||||
|
||||
|
||||
default_config_dir = './config'
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
default_config_dir = './config'
|
||||
|
||||
arg_parser = argparse.ArgumentParser()
|
||||
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use')
|
||||
arg_parser = argparse.ArgumentParser(description='Pins import script.')
|
||||
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use.')
|
||||
arg_parser.add_argument('--env-prefix',
|
||||
default=os.environ.get('CONFINI_ENV_PREFIX'),
|
||||
dest='env_prefix',
|
||||
type=str,
|
||||
help='environment prefix for variables to overwrite configuration')
|
||||
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit transaction tasks to')
|
||||
help='environment prefix for variables to overwrite configuration.')
|
||||
arg_parser.add_argument('import_dir', default='out', type=str, help='user export directory')
|
||||
arg_parser.add_argument('-v', help='be verbose', action='store_true')
|
||||
arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||
arg_parser.add_argument('pins_dir', default='out', type=str, help='user export directory')
|
||||
|
||||
args = arg_parser.parse_args()
|
||||
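A hypothetical invocation (script name assumed); the import directory is expected to contain a pins.csv of phone_number,password_hash rows, matching what csv.reader consumes in main() below:

```
# assumed entrypoint name; flags as defined above
python import_pins.py -v -c ./config out

# hypothetical pins.csv row
echo '+254700000000,5f4dcc3b5aa765d61d8327deb882cf99' >> out/pins.csv
```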
|
||||
# set log levels
|
||||
if args.v:
|
||||
logg.setLevel(logging.INFO)
|
||||
elif args.vv:
|
||||
logg.setLevel(logging.DEBUG)
|
||||
if args.vv:
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
elif args.v:
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
# process configs
|
||||
config_dir = args.c
|
||||
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
|
||||
config = Config(args.c, args.env_prefix)
|
||||
config.process()
|
||||
config.censor('PASSWORD', 'DATABASE')
|
||||
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
|
||||
|
||||
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||
status = get_celery_worker_status(celery_app=celery_app)
|
||||
|
||||
|
||||
db_configs = {
|
||||
'database': config.get('DATABASE_NAME'),
|
||||
'host': config.get('DATABASE_HOST'),
|
||||
'port': config.get('DATABASE_PORT'),
|
||||
'user': config.get('DATABASE_USER'),
|
||||
'password': config.get('DATABASE_PASSWORD')
|
||||
}
|
||||
logg.debug(f'config loaded from {args.c}:\n{config}')
|
||||
|
||||
|
||||
def main():
|
||||
with open(f'{args.pins_dir}/pins.csv') as pins_file:
|
||||
with open(f'{args.import_dir}/pins.csv') as pins_file:
|
||||
phone_to_pins = [tuple(row) for row in csv.reader(pins_file)]
|
||||
|
||||
s_import_pins = celery.signature(
|
||||
'import_task.set_pins',
|
||||
(db_configs, phone_to_pins),
|
||||
queue=args.q
|
||||
db_conn = psycopg2.connect(
|
||||
database=config.get('DATABASE_NAME'),
|
||||
host=config.get('DATABASE_HOST'),
|
||||
port=config.get('DATABASE_PORT'),
|
||||
user=config.get('DATABASE_USER'),
|
||||
password=config.get('DATABASE_PASSWORD')
|
||||
)
|
||||
result = s_import_pins.apply_async()
|
||||
logg.debug(f'TASK: {result.id}, STATUS: {result.status}')
|
||||
db_cursor = db_conn.cursor()
|
||||
sql = 'UPDATE account SET password_hash = %s WHERE phone_number = %s'
|
||||
for element in phone_to_pins:
|
||||
db_cursor.execute(sql, (element[1], element[0]))
|
||||
logg.debug(f'Updating account: {element[0]} with: {element[1]}')
|
||||
db_conn.commit()
|
||||
db_cursor.close()
|
||||
db_conn.close()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -1,38 +1,37 @@
|
||||
# standard imports
|
||||
import csv
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import random
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
import uuid
|
||||
from urllib import error, parse, request
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
import psycopg2
|
||||
from celery import Task
|
||||
from chainlib.chain import ChainSpec
|
||||
from chainlib.eth.address import to_checksum_address
|
||||
from chainlib.eth.tx import (
|
||||
unpack,
|
||||
raw,
|
||||
)
|
||||
from cic_types.models.person import Person
|
||||
from cic_types.processor import generate_metadata_pointer
|
||||
from hexathon import (
|
||||
strip_0x,
|
||||
add_0x,
|
||||
)
|
||||
from chainlib.eth.tx import raw, unpack
|
||||
from cic_types.models.person import Person, generate_metadata_pointer
|
||||
from hexathon import add_0x, strip_0x
|
||||
|
||||
# local imports
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
celery_app = celery.current_app
|
||||
logg = logging.getLogger()
|
||||
|
||||
|
||||
class ImportTask(celery.Task):
|
||||
class ImportTask(Task):
|
||||
balances = None
|
||||
import_dir = 'out'
|
||||
count = 0
|
||||
chain_spec = None
|
||||
balance_processor = None
|
||||
chain_spec: ChainSpec = None
|
||||
count = 0
|
||||
db_config: dict = None
|
||||
import_dir = ''
|
||||
include_balances = False
|
||||
max_retries = None
|
||||
|
||||
|
||||
@@ -41,121 +40,70 @@ class MetadataTask(ImportTask):
|
||||
meta_port = None
|
||||
meta_path = ''
|
||||
meta_ssl = False
|
||||
autoretry_for = (
|
||||
urllib.error.HTTPError,
|
||||
OSError,
|
||||
)
|
||||
autoretry_for = (error.HTTPError, OSError,)
|
||||
retry_jitter = True
|
||||
retry_backoff = True
|
||||
retry_backoff_max = 60
|
||||
|
||||
@classmethod
|
||||
def meta_url(self):
|
||||
def meta_url(cls):
|
||||
scheme = 'http'
|
||||
if self.meta_ssl:
|
||||
if cls.meta_ssl:
|
||||
scheme += 's'
|
||||
url = urllib.parse.urlparse('{}://{}:{}/{}'.format(scheme, self.meta_host, self.meta_port, self.meta_path))
|
||||
return urllib.parse.urlunparse(url)
|
||||
url = parse.urlparse(f'{scheme}://{cls.meta_host}:{cls.meta_port}/{cls.meta_path}')
|
||||
return parse.urlunparse(url)
|
||||
|
||||
|
||||
def old_address_from_phone(base_path, phone):
|
||||
pidx = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
|
||||
phone_idx_path = os.path.join('{}/phone/{}/{}/{}'.format(
|
||||
base_path,
|
||||
pidx[:2],
|
||||
pidx[2:4],
|
||||
pidx,
|
||||
)
|
||||
)
|
||||
f = open(phone_idx_path, 'r')
|
||||
old_address = f.read()
|
||||
f.close()
|
||||
|
||||
def old_address_from_phone(base_path: str, phone_number: str):
|
||||
pid_x = generate_metadata_pointer(phone_number.encode('utf-8'), ':cic.phone')
|
||||
phone_idx_path = os.path.join(f'{base_path}/phone/{pid_x[:2]}/{pid_x[2:4]}/{pid_x}')
|
||||
with open(phone_idx_path, 'r') as f:
|
||||
old_address = f.read()
|
||||
return old_address
|
||||
|
||||
|
||||
@celery_app.task(bind=True, base=MetadataTask)
|
||||
def resolve_phone(self, phone):
|
||||
identifier = generate_metadata_pointer(phone.encode('utf-8'), ':cic.phone')
|
||||
url = urllib.parse.urljoin(self.meta_url(), identifier)
|
||||
logg.debug('attempt getting phone pointer at {} for phone {}'.format(url, phone))
|
||||
r = urllib.request.urlopen(url)
|
||||
address = json.load(r)
|
||||
address = address.replace('"', '')
|
||||
logg.debug('address {} for phone {}'.format(address, phone))
|
||||
|
||||
return address
|
||||
|
||||
|
||||
@celery_app.task(bind=True, base=MetadataTask)
|
||||
def generate_metadata(self, address, phone):
|
||||
old_address = old_address_from_phone(self.import_dir, phone)
|
||||
|
||||
logg.debug('address {}'.format(address))
|
||||
old_address_upper = strip_0x(old_address).upper()
|
||||
metadata_path = '{}/old/{}/{}/{}.json'.format(
|
||||
self.import_dir,
|
||||
old_address_upper[:2],
|
||||
old_address_upper[2:4],
|
||||
old_address_upper,
|
||||
)
|
||||
|
||||
f = open(metadata_path, 'r')
|
||||
o = json.load(f)
|
||||
f.close()
|
||||
|
||||
u = Person.deserialize(o)
|
||||
|
||||
if u.identities.get('evm') == None:
|
||||
u.identities['evm'] = {}
|
||||
sub_chain_str = '{}:{}'.format(self.chain_spec.common_name(), self.chain_spec.network_id())
|
||||
u.identities['evm'][sub_chain_str] = [add_0x(address)]
|
||||
|
||||
new_address_clean = strip_0x(address)
|
||||
filepath = os.path.join(
|
||||
def generate_person_metadata(self, blockchain_address: str, phone_number: str):
|
||||
logg.debug(f'blockchain address: {blockchain_address}')
|
||||
old_blockchain_address = old_address_from_phone(self.import_dir, phone_number)
|
||||
old_address_upper = strip_0x(old_blockchain_address).upper()
|
||||
metadata_path = f'{self.import_dir}/old/{old_address_upper[:2]}/{old_address_upper[2:4]}/{old_address_upper}.json'
|
||||
with open(metadata_path, 'r') as metadata_file:
|
||||
person_metadata = json.load(metadata_file)
|
||||
person = Person.deserialize(person_metadata)
|
||||
if not person.identities.get('evm'):
|
||||
person.identities['evm'] = {}
|
||||
sub_chain_str = f'{self.chain_spec.common_name()}:{self.chain_spec.network_id()}'
|
||||
person.identities['evm'][sub_chain_str] = [add_0x(blockchain_address)]
|
||||
blockchain_address = strip_0x(blockchain_address)
|
||||
file_path = os.path.join(
|
||||
self.import_dir,
|
||||
'new',
|
||||
new_address_clean[:2].upper(),
|
||||
new_address_clean[2:4].upper(),
|
||||
new_address_clean.upper() + '.json',
|
||||
blockchain_address[:2].upper(),
|
||||
blockchain_address[2:4].upper(),
|
||||
blockchain_address.upper() + '.json'
|
||||
)
|
||||
os.makedirs(os.path.dirname(filepath), exist_ok=True)
|
||||
|
||||
o = u.serialize()
|
||||
f = open(filepath, 'w')
|
||||
f.write(json.dumps(o))
|
||||
f.close()
|
||||
|
||||
meta_key = generate_metadata_pointer(bytes.fromhex(new_address_clean), ':cic.person')
|
||||
os.makedirs(os.path.dirname(file_path), exist_ok=True)
|
||||
serialized_person_metadata = person.serialize()
|
||||
with open(file_path, 'w') as metadata_file:
|
||||
metadata_file.write(json.dumps(serialized_person_metadata))
|
||||
logg.debug(f'written person metadata for address: {blockchain_address}')
|
||||
meta_filepath = os.path.join(
|
||||
self.import_dir,
|
||||
'meta',
|
||||
'{}.json'.format(new_address_clean.upper()),
|
||||
'{}.json'.format(blockchain_address.upper()),
|
||||
)
|
||||
os.symlink(os.path.realpath(filepath), meta_filepath)
|
||||
os.symlink(os.path.realpath(file_path), meta_filepath)
|
||||
return blockchain_address
|
||||
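The net effect of the writes above is a sharded per-address metadata tree plus a flat, symlinked view; a sketch of the resulting paths for an address 0xABCD... (illustrative only, 'out' being the default import dir):

```
# out/new/AB/CD/ABCD....json   # person metadata keyed by the new chain address
# out/meta/ABCD....json        # symlink back to the file under new/
ls -l out/meta | head
```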
|
||||
# write ussd data
|
||||
ussd_data = {
|
||||
'phone': phone,
|
||||
'is_activated': 1,
|
||||
'preferred_language': random.sample(['en', 'sw'], 1)[0],
|
||||
'is_disabled': False
|
||||
}
|
||||
ussd_data_dir = os.path.join(self.import_dir, 'ussd')
|
||||
ussd_data_file_path = os.path.join(ussd_data_dir, f'{old_address}.json')
|
||||
f = open(ussd_data_file_path, 'w')
|
||||
f.write(json.dumps(ussd_data))
|
||||
f.close()
|
||||
|
||||
# write preferences data
|
||||
@celery_app.task(bind=True, base=MetadataTask)
|
||||
def generate_preferences_data(self, data: tuple):
|
||||
blockchain_address: str = data[0]
|
||||
preferences = data[1]
|
||||
preferences_dir = os.path.join(self.import_dir, 'preferences')
|
||||
preferences_data = {
|
||||
'preferred_language': ussd_data['preferred_language']
|
||||
}
|
||||
|
||||
preferences_key = generate_metadata_pointer(bytes.fromhex(new_address_clean[2:]), ':cic.preferences')
|
||||
preferences_key = generate_metadata_pointer(bytes.fromhex(strip_0x(blockchain_address)), ':cic.preferences')
|
||||
preferences_filepath = os.path.join(preferences_dir, 'meta', preferences_key)
|
||||
|
||||
filepath = os.path.join(
|
||||
preferences_dir,
|
||||
'new',
|
||||
@@ -164,95 +112,95 @@ def generate_metadata(self, address, phone):
|
||||
preferences_key.upper() + '.json'
|
||||
)
|
||||
os.makedirs(os.path.dirname(filepath), exist_ok=True)
|
||||
|
||||
f = open(filepath, 'w')
|
||||
f.write(json.dumps(preferences_data))
|
||||
f.close()
|
||||
with open(filepath, 'w') as preferences_file:
|
||||
preferences_file.write(json.dumps(preferences))
|
||||
logg.debug(f'written preferences metadata: {preferences} for address: {blockchain_address}')
|
||||
os.symlink(os.path.realpath(filepath), preferences_filepath)
|
||||
|
||||
logg.debug('found metadata {} for phone {}'.format(o, phone))
|
||||
|
||||
return address
|
||||
return blockchain_address
|
||||
|
||||
|
||||
@celery_app.task(bind=True, base=MetadataTask)
|
||||
def opening_balance_tx(self, address, phone, serial):
|
||||
old_address = old_address_from_phone(self.import_dir, phone)
|
||||
def generate_pins_data(self, blockchain_address: str, phone_number: str):
|
||||
pins_file = f'{self.import_dir}/pins.csv'
|
||||
file_op = 'a' if os.path.exists(pins_file) else 'w'
|
||||
with open(pins_file, file_op) as pins_file:
|
||||
password_hash = uuid.uuid4().hex
|
||||
pins_file.write(f'{phone_number},{password_hash}\n')
|
||||
logg.debug(f'written pin data for address: {blockchain_address}')
|
||||
return blockchain_address
|
||||
|
||||
k = to_checksum_address(strip_0x(old_address))
|
||||
balance = self.balances[k]
|
||||
logg.debug('found balance {} for address {} phone {}'.format(balance, old_address, phone))
|
||||
|
||||
@celery_app.task(bind=True, base=MetadataTask)
|
||||
def generate_ussd_data(self, blockchain_address: str, phone_number: str):
|
||||
ussd_data_file = f'{self.import_dir}/ussd_data.csv'
|
||||
file_op = 'a' if os.path.exists(ussd_data_file) else 'w'
|
||||
preferred_language = random.sample(["en", "sw"], 1)[0]
|
||||
preferences = {'preferred_language': preferred_language}
|
||||
with open(ussd_data_file, file_op) as ussd_data_file:
|
||||
ussd_data_file.write(f'{phone_number},1,{preferred_language},False\n')
|
||||
logg.debug(f'written ussd data for address: {blockchain_address}')
|
||||
return blockchain_address, preferences
|
||||
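The two tasks above append one row per imported account to pins.csv and ussd_data.csv respectively; hypothetical sample rows matching the write formats in the code:

```
# pins.csv:      <phone number>,<password hash>
# ussd_data.csv: <phone number>,<is_activated>,<preferred language>,<is_disabled>
head -n1 out/pins.csv out/ussd_data.csv
```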
|
||||
|
||||
@celery_app.task(bind=True, base=MetadataTask)
|
||||
def opening_balance_tx(self, blockchain_address: str, phone_number: str, serial: str):
|
||||
old_blockchain_address = old_address_from_phone(self.import_dir, phone_number)
|
||||
address = to_checksum_address(strip_0x(old_blockchain_address))
|
||||
balance = self.balances[address]
|
||||
logg.debug(f'found balance: {balance} for address: {address} phone: {phone_number}')
|
||||
decimal_balance = self.balance_processor.get_decimal_amount(int(balance))
|
||||
|
||||
(tx_hash_hex, o) = self.balance_processor.get_rpc_tx(address, decimal_balance, serial)
|
||||
|
||||
tx_hash_hex, o = self.balance_processor.get_rpc_tx(blockchain_address, decimal_balance, serial)
|
||||
tx = unpack(bytes.fromhex(strip_0x(o)), self.chain_spec)
|
||||
logg.debug('generated tx token value {} to {} tx hash {}'.format(decimal_balance, address, tx_hash_hex))
|
||||
|
||||
tx_path = os.path.join(
|
||||
self.import_dir,
|
||||
'txs',
|
||||
strip_0x(tx_hash_hex),
|
||||
)
|
||||
|
||||
f = open(tx_path, 'w')
|
||||
f.write(strip_0x(o))
|
||||
f.close()
|
||||
|
||||
tx_nonce_path = os.path.join(
|
||||
self.import_dir,
|
||||
'txs',
|
||||
'.' + str(tx['nonce']),
|
||||
)
|
||||
logg.debug(f'generated tx token value: {decimal_balance}: {blockchain_address} tx hash {tx_hash_hex}')
|
||||
tx_path = os.path.join(self.import_dir, 'txs', strip_0x(tx_hash_hex))
|
||||
with open(tx_path, 'w') as tx_file:
|
||||
tx_file.write(strip_0x(o))
|
||||
logg.debug(f'written tx with tx hash: {tx["hash"]} for address: {blockchain_address}')
|
||||
tx_nonce_path = os.path.join(self.import_dir, 'txs', '.' + str(tx['nonce']))
|
||||
os.symlink(os.path.realpath(tx_path), tx_nonce_path)
|
||||
|
||||
return tx['hash']
|
||||
|
||||
|
||||
@celery_app.task(bind=True, base=ImportTask, autoretry_for=(FileNotFoundError,), max_retries=None,
|
||||
@celery_app.task(bind=True, base=MetadataTask)
|
||||
def resolve_phone(self, phone_number: str):
|
||||
identifier = generate_metadata_pointer(phone_number.encode('utf-8'), ':cic.phone')
|
||||
url = parse.urljoin(self.meta_url(), identifier)
|
||||
logg.debug(f'attempt getting phone pointer at: {url} for phone: {phone_number}')
|
||||
r = request.urlopen(url)
|
||||
address = json.load(r)
|
||||
address = address.replace('"', '')
|
||||
logg.debug(f'address: {address} for phone: {phone_number}')
|
||||
return address
|
||||
|
||||
|
||||
@celery_app.task(autoretry_for=(FileNotFoundError,),
|
||||
bind=True,
|
||||
base=ImportTask,
|
||||
max_retries=None,
|
||||
default_retry_delay=0.1)
|
||||
def send_txs(self, nonce):
|
||||
if nonce == self.count + self.balance_processor.nonce_offset:
|
||||
logg.info('reached nonce {} (offset {} + count {}) exiting'.format(nonce, self.balance_processor.nonce_offset,
|
||||
self.count))
|
||||
return
|
||||
|
||||
logg.debug('attempt to open symlink for nonce {}'.format(nonce))
|
||||
tx_nonce_path = os.path.join(
|
||||
self.import_dir,
|
||||
'txs',
|
||||
'.' + str(nonce),
|
||||
)
|
||||
f = open(tx_nonce_path, 'r')
|
||||
tx_signed_raw_hex = f.read()
|
||||
f.close()
|
||||
|
||||
os.unlink(tx_nonce_path)
|
||||
|
||||
o = raw(add_0x(tx_signed_raw_hex))
|
||||
tx_hash_hex = self.balance_processor.conn.do(o)
|
||||
|
||||
logg.info('sent nonce {} tx hash {}'.format(nonce, tx_hash_hex)) # tx_signed_raw_hex))
|
||||
|
||||
nonce += 1
|
||||
|
||||
queue = self.request.delivery_info.get('routing_key')
|
||||
s = celery.signature(
|
||||
'import_task.send_txs',
|
||||
[
|
||||
nonce,
|
||||
],
|
||||
queue=queue,
|
||||
)
|
||||
s.apply_async()
|
||||
if nonce == self.count + self.balance_processor.nonce_offset:
|
||||
logg.info(f'reached nonce {nonce} (offset {self.balance_processor.nonce_offset} + count {self.count}).')
|
||||
celery_app.control.broadcast('shutdown', destination=[f'celery@{queue}'])
|
||||
|
||||
logg.debug(f'attempt to open symlink for nonce {nonce}')
|
||||
tx_nonce_path = os.path.join(self.import_dir, 'txs', '.' + str(nonce))
|
||||
with open(tx_nonce_path, 'r') as tx_nonce_file:
|
||||
tx_signed_raw_hex = tx_nonce_file.read()
|
||||
os.unlink(tx_nonce_path)
|
||||
o = raw(add_0x(tx_signed_raw_hex))
|
||||
if self.include_balances:
|
||||
tx_hash_hex = self.balance_processor.conn.do(o)
|
||||
logg.info(f'sent nonce {nonce} tx hash {tx_hash_hex}')
|
||||
nonce += 1
|
||||
s = celery.signature('import_task.send_txs', [nonce], queue=queue)
|
||||
s.apply_async()
|
||||
return nonce
|
||||
|
||||
|
||||
@celery_app.task
|
||||
def set_pins(config: dict, phone_to_pins: list):
|
||||
# define db connection
|
||||
@celery_app.task()
|
||||
def set_pin_data(config: dict, phone_to_pins: list):
|
||||
db_conn = psycopg2.connect(
|
||||
database=config.get('database'),
|
||||
host=config.get('host'),
|
||||
@@ -261,24 +209,17 @@ def set_pins(config: dict, phone_to_pins: list):
|
||||
password=config.get('password')
|
||||
)
|
||||
db_cursor = db_conn.cursor()
|
||||
|
||||
# update db
|
||||
sql = 'UPDATE account SET password_hash = %s WHERE phone_number = %s'
|
||||
for element in phone_to_pins:
|
||||
sql = 'UPDATE account SET password_hash = %s WHERE phone_number = %s'
|
||||
db_cursor.execute(sql, (element[1], element[0]))
|
||||
logg.debug(f'Updating: {element[0]} with: {element[1]}')
|
||||
|
||||
# commit changes
|
||||
db_conn.commit()
|
||||
|
||||
# close connections
|
||||
db_cursor.close()
|
||||
db_conn.close()
|
||||
|
||||
|
||||
@celery_app.task
|
||||
def set_ussd_data(config: dict, ussd_data: dict):
|
||||
# define db connection
|
||||
def set_ussd_data(config: dict, ussd_data: list):
|
||||
db_conn = psycopg2.connect(
|
||||
database=config.get('database'),
|
||||
host=config.get('host'),
|
||||
@@ -287,20 +228,12 @@ def set_ussd_data(config: dict, ussd_data: dict):
|
||||
password=config.get('password')
|
||||
)
|
||||
db_cursor = db_conn.cursor()
|
||||
|
||||
# process ussd_data
|
||||
account_status = 1
|
||||
if ussd_data['is_activated'] == 1:
|
||||
account_status = 2
|
||||
preferred_language = ussd_data['preferred_language']
|
||||
phone_number = ussd_data['phone']
|
||||
|
||||
sql = 'UPDATE account SET status = %s, preferred_language = %s WHERE phone_number = %s'
|
||||
db_cursor.execute(sql, (account_status, preferred_language, phone_number))
|
||||
|
||||
# commit changes
|
||||
for element in ussd_data:
|
||||
status = 2 if int(element[1]) == 1 else 1
|
||||
preferred_language = element[2]
|
||||
phone_number = element[0]
|
||||
db_cursor.execute(sql, (status, preferred_language, phone_number))
|
||||
db_conn.commit()
|
||||
|
||||
# close connections
|
||||
db_cursor.close()
|
||||
db_conn.close()
|
||||
|
||||
@@ -3,56 +3,61 @@ import argparse
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import redis
|
||||
import sys
|
||||
import time
|
||||
import urllib.request
|
||||
import uuid
|
||||
from urllib import request
|
||||
from urllib.parse import urlencode
|
||||
|
||||
# external imports
|
||||
import celery
|
||||
import confini
|
||||
import phonenumbers
|
||||
import redis
|
||||
from chainlib.chain import ChainSpec
|
||||
from cic_types.models.person import Person
|
||||
from confini import Config
|
||||
|
||||
# local imports
|
||||
from import_util import get_celery_worker_status
|
||||
|
||||
default_config_dir = './config'
|
||||
logging.basicConfig(level=logging.WARNING)
|
||||
logg = logging.getLogger()
|
||||
|
||||
default_config_dir = '/usr/local/etc/cic'
|
||||
arg_parser = argparse.ArgumentParser(description='Daemon worker that handles data seeding tasks.')
|
||||
# batch size should stay slightly below what one block's gas limit can hold, e.g. at 80000 gas per tx and an 8000000 gas limit a batch of a bit under 100 fits
|
||||
arg_parser.add_argument('--batch-size',
|
||||
dest='batch_size',
|
||||
default=100,
|
||||
type=int,
|
||||
help='burst size of sending transactions to node')
|
||||
arg_parser.add_argument('--batch-delay', dest='batch_delay', default=3, type=int, help='seconds delay between batches')
|
||||
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use.')
|
||||
arg_parser.add_argument('--env-prefix',
|
||||
default=os.environ.get('CONFINI_ENV_PREFIX'),
|
||||
dest='env_prefix',
|
||||
type=str,
|
||||
help='environment prefix for variables to overwrite configuration.')
|
||||
arg_parser.add_argument('-i', '--chain-spec', type=str, dest='i', help='chain spec')
|
||||
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='celery queue to submit data seeding tasks to.')
|
||||
arg_parser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
|
||||
arg_parser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
|
||||
arg_parser.add_argument('--redis-port', dest='redis_port', type=int, help='redis port to use for task submission')
|
||||
arg_parser.add_argument('--ussd-host', dest='ussd_host', type=str,
|
||||
help="host to ussd app responsible for processing ussd requests.")
|
||||
arg_parser.add_argument('--ussd-no-ssl', dest='ussd_no_ssl', help='do not use ssl (careful)', action='store_true')
|
||||
arg_parser.add_argument('--ussd-port', dest='ussd_port', type=str,
|
||||
help="port to ussd app responsible for processing ussd requests.")
|
||||
arg_parser.add_argument('-v', help='be verbose', action='store_true')
|
||||
arg_parser.add_argument('-vv', help='be more verbose', action='store_true')
|
||||
arg_parser.add_argument('import_dir', default='out', type=str, help='user export directory')
|
||||
args = arg_parser.parse_args()
|
||||
|
||||
argparser = argparse.ArgumentParser()
|
||||
argparser.add_argument('-c', type=str, default=default_config_dir, help='config file')
|
||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='Chain specification string')
|
||||
argparser.add_argument('--redis-host', dest='redis_host', type=str, help='redis host to use for task submission')
|
||||
argparser.add_argument('--redis-port', dest='redis_port', type=int, help='redis port to use for task submission')
|
||||
argparser.add_argument('--redis-db', dest='redis_db', type=int, help='redis db to use for task submission and callback')
|
||||
argparser.add_argument('--batch-size', dest='batch_size', default=100, type=int,
|
||||
help='burst size of sending transactions to node') # batch size should be slightly below cumulative gas limit worth, eg 80000 gas txs with 8000000 limit is a bit less than 100 batch size
|
||||
argparser.add_argument('--batch-delay', dest='batch_delay', default=3, type=int, help='seconds delay between batches')
|
||||
argparser.add_argument('--timeout', default=60.0, type=float, help='Callback timeout')
|
||||
argparser.add_argument('--ussd-host', dest='ussd_host', type=str,
|
||||
help="host to ussd app responsible for processing ussd requests.")
|
||||
argparser.add_argument('--ussd-port', dest='ussd_port', type=str,
|
||||
help="port to ussd app responsible for processing ussd requests.")
|
||||
argparser.add_argument('--ussd-no-ssl', dest='ussd_no_ssl', help='do not use ssl (careful)', action='store_true')
|
||||
argparser.add_argument('-q', type=str, default='cic-eth', help='Task queue')
|
||||
argparser.add_argument('-v', action='store_true', help='Be verbose')
|
||||
argparser.add_argument('-vv', action='store_true', help='Be more verbose')
|
||||
argparser.add_argument('user_dir', type=str, help='path to users export dir tree')
|
||||
args = argparser.parse_args()
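The batch-size comment in both argument parsers can be sanity-checked with a quick calculation; the figures below are the ones quoted in the comment (80000 gas per transaction, 8000000 block gas limit), not measured values.

```python
# Rough check of the batch-size comment above, using only the figures it quotes.
block_gas_limit = 8_000_000   # quoted block gas limit
gas_per_tx = 80_000           # quoted cost of one import transaction
print(block_gas_limit // gas_per_tx)   # 100 -> hence the default batch size of 100
```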
if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

if args.v:
    logg.setLevel(logging.INFO)
elif args.vv:
    logg.setLevel(logging.DEBUG)

config_dir = args.c
config = confini.Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
config = Config(args.c, args.env_prefix)
config.process()
args_override = {
    'CIC_CHAIN_SPEC': getattr(args, 'i'),
@@ -60,44 +65,29 @@ args_override = {
    'REDIS_PORT': getattr(args, 'redis_port'),
    'REDIS_DB': getattr(args, 'redis_db'),
}
config.dict_override(args_override, 'cli')
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
config.dict_override(args_override, 'cli flag')
config.censor('PASSWORD', 'DATABASE')
config.censor('PASSWORD', 'SSL')
logg.debug(f'config loaded from {args.c}:\n{config}')

celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
get_celery_worker_status(celery_app=celery_app)
old_account_dir = os.path.join(args.import_dir, 'old')
os.stat(old_account_dir)
logg.debug(f'created old system data dir: {old_account_dir}')

redis_host = config.get('REDIS_HOST')
redis_port = config.get('REDIS_PORT')
redis_db = config.get('REDIS_DB')
r = redis.Redis(redis_host, redis_port, redis_db)
new_account_dir = os.path.join(args.import_dir, 'new')
os.makedirs(new_account_dir, exist_ok=True)
logg.debug(f'created new system data dir: {new_account_dir}')

ps = r.pubsub()
person_metadata_dir = os.path.join(args.import_dir, 'meta')
os.makedirs(person_metadata_dir, exist_ok=True)
logg.debug(f'created person metadata dir: {person_metadata_dir}')

user_new_dir = os.path.join(args.user_dir, 'new')
os.makedirs(user_new_dir, exist_ok=True)

ussd_data_dir = os.path.join(args.user_dir, 'ussd')
os.makedirs(ussd_data_dir, exist_ok=True)

preferences_dir = os.path.join(args.user_dir, 'preferences')
preferences_dir = os.path.join(args.import_dir, 'preferences')
os.makedirs(os.path.join(preferences_dir, 'meta'), exist_ok=True)
logg.debug(f'created preferences metadata dir: {preferences_dir}')

meta_dir = os.path.join(args.user_dir, 'meta')
os.makedirs(meta_dir, exist_ok=True)
valid_service_codes = config.get('USSD_SERVICE_CODE').split(",")

user_old_dir = os.path.join(args.user_dir, 'old')
os.stat(user_old_dir)

txs_dir = os.path.join(args.user_dir, 'txs')
os.makedirs(txs_dir, exist_ok=True)

chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
chain_str = str(chain_spec)

batch_size = args.batch_size
batch_delay = args.batch_delay
ussd_port = args.ussd_port
ussd_host = args.ussd_host
ussd_no_ssl = args.ussd_no_ssl
if ussd_no_ssl is True:
    ussd_ssl = False
@@ -105,7 +95,17 @@ else:
    ussd_ssl = True


def build_ussd_request(phone, host, port, service_code, username, password, ssl=False):
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
get_celery_worker_status(celery_app)


def build_ussd_request(host: str,
                       password: str,
                       phone_number: str,
                       port: str,
                       service_code: str,
                       username: str,
                       ssl: bool = False):
    url = 'http'
    if ssl:
        url += 's'
@@ -115,16 +115,16 @@ def build_ussd_request(phone, host, port, service_code, username, password, ssl=
    url += '/?username={}&password={}'.format(username, password)

    logg.info('ussd service url {}'.format(url))
    logg.info('ussd phone {}'.format(phone))
    logg.info('ussd phone {}'.format(phone_number))

    session = uuid.uuid4().hex
    data = {
        'sessionId': session,
        'serviceCode': service_code,
        'phoneNumber': phone,
        'phoneNumber': phone_number,
        'text': service_code,
    }
    req = urllib.request.Request(url)
    req = request.Request(url)
    req.method = 'POST'
    data_str = urlencode(data)
    data_bytes = data_str.encode('utf-8')
@@ -134,85 +134,77 @@ def build_ussd_request(phone, host, port, service_code, username, password, ssl=
    return req


def register_ussd(i, u):
    phone_object = phonenumbers.parse(u.tel)
    phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
    logg.debug('tel {} {}'.format(u.tel, phone))
    req = build_ussd_request(
        phone,
        ussd_host,
        ussd_port,
        config.get('APP_SERVICE_CODE'),
        '',
        '',
        ussd_ssl
    )
    response = urllib.request.urlopen(req)
def e164_phone_number(phone_number: str):
    phone_object = phonenumbers.parse(phone_number)
    return phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)


def register_account(person: Person):
    phone_number = e164_phone_number(person.tel)
    logg.debug(f'tel: {phone_number}')
    req = build_ussd_request(args.ussd_host,
                             '',
                             phone_number,
                             args.ussd_port,
                             valid_service_codes[0],
                             '',
                             ussd_ssl)
    response = request.urlopen(req)
    response_data = response.read().decode('utf-8')
    state = response_data[:3]
    out = response_data[4:]
    logg.debug('ussd reponse: {}'.format(out))
    logg.debug(f'ussd response: {response_data[4:]}')
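For reference, a minimal sketch of the kind of request the new register_account()/build_ussd_request() pair ends up sending. The host, port, and phone number are made-up values, and the host:port part of the URL is inferred, since that part of the function sits in the elided hunk above.

```python
# Illustrative only: approximates the POST that build_ussd_request() returns,
# with hypothetical host/port/phone values.
import uuid
from urllib import request
from urllib.parse import urlencode

host, port = 'localhost', '9000'          # assumption: ussd app reachable locally
phone_number = '+254700000000'            # made-up E.164 number
service_code = '*483*46#'                 # first entry of USSD_SERVICE_CODE

url = f'http://{host}:{port}/?username=&password='
data = {
    'sessionId': uuid.uuid4().hex,
    'serviceCode': service_code,
    'phoneNumber': phone_number,
    'text': service_code,
}
req = request.Request(url, data=urlencode(data).encode('utf-8'), method='POST')
# request.urlopen(req) would then drive the ussd menu for that phone number
```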
if __name__ == '__main__':

    i = 0
    j = 0
    for x in os.walk(user_old_dir):
    for x in os.walk(old_account_dir):
        for y in x[2]:
            if y[len(y) - 5:] != '.json':
                continue
            # handle json containing person object
            filepath = os.path.join(x[0], y)
            f = open(filepath, 'r')
            try:
                o = json.load(f)
            except json.decoder.JSONDecodeError as e:
                f.close()
                logg.error('load error for {}: {}'.format(y, e))
                continue
            f.close()
            u = Person.deserialize(o)

            register_ussd(i, u)

            phone_object = phonenumbers.parse(u.tel)
            phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)

            s_phone = celery.signature(
                'import_task.resolve_phone',
                [
                    phone,
                ],
                queue='cic-import-ussd',
            file_path = os.path.join(x[0], y)
            with open(file_path, 'r') as account_file:
                try:
                    account_data = json.load(account_file)
                except json.decoder.JSONDecodeError as e:
                    logg.error('load error for {}: {}'.format(y, e))
                    continue
            person = Person.deserialize(account_data)
            register_account(person)
            phone_number = e164_phone_number(person.tel)
            s_resolve_phone = celery.signature(
                'import_task.resolve_phone', [phone_number], queue=args.q
            )

            s_meta = celery.signature(
                'import_task.generate_metadata',
                [
                    phone,
                ],
                queue='cic-import-ussd',
            s_person_metadata = celery.signature(
                'import_task.generate_person_metadata', [phone_number], queue=args.q
            )

            s_balance = celery.signature(
                'import_task.opening_balance_tx',
                [
                    phone,
                    i,
                ],
                queue='cic-import-ussd',
            s_ussd_data = celery.signature(
                'import_task.generate_ussd_data', [phone_number], queue=args.q
            )

            s_meta.link(s_balance)
            s_phone.link(s_meta)
            # block time plus a bit of time for ussd processing
            s_phone.apply_async(countdown=7)
            s_preferences_metadata = celery.signature(
                'import_task.generate_preferences_data', [], queue=args.q
            )

            s_pins_data = celery.signature(
                'import_task.generate_pins_data', [phone_number], queue=args.q
            )

            s_opening_balance = celery.signature(
                'import_task.opening_balance_tx', [phone_number, i], queue=args.q
            )
            celery.chain(s_resolve_phone,
                         s_person_metadata,
                         s_ussd_data,
                         s_preferences_metadata,
                         s_pins_data,
                         s_opening_balance).apply_async(countdown=7)

            i += 1
            sys.stdout.write('imported {} {}'.format(i, u).ljust(200) + "\r")

            sys.stdout.write('imported: {} {}'.format(i, person).ljust(200) + "\r\n")
            j += 1
            if j == batch_size:
                time.sleep(batch_delay)
            if j == args.batch_size:
                time.sleep(args.batch_delay)
                j = 0
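The replacement loop above chains six task signatures instead of linking two. A generic sketch of the chaining behaviour this relies on follows; the task names and bodies below are hypothetical stand-ins, not the actual import_task implementations. In a Celery chain, each task's return value is prepended to the arguments of the next signature in the chain, which is why every signature here only carries the phone number explicitly.

```python
# Generic celery chain sketch; task names and bodies are made up.
import celery

celery_app = celery.Celery(broker='redis://localhost:6379/0',
                           backend='redis://localhost:6379/0')

@celery_app.task(name='demo.resolve_phone')
def resolve_phone(phone):
    return 'address-for-' + phone           # stand-in for a real lookup

@celery_app.task(name='demo.generate_metadata')
def generate_metadata(address, phone):
    return {'address': address, 'phone': phone}

s_resolve = celery.signature('demo.resolve_phone', ['+254700000000'], queue='cic-import-ussd')
s_meta = celery.signature('demo.generate_metadata', ['+254700000000'], queue='cic-import-ussd')
# resolve_phone's return value is prepended to generate_metadata's arguments
celery.chain(s_resolve, s_meta).apply_async(countdown=7)
```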
@@ -1,67 +1,67 @@
# standard imports
import argparse
import json
import csv
import logging
import os
import psycopg2

# external imports
import celery
from confini import Config

# local imports


default_config_dir = './config'
logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

default_config_dir = '/usr/local/etc/cic'
arg_parser = argparse.ArgumentParser(description='Pins import script.')
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config root to use.')
arg_parser.add_argument('--env-prefix',
                        default=os.environ.get('CONFINI_ENV_PREFIX'),
                        dest='env_prefix',
                        type=str,
                        help='environment prefix for variables to overwrite configuration.')
arg_parser.add_argument('import_dir', default='out', type=str, help='user export directory')
arg_parser.add_argument('-v', help='be verbose', action='store_true')
arg_parser.add_argument('-vv', help='be more verbose', action='store_true')

arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='config file')
arg_parser.add_argument('-q', type=str, default='cic-import-ussd', help='Task queue')
arg_parser.add_argument('-v', action='store_true', help='Be verbose')
arg_parser.add_argument('-vv', action='store_true', help='Be more verbose')
arg_parser.add_argument('user_dir', type=str, help='path to users export dir tree')
args = arg_parser.parse_args()

if args.v:
    logg.setLevel(logging.INFO)
elif args.vv:
    logg.setLevel(logging.DEBUG)
if args.vv:
    logging.getLogger().setLevel(logging.DEBUG)
elif args.v:
    logging.getLogger().setLevel(logging.INFO)

config_dir = args.c
config = Config(config_dir, os.environ.get('CONFINI_ENV_PREFIX'))
config = Config(args.c, args.env_prefix)
config.process()
logg.debug('config loaded from {}:\n{}'.format(args.c, config))
config.censor('PASSWORD', 'DATABASE')
logg.debug(f'config loaded from {args.c}:\n{config}')

ussd_data_dir = os.path.join(args.user_dir, 'ussd')

db_configs = {
    'database': config.get('DATABASE_NAME'),
    'host': config.get('DATABASE_HOST'),
    'port': config.get('DATABASE_PORT'),
    'user': config.get('DATABASE_USER'),
    'password': config.get('DATABASE_PASSWORD')
}
celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
def main():
    with open(f'{args.import_dir}/ussd_data.csv') as ussd_data_file:
        ussd_data = [tuple(row) for row in csv.reader(ussd_data_file)]

    db_conn = psycopg2.connect(
        database=config.get('DATABASE_NAME'),
        host=config.get('DATABASE_HOST'),
        port=config.get('DATABASE_PORT'),
        user=config.get('DATABASE_USER'),
        password=config.get('DATABASE_PASSWORD')
    )
    db_cursor = db_conn.cursor()
    sql = 'UPDATE account SET status = %s, preferred_language = %s WHERE phone_number = %s'
    for element in ussd_data:
        status = 2 if int(element[1]) == 1 else 1
        preferred_language = element[2]
        phone_number = element[0]
        db_cursor.execute(sql, (status, preferred_language, phone_number))
        logg.debug(f'Updating account:{phone_number} with: preferred language: {preferred_language} status: {status}.')
    db_conn.commit()
    db_cursor.close()
    db_conn.close()


if __name__ == '__main__':
    for x in os.walk(ussd_data_dir):
        for y in x[2]:

            if y[len(y) - 5:] == '.json':
                filepath = os.path.join(x[0], y)
                f = open(filepath, 'r')
                try:
                    ussd_data = json.load(f)
                    logg.debug(f'LOADING USSD DATA: {ussd_data}')
                except json.decoder.JSONDecodeError as e:
                    f.close()
                    logg.error('load error for {}: {}'.format(y, e))
                    continue
                f.close()

                s_set_ussd_data = celery.signature(
                    'import_task.set_ussd_data',
                    [db_configs, ussd_data]
                )
                s_set_ussd_data.apply_async(queue='cic-import-ussd')
    main()
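A sketch of the CSV row layout main() above expects; the values are hypothetical, the column order follows how each tuple is indexed in the loop.

```python
# Hypothetical ussd_data.csv row: phone number, activation flag, preferred language.
row = ('+254700000000', '1', 'en')

status = 2 if int(row[1]) == 1 else 1   # map the flag to the status codes used above
params = (status, row[2], row[0])       # argument order of the UPDATE statement
# db_cursor.execute(sql, params) then updates that account's status and language
```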
@@ -1,27 +1,4 @@
[app]
ALLOWED_IP=0.0.0.0/0
LOCALE_FALLBACK=en
LOCALE_PATH=/usr/src/cic-ussd/var/lib/locale/
MAX_BODY_LENGTH=1024
PASSWORD_PEPPER=QYbzKff6NhiQzY3ygl2BkiKOpER8RE/Upqs/5aZWW+I=
SERVICE_CODE=*483*46#

[phone_number]
REGION=KE

[ussd]
MENU_FILE=/usr/src/data/ussd_menu.json
user =
pass =

[statemachine]
STATES=/usr/src/cic-ussd/states/
TRANSITIONS=/usr/src/cic-ussd/transitions/

[client]
host =
port =
ssl =

[keystore]
file_path = keystore/UTC--2021-01-08T17-18-44.521011372Z--eb3907ecad74a0013c259d5874ae7f22dcbcc95c
allowed_ip=0.0.0.0/0
max_body_length=1024
password_pepper=
@@ -1,10 +1,10 @@
[database]
NAME=sempo
USER=postgres
PASSWORD=
HOST=localhost
PORT=5432
ENGINE=postgresql
DRIVER=psycopg2
DEBUG=0
POOL_SIZE=1
name=cic_ussd
user=postgres
password=
host=localhost
port=5432
engine=postgresql
driver=psycopg2
debug=0
pool_size=1
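The key renames above (NAME to name, and so on) should not affect the lookups elsewhere in this diff, assuming confini normalizes section and key names to the upper-cased SECTION_KEY form that the import scripts query with config.get(). A minimal sketch under that assumption:

```python
# Assumes confini maps "[database] name" to the lookup key DATABASE_NAME.
import os
from confini import Config

config = Config('./config', os.environ.get('CONFINI_ENV_PREFIX'))
config.process()
print(config.get('DATABASE_NAME'))   # -> cic_ussd with the ini shown above
```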
apps/data-seeding/config/ussd.ini (new file)
@@ -0,0 +1,5 @@
[ussd]
menu_file=data/ussd_menu.json
service_code=*483*46#,*483*061#,*384*96#
user =
pass =
@@ -1,91 +0,0 @@
# standard imports
import argparse
import json
import logging
import os
import uuid

# third-party imports
import bcrypt
import celery
import confini
import phonenumbers
import random
from cic_types.models.person import Person
from cryptography.fernet import Fernet

# local imports


logging.basicConfig(level=logging.WARNING)
logg = logging.getLogger()

script_dir = os.path.realpath(os.path.dirname(__file__))
default_config_dir = os.environ.get('CONFINI_DIR', os.path.join(script_dir, 'config'))

arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-c', type=str, default=default_config_dir, help='Config dir')
arg_parser.add_argument('-v', action='store_true', help='Be verbose')
arg_parser.add_argument('-vv', action='store_true', help='Be more verbose')
arg_parser.add_argument('--userdir', type=str, help='path to users export dir tree')
arg_parser.add_argument('pins_dir', type=str, help='path to pin export dir tree')


args = arg_parser.parse_args()

if args.v:
    logg.setLevel(logging.INFO)
elif args.vv:
    logg.setLevel(logging.DEBUG)

config = confini.Config(args.c, os.environ.get('CONFINI_ENV_PREFIX'))
config.process()
logg.info('loaded config\n{}'.format(config))

celery_app = celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))

user_dir = args.userdir
pins_dir = args.pins_dir


def generate_password_hash():
    key = Fernet.generate_key()
    fnt = Fernet(key)
    pin = str(random.randint(1000, 9999))
    return fnt.encrypt(bcrypt.hashpw(pin.encode('utf-8'), bcrypt.gensalt())).decode()


user_old_dir = os.path.join(user_dir, 'old')
logg.debug(f'reading user data from: {user_old_dir}')

pins_file = open(f'{pins_dir}/pins.csv', 'w')

if __name__ == '__main__':

    for x in os.walk(user_old_dir):
        for y in x[2]:
            # skip non-json files
            if y[len(y) - 5:] != '.json':
                continue

            # define file path for
            filepath = None
            if y[:15] != '_ussd_data.json':
                filepath = os.path.join(x[0], y)
            f = open(filepath, 'r')
            try:
                o = json.load(f)
            except json.decoder.JSONDecodeError as e:
                f.close()
                logg.error('load error for {}: {}'.format(y, e))
                continue
            f.close()
            u = Person.deserialize(o)

            phone_object = phonenumbers.parse(u.tel)
            phone = phonenumbers.format_number(phone_object, phonenumbers.PhoneNumberFormat.E164)
            password_hash = uuid.uuid4().hex
            pins_file.write(f'{phone},{password_hash}\n')
            logg.info(f'Writing phone: {phone}, password_hash: {password_hash}')

    pins_file.close()
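The generate_password_hash() helper in the removed file wraps a bcrypt digest of the PIN in a Fernet token. A small round-trip sketch of that pattern follows; key handling is illustrative only, and note that the helper above generates and then discards the key, so its output could not be verified later.

```python
# Round-trip of the bcrypt-inside-Fernet pattern used by generate_password_hash().
import bcrypt
from cryptography.fernet import Fernet

key = Fernet.generate_key()             # in practice this key would have to be persisted
fnt = Fernet(key)
token = fnt.encrypt(bcrypt.hashpw(b'1234', bcrypt.gensalt())).decode()

hashed = fnt.decrypt(token.encode())    # recover the bcrypt digest with the same key
assert bcrypt.checkpw(b'1234', hashed)  # verify the PIN against it
```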
@@ -9,7 +9,9 @@ COPY package.json \
    package-lock.json \
    .

RUN --mount=type=cache,mode=0755,target=/root/node_modules npm install

RUN npm ci --production
#RUN --mount=type=cache,mode=0755,target=/root/node_modules npm install

COPY requirements.txt .
apps/data-seeding/import_ussd.sh (new file)
@@ -0,0 +1,60 @@
#!/usr/bin/env bash

set -e

echo "Creating seed data..."
python create_import_users.py -vv --dir "$IMPORT_DIR" "$ACCOUNT_COUNT"
wait $!
echo "Purge tasks from celery worker"
celery -A cic_ussd.import_task purge -Q "$CELERY_QUEUE" --broker redis://"$REDIS_HOST":"$REDIS_PORT" -f
echo "Start celery worker and import balance job"
if [ "$INCLUDE_BALANCES" != "y" ]
then
  echo "Running worker without opening balance transactions"
  TARGET_TX_COUNT=$ACCOUNT_COUNT
  python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$IMPORT_DIR" &
else
  echo "Running worker with opening balance transactions"
  TARGET_TX_COUNT=$((ACCOUNT_COUNT*2))
  python cic_ussd/import_balance.py -vv -c "$CONFIG" -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --include-balances --token-symbol "$TOKEN_SYMBOL" -y "$KEYSTORE_PATH" "$IMPORT_DIR" &
fi

until [ -f ./cic-import-ussd.pid ]
do
  echo "Polling for celery worker pid file..."
  sleep 1
done
IMPORT_BALANCE_JOB=$(<cic-import-ussd.pid)
echo "Start import users job"
if [ "$USSD_SSL" == "y" ]
then
  echo "Targeting secure ussd-user server"
  python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" "$IMPORT_DIR"
else
  python cic_ussd/import_users.py -vv -c "$CONFIG" --ussd-host "$USSD_HOST" --ussd-port "$USSD_PORT" --ussd-no-ssl "$IMPORT_DIR"
fi
echo "Waiting for import balance job to complete ..."
tail --pid="$IMPORT_BALANCE_JOB" -f /dev/null
set -e
echo "Importing pins"
python cic_ussd/import_pins.py -c "$CONFIG" -vv "$IMPORT_DIR"
set +e
wait $!
set -e
echo "Importing ussd data"
python cic_ussd/import_ussd_data.py -c "$CONFIG" -vv "$IMPORT_DIR"
set +e
wait $!
echo "Importing person metadata"
node cic_meta/import_meta.js "$IMPORT_DIR" "$ACCOUNT_COUNT"
echo "Import preferences metadata"
node cic_meta/import_meta_preferences.js "$IMPORT_DIR" "$ACCOUNT_COUNT"
CIC_NOTIFY_DATABASE=postgres://$DATABASE_USER:$DATABASE_PASSWORD@$DATABASE_HOST:$DATABASE_PORT/$NOTIFY_DATABASE_NAME
NOTIFICATION_COUNT=$(psql -qtA "$CIC_NOTIFY_DATABASE" -c 'SELECT COUNT(message) FROM notification WHERE message IS NOT NULL')
# compare counts numerically; a string comparison would sort e.g. "9" after "10"
while [[ "$NOTIFICATION_COUNT" -lt "$TARGET_TX_COUNT" ]]
do
  NOTIFICATION_COUNT=$(psql -qtA "$CIC_NOTIFY_DATABASE" -c 'SELECT COUNT(message) FROM notification WHERE message IS NOT NULL')
  sleep 5
  echo "Notification count is: ${NOTIFICATION_COUNT}. Checking again in 5 seconds..."
done
python verify.py -c "$CONFIG" -v -p "$ETH_PROVIDER" -r "$CIC_REGISTRY_ADDRESS" --exclude "$EXCLUSIONS" --token-symbol "$TOKEN_SYMBOL" "$IMPORT_DIR"
apps/data-seeding/package-lock.json (generated, 2216 lines): diff suppressed because it is too large.
Some files were not shown because too many files have changed in this diff.