Compare commits
78 Commits
bvander/ex
...
lash/defau
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
66eafad1b1 | ||
| c52885a016 | |||
| f0dd257e05 | |||
| e8c870d230 | |||
| 4bb36a448d | |||
| 231163e2fc | |||
|
|
4308862493 | ||
| e599933ef8 | |||
| 266bc3362d | |||
| 8350381754 | |||
| 6ddeacf036 | |||
| fe017d2b0f | |||
| d7973436e6 | |||
|
|
4a413ad737
|
||
| 5025c31af6 | |||
|
|
651cd888f4
|
||
|
|
021c736707 | ||
|
|
e3028a5060 | ||
|
|
5f6c57647f | ||
|
|
ed029a936c | ||
|
|
c559bb2fee | ||
|
|
9b79034ed3 | ||
| b7d5c6799f | |||
| eef8bb2cf7 | |||
| cf96fee430 | |||
| 9740963431 | |||
|
|
a3c4932488 | ||
|
|
aa667951be | ||
|
|
c2459cfd65 | ||
|
|
e7102ff02d | ||
| a942c785f6 | |||
| 70704b09ec | |||
|
|
a075c55957 | ||
|
|
6464f651ec | ||
|
|
5145282946 | ||
|
|
1e87f2ed31
|
||
|
|
c852f41d76 | ||
|
|
f8e68cff96 | ||
| 7027d77836 | |||
| d356f8167d | |||
| 753d21fe95 | |||
| 3b6e031746 | |||
| b1d5d45eef | |||
| 53317cb912 | |||
| 18382a1f35 | |||
| 29e91fafab | |||
| 5b20a9a24a | |||
| a252195bdc | |||
|
|
f1be3b633c | ||
|
|
e59a71188c
|
||
| 1d0eb06f2f | |||
| 57127132b5 | |||
| 0bf2c35fcd | |||
| d046595764 | |||
|
9dd7ec88fd
|
|||
| 282fd2ff52 | |||
| 8f85598861 | |||
| 8529c349ca | |||
| 4368d2bf59 | |||
| da3c812bf5 | |||
| 82b1e87462 | |||
| e13c423daf | |||
| 56b3bd751d | |||
| 4f41c5bacf | |||
| 07583f0c3b | |||
| 0ae912082c | |||
| 094f4d4298 | |||
|
|
9471b1d8ab | ||
|
|
57100366d8 | ||
| 71e0973020 | |||
|
|
12ab5c2f66 | ||
| a804552620 | |||
| 0319fa6076 | |||
| 91dfc51d54 | |||
| 4fd861f080 | |||
|
|
28de7a4eac | ||
|
|
a31e79b0f7 | ||
|
eb2f71aee0
|
3
.gitignore
vendored
3
.gitignore
vendored
@@ -11,3 +11,6 @@ build/
|
|||||||
**/*sqlite
|
**/*sqlite
|
||||||
**/.nyc_output
|
**/.nyc_output
|
||||||
**/coverage
|
**/coverage
|
||||||
|
**/.venv
|
||||||
|
.idea
|
||||||
|
**/.vim
|
||||||
|
|||||||
@@ -2,4 +2,5 @@
|
|||||||
omit =
|
omit =
|
||||||
.venv/*
|
.venv/*
|
||||||
scripts/*
|
scripts/*
|
||||||
cic_cache/db/postgres/*
|
cic_cache/db/migrations/*
|
||||||
|
cic_cache/version.py
|
||||||
|
|||||||
4
apps/cic-cache/.dockerignore
Normal file
4
apps/cic-cache/.dockerignore
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
.git
|
||||||
|
.cache
|
||||||
|
.dot
|
||||||
|
**/doc
|
||||||
@@ -1,22 +1,52 @@
|
|||||||
.cic_cache_variables:
|
.cic_cache_variables:
|
||||||
variables:
|
variables:
|
||||||
APP_NAME: cic-cache
|
APP_NAME: cic-cache
|
||||||
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
|
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||||
|
CONTEXT: apps/$APP_NAME
|
||||||
.cic_cache_changes_target:
|
|
||||||
rules:
|
|
||||||
- changes:
|
|
||||||
- $CONTEXT/$APP_NAME/*
|
|
||||||
|
|
||||||
build-mr-cic-cache:
|
build-mr-cic-cache:
|
||||||
extends:
|
extends:
|
||||||
- .cic_cache_changes_target
|
|
||||||
- .py_build_merge_request
|
- .py_build_merge_request
|
||||||
- .cic_cache_variables
|
- .cic_cache_variables
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/cic-cache/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
|
test-mr-cic-cache:
|
||||||
|
stage: test
|
||||||
|
extends:
|
||||||
|
- .cic_cache_variables
|
||||||
|
cache:
|
||||||
|
key:
|
||||||
|
files:
|
||||||
|
- test_requirements.txt
|
||||||
|
paths:
|
||||||
|
- /root/.cache/pip
|
||||||
|
image: $MR_IMAGE_TAG
|
||||||
|
script:
|
||||||
|
- cd apps/$APP_NAME/
|
||||||
|
- >
|
||||||
|
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
|
||||||
|
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
|
||||||
|
-r test_requirements.txt
|
||||||
|
- export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
|
||||||
|
needs: ["build-mr-cic-cache"]
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/$APP_NAME/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
build-push-cic-cache:
|
build-push-cic-cache:
|
||||||
extends:
|
extends:
|
||||||
- .py_build_push
|
- .py_build_push
|
||||||
- .cic_cache_variables
|
- .cic_cache_variables
|
||||||
|
rules:
|
||||||
|
- if: $CI_COMMIT_BRANCH == "master"
|
||||||
|
changes:
|
||||||
|
- apps/cic-cache/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -15,6 +15,8 @@ from cic_cache.db.list import (
|
|||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_FILTER_SIZE = 8192 * 8
|
||||||
|
|
||||||
class Cache:
|
class Cache:
|
||||||
|
|
||||||
def __init__(self, session):
|
def __init__(self, session):
|
||||||
@@ -25,7 +27,7 @@ class BloomCache(Cache):
|
|||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def __get_filter_size(n):
|
def __get_filter_size(n):
|
||||||
n = 8192 * 8
|
n = DEFAULT_FILTER_SIZE
|
||||||
logg.warning('filter size hardcoded to {}'.format(n))
|
logg.warning('filter size hardcoded to {}'.format(n))
|
||||||
return n
|
return n
|
||||||
|
|
||||||
|
|||||||
@@ -4,6 +4,9 @@ import json
|
|||||||
import re
|
import re
|
||||||
import base64
|
import base64
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
from hexathon import add_0x
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_cache.cache import (
|
from cic_cache.cache import (
|
||||||
BloomCache,
|
BloomCache,
|
||||||
@@ -11,10 +14,11 @@ from cic_cache.cache import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger(__name__)
|
||||||
|
#logg = logging.getLogger()
|
||||||
|
|
||||||
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
|
re_transactions_all_bloom = r'/tx/(\d+)?/?(\d+)/?'
|
||||||
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)/?(\d+)?/?(\d+)/?'
|
re_transactions_account_bloom = r'/tx/user/((0x)?[a-fA-F0-9]+)(/(\d+)(/(\d+))?)?/?'
|
||||||
re_transactions_all_data = r'/txa/(\d+)/(\d+)/?'
|
re_transactions_all_data = r'/txa/(\d+)?/?(\d+)/?'
|
||||||
|
|
||||||
DEFAULT_LIMIT = 100
|
DEFAULT_LIMIT = 100
|
||||||
|
|
||||||
@@ -26,13 +30,13 @@ def process_transactions_account_bloom(session, env):
|
|||||||
|
|
||||||
address = r[1]
|
address = r[1]
|
||||||
if r[2] == None:
|
if r[2] == None:
|
||||||
address = '0x' + address
|
address = add_0x(address)
|
||||||
offset = DEFAULT_LIMIT
|
offset = 0
|
||||||
if r.lastindex > 2:
|
if r.lastindex > 2:
|
||||||
offset = r[3]
|
offset = r[4]
|
||||||
limit = 0
|
limit = DEFAULT_LIMIT
|
||||||
if r.lastindex > 3:
|
if r.lastindex > 4:
|
||||||
limit = r[4]
|
limit = r[6]
|
||||||
|
|
||||||
c = BloomCache(session)
|
c = BloomCache(session)
|
||||||
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
|
(lowest_block, highest_block, bloom_filter_block, bloom_filter_tx) = c.load_transactions_account(address, offset, limit)
|
||||||
|
|||||||
@@ -16,6 +16,7 @@ import cic_base.config
|
|||||||
import cic_base.log
|
import cic_base.log
|
||||||
import cic_base.argparse
|
import cic_base.argparse
|
||||||
import cic_base.rpc
|
import cic_base.rpc
|
||||||
|
from cic_base.eth.syncer import chain_interface
|
||||||
from cic_eth_registry import CICRegistry
|
from cic_eth_registry import CICRegistry
|
||||||
from cic_eth_registry.error import UnknownContractError
|
from cic_eth_registry.error import UnknownContractError
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
@@ -28,10 +29,8 @@ from hexathon import (
|
|||||||
strip_0x,
|
strip_0x,
|
||||||
)
|
)
|
||||||
from chainsyncer.backend.sql import SQLBackend
|
from chainsyncer.backend.sql import SQLBackend
|
||||||
from chainsyncer.driver import (
|
from chainsyncer.driver.head import HeadSyncer
|
||||||
HeadSyncer,
|
from chainsyncer.driver.history import HistorySyncer
|
||||||
HistorySyncer,
|
|
||||||
)
|
|
||||||
from chainsyncer.db.models.base import SessionBase
|
from chainsyncer.db.models.base import SessionBase
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
@@ -113,10 +112,10 @@ def main():
|
|||||||
logg.info('resuming sync session {}'.format(syncer_backend))
|
logg.info('resuming sync session {}'.format(syncer_backend))
|
||||||
|
|
||||||
for syncer_backend in syncer_backends:
|
for syncer_backend in syncer_backends:
|
||||||
syncers.append(HistorySyncer(syncer_backend))
|
syncers.append(HistorySyncer(syncer_backend, chain_interface))
|
||||||
|
|
||||||
syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
|
syncer_backend = SQLBackend.live(chain_spec, block_offset+1)
|
||||||
syncers.append(HeadSyncer(syncer_backend))
|
syncers.append(HeadSyncer(syncer_backend, chain_interface))
|
||||||
|
|
||||||
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
||||||
if trusted_addresses_src == None:
|
if trusted_addresses_src == None:
|
||||||
|
|||||||
@@ -1,52 +1,38 @@
|
|||||||
FROM python:3.8.6-slim-buster
|
# syntax = docker/dockerfile:1.2
|
||||||
|
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
|
# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
|
||||||
|
|
||||||
#COPY --from=0 /usr/local/share/cic/solidity/ /usr/local/share/cic/solidity/
|
COPY requirements.txt .
|
||||||
|
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
|
||||||
|
#RUN pip install $pip_extra_index_url_flag .
|
||||||
|
#RUN pip install .[server]
|
||||||
|
|
||||||
WORKDIR /usr/src/cic-cache
|
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
|
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||||
|
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||||
|
pip install --index-url https://pypi.org/simple \
|
||||||
|
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||||
|
-r requirements.txt
|
||||||
|
|
||||||
ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433'
|
COPY . .
|
||||||
ARG root_requirement_file='requirements.txt'
|
|
||||||
|
|
||||||
#RUN apk update && \
|
RUN python setup.py install
|
||||||
# apk add gcc musl-dev gnupg libpq
|
|
||||||
#RUN apk add postgresql-dev
|
|
||||||
#RUN apk add linux-headers
|
|
||||||
#RUN apk add libffi-dev
|
|
||||||
RUN apt-get update && \
|
|
||||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
|
|
||||||
|
|
||||||
# Copy shared requirements from top of mono-repo
|
|
||||||
RUN echo "copying root req file ${root_requirement_file}"
|
|
||||||
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
|
|
||||||
|
|
||||||
COPY cic-cache/requirements.txt ./
|
|
||||||
COPY cic-cache/setup.cfg \
|
|
||||||
cic-cache/setup.py \
|
|
||||||
./
|
|
||||||
COPY cic-cache/cic_cache/ ./cic_cache/
|
|
||||||
COPY cic-cache/scripts/ ./scripts/
|
|
||||||
COPY cic-cache/test_requirements.txt ./
|
|
||||||
RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
|
|
||||||
RUN pip install $pip_extra_index_url_flag .
|
|
||||||
RUN pip install .[server]
|
|
||||||
|
|
||||||
COPY cic-cache/tests/ ./tests/
|
|
||||||
#COPY db/ cic-cache/db
|
|
||||||
#RUN apk add postgresql-client
|
|
||||||
|
|
||||||
# ini files in config directory defines the configurable parameters for the application
|
# ini files in config directory defines the configurable parameters for the application
|
||||||
# they can all be overridden by environment variables
|
# they can all be overridden by environment variables
|
||||||
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
COPY cic-cache/config/ /usr/local/etc/cic-cache/
|
COPY config/ /usr/local/etc/cic-cache/
|
||||||
|
|
||||||
# for db migrations
|
# for db migrations
|
||||||
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
|
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
|
||||||
COPY cic-cache/cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
|
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
|
||||||
|
|
||||||
COPY cic-cache/docker/start_tracker.sh ./start_tracker.sh
|
COPY /docker/start_tracker.sh ./start_tracker.sh
|
||||||
COPY cic-cache/docker/db.sh ./db.sh
|
COPY /docker/db.sh ./db.sh
|
||||||
RUN chmod 755 ./*.sh
|
RUN chmod 755 ./*.sh
|
||||||
# Tracker
|
# Tracker
|
||||||
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
|
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
|
||||||
# Server
|
# Server
|
||||||
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
|
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
|
||||||
|
ENTRYPOINT []
|
||||||
|
|||||||
37
apps/cic-cache/docker/Dockerfile_ci
Normal file
37
apps/cic-cache/docker/Dockerfile_ci
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
# syntax = docker/dockerfile:1.2
|
||||||
|
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
|
# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9
|
||||||
|
|
||||||
|
COPY requirements.txt .
|
||||||
|
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
|
||||||
|
#RUN pip install $pip_extra_index_url_flag .
|
||||||
|
#RUN pip install .[server]
|
||||||
|
|
||||||
|
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
|
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||||
|
RUN pip install --index-url https://pypi.org/simple \
|
||||||
|
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||||
|
-r requirements.txt
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
RUN python setup.py install
|
||||||
|
|
||||||
|
# ini files in config directory defines the configurable parameters for the application
|
||||||
|
# they can all be overridden by environment variables
|
||||||
|
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
|
COPY config/ /usr/local/etc/cic-cache/
|
||||||
|
|
||||||
|
# for db migrations
|
||||||
|
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
|
||||||
|
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/
|
||||||
|
|
||||||
|
COPY /docker/start_tracker.sh ./start_tracker.sh
|
||||||
|
COPY /docker/db.sh ./db.sh
|
||||||
|
RUN chmod 755 ./*.sh
|
||||||
|
# Tracker
|
||||||
|
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
|
||||||
|
# Server
|
||||||
|
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
|
||||||
|
ENTRYPOINT []
|
||||||
@@ -1,12 +1,13 @@
|
|||||||
cic-base~=0.1.2b10
|
cic-base==0.1.3a3+build.984b5cff
|
||||||
alembic==1.4.2
|
alembic==1.4.2
|
||||||
confini~=0.3.6rc3
|
confini~=0.3.6rc3
|
||||||
uwsgi==2.0.19.1
|
uwsgi==2.0.19.1
|
||||||
moolb~=0.1.0
|
moolb~=0.1.0
|
||||||
cic-eth-registry~=0.5.5a4
|
cic-eth-registry~=0.5.6a1
|
||||||
SQLAlchemy==1.3.20
|
SQLAlchemy==1.3.20
|
||||||
semver==2.13.0
|
semver==2.13.0
|
||||||
psycopg2==2.8.6
|
psycopg2==2.8.6
|
||||||
celery==4.4.7
|
celery==4.4.7
|
||||||
redis==3.5.3
|
redis==3.5.3
|
||||||
chainsyncer[sql]~=0.0.2a4
|
chainsyncer[sql]~=0.0.3a3
|
||||||
|
erc20-faucet~=0.2.2a1
|
||||||
|
|||||||
@@ -2,6 +2,7 @@
|
|||||||
import os
|
import os
|
||||||
import argparse
|
import argparse
|
||||||
import logging
|
import logging
|
||||||
|
import re
|
||||||
|
|
||||||
import alembic
|
import alembic
|
||||||
from alembic.config import Config as AlembicConfig
|
from alembic.config import Config as AlembicConfig
|
||||||
@@ -23,6 +24,8 @@ argparser = argparse.ArgumentParser()
|
|||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
||||||
|
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
|
||||||
|
argparser.add_argument('-f', action='store_true', help='force action')
|
||||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||||
args = argparser.parse_args()
|
args = argparser.parse_args()
|
||||||
@@ -53,4 +56,10 @@ ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
|
|||||||
ac.set_main_option('sqlalchemy.url', dsn)
|
ac.set_main_option('sqlalchemy.url', dsn)
|
||||||
ac.set_main_option('script_location', migrations_dir)
|
ac.set_main_option('script_location', migrations_dir)
|
||||||
|
|
||||||
|
if args.reset:
|
||||||
|
if not args.f:
|
||||||
|
if not re.match(r'[yY][eE]?[sS]?', input('EEK! this will DELETE the existing db. are you sure??')):
|
||||||
|
logg.error('user chickened out on requested reset, bailing')
|
||||||
|
sys.exit(1)
|
||||||
|
alembic.command.downgrade(ac, 'base')
|
||||||
alembic.command.upgrade(ac, 'head')
|
alembic.command.upgrade(ac, 'head')
|
||||||
|
|||||||
@@ -6,6 +6,5 @@ sqlparse==0.4.1
|
|||||||
pytest-celery==0.0.0a1
|
pytest-celery==0.0.0a1
|
||||||
eth_tester==0.5.0b3
|
eth_tester==0.5.0b3
|
||||||
py-evm==0.3.0a20
|
py-evm==0.3.0a20
|
||||||
web3==5.12.2
|
cic_base[full]==0.1.3a3+build.984b5cff
|
||||||
cic-eth-registry~=0.5.5a3
|
sarafu-faucet~=0.0.4a1
|
||||||
cic-base[full]==0.1.2b8
|
|
||||||
|
|||||||
@@ -5,9 +5,12 @@ import datetime
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
|
import moolb
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_cache import db
|
from cic_cache import db
|
||||||
|
from cic_cache import BloomCache
|
||||||
|
from cic_cache.cache import DEFAULT_FILTER_SIZE
|
||||||
|
|
||||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||||
root_dir = os.path.dirname(script_dir)
|
root_dir = os.path.dirname(script_dir)
|
||||||
@@ -101,3 +104,7 @@ def tag_txs(
|
|||||||
|
|
||||||
db.tag_transaction(init_database, txs[1], 'taag', domain='test')
|
db.tag_transaction(init_database, txs[1], 'taag', domain='test')
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='session')
|
||||||
|
def zero_filter():
|
||||||
|
return moolb.Bloom(DEFAULT_FILTER_SIZE, 3)
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ from sqlalchemy import text
|
|||||||
from chainlib.eth.tx import Tx
|
from chainlib.eth.tx import Tx
|
||||||
from chainlib.eth.block import Block
|
from chainlib.eth.block import Block
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
|
from chainlib.eth.error import RequestMismatchException
|
||||||
from hexathon import (
|
from hexathon import (
|
||||||
strip_0x,
|
strip_0x,
|
||||||
add_0x,
|
add_0x,
|
||||||
@@ -18,10 +19,21 @@ from hexathon import (
|
|||||||
# local imports
|
# local imports
|
||||||
from cic_cache.db import add_tag
|
from cic_cache.db import add_tag
|
||||||
from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
|
from cic_cache.runnable.daemons.filters.erc20 import ERC20TransferFilter
|
||||||
|
from cic_cache.runnable.daemons.filters.base import TagSyncFilter
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def test_base_filter_str(
|
||||||
|
init_database,
|
||||||
|
):
|
||||||
|
f = TagSyncFilter('foo')
|
||||||
|
assert 'foo' == str(f)
|
||||||
|
f = TagSyncFilter('foo', domain='bar')
|
||||||
|
assert 'bar.foo' == str(f)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def test_erc20_filter(
|
def test_erc20_filter(
|
||||||
eth_rpc,
|
eth_rpc,
|
||||||
foo_token,
|
foo_token,
|
||||||
@@ -67,3 +79,95 @@ def test_erc20_filter(
|
|||||||
s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
|
s = text("SELECT x.tx_hash FROM tag a INNER JOIN tag_tx_link l ON l.tag_id = a.id INNER JOIN tx x ON x.id = l.tx_id WHERE a.domain = :a AND a.value = :b")
|
||||||
r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
|
r = init_database.execute(s, {'a': fltr.tag_domain, 'b': fltr.tag_name}).fetchone()
|
||||||
assert r[0] == tx.hash
|
assert r[0] == tx.hash
|
||||||
|
|
||||||
|
|
||||||
|
def test_erc20_filter_nocontract(
|
||||||
|
eth_rpc,
|
||||||
|
foo_token,
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
tags,
|
||||||
|
):
|
||||||
|
|
||||||
|
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
|
||||||
|
|
||||||
|
fltr = ERC20TransferFilter(chain_spec)
|
||||||
|
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
|
||||||
|
|
||||||
|
# incomplete args
|
||||||
|
data = 'a9059cbb'
|
||||||
|
data += strip_0x(list_actors['alice'])
|
||||||
|
data += '1000'.ljust(64, '0')
|
||||||
|
block = Block({
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'number': 42,
|
||||||
|
'timestamp': datetime.datetime.utcnow().timestamp(),
|
||||||
|
'transactions': [],
|
||||||
|
})
|
||||||
|
|
||||||
|
tx = Tx({
|
||||||
|
'to': os.urandom(20).hex(),
|
||||||
|
'from': list_actors['bob'],
|
||||||
|
'data': data,
|
||||||
|
'value': 0,
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'nonce': 13,
|
||||||
|
'gasPrice': 10000000,
|
||||||
|
'gas': 123456,
|
||||||
|
})
|
||||||
|
block.txs.append(tx)
|
||||||
|
tx.block = block
|
||||||
|
|
||||||
|
assert not fltr.filter(eth_rpc, block, tx, db_session=init_database)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'contract_method,contract_input,expected_exception',
|
||||||
|
[
|
||||||
|
('a9059cbb', os.urandom(32).hex(), ValueError), # not enough args
|
||||||
|
('a9059cbb', os.urandom(31).hex(), ValueError), # wrong arg boundary
|
||||||
|
('a9059cbc', os.urandom(64).hex(), RequestMismatchException), # wrong method
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_erc20_filter_bogus(
|
||||||
|
eth_rpc,
|
||||||
|
foo_token,
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
tags,
|
||||||
|
contract_method,
|
||||||
|
contract_input,
|
||||||
|
expected_exception,
|
||||||
|
):
|
||||||
|
|
||||||
|
chain_spec = ChainSpec('foo', 'bar', 42, 'baz')
|
||||||
|
|
||||||
|
fltr = ERC20TransferFilter(chain_spec)
|
||||||
|
add_tag(init_database, fltr.tag_name, domain=fltr.tag_domain)
|
||||||
|
|
||||||
|
# incomplete args
|
||||||
|
data = contract_method
|
||||||
|
data += contract_input
|
||||||
|
block = Block({
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'number': 42,
|
||||||
|
'timestamp': datetime.datetime.utcnow().timestamp(),
|
||||||
|
'transactions': [],
|
||||||
|
})
|
||||||
|
|
||||||
|
tx = Tx({
|
||||||
|
'to': foo_token,
|
||||||
|
'from': list_actors['bob'],
|
||||||
|
'data': data,
|
||||||
|
'value': 0,
|
||||||
|
'hash': os.urandom(32).hex(),
|
||||||
|
'nonce': 13,
|
||||||
|
'gasPrice': 10000000,
|
||||||
|
'gas': 123456,
|
||||||
|
})
|
||||||
|
block.txs.append(tx)
|
||||||
|
tx.block = block
|
||||||
|
|
||||||
|
assert not fltr.filter(eth_rpc, block, tx, db_session=init_database)
|
||||||
|
|||||||
230
apps/cic-cache/tests/test_query.py
Normal file
230
apps/cic-cache/tests/test_query.py
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import json
|
||||||
|
import base64
|
||||||
|
import copy
|
||||||
|
import re
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import pytest
|
||||||
|
from hexathon import strip_0x
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from cic_cache.runnable.daemons.query import *
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'query_path_prefix, query_role, query_address_index, query_offset, query_offset_index, query_limit, query_limit_index, match_re',
|
||||||
|
[
|
||||||
|
('/tx/user/', 'alice', 0, None, 3, None, 5, re_transactions_account_bloom),
|
||||||
|
('/tx/user/', 'alice', 0, 42, 3, None, 5, re_transactions_account_bloom),
|
||||||
|
('/tx/user/', 'alice', 0, 42, 3, 13, 5, re_transactions_account_bloom),
|
||||||
|
('/tx/', None, 0, None, 3, None, 5, re_transactions_all_bloom),
|
||||||
|
('/tx/', None, 0, 42, 3, None, 5, re_transactions_all_bloom),
|
||||||
|
('/tx/', None, 0, 42, 3, 13, 5, re_transactions_all_bloom),
|
||||||
|
('/txa/', None, 0, None, 3, None, 5, re_transactions_all_data),
|
||||||
|
('/txa/', None, 0, 42, 3, None, 5, re_transactions_all_data),
|
||||||
|
('/txa/', None, 0, 42, 3, 13, 5, re_transactions_all_data),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_query_regex(
|
||||||
|
list_actors,
|
||||||
|
query_path_prefix,
|
||||||
|
query_role,
|
||||||
|
query_address_index,
|
||||||
|
query_offset,
|
||||||
|
query_offset_index,
|
||||||
|
query_limit,
|
||||||
|
query_limit_index,
|
||||||
|
match_re,
|
||||||
|
):
|
||||||
|
|
||||||
|
paths = []
|
||||||
|
path = query_path_prefix
|
||||||
|
query_address = None
|
||||||
|
if query_role != None:
|
||||||
|
query_address = strip_0x(list_actors[query_role])
|
||||||
|
paths.append(path + '0x' + query_address)
|
||||||
|
paths.append(path + query_address)
|
||||||
|
if query_offset != None:
|
||||||
|
if query_limit != None:
|
||||||
|
for i in range(len(paths)-1):
|
||||||
|
paths[i] += '/{}/{}'.format(query_offset, query_limit)
|
||||||
|
else:
|
||||||
|
for i in range(len(paths)-1):
|
||||||
|
paths[i] += '/' + str(query_offset)
|
||||||
|
|
||||||
|
for i in range(len(paths)):
|
||||||
|
paths.append(paths[i] + '/')
|
||||||
|
|
||||||
|
for p in paths:
|
||||||
|
logg.debug('testing path {} against {}'.format(p, match_re))
|
||||||
|
m = re.match(match_re, p)
|
||||||
|
l = len(m.groups())
|
||||||
|
logg.debug('laast index match {} groups {}'.format(m.lastindex, l))
|
||||||
|
for i in range(l+1):
|
||||||
|
logg.debug('group {} {}'.format(i, m[i]))
|
||||||
|
if m.lastindex >= query_offset_index:
|
||||||
|
assert query_offset == int(m[query_offset_index + 1])
|
||||||
|
if m.lastindex >= query_limit_index:
|
||||||
|
assert query_limit == int(m[query_limit_index + 1])
|
||||||
|
if query_address_index != None:
|
||||||
|
match_address = strip_0x(m[query_address_index + 1])
|
||||||
|
assert query_address == match_address
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'role_name, query_offset, query_limit, query_match',
|
||||||
|
[
|
||||||
|
('alice', None, None, [(420000, 13), (419999, 42)]),
|
||||||
|
('alice', None, 1, [(420000, 13)]),
|
||||||
|
('alice', 1, None, [(419999, 42)]), # 420000 == list_defaults['block']
|
||||||
|
('alice', 2, None, []), # 420000 == list_defaults['block']
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_query_process_txs_account(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
txs,
|
||||||
|
zero_filter,
|
||||||
|
role_name,
|
||||||
|
query_offset,
|
||||||
|
query_limit,
|
||||||
|
query_match,
|
||||||
|
):
|
||||||
|
|
||||||
|
actor = None
|
||||||
|
try:
|
||||||
|
actor = list_actors[role_name]
|
||||||
|
except KeyError:
|
||||||
|
actor = os.urandom(20).hex()
|
||||||
|
path_info = '/tx/user/0x' + strip_0x(actor)
|
||||||
|
if query_offset != None:
|
||||||
|
path_info += '/' + str(query_offset)
|
||||||
|
if query_limit != None:
|
||||||
|
if query_offset == None:
|
||||||
|
path_info += '/0'
|
||||||
|
path_info += '/' + str(query_limit)
|
||||||
|
env = {
|
||||||
|
'PATH_INFO': path_info,
|
||||||
|
}
|
||||||
|
logg.debug('using path {}'.format(path_info))
|
||||||
|
r = process_transactions_account_bloom(init_database, env)
|
||||||
|
assert r != None
|
||||||
|
|
||||||
|
o = json.loads(r[1])
|
||||||
|
block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
|
||||||
|
zero_filter_data = zero_filter.to_bytes()
|
||||||
|
if len(query_match) == 0:
|
||||||
|
assert block_filter_data == zero_filter_data
|
||||||
|
return
|
||||||
|
|
||||||
|
assert block_filter_data != zero_filter_data
|
||||||
|
block_filter = copy.copy(zero_filter)
|
||||||
|
block_filter.merge(block_filter_data)
|
||||||
|
block_filter_data = block_filter.to_bytes()
|
||||||
|
assert block_filter_data != zero_filter_data
|
||||||
|
|
||||||
|
for (block, tx) in query_match:
|
||||||
|
block = block.to_bytes(4, byteorder='big')
|
||||||
|
assert block_filter.check(block)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'query_offset, query_limit, query_match',
|
||||||
|
[
|
||||||
|
(None, 2, [(420000, 13), (419999, 42)]),
|
||||||
|
(0, 1, [(420000, 13)]),
|
||||||
|
(1, 1, [(419999, 42)]),
|
||||||
|
(2, 0, []),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_query_process_txs_bloom(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
txs,
|
||||||
|
zero_filter,
|
||||||
|
query_offset,
|
||||||
|
query_limit,
|
||||||
|
query_match,
|
||||||
|
):
|
||||||
|
|
||||||
|
path_info = '/tx'
|
||||||
|
if query_offset != None:
|
||||||
|
path_info += '/' + str(query_offset)
|
||||||
|
if query_limit != None:
|
||||||
|
if query_offset == None:
|
||||||
|
path_info += '/0'
|
||||||
|
path_info += '/' + str(query_limit)
|
||||||
|
env = {
|
||||||
|
'PATH_INFO': path_info,
|
||||||
|
}
|
||||||
|
logg.debug('using path {}'.format(path_info))
|
||||||
|
r = process_transactions_all_bloom(init_database, env)
|
||||||
|
assert r != None
|
||||||
|
|
||||||
|
o = json.loads(r[1])
|
||||||
|
block_filter_data = base64.b64decode(o['block_filter'].encode('utf-8'))
|
||||||
|
zero_filter_data = zero_filter.to_bytes()
|
||||||
|
if len(query_match) == 0:
|
||||||
|
assert block_filter_data == zero_filter_data
|
||||||
|
return
|
||||||
|
|
||||||
|
assert block_filter_data != zero_filter_data
|
||||||
|
block_filter = copy.copy(zero_filter)
|
||||||
|
block_filter.merge(block_filter_data)
|
||||||
|
block_filter_data = block_filter.to_bytes()
|
||||||
|
assert block_filter_data != zero_filter_data
|
||||||
|
|
||||||
|
for (block, tx) in query_match:
|
||||||
|
block = block.to_bytes(4, byteorder='big')
|
||||||
|
assert block_filter.check(block)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
'query_block_start, query_block_end, query_match_count',
|
||||||
|
[
|
||||||
|
(None, 42, 0),
|
||||||
|
(420000, 420001, 1),
|
||||||
|
(419999, 419999, 1), # matches are inclusive
|
||||||
|
(419999, 420000, 2),
|
||||||
|
(419999, 420001, 2),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_query_process_txs_data(
|
||||||
|
init_database,
|
||||||
|
list_defaults,
|
||||||
|
list_actors,
|
||||||
|
list_tokens,
|
||||||
|
txs,
|
||||||
|
zero_filter,
|
||||||
|
query_block_start,
|
||||||
|
query_block_end,
|
||||||
|
query_match_count,
|
||||||
|
):
|
||||||
|
|
||||||
|
path_info = '/txa'
|
||||||
|
if query_block_start != None:
|
||||||
|
path_info += '/' + str(query_block_start)
|
||||||
|
if query_block_end != None:
|
||||||
|
if query_block_start == None:
|
||||||
|
path_info += '/0'
|
||||||
|
path_info += '/' + str(query_block_end)
|
||||||
|
env = {
|
||||||
|
'PATH_INFO': path_info,
|
||||||
|
'HTTP_X_CIC_CACHE_MODE': 'all',
|
||||||
|
}
|
||||||
|
logg.debug('using path {}'.format(path_info))
|
||||||
|
r = process_transactions_all_data(init_database, env)
|
||||||
|
assert r != None
|
||||||
|
|
||||||
|
o = json.loads(r[1])
|
||||||
|
assert len(o['data']) == query_match_count
|
||||||
1
apps/cic-eth-aux/erc20-demurrage-token/MANIFEST.in
Normal file
1
apps/cic-eth-aux/erc20-demurrage-token/MANIFEST.in
Normal file
@@ -0,0 +1 @@
|
|||||||
|
include *requirements.txt
|
||||||
@@ -0,0 +1,53 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from erc20_demurrage_token.demurrage import DemurrageCalculator
|
||||||
|
from chainlib.connection import RPCConnection
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
|
from cic_eth_registry import CICRegistry
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
celery_app = celery.current_app
|
||||||
|
|
||||||
|
|
||||||
|
class NoopCalculator:
|
||||||
|
|
||||||
|
def amount_since(self, amount, timestamp):
|
||||||
|
logg.debug('noopcalculator amount {} timestamp {}'.format(amount, timestamp))
|
||||||
|
return amount
|
||||||
|
|
||||||
|
|
||||||
|
class DemurrageCalculationTask(celery.Task):
|
||||||
|
|
||||||
|
demurrage_token_calcs = {}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def register_token(cls, rpc, chain_spec, token_symbol, sender_address=ZERO_ADDRESS):
|
||||||
|
registry = CICRegistry(chain_spec, rpc)
|
||||||
|
token_address = registry.by_name(token_symbol, sender_address=sender_address)
|
||||||
|
try:
|
||||||
|
c = DemurrageCalculator.from_contract(rpc, chain_spec, token_address, sender_address=sender_address)
|
||||||
|
logg.info('found demurrage calculator for ERC20 {} @ {}'.format(token_symbol, token_address))
|
||||||
|
except:
|
||||||
|
logg.warning('Token {} at address {} does not appear to be a demurrage contract. Calls to balance adjust for this token will always return the same amount'.format(token_symbol, token_address))
|
||||||
|
c = NoopCalculator()
|
||||||
|
|
||||||
|
cls.demurrage_token_calcs[token_symbol] = c
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=DemurrageCalculationTask)
|
||||||
|
def get_adjusted_balance(self, token_symbol, amount, timestamp):
|
||||||
|
c = self.demurrage_token_calcs[token_symbol]
|
||||||
|
return c.amount_since(amount, timestamp)
|
||||||
|
|
||||||
|
|
||||||
|
def aux_setup(rpc, config, sender_address=ZERO_ADDRESS):
|
||||||
|
chain_spec_str = config.get('CIC_CHAIN_SPEC')
|
||||||
|
chain_spec = ChainSpec.from_chain_str(chain_spec_str)
|
||||||
|
token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
|
||||||
|
|
||||||
|
DemurrageCalculationTask.register_token(rpc, chain_spec, token_symbol, sender_address=sender_address)
|
||||||
@@ -0,0 +1,30 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from cic_eth.api.base import ApiBase
|
||||||
|
|
||||||
|
app = celery.current_app
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class Api(ApiBase):
|
||||||
|
|
||||||
|
def get_adjusted_balance(self, token_symbol, balance, timestamp):
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_eth_aux.erc20_demurrage_token.get_adjusted_balance',
|
||||||
|
[
|
||||||
|
token_symbol,
|
||||||
|
balance,
|
||||||
|
timestamp,
|
||||||
|
],
|
||||||
|
queue=None,
|
||||||
|
)
|
||||||
|
if self.callback_param != None:
|
||||||
|
s.link(self.callback_success)
|
||||||
|
s.link.on_error(self.callback_error)
|
||||||
|
|
||||||
|
t = s.apply_async(queue=self.queue)
|
||||||
|
return t
|
||||||
5
apps/cic-eth-aux/erc20-demurrage-token/requirements.txt
Normal file
5
apps/cic-eth-aux/erc20-demurrage-token/requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
celery==4.4.7
|
||||||
|
erc20-demurrage-token~=0.0.2a3
|
||||||
|
cic-eth-registry~=0.5.6a1
|
||||||
|
chainlib~=0.0.5a1
|
||||||
|
cic_eth~=0.12.0a2
|
||||||
30
apps/cic-eth-aux/erc20-demurrage-token/setup.cfg
Normal file
30
apps/cic-eth-aux/erc20-demurrage-token/setup.cfg
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
[metadata]
|
||||||
|
name = cic-eth-aux-erc20-demurrage-token
|
||||||
|
version = 0.0.2a4
|
||||||
|
description = cic-eth tasks supporting erc20 demurrage token
|
||||||
|
author = Louis Holbrook
|
||||||
|
author_email = dev@holbrook.no
|
||||||
|
url = https://gitlab.com/ccicnet/erc20-demurrage-token
|
||||||
|
keywords =
|
||||||
|
ethereum
|
||||||
|
blockchain
|
||||||
|
cryptocurrency
|
||||||
|
erc20
|
||||||
|
classifiers =
|
||||||
|
Programming Language :: Python :: 3
|
||||||
|
Operating System :: OS Independent
|
||||||
|
Development Status :: 3 - Alpha
|
||||||
|
Environment :: No Input/Output (Daemon)
|
||||||
|
Intended Audience :: Developers
|
||||||
|
License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
|
||||||
|
Topic :: Internet
|
||||||
|
#Topic :: Blockchain :: EVM
|
||||||
|
license = GPL3
|
||||||
|
licence_files =
|
||||||
|
LICENSE
|
||||||
|
|
||||||
|
[options]
|
||||||
|
include_package_data = True
|
||||||
|
python_requires = >= 3.6
|
||||||
|
packages =
|
||||||
|
cic_eth_aux.erc20_demurrage_token
|
||||||
25
apps/cic-eth-aux/erc20-demurrage-token/setup.py
Normal file
25
apps/cic-eth-aux/erc20-demurrage-token/setup.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
from setuptools import setup
|
||||||
|
|
||||||
|
requirements = []
|
||||||
|
f = open('requirements.txt', 'r')
|
||||||
|
while True:
|
||||||
|
l = f.readline()
|
||||||
|
if l == '':
|
||||||
|
break
|
||||||
|
requirements.append(l.rstrip())
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
test_requirements = []
|
||||||
|
f = open('test_requirements.txt', 'r')
|
||||||
|
while True:
|
||||||
|
l = f.readline()
|
||||||
|
if l == '':
|
||||||
|
break
|
||||||
|
test_requirements.append(l.rstrip())
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
|
||||||
|
setup(
|
||||||
|
install_requires=requirements,
|
||||||
|
tests_require=test_requirements,
|
||||||
|
)
|
||||||
12
apps/cic-eth-aux/erc20-demurrage-token/test_requirements.txt
Normal file
12
apps/cic-eth-aux/erc20-demurrage-token/test_requirements.txt
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
pytest==6.0.1
|
||||||
|
pytest-celery==0.0.0a1
|
||||||
|
pytest-mock==3.3.1
|
||||||
|
pytest-cov==2.10.1
|
||||||
|
eth-tester==0.5.0b3
|
||||||
|
py-evm==0.3.0a20
|
||||||
|
SQLAlchemy==1.3.20
|
||||||
|
cic-eth~=0.12.0a1
|
||||||
|
liveness~=0.0.1a7
|
||||||
|
eth-accounts-index==0.0.12a1
|
||||||
|
eth-contract-registry==0.5.6a1
|
||||||
|
eth-address-index==0.1.2a1
|
||||||
88
apps/cic-eth-aux/erc20-demurrage-token/tests/conftest.py
Normal file
88
apps/cic-eth-aux/erc20-demurrage-token/tests/conftest.py
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from chainlib.eth.pytest.fixtures_chain import *
|
||||||
|
from chainlib.eth.pytest.fixtures_ethtester import *
|
||||||
|
from cic_eth_registry.pytest.fixtures_contracts import *
|
||||||
|
from cic_eth_registry.pytest.fixtures_tokens import *
|
||||||
|
from erc20_demurrage_token.unittest.base import TestTokenDeploy
|
||||||
|
from erc20_demurrage_token.token import DemurrageToken
|
||||||
|
from eth_token_index.index import TokenUniqueSymbolIndex
|
||||||
|
from eth_address_declarator.declarator import AddressDeclarator
|
||||||
|
|
||||||
|
# cic-eth imports
|
||||||
|
from cic_eth.pytest.fixtures_celery import *
|
||||||
|
from cic_eth.pytest.fixtures_token import *
|
||||||
|
from cic_eth.pytest.fixtures_config import *
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def demurrage_token(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
token_registry,
|
||||||
|
contract_roles,
|
||||||
|
eth_signer,
|
||||||
|
):
|
||||||
|
d = TestTokenDeploy(eth_rpc, token_symbol='BAR', token_name='Bar Token')
|
||||||
|
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], conn=eth_rpc)
|
||||||
|
c = DemurrageToken(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
token_address = d.deploy(eth_rpc, contract_roles['CONTRACT_DEPLOYER'], c, 'SingleNocap')
|
||||||
|
logg.debug('demurrage token contract "BAR" deployed to {}'.format(token_address))
|
||||||
|
|
||||||
|
return token_address
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def demurrage_token_symbol(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
demurrage_token,
|
||||||
|
contract_roles,
|
||||||
|
):
|
||||||
|
|
||||||
|
c = DemurrageToken(default_chain_spec)
|
||||||
|
o = c.symbol(demurrage_token, sender_address=contract_roles['CONTRACT_DEPLOYER'])
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
return c.parse_symbol(r)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def demurrage_token_declaration(
|
||||||
|
foo_token_declaration,
|
||||||
|
):
|
||||||
|
return foo_token_declaration
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
|
||||||
|
def register_demurrage_token(
|
||||||
|
default_chain_spec,
|
||||||
|
token_registry,
|
||||||
|
eth_rpc,
|
||||||
|
eth_signer,
|
||||||
|
register_lookups,
|
||||||
|
contract_roles,
|
||||||
|
demurrage_token_declaration,
|
||||||
|
demurrage_token,
|
||||||
|
address_declarator,
|
||||||
|
):
|
||||||
|
|
||||||
|
nonce_oracle = RPCNonceOracle(contract_roles['CONTRACT_DEPLOYER'], eth_rpc)
|
||||||
|
|
||||||
|
c = TokenUniqueSymbolIndex(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
(tx_hash_hex, o) = c.register(token_registry, contract_roles['CONTRACT_DEPLOYER'], demurrage_token)
|
||||||
|
eth_rpc.do(o)
|
||||||
|
o = receipt(tx_hash_hex)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
assert r['status'] == 1
|
||||||
|
|
||||||
|
nonce_oracle = RPCNonceOracle(contract_roles['TRUSTED_DECLARATOR'], eth_rpc)
|
||||||
|
c = AddressDeclarator(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
(tx_hash_hex, o) = c.add_declaration(address_declarator, contract_roles['TRUSTED_DECLARATOR'], demurrage_token, demurrage_token_declaration)
|
||||||
|
|
||||||
|
eth_rpc.do(o)
|
||||||
|
o = receipt(tx_hash_hex)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
assert r['status'] == 1
|
||||||
|
|
||||||
|
return token_registry
|
||||||
|
|
||||||
@@ -0,0 +1,69 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
import copy
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
|
||||||
|
# cic-eth imports
|
||||||
|
from cic_eth_aux.erc20_demurrage_token import (
|
||||||
|
DemurrageCalculationTask,
|
||||||
|
aux_setup,
|
||||||
|
)
|
||||||
|
from cic_eth_aux.erc20_demurrage_token.api import Api as AuxApi
|
||||||
|
|
||||||
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def test_demurrage_calulate_task(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
cic_registry,
|
||||||
|
celery_session_worker,
|
||||||
|
register_demurrage_token,
|
||||||
|
demurrage_token_symbol,
|
||||||
|
contract_roles,
|
||||||
|
load_config,
|
||||||
|
):
|
||||||
|
|
||||||
|
config = copy.copy(load_config)
|
||||||
|
config.add(str(default_chain_spec), 'CIC_CHAIN_SPEC', exists_ok=True)
|
||||||
|
config.add(demurrage_token_symbol, 'CIC_DEFAULT_TOKEN_SYMBOL', exists_ok=True)
|
||||||
|
aux_setup(eth_rpc, load_config, sender_address=contract_roles['CONTRACT_DEPLOYER'])
|
||||||
|
|
||||||
|
since = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
|
||||||
|
s = celery.signature(
|
||||||
|
'cic_eth_aux.erc20_demurrage_token.get_adjusted_balance',
|
||||||
|
[
|
||||||
|
demurrage_token_symbol,
|
||||||
|
1000,
|
||||||
|
since.timestamp(),
|
||||||
|
],
|
||||||
|
queue=None,
|
||||||
|
)
|
||||||
|
t = s.apply_async()
|
||||||
|
r = t.get_leaf()
|
||||||
|
assert t.successful()
|
||||||
|
assert r == 980
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
def test_demurrage_calculate_api(
|
||||||
|
default_chain_spec,
|
||||||
|
eth_rpc,
|
||||||
|
cic_registry,
|
||||||
|
celery_session_worker,
|
||||||
|
register_demurrage_token,
|
||||||
|
demurrage_token_symbol,
|
||||||
|
contract_roles,
|
||||||
|
load_config,
|
||||||
|
):
|
||||||
|
|
||||||
|
api = AuxApi(str(default_chain_spec), queue=None)
|
||||||
|
since = datetime.datetime.utcnow() - datetime.timedelta(minutes=1)
|
||||||
|
t = api.get_adjusted_balance(demurrage_token_symbol, 1000, since.timestamp())
|
||||||
|
r = t.get_leaf()
|
||||||
|
assert t.successful()
|
||||||
|
assert r == 980
|
||||||
|
|
||||||
6
apps/cic-eth/.dockerignore
Normal file
6
apps/cic-eth/.dockerignore
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
.git
|
||||||
|
.cache
|
||||||
|
.dot
|
||||||
|
**/doc
|
||||||
|
**/.venv
|
||||||
|
**/venv
|
||||||
@@ -1,33 +1,52 @@
|
|||||||
.cic_eth_variables:
|
.cic_eth_variables:
|
||||||
variables:
|
variables:
|
||||||
APP_NAME: cic-eth
|
APP_NAME: cic-eth
|
||||||
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
|
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||||
|
CONTEXT: apps/$APP_NAME
|
||||||
.cic_eth_changes_target:
|
|
||||||
rules:
|
|
||||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
|
||||||
#changes:
|
|
||||||
#- $CONTEXT/$APP_NAME/**/*
|
|
||||||
when: always
|
|
||||||
|
|
||||||
build-mr-cic-eth:
|
build-mr-cic-eth:
|
||||||
extends:
|
extends:
|
||||||
- .cic_eth_variables
|
- .cic_eth_variables
|
||||||
- .cic_eth_changes_target
|
- .py_build_target_dev
|
||||||
- .py_build_target_test
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/cic-eth/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
test-mr-cic-eth:
|
test-mr-cic-eth:
|
||||||
|
stage: test
|
||||||
extends:
|
extends:
|
||||||
- .cic_eth_variables
|
- .cic_eth_variables
|
||||||
- .cic_eth_changes_target
|
cache:
|
||||||
stage: test
|
key:
|
||||||
image: $CI_REGISTRY_IMAGE/$APP_NAME-test:latest
|
files:
|
||||||
|
- test_requirements.txt
|
||||||
|
paths:
|
||||||
|
- /root/.cache/pip
|
||||||
|
image: $MR_IMAGE_TAG
|
||||||
script:
|
script:
|
||||||
- cd apps/$APP_NAME/
|
- cd apps/$APP_NAME/
|
||||||
- pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
|
- >
|
||||||
|
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
|
||||||
|
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
|
||||||
|
-r admin_requirements.txt
|
||||||
|
-r services_requirements.txt
|
||||||
|
-r test_requirements.txt
|
||||||
|
- export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
|
||||||
needs: ["build-mr-cic-eth"]
|
needs: ["build-mr-cic-eth"]
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/cic-eth/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
build-push-cic-eth:
|
build-push-cic-eth:
|
||||||
extends:
|
extends:
|
||||||
- .py_build_push
|
- .py_build_push
|
||||||
- .cic_eth_variables
|
- .cic_eth_variables
|
||||||
|
rules:
|
||||||
|
- if: $CI_COMMIT_BRANCH == "master"
|
||||||
|
changes:
|
||||||
|
- apps/cic-eth/**/*
|
||||||
|
when: always
|
||||||
|
|||||||
2
apps/cic-eth/MANIFEST.in
Normal file
2
apps/cic-eth/MANIFEST.in
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
include *requirements.txt config/test/*
|
||||||
|
|
||||||
5
apps/cic-eth/admin_requirements.txt
Normal file
5
apps/cic-eth/admin_requirements.txt
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
SQLAlchemy==1.3.20
|
||||||
|
cic-eth-registry~=0.5.6a1
|
||||||
|
hexathon~=0.0.1a7
|
||||||
|
chainqueue~=0.0.2b5
|
||||||
|
eth-erc20==0.0.10a2
|
||||||
@@ -5,4 +5,3 @@
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
from .api_task import Api
|
from .api_task import Api
|
||||||
from .api_admin import AdminApi
|
|
||||||
|
|||||||
@@ -562,13 +562,13 @@ class AdminApi:
|
|||||||
tx['source_token_symbol'] = source_token.symbol
|
tx['source_token_symbol'] = source_token.symbol
|
||||||
o = erc20_c.balance_of(tx['source_token'], tx['sender'], sender_address=self.call_address)
|
o = erc20_c.balance_of(tx['source_token'], tx['sender'], sender_address=self.call_address)
|
||||||
r = self.rpc.do(o)
|
r = self.rpc.do(o)
|
||||||
tx['sender_token_balance'] = erc20_c.parse_balance_of(r)
|
tx['sender_token_balance'] = erc20_c.parse_balance(r)
|
||||||
|
|
||||||
if destination_token != None:
|
if destination_token != None:
|
||||||
tx['destination_token_symbol'] = destination_token.symbol
|
tx['destination_token_symbol'] = destination_token.symbol
|
||||||
o = erc20_c.balance_of(tx['destination_token'], tx['recipient'], sender_address=self.call_address)
|
o = erc20_c.balance_of(tx['destination_token'], tx['recipient'], sender_address=self.call_address)
|
||||||
r = self.rpc.do(o)
|
r = self.rpc.do(o)
|
||||||
tx['recipient_token_balance'] = erc20_c.parse_balance_of(r)
|
tx['recipient_token_balance'] = erc20_c.parse_balance(r)
|
||||||
#tx['recipient_token_balance'] = destination_token.function('balanceOf')(tx['recipient']).call()
|
#tx['recipient_token_balance'] = destination_token.function('balanceOf')(tx['recipient']).call()
|
||||||
|
|
||||||
# TODO: this can mean either not subitted or culled, need to check other txs with same nonce to determine which
|
# TODO: this can mean either not subitted or culled, need to check other txs with same nonce to determine which
|
||||||
@@ -8,59 +8,19 @@ import logging
|
|||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
from cic_eth_registry import CICRegistry
|
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.db.enum import LockEnum
|
from cic_eth.api.base import ApiBase
|
||||||
|
from cic_eth.enum import LockEnum
|
||||||
|
|
||||||
app = celery.current_app
|
app = celery.current_app
|
||||||
|
|
||||||
logg = logging.getLogger(__name__)
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Api:
|
class Api(ApiBase):
|
||||||
"""Creates task chains to perform well-known CIC operations.
|
|
||||||
|
|
||||||
Each method that sends tasks returns details about the root task. The root task uuid can be provided in the callback, to enable to caller to correlate the result with individual calls. It can also be used to independently poll the completion of a task chain.
|
|
||||||
|
|
||||||
:param callback_param: Static value to pass to callback
|
|
||||||
:type callback_param: str
|
|
||||||
:param callback_task: Callback task that executes callback_param call. (Must be included by the celery worker)
|
|
||||||
:type callback_task: string
|
|
||||||
:param queue: Name of worker queue to submit tasks to
|
|
||||||
:type queue: str
|
|
||||||
"""
|
|
||||||
def __init__(self, chain_str, queue='cic-eth', callback_param=None, callback_task='cic_eth.callbacks.noop.noop', callback_queue=None):
|
|
||||||
self.chain_str = chain_str
|
|
||||||
self.chain_spec = ChainSpec.from_chain_str(chain_str)
|
|
||||||
self.callback_param = callback_param
|
|
||||||
self.callback_task = callback_task
|
|
||||||
self.queue = queue
|
|
||||||
logg.debug('api using queue {}'.format(self.queue))
|
|
||||||
self.callback_success = None
|
|
||||||
self.callback_error = None
|
|
||||||
if callback_queue == None:
|
|
||||||
callback_queue=self.queue
|
|
||||||
|
|
||||||
if callback_param != None:
|
|
||||||
self.callback_success = celery.signature(
|
|
||||||
callback_task,
|
|
||||||
[
|
|
||||||
callback_param,
|
|
||||||
0,
|
|
||||||
],
|
|
||||||
queue=callback_queue,
|
|
||||||
)
|
|
||||||
self.callback_error = celery.signature(
|
|
||||||
callback_task,
|
|
||||||
[
|
|
||||||
callback_param,
|
|
||||||
1,
|
|
||||||
],
|
|
||||||
queue=callback_queue,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def default_token(self):
|
def default_token(self):
|
||||||
s_token = celery.signature(
|
s_token = celery.signature(
|
||||||
@@ -204,6 +164,82 @@ class Api:
|
|||||||
# return t
|
# return t
|
||||||
|
|
||||||
|
|
||||||
|
def transfer_from(self, from_address, to_address, value, token_symbol, spender_address):
|
||||||
|
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens by one address on behalf of another address to a third party.
|
||||||
|
|
||||||
|
:param from_address: Ethereum address of sender
|
||||||
|
:type from_address: str, 0x-hex
|
||||||
|
:param to_address: Ethereum address of recipient
|
||||||
|
:type to_address: str, 0x-hex
|
||||||
|
:param value: Estimated return from conversion
|
||||||
|
:type value: int
|
||||||
|
:param token_symbol: ERC20 token symbol of token to send
|
||||||
|
:type token_symbol: str
|
||||||
|
:param spender_address: Ethereum address of recipient
|
||||||
|
:type spender_address: str, 0x-hex
|
||||||
|
:returns: uuid of root task
|
||||||
|
:rtype: celery.Task
|
||||||
|
"""
|
||||||
|
s_check = celery.signature(
|
||||||
|
'cic_eth.admin.ctrl.check_lock',
|
||||||
|
[
|
||||||
|
[token_symbol],
|
||||||
|
self.chain_spec.asdict(),
|
||||||
|
LockEnum.QUEUE,
|
||||||
|
from_address,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_nonce = celery.signature(
|
||||||
|
'cic_eth.eth.nonce.reserve_nonce',
|
||||||
|
[
|
||||||
|
self.chain_spec.asdict(),
|
||||||
|
from_address,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_tokens = celery.signature(
|
||||||
|
'cic_eth.eth.erc20.resolve_tokens_by_symbol',
|
||||||
|
[
|
||||||
|
self.chain_spec.asdict(),
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_allow = celery.signature(
|
||||||
|
'cic_eth.eth.erc20.check_allowance',
|
||||||
|
[
|
||||||
|
from_address,
|
||||||
|
value,
|
||||||
|
self.chain_spec.asdict(),
|
||||||
|
spender_address,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_transfer = celery.signature(
|
||||||
|
'cic_eth.eth.erc20.transfer_from',
|
||||||
|
[
|
||||||
|
from_address,
|
||||||
|
to_address,
|
||||||
|
value,
|
||||||
|
self.chain_spec.asdict(),
|
||||||
|
spender_address,
|
||||||
|
],
|
||||||
|
queue=self.queue,
|
||||||
|
)
|
||||||
|
s_tokens.link(s_allow)
|
||||||
|
s_nonce.link(s_tokens)
|
||||||
|
s_check.link(s_nonce)
|
||||||
|
if self.callback_param != None:
|
||||||
|
s_transfer.link(self.callback_success)
|
||||||
|
s_allow.link(s_transfer).on_error(self.callback_error)
|
||||||
|
else:
|
||||||
|
s_allow.link(s_transfer)
|
||||||
|
|
||||||
|
t = s_check.apply_async(queue=self.queue)
|
||||||
|
return t
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def transfer(self, from_address, to_address, value, token_symbol):
|
def transfer(self, from_address, to_address, value, token_symbol):
|
||||||
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens from one address to another.
|
"""Executes a chain of celery tasks that performs a transfer of ERC20 tokens from one address to another.
|
||||||
|
|
||||||
|
|||||||
52
apps/cic-eth/cic_eth/api/base.py
Normal file
52
apps/cic-eth/cic_eth/api/base.py
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
# standard imports
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
import celery
|
||||||
|
from chainlib.chain import ChainSpec
|
||||||
|
|
||||||
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class ApiBase:
|
||||||
|
"""Creates task chains to perform well-known CIC operations.
|
||||||
|
|
||||||
|
Each method that sends tasks returns details about the root task. The root task uuid can be provided in the callback, to enable to caller to correlate the result with individual calls. It can also be used to independently poll the completion of a task chain.
|
||||||
|
|
||||||
|
:param callback_param: Static value to pass to callback
|
||||||
|
:type callback_param: str
|
||||||
|
:param callback_task: Callback task that executes callback_param call. (Must be included by the celery worker)
|
||||||
|
:type callback_task: string
|
||||||
|
:param queue: Name of worker queue to submit tasks to
|
||||||
|
:type queue: str
|
||||||
|
"""
|
||||||
|
def __init__(self, chain_str, queue='cic-eth', callback_param=None, callback_task='cic_eth.callbacks.noop.noop', callback_queue=None):
|
||||||
|
self.chain_str = chain_str
|
||||||
|
self.chain_spec = ChainSpec.from_chain_str(chain_str)
|
||||||
|
self.callback_param = callback_param
|
||||||
|
self.callback_task = callback_task
|
||||||
|
self.queue = queue
|
||||||
|
logg.debug('api using queue {}'.format(self.queue))
|
||||||
|
self.callback_success = None
|
||||||
|
self.callback_error = None
|
||||||
|
if callback_queue == None:
|
||||||
|
callback_queue=self.queue
|
||||||
|
|
||||||
|
if callback_param != None:
|
||||||
|
self.callback_success = celery.signature(
|
||||||
|
callback_task,
|
||||||
|
[
|
||||||
|
callback_param,
|
||||||
|
0,
|
||||||
|
],
|
||||||
|
queue=callback_queue,
|
||||||
|
)
|
||||||
|
self.callback_error = celery.signature(
|
||||||
|
callback_task,
|
||||||
|
[
|
||||||
|
callback_param,
|
||||||
|
1,
|
||||||
|
],
|
||||||
|
queue=callback_queue,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@@ -1,158 +1 @@
|
|||||||
# standard imports
|
from cic_eth.enum import *
|
||||||
import enum
|
|
||||||
|
|
||||||
|
|
||||||
@enum.unique
|
|
||||||
class StatusBits(enum.IntEnum):
|
|
||||||
"""Individual bit flags that are combined to define the state and legacy of a queued transaction
|
|
||||||
|
|
||||||
"""
|
|
||||||
QUEUED = 0x01 # transaction should be sent to network
|
|
||||||
IN_NETWORK = 0x08 # transaction is in network
|
|
||||||
|
|
||||||
DEFERRED = 0x10 # an attempt to send the transaction to network has failed
|
|
||||||
GAS_ISSUES = 0x20 # transaction is pending sender account gas funding
|
|
||||||
|
|
||||||
LOCAL_ERROR = 0x100 # errors that originate internally from the component
|
|
||||||
NODE_ERROR = 0x200 # errors originating in the node (invalid RLP input...)
|
|
||||||
NETWORK_ERROR = 0x400 # errors that originate from the network (REVERT)
|
|
||||||
UNKNOWN_ERROR = 0x800 # unclassified errors (the should not occur)
|
|
||||||
|
|
||||||
FINAL = 0x1000 # transaction processing has completed
|
|
||||||
OBSOLETE = 0x2000 # transaction has been replaced by a different transaction with higher fee
|
|
||||||
MANUAL = 0x8000 # transaction processing has been manually overridden
|
|
||||||
|
|
||||||
|
|
||||||
@enum.unique
|
|
||||||
class StatusEnum(enum.IntEnum):
|
|
||||||
"""
|
|
||||||
|
|
||||||
- Inactive, not finalized. (<0)
|
|
||||||
* PENDING: The initial state of a newly added transaction record. No action has been performed on this transaction yet.
|
|
||||||
* SENDFAIL: The transaction was not received by the node.
|
|
||||||
* RETRY: The transaction is queued for a new send attempt after previously failing.
|
|
||||||
* READYSEND: The transaction is queued for its first send attempt
|
|
||||||
* OBSOLETED: A new transaction with the same nonce and higher gas has been sent to network.
|
|
||||||
* WAITFORGAS: The transaction is on hold pending gas funding.
|
|
||||||
- Active state: (==0)
|
|
||||||
* SENT: The transaction has been sent to the mempool.
|
|
||||||
- Inactive, finalized. (>0)
|
|
||||||
* FUBAR: Unknown error occurred and transaction is abandoned. Manual intervention needed.
|
|
||||||
* CANCELLED: The transaction was sent, but was not mined and has disappered from the mempool. This usually follows a transaction being obsoleted.
|
|
||||||
* OVERRIDDEN: Transaction has been manually overriden.
|
|
||||||
* REJECTED: The transaction was rejected by the node.
|
|
||||||
* REVERTED: The transaction was mined, but exception occurred during EVM execution. (Block number will be set)
|
|
||||||
* SUCCESS: THe transaction was successfully mined. (Block number will be set)
|
|
||||||
|
|
||||||
"""
|
|
||||||
PENDING = 0
|
|
||||||
|
|
||||||
SENDFAIL = StatusBits.DEFERRED | StatusBits.LOCAL_ERROR
|
|
||||||
RETRY = StatusBits.QUEUED | StatusBits.DEFERRED
|
|
||||||
READYSEND = StatusBits.QUEUED
|
|
||||||
|
|
||||||
OBSOLETED = StatusBits.OBSOLETE | StatusBits.IN_NETWORK
|
|
||||||
|
|
||||||
WAITFORGAS = StatusBits.GAS_ISSUES
|
|
||||||
|
|
||||||
SENT = StatusBits.IN_NETWORK
|
|
||||||
FUBAR = StatusBits.FINAL | StatusBits.UNKNOWN_ERROR
|
|
||||||
CANCELLED = StatusBits.IN_NETWORK | StatusBits.FINAL | StatusBits.OBSOLETE
|
|
||||||
OVERRIDDEN = StatusBits.FINAL | StatusBits.OBSOLETE | StatusBits.MANUAL
|
|
||||||
|
|
||||||
REJECTED = StatusBits.NODE_ERROR | StatusBits.FINAL
|
|
||||||
REVERTED = StatusBits.IN_NETWORK | StatusBits.FINAL | StatusBits.NETWORK_ERROR
|
|
||||||
SUCCESS = StatusBits.IN_NETWORK | StatusBits.FINAL
|
|
||||||
|
|
||||||
|
|
||||||
@enum.unique
|
|
||||||
class LockEnum(enum.IntEnum):
|
|
||||||
"""
|
|
||||||
STICKY: When set, reset is not possible
|
|
||||||
CREATE: Disable creation of accounts
|
|
||||||
SEND: Disable sending to network
|
|
||||||
QUEUE: Disable queueing new or modified transactions
|
|
||||||
"""
|
|
||||||
STICKY=1
|
|
||||||
INIT=2
|
|
||||||
CREATE=4
|
|
||||||
SEND=8
|
|
||||||
QUEUE=16
|
|
||||||
QUERY=32
|
|
||||||
ALL=int(0xfffffffffffffffe)
|
|
||||||
|
|
||||||
|
|
||||||
def status_str(v, bits_only=False):
|
|
||||||
"""Render a human-readable string describing the status
|
|
||||||
|
|
||||||
If the bit field exactly matches a StatusEnum value, the StatusEnum label will be returned.
|
|
||||||
|
|
||||||
If a StatusEnum cannot be matched, the string will be postfixed with "*", unless explicitly instructed to return bit field labels only.
|
|
||||||
|
|
||||||
:param v: Status bit field
|
|
||||||
:type v: number
|
|
||||||
:param bits_only: Only render individual bit labels.
|
|
||||||
:type bits_only: bool
|
|
||||||
:returns: Status string
|
|
||||||
:rtype: str
|
|
||||||
"""
|
|
||||||
s = ''
|
|
||||||
if not bits_only:
|
|
||||||
try:
|
|
||||||
s = StatusEnum(v).name
|
|
||||||
return s
|
|
||||||
except ValueError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if v == 0:
|
|
||||||
return 'NONE'
|
|
||||||
|
|
||||||
for i in range(16):
|
|
||||||
b = (1 << i)
|
|
||||||
if (b & 0xffff) & v:
|
|
||||||
n = StatusBits(b).name
|
|
||||||
if len(s) > 0:
|
|
||||||
s += ','
|
|
||||||
s += n
|
|
||||||
if not bits_only:
|
|
||||||
s += '*'
|
|
||||||
return s
|
|
||||||
|
|
||||||
|
|
||||||
def all_errors():
|
|
||||||
"""Bit mask of all error states
|
|
||||||
|
|
||||||
:returns: Error flags
|
|
||||||
:rtype: number
|
|
||||||
"""
|
|
||||||
return StatusBits.LOCAL_ERROR | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR
|
|
||||||
|
|
||||||
|
|
||||||
def is_error_status(v):
|
|
||||||
"""Check if value is an error state
|
|
||||||
|
|
||||||
:param v: Status bit field
|
|
||||||
:type v: number
|
|
||||||
:returns: True if error
|
|
||||||
:rtype: bool
|
|
||||||
"""
|
|
||||||
return bool(v & all_errors())
|
|
||||||
|
|
||||||
|
|
||||||
def dead():
|
|
||||||
"""Bit mask defining whether a transaction is still likely to be processed on the network.
|
|
||||||
|
|
||||||
:returns: Bit mask
|
|
||||||
:rtype: number
|
|
||||||
"""
|
|
||||||
return StatusBits.FINAL | StatusBits.OBSOLETE
|
|
||||||
|
|
||||||
|
|
||||||
def is_alive(v):
    """Check if transaction is still likely to be processed on the network.

    The contingency of "likely" refers to the case a transaction has been
    obsoleted after being sent to the network, but the network still confirms
    the obsoleted transaction. The return value of this method will not change
    as a result of this, BUT the state itself will (as the FINAL bit will be
    set).

    :param v: Status bit field
    :type v: number
    :returns: True if neither FINAL nor OBSOLETE is set
    :rtype: bool
    """
    # alive means no "dead" bit is present in the field
    return (v & dead()) == 0
|
|
||||||
|
|||||||
158
apps/cic-eth/cic_eth/enum.py
Normal file
158
apps/cic-eth/cic_eth/enum.py
Normal file
@@ -0,0 +1,158 @@
|
|||||||
|
# standard imports
|
||||||
|
import enum
|
||||||
|
|
||||||
|
|
||||||
|
@enum.unique
class StatusBits(enum.IntEnum):
    """Individual bit flags that are combined to define the state and legacy of a queued transaction.

    Note: not all bit positions in the 16-bit field are defined here
    (e.g. 0x02, 0x04, 0x40, 0x80, 0x4000 have no member).
    """
    QUEUED = 0x01  # transaction should be sent to network
    IN_NETWORK = 0x08  # transaction is in network

    DEFERRED = 0x10  # an attempt to send the transaction to network has failed
    GAS_ISSUES = 0x20  # transaction is pending sender account gas funding

    LOCAL_ERROR = 0x100  # errors that originate internally from the component
    NODE_ERROR = 0x200  # errors originating in the node (invalid RLP input...)
    NETWORK_ERROR = 0x400  # errors that originate from the network (REVERT)
    UNKNOWN_ERROR = 0x800  # unclassified errors (these should not occur)

    FINAL = 0x1000  # transaction processing has completed
    OBSOLETE = 0x2000  # transaction has been replaced by a different transaction with higher fee
    MANUAL = 0x8000  # transaction processing has been manually overridden
|
||||||
|
@enum.unique
class StatusEnum(enum.IntEnum):
    """Named composite states built from StatusBits combinations.

    - Inactive, not finalized. (<0)
      * PENDING: The initial state of a newly added transaction record. No action has been performed on this transaction yet.
      * SENDFAIL: The transaction was not received by the node.
      * RETRY: The transaction is queued for a new send attempt after previously failing.
      * READYSEND: The transaction is queued for its first send attempt.
      * OBSOLETED: A new transaction with the same nonce and higher gas has been sent to network.
      * WAITFORGAS: The transaction is on hold pending gas funding.
    - Active state: (==0)
      * SENT: The transaction has been sent to the mempool.
    - Inactive, finalized. (>0)
      * FUBAR: Unknown error occurred and transaction is abandoned. Manual intervention needed.
      * CANCELLED: The transaction was sent, but was not mined and has disappeared from the mempool. This usually follows a transaction being obsoleted.
      * OVERRIDDEN: Transaction has been manually overridden.
      * REJECTED: The transaction was rejected by the node.
      * REVERTED: The transaction was mined, but an exception occurred during EVM execution. (Block number will be set)
      * SUCCESS: The transaction was successfully mined. (Block number will be set)
    """
    PENDING = 0

    SENDFAIL = StatusBits.DEFERRED | StatusBits.LOCAL_ERROR
    RETRY = StatusBits.QUEUED | StatusBits.DEFERRED
    READYSEND = StatusBits.QUEUED

    OBSOLETED = StatusBits.OBSOLETE | StatusBits.IN_NETWORK

    WAITFORGAS = StatusBits.GAS_ISSUES

    SENT = StatusBits.IN_NETWORK
    FUBAR = StatusBits.FINAL | StatusBits.UNKNOWN_ERROR
    CANCELLED = StatusBits.IN_NETWORK | StatusBits.FINAL | StatusBits.OBSOLETE
    OVERRIDDEN = StatusBits.FINAL | StatusBits.OBSOLETE | StatusBits.MANUAL

    REJECTED = StatusBits.NODE_ERROR | StatusBits.FINAL
    REVERTED = StatusBits.IN_NETWORK | StatusBits.FINAL | StatusBits.NETWORK_ERROR
    SUCCESS = StatusBits.IN_NETWORK | StatusBits.FINAL
|
@enum.unique
class LockEnum(enum.IntEnum):
    """Bit flags selectively disabling operations.

    STICKY: When set, reset is not possible
    CREATE: Disable creation of accounts
    SEND: Disable sending to network
    QUEUE: Disable queueing new or modified transactions
    """
    STICKY=1
    INIT=2  # NOTE(review): undocumented in original; presumably set during initialization — confirm
    CREATE=4
    SEND=8
    QUEUE=16
    QUERY=32  # NOTE(review): undocumented in original; presumably disables queries — confirm
    ALL=int(0xfffffffffffffffe)  # every lock bit except bit 0 (STICKY)
|
|
||||||
|
|
||||||
|
def status_str(v, bits_only=False):
    """Render a human-readable string describing the status.

    If the bit field exactly matches a StatusEnum value, the StatusEnum label will be returned.

    If a StatusEnum cannot be matched, the string will be postfixed with "*", unless explicitly instructed to return bit field labels only.

    :param v: Status bit field
    :type v: number
    :param bits_only: Only render individual bit labels.
    :type bits_only: bool
    :returns: Status string
    :rtype: str
    """
    s = ''
    if not bits_only:
        # exact composite match wins over per-bit rendering
        try:
            s = StatusEnum(v).name
            return s
        except ValueError:
            pass

    if v == 0:
        return 'NONE'

    # render each set bit's label, comma-separated
    # NOTE(review): StatusBits(b) raises ValueError for set bits with no
    # member definition — confirm callers only pass well-formed states
    for i in range(16):
        b = (1 << i)
        if (b & 0xffff) & v:
            n = StatusBits(b).name
            if len(s) > 0:
                s += ','
            s += n
    if not bits_only:
        s += '*'
    return s
||||||
|
|
||||||
|
|
||||||
|
def all_errors():
    """Bit mask of all error states.

    :returns: Error flags
    :rtype: number
    """
    return StatusBits.LOCAL_ERROR | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR
||||||
|
|
||||||
|
|
||||||
|
def is_error_status(v):
    """Check if value is an error state.

    :param v: Status bit field
    :type v: number
    :returns: True if any error bit is set
    :rtype: bool
    """
    return bool(v & all_errors())
||||||
|
|
||||||
|
|
||||||
|
def dead():
    """Bit mask defining whether a transaction is still likely to be processed on the network.

    :returns: Bit mask
    :rtype: number
    """
    return StatusBits.FINAL | StatusBits.OBSOLETE
||||||
|
|
||||||
|
|
||||||
|
def is_alive(v):
    """Check if transaction is still likely to be processed on the network.

    The contingency of "likely" refers to the case a transaction has been obsoleted after sent to the network, but the network still confirms the obsoleted transaction. The return value of this method will not change as a result of this, BUT the state itself will (as the FINAL bit will be set).

    :param v: Status bit field
    :type v: number
    :returns: True if neither FINAL nor OBSOLETE is set
    :rtype: bool
    """
    # & binds tighter than ==, so this is (v & dead()) == 0
    return bool(v & dead() == 0)
||||||
@@ -80,3 +80,8 @@ class SignerError(SeppukuError):
|
|||||||
class RoleAgencyError(SeppukuError):
|
class RoleAgencyError(SeppukuError):
|
||||||
"""Exception raise when a role cannot perform its function. This is a critical exception
|
"""Exception raise when a role cannot perform its function. This is a critical exception
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class YouAreBrokeError(Exception):
|
||||||
|
"""Exception raised when a value transfer is attempted without access to sufficient funds
|
||||||
|
"""
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ from cic_eth.error import (
|
|||||||
TokenCountError,
|
TokenCountError,
|
||||||
PermanentTxError,
|
PermanentTxError,
|
||||||
OutOfGasError,
|
OutOfGasError,
|
||||||
|
YouAreBrokeError,
|
||||||
)
|
)
|
||||||
from cic_eth.queue.tx import register_tx
|
from cic_eth.queue.tx import register_tx
|
||||||
from cic_eth.eth.gas import (
|
from cic_eth.eth.gas import (
|
||||||
@@ -71,6 +72,117 @@ def balance(tokens, holder_address, chain_spec_dict):
|
|||||||
return tokens
|
return tokens
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True)
def check_allowance(self, tokens, holder_address, value, chain_spec_dict, spender_address):
    """Best-effort verification that the allowance for a transfer from spend is sufficient.

    :raises YouAreBrokeError: If allowance is insufficient
    :raises TokenCountError: If the tokens list does not contain exactly one entry

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param value: Amount of token, in 'wei'
    :type value: int
    :param chain_spec_dict: Chain spec dict representation
    :type chain_spec_dict: dict
    :param spender_address: Address of account spending on behalf of holder
    :type spender_address: str, 0x-hex
    :return: Token list as passed to task
    :rtype: dict
    """
    logg.debug('tokens {}'.format(tokens))
    # only a single token is supported per invocation
    if len(tokens) != 1:
        raise TokenCountError
    t = tokens[0]
    chain_spec = ChainSpec.from_dict(chain_spec_dict)

    # NOTE(review): this connection is never explicitly disconnected,
    # unlike in transfer_from — confirm whether that is intentional
    rpc = RPCConnection.connect(chain_spec, 'default')

    caller_address = ERC20Token.caller_address
    c = ERC20(chain_spec)
    o = c.allowance(t['address'], holder_address, spender_address, sender_address=caller_address)
    r = rpc.do(o)
    allowance = c.parse_allowance(r)
    if allowance < value:
        errstr = 'allowance {} insufficent to transfer {} {} by {} on behalf of {}'.format(allowance, value, t['symbol'], spender_address, holder_address)
        logg.error(errstr)
        raise YouAreBrokeError(errstr)

    return tokens
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
def transfer_from(self, tokens, holder_address, receiver_address, value, chain_spec_dict, spender_address):
    """Transfer ERC20 tokens between addresses on behalf of the holder.

    First argument is a list of tokens, to enable the task to be chained to the symbol to token address resolver function. However, it accepts only one token as argument.

    :param tokens: Token addresses
    :type tokens: list of str, 0x-hex
    :param holder_address: Token holder address
    :type holder_address: str, 0x-hex
    :param receiver_address: Token receiver address
    :type receiver_address: str, 0x-hex
    :param value: Amount of token, in 'wei'
    :type value: int
    :param chain_spec_dict: Chain spec dict representation
    :type chain_spec_dict: dict
    :param spender_address: Address of account spending on behalf of holder
    :type spender_address: str, 0x-hex
    :raises TokenCountError: Either none or more than one token has been passed as tokens argument
    :raises SignerError: If the signer backend is unreachable or refuses the connection
    :return: Transaction hash for transfer operation
    :rtype: str, 0x-hex
    """
    # we only allow one token, one transfer
    logg.debug('tokens {}'.format(tokens))
    if len(tokens) != 1:
        raise TokenCountError
    t = tokens[0]
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    # reuse the queue this task was delivered on for follow-up tasks
    queue = self.request.delivery_info.get('routing_key')

    rpc = RPCConnection.connect(chain_spec, 'default')
    rpc_signer = RPCConnection.connect(chain_spec, 'signer')

    session = self.create_session()
    # nonce is scoped to the task chain root so chained sends stay ordered
    nonce_oracle = CustodialTaskNonceOracle(holder_address, self.request.root_id, session=session)
    gas_oracle = self.create_gas_oracle(rpc, MaxGasOracle.gas)
    c = ERC20(chain_spec, signer=rpc_signer, gas_oracle=gas_oracle, nonce_oracle=nonce_oracle)
    try:
        (tx_hash_hex, tx_signed_raw_hex) = c.transfer_from(t['address'], spender_address, holder_address, receiver_address, value, tx_format=TxFormat.RLP_SIGNED)
    except FileNotFoundError as e:
        # signer socket missing — surface as a signer failure
        raise SignerError(e)
    except ConnectionError as e:
        raise SignerError(e)

    rpc_signer.disconnect()
    rpc.disconnect()

    cache_task = 'cic_eth.eth.erc20.cache_transfer_from_data'

    register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=cache_task, session=session)
    session.commit()
    session.close()

    # budget = gas price * gas limit for the signed tx
    gas_pair = gas_oracle.get_gas(tx_signed_raw_hex)
    gas_budget = gas_pair[0] * gas_pair[1]
    logg.debug('transfer tx {} {} {}'.format(tx_hash_hex, queue, gas_budget))

    # verify the sender can afford the send before dispatching
    s = create_check_gas_task(
            [tx_signed_raw_hex],
            chain_spec,
            holder_address,
            gas_budget,
            [tx_hash_hex],
            queue,
            )
    s.apply_async()
    return tx_hash_hex
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
|
@celery_app.task(bind=True, base=CriticalSQLAlchemyAndSignerTask)
|
||||||
def transfer(self, tokens, holder_address, receiver_address, value, chain_spec_dict):
|
def transfer(self, tokens, holder_address, receiver_address, value, chain_spec_dict):
|
||||||
"""Transfer ERC20 tokens between addresses
|
"""Transfer ERC20 tokens between addresses
|
||||||
@@ -232,6 +344,7 @@ def resolve_tokens_by_symbol(self, token_symbols, chain_spec_dict):
|
|||||||
logg.debug('token {}'.format(token_address))
|
logg.debug('token {}'.format(token_address))
|
||||||
tokens.append({
|
tokens.append({
|
||||||
'address': token_address,
|
'address': token_address,
|
||||||
|
'symbol': token_symbol,
|
||||||
'converters': [],
|
'converters': [],
|
||||||
})
|
})
|
||||||
rpc.disconnect()
|
rpc.disconnect()
|
||||||
@@ -279,6 +392,48 @@ def cache_transfer_data(
|
|||||||
return (tx_hash_hex, cache_id)
|
return (tx_hash_hex, cache_id)
|
||||||
|
|
||||||
|
|
||||||
|
@celery_app.task(base=CriticalSQLAlchemyTask)
def cache_transfer_from_data(
        tx_hash_hex,
        tx_signed_raw_hex,
        chain_spec_dict,
        ):
    """Decode a signed transferFrom transaction and store its details in the transaction cache.

    :param tx_hash_hex: Transaction hash
    :type tx_hash_hex: str, 0x-hex
    :param tx_signed_raw_hex: Signed raw transaction
    :type tx_signed_raw_hex: str, 0x-hex
    :param chain_spec_dict: Chain spec dict representation
    :type chain_spec_dict: dict
    :returns: Transaction hash and id of cache element in storage backend, respectively
    :rtype: tuple
    """
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx = unpack(tx_signed_raw_bytes, chain_spec)

    # decode (spender, recipient, value) from the transferFrom calldata
    tx_data = ERC20.parse_transfer_from_request(tx['data'])
    spender_address = tx_data[0]
    recipient_address = tx_data[1]
    token_value = tx_data[2]

    session = SessionBase.create_session()
    # tx['to'] and token_value are deliberately passed twice —
    # presumably source/destination token and in/out value pairs of
    # TxCache; confirm against the TxCache constructor signature
    tx_cache = TxCache(
        tx_hash_hex,
        tx['from'],
        recipient_address,
        tx['to'],
        tx['to'],
        token_value,
        token_value,
        session=session,
        )
    session.add(tx_cache)
    session.commit()
    cache_id = tx_cache.id
    session.close()
    return (tx_hash_hex, cache_id)
||||||
|
|
||||||
|
|
||||||
@celery_app.task(base=CriticalSQLAlchemyTask)
|
@celery_app.task(base=CriticalSQLAlchemyTask)
|
||||||
def cache_approve_data(
|
def cache_approve_data(
|
||||||
tx_hash_hex,
|
tx_hash_hex,
|
||||||
|
|||||||
@@ -37,7 +37,7 @@ def celery_includes():
|
|||||||
'cic_eth.eth.account',
|
'cic_eth.eth.account',
|
||||||
'cic_eth.callbacks.noop',
|
'cic_eth.callbacks.noop',
|
||||||
'cic_eth.callbacks.http',
|
'cic_eth.callbacks.http',
|
||||||
'tests.mock.filter',
|
'cic_eth.pytest.mock.filter',
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
@@ -2,18 +2,20 @@
|
|||||||
import os
|
import os
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
# third-party imports
|
# external imports
|
||||||
import pytest
|
import pytest
|
||||||
import confini
|
import confini
|
||||||
|
|
||||||
script_dir = os.path.dirname(os.path.realpath(__file__))
|
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||||
root_dir = os.path.dirname(script_dir)
|
root_dir = os.path.dirname(os.path.dirname(script_dir))
|
||||||
logg = logging.getLogger(__file__)
|
logg = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='session')
|
@pytest.fixture(scope='session')
|
||||||
def load_config():
|
def load_config():
|
||||||
config_dir = os.path.join(root_dir, 'config/test')
|
config_dir = os.environ.get('CONFINI_DIR')
|
||||||
|
if config_dir == None:
|
||||||
|
config_dir = os.path.join(root_dir, 'config/test')
|
||||||
conf = confini.Config(config_dir, 'CICTEST')
|
conf = confini.Config(config_dir, 'CICTEST')
|
||||||
conf.process()
|
conf.process()
|
||||||
logg.debug('config {}'.format(conf))
|
logg.debug('config {}'.format(conf))
|
||||||
@@ -37,7 +37,8 @@ def init_database(
|
|||||||
database_engine,
|
database_engine,
|
||||||
):
|
):
|
||||||
|
|
||||||
rootdir = os.path.dirname(os.path.dirname(__file__))
|
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
rootdir = os.path.dirname(os.path.dirname(script_dir))
|
||||||
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
|
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
|
||||||
migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
|
migrationsdir = os.path.join(dbdir, 'migrations', load_config.get('DATABASE_ENGINE'))
|
||||||
if not os.path.isdir(migrationsdir):
|
if not os.path.isdir(migrationsdir):
|
||||||
19
apps/cic-eth/cic_eth/pytest/fixtures_token.py
Normal file
19
apps/cic-eth/cic_eth/pytest/fixtures_token.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
# external imports
|
||||||
|
import pytest
|
||||||
|
from eth_erc20 import ERC20
|
||||||
|
|
||||||
|
# TODO: missing dep fixture includes
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(scope='function')
def foo_token_symbol(
        default_chain_spec,
        foo_token,
        eth_rpc,
        contract_roles,
        ):
    """Pytest fixture resolving the symbol of the foo_token test contract via an ERC20 symbol() call."""
    c = ERC20(default_chain_spec)
    o = c.symbol(foo_token, sender_address=contract_roles['CONTRACT_DEPLOYER'])
    r = eth_rpc.do(o)
    return c.parse_symbol(r)
||||||
@@ -12,7 +12,7 @@ from chainlib.eth.constant import ZERO_ADDRESS
|
|||||||
from chainlib.eth.address import is_checksum_address
|
from chainlib.eth.address import is_checksum_address
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api import AdminApi
|
from cic_eth.api.admin import AdminApi
|
||||||
from cic_eth.db.enum import LockEnum
|
from cic_eth.db.enum import LockEnum
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
|
|||||||
@@ -1,136 +0,0 @@
|
|||||||
# standard imports
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import logging
|
|
||||||
import argparse
|
|
||||||
import json
|
|
||||||
|
|
||||||
# third-party imports
|
|
||||||
import web3
|
|
||||||
import confini
|
|
||||||
import celery
|
|
||||||
from json.decoder import JSONDecodeError
|
|
||||||
from cic_registry.chain import ChainSpec
|
|
||||||
|
|
||||||
# local imports
|
|
||||||
from cic_eth.db import dsn_from_config
|
|
||||||
from cic_eth.db.models.base import SessionBase
|
|
||||||
from cic_eth.eth.util import unpack_signed_raw_tx
|
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
|
||||||
logg = logging.getLogger()
|
|
||||||
|
|
||||||
rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
|
||||||
dbdir = os.path.join(rootdir, 'cic_eth', 'db')
|
|
||||||
migrationsdir = os.path.join(dbdir, 'migrations')
|
|
||||||
|
|
||||||
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser()
|
|
||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
|
||||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
|
||||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
|
||||||
argparser.add_argument('-q', type=str, default='cic-eth', help='queue name for worker tasks')
|
|
||||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
|
||||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
|
||||||
args = argparser.parse_args()
|
|
||||||
|
|
||||||
if args.vv:
|
|
||||||
logging.getLogger().setLevel(logging.DEBUG)
|
|
||||||
elif args.v:
|
|
||||||
logging.getLogger().setLevel(logging.INFO)
|
|
||||||
|
|
||||||
config = confini.Config(args.c, args.env_prefix)
|
|
||||||
config.process()
|
|
||||||
args_override = {
|
|
||||||
'CIC_CHAIN_SPEC': getattr(args, 'i'),
|
|
||||||
}
|
|
||||||
config.censor('PASSWORD', 'DATABASE')
|
|
||||||
config.censor('PASSWORD', 'SSL')
|
|
||||||
logg.debug('config:\n{}'.format(config))
|
|
||||||
|
|
||||||
dsn = dsn_from_config(config)
|
|
||||||
SessionBase.connect(dsn)
|
|
||||||
|
|
||||||
celery_app = celery.Celery(backend=config.get('CELERY_RESULT_URL'), broker=config.get('CELERY_BROKER_URL'))
|
|
||||||
queue = args.q
|
|
||||||
|
|
||||||
re_something = r'^/something/?'
|
|
||||||
|
|
||||||
chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
|
||||||
|
|
||||||
|
|
||||||
def process_something(session, env):
    """WSGI sub-handler: match the request path against re_something.

    Returns None when the path does not match. With the validation body
    commented out below, a matching path also falls through and returns
    None implicitly.

    :param session: Database session
    :param env: WSGI environment dict
    """
    r = re.match(re_something, env.get('PATH_INFO'))
    if not r:
        return None

    #if env.get('CONTENT_TYPE') != 'application/json':
    #    raise AttributeError('content type')

    #if env.get('REQUEST_METHOD') != 'POST':
    #    raise AttributeError('method')

    #post_data = json.load(env.get('wsgi.input'))

    #return ('text/plain', 'foo'.encode('utf-8'),)
|
|
||||||
# uwsgi application
|
|
||||||
def application(env, start_response):
    """uwsgi application entry point.

    Dispatches the request to each registered handler in turn; the first
    handler that returns a (mime_type, content) pair, raises, or exhausts
    the list ends the loop. Handler exceptions are mapped to HTTP error
    statuses.

    :param env: WSGI environment dict
    :param start_response: WSGI response-start callable
    :returns: Response body as a single-element list of bytes
    """
    for k in env.keys():
        logg.debug('env {} {}'.format(k, env[k]))

    headers = []
    content = b''
    err = None

    session = SessionBase.create_session()
    for handler in [
            process_something,
            ]:
        try:
            r = handler(session, env)
        except AttributeError as e:
            logg.error('handler fail attribute {}'.format(e))
            err = '400 Impertinent request'
            break
        except JSONDecodeError as e:
            logg.error('handler fail json {}'.format(e))
            err = '400 Invalid data format'
            break
        except KeyError as e:
            logg.error('handler fail key {}'.format(e))
            err = '400 Invalid JSON'
            break
        except ValueError as e:
            logg.error('handler fail value {}'.format(e))
            err = '400 Invalid data'
            break
        except RuntimeError as e:
            logg.error('task fail value {}'.format(e))
            err = '500 Task failed, sorry I cannot tell you more'
            break
        # first handler producing a result wins
        if r != None:
            (mime_type, content) = r
            break
    session.close()

    if err != None:
        headers.append(('Content-Type', 'text/plain, charset=UTF-8',))
        start_response(err, headers)
        # NOTE(review): session.close() is called a second time here —
        # harmless with SQLAlchemy sessions but looks unintentional
        session.close()
        return [content]

    headers.append(('Content-Length', str(len(content))),)
    headers.append(('Access-Control-Allow-Origin', '*',));

    # empty content means no handler matched the path
    if len(content) == 0:
        headers.append(('Content-Type', 'text/plain, charset=UTF-8',))
        start_response('404 Looked everywhere, sorry', headers)
    else:
        headers.append(('Content-Type', mime_type,))
        start_response('200 OK', headers)

    return [content]
|
||||||
@@ -7,6 +7,8 @@ import tempfile
|
|||||||
import re
|
import re
|
||||||
import urllib
|
import urllib
|
||||||
import websocket
|
import websocket
|
||||||
|
import stat
|
||||||
|
import importlib
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
import celery
|
import celery
|
||||||
@@ -68,6 +70,8 @@ from cic_eth.task import BaseTask
|
|||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
script_dir = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
|
||||||
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
config_dir = os.path.join('/usr/local/etc/cic-eth')
|
||||||
|
|
||||||
argparser = argparse.ArgumentParser()
|
argparser = argparse.ArgumentParser()
|
||||||
@@ -79,6 +83,8 @@ argparser.add_argument('--default-token-symbol', dest='default_token_symbol', ty
|
|||||||
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
|
argparser.add_argument('--trace-queue-status', default=None, dest='trace_queue_status', action='store_true', help='set to perist all queue entry status changes to storage')
|
||||||
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
argparser.add_argument('-i', '--chain-spec', dest='i', type=str, help='chain spec')
|
||||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
|
argparser.add_argument('--aux-all', action='store_true', help='include tasks from all submodules from the aux module path')
|
||||||
|
argparser.add_argument('--aux', action='append', type=str, default=[], help='add single submodule from the aux module path')
|
||||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||||
args = argparser.parse_args()
|
args = argparser.parse_args()
|
||||||
@@ -109,6 +115,8 @@ if len(health_modules) != 0:
|
|||||||
health_modules = health_modules.split(',')
|
health_modules = health_modules.split(',')
|
||||||
logg.debug('health mods {}'.format(health_modules))
|
logg.debug('health mods {}'.format(health_modules))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# connect to database
|
# connect to database
|
||||||
dsn = dsn_from_config(config)
|
dsn = dsn_from_config(config)
|
||||||
SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
|
SessionBase.connect(dsn, pool_size=int(config.get('DATABASE_POOL_SIZE')), debug=config.true('DATABASE_DEBUG'))
|
||||||
@@ -167,6 +175,84 @@ Otx.tracing = config.true('TASKS_TRACE_QUEUE_STATUS')
|
|||||||
# raise RuntimeError()
|
# raise RuntimeError()
|
||||||
liveness.linux.load(health_modules, rundir=config.get('CIC_RUN_DIR'), config=config, unit='cic-eth-tasker')
|
liveness.linux.load(health_modules, rundir=config.get('CIC_RUN_DIR'), config=config, unit='cic-eth-tasker')
|
||||||
|
|
||||||
|
rpc = RPCConnection.connect(chain_spec, 'default')
|
||||||
|
try:
|
||||||
|
registry = connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
|
||||||
|
except UnknownContractError as e:
|
||||||
|
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
|
||||||
|
sys.exit(1)
|
||||||
|
logg.info('connected contract registry {}'.format(config.get('CIC_REGISTRY_ADDRESS')))
|
||||||
|
|
||||||
|
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
||||||
|
if trusted_addresses_src == None:
|
||||||
|
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
||||||
|
sys.exit(1)
|
||||||
|
trusted_addresses = trusted_addresses_src.split(',')
|
||||||
|
for address in trusted_addresses:
|
||||||
|
logg.info('using trusted address {}'.format(address))
|
||||||
|
|
||||||
|
connect_declarator(rpc, chain_spec, trusted_addresses)
|
||||||
|
connect_token_registry(rpc, chain_spec)
|
||||||
|
|
||||||
|
# detect aux
|
||||||
|
# TODO: move to separate file
|
||||||
|
#aux_dir = os.path.join(script_dir, '..', '..', 'aux')
|
||||||
|
aux = []
|
||||||
|
if args.aux_all:
|
||||||
|
if len(args.aux) > 0:
|
||||||
|
logg.warning('--aux-all is set so --aux will have no effect')
|
||||||
|
for p in sys.path:
|
||||||
|
logg.debug('checking for aux modules in {}'.format(p))
|
||||||
|
aux_dir = os.path.join(p, 'cic_eth_aux')
|
||||||
|
try:
|
||||||
|
d = os.listdir(aux_dir)
|
||||||
|
except FileNotFoundError:
|
||||||
|
logg.debug('no aux module found in {}'.format(aux_dir))
|
||||||
|
continue
|
||||||
|
for v in d:
|
||||||
|
if v[:1] == '.':
|
||||||
|
logg.debug('dotfile, skip {}'.format(v))
|
||||||
|
continue
|
||||||
|
aux_mod_path = os.path.join(aux_dir, v)
|
||||||
|
st = os.stat(aux_mod_path)
|
||||||
|
if not stat.S_ISDIR(st.st_mode):
|
||||||
|
logg.debug('not a dir, skip {}'.format(v))
|
||||||
|
continue
|
||||||
|
aux_mod_file = os.path.join(aux_dir, v,'__init__.py')
|
||||||
|
try:
|
||||||
|
st = os.stat(aux_mod_file)
|
||||||
|
except FileNotFoundError:
|
||||||
|
logg.debug('__init__.py not found, skip {}'.format(v))
|
||||||
|
continue
|
||||||
|
aux.append(v)
|
||||||
|
logg.debug('found module {} in {}'.format(v, aux_dir))
|
||||||
|
|
||||||
|
elif len(args.aux) > 0:
|
||||||
|
for p in sys.path:
|
||||||
|
v_found = None
|
||||||
|
for v in args.aux:
|
||||||
|
aux_dir = os.path.join(p, 'cic_eth_aux')
|
||||||
|
aux_mod_file = os.path.join(aux_dir, v, '__init__.py')
|
||||||
|
try:
|
||||||
|
st = os.stat(aux_mod_file)
|
||||||
|
v_found = v
|
||||||
|
except FileNotFoundError:
|
||||||
|
logg.debug('cannot find explicity requested aux module {} in path {}'.format(v, aux_dir))
|
||||||
|
continue
|
||||||
|
if v_found == None:
|
||||||
|
logg.critical('excplicity requested aux module {} not found in any path'.format(v))
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
logg.info('aux module {} found in path {}'.format(v, aux_dir))
|
||||||
|
aux.append(v)
|
||||||
|
|
||||||
|
for v in aux:
|
||||||
|
mname = 'cic_eth_aux.' + v
|
||||||
|
mod = importlib.import_module(mname)
|
||||||
|
mod.aux_setup(rpc, config)
|
||||||
|
logg.info('loaded aux module {}'.format(mname))
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
argv = ['worker']
|
argv = ['worker']
|
||||||
if args.vv:
|
if args.vv:
|
||||||
@@ -189,23 +275,6 @@ def main():
|
|||||||
|
|
||||||
rpc = RPCConnection.connect(chain_spec, 'default')
|
rpc = RPCConnection.connect(chain_spec, 'default')
|
||||||
|
|
||||||
try:
|
|
||||||
registry = connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
|
|
||||||
except UnknownContractError as e:
|
|
||||||
logg.exception('Registry contract connection failed for {}: {}'.format(config.get('CIC_REGISTRY_ADDRESS'), e))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
trusted_addresses_src = config.get('CIC_TRUST_ADDRESS')
|
|
||||||
if trusted_addresses_src == None:
|
|
||||||
logg.critical('At least one trusted address must be declared in CIC_TRUST_ADDRESS')
|
|
||||||
sys.exit(1)
|
|
||||||
trusted_addresses = trusted_addresses_src.split(',')
|
|
||||||
for address in trusted_addresses:
|
|
||||||
logg.info('using trusted address {}'.format(address))
|
|
||||||
|
|
||||||
connect_declarator(rpc, chain_spec, trusted_addresses)
|
|
||||||
connect_token_registry(rpc, chain_spec)
|
|
||||||
|
|
||||||
BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
|
BaseTask.default_token_symbol = config.get('CIC_DEFAULT_TOKEN_SYMBOL')
|
||||||
BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
|
BaseTask.default_token_address = registry.by_name(BaseTask.default_token_symbol)
|
||||||
default_token = ERC20Token(chain_spec, rpc, BaseTask.default_token_address)
|
default_token = ERC20Token(chain_spec, rpc, BaseTask.default_token_address)
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ import cic_base.config
|
|||||||
import cic_base.log
|
import cic_base.log
|
||||||
import cic_base.argparse
|
import cic_base.argparse
|
||||||
import cic_base.rpc
|
import cic_base.rpc
|
||||||
|
from cic_base.eth.syncer import chain_interface
|
||||||
from cic_eth_registry.error import UnknownContractError
|
from cic_eth_registry.error import UnknownContractError
|
||||||
from chainlib.chain import ChainSpec
|
from chainlib.chain import ChainSpec
|
||||||
from chainlib.eth.constant import ZERO_ADDRESS
|
from chainlib.eth.constant import ZERO_ADDRESS
|
||||||
@@ -26,10 +27,8 @@ from hexathon import (
|
|||||||
strip_0x,
|
strip_0x,
|
||||||
)
|
)
|
||||||
from chainsyncer.backend.sql import SQLBackend
|
from chainsyncer.backend.sql import SQLBackend
|
||||||
from chainsyncer.driver import (
|
from chainsyncer.driver.head import HeadSyncer
|
||||||
HeadSyncer,
|
from chainsyncer.driver.history import HistorySyncer
|
||||||
HistorySyncer,
|
|
||||||
)
|
|
||||||
from chainsyncer.db.models.base import SessionBase
|
from chainsyncer.db.models.base import SessionBase
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
@@ -80,6 +79,7 @@ chain_spec = ChainSpec.from_chain_str(config.get('CIC_CHAIN_SPEC'))
|
|||||||
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
|
cic_base.rpc.setup(chain_spec, config.get('ETH_PROVIDER'))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
# connect to celery
|
# connect to celery
|
||||||
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
celery.Celery(broker=config.get('CELERY_BROKER_URL'), backend=config.get('CELERY_RESULT_URL'))
|
||||||
@@ -121,11 +121,11 @@ def main():
|
|||||||
|
|
||||||
for syncer_backend in syncer_backends:
|
for syncer_backend in syncer_backends:
|
||||||
try:
|
try:
|
||||||
syncers.append(HistorySyncer(syncer_backend))
|
syncers.append(HistorySyncer(syncer_backend, chain_interface))
|
||||||
logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
|
logg.info('Initializing HISTORY syncer on backend {}'.format(syncer_backend))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
|
logg.info('Initializing HEAD syncer on backend {}'.format(syncer_backend))
|
||||||
syncers.append(HeadSyncer(syncer_backend))
|
syncers.append(HeadSyncer(syncer_backend, chain_interface))
|
||||||
|
|
||||||
connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
|
connect_registry(rpc, chain_spec, config.get('CIC_REGISTRY_ADDRESS'))
|
||||||
|
|
||||||
|
|||||||
@@ -12,10 +12,8 @@ import confini
|
|||||||
import celery
|
import celery
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api import (
|
from cic_eth.api import Api
|
||||||
Api,
|
from cic_eth.api.admin import AdminApi
|
||||||
AdminApi,
|
|
||||||
)
|
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ from chainlib.chain import ChainSpec
|
|||||||
from chainlib.eth.connection import EthHTTPConnection
|
from chainlib.eth.connection import EthHTTPConnection
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api.api_admin import AdminApi
|
from cic_eth.api.admin import AdminApi
|
||||||
|
|
||||||
logging.basicConfig(level=logging.WARNING)
|
logging.basicConfig(level=logging.WARNING)
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|||||||
@@ -12,7 +12,7 @@ from chainlib.chain import ChainSpec
|
|||||||
from xdg.BaseDirectory import xdg_config_home
|
from xdg.BaseDirectory import xdg_config_home
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api import AdminApi
|
from cic_eth.api.admin import AdminApi
|
||||||
from cic_eth.db import dsn_from_config
|
from cic_eth.db import dsn_from_config
|
||||||
from cic_eth.db.models.base import SessionBase
|
from cic_eth.db.models.base import SessionBase
|
||||||
|
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ from chainlib.eth.connection import EthHTTPConnection
|
|||||||
from hexathon import add_0x
|
from hexathon import add_0x
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api import AdminApi
|
from cic_eth.api.admin import AdminApi
|
||||||
from cic_eth.db.enum import (
|
from cic_eth.db.enum import (
|
||||||
StatusEnum,
|
StatusEnum,
|
||||||
status_str,
|
status_str,
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
# import
|
# import
|
||||||
import time
|
import time
|
||||||
import requests
|
|
||||||
import logging
|
import logging
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
@@ -76,7 +75,7 @@ class CriticalSQLAlchemyTask(CriticalTask):
|
|||||||
|
|
||||||
class CriticalWeb3Task(CriticalTask):
|
class CriticalWeb3Task(CriticalTask):
|
||||||
autoretry_for = (
|
autoretry_for = (
|
||||||
requests.exceptions.ConnectionError,
|
ConnectionError,
|
||||||
)
|
)
|
||||||
safe_gas_threshold_amount = 2000000000 * 60000 * 3
|
safe_gas_threshold_amount = 2000000000 * 60000 * 3
|
||||||
safe_gas_refill_amount = safe_gas_threshold_amount * 5
|
safe_gas_refill_amount = safe_gas_threshold_amount * 5
|
||||||
@@ -86,7 +85,7 @@ class CriticalSQLAlchemyAndWeb3Task(CriticalTask):
|
|||||||
autoretry_for = (
|
autoretry_for = (
|
||||||
sqlalchemy.exc.DatabaseError,
|
sqlalchemy.exc.DatabaseError,
|
||||||
sqlalchemy.exc.TimeoutError,
|
sqlalchemy.exc.TimeoutError,
|
||||||
requests.exceptions.ConnectionError,
|
ConnectionError,
|
||||||
sqlalchemy.exc.ResourceClosedError,
|
sqlalchemy.exc.ResourceClosedError,
|
||||||
)
|
)
|
||||||
safe_gas_threshold_amount = 2000000000 * 60000 * 3
|
safe_gas_threshold_amount = 2000000000 * 60000 * 3
|
||||||
@@ -102,7 +101,7 @@ class CriticalSQLAlchemyAndSignerTask(CriticalTask):
|
|||||||
|
|
||||||
class CriticalWeb3AndSignerTask(CriticalTask):
|
class CriticalWeb3AndSignerTask(CriticalTask):
|
||||||
autoretry_for = (
|
autoretry_for = (
|
||||||
requests.exceptions.ConnectionError,
|
ConnectionError,
|
||||||
)
|
)
|
||||||
safe_gas_threshold_amount = 2000000000 * 60000 * 3
|
safe_gas_threshold_amount = 2000000000 * 60000 * 3
|
||||||
safe_gas_refill_amount = safe_gas_threshold_amount * 5
|
safe_gas_refill_amount = safe_gas_threshold_amount * 5
|
||||||
|
|||||||
@@ -8,9 +8,9 @@ import semver
|
|||||||
|
|
||||||
version = (
|
version = (
|
||||||
0,
|
0,
|
||||||
11,
|
12,
|
||||||
0,
|
0,
|
||||||
'beta.16',
|
'alpha.2',
|
||||||
)
|
)
|
||||||
|
|
||||||
version_object = semver.VersionInfo(
|
version_object = semver.VersionInfo(
|
||||||
|
|||||||
@@ -1,68 +1,71 @@
|
|||||||
FROM python:3.8.6-slim-buster as compile
|
# syntax = docker/dockerfile:1.2
|
||||||
|
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
WORKDIR /usr/src/cic-eth
|
# Copy just the requirements and install....this _might_ give docker a hint on caching but we
|
||||||
|
# do load these all into setup.py later
|
||||||
|
# TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
|
||||||
|
#COPY cic-eth/requirements.txt .
|
||||||
|
|
||||||
RUN apt-get update && \
|
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
|
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||||
|
#RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||||
#RUN python -m venv venv && . venv/bin/activate
|
# pip install --index-url https://pypi.org/simple \
|
||||||
|
# --force-reinstall \
|
||||||
ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433'
|
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||||
RUN /usr/local/bin/python -m pip install --upgrade pip
|
# -r requirements.txt
|
||||||
RUN pip install semver
|
COPY *requirements.txt .
|
||||||
|
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||||
# TODO use a packaging style that lets us copy requirments only ie. pip-tools
|
pip install --index-url https://pypi.org/simple \
|
||||||
COPY cic-eth/ .
|
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
||||||
RUN pip install $pip_extra_index_url_flag .
|
--extra-index-url $EXTRA_INDEX_URL \
|
||||||
|
-r requirements.txt \
|
||||||
# --- TEST IMAGE ---
|
-r services_requirements.txt \
|
||||||
FROM python:3.8.6-slim-buster as test
|
-r admin_requirements.txt
|
||||||
|
|
||||||
RUN apt-get update && \
|
COPY . .
|
||||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps git
|
RUN python setup.py install
|
||||||
|
|
||||||
WORKDIR /usr/src/cic-eth
|
|
||||||
|
|
||||||
RUN /usr/local/bin/python -m pip install --upgrade pip
|
|
||||||
|
|
||||||
COPY --from=compile /usr/local/bin/ /usr/local/bin/
|
|
||||||
COPY --from=compile /usr/local/lib/python3.8/site-packages/ \
|
|
||||||
/usr/local/lib/python3.8/site-packages/
|
|
||||||
# TODO we could use venv inside container to isolate the system and app deps further
|
|
||||||
# COPY --from=compile /usr/src/cic-eth/ .
|
|
||||||
# RUN . venv/bin/activate
|
|
||||||
|
|
||||||
COPY cic-eth/test_requirements.txt .
|
|
||||||
RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
|
|
||||||
|
|
||||||
COPY cic-eth .
|
|
||||||
|
|
||||||
ENV PYTHONPATH .
|
ENV PYTHONPATH .
|
||||||
|
|
||||||
ENTRYPOINT ["pytest"]
|
COPY docker/entrypoints/* ./
|
||||||
|
|
||||||
# --- RUNTIME ---
|
|
||||||
FROM python:3.8.6-slim-buster as runtime
|
|
||||||
|
|
||||||
RUN apt-get update && \
|
|
||||||
apt install -y gnupg libpq-dev procps
|
|
||||||
|
|
||||||
WORKDIR /usr/src/cic-eth
|
|
||||||
|
|
||||||
COPY --from=compile /usr/local/bin/ /usr/local/bin/
|
|
||||||
COPY --from=compile /usr/local/lib/python3.8/site-packages/ \
|
|
||||||
/usr/local/lib/python3.8/site-packages/
|
|
||||||
|
|
||||||
COPY cic-eth/docker/* ./
|
|
||||||
RUN chmod 755 *.sh
|
RUN chmod 755 *.sh
|
||||||
|
|
||||||
COPY cic-eth/scripts/ scripts/
|
|
||||||
# # ini files in config directory defines the configurable parameters for the application
|
# # ini files in config directory defines the configurable parameters for the application
|
||||||
# # they can all be overridden by environment variables
|
# # they can all be overridden by environment variables
|
||||||
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
COPY cic-eth/config/ /usr/local/etc/cic-eth/
|
COPY config/ /usr/local/etc/cic-eth/
|
||||||
COPY cic-eth/cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
||||||
COPY cic-eth/crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
||||||
|
|
||||||
COPY util/liveness/health.sh /usr/local/bin/health.sh
|
# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
|
||||||
|
#COPY util/liveness/health.sh /usr/local/bin/health.sh
|
||||||
|
ENTRYPOINT []
|
||||||
|
|
||||||
|
## ------------------ PRODUCTION CONTAINER ----------------------
|
||||||
|
#FROM python:3.8.6-slim-buster as prod
|
||||||
|
#
|
||||||
|
#RUN apt-get update && \
|
||||||
|
# apt install -y gnupg libpq-dev procps
|
||||||
|
#
|
||||||
|
#WORKDIR /root
|
||||||
|
#
|
||||||
|
#COPY --from=dev /usr/local/bin/ /usr/local/bin/
|
||||||
|
#COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
|
||||||
|
# /usr/local/lib/python3.8/site-packages/
|
||||||
|
#
|
||||||
|
#COPY docker/entrypoints/* ./
|
||||||
|
#RUN chmod 755 *.sh
|
||||||
|
#
|
||||||
|
## # ini files in config directory defines the configurable parameters for the application
|
||||||
|
## # they can all be overridden by environment variables
|
||||||
|
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
|
#COPY config/ /usr/local/etc/cic-eth/
|
||||||
|
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
||||||
|
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
||||||
|
#COPY scripts/ scripts/
|
||||||
|
#
|
||||||
|
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
|
||||||
|
##COPY util/liveness/health.sh /usr/local/bin/health.sh
|
||||||
|
#
|
||||||
|
#ENTRYPOINT []
|
||||||
|
|
||||||
|
|||||||
69
apps/cic-eth/docker/Dockerfile_ci
Normal file
69
apps/cic-eth/docker/Dockerfile_ci
Normal file
@@ -0,0 +1,69 @@
|
|||||||
|
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
|
WORKDIR /usr/src/cic-eth
|
||||||
|
|
||||||
|
# Copy just the requirements and install....this _might_ give docker a hint on caching but we
|
||||||
|
# do load these all into setup.py later
|
||||||
|
# TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
|
||||||
|
#COPY cic-eth/requirements.txt .
|
||||||
|
|
||||||
|
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
|
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||||
|
#RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||||
|
# pip install --index-url https://pypi.org/simple \
|
||||||
|
# --force-reinstall \
|
||||||
|
# --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||||
|
# -r requirements.txt
|
||||||
|
COPY *requirements.txt .
|
||||||
|
RUN pip install --index-url https://pypi.org/simple \
|
||||||
|
--extra-index-url $GITLAB_PYTHON_REGISTRY \
|
||||||
|
--extra-index-url $EXTRA_INDEX_URL \
|
||||||
|
-r requirements.txt \
|
||||||
|
-r services_requirements.txt \
|
||||||
|
-r admin_requirements.txt
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
RUN python setup.py install
|
||||||
|
|
||||||
|
COPY docker/entrypoints/* ./
|
||||||
|
RUN chmod 755 *.sh
|
||||||
|
|
||||||
|
# # ini files in config directory defines the configurable parameters for the application
|
||||||
|
# # they can all be overridden by environment variables
|
||||||
|
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
|
COPY config/ /usr/local/etc/cic-eth/
|
||||||
|
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
||||||
|
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
||||||
|
|
||||||
|
# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
|
||||||
|
#COPY util/liveness/health.sh /usr/local/bin/health.sh
|
||||||
|
ENTRYPOINT []
|
||||||
|
|
||||||
|
# ------------------ PRODUCTION CONTAINER ----------------------
|
||||||
|
#FROM python:3.8.6-slim-buster as prod
|
||||||
|
#
|
||||||
|
#RUN apt-get update && \
|
||||||
|
# apt install -y gnupg libpq-dev procps
|
||||||
|
#
|
||||||
|
#WORKDIR /root
|
||||||
|
#
|
||||||
|
#COPY --from=dev /usr/local/bin/ /usr/local/bin/
|
||||||
|
#COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
|
||||||
|
# /usr/local/lib/python3.8/site-packages/
|
||||||
|
#
|
||||||
|
#COPY docker/entrypoints/* ./
|
||||||
|
#RUN chmod 755 *.sh
|
||||||
|
#
|
||||||
|
## # ini files in config directory defines the configurable parameters for the application
|
||||||
|
## # they can all be overridden by environment variables
|
||||||
|
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
|
#COPY config/ /usr/local/etc/cic-eth/
|
||||||
|
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
|
||||||
|
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
|
||||||
|
#COPY scripts/ scripts/
|
||||||
|
#
|
||||||
|
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
|
||||||
|
##COPY util/liveness/health.sh /usr/local/bin/health.sh
|
||||||
|
#
|
||||||
|
#ENTRYPOINT []
|
||||||
|
#
|
||||||
@@ -8,7 +8,7 @@ set -e
|
|||||||
echo "!!! starting signer"
|
echo "!!! starting signer"
|
||||||
python /usr/local/bin/crypto-dev-daemon -c /usr/local/etc/crypto-dev-signer -vv 2> /tmp/signer.log &
|
python /usr/local/bin/crypto-dev-daemon -c /usr/local/etc/crypto-dev-signer -vv 2> /tmp/signer.log &
|
||||||
|
|
||||||
echo "!!! starting tracker"
|
echo "!!! starting taskerd"
|
||||||
/usr/local/bin/cic-eth-taskerd $@
|
/usr/local/bin/cic-eth-taskerd $@
|
||||||
|
|
||||||
# thanks! https://docs.docker.com/config/containers/multi-service_container/
|
# thanks! https://docs.docker.com/config/containers/multi-service_container/
|
||||||
@@ -1,25 +1,3 @@
|
|||||||
cic-base~=0.1.2b15
|
|
||||||
celery==4.4.7
|
celery==4.4.7
|
||||||
crypto-dev-signer~=0.4.14b3
|
chainlib~=0.0.5a1
|
||||||
confini~=0.3.6rc3
|
|
||||||
cic-eth-registry~=0.5.5a7
|
|
||||||
redis==3.5.3
|
|
||||||
alembic==1.4.2
|
|
||||||
websockets==8.1
|
|
||||||
requests~=2.24.0
|
|
||||||
eth_accounts_index~=0.0.11a12
|
|
||||||
erc20-transfer-authorization~=0.3.1a7
|
|
||||||
uWSGI==2.0.19.1
|
|
||||||
semver==2.13.0
|
semver==2.13.0
|
||||||
websocket-client==0.57.0
|
|
||||||
moolb~=0.1.1b2
|
|
||||||
eth-address-index~=0.1.1a11
|
|
||||||
chainlib~=0.0.3rc2
|
|
||||||
hexathon~=0.0.1a7
|
|
||||||
chainsyncer[sql]~=0.0.2a5
|
|
||||||
chainqueue~=0.0.2b3
|
|
||||||
sarafu-faucet==0.0.3a3
|
|
||||||
erc20-faucet==0.2.1a4
|
|
||||||
coincurve==15.0.0
|
|
||||||
potaahto~=0.0.1a2
|
|
||||||
pycryptodome==3.10.1
|
|
||||||
|
|||||||
@@ -2,6 +2,8 @@
|
|||||||
import os
|
import os
|
||||||
import argparse
|
import argparse
|
||||||
import logging
|
import logging
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
import alembic
|
import alembic
|
||||||
from alembic.config import Config as AlembicConfig
|
from alembic.config import Config as AlembicConfig
|
||||||
@@ -23,6 +25,8 @@ argparser = argparse.ArgumentParser()
|
|||||||
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
argparser.add_argument('-c', type=str, default=config_dir, help='config file')
|
||||||
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
argparser.add_argument('--env-prefix', default=os.environ.get('CONFINI_ENV_PREFIX'), dest='env_prefix', type=str, help='environment prefix for variables to overwrite configuration')
|
||||||
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
argparser.add_argument('--migrations-dir', dest='migrations_dir', default=migrationsdir, type=str, help='path to alembic migrations directory')
|
||||||
|
argparser.add_argument('--reset', action='store_true', help='downgrade before upgrading')
|
||||||
|
argparser.add_argument('-f', action='store_true', help='force action')
|
||||||
argparser.add_argument('-v', action='store_true', help='be verbose')
|
argparser.add_argument('-v', action='store_true', help='be verbose')
|
||||||
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
argparser.add_argument('-vv', action='store_true', help='be more verbose')
|
||||||
args = argparser.parse_args()
|
args = argparser.parse_args()
|
||||||
@@ -53,4 +57,10 @@ ac = AlembicConfig(os.path.join(migrations_dir, 'alembic.ini'))
|
|||||||
ac.set_main_option('sqlalchemy.url', dsn)
|
ac.set_main_option('sqlalchemy.url', dsn)
|
||||||
ac.set_main_option('script_location', migrations_dir)
|
ac.set_main_option('script_location', migrations_dir)
|
||||||
|
|
||||||
|
if args.reset:
|
||||||
|
if not args.f:
|
||||||
|
if not re.match(r'[yY][eE]?[sS]?', input('EEK! this will DELETE the existing db. are you sure??')):
|
||||||
|
logg.error('user chickened out on requested reset, bailing')
|
||||||
|
sys.exit(1)
|
||||||
|
alembic.command.downgrade(ac, 'base')
|
||||||
alembic.command.upgrade(ac, 'head')
|
alembic.command.upgrade(ac, 'head')
|
||||||
|
|||||||
15
apps/cic-eth/services_requirements.txt
Normal file
15
apps/cic-eth/services_requirements.txt
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
chainsyncer[sql]~=0.0.3a3
|
||||||
|
chainqueue~=0.0.2b5
|
||||||
|
alembic==1.4.2
|
||||||
|
confini~=0.3.6rc4
|
||||||
|
redis==3.5.3
|
||||||
|
hexathon~=0.0.1a7
|
||||||
|
pycryptodome==3.10.1
|
||||||
|
liveness~=0.0.1a7
|
||||||
|
eth-address-index~=0.1.2a1
|
||||||
|
eth-accounts-index~=0.0.12a1
|
||||||
|
cic-eth-registry~=0.5.6a1
|
||||||
|
erc20-faucet~=0.2.2a1
|
||||||
|
erc20-transfer-authorization~=0.3.2a1
|
||||||
|
sarafu-faucet~=0.0.4a1
|
||||||
|
moolb~=0.1.1b2
|
||||||
@@ -39,22 +39,25 @@ packages =
|
|||||||
cic_eth.callbacks
|
cic_eth.callbacks
|
||||||
cic_eth.sync
|
cic_eth.sync
|
||||||
cic_eth.check
|
cic_eth.check
|
||||||
|
# should be concealed behind extras "test" if possible (but its not unfortunately)
|
||||||
|
cic_eth.pytest
|
||||||
|
cic_eth.pytest.mock
|
||||||
scripts =
|
scripts =
|
||||||
./scripts/migrate.py
|
./scripts/migrate.py
|
||||||
|
|
||||||
[options.entry_points]
|
[options.entry_points]
|
||||||
console_scripts =
|
console_scripts =
|
||||||
# daemons
|
# daemons
|
||||||
cic-eth-taskerd = cic_eth.runnable.daemons.tasker:main
|
cic-eth-taskerd = cic_eth.runnable.daemons.tasker:main [services]
|
||||||
cic-eth-trackerd = cic_eth.runnable.daemons.tracker:main
|
cic-eth-trackerd = cic_eth.runnable.daemons.tracker:main [services]
|
||||||
cic-eth-dispatcherd = cic_eth.runnable.daemons.dispatcher:main
|
cic-eth-dispatcherd = cic_eth.runnable.daemons.dispatcher:main [services]
|
||||||
cic-eth-retrierd = cic_eth.runnable.daemons.retry:main
|
cic-eth-retrierd = cic_eth.runnable.daemons.retry:main [services]
|
||||||
# tools
|
# tools
|
||||||
cic-eth-create = cic_eth.runnable.create:main
|
cic-eth-create = cic_eth.runnable.create:main [tools]
|
||||||
cic-eth-inspect = cic_eth.runnable.view:main
|
cic-eth-inspect = cic_eth.runnable.view:main [tools]
|
||||||
cic-eth-ctl = cic_eth.runnable.ctrl:main
|
cic-eth-ctl = cic_eth.runnable.ctrl:main [tools]
|
||||||
cic-eth-info = cic_eth.runnable.info:main
|
cic-eth-info = cic_eth.runnable.info:main [tools]
|
||||||
# TODO: Merge this with ctl when subcmds sorted to submodules
|
# TODO: Merge this with ctl when subcmds sorted to submodules
|
||||||
cic-eth-tag = cic_eth.runnable.tag:main
|
cic-eth-tag = cic_eth.runnable.tag:main [tools]
|
||||||
cic-eth-resend = cic_eth.runnable.resend:main
|
cic-eth-resend = cic_eth.runnable.resend:main [tools]
|
||||||
cic-eth-transfer = cic_eth.runnable.transfer:main
|
cic-eth-transfer = cic_eth.runnable.transfer:main [tools]
|
||||||
|
|||||||
@@ -11,6 +11,41 @@ while True:
|
|||||||
requirements.append(l.rstrip())
|
requirements.append(l.rstrip())
|
||||||
f.close()
|
f.close()
|
||||||
|
|
||||||
|
admin_requirements = []
|
||||||
|
f = open('admin_requirements.txt', 'r')
|
||||||
|
while True:
|
||||||
|
l = f.readline()
|
||||||
|
if l == '':
|
||||||
|
break
|
||||||
|
admin_requirements.append(l.rstrip())
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
tools_requirements = []
|
||||||
|
f = open('tools_requirements.txt', 'r')
|
||||||
|
while True:
|
||||||
|
l = f.readline()
|
||||||
|
if l == '':
|
||||||
|
break
|
||||||
|
tools_requirements.append(l.rstrip())
|
||||||
|
f.close()
|
||||||
|
|
||||||
|
|
||||||
|
services_requirements = []
|
||||||
|
f = open('services_requirements.txt', 'r')
|
||||||
|
while True:
|
||||||
|
l = f.readline()
|
||||||
|
if l == '':
|
||||||
|
break
|
||||||
|
services_requirements.append(l.rstrip())
|
||||||
|
f.close()
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
install_requires=requirements
|
install_requires=requirements,
|
||||||
)
|
extras_require = {
|
||||||
|
'tools': tools_requirements,
|
||||||
|
'admin_api': admin_requirements,
|
||||||
|
'services': services_requirements,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|||||||
@@ -2,5 +2,8 @@ pytest==6.0.1
|
|||||||
pytest-celery==0.0.0a1
|
pytest-celery==0.0.0a1
|
||||||
pytest-mock==3.3.1
|
pytest-mock==3.3.1
|
||||||
pytest-cov==2.10.1
|
pytest-cov==2.10.1
|
||||||
|
pytest-redis==2.0.0
|
||||||
|
redis==3.5.3
|
||||||
eth-tester==0.5.0b3
|
eth-tester==0.5.0b3
|
||||||
py-evm==0.3.0a20
|
py-evm==0.3.0a20
|
||||||
|
eth-erc20~=0.0.10a2
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import logging
|
|||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
# external imports
|
# external imports
|
||||||
|
import pytest
|
||||||
from eth_erc20 import ERC20
|
from eth_erc20 import ERC20
|
||||||
import redis
|
import redis
|
||||||
|
|
||||||
@@ -17,11 +18,12 @@ root_dir = os.path.dirname(script_dir)
|
|||||||
sys.path.insert(0, root_dir)
|
sys.path.insert(0, root_dir)
|
||||||
|
|
||||||
# assemble fixtures
|
# assemble fixtures
|
||||||
from tests.fixtures_config import *
|
from cic_eth.pytest.fixtures_config import *
|
||||||
from tests.fixtures_database import *
|
from cic_eth.pytest.fixtures_celery import *
|
||||||
from tests.fixtures_celery import *
|
from cic_eth.pytest.fixtures_database import *
|
||||||
from tests.fixtures_role import *
|
from cic_eth.pytest.fixtures_role import *
|
||||||
from tests.fixtures_contract import *
|
from cic_eth.pytest.fixtures_contract import *
|
||||||
|
from cic_eth.pytest.fixtures_token import *
|
||||||
from chainlib.eth.pytest import *
|
from chainlib.eth.pytest import *
|
||||||
from eth_contract_registry.pytest import *
|
from eth_contract_registry.pytest import *
|
||||||
from cic_eth_registry.pytest.fixtures_contracts import *
|
from cic_eth_registry.pytest.fixtures_contracts import *
|
||||||
@@ -37,20 +39,6 @@ def api(
|
|||||||
return Api(chain_str, queue=None, callback_param='foo')
|
return Api(chain_str, queue=None, callback_param='foo')
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='function')
|
|
||||||
def foo_token_symbol(
|
|
||||||
default_chain_spec,
|
|
||||||
foo_token,
|
|
||||||
eth_rpc,
|
|
||||||
contract_roles,
|
|
||||||
):
|
|
||||||
|
|
||||||
c = ERC20(default_chain_spec)
|
|
||||||
o = c.symbol(foo_token, sender_address=contract_roles['CONTRACT_DEPLOYER'])
|
|
||||||
r = eth_rpc.do(o)
|
|
||||||
return c.parse_symbol(r)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope='function')
|
@pytest.fixture(scope='function')
|
||||||
def default_token(
|
def default_token(
|
||||||
foo_token,
|
foo_token,
|
||||||
|
|||||||
@@ -40,7 +40,7 @@ from chainqueue.sql.query import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api import AdminApi
|
from cic_eth.api.admin import AdminApi
|
||||||
from cic_eth.db.models.role import AccountRole
|
from cic_eth.db.models.role import AccountRole
|
||||||
from cic_eth.db.enum import LockEnum
|
from cic_eth.db.enum import LockEnum
|
||||||
from cic_eth.error import InitializationError
|
from cic_eth.error import InitializationError
|
||||||
|
|||||||
@@ -35,7 +35,7 @@ from eth_erc20 import ERC20
|
|||||||
from cic_eth_registry import CICRegistry
|
from cic_eth_registry import CICRegistry
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.api.api_admin import AdminApi
|
from cic_eth.api.admin import AdminApi
|
||||||
from cic_eth.eth.gas import cache_gas_data
|
from cic_eth.eth.gas import cache_gas_data
|
||||||
from cic_eth.eth.erc20 import cache_transfer_data
|
from cic_eth.eth.erc20 import cache_transfer_data
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ from cic_eth.db.models.nonce import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
# test imports
|
# test imports
|
||||||
from tests.mock.filter import (
|
from cic_eth.pytest.mock.filter import (
|
||||||
block_filter,
|
block_filter,
|
||||||
tx_filter,
|
tx_filter,
|
||||||
)
|
)
|
||||||
@@ -110,7 +110,7 @@ def test_list_tx(
|
|||||||
logg.debug('r {}'.format(r))
|
logg.debug('r {}'.format(r))
|
||||||
|
|
||||||
# test the api
|
# test the api
|
||||||
t = api.list(agent_roles['ALICE'], external_task='tests.mock.filter.filter')
|
t = api.list(agent_roles['ALICE'], external_task='cic_eth.pytest.mock.filter.filter')
|
||||||
r = t.get_leaf()
|
r = t.get_leaf()
|
||||||
assert t.successful()
|
assert t.successful()
|
||||||
|
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
from tests.fixtures_celery import *
|
from cic_eth.pytest.fixtures_celery import *
|
||||||
|
|||||||
@@ -13,6 +13,7 @@ from chainlib.eth.tx import (
|
|||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_eth.queue.tx import register_tx
|
from cic_eth.queue.tx import register_tx
|
||||||
|
from cic_eth.error import YouAreBrokeError
|
||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
@@ -167,3 +168,101 @@ def test_erc20_approve_task(
|
|||||||
r = t.get_leaf()
|
r = t.get_leaf()
|
||||||
|
|
||||||
logg.debug('result {}'.format(r))
|
logg.debug('result {}'.format(r))
|
||||||
|
|
||||||
|
|
||||||
|
def test_erc20_transfer_from_task(
|
||||||
|
default_chain_spec,
|
||||||
|
foo_token,
|
||||||
|
agent_roles,
|
||||||
|
custodial_roles,
|
||||||
|
eth_signer,
|
||||||
|
eth_rpc,
|
||||||
|
init_database,
|
||||||
|
celery_session_worker,
|
||||||
|
token_roles,
|
||||||
|
):
|
||||||
|
|
||||||
|
token_object = {
|
||||||
|
'address': foo_token,
|
||||||
|
}
|
||||||
|
transfer_value = 100 * (10 ** 6)
|
||||||
|
|
||||||
|
nonce_oracle = RPCNonceOracle(token_roles['FOO_TOKEN_OWNER'], conn=eth_rpc)
|
||||||
|
c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
(tx_hash, o) = c.approve(foo_token, token_roles['FOO_TOKEN_OWNER'], agent_roles['ALICE'], transfer_value)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
o = receipt(tx_hash)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
assert r['status'] == 1
|
||||||
|
|
||||||
|
s_nonce = celery.signature(
|
||||||
|
'cic_eth.eth.nonce.reserve_nonce',
|
||||||
|
[
|
||||||
|
[token_object],
|
||||||
|
default_chain_spec.asdict(),
|
||||||
|
custodial_roles['FOO_TOKEN_GIFTER'],
|
||||||
|
],
|
||||||
|
queue=None,
|
||||||
|
)
|
||||||
|
s_transfer = celery.signature(
|
||||||
|
'cic_eth.eth.erc20.transfer_from',
|
||||||
|
[
|
||||||
|
custodial_roles['FOO_TOKEN_GIFTER'],
|
||||||
|
agent_roles['BOB'],
|
||||||
|
transfer_value,
|
||||||
|
default_chain_spec.asdict(),
|
||||||
|
agent_roles['ALICE'],
|
||||||
|
],
|
||||||
|
queue=None,
|
||||||
|
)
|
||||||
|
s_nonce.link(s_transfer)
|
||||||
|
t = s_nonce.apply_async()
|
||||||
|
r = t.get_leaf()
|
||||||
|
|
||||||
|
logg.debug('result {}'.format(r))
|
||||||
|
|
||||||
|
|
||||||
|
def test_erc20_allowance_check_task(
|
||||||
|
default_chain_spec,
|
||||||
|
foo_token,
|
||||||
|
agent_roles,
|
||||||
|
custodial_roles,
|
||||||
|
eth_signer,
|
||||||
|
eth_rpc,
|
||||||
|
init_database,
|
||||||
|
celery_session_worker,
|
||||||
|
token_roles,
|
||||||
|
):
|
||||||
|
|
||||||
|
token_object = {
|
||||||
|
'address': foo_token,
|
||||||
|
'symbol': 'FOO',
|
||||||
|
}
|
||||||
|
transfer_value = 100 * (10 ** 6)
|
||||||
|
|
||||||
|
s_check = celery.signature(
|
||||||
|
'cic_eth.eth.erc20.check_allowance',
|
||||||
|
[
|
||||||
|
[token_object],
|
||||||
|
custodial_roles['FOO_TOKEN_GIFTER'],
|
||||||
|
transfer_value,
|
||||||
|
default_chain_spec.asdict(),
|
||||||
|
agent_roles['ALICE']
|
||||||
|
],
|
||||||
|
queue=None,
|
||||||
|
)
|
||||||
|
t = s_check.apply_async()
|
||||||
|
with pytest.raises(YouAreBrokeError):
|
||||||
|
t.get()
|
||||||
|
|
||||||
|
nonce_oracle = RPCNonceOracle(token_roles['FOO_TOKEN_OWNER'], conn=eth_rpc)
|
||||||
|
c = ERC20(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle)
|
||||||
|
(tx_hash, o) = c.approve(foo_token, token_roles['FOO_TOKEN_OWNER'], agent_roles['ALICE'], transfer_value)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
o = receipt(tx_hash)
|
||||||
|
r = eth_rpc.do(o)
|
||||||
|
assert r['status'] == 1
|
||||||
|
|
||||||
|
t = s_check.apply_async()
|
||||||
|
t.get()
|
||||||
|
assert t.successful()
|
||||||
|
|||||||
8
apps/cic-eth/tools_requirements.txt
Normal file
8
apps/cic-eth/tools_requirements.txt
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
crypto-dev-signer~=0.4.14b6
|
||||||
|
chainqueue~=0.0.2b5
|
||||||
|
confini~=0.3.6rc4
|
||||||
|
cic-eth-registry~=0.5.6a1
|
||||||
|
redis==3.5.3
|
||||||
|
hexathon~=0.0.1a7
|
||||||
|
pycryptodome==3.10.1
|
||||||
|
pyxdg==0.27
|
||||||
5
apps/cic-meta/.dockerignore
Normal file
5
apps/cic-meta/.dockerignore
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
.git
|
||||||
|
.cache
|
||||||
|
.dot
|
||||||
|
**/doc
|
||||||
|
**/node_modules
|
||||||
@@ -2,43 +2,42 @@
|
|||||||
.cic_meta_variables:
|
.cic_meta_variables:
|
||||||
variables:
|
variables:
|
||||||
APP_NAME: cic-meta
|
APP_NAME: cic-meta
|
||||||
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
|
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||||
IMAGE_TAG: $CI_REGISTRY_IMAGE/$APP_NAME:unittest-$CI_COMMIT_SHORT_SHA
|
CONTEXT: apps/$APP_NAME
|
||||||
|
|
||||||
.cic_meta_changes_target:
|
build-mr-cic-meta:
|
||||||
|
extends:
|
||||||
|
- .py_build_merge_request
|
||||||
|
- .cic_meta_variables
|
||||||
rules:
|
rules:
|
||||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
# - changes:
|
changes:
|
||||||
# - $CONTEXT/$APP_NAME/*
|
- apps/cic-meta/**/*
|
||||||
- when: always
|
when: always
|
||||||
|
|
||||||
cic-meta-build-mr:
|
|
||||||
stage: build
|
|
||||||
extends:
|
|
||||||
- .cic_meta_variables
|
|
||||||
- .cic_meta_changes_target
|
|
||||||
script:
|
|
||||||
- mkdir -p /kaniko/.docker
|
|
||||||
- echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > "/kaniko/.docker/config.json"
|
|
||||||
# - /kaniko/executor --context $CONTEXT --dockerfile $DOCKERFILE_PATH $KANIKO_CACHE_ARGS --destination $IMAGE_TAG
|
|
||||||
- /kaniko/executor --context $CONTEXT --dockerfile $DOCKERFILE_PATH $KANIKO_CACHE_ARGS --destination $IMAGE_TAG
|
|
||||||
|
|
||||||
test-mr-cic-meta:
|
test-mr-cic-meta:
|
||||||
extends:
|
extends:
|
||||||
- .cic_meta_variables
|
- .cic_meta_variables
|
||||||
- .cic_meta_changes_target
|
|
||||||
stage: test
|
stage: test
|
||||||
image: $IMAGE_TAG
|
image: $MR_IMAGE_TAG
|
||||||
script:
|
script:
|
||||||
- cd /tmp/src/cic-meta
|
- cd /root
|
||||||
- npm install --dev
|
- npm install --dev
|
||||||
- npm run test
|
- npm run test
|
||||||
- npm run test:coverage
|
- npm run test:coverage
|
||||||
needs: ["cic-meta-build-mr"]
|
needs: ["build-mr-cic-meta"]
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/cic-meta/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
build-push-cic-meta:
|
build-push-cic-meta:
|
||||||
extends:
|
extends:
|
||||||
- .py_build_push
|
- .py_build_push
|
||||||
- .cic_meta_variables
|
- .cic_meta_variables
|
||||||
|
rules:
|
||||||
|
- if: $CI_COMMIT_BRANCH == "master"
|
||||||
|
changes:
|
||||||
|
- apps/cic-meta/**/*
|
||||||
|
when: always
|
||||||
|
|||||||
@@ -1,31 +1,33 @@
|
|||||||
FROM node:15.3.0-alpine3.10
|
# syntax = docker/dockerfile:1.2
|
||||||
|
#FROM node:15.3.0-alpine3.10
|
||||||
|
FROM node:lts-alpine3.14
|
||||||
|
|
||||||
WORKDIR /tmp/src/cic-meta
|
WORKDIR /root
|
||||||
|
|
||||||
RUN apk add --no-cache postgresql bash
|
RUN apk add --no-cache postgresql bash
|
||||||
|
|
||||||
# required to build the cic-client-meta module
|
|
||||||
COPY cic-meta/src/ src/
|
|
||||||
COPY cic-meta/scripts/ scripts/
|
|
||||||
|
|
||||||
# copy the dependencies
|
# copy the dependencies
|
||||||
COPY cic-meta/package.json .
|
COPY package.json package-lock.json .
|
||||||
COPY cic-meta/tsconfig.json .
|
RUN --mount=type=cache,mode=0755,target=/root/.npm \
|
||||||
COPY cic-meta/webpack.config.js .
|
npm set cache /root/.npm && \
|
||||||
|
npm ci
|
||||||
RUN npm install
|
|
||||||
|
|
||||||
COPY cic-meta/tests/ tests/
|
COPY webpack.config.js .
|
||||||
COPY cic-meta/tests/*.asc /root/pgp/
|
COPY tsconfig.json .
|
||||||
|
## required to build the cic-client-meta module
|
||||||
|
COPY src/ src/
|
||||||
|
COPY scripts/ scripts/
|
||||||
|
COPY tests/ tests/
|
||||||
|
COPY tests/*.asc /root/pgp/
|
||||||
|
|
||||||
# copy runtime configs
|
## copy runtime configs
|
||||||
COPY cic-meta/.config/ /usr/local/etc/cic-meta/
|
COPY .config/ /usr/local/etc/cic-meta/
|
||||||
|
#
|
||||||
# db migrations
|
## db migrations
|
||||||
COPY cic-meta/docker/db.sh ./db.sh
|
COPY docker/db.sh ./db.sh
|
||||||
RUN chmod 755 ./db.sh
|
RUN chmod 755 ./db.sh
|
||||||
|
#
|
||||||
RUN alias tsc=node_modules/typescript/bin/tsc
|
RUN alias tsc=node_modules/typescript/bin/tsc
|
||||||
COPY cic-meta/docker/start_server.sh ./start_server.sh
|
COPY docker/start_server.sh ./start_server.sh
|
||||||
RUN chmod 755 ./start_server.sh
|
RUN chmod 755 ./start_server.sh
|
||||||
ENTRYPOINT ["sh", "./start_server.sh"]
|
ENTRYPOINT ["sh", "./start_server.sh"]
|
||||||
|
|||||||
32
apps/cic-meta/docker/Dockerfile_ci
Normal file
32
apps/cic-meta/docker/Dockerfile_ci
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
# syntax = docker/dockerfile:1.2
|
||||||
|
#FROM node:15.3.0-alpine3.10
|
||||||
|
FROM node:lts-alpine3.14
|
||||||
|
|
||||||
|
WORKDIR /root
|
||||||
|
|
||||||
|
RUN apk add --no-cache postgresql bash
|
||||||
|
|
||||||
|
# copy the dependencies
|
||||||
|
COPY package.json package-lock.json .
|
||||||
|
RUN npm set cache /root/.npm && \
|
||||||
|
npm ci
|
||||||
|
|
||||||
|
COPY webpack.config.js .
|
||||||
|
COPY tsconfig.json .
|
||||||
|
## required to build the cic-client-meta module
|
||||||
|
COPY src/ src/
|
||||||
|
COPY scripts/ scripts/
|
||||||
|
COPY tests/ tests/
|
||||||
|
COPY tests/*.asc /root/pgp/
|
||||||
|
|
||||||
|
## copy runtime configs
|
||||||
|
COPY .config/ /usr/local/etc/cic-meta/
|
||||||
|
#
|
||||||
|
## db migrations
|
||||||
|
COPY docker/db.sh ./db.sh
|
||||||
|
RUN chmod 755 ./db.sh
|
||||||
|
#
|
||||||
|
RUN alias tsc=node_modules/typescript/bin/tsc
|
||||||
|
COPY docker/start_server.sh ./start_server.sh
|
||||||
|
RUN chmod 755 ./start_server.sh
|
||||||
|
ENTRYPOINT ["sh", "./start_server.sh"]
|
||||||
5452
apps/cic-meta/package-lock.json
generated
5452
apps/cic-meta/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -1,22 +1,52 @@
|
|||||||
.cic_notify_variables:
|
.cic_notify_variables:
|
||||||
variables:
|
variables:
|
||||||
APP_NAME: cic-notify
|
APP_NAME: cic-notify
|
||||||
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
|
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||||
|
CONTEXT: apps/$APP_NAME
|
||||||
.cic_notify_changes_target:
|
|
||||||
rules:
|
|
||||||
- changes:
|
|
||||||
- $CONTEXT/$APP_NAME/*
|
|
||||||
|
|
||||||
build-mr-cic-notify:
|
build-mr-cic-notify:
|
||||||
extends:
|
extends:
|
||||||
- .cic_notify_changes_target
|
|
||||||
- .py_build_merge_request
|
- .py_build_merge_request
|
||||||
- .cic_notify_variables
|
- .cic_notify_variables
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/cic-notify/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
|
test-mr-cic-notify:
|
||||||
|
stage: test
|
||||||
|
extends:
|
||||||
|
- .cic_notify_variables
|
||||||
|
cache:
|
||||||
|
key:
|
||||||
|
files:
|
||||||
|
- test_requirements.txt
|
||||||
|
paths:
|
||||||
|
- /root/.cache/pip
|
||||||
|
image: $MR_IMAGE_TAG
|
||||||
|
script:
|
||||||
|
- cd apps/$APP_NAME/
|
||||||
|
- >
|
||||||
|
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
|
||||||
|
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
|
||||||
|
-r test_requirements.txt
|
||||||
|
- export PYTHONPATH=. && pytest -x --cov=cic_notify --cov-fail-under=90 --cov-report term-missing tests
|
||||||
|
needs: ["build-mr-cic-notify"]
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/$APP_NAME/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
build-push-cic-notify:
|
build-push-cic-notify:
|
||||||
extends:
|
extends:
|
||||||
- .py_build_push
|
- .py_build_push
|
||||||
- .cic_notify_variables
|
- .cic_notify_variables
|
||||||
|
rules:
|
||||||
|
- if: $CI_COMMIT_BRANCH == "master"
|
||||||
|
changes:
|
||||||
|
- apps/cic-notify/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ def get_sms_queue_tasks(app, task_prefix='cic_notify.tasks.sms.'):
|
|||||||
for q in qs[host]:
|
for q in qs[host]:
|
||||||
if re.match(re_q, q['name']):
|
if re.match(re_q, q['name']):
|
||||||
host_queues.append((host, q['name'],))
|
host_queues.append((host, q['name'],))
|
||||||
|
|
||||||
task_prefix_len = len(task_prefix)
|
task_prefix_len = len(task_prefix)
|
||||||
queue_tasks = []
|
queue_tasks = []
|
||||||
for (host, queue) in host_queues:
|
for (host, queue) in host_queues:
|
||||||
@@ -35,17 +35,18 @@ def get_sms_queue_tasks(app, task_prefix='cic_notify.tasks.sms.'):
|
|||||||
for task in tasks:
|
for task in tasks:
|
||||||
if len(task) >= task_prefix_len and task[:task_prefix_len] == task_prefix:
|
if len(task) >= task_prefix_len and task[:task_prefix_len] == task_prefix:
|
||||||
queue_tasks.append((queue, task,))
|
queue_tasks.append((queue, task,))
|
||||||
|
|
||||||
return queue_tasks
|
return queue_tasks
|
||||||
|
|
||||||
|
|
||||||
class Api:
|
class Api:
|
||||||
# TODO: Implement callback strategy
|
# TODO: Implement callback strategy
|
||||||
def __init__(self, queue='cic-notify'):
|
def __init__(self, queue=None):
|
||||||
"""
|
"""
|
||||||
:param queue: The queue on which to execute notification tasks
|
:param queue: The queue on which to execute notification tasks
|
||||||
:type queue: str
|
:type queue: str
|
||||||
"""
|
"""
|
||||||
|
self.queue = queue
|
||||||
self.sms_tasks = get_sms_queue_tasks(app)
|
self.sms_tasks = get_sms_queue_tasks(app)
|
||||||
logg.debug('sms tasks {}'.format(self.sms_tasks))
|
logg.debug('sms tasks {}'.format(self.sms_tasks))
|
||||||
|
|
||||||
@@ -61,13 +62,19 @@ class Api:
|
|||||||
"""
|
"""
|
||||||
signatures = []
|
signatures = []
|
||||||
for q in self.sms_tasks:
|
for q in self.sms_tasks:
|
||||||
|
|
||||||
|
if not self.queue:
|
||||||
|
queue = q[0]
|
||||||
|
else:
|
||||||
|
queue = self.queue
|
||||||
|
|
||||||
signature = celery.signature(
|
signature = celery.signature(
|
||||||
q[1],
|
q[1],
|
||||||
[
|
[
|
||||||
message,
|
message,
|
||||||
recipient,
|
recipient,
|
||||||
],
|
],
|
||||||
queue=q[0],
|
queue=queue,
|
||||||
)
|
)
|
||||||
signatures.append(signature)
|
signatures.append(signature)
|
||||||
|
|
||||||
|
|||||||
@@ -87,10 +87,18 @@ for key in config.store.keys():
|
|||||||
module = importlib.import_module(config.store[key])
|
module = importlib.import_module(config.store[key])
|
||||||
if key == 'TASKS_AFRICASTALKING':
|
if key == 'TASKS_AFRICASTALKING':
|
||||||
africastalking_notifier = module.AfricasTalkingNotifier
|
africastalking_notifier = module.AfricasTalkingNotifier
|
||||||
|
|
||||||
|
api_sender_id = config.get('AFRICASTALKING_API_SENDER_ID')
|
||||||
|
logg.debug(f'SENDER ID VALUE IS: {api_sender_id}')
|
||||||
|
|
||||||
|
if not api_sender_id:
|
||||||
|
api_sender_id = None
|
||||||
|
logg.debug(f'SENDER ID RESOLVED TO NONE: {api_sender_id}')
|
||||||
|
|
||||||
africastalking_notifier.initialize(
|
africastalking_notifier.initialize(
|
||||||
config.get('AFRICASTALKING_API_USERNAME'),
|
config.get('AFRICASTALKING_API_USERNAME'),
|
||||||
config.get('AFRICASTALKING_API_KEY'),
|
config.get('AFRICASTALKING_API_KEY'),
|
||||||
config.get('AFRICASTALKING_API_SENDER_ID')
|
api_sender_id
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ import semver
|
|||||||
|
|
||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
version = (0, 4, 0, 'alpha.5')
|
version = (0, 4, 0, 'alpha.7')
|
||||||
|
|
||||||
version_object = semver.VersionInfo(
|
version_object = semver.VersionInfo(
|
||||||
major=version[0],
|
major=version[0],
|
||||||
|
|||||||
@@ -1,38 +1,30 @@
|
|||||||
FROM python:3.8.6-slim-buster
|
# syntax = docker/dockerfile:1.2
|
||||||
|
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
RUN apt-get update && \
|
#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
|
||||||
apt install -y gcc gnupg libpq-dev wget make g++ gnupg bash procps
|
|
||||||
|
|
||||||
WORKDIR /usr/src/cic-notify
|
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
|
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||||
|
COPY requirements.txt .
|
||||||
|
|
||||||
ARG pip_extra_index_url_flag='--index https://pypi.org/simple --extra-index-url https://pip.grassrootseconomics.net:8433'
|
RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
|
||||||
RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
|
pip install --index-url https://pypi.org/simple \
|
||||||
|
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||||
|
-r requirements.txt
|
||||||
|
|
||||||
COPY cic-notify/setup.cfg \
|
COPY . .
|
||||||
cic-notify/setup.py \
|
|
||||||
./
|
|
||||||
|
|
||||||
COPY cic-notify/cic_notify/ ./cic_notify/
|
RUN python setup.py install
|
||||||
|
|
||||||
COPY cic-notify/requirements.txt \
|
# TODO please review..can this go into requirements?
|
||||||
cic-notify/test_requirements.txt \
|
|
||||||
./
|
|
||||||
|
|
||||||
COPY cic-notify/scripts/ scripts/
|
|
||||||
RUN pip install $pip_extra_index_url_flag .[africastalking,notifylog]
|
RUN pip install $pip_extra_index_url_flag .[africastalking,notifylog]
|
||||||
|
|
||||||
COPY cic-notify/tests/ tests/
|
COPY docker/*.sh .
|
||||||
COPY cic-notify/docker/db.sh \
|
|
||||||
cic-notify/docker/start_tasker.sh \
|
|
||||||
/root/
|
|
||||||
|
|
||||||
#RUN apk add postgresql-client
|
|
||||||
#RUN apk add bash
|
|
||||||
|
|
||||||
# ini files in config directory defines the configurable parameters for the application
|
# ini files in config directory defines the configurable parameters for the application
|
||||||
# they can all be overridden by environment variables
|
# they can all be overridden by environment variables
|
||||||
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
COPY cic-notify/.config/ /usr/local/etc/cic-notify/
|
COPY .config/ /usr/local/etc/cic-notify/
|
||||||
COPY cic-notify/cic_notify/db/migrations/ /usr/local/share/cic-notify/alembic/
|
COPY cic_notify/db/migrations/ /usr/local/share/cic-notify/alembic/
|
||||||
|
|
||||||
WORKDIR /root
|
ENTRYPOINT []
|
||||||
|
|||||||
29
apps/cic-notify/docker/Dockerfile_ci
Normal file
29
apps/cic-notify/docker/Dockerfile_ci
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# syntax = docker/dockerfile:1.2
|
||||||
|
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
|
||||||
|
|
||||||
|
#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62
|
||||||
|
|
||||||
|
ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
|
||||||
|
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
|
||||||
|
COPY requirements.txt .
|
||||||
|
|
||||||
|
RUN pip install --index-url https://pypi.org/simple \
|
||||||
|
--extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
|
||||||
|
-r requirements.txt
|
||||||
|
|
||||||
|
COPY . .
|
||||||
|
|
||||||
|
RUN python setup.py install
|
||||||
|
|
||||||
|
# TODO please review..can this go into requirements?
|
||||||
|
RUN pip install $pip_extra_index_url_flag .[africastalking,notifylog]
|
||||||
|
|
||||||
|
COPY docker/*.sh .
|
||||||
|
|
||||||
|
# ini files in config directory defines the configurable parameters for the application
|
||||||
|
# they can all be overridden by environment variables
|
||||||
|
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
|
||||||
|
COPY .config/ /usr/local/etc/cic-notify/
|
||||||
|
COPY cic_notify/db/migrations/ /usr/local/share/cic-notify/alembic/
|
||||||
|
|
||||||
|
ENTRYPOINT []
|
||||||
@@ -1 +1 @@
|
|||||||
cic_base[full_graph]~=0.1.2a61
|
cic_base[full_graph]==0.1.3a3+build.984b5cff
|
||||||
|
|||||||
@@ -2,4 +2,3 @@ pytest~=6.0.1
|
|||||||
pytest-celery~=0.0.0a1
|
pytest-celery~=0.0.0a1
|
||||||
pytest-mock~=3.3.1
|
pytest-mock~=3.3.1
|
||||||
pysqlite3~=0.4.3
|
pysqlite3~=0.4.3
|
||||||
|
|
||||||
|
|||||||
4
apps/cic-ussd/.dockerignore
Normal file
4
apps/cic-ussd/.dockerignore
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
.git
|
||||||
|
.cache
|
||||||
|
.dot
|
||||||
|
**/doc
|
||||||
@@ -1,22 +1,52 @@
|
|||||||
.cic_ussd_variables:
|
.cic_ussd_variables:
|
||||||
variables:
|
variables:
|
||||||
APP_NAME: cic-ussd
|
APP_NAME: cic-ussd
|
||||||
DOCKERFILE_PATH: $APP_NAME/docker/Dockerfile
|
DOCKERFILE_PATH: docker/Dockerfile_ci
|
||||||
|
CONTEXT: apps/$APP_NAME
|
||||||
.cic_ussd_changes_target:
|
|
||||||
rules:
|
|
||||||
- changes:
|
|
||||||
- $CONTEXT/$APP_NAME/*
|
|
||||||
|
|
||||||
build-mr-cic-ussd:
|
build-mr-cic-ussd:
|
||||||
extends:
|
extends:
|
||||||
- .cic_ussd_changes_target
|
|
||||||
- .py_build_merge_request
|
- .py_build_merge_request
|
||||||
- .cic_ussd_variables
|
- .cic_ussd_variables
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/cic-ussd/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
|
test-mr-cic-ussd:
|
||||||
|
stage: test
|
||||||
|
extends:
|
||||||
|
- .cic_ussd_variables
|
||||||
|
cache:
|
||||||
|
key:
|
||||||
|
files:
|
||||||
|
- test_requirements.txt
|
||||||
|
paths:
|
||||||
|
- /root/.cache/pip
|
||||||
|
image: $MR_IMAGE_TAG
|
||||||
|
script:
|
||||||
|
- cd apps/$APP_NAME/
|
||||||
|
- >
|
||||||
|
pip install --extra-index-url https://pip.grassrootseconomics.net:8433
|
||||||
|
--extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
|
||||||
|
-r test_requirements.txt
|
||||||
|
- export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
|
||||||
|
needs: ["build-mr-cic-ussd"]
|
||||||
|
rules:
|
||||||
|
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||||
|
changes:
|
||||||
|
- apps/$APP_NAME/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
build-push-cic-ussd:
|
build-push-cic-ussd:
|
||||||
extends:
|
extends:
|
||||||
- .py_build_push
|
- .py_build_push
|
||||||
- .cic_ussd_variables
|
- .cic_ussd_variables
|
||||||
|
rules:
|
||||||
|
- if: $CI_COMMIT_BRANCH == "master"
|
||||||
|
changes:
|
||||||
|
- apps/cic-ussd/**/*
|
||||||
|
when: always
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
9
apps/cic-ussd/cic_ussd/db/enum.py
Normal file
9
apps/cic-ussd/cic_ussd/db/enum.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# standard import
|
||||||
|
from enum import IntEnum
|
||||||
|
|
||||||
|
|
||||||
|
class AccountStatus(IntEnum):
|
||||||
|
PENDING = 1
|
||||||
|
ACTIVE = 2
|
||||||
|
LOCKED = 3
|
||||||
|
RESET = 4
|
||||||
@@ -1,19 +1,13 @@
|
|||||||
# standard imports
|
# standard imports
|
||||||
from enum import IntEnum
|
|
||||||
|
|
||||||
# third party imports
|
|
||||||
from sqlalchemy import Column, Integer, String
|
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
|
from cic_ussd.db.enum import AccountStatus
|
||||||
from cic_ussd.db.models.base import SessionBase
|
from cic_ussd.db.models.base import SessionBase
|
||||||
from cic_ussd.encoder import check_password_hash, create_password_hash
|
from cic_ussd.encoder import check_password_hash, create_password_hash
|
||||||
|
|
||||||
|
# third party imports
|
||||||
class AccountStatus(IntEnum):
|
from sqlalchemy import Column, Integer, String
|
||||||
PENDING = 1
|
from sqlalchemy.orm.session import Session
|
||||||
ACTIVE = 2
|
|
||||||
LOCKED = 3
|
|
||||||
RESET = 4
|
|
||||||
|
|
||||||
|
|
||||||
class Account(SessionBase):
|
class Account(SessionBase):
|
||||||
@@ -30,6 +24,21 @@ class Account(SessionBase):
|
|||||||
account_status = Column(Integer)
|
account_status = Column(Integer)
|
||||||
preferred_language = Column(String)
|
preferred_language = Column(String)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def get_by_phone_number(phone_number: str, session: Session):
|
||||||
|
"""Retrieves an account from a phone number.
|
||||||
|
:param phone_number: The E164 format of a phone number.
|
||||||
|
:type phone_number:str
|
||||||
|
:param session:
|
||||||
|
:type session:
|
||||||
|
:return: An account object.
|
||||||
|
:rtype: Account
|
||||||
|
"""
|
||||||
|
session = SessionBase.bind_session(session=session)
|
||||||
|
account = session.query(Account).filter_by(phone_number=phone_number).first()
|
||||||
|
SessionBase.release_session(session=session)
|
||||||
|
return account
|
||||||
|
|
||||||
def __init__(self, blockchain_address, phone_number):
|
def __init__(self, blockchain_address, phone_number):
|
||||||
self.blockchain_address = blockchain_address
|
self.blockchain_address = blockchain_address
|
||||||
self.phone_number = phone_number
|
self.phone_number = phone_number
|
||||||
|
|||||||
@@ -275,6 +275,18 @@
|
|||||||
"display_key": "ussd.kenya.new_pin_confirmation",
|
"display_key": "ussd.kenya.new_pin_confirmation",
|
||||||
"name": "new_pin_confirmation",
|
"name": "new_pin_confirmation",
|
||||||
"parent": "metadata_management"
|
"parent": "metadata_management"
|
||||||
|
},
|
||||||
|
"47": {
|
||||||
|
"description": "Year of birth entry menu.",
|
||||||
|
"display_key": "ussd.kenya.enter_date_of_birth",
|
||||||
|
"name": "enter_date_of_birth",
|
||||||
|
"parent": "metadata_management"
|
||||||
|
},
|
||||||
|
"48": {
|
||||||
|
"description": "Pin entry menu for changing year of birth data.",
|
||||||
|
"display_key": "ussd.kenya.dob_edit_pin_authorization",
|
||||||
|
"name": "dob_edit_pin_authorization",
|
||||||
|
"parent": "metadata_management"
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
12
apps/cic-ussd/cic_ussd/metadata/custom.py
Normal file
12
apps/cic-ussd/cic_ussd/metadata/custom.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# standard imports
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import MetadataRequestsHandler
|
||||||
|
|
||||||
|
|
||||||
|
class CustomMetadata(MetadataRequestsHandler):
|
||||||
|
|
||||||
|
def __init__(self, identifier: bytes):
|
||||||
|
super().__init__(cic_type=':cic.custom', identifier=identifier)
|
||||||
12
apps/cic-ussd/cic_ussd/metadata/preferences.py
Normal file
12
apps/cic-ussd/cic_ussd/metadata/preferences.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# standard imports
|
||||||
|
|
||||||
|
# external imports
|
||||||
|
|
||||||
|
# local imports
|
||||||
|
from .base import MetadataRequestsHandler
|
||||||
|
|
||||||
|
|
||||||
|
class PreferencesMetadata(MetadataRequestsHandler):
|
||||||
|
|
||||||
|
def __init__(self, identifier: bytes):
|
||||||
|
super().__init__(cic_type=':cic.preferences', identifier=identifier)
|
||||||
@@ -6,11 +6,13 @@ import logging
|
|||||||
import celery
|
import celery
|
||||||
import i18n
|
import i18n
|
||||||
from cic_eth.api.api_task import Api
|
from cic_eth.api.api_task import Api
|
||||||
|
from sqlalchemy.orm.session import Session
|
||||||
from tinydb.table import Document
|
from tinydb.table import Document
|
||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
# local imports
|
# local imports
|
||||||
from cic_ussd.db.models.account import Account
|
from cic_ussd.db.models.account import Account
|
||||||
|
from cic_ussd.db.models.base import SessionBase
|
||||||
from cic_ussd.db.models.ussd_session import UssdSession
|
from cic_ussd.db.models.ussd_session import UssdSession
|
||||||
from cic_ussd.db.models.task_tracker import TaskTracker
|
from cic_ussd.db.models.task_tracker import TaskTracker
|
||||||
from cic_ussd.menu.ussd_menu import UssdMenu
|
from cic_ussd.menu.ussd_menu import UssdMenu
|
||||||
@@ -22,15 +24,18 @@ from cic_ussd.validator import check_known_user, validate_response_type
|
|||||||
logg = logging.getLogger()
|
logg = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
def add_tasks_to_tracker(task_uuid):
|
def add_tasks_to_tracker(session, task_uuid: str):
|
||||||
"""
|
"""This function takes tasks spawned over api interfaces and records their creation time for tracking.
|
||||||
This function takes tasks spawned over api interfaces and records their creation time for tracking.
|
:param session:
|
||||||
|
:type session:
|
||||||
:param task_uuid: The uuid for an initiated task.
|
:param task_uuid: The uuid for an initiated task.
|
||||||
:type task_uuid: str
|
:type task_uuid: str
|
||||||
"""
|
"""
|
||||||
|
session = SessionBase.bind_session(session=session)
|
||||||
task_record = TaskTracker(task_uuid=task_uuid)
|
task_record = TaskTracker(task_uuid=task_uuid)
|
||||||
TaskTracker.session.add(task_record)
|
session.add(task_record)
|
||||||
TaskTracker.session.commit()
|
session.flush()
|
||||||
|
SessionBase.release_session(session=session)
|
||||||
|
|
||||||
|
|
||||||
def define_response_with_content(headers: list, response: str) -> tuple:
|
def define_response_with_content(headers: list, response: str) -> tuple:
|
||||||
@@ -95,6 +100,7 @@ def create_or_update_session(
|
|||||||
service_code: str,
|
service_code: str,
|
||||||
user_input: str,
|
user_input: str,
|
||||||
current_menu: str,
|
current_menu: str,
|
||||||
|
session,
|
||||||
session_data: Optional[dict] = None) -> InMemoryUssdSession:
|
session_data: Optional[dict] = None) -> InMemoryUssdSession:
|
||||||
"""
|
"""
|
||||||
Handles the creation or updating of session as necessary.
|
Handles the creation or updating of session as necessary.
|
||||||
@@ -108,12 +114,15 @@ def create_or_update_session(
|
|||||||
:type user_input: str
|
:type user_input: str
|
||||||
:param current_menu: Menu name that is currently being displayed on the ussd session
|
:param current_menu: Menu name that is currently being displayed on the ussd session
|
||||||
:type current_menu: str
|
:type current_menu: str
|
||||||
|
:param session:
|
||||||
|
:type session:
|
||||||
:param session_data: Any additional data that was persisted during the user's interaction with the system.
|
:param session_data: Any additional data that was persisted during the user's interaction with the system.
|
||||||
:type session_data: dict.
|
:type session_data: dict.
|
||||||
:return: ussd session object
|
:return: ussd session object
|
||||||
:rtype: InMemoryUssdSession
|
:rtype: InMemoryUssdSession
|
||||||
"""
|
"""
|
||||||
existing_ussd_session = UssdSession.session.query(UssdSession).filter_by(
|
session = SessionBase.bind_session(session=session)
|
||||||
|
existing_ussd_session = session.query(UssdSession).filter_by(
|
||||||
external_session_id=external_session_id).first()
|
external_session_id=external_session_id).first()
|
||||||
|
|
||||||
if existing_ussd_session:
|
if existing_ussd_session:
|
||||||
@@ -132,20 +141,25 @@ def create_or_update_session(
|
|||||||
current_menu=current_menu,
|
current_menu=current_menu,
|
||||||
session_data=session_data
|
session_data=session_data
|
||||||
)
|
)
|
||||||
|
SessionBase.release_session(session=session)
|
||||||
return ussd_session
|
return ussd_session
|
||||||
|
|
||||||
|
|
||||||
def get_account_status(phone_number) -> str:
|
def get_account_status(phone_number, session: Session) -> str:
|
||||||
"""Get the status of a user's account.
|
"""Get the status of a user's account.
|
||||||
:param phone_number: The phone number to be checked.
|
:param phone_number: The phone number to be checked.
|
||||||
:type phone_number: str
|
:type phone_number: str
|
||||||
|
:param session:
|
||||||
|
:type session:
|
||||||
:return: The user account status.
|
:return: The user account status.
|
||||||
:rtype: str
|
:rtype: str
|
||||||
"""
|
"""
|
||||||
user = Account.session.query(Account).filter_by(phone_number=phone_number).first()
|
session = SessionBase.bind_session(session=session)
|
||||||
status = user.get_account_status()
|
account = Account.get_by_phone_number(phone_number=phone_number, session=session)
|
||||||
Account.session.add(user)
|
status = account.get_account_status()
|
||||||
Account.session.commit()
|
session.add(account)
|
||||||
|
session.flush()
|
||||||
|
SessionBase.release_session(session=session)
|
||||||
|
|
||||||
return status
|
return status
|
||||||
|
|
||||||
@@ -165,6 +179,7 @@ def initiate_account_creation_request(chain_str: str,
|
|||||||
external_session_id: str,
|
external_session_id: str,
|
||||||
phone_number: str,
|
phone_number: str,
|
||||||
service_code: str,
|
service_code: str,
|
||||||
|
session,
|
||||||
user_input: str) -> str:
|
user_input: str) -> str:
|
||||||
"""This function issues a task to create a blockchain account on cic-eth. It then creates a record of the ussd
|
"""This function issues a task to create a blockchain account on cic-eth. It then creates a record of the ussd
|
||||||
session corresponding to the creation of the account and returns a response denoting that the user's account is
|
session corresponding to the creation of the account and returns a response denoting that the user's account is
|
||||||
@@ -177,6 +192,8 @@ def initiate_account_creation_request(chain_str: str,
|
|||||||
:type phone_number: str
|
:type phone_number: str
|
||||||
:param service_code: The service code dialed.
|
:param service_code: The service code dialed.
|
||||||
:type service_code: str
|
:type service_code: str
|
||||||
|
:param session:
|
||||||
|
:type session:
|
||||||
:param user_input: The input entered by the user.
|
:param user_input: The input entered by the user.
|
||||||
:type user_input: str
|
:type user_input: str
|
||||||
:return: A response denoting that the account is being created.
|
:return: A response denoting that the account is being created.
|
||||||
@@ -190,7 +207,7 @@ def initiate_account_creation_request(chain_str: str,
|
|||||||
creation_task_id = cic_eth_api.create_account().id
|
creation_task_id = cic_eth_api.create_account().id
|
||||||
|
|
||||||
# record task initiation time
|
# record task initiation time
|
||||||
add_tasks_to_tracker(task_uuid=creation_task_id)
|
add_tasks_to_tracker(task_uuid=creation_task_id, session=session)
|
||||||
|
|
||||||
# cache account creation data
|
# cache account creation data
|
||||||
cache_account_creation_task_id(phone_number=phone_number, task_id=creation_task_id)
|
cache_account_creation_task_id(phone_number=phone_number, task_id=creation_task_id)
|
||||||
@@ -204,6 +221,7 @@ def initiate_account_creation_request(chain_str: str,
|
|||||||
phone=phone_number,
|
phone=phone_number,
|
||||||
service_code=service_code,
|
service_code=service_code,
|
||||||
current_menu=current_menu.get('name'),
|
current_menu=current_menu.get('name'),
|
||||||
|
session=session,
|
||||||
user_input=user_input)
|
user_input=user_input)
|
||||||
|
|
||||||
# define response to relay to user
|
# define response to relay to user
|
||||||
@@ -268,12 +286,14 @@ def cache_account_creation_task_id(phone_number: str, task_id: str):
|
|||||||
redis_cache.persist(name=task_id)
|
redis_cache.persist(name=task_id)
|
||||||
|
|
||||||
|
|
||||||
def process_current_menu(ussd_session: Optional[dict], user: Account, user_input: str) -> Document:
|
def process_current_menu(account: Account, session: Session, ussd_session: Optional[dict], user_input: str) -> Document:
|
||||||
"""This function checks user input and returns a corresponding ussd menu
|
"""This function checks user input and returns a corresponding ussd menu
|
||||||
:param ussd_session: An in db ussd session object.
|
:param ussd_session: An in db ussd session object.
|
||||||
:type ussd_session: UssdSession
|
:type ussd_session: UssdSession
|
||||||
:param user: A user object.
|
:param account: A account object.
|
||||||
:type user: Account
|
:type account: Account
|
||||||
|
:param session:
|
||||||
|
:type session:
|
||||||
:param user_input: The user's input.
|
:param user_input: The user's input.
|
||||||
:type user_input: str
|
:type user_input: str
|
||||||
:return: An in memory ussd menu object.
|
:return: An in memory ussd menu object.
|
||||||
@@ -285,7 +305,13 @@ def process_current_menu(ussd_session: Optional[dict], user: Account, user_input
|
|||||||
else:
|
else:
|
||||||
# get current state
|
# get current state
|
||||||
latest_input = get_latest_input(user_input=user_input)
|
latest_input = get_latest_input(user_input=user_input)
|
||||||
current_menu = process_request(ussd_session=ussd_session, user_input=latest_input, user=user)
|
session = SessionBase.bind_session(session=session)
|
||||||
|
current_menu = process_request(
|
||||||
|
account=account,
|
||||||
|
session=session,
|
||||||
|
ussd_session=ussd_session,
|
||||||
|
user_input=latest_input)
|
||||||
|
SessionBase.release_session(session=session)
|
||||||
return current_menu
|
return current_menu
|
||||||
|
|
||||||
|
|
||||||
@@ -294,6 +320,7 @@ def process_menu_interaction_requests(chain_str: str,
|
|||||||
phone_number: str,
|
phone_number: str,
|
||||||
queue: str,
|
queue: str,
|
||||||
service_code: str,
|
service_code: str,
|
||||||
|
session,
|
||||||
user_input: str) -> str:
|
user_input: str) -> str:
|
||||||
"""This function handles requests intended for interaction with ussd menu, it checks whether a user matching the
|
"""This function handles requests intended for interaction with ussd menu, it checks whether a user matching the
|
||||||
provided phone number exists and in the absence of which it creates an account for the user.
|
provided phone number exists and in the absence of which it creates an account for the user.
|
||||||
@@ -308,25 +335,29 @@ def process_menu_interaction_requests(chain_str: str,
|
|||||||
:type queue: str
|
:type queue: str
|
||||||
:param service_code: The service dialed by the user making the request.
|
:param service_code: The service dialed by the user making the request.
|
||||||
:type service_code: str
|
:type service_code: str
|
||||||
|
:param session:
|
||||||
|
:type session:
|
||||||
:param user_input: The inputs entered by the user.
|
:param user_input: The inputs entered by the user.
|
||||||
:type user_input: str
|
:type user_input: str
|
||||||
:return: A response based on the request received.
|
:return: A response based on the request received.
|
||||||
:rtype: str
|
:rtype: str
|
||||||
"""
|
"""
|
||||||
# check whether the user exists
|
# check whether the user exists
|
||||||
if not check_known_user(phone=phone_number):
|
if not check_known_user(phone_number=phone_number, session=session):
|
||||||
response = initiate_account_creation_request(chain_str=chain_str,
|
response = initiate_account_creation_request(chain_str=chain_str,
|
||||||
external_session_id=external_session_id,
|
external_session_id=external_session_id,
|
||||||
phone_number=phone_number,
|
phone_number=phone_number,
|
||||||
service_code=service_code,
|
service_code=service_code,
|
||||||
|
session=session,
|
||||||
user_input=user_input)
|
user_input=user_input)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# get user
|
# get account
|
||||||
user = Account.session.query(Account).filter_by(phone_number=phone_number).first()
|
session = SessionBase.bind_session(session=session)
|
||||||
|
account = Account.get_by_phone_number(phone_number=phone_number, session=session)
|
||||||
|
|
||||||
# retrieve and cache user's metadata
|
# retrieve and cache user's metadata
|
||||||
blockchain_address = user.blockchain_address
|
blockchain_address = account.blockchain_address
|
||||||
s_query_person_metadata = celery.signature(
|
s_query_person_metadata = celery.signature(
|
||||||
'cic_ussd.tasks.metadata.query_person_metadata',
|
'cic_ussd.tasks.metadata.query_person_metadata',
|
||||||
[blockchain_address]
|
[blockchain_address]
|
||||||
@@ -334,24 +365,25 @@ def process_menu_interaction_requests(chain_str: str,
|
|||||||
s_query_person_metadata.apply_async(queue='cic-ussd')
|
s_query_person_metadata.apply_async(queue='cic-ussd')
|
||||||
|
|
||||||
# find any existing ussd session
|
# find any existing ussd session
|
||||||
existing_ussd_session = UssdSession.session.query(UssdSession).filter_by(
|
existing_ussd_session = session.query(UssdSession).filter_by(external_session_id=external_session_id).first()
|
||||||
external_session_id=external_session_id).first()
|
|
||||||
|
|
||||||
# validate user inputs
|
# validate user inputs
|
||||||
if existing_ussd_session:
|
if existing_ussd_session:
|
||||||
current_menu = process_current_menu(
|
current_menu = process_current_menu(
|
||||||
|
account=account,
|
||||||
|
session=session,
|
||||||
ussd_session=existing_ussd_session.to_json(),
|
ussd_session=existing_ussd_session.to_json(),
|
||||||
user=user,
|
|
||||||
user_input=user_input
|
user_input=user_input
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
current_menu = process_current_menu(
|
current_menu = process_current_menu(
|
||||||
|
account=account,
|
||||||
|
session=session,
|
||||||
ussd_session=None,
|
ussd_session=None,
|
||||||
user=user,
|
|
||||||
user_input=user_input
|
user_input=user_input
|
||||||
)
|
)
|
||||||
|
|
||||||
last_ussd_session = retrieve_most_recent_ussd_session(phone_number=user.phone_number)
|
last_ussd_session = retrieve_most_recent_ussd_session(phone_number=account.phone_number, session=session)
|
||||||
|
|
||||||
if last_ussd_session:
|
if last_ussd_session:
|
||||||
# create or update the ussd session as appropriate
|
# create or update the ussd session as appropriate
|
||||||
@@ -361,6 +393,7 @@ def process_menu_interaction_requests(chain_str: str,
|
|||||||
service_code=service_code,
|
service_code=service_code,
|
||||||
user_input=user_input,
|
user_input=user_input,
|
||||||
current_menu=current_menu.get('name'),
|
current_menu=current_menu.get('name'),
|
||||||
|
session=session,
|
||||||
session_data=last_ussd_session.session_data
|
session_data=last_ussd_session.session_data
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
@@ -369,15 +402,17 @@ def process_menu_interaction_requests(chain_str: str,
|
|||||||
phone=phone_number,
|
phone=phone_number,
|
||||||
service_code=service_code,
|
service_code=service_code,
|
||||||
user_input=user_input,
|
user_input=user_input,
|
||||||
current_menu=current_menu.get('name')
|
current_menu=current_menu.get('name'),
|
||||||
|
session=session
|
||||||
)
|
)
|
||||||
|
|
||||||
# define appropriate response
|
# define appropriate response
|
||||||
response = custom_display_text(
|
response = custom_display_text(
|
||||||
|
account=account,
|
||||||
display_key=current_menu.get('display_key'),
|
display_key=current_menu.get('display_key'),
|
||||||
menu_name=current_menu.get('name'),
|
menu_name=current_menu.get('name'),
|
||||||
|
session=session,
|
||||||
ussd_session=ussd_session.to_json(),
|
ussd_session=ussd_session.to_json(),
|
||||||
user=user
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# check that the response from the processor is valid
|
# check that the response from the processor is valid
|
||||||
@@ -386,21 +421,26 @@ def process_menu_interaction_requests(chain_str: str,
|
|||||||
|
|
||||||
# persist session to db
|
# persist session to db
|
||||||
persist_session_to_db_task(external_session_id=external_session_id, queue=queue)
|
persist_session_to_db_task(external_session_id=external_session_id, queue=queue)
|
||||||
|
SessionBase.release_session(session=session)
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
|
|
||||||
def reset_pin(phone_number: str) -> str:
|
def reset_pin(phone_number: str, session: Session) -> str:
|
||||||
"""Reset account status from Locked to Pending.
|
"""Reset account status from Locked to Pending.
|
||||||
:param phone_number: The phone number belonging to the account to be unlocked.
|
:param phone_number: The phone number belonging to the account to be unlocked.
|
||||||
:type phone_number: str
|
:type phone_number: str
|
||||||
|
:param session:
|
||||||
|
:type session:
|
||||||
:return: The status of the pin reset.
|
:return: The status of the pin reset.
|
||||||
:rtype: str
|
:rtype: str
|
||||||
"""
|
"""
|
||||||
user = Account.session.query(Account).filter_by(phone_number=phone_number).first()
|
session = SessionBase.bind_session(session=session)
|
||||||
user.reset_account_pin()
|
account = Account.get_by_phone_number(phone_number=phone_number, session=session)
|
||||||
Account.session.add(user)
|
account.reset_account_pin()
|
||||||
Account.session.commit()
|
session.add(account)
|
||||||
|
session.flush()
|
||||||
|
SessionBase.release_session(session=session)
|
||||||
|
|
||||||
response = f'Pin reset for user {phone_number} is successful!'
|
response = f'Pin reset for user {phone_number} is successful!'
|
||||||
return response
|
return response
|
||||||
@@ -438,11 +478,13 @@ def update_ussd_session(
|
|||||||
return session
|
return session
|
||||||
|
|
||||||
|
|
||||||
def save_to_in_memory_ussd_session_data(queue: str, session_data: dict, ussd_session: dict):
|
def save_to_in_memory_ussd_session_data(queue: str, session: Session, session_data: dict, ussd_session: dict):
|
||||||
"""This function is used to save information to the session data attribute of a ussd session object in the redis
|
"""This function is used to save information to the session data attribute of a ussd session object in the redis
|
||||||
cache.
|
cache.
|
||||||
:param queue: The queue on which the celery task should run.
|
:param queue: The queue on which the celery task should run.
|
||||||
:type queue: str
|
:type queue: str
|
||||||
|
:param session:
|
||||||
|
:type session:
|
||||||
:param session_data: A dictionary containing data for a specific ussd session in redis that needs to be saved
|
:param session_data: A dictionary containing data for a specific ussd session in redis that needs to be saved
|
||||||
temporarily.
|
temporarily.
|
||||||
:type session_data: dict
|
:type session_data: dict
|
||||||
@@ -473,7 +515,7 @@ def save_to_in_memory_ussd_session_data(queue: str, session_data: dict, ussd_ses
|
|||||||
service_code=in_redis_ussd_session.get('service_code'),
|
service_code=in_redis_ussd_session.get('service_code'),
|
||||||
user_input=in_redis_ussd_session.get('user_input'),
|
user_input=in_redis_ussd_session.get('user_input'),
|
||||||
current_menu=in_redis_ussd_session.get('state'),
|
current_menu=in_redis_ussd_session.get('state'),
|
||||||
|
session=session,
|
||||||
session_data=session_data
|
session_data=session_data
|
||||||
)
|
)
|
||||||
persist_session_to_db_task(external_session_id=external_session_id, queue=queue)
|
persist_session_to_db_task(external_session_id=external_session_id, queue=queue)
|
||||||
|
|
||||||
|
|||||||
@@ -8,6 +8,10 @@ import phonenumbers
|
|||||||
from cic_ussd.db.models.account import Account
|
from cic_ussd.db.models.account import Account
|
||||||
|
|
||||||
|
|
||||||
|
class E164Format:
|
||||||
|
region = None
|
||||||
|
|
||||||
|
|
||||||
def process_phone_number(phone_number: str, region: str):
|
def process_phone_number(phone_number: str, region: str):
|
||||||
"""This function parses any phone number for the provided region
|
"""This function parses any phone number for the provided region
|
||||||
:param phone_number: A string with a phone number.
|
:param phone_number: A string with a phone number.
|
||||||
@@ -29,19 +33,5 @@ def process_phone_number(phone_number: str, region: str):
|
|||||||
|
|
||||||
return parsed_phone_number
|
return parsed_phone_number
|
||||||
|
|
||||||
|
|
||||||
def get_user_by_phone_number(phone_number: str) -> Optional[Account]:
|
|
||||||
"""This function queries the database for a user based on the provided phone number.
|
|
||||||
:param phone_number: A valid phone number.
|
|
||||||
:type phone_number: str
|
|
||||||
:return: A user object matching a given phone number
|
|
||||||
:rtype: Account|None
|
|
||||||
"""
|
|
||||||
# consider adding region to user's metadata
|
|
||||||
phone_number = process_phone_number(phone_number=phone_number, region='KE')
|
|
||||||
user = Account.session.query(Account).filter_by(phone_number=phone_number).first()
|
|
||||||
return user
|
|
||||||
|
|
||||||
|
|
||||||
class Support:
|
class Support:
|
||||||
phone_number = None
|
phone_number = None
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user