Merge remote-tracking branch 'origin/master' into bvander/deploy-to-k8s-dev
commit 01c6f06b4b
@@ -1,18 +1,39 @@
include:
  - local: 'ci_templates/.cic-template.yml'
  - local: 'apps/contract-migration/.gitlab-ci.yml'
  #- local: 'ci_templates/.cic-template.yml' #kaniko build templates
  # these includes are app specific unit tests
  - local: 'apps/cic-eth/.gitlab-ci.yml'
  - local: 'apps/cic-ussd/.gitlab-ci.yml'
  - local: 'apps/cic-notify/.gitlab-ci.yml'
  - local: 'apps/cic-meta/.gitlab-ci.yml'
  - local: 'apps/cic-cache/.gitlab-ci.yml'
  - local: 'apps/data-seeding/.gitlab-ci.yml'
  #- local: 'apps/contract-migration/.gitlab-ci.yml'
  #- local: 'apps/data-seeding/.gitlab-ci.yml'

stages:
  - build
  - test
  - deploy

image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/docker-with-compose:latest

variables:
  DOCKER_BUILDKIT: "1"
  COMPOSE_DOCKER_CLI_BUILD: "1"
  CI_DEBUG_TRACE: "true"

before_script:
  - docker login -u gitlab-ci-token -p $CI_JOB_TOKEN $CI_REGISTRY

# runs on protected branches and pushes to repo
build-push:
  stage: build
  tags:
    - integration
  script:
    - TAG=$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA sh ./scripts/build-push.sh
  rules:
    - if: $CI_COMMIT_REF_PROTECTED == "true"
      when: always

deploy-k8s-dev:
  stage: deploy
@@ -1,34 +0,0 @@
# The solc image messes up the alpine environment, so we have to go all over again
FROM python:3.8.6-slim-buster

LABEL authors="Louis Holbrook <dev@holbrook.no> 0826EDA1702D1E87C6E2875121D2E7BB88C2A746"
LABEL spdx-license-identifier="GPL-3.0-or-later"
LABEL description="Base layer for building development images for the cic component suite"

RUN apt-get update && \
    apt-get install -y git gcc g++ libpq-dev && \
    apt-get install -y vim gawk jq telnet openssl iputils-ping curl wget gnupg socat bash procps make python2 postgresql-client


RUN echo installing nodejs tooling

COPY ./dev/nvm.sh /root/

# Install nvm with node and npm
# https://stackoverflow.com/questions/25899912/how-to-install-nvm-in-docker
ENV NVM_DIR /root/.nvm
ENV NODE_VERSION 15.3.0
ENV BANCOR_NODE_VERSION 10.16.0

RUN wget -qO- https://raw.githubusercontent.com/nvm-sh/nvm/v0.37.2/install.sh | bash \
    && . $NVM_DIR/nvm.sh \
    && nvm install $NODE_VERSION \
    && nvm alias default $NODE_VERSION \
    && nvm use $NODE_VERSION \
    # So many ridiculously stupid issues with node in docker that take oceans of absolutely wasted time to resolve
    # owner of these files is "1001" by default - wtf
    && chown -R root:root "$NVM_DIR/versions/node/v$NODE_VERSION"

ENV NODE_PATH $NVM_DIR/versions/node//v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node//v$NODE_VERSION/bin:$PATH
@@ -1 +0,0 @@
## this is an example base image if we wanted one for all the other apps. It's just OS level things
@@ -1,52 +1,17 @@
.cic_cache_variables:
  variables:
    APP_NAME: cic-cache
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-cic-cache:
  extends:
    - .py_build_merge_request
    - .cic_cache_variables
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-cache/**/*
      when: always

test-mr-cic-cache:
  stage: test
  extends:
    - .cic_cache_variables
  cache:
    key:
      files:
        - test_requirements.txt
    paths:
      - /root/.cache/pip
  image: $MR_IMAGE_TAG
  script:
    - cd apps/$APP_NAME/
    - >
      pip install --extra-index-url https://pip.grassrootseconomics.net:8433
      --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
      -r test_requirements.txt
    - export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
  needs: ["build-mr-cic-cache"]
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always

build-push-cic-cache:
  extends:
    - .py_build_push
    - .cic_cache_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/cic-cache/**/*
      when: always


build-test-cic-cache:
  stage: test
  tags:
    - integration
  variables:
    APP_NAME: cic-cache
    MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  script:
    - cd apps/cic-cache
    - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
    - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
  allow_failure: true
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always
@@ -0,0 +1 @@
# CIC-CACHE
@@ -5,7 +5,7 @@ version = (
        0,
        2,
        1,
        'alpha.1',
        'alpha.2',
        )

version_object = semver.VersionInfo(
@@ -1,38 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev

# RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2b9

COPY requirements.txt .
#RUN pip install $pip_extra_index_url_flag -r test_requirements.txt
#RUN pip install $pip_extra_index_url_flag .
#RUN pip install .[server]

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG EXTRA_PIP_ARGS=""
RUN pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL $EXTRA_PIP_ARGS \
    -r requirements.txt

COPY . .

RUN python setup.py install

# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-cache/

# for db migrations
RUN git clone https://github.com/vishnubob/wait-for-it.git /usr/local/bin/wait-for-it/
COPY cic_cache/db/migrations/ /usr/local/share/cic-cache/alembic/

COPY /docker/start_tracker.sh ./start_tracker.sh
COPY /docker/db.sh ./db.sh
RUN chmod 755 ./*.sh
# Tracker
# ENTRYPOINT ["/usr/local/bin/cic-cache-tracker", "-vv"]
# Server
# ENTRYPOINT [ "/usr/local/bin/uwsgi", "--wsgi-file", "/usr/local/lib/python3.8/site-packages/cic_cache/runnable/server.py", "--http", ":80", "--pyargv", "-vv" ]
ENTRYPOINT []
apps/cic-cache/docker/run_tests.sh (new file)
@@ -0,0 +1,10 @@
#! /bin/bash

set -e

pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
    --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
    -r test_requirements.txt

export PYTHONPATH=. && pytest -x --cov=cic_cache --cov-fail-under=90 --cov-report term-missing tests
@@ -8,8 +8,8 @@ semver==2.13.0
psycopg2==2.8.6
celery==4.4.7
redis==3.5.3
chainsyncer[sql]>=0.0.6a1,<0.1.0
chainsyncer[sql]>=0.0.6a3,<0.1.0
erc20-faucet>=0.3.2a1, <0.4.0
chainlib-eth>=0.0.9a3,<0.1.0
chainlib>=0.0.9a2,<0.1.0
chainlib-eth>=0.0.9a7,<0.1.0
chainlib>=0.0.9a3,<0.1.0
eth-address-index>=0.2.3a1,<0.3.0
@@ -1,52 +1,16 @@
.cic_eth_variables:
  variables:
    APP_NAME: cic-eth
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-cic-eth:
  extends:
    - .cic_eth_variables
    - .py_build_target_dev
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-eth/**/*
      when: always

test-mr-cic-eth:
  stage: test
  extends:
    - .cic_eth_variables
  cache:
    key:
      files:
        - test_requirements.txt
    paths:
      - /root/.cache/pip
  image: $MR_IMAGE_TAG
  script:
    - cd apps/$APP_NAME/
    - >
      pip install --extra-index-url https://pip.grassrootseconomics.net:8433
      --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
      -r admin_requirements.txt
      -r services_requirements.txt
      -r test_requirements.txt
    - export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
  needs: ["build-mr-cic-eth"]
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-eth/**/*
      when: always

build-push-cic-eth:
  extends:
    - .py_build_push
    - .cic_eth_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/cic-eth/**/*
      when: always

build-test-cic-eth:
  stage: test
  tags:
    - integration
  variables:
    APP_NAME: cic-eth
    MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  script:
    - cd apps/cic-eth
    - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
    - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
  #rules:
  #- if: $CI_PIPELINE_SOURCE == "merge_request_event"
  #  changes:
  #    - apps/$APP_NAME/**/*
  #  when: always
@@ -1,5 +1,5 @@
SQLAlchemy==1.3.20
cic-eth-registry>=0.6.1a2,<0.7.0
hexathon~=0.0.1a8
chainqueue>=0.0.3a2,<0.1.0
chainqueue>=0.0.4a6,<0.1.0
eth-erc20>=0.1.2a2,<0.2.0
@@ -4,7 +4,6 @@ import logging

# external imports
import celery
from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec
from hexathon import (
    add_0x,
@@ -20,18 +19,17 @@ from cic_eth.task import (
    CriticalSQLAlchemyTask,
)
from cic_eth.error import LockedError
from cic_eth.encode import (
    tx_normalize,
    ZERO_ADDRESS_NORMAL,
)

celery_app = celery.current_app
logg = logging.getLogger()


def normalize_address(a):
    if a == None:
        return None
    return add_0x(hex_uniform(strip_0x(a)))

@celery_app.task(base=CriticalSQLAlchemyTask)
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL, tx_hash=None):
def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, flags=LockEnum.ALL, tx_hash=None):
    """Task wrapper to set arbitrary locks

    :param chain_str: Chain spec string representation
@@ -43,7 +41,7 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = '::'
    if chain_spec_dict != None:
        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
@@ -53,7 +51,7 @@ def lock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.AL


@celery_app.task(base=CriticalSQLAlchemyTask)
def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.ALL):
def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, flags=LockEnum.ALL):
    """Task wrapper to reset arbitrary locks

    :param chain_str: Chain spec string representation
@@ -65,7 +63,7 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = '::'
    if chain_spec_dict != None:
        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
@@ -75,7 +73,7 @@ def unlock(chained_input, chain_spec_dict, address=ZERO_ADDRESS, flags=LockEnum.


@celery_app.task(base=CriticalSQLAlchemyTask)
def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None):
def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, tx_hash=None):
    """Task wrapper to set send lock

    :param chain_str: Chain spec string representation
@@ -85,7 +83,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    r = Lock.set(chain_str, LockEnum.SEND, address=address, tx_hash=tx_hash)
    logg.debug('Send locked for {}, flag now {}'.format(address, r))
@@ -93,7 +91,7 @@ def lock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None


@celery_app.task(base=CriticalSQLAlchemyTask)
def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL):
    """Task wrapper to reset send lock

    :param chain_str: Chain spec string representation
@@ -103,7 +101,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    r = Lock.reset(chain_str, LockEnum.SEND, address=address)
    logg.debug('Send unlocked for {}, flag now {}'.format(address, r))
@@ -111,7 +109,7 @@ def unlock_send(chained_input, chain_spec_dict, address=ZERO_ADDRESS):


@celery_app.task(base=CriticalSQLAlchemyTask)
def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=None):
def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL, tx_hash=None):
    """Task wrapper to set queue direct lock

    :param chain_str: Chain spec string representation
@@ -121,7 +119,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    r = Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
    logg.debug('Queue direct locked for {}, flag now {}'.format(address, r))
@@ -129,7 +127,7 @@ def lock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS, tx_hash=Non


@celery_app.task(base=CriticalSQLAlchemyTask)
def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS_NORMAL):
    """Task wrapper to reset queue direct lock

    :param chain_str: Chain spec string representation
@@ -139,7 +137,7 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):
    :returns: New lock state for address
    :rtype: number
    """
    address = normalize_address(address)
    address = tx_normalize.wallet_address(address)
    chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    r = Lock.reset(chain_str, LockEnum.QUEUE, address=address)
    logg.debug('Queue direct unlocked for {}, flag now {}'.format(address, r))
@@ -148,12 +146,13 @@ def unlock_queue(chained_input, chain_spec_dict, address=ZERO_ADDRESS):

@celery_app.task(base=CriticalSQLAlchemyTask)
def check_lock(chained_input, chain_spec_dict, lock_flags, address=None):
    address = normalize_address(address)
    if address != None:
        address = tx_normalize.wallet_address(address)
    chain_str = '::'
    if chain_spec_dict != None:
        chain_str = str(ChainSpec.from_dict(chain_spec_dict))
    session = SessionBase.create_session()
    r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS, session=session)
    r = Lock.check(chain_str, lock_flags, address=ZERO_ADDRESS_NORMAL, session=session)
    if address != None:
        r |= Lock.check(chain_str, lock_flags, address=address, session=session)
    if r > 0:
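A brief illustration of what "normalizing" an address means in the hunks above (the value and the snippet are illustrative only, not part of the commit; the removed normalize_address helper produced a lowercased 0x-prefixed form via hexathon, while the canonical form used now is whatever chainqueue's TxHexNormalizer defines):

    # Illustration only, hypothetical address value
    checksummed = '0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C'
    # roughly what the removed add_0x(hex_uniform(strip_0x(a))) round trip produced
    normalized = '0x' + checksummed[2:].lower()
    print(normalized)  # 0xeb3907ecad74a0013c259d5874ae7f22dcbcc95c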
@@ -33,6 +33,7 @@ from cic_eth.admin.ctrl import (
from cic_eth.queue.tx import queue_create
from cic_eth.eth.gas import create_check_gas_task
from cic_eth.task import BaseTask
from cic_eth.encode import tx_normalize

celery_app = celery.current_app
logg = logging.getLogger()
@@ -73,7 +74,7 @@ def shift_nonce(self, chainspec_dict, tx_hash_orig_hex, delta=1):

    set_cancel(chain_spec, strip_0x(tx['hash']), manual=True, session=session)

    query_address = add_0x(hex_uniform(strip_0x(address))) # aaaaargh
    query_address = tx_normalize.wallet_address(address)
    q = session.query(Otx)
    q = q.join(TxCache)
    q = q.filter(TxCache.sender==query_address)
@@ -32,7 +32,6 @@ from chainqueue.db.enum import (
    status_str,
)
from chainqueue.error import TxStateChangeError
from chainqueue.sql.query import get_tx
from eth_erc20 import ERC20

# local imports
@@ -40,6 +39,7 @@ from cic_eth.db.models.base import SessionBase
from cic_eth.db.models.role import AccountRole
from cic_eth.db.models.nonce import Nonce
from cic_eth.error import InitializationError
from cic_eth.queue.query import get_tx_local

app = celery.current_app

@@ -284,7 +284,7 @@ class AdminApi:
        tx_hash_hex = None
        session = SessionBase.create_session()
        for k in txs.keys():
            tx_dict = get_tx(chain_spec, k, session=session)
            tx_dict = get_tx_local(chain_spec, k, session=session)
            if tx_dict['nonce'] == nonce:
                tx_hash_hex = k
        session.close()
@@ -4,12 +4,12 @@ import logging

# third-party imports
from sqlalchemy import Column, String, Integer, DateTime, ForeignKey
from chainlib.eth.constant import ZERO_ADDRESS
from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx

# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.encode import ZERO_ADDRESS_NORMAL

logg = logging.getLogger()

@@ -37,7 +37,7 @@ class Lock(SessionBase):


    @staticmethod
    def set(chain_str, flags, address=ZERO_ADDRESS, session=None, tx_hash=None):
    def set(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None, tx_hash=None):
        """Sets flags associated with the given address and chain.

        If a flags entry does not exist it is created.
@@ -90,7 +90,7 @@ class Lock(SessionBase):


    @staticmethod
    def reset(chain_str, flags, address=ZERO_ADDRESS, session=None):
    def reset(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None):
        """Resets flags associated with the given address and chain.

        If the resulting flags entry value is 0, the entry will be deleted.
@@ -134,7 +134,7 @@ class Lock(SessionBase):


    @staticmethod
    def check(chain_str, flags, address=ZERO_ADDRESS, session=None):
    def check(chain_str, flags, address=ZERO_ADDRESS_NORMAL, session=None):
        """Checks whether all given flags are set for given address and chain.

        Does not validate the address against any other tables or components.
apps/cic-eth/cic_eth/encode.py (new file)
@@ -0,0 +1,16 @@
# external imports
from chainlib.eth.constant import ZERO_ADDRESS
from chainqueue.encode import TxHexNormalizer
from chainlib.eth.tx import unpack

tx_normalize = TxHexNormalizer()

ZERO_ADDRESS_NORMAL = tx_normalize.wallet_address(ZERO_ADDRESS)


def unpack_normal(signed_tx_bytes, chain_spec):
    tx = unpack(signed_tx_bytes, chain_spec)
    tx['hash'] = tx_normalize.tx_hash(tx['hash'])
    tx['from'] = tx_normalize.wallet_address(tx['from'])
    tx['to'] = tx_normalize.wallet_address(tx['to'])
    return tx
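A minimal usage sketch for the helpers introduced in cic_eth/encode.py (illustrative only; the address and hash values are made up, and the exact canonical output format is whatever chainqueue's TxHexNormalizer produces):

    from cic_eth.encode import tx_normalize, unpack_normal, ZERO_ADDRESS_NORMAL

    addr = tx_normalize.wallet_address('0xEb3907eCad74a0013c259D5874AE7f22DcBcC95C')  # hypothetical address
    txh = tx_normalize.tx_hash('0x' + 'ab' * 32)  # hypothetical transaction hash

    # For a signed raw transaction, unpack_normal() decodes it with chainlib's unpack()
    # and normalizes the 'hash', 'from' and 'to' fields in one step:
    #     tx = unpack_normal(tx_signed_raw_bytes, chain_spec)
    assert ZERO_ADDRESS_NORMAL == tx_normalize.wallet_address('0x' + '00' * 20)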
@ -14,10 +14,7 @@ from chainlib.eth.sign import (
|
||||
sign_message,
|
||||
)
|
||||
from chainlib.eth.address import to_checksum_address
|
||||
from chainlib.eth.tx import (
|
||||
TxFormat,
|
||||
unpack,
|
||||
)
|
||||
from chainlib.eth.tx import TxFormat
|
||||
from chainlib.chain import ChainSpec
|
||||
from chainlib.error import JSONRPCException
|
||||
from eth_accounts_index.registry import AccountRegistry
|
||||
@ -49,6 +46,10 @@ from cic_eth.eth.nonce import (
|
||||
from cic_eth.queue.tx import (
|
||||
register_tx,
|
||||
)
|
||||
from cic_eth.encode import (
|
||||
unpack_normal,
|
||||
ZERO_ADDRESS_NORMAL,
|
||||
)
|
||||
|
||||
logg = logging.getLogger()
|
||||
celery_app = celery.current_app
|
||||
@ -295,17 +296,17 @@ def cache_gift_data(
|
||||
chain_spec = ChainSpec.from_dict(chain_spec_dict)
|
||||
|
||||
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
|
||||
tx = unpack(tx_signed_raw_bytes, chain_spec)
|
||||
tx = unpack_normal(tx_signed_raw_bytes, chain_spec)
|
||||
tx_data = Faucet.parse_give_to_request(tx['data'])
|
||||
|
||||
session = self.create_session()
|
||||
|
||||
tx_dict = {
|
||||
'hash': tx_hash_hex,
|
||||
'hash': tx['hash'],
|
||||
'from': tx['from'],
|
||||
'to': tx['to'],
|
||||
'source_token': ZERO_ADDRESS,
|
||||
'destination_token': ZERO_ADDRESS,
|
||||
'source_token': ZERO_ADDRESS_NORMAL,
|
||||
'destination_token': ZERO_ADDRESS_NORMAL,
|
||||
'from_value': 0,
|
||||
'to_value': 0,
|
||||
}
|
||||
@ -334,17 +335,17 @@ def cache_account_data(
|
||||
:rtype: tuple
|
||||
"""
|
||||
chain_spec = ChainSpec.from_dict(chain_spec_dict)
|
||||
tx_signed_raw_bytes = bytes.fromhex(tx_signed_raw_hex[2:])
|
||||
tx = unpack(tx_signed_raw_bytes, chain_spec)
|
||||
tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
|
||||
tx = unpack_normal(tx_signed_raw_bytes, chain_spec)
|
||||
tx_data = AccountsIndex.parse_add_request(tx['data'])
|
||||
|
||||
session = SessionBase.create_session()
|
||||
tx_dict = {
|
||||
'hash': tx_hash_hex,
|
||||
'hash': tx['hash'],
|
||||
'from': tx['from'],
|
||||
'to': tx['to'],
|
||||
'source_token': ZERO_ADDRESS,
|
||||
'destination_token': ZERO_ADDRESS,
|
||||
'source_token': ZERO_ADDRESS_NORMAL,
|
||||
'destination_token': ZERO_ADDRESS_NORMAL,
|
||||
'from_value': 0,
|
||||
'to_value': 0,
|
||||
}
|
||||
|
@@ -4,7 +4,7 @@ import logging
# external imports
import celery
from hexathon import strip_0x
from chainlib.eth.constant import ZERO_ADDRESS
#from chainlib.eth.constant import ZERO_ADDRESS
from chainlib.chain import ChainSpec
from chainlib.eth.address import is_checksum_address
from chainlib.connection import RPCConnection
@@ -21,7 +21,6 @@ from chainlib.eth.error import (
from chainlib.eth.tx import (
    TxFactory,
    TxFormat,
    unpack,
)
from chainlib.eth.contract import (
    abi_decode_single,
@@ -45,6 +44,7 @@ from cic_eth.eth.nonce import CustodialTaskNonceOracle
from cic_eth.queue.tx import (
    queue_create,
    register_tx,
    unpack,
)
from cic_eth.queue.query import get_tx
from cic_eth.task import (
@@ -53,6 +53,11 @@ from cic_eth.task import (
    CriticalSQLAlchemyAndSignerTask,
    CriticalWeb3AndSignerTask,
)
from cic_eth.encode import (
    tx_normalize,
    ZERO_ADDRESS_NORMAL,
    unpack_normal,
)

celery_app = celery.current_app
logg = logging.getLogger()
@@ -66,6 +71,7 @@ class MaxGasOracle:
        return MAXIMUM_FEE_UNITS


#def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
def create_check_gas_task(tx_signed_raws_hex, chain_spec, holder_address, gas=None, tx_hashes_hex=None, queue=None):
    """Creates a celery task signature for a check_gas task that adds the task to the outgoing queue to be processed by the dispatcher.

@@ -130,16 +136,16 @@ def cache_gas_data(
    """
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    tx_signed_raw_bytes = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx = unpack(tx_signed_raw_bytes, chain_spec)
    tx = unpack_normal(tx_signed_raw_bytes, chain_spec)

    session = SessionBase.create_session()

    tx_dict = {
        'hash': tx_hash_hex,
        'hash': tx['hash'],
        'from': tx['from'],
        'to': tx['to'],
        'source_token': ZERO_ADDRESS,
        'destination_token': ZERO_ADDRESS,
        'source_token': ZERO_ADDRESS_NORMAL,
        'destination_token': ZERO_ADDRESS_NORMAL,
        'from_value': tx['value'],
        'to_value': tx['value'],
    }
@@ -150,7 +156,7 @@ def cache_gas_data(


@celery_app.task(bind=True, throws=(OutOfGasError), base=CriticalSQLAlchemyAndWeb3Task)
def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_required=MAXIMUM_FEE_UNITS):
def check_gas(self, tx_hashes_hex, chain_spec_dict, txs_hex=[], address=None, gas_required=MAXIMUM_FEE_UNITS):
    """Check the gas level of the sender address of a transaction.

    If the account balance is not sufficient for the required gas, gas refill is requested and OutOfGasError raised.
@@ -170,6 +176,20 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
    :return: Signed raw transaction data list
    :rtype: param txs, unchanged
    """
    if address != None:
        if not is_checksum_address(address):
            raise ValueError('invalid address {}'.format(address))
        address = tx_normalize.wallet_address(address)

    tx_hashes = []
    txs = []
    for tx_hash in tx_hashes_hex:
        tx_hash = tx_normalize.tx_hash(tx_hash)
        tx_hashes.append(tx_hash)
    for tx in txs_hex:
        tx = tx_normalize.tx_wire(tx)
        txs.append(tx)

    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    logg.debug('txs {} tx_hashes {}'.format(txs, tx_hashes))

@@ -187,9 +207,6 @@ def check_gas(self, tx_hashes, chain_spec_dict, txs=[], address=None, gas_requir
            raise ValueError('txs passed to check gas must all have same sender; had {} got {}'.format(address, tx['from']))
        addresspass.append(address)

    if not is_checksum_address(address):
        raise ValueError('invalid address {}'.format(address))

    queue = self.request.delivery_info.get('routing_key')

    conn = RPCConnection.connect(chain_spec)
@@ -304,6 +321,7 @@ def refill_gas(self, recipient_address, chain_spec_dict):
    # Determine value of gas tokens to send
    # if an uncompleted gas refill for the same recipient already exists, we still need to spend the nonce
    # however, we will perform a 0-value transaction instead
    recipient_address = tx_normalize.wallet_address(recipient_address)
    zero_amount = False
    session = SessionBase.create_session()
    status_filter = StatusBits.FINAL | StatusBits.NODE_ERROR | StatusBits.NETWORK_ERROR | StatusBits.UNKNOWN_ERROR
@@ -378,6 +396,7 @@ def resend_with_higher_gas(self, txold_hash_hex, chain_spec_dict, gas=None, defa
    :returns: Transaction hash
    :rtype: str, 0x-hex
    """
    txold_hash_hex = tx_normalize.tx_hash(txold_hash_hex)
    session = SessionBase.create_session()

    otx = Otx.load(txold_hash_hex, session)
@@ -15,6 +15,7 @@ from chainqueue.db.enum import (
# local imports
from cic_eth.db import SessionBase
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.encode import tx_normalize

celery_app = celery.current_app

@@ -22,6 +23,9 @@ logg = logging.getLogger()


def __balance_outgoing_compatible(token_address, holder_address):
    token_address = tx_normalize.executable_address(token_address)
    holder_address = tx_normalize.wallet_address(holder_address)

    session = SessionBase.create_session()
    q = session.query(TxCache.from_value)
    q = q.join(Otx)
@@ -58,6 +62,9 @@ def balance_outgoing(tokens, holder_address, chain_spec_dict):


def __balance_incoming_compatible(token_address, receiver_address):
    token_address = tx_normalize.executable_address(token_address)
    receiver_address = tx_normalize.wallet_address(receiver_address)

    session = SessionBase.create_session()
    q = session.query(TxCache.to_value)
    q = q.join(Otx)
@@ -110,7 +117,7 @@ def assemble_balances(balances_collection):
    logg.debug('received collection {}'.format(balances_collection))
    for c in balances_collection:
        for b in c:
            address = b['address']
            address = tx_normalize.executable_address(b['address'])
            if tokens.get(address) == None:
                tokens[address] = {
                    'address': address,
@@ -6,6 +6,7 @@ import celery
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db import SessionBase
from cic_eth.db.models.lock import Lock
from cic_eth.encode import tx_normalize

celery_app = celery.current_app

@@ -21,6 +22,9 @@ def get_lock(address=None):
    :returns: List of locks
    :rtype: list of dicts
    """
    if address != None:
        address = tx_normalize.wallet_address(address)

    session = SessionBase.create_session()
    q = session.query(
        Lock.date_created,
@@ -4,8 +4,8 @@ import datetime
# external imports
import celery
from chainlib.chain import ChainSpec
from chainlib.eth.tx import unpack
import chainqueue.sql.query
from chainlib.eth.tx import unpack
from chainqueue.db.enum import (
    StatusEnum,
    is_alive,
@@ -20,6 +20,10 @@ from cic_eth.db.enum import LockEnum
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db.models.lock import Lock
from cic_eth.db.models.base import SessionBase
from cic_eth.encode import (
    tx_normalize,
    unpack_normal,
)

celery_app = celery.current_app

@@ -27,49 +31,76 @@ celery_app = celery.current_app
@celery_app.task(base=CriticalSQLAlchemyTask)
def get_tx_cache(chain_spec_dict, tx_hash):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    return get_tx_cache_local(chain_spec, tx_hash)


def get_tx_cache_local(chain_spec, tx_hash, session=None):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    session = SessionBase.bind_session(session)
    r = chainqueue.sql.query.get_tx_cache(chain_spec, tx_hash, session=session)
    session.close()
    SessionBase.release_session(session)
    return r


@celery_app.task(base=CriticalSQLAlchemyTask)
def get_tx(chain_spec_dict, tx_hash):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    return get_tx_local(chain_spec, tx_hash)


def get_tx_local(chain_spec, tx_hash, session=None):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    session = SessionBase.bind_session(session)
    r = chainqueue.sql.query.get_tx(chain_spec, tx_hash, session=session)
    session.close()
    SessionBase.release_session(session)
    return r


@celery_app.task(base=CriticalSQLAlchemyTask)
def get_account_tx(chain_spec_dict, address, as_sender=True, as_recipient=True, counterpart=None):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    return get_account_tx_local(chain_spec, address, as_sender=as_sender, as_recipient=as_recipient, counterpart=counterpart)


def get_account_tx_local(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=None):
    address = tx_normalize.wallet_address(address)
    session = SessionBase.bind_session(session)
    r = chainqueue.sql.query.get_account_tx(chain_spec, address, as_sender=True, as_recipient=True, counterpart=None, session=session)
    session.close()
    SessionBase.release_session(session)
    return r


@celery_app.task(base=CriticalSQLAlchemyTask)
def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
def get_upcoming_tx_nolock(chain_spec_dict, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0):
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    return get_upcoming_tx_nolock_local(chain_spec, status=status, not_status=not_status, recipient=recipient, before=before, limit=limit)


def get_upcoming_tx_nolock_local(chain_spec, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
    recipient = tx_normalize.wallet_address(recipient)
    session = SessionBase.create_session()
    r = chainqueue.sql.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack)
    r = chainqueue.sql.query.get_upcoming_tx(chain_spec, status, not_status=not_status, recipient=recipient, before=before, limit=limit, session=session, decoder=unpack_normal)
    session.close()
    return r


def get_status_tx(chain_spec, status, not_status=None, before=None, exact=False, limit=0, session=None):
    return chainqueue.sql.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack)
    return chainqueue.sql.query.get_status_tx_cache(chain_spec, status, not_status=not_status, before=before, exact=exact, limit=limit, session=session, decoder=unpack_normal)


def get_paused_tx(chain_spec, status=None, sender=None, session=None, decoder=None):
    return chainqueue.sql.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack)
    sender = tx_normalize.wallet_address(sender)
    return chainqueue.sql.query.get_paused_tx_cache(chain_spec, status=status, sender=sender, session=session, decoder=unpack_normal)


def get_nonce_tx(chain_spec, nonce, sender):
    return get_nonce_tx_cache(chain_spec, nonce, sender, decoder=unpack)
    sender = tx_normalize.wallet_address(sender)
    return get_nonce_tx_local(chain_spec, nonce, sender)


def get_nonce_tx_local(chain_spec, nonce, sender, session=None):
    sender = tx_normalize.wallet_address(sender)
    return chainqueue.sql.query.get_nonce_tx_cache(chain_spec, nonce, sender, decoder=unpack_normal, session=session)


def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, recipient=None, before=None, limit=0, session=None):
@@ -91,6 +122,8 @@ def get_upcoming_tx(chain_spec, status=StatusEnum.READYSEND, not_status=None, re
    :returns: Transactions
    :rtype: dict, with transaction hash as key, signed raw transaction as value
    """
    if recipient != None:
        recipient = tx_normalize.wallet_address(recipient)
    session = SessionBase.bind_session(session)
    q_outer = session.query(
        TxCache.sender,
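The hunks above all follow one pattern: the celery task keeps a serializable interface (chain_spec_dict, plain hex strings) and delegates to a *_local function that takes a ChainSpec and may join an existing SQLAlchemy session. A sketch of calling the local variant directly (illustrative only; the chain spec string and hash are example values):

    from chainlib.chain import ChainSpec
    from cic_eth.queue.query import get_tx_local

    # The task variant builds its own session via SessionBase.create_session();
    # the *_local variant can bind to a caller-provided session through
    # SessionBase.bind_session(), or create one when none is passed.
    chain_spec = ChainSpec.from_chain_str('evm:bloxberg:8996')  # example chain spec string
    tx = get_tx_local(chain_spec, '0x' + 'ab' * 32)  # hypothetical tx hash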
@@ -6,12 +6,14 @@ import chainqueue.sql.state
import celery
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.db.models.base import SessionBase
from cic_eth.encode import tx_normalize

celery_app = celery.current_app


@celery_app.task(base=CriticalSQLAlchemyTask)
def set_sent(chain_spec_dict, tx_hash, fail=False):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_sent(chain_spec, tx_hash, fail, session=session)
@@ -21,6 +23,7 @@ def set_sent(chain_spec_dict, tx_hash, fail=False):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_final(chain_spec, tx_hash, block=block, tx_index=tx_index, fail=fail, session=session)
@@ -30,6 +33,7 @@ def set_final(chain_spec_dict, tx_hash, block=None, tx_index=None, fail=False):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_cancel(chain_spec_dict, tx_hash, manual=False):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_cancel(chain_spec, tx_hash, manual, session=session)
@@ -39,6 +43,7 @@ def set_cancel(chain_spec_dict, tx_hash, manual=False):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_rejected(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_rejected(chain_spec, tx_hash, session=session)
@@ -48,6 +53,7 @@ def set_rejected(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_fubar(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_fubar(chain_spec, tx_hash, session=session)
@@ -57,6 +63,7 @@ def set_fubar(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_manual(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_manual(chain_spec, tx_hash, session=session)
@@ -66,6 +73,7 @@ def set_manual(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_ready(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_ready(chain_spec, tx_hash, session=session)
@@ -75,6 +83,7 @@ def set_ready(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_reserved(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_reserved(chain_spec, tx_hash, session=session)
@@ -84,6 +93,7 @@ def set_reserved(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def set_waitforgas(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.set_waitforgas(chain_spec, tx_hash, session=session)
@@ -93,6 +103,7 @@ def set_waitforgas(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def get_state_log(chain_spec_dict, tx_hash):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.get_state_log(chain_spec, tx_hash, session=session)
@@ -102,6 +113,7 @@ def get_state_log(chain_spec_dict, tx_hash):

@celery_app.task(base=CriticalSQLAlchemyTask)
def obsolete(chain_spec_dict, tx_hash, final):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    chain_spec = ChainSpec.from_dict(chain_spec_dict)
    session = SessionBase.create_session()
    r = chainqueue.sql.state.obsolete_by_cache(chain_spec, tx_hash, final, session=session)
@@ -13,6 +13,7 @@ from chainqueue.error import NotLocalTxError
# local imports
from cic_eth.task import CriticalSQLAlchemyAndWeb3Task
from cic_eth.db.models.base import SessionBase
from cic_eth.encode import tx_normalize

celery_app = celery.current_app

@@ -20,6 +21,7 @@ logg = logging.getLogger()


def tx_times(tx_hash, chain_spec, session=None):
    tx_hash = tx_normalize.tx_hash(tx_hash)

    session = SessionBase.bind_session(session)

@@ -32,12 +32,16 @@ from cic_eth.db import SessionBase
from cic_eth.db.enum import LockEnum
from cic_eth.task import CriticalSQLAlchemyTask
from cic_eth.error import LockedError
from cic_eth.encode import tx_normalize

celery_app = celery.current_app
logg = logging.getLogger()


def queue_create(chain_spec, nonce, holder_address, tx_hash, signed_tx, session=None):
    tx_hash = tx_normalize.tx_hash(tx_hash)
    signed_tx = tx_normalize.tx_hash(signed_tx)
    holder_address = tx_normalize.wallet_address(holder_address)
    session = SessionBase.bind_session(session)

    lock = Lock.check_aggregate(str(chain_spec), LockEnum.QUEUE, holder_address, session=session)
@@ -67,6 +71,8 @@ def register_tx(tx_hash_hex, tx_signed_raw_hex, chain_spec, queue, cache_task=No
    :returns: Tuple; Transaction hash, signed raw transaction data
    :rtype: tuple
    """
    tx_hash_hex = tx_normalize.tx_hash(tx_hash_hex)
    tx_signed_raw_hex = tx_normalize.tx_hash(tx_signed_raw_hex)
    logg.debug('adding queue tx {}:{} -> {}'.format(chain_spec, tx_hash_hex, tx_signed_raw_hex))
    tx_signed_raw = bytes.fromhex(strip_0x(tx_signed_raw_hex))
    tx = unpack(tx_signed_raw, chain_spec)
@@ -10,15 +10,14 @@ from chainlib.eth.tx import unpack
from chainqueue.db.enum import StatusBits
from chainqueue.db.models.tx import TxCache
from chainqueue.db.models.otx import Otx
from chainqueue.sql.query import get_paused_tx_cache as get_paused_tx
from chainlib.eth.address import to_checksum_address

# local imports
from cic_eth.db.models.base import SessionBase
from cic_eth.eth.gas import create_check_gas_task
from cic_eth.queue.query import get_paused_tx
from .base import SyncFilter

#logg = logging.getLogger().getChild(__name__)
logg = logging.getLogger()
@@ -10,7 +10,7 @@ version = (
        0,
        12,
        4,
        'alpha.4',
        'alpha.7',
        )

version_object = semver.VersionInfo(
@@ -1,71 +0,0 @@
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev

WORKDIR /usr/src/cic-eth

# Copy just the requirements and install....this _might_ give docker a hint on caching but we
# do load these all into setup.py later
# TODO can we take all the requirements out of setup.py and just do a pip install -r requirements.txt && python setup.py
#COPY cic-eth/requirements.txt .

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
ARG EXTRA_PIP_ARGS=""
#RUN --mount=type=cache,mode=0755,target=/root/.cache/pip \
#    pip install --index-url https://pypi.org/simple \
#    --force-reinstall \
#    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
#    -r requirements.txt
COPY *requirements.txt .
RUN pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY \
    --extra-index-url $EXTRA_INDEX_URL \
    $EXTRA_PIP_ARGS \
    -r requirements.txt \
    -r services_requirements.txt \
    -r admin_requirements.txt

COPY . .
RUN python setup.py install

COPY docker/entrypoints/* ./
RUN chmod 755 *.sh

# # ini files in config directory defines the configurable parameters for the application
# # they can all be overridden by environment variables
# # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY config/ /usr/local/etc/cic-eth/
COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/

# TODO this kind of code sharing across projects should be discouraged...can we make util a library?
#COPY util/liveness/health.sh /usr/local/bin/health.sh
ENTRYPOINT []

# ------------------ PRODUCTION CONTAINER ----------------------
#FROM python:3.8.6-slim-buster as prod
#
#RUN apt-get update && \
#    apt install -y gnupg libpq-dev procps
#
#WORKDIR /root
#
#COPY --from=dev /usr/local/bin/ /usr/local/bin/
#COPY --from=dev /usr/local/lib/python3.8/site-packages/ \
#    /usr/local/lib/python3.8/site-packages/
#
#COPY docker/entrypoints/* ./
#RUN chmod 755 *.sh
#
## # ini files in config directory defines the configurable parameters for the application
## # they can all be overridden by environment variables
## # to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
#COPY config/ /usr/local/etc/cic-eth/
#COPY cic_eth/db/migrations/ /usr/local/share/cic-eth/alembic/
#COPY crypto_dev_signer_config/ /usr/local/etc/crypto-dev-signer/
#COPY scripts/ scripts/
#
## TODO this kind of code sharing across projects should be discouraged...can we make util a library?
##COPY util/liveness/health.sh /usr/local/bin/health.sh
#
#ENTRYPOINT []
#
apps/cic-eth/docker/run_tests.sh (new file)
@@ -0,0 +1,11 @@
#! /bin/bash

set -e

pip install --extra-index-url https://pip.grassrootseconomics.net:8433 --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
    -r admin_requirements.txt \
    -r services_requirements.txt \
    -r test_requirements.txt

export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
@@ -1,3 +1,3 @@
celery==4.4.7
chainlib-eth>=0.0.9a3,<0.1.0
chainlib-eth>=0.0.9a7,<0.1.0
semver==2.13.0
@@ -1,5 +1,5 @@
chainqueue>=0.0.3a2,<0.1.0
chainsyncer[sql]>=0.0.6a1,<0.1.0
chainqueue>=0.0.5a1,<0.1.0
chainsyncer[sql]>=0.0.6a3,<0.1.0
alembic==1.4.2
confini>=0.3.6rc4,<0.5.0
redis==3.5.3
@@ -1,7 +1,6 @@
# external imports
from chainlib.connection import RPCConnection
from chainlib.eth.nonce import OverrideNonceOracle
from chainqueue.sql.tx import create as queue_create
from chainlib.eth.tx import (
    TxFormat,
    unpack,
@@ -26,6 +25,8 @@ from chainqueue.db.enum import StatusBits
# local imports
from cic_eth.runnable.daemons.filters.gas import GasFilter
from cic_eth.eth.gas import cache_gas_data
from cic_eth.encode import tx_normalize
from cic_eth.queue.tx import queue_create


def test_filter_gas(
@@ -22,10 +22,11 @@ from hexathon import (
    strip_0x,
    add_0x,
)
from chainqueue.sql.query import get_account_tx

# local imports
from cic_eth.runnable.daemons.filters.register import RegistrationFilter
from cic_eth.encode import tx_normalize
from cic_eth.queue.query import get_account_tx_local

logg = logging.getLogger()

@@ -79,7 +80,7 @@ def test_register_filter(
    t.get_leaf()
    assert t.successful()

    gift_txs = get_account_tx(default_chain_spec.asdict(), agent_roles['ALICE'], as_sender=True, session=init_database)
    gift_txs = get_account_tx_local(default_chain_spec, agent_roles['ALICE'], as_sender=True, session=init_database)
    ks = list(gift_txs.keys())
    assert len(ks) == 1
apps/cic-eth/tests/run_tests.sh (new file)
@@ -0,0 +1,10 @@
#! /bin/bash

set -e

pip install --extra-index-url https://pip.grassrootseconomics.net:8433 --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
    -r admin_requirements.txt \
    -r services_requirements.txt \
    -r test_requirements.txt

export PYTHONPATH=. && pytest -x --cov=cic_eth --cov-fail-under=90 --cov-report term-missing tests
@@ -34,10 +34,6 @@ from chainqueue.sql.state import (
    set_ready,
    set_reserved,
)
from chainqueue.sql.query import (
    get_tx,
    get_nonce_tx_cache,
)

# local imports
from cic_eth.api.admin import AdminApi
@@ -46,6 +42,11 @@ from cic_eth.db.enum import LockEnum
from cic_eth.error import InitializationError
from cic_eth.eth.gas import cache_gas_data
from cic_eth.queue.tx import queue_create
from cic_eth.queue.query import (
    get_tx,
    get_nonce_tx_local,
)
from cic_eth.encode import tx_normalize

logg = logging.getLogger()

@@ -287,12 +288,14 @@ def test_fix_nonce(

    init_database.commit()

    txs = get_nonce_tx_cache(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
    logg.debug('!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
    txs = get_nonce_tx_local(default_chain_spec, 3, agent_roles['ALICE'], session=init_database)
    ks = txs.keys()
    assert len(ks) == 2

    for k in ks:
        hsh = add_0x(k)
        #hsh = add_0x(k)
        hsh = tx_normalize.tx_hash(k)
        otx = Otx.load(hsh, session=init_database)
        init_database.refresh(otx)
        logg.debug('checking nonce {} tx {} status {}'.format(3, otx.tx_hash, otx.status))
@@ -30,7 +30,6 @@ from chainqueue.sql.state import (
)
from chainqueue.db.models.otx import Otx
from chainqueue.db.enum import StatusBits
from chainqueue.sql.query import get_nonce_tx_cache
from eth_erc20 import ERC20
from cic_eth_registry import CICRegistry

@@ -38,6 +37,7 @@ from cic_eth_registry import CICRegistry
from cic_eth.api.admin import AdminApi
from cic_eth.eth.gas import cache_gas_data
from cic_eth.eth.erc20 import cache_transfer_data
from cic_eth.queue.query import get_nonce_tx_local

logg = logging.getLogger()

@@ -312,7 +312,7 @@ def test_resend_inplace(
    otx = Otx.load(tx_hash_hex, session=init_database)
    assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE

    txs = get_nonce_tx_cache(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
    txs = get_nonce_tx_local(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
    assert len(txs) == 2


@@ -363,10 +363,10 @@ def test_resend_clone(
    assert otx.status & StatusBits.IN_NETWORK == StatusBits.IN_NETWORK
    assert otx.status & StatusBits.OBSOLETE == StatusBits.OBSOLETE

    txs = get_nonce_tx_cache(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
    txs = get_nonce_tx_local(default_chain_spec, otx.nonce, agent_roles['ALICE'], session=init_database)
    assert len(txs) == 1

    txs = get_nonce_tx_cache(default_chain_spec, otx.nonce + 1, agent_roles['ALICE'], session=init_database)
    txs = get_nonce_tx_local(default_chain_spec, otx.nonce + 1, agent_roles['ALICE'], session=init_database)
    assert len(txs) == 1

    otx = Otx.load(txs[0], session=init_database)
@@ -21,7 +21,6 @@ from chainlib.eth.constant import (
    MINIMUM_FEE_UNITS,
    MINIMUM_FEE_PRICE,
)
from chainqueue.sql.tx import create as queue_create
from chainqueue.sql.query import get_tx
from chainqueue.db.enum import StatusBits
from chainqueue.sql.state import (
@@ -35,6 +34,7 @@ from hexathon import strip_0x
# local imports
from cic_eth.eth.gas import cache_gas_data
from cic_eth.error import OutOfGasError
from cic_eth.queue.tx import queue_create

logg = logging.getLogger()

@@ -51,6 +51,7 @@ def test_ext_tx_collate(
        tx_hash_hex,
        tx_signed_raw_hex,
    )
    otx.block = 666
    init_database.add(otx)
    init_database.commit()

@@ -46,6 +46,7 @@ def test_set(
        tx_hash_hex,
        tx_signed_raw_hex,
    )
    otx.block = 666
    init_database.add(otx)
    init_database.commit()

@@ -74,7 +75,6 @@ def test_set(
    assert (tx_stored.destination_token_address == ZERO_ADDRESS)
    assert (tx_stored.from_value == tx['value'])
    assert (tx_stored.to_value == to_value)
    assert (tx_stored.block_number == 666)
    assert (tx_stored.tx_index == 13)
@ -13,6 +13,7 @@ from cic_eth.queue.balance import (
|
||||
balance_incoming,
|
||||
assemble_balances,
|
||||
)
|
||||
from cic_eth.encode import tx_normalize
|
||||
|
||||
logg = logging.getLogger()
|
||||
|
||||
@ -51,8 +52,8 @@ def test_assemble():
|
||||
r = assemble_balances(b)
|
||||
logg.debug('r {}'.format(r))
|
||||
|
||||
assert r[0]['address'] == token_foo
|
||||
assert r[1]['address'] == token_bar
|
||||
assert r[0]['address'] == tx_normalize.executable_address(token_foo)
|
||||
assert r[1]['address'] == tx_normalize.executable_address(token_bar)
|
||||
assert r[0].get('balance_foo') != None
|
||||
assert r[0].get('balance_bar') != None
|
||||
assert r[1].get('balance_baz') != None
|
||||
@ -74,11 +75,11 @@ def test_outgoing_balance(
|
||||
token_address = '0x' + os.urandom(20).hex()
|
||||
sender = '0x' + os.urandom(20).hex()
|
||||
txc = TxCache(
|
||||
tx_hash,
|
||||
sender,
|
||||
recipient,
|
||||
token_address,
|
||||
token_address,
|
||||
tx_normalize.tx_hash(tx_hash),
|
||||
tx_normalize.wallet_address(sender),
|
||||
tx_normalize.wallet_address(recipient),
|
||||
tx_normalize.executable_address(token_address),
|
||||
tx_normalize.executable_address(token_address),
|
||||
1000,
|
||||
1000,
|
||||
session=init_database,
|
||||
@ -125,11 +126,11 @@ def test_incoming_balance(
token_address = '0x' + os.urandom(20).hex()
sender = '0x' + os.urandom(20).hex()
txc = TxCache(
tx_hash,
sender,
recipient,
token_address,
token_address,
tx_normalize.tx_hash(tx_hash),
tx_normalize.wallet_address(sender),
tx_normalize.wallet_address(recipient),
tx_normalize.executable_address(token_address),
tx_normalize.executable_address(token_address),
1000,
1000,
session=init_database,

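The two balance tests above now run every hash and address through tx_normalize before handing them to TxCache. A minimal sketch of that pattern, assuming only that cic_eth.encode.tx_normalize exposes the tx_hash, wallet_address and executable_address helpers used in the hunks (the exact normalization they apply is not visible in this diff):

# Sketch: mirrors the normalization step added in the balance tests.
import os

from cic_eth.encode import tx_normalize

tx_hash = '0x' + os.urandom(32).hex()
sender = '0x' + os.urandom(20).hex()
recipient = '0x' + os.urandom(20).hex()
token_address = '0x' + os.urandom(20).hex()

# Store and query with the normalized forms so later lookups match what was written.
normal_tx_hash = tx_normalize.tx_hash(tx_hash)
normal_sender = tx_normalize.wallet_address(sender)
normal_recipient = tx_normalize.wallet_address(recipient)
normal_token = tx_normalize.executable_address(token_address)
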
@ -21,6 +21,7 @@ from cic_eth.db.models.lock import Lock
from cic_eth.queue.query import get_upcoming_tx
from cic_eth.queue.tx import register_tx
from cic_eth.eth.gas import cache_gas_data
from cic_eth.encode import tx_normalize

# test imports
from tests.util.nonce import StaticNonceOracle
@ -39,8 +40,8 @@ def test_upcoming_with_lock(
gas_oracle = RPCGasOracle(eth_rpc)
c = Gas(default_chain_spec, signer=eth_signer, nonce_oracle=nonce_oracle, gas_oracle=gas_oracle)

alice_normal = add_0x(hex_uniform(strip_0x(agent_roles['ALICE'])))
bob_normal = add_0x(hex_uniform(strip_0x(agent_roles['BOB'])))
alice_normal = tx_normalize.wallet_address(agent_roles['ALICE'])
bob_normal = tx_normalize.wallet_address(agent_roles['BOB'])

(tx_hash_hex, tx_rpc) = c.create(alice_normal, bob_normal, 100 * (10 ** 6))
tx_signed_raw_hex = tx_rpc['params'][0]

@ -9,7 +9,7 @@ from cic_eth.db.models.lock import Lock
from cic_eth.db.enum import LockEnum
from cic_eth.error import LockedError
from cic_eth.queue.tx import queue_create

from cic_eth.encode import tx_normalize

def test_queue_lock(
init_database,
@ -21,6 +21,8 @@ def test_queue_lock(
address = '0x' + os.urandom(20).hex()
tx_hash = '0x' + os.urandom(32).hex()
tx_raw = '0x' + os.urandom(128).hex()
address_normal = tx_normalize.wallet_address(address)
tx_hash_normal = tx_normalize.tx_hash(tx_hash)

Lock.set(chain_str, LockEnum.QUEUE)
with pytest.raises(LockedError):
@ -32,7 +34,7 @@ def test_queue_lock(
tx_raw,
)

Lock.set(chain_str, LockEnum.QUEUE, address=address)
Lock.set(chain_str, LockEnum.QUEUE, address=address_normal)
with pytest.raises(LockedError):
queue_create(
default_chain_spec,
@ -52,7 +54,7 @@ def test_queue_lock(
tx_raw,
)

Lock.set(chain_str, LockEnum.QUEUE, address=address, tx_hash=tx_hash)
Lock.set(chain_str, LockEnum.QUEUE, address=address_normal, tx_hash=tx_hash_normal)
with pytest.raises(LockedError):
queue_create(
default_chain_spec,
@ -61,5 +63,3 @@ def test_queue_lock(
tx_hash,
tx_raw,
)

@ -1,5 +1,5 @@
crypto-dev-signer>=0.4.15a1,<=0.4.15
chainqueue>=0.0.3a1,<0.1.0
chainqueue>=0.0.5a1,<0.1.0
cic-eth-registry>=0.6.1a2,<0.7.0
redis==3.5.3
hexathon~=0.0.1a8

@ -1,43 +1,16 @@

.cic_meta_variables:
  variables:
    APP_NAME: cic-meta
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-cic-meta:
  extends:
    - .py_build_merge_request
    - .cic_meta_variables
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-meta/**/*
      when: always

test-mr-cic-meta:
  extends:
    - .cic_meta_variables
  stage: test
  image: $MR_IMAGE_TAG
  script:
    - cd /root
    - npm install --dev
    - npm run test
    - npm run test:coverage
  needs: ["build-mr-cic-meta"]
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-meta/**/*
      when: always

build-push-cic-meta:
  extends:
    - .py_build_push
    - .cic_meta_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/cic-meta/**/*
      when: always
build-test-cic-meta:
  stage: test
  tags:
    - integration
  variables:
    APP_NAME: cic-meta
    MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  script:
    - cd apps/cic-meta
    - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
    - docker run --entrypoint=sh $MR_IMAGE_TAG docker/run_tests.sh
  #rules:
  #- if: $CI_PIPELINE_SOURCE == "merge_request_event"
  #  changes:
  #    - apps/$APP_NAME/**/*
  #  when: always

@ -15,11 +15,10 @@ RUN --mount=type=cache,mode=0755,target=/root/.npm \
COPY webpack.config.js .
COPY tsconfig.json .
## required to build the cic-client-meta module
COPY src/ src/
COPY scripts/ scripts/
COPY tests/ tests/
COPY . .
COPY tests/*.asc /root/pgp/

## copy runtime configs
COPY .config/ /usr/local/etc/cic-meta/
#

@ -1,32 +0,0 @@
# syntax = docker/dockerfile:1.2
#FROM node:15.3.0-alpine3.10
FROM node:lts-alpine3.14

WORKDIR /root

RUN apk add --no-cache postgresql bash

# copy the dependencies
COPY package.json package-lock.json .
RUN npm set cache /root/.npm && \
    npm ci

COPY webpack.config.js .
COPY tsconfig.json .
## required to build the cic-client-meta module
COPY src/ src/
COPY scripts/ scripts/
COPY tests/ tests/
COPY tests/*.asc /root/pgp/

## copy runtime configs
COPY .config/ /usr/local/etc/cic-meta/
#
## db migrations
COPY docker/db.sh ./db.sh
RUN chmod 755 ./db.sh
#
RUN alias tsc=node_modules/typescript/bin/tsc
COPY docker/start_server.sh ./start_server.sh
RUN chmod 755 ./start_server.sh
ENTRYPOINT ["sh", "./start_server.sh"]

apps/cic-meta/docker/run_tests.sh (new file, 7 lines)
@ -0,0 +1,7 @@
#! /bin/bash

set -e

npm install --dev
npm run test
npm run test:coverage

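The build-test-cic-meta job above builds the image and then runs this script inside it. A rough local equivalent, assuming Docker is available; the image tag cic-meta-test is arbitrary:

# Local approximation of the build-test-cic-meta job.
cd apps/cic-meta
docker build -t cic-meta-test -f docker/Dockerfile .
docker run --entrypoint=sh cic-meta-test docker/run_tests.sh
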
@ -1,52 +1,17 @@
.cic_notify_variables:
  variables:
    APP_NAME: cic-notify
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-cic-notify:
  extends:
    - .py_build_merge_request
    - .cic_notify_variables
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-notify/**/*
      when: always

test-mr-cic-notify:
  stage: test
  extends:
    - .cic_notify_variables
  cache:
    key:
      files:
        - test_requirements.txt
    paths:
      - /root/.cache/pip
  image: $MR_IMAGE_TAG
  script:
    - cd apps/$APP_NAME/
    - >
      pip install --extra-index-url https://pip.grassrootseconomics.net:8433
      --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
      -r test_requirements.txt
    - export PYTHONPATH=. && pytest -x --cov=cic_notify --cov-fail-under=90 --cov-report term-missing tests
  needs: ["build-mr-cic-notify"]
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always

build-push-cic-notify:
  extends:
    - .py_build_push
    - .cic_notify_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/cic-notify/**/*
      when: always

build-test-cic-notify:
  stage: test
  tags:
    - integration
  variables:
    APP_NAME: cic-notify
    MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  script:
    - cd apps/cic-notify
    - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
    - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
  allow_failure: true
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always

@ -1,27 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev

#RUN pip install $pip_extra_index_url_flag cic-base[full_graph]==0.1.2a62

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
COPY requirements.txt .

RUN pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
    -r requirements.txt

COPY . .

RUN python setup.py install

COPY docker/*.sh .
RUN chmod +x *.sh

# ini files in config directory defines the configurable parameters for the application
# they can all be overridden by environment variables
# to generate a list of environment variables from configuration, use: confini-dump -z <dir> (executable provided by confini package)
COPY .config/ /usr/local/etc/cic-notify/
COPY cic_notify/db/migrations/ /usr/local/share/cic-notify/alembic/

ENTRYPOINT []

apps/cic-notify/docker/run_tests.sh (new file, 9 lines)
@ -0,0 +1,9 @@
#! /bin/bash

set -e

pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
    --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
    -r test_requirements.txt

export PYTHONPATH=. && pytest -x --cov=cic_notify --cov-fail-under=90 --cov-report term-missing tests

@ -1,52 +1,16 @@
.cic_ussd_variables:
  variables:
    APP_NAME: cic-ussd
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-cic-ussd:
  extends:
    - .py_build_merge_request
    - .cic_ussd_variables
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/cic-ussd/**/*
      when: always

test-mr-cic-ussd:
  stage: test
  extends:
    - .cic_ussd_variables
  cache:
    key:
      files:
        - test_requirements.txt
    paths:
      - /root/.cache/pip
  image: $MR_IMAGE_TAG
  script:
    - cd apps/$APP_NAME/
    - >
      pip install --extra-index-url https://pip.grassrootseconomics.net:8433
      --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple
      -r test_requirements.txt
    - export PYTHONPATH=. && pytest -x --cov=cic_ussd --cov-fail-under=90 --cov-report term-missing tests/cic_ussd
  needs: ["build-mr-cic-ussd"]
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always

build-push-cic-ussd:
  extends:
    - .py_build_push
    - .cic_ussd_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/cic-ussd/**/*
      when: always

build-test-cic-ussd:
  stage: test
  tags:
    - integration
  variables:
    APP_NAME: cic-ussd
    MR_IMAGE_TAG: mr-$APP_NAME-$CI_COMMIT_REF_SLUG-$CI_COMMIT_SHORT_SHA
  script:
    - cd apps/cic-ussd
    - docker build -t $MR_IMAGE_TAG -f docker/Dockerfile .
    - docker run $MR_IMAGE_TAG sh docker/run_tests.sh
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/$APP_NAME/**/*
      when: always

@ -1,7 +1,7 @@
# standard imports
import semver

version = (0, 3, 1, 'alpha.1')
version = (0, 3, 1, 'alpha.4')

version_object = semver.VersionInfo(
major=version[0],

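For reference, the bumped version tuple is rendered through semver.VersionInfo; a small self-contained sketch of the mapping (the minor, patch and prerelease keyword arguments are assumed to follow the same pattern as the major one shown above):

# Sketch: how the bumped tuple maps to a semver string.
import semver

version = (0, 3, 1, 'alpha.4')
version_object = semver.VersionInfo(
    major=version[0],
    minor=version[1],
    patch=version[2],
    prerelease=version[3],
)
print(str(version_object))  # 0.3.1-alpha.4
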
@ -1,32 +0,0 @@
# syntax = docker/dockerfile:1.2
FROM registry.gitlab.com/grassrootseconomics/cic-base-images:python-3.8.6-dev-55da5f4e as dev
RUN apt-get install -y redis-server

# create secrets directory
RUN mkdir -vp pgp/keys

# create application directory
RUN mkdir -vp cic-ussd
RUN mkdir -vp data

COPY requirements.txt .

ARG EXTRA_INDEX_URL="https://pip.grassrootseconomics.net:8433"
ARG GITLAB_PYTHON_REGISTRY="https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple"
RUN pip install --index-url https://pypi.org/simple \
    --extra-index-url $GITLAB_PYTHON_REGISTRY --extra-index-url $EXTRA_INDEX_URL \
    -r requirements.txt

COPY . .
RUN python setup.py install

COPY cic_ussd/db/ussd_menu.json data/

COPY docker/*.sh .
RUN chmod +x /root/*.sh

# copy config and migration files to definitive file so they can be referenced in path definitions for running scripts
COPY config/ /usr/local/etc/cic-ussd/
COPY cic_ussd/db/migrations/ /usr/local/share/cic-ussd/alembic

ENTRYPOINT []

apps/cic-ussd/docker/run_tests.sh (new file, 10 lines)
@ -0,0 +1,10 @@
#! /bin/bash

set -e

pip install --extra-index-url https://pip.grassrootseconomics.net:8433 \
    --extra-index-url https://gitlab.com/api/v4/projects/27624814/packages/pypi/simple \
    -r test_requirements.txt

export PYTHONPATH=. && pytest -x --cov=cic_ussd --cov-fail-under=90 --cov-report term-missing tests/cic_ussd

@ -1,7 +1,7 @@
alembic==1.4.2
bcrypt==3.2.0
celery==4.4.7
cic-eth[services]~=0.12.4a3
cic-eth[services]~=0.12.4a7
cic-notify~=0.4.0a10
cic-types~=0.1.0a14
confini>=0.4.1a1,<0.5.0

@ -1,25 +1,25 @@
.contract_migration_variables:
  variables:
    APP_NAME: contract-migration
    DOCKERFILE_PATH: docker/Dockerfile_ci
    CONTEXT: apps/$APP_NAME

build-mr-contract-migration:
  extends:
    - .py_build_merge_request
    - .contract_migration_variables
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - apps/contract-migration/**/*
      when: always

build-push-contract-migration:
  extends:
    - .py_build_push
    - .contract_migration_variables
  rules:
    - if: $CI_COMMIT_BRANCH == "master"
      changes:
        - apps/contract-migration/**/*
      when: always
#.contract_migration_variables:
#  variables:
#    APP_NAME: contract-migration
#    DOCKERFILE_PATH: docker/Dockerfile_ci
#    CONTEXT: apps/$APP_NAME
#
#build-mr-contract-migration:
#  extends:
#    - .py_build_merge_request
#    - .contract_migration_variables
#  rules:
#    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
#      changes:
#        - apps/contract-migration/**/*
#      when: always
#
#build-push-contract-migration:
#  extends:
#    - .py_build_push
#    - .contract_migration_variables
#  rules:
#    - if: $CI_COMMIT_BRANCH == "master"
#      changes:
#        - apps/contract-migration/**/*
#      when: always

@ -1,5 +1,5 @@
cic-eth[tools]==0.12.4a4
chainlib-eth>=0.0.9a3,<0.1.0
chainlib-eth>=0.0.9a7,<0.1.0
eth-erc20>=0.1.2a2,<0.2.0
erc20-demurrage-token>=0.0.5a2,<0.1.0
eth-accounts-index>=0.1.2a2,<0.2.0

@ -1,10 +1,10 @@
sarafu-faucet~=0.0.7a1
cic-eth[tools]~=0.12.4a4
cic-eth[tools]~=0.12.4a7
cic-types~=0.1.0a14
crypto-dev-signer>=0.4.15a1,<=0.4.15
faker==4.17.1
chainsyncer~=0.0.6a1
chainlib-eth~=0.0.9a4
chainsyncer~=0.0.6a3
chainlib-eth~=0.0.9a7
eth-address-index~=0.2.3a4
eth-contract-registry~=0.6.3a3
eth-accounts-index~=0.1.2a3

@ -13,20 +13,8 @@ networks:
    name: cic-network

services:
#  eth:
#    image: trufflesuite/ganache-cli
#    ports:
#      - ${HTTP_PORT_ETH:-8545}
#      - ${WS_PORT_ETH:-8546}
#    # Note! -e switch doesnt work, whatever you put there, it will be 100
#    command: "-i 8996 -e 1000 -l 90000000 \
#      -m '${DEV_MNEMONIC:-\"history stumble mystery avoid embark arrive mom foil pledge keep grain dice\"}' \
#      -v --db /tmp/cic/ganache/ganache.db \
#      --noVMErrorsOnRPCResponse --allowUnlimitedContractSize"
#    volumes:
#      - ganache-db:/tmp/cic/ganache

  eth:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/bloxberg-node:${TAG:-latest}
    build:
      context: apps/bloxbergValidatorSetup
    restart: unless-stopped
@ -71,6 +59,7 @@ services:
      - bee-data:/tmp/cic/bee

  contract-migration:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/contract-migration:${TAG:-latest}
    profiles:
      - migrations
    build:
@ -129,6 +118,7 @@ services:
      - contract-config:/tmp/cic/config

  cic-cache-tracker:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-cache:${TAG:-latest}
    profiles:
      - cache
    build:
@ -170,6 +160,7 @@ services:
      - contract-config:/tmp/cic/config/:ro

  cic-cache-tasker:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-cache:${TAG:-latest}
    profiles:
      - cache
    build:
@ -210,6 +201,7 @@ services:
      - contract-config:/tmp/cic/config/:ro

  cic-cache-server:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-cache:${TAG:-latest}
    profiles:
      - cache
    build:
@ -245,6 +237,7 @@ services:

  cic-eth-tasker:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-eth:${TAG:-latest}
    build:
      context: apps/cic-eth
      dockerfile: docker/Dockerfile
@ -298,6 +291,7 @@ services:
#    command: [/bin/sh, "./start_tasker.sh", -q, cic-eth, -vv ]

  cic-eth-tracker:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-eth:${TAG:-latest}
    build:
      context: apps/cic-eth
      dockerfile: docker/Dockerfile
@ -342,6 +336,7 @@ services:

  cic-eth-dispatcher:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-eth:${TAG:-latest}
    build:
      context: apps/cic-eth
      dockerfile: docker/Dockerfile
@ -386,6 +381,7 @@ services:

  cic-eth-retrier:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-eth:${TAG:-latest}
    build:
      context: apps/cic-eth
      dockerfile: docker/Dockerfile
@ -433,6 +429,7 @@ services:

  cic-notify-tasker:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-notify:${TAG:-latest}
    build:
      context: apps/cic-notify
      dockerfile: docker/Dockerfile
@ -461,6 +458,7 @@ services:

  cic-meta-server:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-meta:${TAG:-latest}
    profiles:
      - custodial-meta
    hostname: meta
@ -496,6 +494,7 @@ services:
#    command: "/root/start_server.sh -vv"

  cic-user-ussd-server:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-ussd:${TAG:-latest}
    profiles:
      - custodial-ussd
    build:
@ -528,6 +527,7 @@ services:
    command: "/root/start_cic_user_ussd_server.sh -vv"

  cic-user-server:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-ussd:${TAG:-latest}
    profiles:
      - custodial-ussd
    build:
@ -553,6 +553,7 @@ services:
    command: "/root/start_cic_user_server.sh -vv"

  cic-user-tasker:
    image: registry.gitlab.com/grassrootseconomics/cic-internal-integration/cic-ussd:${TAG:-latest}
    profiles:
      - custodial-ussd
    build:

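Several of the services above are now gated behind Compose profiles (migrations, cache, custodial-meta, custodial-ussd), so they are skipped unless a profile is selected explicitly. A usage sketch, assuming a docker-compose version that supports profiles:

# Start only the services declared under the "cache" profile.
docker-compose --profile cache up -d
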
scripts/build-push.sh (new executable file, 9 lines)
@ -0,0 +1,9 @@
#! /usr/bin/env sh

# Exit in case of error
set -e

TAG=${TAG?Variable not set} \
sh ./scripts/build.sh

docker-compose -f docker-compose.yml push

scripts/build.sh (new executable file, 9 lines)
@ -0,0 +1,9 @@
#! /usr/bin/env sh

# Exit in case of error
set -e

TAG=${TAG?Variable not set} \
docker-compose \
-f docker-compose.yml \
build

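Both scripts refuse to run unless TAG is set (${TAG?Variable not set}). A hypothetical local invocation that derives a throwaway tag from the current commit:

# Example only; any image tag accepted by the registry works here.
TAG=local-$(git rev-parse --short HEAD) sh ./scripts/build.sh
TAG=local-$(git rev-parse --short HEAD) sh ./scripts/build-push.sh
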
scripts/test-local.sh (new executable file, 15 lines)
@ -0,0 +1,15 @@
#! /usr/bin/env bash

# Exit in case of error
set -e

docker-compose down -v --remove-orphans # Remove possibly previous broken stacks left hanging after an error

if [ $(uname -s) = "Linux" ]; then
    echo "Remove __pycache__ files"
    sudo find . -type d -name __pycache__ -exec rm -r {} \+
fi

docker-compose build
docker-compose up -d
docker-compose exec -T backend bash /app/tests-start.sh "$@"

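test-local.sh forwards its arguments to /app/tests-start.sh inside the backend container. Assuming that entrypoint passes them on to pytest, extra flags could be supplied like this (hypothetical usage):

sh ./scripts/test-local.sh -x -k balance
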